gt stringclasses 1 value | context stringlengths 2.49k 119k |
|---|---|
# -*- coding: utf-8 -*-
'''
Control the Salt command interface
==================================
The Salt state is used to control the salt command interface. This state is
intended for use primarily from the state runner from the master.
The salt.state declaration can call out a highstate or a list of sls:
webservers:
salt.state:
- tgt: 'web*'
- sls:
- apache
- django
- core
- saltenv: prod
databases:
salt.state:
- tgt: role:database
- tgt_type: grain
- highstate: True
'''
# Import python libs
import logging
# Import salt libs
import salt.utils
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'salt'
def __virtual__():
'''
Named salt
'''
return __virtualname__
def state(
        name,
        tgt,
        ssh=False,
        tgt_type=None,
        ret='',
        highstate=None,
        sls=None,
        env=None,
        test=False,
        fail_minions='',
        allow_fail=0,
        **kwargs):
    '''
    Invoke a state run on a given target

    name
        An arbitrary name used to track the state execution

    tgt
        The target specification for the state run.

    tgt_type | expr_form
        The target type to resolve, defaults to glob

    ret
        Optionally set a single or a list of returners to use

    highstate
        Defaults to None, if set to True the target systems will ignore any
        sls references specified in the sls option and call state.highstate
        on the targeted minions

    sls
        A group of sls files to execute. This can be defined as a single string
        containing a single sls file, or a list of sls files

    saltenv
        The default salt environment to pull sls files from

    ssh
        Set to `True` to use the ssh client instead of the standard salt client

    roster
        In the event of using salt-ssh, a roster system can be set

    fail_minions
        An optional list of targeted minions where failure is an option
    '''
    # NOTE: the result dict is named state_ret so it does not shadow the
    # ``ret`` (returners) parameter.  Previously the local ``ret`` dict
    # overwrote the parameter, making ``if ret:`` always true and passing
    # the result dict itself to saltutil.cmd instead of the requested
    # returners.
    state_ret = {'name': name,
                 'changes': {},
                 'comment': '',
                 'result': True}
    if env is not None:
        msg = (
            'Passing a salt environment should be done using \'saltenv\' not '
            '\'env\'. This warning will go away in Salt Boron and this '
            'will be the default and expected behaviour. Please update your '
            'state files.'
        )
        salt.utils.warn_until('Boron', msg)
        state_ret.setdefault('warnings', []).append(msg)
        # No need to set __env__ = env since that's done in the state machinery

    cmd_kw = {'arg': []}
    # Honor the legacy 'expr_form' kwarg as an alias for tgt_type.
    if 'expr_form' in kwargs and not tgt_type:
        tgt_type = kwargs['expr_form']
    if not tgt_type:
        tgt_type = 'glob'
    cmd_kw['expr_form'] = tgt_type
    cmd_kw['ssh'] = ssh

    # highstate takes precedence over an sls list; one of the two is required.
    if highstate:
        fun = 'state.highstate'
    elif sls:
        fun = 'state.sls'
        if isinstance(sls, list):
            sls = ','.join(sls)
        cmd_kw['arg'].append(sls)
    else:
        state_ret['comment'] = 'No highstate or sls specified, no execution made'
        state_ret['result'] = False
        return state_ret

    if test:
        cmd_kw['arg'].append('test={0}'.format(test))
    if __env__ != 'base':
        cmd_kw['arg'].append('saltenv={0}'.format(__env__))
    # Forward the caller-requested returner(s), if any, to saltutil.cmd.
    if ret:
        cmd_kw['ret'] = ret

    if __opts__['test'] is True:
        state_ret['comment'] = (
            'State run to be executed on target {0} as test={1}'
        ).format(tgt, str(test))
        state_ret['result'] = None
        return state_ret

    cmd_ret = __salt__['saltutil.cmd'](tgt, fun, **cmd_kw)
    state_ret['changes'] = cmd_ret
    fail = set()
    if isinstance(fail_minions, str):
        fail_minions = [fail_minions]
    for minion, m_ret in cmd_ret.items():
        if minion in fail_minions:
            # Failures on these minions are explicitly tolerated.
            continue
        m_state = salt.utils.check_state_result(m_ret)
        if not m_state:
            fail.add(minion)
    if fail:
        state_ret['result'] = False
        state_ret['comment'] = 'Run failed on minions: {0}'.format(', '.join(fail))
        return state_ret
    state_ret['comment'] = 'States ran successfully on {0}'.format(
        ', '.join(cmd_ret))
    return state_ret
def function(
        name,
        tgt,
        ssh=False,
        tgt_type=None,
        ret='',
        arg=(),
        **kwargs):
    '''
    Execute a single module function on a remote minion via salt or salt-ssh

    name
        The name of the function to run, aka cmd.run or pkg.install

    tgt
        The target specification, aka '*' for all minions

    tgt_type | expr_form
        The target type, defaults to glob

    arg
        The list of arguments to pass into the function

    ret
        Optionally set a single or a list of returners to use

    ssh
        Set to `True` to use the ssh client instead of the standard salt client
    '''
    # NOTE: the result dict is named func_ret so it does not shadow the
    # ``ret`` (returners) parameter, which previously was overwritten before
    # it could be forwarded to saltutil.cmd.
    func_ret = {'name': name,
                'changes': {},
                'comment': '',
                'result': True}
    # Pass the caller-supplied arguments through (they were previously
    # discarded and an empty list sent instead).
    cmd_kw = {'arg': list(arg)}
    # Honor the legacy 'expr_form' kwarg as an alias for tgt_type.
    if 'expr_form' in kwargs and not tgt_type:
        tgt_type = kwargs['expr_form']
    if not tgt_type:
        tgt_type = 'glob'
    cmd_kw['expr_form'] = tgt_type
    cmd_kw['ssh'] = ssh
    fun = name
    if ret:
        cmd_kw['ret'] = ret
    cmd_ret = __salt__['saltutil.cmd'](tgt, fun, **cmd_kw)
    func_ret['changes'] = cmd_ret
    # Report the function name and the minions it ran on (the original
    # format string reused {0} for both, printing the minion list twice).
    func_ret['comment'] = 'Function {0} ran successfully on {1}'.format(
        name, ', '.join(cmd_ret))
    return func_ret
| |
import pytest
import demistomock as demisto
import json
import io
from TOPdesk import Client, INTEGRATION_NAME, MAX_API_PAGE_SIZE, \
fetch_incidents, entry_types_command, call_types_command, categories_command, subcategories_command, \
list_persons_command, list_operators_command, branches_command, get_incidents_list_command, \
get_incidents_with_pagination, incident_do_command, incident_touch_command, attachment_upload_command, \
escalation_reasons_command, deescalation_reasons_command, archiving_reasons_command, capitalize_for_outputs, \
list_attachments_command
def util_load_json(path):
    """Read the UTF-8 text file at *path* and return its parsed JSON content."""
    with io.open(path, mode='r', encoding='utf-8') as handle:
        return json.load(handle)
@pytest.fixture()
def client(requests_mock):
    """Client fixture for tests using the default client settings.

    Registers a mock for the /api/version endpoint (requested when the
    Client is constructed — presumably a connectivity/version check; this
    is why several tests below offset request counts by one) and returns
    a Client pointed at the mocked base URL with basic auth.
    """
    requests_mock.get(
        'https://test.com/api/version', json={"version": "3.1.4"})
    return Client(
        base_url='https://test.com/api/',
        verify=False,
        auth=('some_username', 'some_password')
    )
# Each case: (raw API output, expected output with keys capitalized).
# Capitalization is applied recursively to nested dict keys.
@pytest.mark.parametrize('outputs, expected_capitalized_output', [
    ([{"hiThere": "hi"}], [{"HiThere": "hi"}]),
    ([{"hi": "hi"}], [{"Hi": "hi"}]),
    ([{"hiThere": {"wellHello": "hi"}}], [{"HiThere": {"WellHello": "hi"}}]),
    ([{"hiThere": {"wellHello": {"hiyaThere": "hi"}}}], [{"HiThere": {"WellHello": {"HiyaThere": "hi"}}}]),
])
def test_capitalize_outputs(outputs, expected_capitalized_output):
    """Unit test

    Given
    - output of API command
    When
    - returning output to XSOAR
    Then
    - validate the output is capitalized.
    """
    assert capitalize_for_outputs(outputs) == expected_capitalized_output
# Each case: (command under test, API URL it must hit, mocked API response,
# expected CommandResults metadata). The command output itself is expected
# to be the mocked response with capitalized keys.
@pytest.mark.parametrize('command, command_api_url, mock_response, expected_results', [
    (entry_types_command,
     'https://test.com/api/incidents/entry_types',
     [{"id": "1st-id", "name": "entry-type-1"}, {"id": "2st-id", "name": "entry-type-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.EntryType',
         'outputs_key_field': 'Id'
     }),
    (call_types_command,
     'https://test.com/api/incidents/call_types',
     [{"id": "1st-id", "name": "call-type-1"}, {"id": "2st-id", "name": "call-type-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.CallType',
         'outputs_key_field': 'Id'
     }),
    (categories_command,
     'https://test.com/api/incidents/categories',
     [{"id": "1st-id", "name": "category-1"}, {"id": "2st-id", "name": "category-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Category',
         'outputs_key_field': 'Id'
     }),
    (subcategories_command,
     'https://test.com/api/incidents/subcategories',
     [{"id": "1st-id-sub", "name": "subcategory-1", "category": {"id": "1st-id", "name": "category-1"}},
      {"id": "2st-id-sub", "name": "subcategory-2", "category": {"id": "2st-id", "name": "category-2"}}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Subcategory',
         'outputs_key_field': 'Id'
     }),
    (escalation_reasons_command,
     'https://test.com/api/incidents/escalation-reasons',
     [{"id": "1st-id", "name": "escalation-name-1"}, {"id": "2st-id", "name": "escalation-name-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.EscalationReason',
         'outputs_key_field': 'Id'
     }),
    (deescalation_reasons_command,
     'https://test.com/api/incidents/deescalation-reasons',
     [{"id": "1st-id", "name": "deescalation-name-1"}, {"id": "2st-id", "name": "deescalation-name-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.DeescalationReason',
         'outputs_key_field': 'Id'
     }),
    (archiving_reasons_command,
     'https://test.com/api/archiving-reasons',
     [{"id": "1st-id", "name": "archiving-reason-1"}, {"id": "2st-id", "name": "archiving-reason-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.ArchiveReason',
         'outputs_key_field': 'Id'
     }),
])
def test_list_command(client, requests_mock, command, command_api_url, mock_response, expected_results):
    """Unit test

    Given
    - A command that returns a list
    When
    - running the command
    Then
    - validate the entry context
    """
    requests_mock.get(
        command_api_url, json=mock_response)
    command_results = command(client, {})
    assert command_results.outputs_prefix == expected_results['outputs_prefix']
    assert command_results.outputs_key_field == expected_results['outputs_key_field']
    assert command_results.outputs == capitalize_for_outputs(mock_response)
# Each case: (command, args with 'limit', API URL, mocked response, expected
# results). The expected outputs show the mocked list trimmed to 'limit'
# entries; a limit of '-1' appears to mean "no trimming" (full list expected).
@pytest.mark.parametrize('command, args, command_api_url, mock_response, expected_results', [
    (entry_types_command, {'limit': '1'},
     'https://test.com/api/incidents/entry_types',
     [{"id": "1st-id", "name": "entry-type-1"}, {"id": "2st-id", "name": "entry-type-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.EntryType',
         'outputs_key_field': 'Id',
         'outputs': [{"id": "1st-id", "name": "entry-type-1"}]}),
    (call_types_command, {'limit': '2'},
     'https://test.com/api/incidents/call_types',
     [{"id": "1st-id", "name": "call-type-1"}, {"id": "2st-id", "name": "call-type-2"},
      {"id": "3rd-id", "name": "call-type-3"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.CallType',
         'outputs_key_field': 'Id',
         'outputs': [{"id": "1st-id", "name": "call-type-1"}, {"id": "2st-id", "name": "call-type-2"}]}),
    (categories_command, {'limit': '-1'},
     'https://test.com/api/incidents/categories',
     [{"id": "1st-id", "name": "category-1"}, {"id": "2st-id", "name": "category-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Category',
         'outputs_key_field': 'Id',
         'outputs': [{"id": "1st-id", "name": "category-1"}, {"id": "2st-id", "name": "category-2"}]}),
    (subcategories_command, {'limit': '1'},
     'https://test.com/api/incidents/subcategories',
     [{"id": "1st-id-sub", "name": "subcategory-1", "category": {"id": "1st-id", "name": "category-1"}},
      {"id": "2st-id-sub", "name": "subcategory-2", "category": {"id": "2st-id", "name": "category-2"}}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Subcategory',
         'outputs_key_field': 'Id',
         'outputs': [{"id": "1st-id-sub", "name": "subcategory-1",
                      "category": {"id": "1st-id", "name": "category-1"}}]}),
    (escalation_reasons_command, {'limit': '2'},
     'https://test.com/api/incidents/escalation-reasons',
     [{"id": "1st-id", "name": "escalation-name-1"}, {"id": "2st-id", "name": "escalation-name-2"},
      {"id": "3rd", "name": "escalation-name-3"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.EscalationReason',
         'outputs_key_field': 'Id',
         'outputs': [{"id": "1st-id", "name": "escalation-name-1"}, {"id": "2st-id", "name": "escalation-name-2"}]}),
    (deescalation_reasons_command, {'limit': '-1'},
     'https://test.com/api/incidents/deescalation-reasons',
     [{"id": "1st-id", "name": "deescalation-name-1"}, {"id": "2st-id", "name": "deescalation-name-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.DeescalationReason',
         'outputs_key_field': 'Id',
         'outputs': [{"id": "1st-id", "name": "deescalation-name-1"},
                     {"id": "2st-id", "name": "deescalation-name-2"}]}),
    (archiving_reasons_command, {'limit': '1'},
     'https://test.com/api/archiving-reasons',
     [{"id": "1st-id", "name": "archiving-reason-1"}, {"id": "2st-id", "name": "archiving-reason-2"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.ArchiveReason',
         'outputs_key_field': 'Id',
         'outputs': [{"id": "1st-id", "name": "archiving-reason-1"}]})
])
def test_list_command_with_limit_arg(client, requests_mock, command, args, command_api_url, mock_response,
                                     expected_results):
    """Unit test

    Given
    - A command that returns a list
    When
    - running the command
    Then
    - validate the entry context
    """
    requests_mock.get(
        command_api_url, json=mock_response)
    command_results = command(client, args)
    assert command_results.outputs_prefix == expected_results['outputs_prefix']
    assert command_results.outputs_key_field == expected_results['outputs_key_field']
    assert command_results.outputs == capitalize_for_outputs(expected_results['outputs'])
# Each case: (command, API URL, path to a single mocked node fixture,
# id overrides used to clone that node into a multi-entry response,
# expected CommandResults metadata).
@pytest.mark.parametrize('command, command_api_url, mock_response_file, override_nodes, expected_results', [
    (list_persons_command,
     'https://test.com/api/persons',
     'test_data/topdesk_person.json',
     [{'id': '1st-person-id'}, {'id': '2nd-person-id'}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Person',
         'outputs_key_field': 'Id'
     }),
    (list_operators_command,
     'https://test.com/api/operators',
     'test_data/topdesk_operator.json',
     [{'id': '1st-operator-id'}, {'id': '2nd-operator-id'}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Operator',
         'outputs_key_field': 'Id'
     }),
    (branches_command,
     'https://test.com/api/branches',
     'test_data/topdesk_branch.json',
     [{"id": "1st-branch-id"}, {"id": "2nd-branch-id"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Branch',
         'outputs_key_field': 'Id'
     }),
    (get_incidents_list_command,
     'https://test.com/api/incidents',
     'test_data/topdesk_incident.json',
     [{"id": "1st-incident-id"}, {"id": "2nd-incident-id"}],
     {
         'outputs_prefix': f'{INTEGRATION_NAME}.Incident',
         'outputs_key_field': 'Id'
     })
])
def test_large_output_list_command(client,
                                   requests_mock,
                                   command,
                                   command_api_url,
                                   mock_response_file,
                                   override_nodes,
                                   expected_results):
    """Unit test

    Given
    - a command that returns a list
    - file path of mocked response
    When
    - running the command
    Then
    - validate the entry context
    """
    # Build a multi-entry response by cloning the fixture node once per
    # override, changing only the 'id' field.
    mock_topdesk_node = util_load_json(mock_response_file)
    mock_topdesk_response = []
    for node_override in override_nodes:
        response_node = mock_topdesk_node.copy()
        response_node['id'] = node_override['id']
        mock_topdesk_response.append(response_node)
    requests_mock.get(
        command_api_url, json=mock_topdesk_response)
    command_results = command(client, {})
    assert command_results.outputs_prefix == expected_results['outputs_prefix']
    assert command_results.outputs_key_field == expected_results['outputs_key_field']
    assert command_results.outputs == capitalize_for_outputs(mock_topdesk_response)
# Each case: (action name, command args, PUT URL expected to be called,
# fixture file for the mocked response, field override applied to it).
# Cases cover lookup by id, by number, and by both (id takes precedence).
@pytest.mark.parametrize('action, command_args, command_api_url, mock_response_file, override_node', [
    ("escalate",
     {"id": "incident_id", "escalate_reason_id": "some_reason"},
     'https://test.com/api/incidents/id/incident_id/escalate',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'}),
    ("deescalate",
     {"id": "incident_id", "deescalate_reason_id": "some_reason"},
     'https://test.com/api/incidents/id/incident_id/deescalate',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'}),
    ("archive",
     {"id": "incident_id", "archive_reason_id": "some_reason"},
     'https://test.com/api/incidents/id/incident_id/archive',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'}),
    ("unarchive",
     {"id": "incident_id"},
     'https://test.com/api/incidents/id/incident_id/unarchive',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'}),
    ("escalate",
     {"number": "incident_number", "escalate_reason_id": "some_reason"},
     'https://test.com/api/incidents/number/incident_number/escalate',
     'test_data/topdesk_incident.json',
     {'number': 'incident_number'}),
    ("deescalate",
     {"number": "incident_number", "deescalate_reason_id": "some_reason"},
     'https://test.com/api/incidents/number/incident_number/deescalate',
     'test_data/topdesk_incident.json',
     {'number': 'incident_number'}),
    ("archive",
     {"number": "incident_number", "archive_reason_id": "some_reason"},
     'https://test.com/api/incidents/number/incident_number/archive',
     'test_data/topdesk_incident.json',
     {'number': 'incident_number'}),
    ("unarchive",
     {"number": "incident_number"},
     'https://test.com/api/incidents/number/incident_number/unarchive',
     'test_data/topdesk_incident.json',
     {'number': 'incident_number'}),
    ("escalate",
     {"id": "incident_id", "number": "incident_number", "escalate_reason_id": "some_reason"},
     'https://test.com/api/incidents/id/incident_id/escalate',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'}),
    ("deescalate",
     {"id": "incident_id", "number": "incident_number", "deescalate_reason_id": "some_reason"},
     'https://test.com/api/incidents/id/incident_id/deescalate',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'}),
    ("archive",
     {"id": "incident_id", "number": "incident_number", "archive_reason_id": "some_reason"},
     'https://test.com/api/incidents/id/incident_id/archive',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'}),
    ("unarchive",
     {"id": "incident_id", "number": "incident_number"},
     'https://test.com/api/incidents/id/incident_id/unarchive',
     'test_data/topdesk_incident.json',
     {'id': 'incident_id'})
])
def test_incident_do_commands(client,
                              requests_mock,
                              action,
                              command_args,
                              command_api_url,
                              mock_response_file,
                              override_node):
    """Unit test

    Given
    - action: archive, unarchive, escalate, deescalate
    - command args: id, number, reason_id
    When
    - running incident_do_command with the action and args
    Then
    - validate the correct request was called.
    - validate the entry context.
    """
    mock_topdesk_node = util_load_json(mock_response_file)
    response_incident = mock_topdesk_node.copy()
    if override_node.get('id', None):
        response_incident['id'] = override_node['id']
    elif override_node.get('number', None):
        response_incident['number'] = override_node['number']
    requests_mock.put(
        command_api_url, json=response_incident)
    command_results = incident_do_command(client=client,
                                          args=command_args,
                                          action=action)
    assert requests_mock.called
    # When a reason id was supplied for the action, it must be sent as the
    # request body ({'id': <reason>}); otherwise the body must be empty.
    if command_args.get(f"{action}_reason_id", None):
        assert requests_mock.last_request.json() == {'id': command_args.get(f"{action}_reason_id", None)}
    else:
        assert requests_mock.last_request.json() == {}
    assert command_results.outputs_prefix == f'{INTEGRATION_NAME}.Incident'
    assert command_results.outputs_key_field == 'Id'
    assert command_results.outputs == capitalize_for_outputs([response_incident])
# Each case: (command args, POST URL expected to be called, the non-file
# form fields expected in the request body).
# NOTE(review): "invisivle_for_caller" is spelled this way in the
# integration's argument names — do not "fix" it here without changing
# the integration itself.
@pytest.mark.parametrize('command_args, command_api_url, command_api_body', [
    ({"id": "incident_id", "file": "some_entry_id", "invisivle_for_caller": "false"},
     'https://test.com/api/incidents/id/incident_id/attachments',
     {"invisivle_for_caller": "false"}),
    ({"id": "incident_id", "file": "some_entry_id", "description": "some description"},
     'https://test.com/api/incidents/id/incident_id/attachments',
     {"description": "some description"}),
    ({"id": "incident_id", "file": "some_entry_id", "description": "some description", "invisivle_for_caller": "false"},
     'https://test.com/api/incidents/id/incident_id/attachments',
     {"description": "some description", "invisivle_for_caller": "false"}),
    ({"id": "incident_id", "file": "some_entry_id"},
     'https://test.com/api/incidents/id/incident_id/attachments',
     {})
])
def test_attachment_upload_command(client,
                                   mocker,
                                   requests_mock,
                                   command_args,
                                   command_api_url,
                                   command_api_body):
    """Unit test

    Given
    - command args: id, file, description, invisivle_for_caller
    When
    - running attachment_upload_command with the command args
    Then
    - validate the correct request was called.
    - validate the file is in the request.
    - validate the entry context.
    """
    mock_topdesk_node = util_load_json('test_data/topdesk_attachment.json')
    response_attachment = mock_topdesk_node.copy()
    requests_mock.post(
        command_api_url, json=response_attachment)
    # Mock the XSOAR file-entry resolution so the command reads a local
    # test file instead of a war-room entry.
    mocker.patch.object(demisto, 'dt', return_value="made_up_file.txt")
    mocker.patch.object(demisto, 'getFilePath', return_value={'path': 'test_data/mock_upload_file.txt'})
    command_results = attachment_upload_command(client=client,
                                                args=command_args)
    # The command is expected to expand the relative downloadUrl to a full URL.
    output_attachment = response_attachment
    output_attachment['downloadUrl'] = 'https://test.com/api/incidents/id/incident_id/attachments/some-id/download'
    assert requests_mock.called
    # The uploaded file's content must appear in the multipart request body.
    assert b'mock text file for attachment up' in requests_mock.last_request._request.body
    assert command_results.outputs_prefix == f'{INTEGRATION_NAME}.Attachment'
    assert command_results.outputs_key_field == 'Id'
    assert command_results.outputs == capitalize_for_outputs([output_attachment])
# Each case: (command args, GET URL expected to be called, attachment
# overrides; 'expected' marks entries that should survive the limit arg).
@pytest.mark.parametrize('command_args, command_api_url, response_override', [
    ({"incident_id": "incident_id"},
     'https://test.com/api/incidents/id/incident_id/attachments',
     [{"id": "attachment-id-1", "downloadUrl": "/api/incidents/id/incident_id/attachments/attachment-id-1",
       "expected": True}]),
    ({"incident_id": "incident_id", 'limit': '1'},
     'https://test.com/api/incidents/id/incident_id/attachments',
     [{"id": "attachment-id-1", "downloadUrl": "/api/incidents/id/incident_id/attachments/attachment-id-1",
       "expected": True},
      {"id": "attachment-id-2", "downloadUrl": "/api/incidents/id/incident_id/attachments/attachment-id-2",
       "expected": False}]),
    ({"incident_number": "incident_number"},
     'https://test.com/api/incidents/number/incident_number/attachments',
     [{"id": "attachment-id-1", "downloadUrl": "/api/incidents/id/incident_id/attachments/attachment-id-1",
       "expected": True}])
])
def test_attachment_list_command(client,
                                 requests_mock,
                                 command_args,
                                 command_api_url,
                                 response_override):
    """Unit test

    Given
    - command args: incident_id or incident_number, optional limit
    When
    - running list_attachments_command with the command args
    Then
    - validate the correct request was called.
    - validate the limit is applied and downloadUrl is expanded to a
      full URL in the entry context.
    """
    mock_topdesk_node = util_load_json('test_data/topdesk_attachment.json')
    response = []
    expected = []
    for attachment_override in response_override:
        response_attachment = mock_topdesk_node.copy()
        response_attachment["id"] = attachment_override["id"]
        response_attachment["downloadUrl"] = attachment_override["downloadUrl"]
        response.append(response_attachment)
        if attachment_override["expected"]:
            # Expected output carries the absolute download URL.
            expected_attachment = mock_topdesk_node.copy()
            expected_attachment["id"] = attachment_override["id"]
            expected_attachment["downloadUrl"] = f'https://test.com{attachment_override["downloadUrl"]}'
            expected.append(expected_attachment)
    requests_mock.get(command_api_url, json=response)
    command_results = list_attachments_command(client=client,
                                               args=command_args)
    assert command_results.outputs_prefix == f'{INTEGRATION_NAME}.Attachment'
    assert command_results.outputs_key_field == 'Id'
    assert command_results.outputs == capitalize_for_outputs(expected)
# Each case: (True=create/False=update, command args, expected URL,
# response fixture, JSON body expected in the last request). A registered
# caller is sent via 'callerLookup'.
@pytest.mark.parametrize('create_func, command_args, command_api_url, mock_response_file,'
                         ' expected_last_request_body', [
    (True,  # Create
     {"caller": "some_caller"},
     'https://test.com/api/incidents/',
     'test_data/topdesk_incident.json',
     {'callerLookup': {'id': 'some_caller'}}),
    (True,  # Create
     {"caller": "some_caller", "description": "some_change"},
     'https://test.com/api/incidents/',
     'test_data/topdesk_incident.json',
     {'callerLookup': {'id': 'some_caller'},
      'briefDescription': 'some_change'}),
    (True,  # Create
     {"caller": "some_caller", "description": "some_change", "category": "some_category_id"},
     'https://test.com/api/incidents/',
     'test_data/topdesk_incident.json',
     {'callerLookup': {'id': 'some_caller'},
      'briefDescription': 'some_change', 'category': {'name': 'some_category_id'}}),
    (False,  # Update
     {"caller": "some_caller", "id": "incident_id"},
     'https://test.com/api/incidents/id/incident_id',
     'test_data/topdesk_incident.json',
     {'callerLookup': {'id': 'some_caller'}}),
    (False,  # Update
     {"caller": "some_caller", "number": "incident_number"},
     'https://test.com/api/incidents/number/incident_number',
     'test_data/topdesk_incident.json',
     {'callerLookup': {'id': 'some_caller'}}),
    (False,  # Update
     {"caller": "some_caller", "number": "incident_number", "description": "some_change"},
     'https://test.com/api/incidents/number/incident_number',
     'test_data/topdesk_incident.json',
     {'callerLookup': {'id': 'some_caller'},
      'briefDescription': 'some_change'})
])
def test_caller_lookup_incident_touch_commands(client,
                                               requests_mock,
                                               create_func,
                                               command_args,
                                               command_api_url,
                                               mock_response_file,
                                               expected_last_request_body):
    """Unit test

    Given
    - whether the command is Create or Update
    - command args
    When
    - running the command with a caller as a registered caller.
    Then
    - validate 1 request was called.
    - validate the correct request was called.
    - validate the entry context.
    """
    # Select the client method / HTTP verb matching create vs. update.
    client_func = client.update_incident
    request_method = "put"
    action = "updating"
    if create_func:
        client_func = client.create_incident
        request_method = "post"
        action = "creating"
    mock_topdesk_node = util_load_json(mock_response_file)
    response_incident = mock_topdesk_node.copy()
    request_command = getattr(requests_mock, request_method)
    request_command(command_api_url, json=response_incident)
    command_results = incident_touch_command(client=client,
                                             args=command_args,
                                             client_func=client_func,
                                             action=action)
    # 2 = version request at client creation + the command request itself.
    assert requests_mock.call_count == 2
    assert requests_mock.last_request.json() == expected_last_request_body
    assert command_results.outputs_prefix == f'{INTEGRATION_NAME}.Incident'
    assert command_results.outputs_key_field == 'Id'
    assert command_results.outputs == capitalize_for_outputs([response_incident])
# Each case: (True=create/False=update, command args, expected URL,
# response fixture, JSON body expected in the final retry request).
# A non-registered caller ends up sent via 'caller.dynamicName'.
@pytest.mark.parametrize('create_func, command_args, command_api_url, mock_response_file,'
                         ' expected_last_request_body', [
    (True,  # Create
     {"caller": "some_caller"},
     'https://test.com/api/incidents/',
     'test_data/topdesk_incident.json',
     {'caller': {'dynamicName': 'some_caller'}}),
    (False,  # Update
     {"caller": "some_caller", "id": "incident_id"},
     'https://test.com/api/incidents/id/incident_id',
     'test_data/topdesk_incident.json',
     {'caller': {'dynamicName': 'some_caller'}}),
    (False,  # Update
     {"caller": "some_caller", "number": "incident_number"},
     'https://test.com/api/incidents/number/incident_number',
     'test_data/topdesk_incident.json',
     {'caller': {'dynamicName': 'some_caller'}}),
])
def test_non_registered_caller_incident_touch_commands(client,
                                                       requests_mock,
                                                       create_func,
                                                       command_args,
                                                       command_api_url,
                                                       mock_response_file,
                                                       expected_last_request_body):
    """Unit test

    Given
    - whether the command is Create or Update
    - command args
    When
    - running the command with a caller as a non registered caller.
    Then
    - validate 2 requests were called.
    - validate the entry context.
    """
    # Select the client method / HTTP verb matching create vs. update.
    client_func = client.update_incident
    request_method = "put"
    action = "updating"
    if create_func:
        client_func = client.create_incident
        request_method = "post"
        action = "creating"
    mock_topdesk_node = util_load_json(mock_response_file)
    response_incident = mock_topdesk_node.copy()
    request_command = getattr(requests_mock, request_method)

    def callback_func(request, _):
        # Simulate TOPdesk rejecting a callerLookup for an unregistered
        # caller; the command is expected to retry with caller.dynamicName.
        if 'callerLookup' in request.json():
            return {"message": "The value for the field 'callerLookup.id' cannot be parsed."}
        else:
            return response_incident

    request_command(command_api_url, json=callback_func)
    command_results = incident_touch_command(client=client,
                                             args=command_args,
                                             client_func=client_func,
                                             action=action)
    # 3 = version request + rejected callerLookup attempt + dynamicName retry.
    assert requests_mock.call_count == 3
    assert requests_mock.last_request.json() == expected_last_request_body
    assert command_results.outputs_prefix == f'{INTEGRATION_NAME}.Incident'
    assert command_results.outputs_key_field == 'Id'
    assert command_results.outputs == capitalize_for_outputs([response_incident])
# Each case: (command, args, exact request URL expected — including how
# page_size/start/query/fields are encoded per endpoint; note incidents
# inline the query directly while other endpoints wrap it in 'query=').
@pytest.mark.parametrize('command, command_args, command_api_request', [
    (branches_command,
     {'page_size': 2},
     'https://test.com/api/branches?page_size=2'),
    (branches_command,
     {'start': 2},
     'https://test.com/api/branches?start=2'),
    (branches_command,
     {'query': 'id==1st-branch-id'},
     'https://test.com/api/branches?query=id==1st-branch-id'),
    (branches_command,
     {'page_size': 2, 'start': 2, 'query': 'id==1st-branch-id'},
     'https://test.com/api/branches?start=2&page_size=2&query=id==1st-branch-id'),
    (branches_command,
     {'page_size': 2, 'query': 'id==1st-branch-id'},
     'https://test.com/api/branches?page_size=2&query=id==1st-branch-id'),
    (branches_command,
     {'fields': 'id,name'},
     'https://test.com/api/branches?$fields=id,name'),
    (list_operators_command,
     {'page_size': 2},
     'https://test.com/api/operators?page_size=2'),
    (list_operators_command,
     {'start': 2},
     'https://test.com/api/operators?start=2'),
    (list_operators_command,
     {'query': 'id==1st-operator-id'},
     'https://test.com/api/operators?query=id==1st-operator-id'),
    (list_operators_command,
     {'page_size': 2, 'start': 2, 'query': 'id==1st-operator-id'},
     'https://test.com/api/operators?start=2&page_size=2&query=id==1st-operator-id'),
    (list_operators_command,
     {'page_size': 2, 'query': 'id==1st-operator-id'},
     'https://test.com/api/operators?page_size=2&query=id==1st-operator-id'),
    (list_persons_command,
     {'page_size': 2},
     'https://test.com/api/persons?page_size=2'),
    (list_persons_command,
     {'start': 2},
     'https://test.com/api/persons?start=2'),
    (list_persons_command,
     {'query': 'id==1st-person-id'},
     'https://test.com/api/persons?query=id==1st-person-id'),
    (list_persons_command,
     {'page_size': 2, 'start': 2, 'query': 'id==1st-person-id'},
     'https://test.com/api/persons?start=2&page_size=2&query=id==1st-person-id'),
    (list_persons_command,
     {'page_size': 2, 'query': 'id==1st-person-id'},
     'https://test.com/api/persons?page_size=2&query=id==1st-person-id'),
    (list_persons_command,
     {'fields': 'id,status'},
     'https://test.com/api/persons?$fields=id,status'),
    (get_incidents_list_command,
     {'page_size': 2},
     'https://test.com/api/incidents?page_size=2'),
    (get_incidents_list_command,
     {'start': 2},
     'https://test.com/api/incidents?start=2'),
    (get_incidents_list_command,
     {'query': 'id=1st-incident-id'},
     'https://test.com/api/incidents?id=1st-incident-id'),
    (get_incidents_list_command,
     {'page_size': 2, 'start': 2, 'query': 'id=1st-incident-id'},
     'https://test.com/api/incidents?start=2&page_size=2&id=1st-incident-id'),
    (get_incidents_list_command,
     {'page_size': 2, 'query': 'id=1st-incident-id'},
     'https://test.com/api/incidents?page_size=2&id=1st-incident-id'),
    (get_incidents_list_command,
     {'fields': 'id,number'},
     'https://test.com/api/incidents?fields=id,number')
])
def test_large_output_list_command_with_args(client,
                                             requests_mock,
                                             command,
                                             command_args,
                                             command_api_request):
    """Unit test

    Given
    - command that returns a list
    - command args: page_size, start, query
    When
    - running the command with given args
    Then
    - validate the correct request was called
    - validate the request body is as expected
    """
    requests_mock.get(
        command_api_request, json=[{}])
    command(client, command_args)
    assert requests_mock.called
    # All arguments travel in the query string; the JSON body stays empty.
    assert requests_mock.last_request.json() == {}
# Each case: (function args, list of (expected URL, expected JSON body)
# per page request, expected number of page requests). Requests are capped
# at MAX_API_PAGE_SIZE per page, so larger max_fetch values paginate.
@pytest.mark.parametrize('command_args, command_api_request, call_count', [
    ({'max_fetch': 2,
      'creation_date_start': '2020-02-10T06:32:36Z',
      'creation_date_end': '2020-03-10T06:32:36Z',
      'query': 'id=1st-incident-id'},
     [('https://test.com/api/incidents?page_size=2&id=1st-incident-id',
       {'creation_date_start': '2020-02-10T06:32:36Z',
        'creation_date_end': '2020-03-10T06:32:36Z'})], 1),
    ({'max_fetch': 2 * MAX_API_PAGE_SIZE,
      'creation_date_start': '2020-02-10T06:32:36Z',
      'creation_date_end': '2020-03-10T06:32:36Z',
      'query': 'id=1st-incident-id'},
     [(f'https://test.com/api/incidents?page_size={MAX_API_PAGE_SIZE}&id=1st-incident-id',
       {'creation_date_start': '2020-02-10T06:32:36Z',
        'creation_date_end': '2020-03-10T06:32:36Z'}),
      (f'https://test.com/api/incidents'
       f'?start={MAX_API_PAGE_SIZE}&page_size={MAX_API_PAGE_SIZE}&id=1st-incident-id',
       {'creation_date_start': '2020-02-10T06:32:36Z',
        'creation_date_end': '2020-03-10T06:32:36Z'})], 2)
])
def test_get_incidents_with_pagination(client,
                                       requests_mock,
                                       command_args,
                                       command_api_request,
                                       call_count):
    """Unit test

    Given
    - start, modification_date_start, modification_date_end and query arguments.
    When
    - running get_incidents_with_pagination function with arguments.
    Then
    validate the pagination logic is implemented correctly:
    - validate the correct parameters in the request.
    - validate the number of requests preformed.
    """
    for request in command_api_request:
        requests_mock.get(
            request[0], json=[{}])
    get_incidents_with_pagination(client=client,
                                  max_fetch=command_args.get('max_fetch', None),
                                  query=command_args.get('query', None),
                                  creation_date_start=command_args.get('creation_date_start', None),
                                  creation_date_end=command_args.get('creation_date_end', None))
    # Skip request_history[0] (the version request made at client creation)
    # and compare each page request's URL and JSON body to the expectation.
    for called_request, mocked_request in zip(requests_mock._adapter.request_history[1:], command_api_request):
        assert called_request._request.url == mocked_request[0]
        assert called_request.json() == mocked_request[1]
    # +1 accounts for the client-creation version request.
    assert requests_mock.call_count == call_count + 1
@pytest.mark.parametrize('command, new_query, command_args, command_api_request', [
    (list_persons_command,
     False,  # Rest old query
     {"query": "status=firstLine&id=5"},
     'https://test.com/api/persons?query=status==firstLine&id==5'),
    (list_persons_command,
     False,  # Rest old query
     {"query": "status=firstLine"},
     'https://test.com/api/persons?query=status==firstLine'),
    (list_persons_command,
     False,  # Rest old query
     {"fields": "id,status"},
     'https://test.com/api/persons?$fields=id,status'),
    (list_operators_command,
     False,  # Old query
     {"query": "status=firstLine"},
     'https://test.com/api/operators?query=status==firstLine'),
    (list_operators_command,
     False,  # Old query
     {"query": "status==firstLine"},
     'https://test.com/api/operators?query=status==firstLine'),
    (branches_command,
     False,  # Old query
     {"query": "status=firstLine"},
     'https://test.com/api/branches?query=status==firstLine'),
    (branches_command,
     False,  # Old query
     {"query": "status==firstLine"},
     'https://test.com/api/branches?query=status==firstLine'),
    (branches_command,
     False,  # Old query
     {"fields": "id,name"},
     'https://test.com/api/branches?$fields=id,name'),
    (get_incidents_list_command,
     False,  # Old query
     {"query": "status=firstLine"},
     'https://test.com/api/incidents?status=firstLine'),
    (get_incidents_list_command,
     False,  # Old query
     {"status": "firstLine"},
     'https://test.com/api/incidents?status=firstLine'),
    (get_incidents_list_command,
     False,  # Old query
     {"query": 'caller_id=some_caller', "status": "firstLine"},
     'https://test.com/api/incidents?caller_id=some_caller&status=firstLine'),
    (get_incidents_list_command,
     False,  # Old query
     {"query": 'caller_id==some_caller', "status": "firstLine"},
     'https://test.com/api/incidents?caller_id=some_caller&status=firstLine'),
    (get_incidents_list_command,
     False,  # Old query
     {"query": 'caller_id=some_caller', "status": "firstLine", "branch_id": "some_branch"},
     'https://test.com/api/incidents?caller_id=some_caller&status=firstLine&branch=some_branch'),
    (get_incidents_list_command,
     False,  # Old query
     {"fields": "id,number"},
     'https://test.com/api/incidents?fields=id,number'),
    (get_incidents_list_command,
     True,  # New query
     {"query": "status==firstLine"},
     'https://test.com/api/incidents?query=status==firstLine'),
    (get_incidents_list_command,
     True,  # New query
     {"status": "firstLine"},
     'https://test.com/api/incidents?query=status==firstLine'),
    (get_incidents_list_command,
     True,  # New query
     {"query": 'caller_id==some_caller', "status": "firstLine"},
     'https://test.com/api/incidents?query=caller_id==some_caller&status==firstLine'),
    (get_incidents_list_command,
     True,  # New query
     {"query": 'status==firstLine', "caller_id": "some_caller_id", "branch_id": "some_branch"},
     'https://test.com/api/incidents?query=status==firstLine&caller==some_caller_id&branch==some_branch')
])
def test_old_new_query(requests_mock,
                       command,
                       new_query,
                       command_args,
                       command_api_request):
    """Unit test
    Given
        - command args
        - which type of query syntax is supported (old vs. new)
    When
        - running the command
    Then
        - validate the correct request url was called.
    """
    # The client decides between old/new query syntax based on the version
    # the API reports; mock it accordingly.
    reported_version = "3.4.0" if new_query else "3.1.4"
    requests_mock.get(
        'https://test.com/api/version', json={"version": reported_version})
    client = Client(
        base_url='https://test.com/api',
        verify=False,
        auth=('some_username', 'some_password')
    )
    requests_mock.get(command_api_request, json=[{}])
    command(client=client, args=command_args)
    assert requests_mock.called
@pytest.mark.parametrize('command_args', [
    ({"category": "blah"}), ({"subcategory": "blah"}), ({"call_type": "blah"}), ({"entry_type": "blah"})
])
def test_unsupported_old_query_param(client, command_args):
    """Unit test
    Given
        - client with old query setting. The old query does not support all args that the new query supports.
        - unsupported command args of get_incidents_list_command.
    When
        - running get_incidents_list_command with the unsupported param.
    Then
        - validate KeyError is raised.
    """
    expected_error = " is not supported in older TOPdeskRestApi versions."
    with pytest.raises(KeyError, match=expected_error):
        get_incidents_list_command(client=client, args=command_args)
@pytest.mark.parametrize('topdesk_incidents_override, last_fetch_time, updated_fetch_time', [
    ([{  # Last fetch is before incident creation
        'number': 'TEST-1',
        'creationDate': '2020-02-10T06:32:36.303000+0000',
        'occurred': '2020-02-10T06:32:36Z',
        'will_be_fetched': True
    }], '2020-01-11T06:32:36.303000+0000', '2020-02-10T06:32:36.303000+0000'),
    ([{  # Last fetch is after one incident creation and before other.
        'number': 'TEST-1',
        'creationDate': '2020-01-10T06:32:36.303000+0000',
        'occurred': '2020-01-10T06:32:36Z',
        'will_be_fetched': False
    }, {
        'number': 'TEST-2',
        'creationDate': '2020-03-10T06:32:36.303000+0000',
        'occurred': '2020-03-10T06:32:36Z',
        'will_be_fetched': True
    }], '2020-02-11T06:32:36.303000+0000', '2020-03-10T06:32:36.303000+0000'),
    ([{  # Last fetch is at incident creation
        'number': 'TEST-1',
        'creationDate': '2020-02-10T06:32:36.303+0000',
        'occurred': '2020-02-10T06:32:36Z',
        'will_be_fetched': False
    }], '2020-02-10T06:32:36.303000+0000', '2020-02-10T06:32:36.303000+0000'),
    ([{  # Same incident returned twice.
        'number': 'TEST-1',
        'creationDate': '2020-03-10T06:32:36.303000+0000',
        'occurred': '2020-03-10T06:32:36Z',
        'will_be_fetched': True
    }, {
        'number': 'TEST-1',
        'creationDate': '2020-03-10T06:32:36.303000+0000',
        'occurred': '2020-03-10T06:32:36Z',
        'will_be_fetched': False
    }], '2020-02-11T06:32:36.303000+0000', '2020-03-10T06:32:36.303000+0000'),
])
def test_fetch_incidents(client, requests_mock, topdesk_incidents_override, last_fetch_time, updated_fetch_time):
    """Unit test
    Given
        - fetch incidents args
    When
        - running fetch incidents command
    Then
        - validate the length of the results.
        - validate the entry context.
        - validate last_fetch is updated.
    """
    incident_template = util_load_json('test_data/topdesk_incident.json')
    mocked_incidents = []
    expected_incidents = []
    for override in topdesk_incidents_override:
        # Build the mocked API payload from the template, overriding the
        # fields that drive the fetch-window logic.
        mocked_incident = incident_template.copy()
        mocked_incident['number'] = override['number']
        mocked_incident['creationDate'] = override['creationDate']
        mocked_incidents.append(mocked_incident)
        if override['will_be_fetched']:
            serialized = json.dumps(mocked_incident)
            expected_incidents.append({
                'name': f"TOPdesk incident {override['number']}",
                'details': serialized,
                'occurred': override['occurred'],
                'rawJSON': serialized,
            })
    requests_mock.get(
        'https://test.com/api/incidents', json=mocked_incidents)
    last_fetch, incidents = fetch_incidents(client=client,
                                            last_run={'last_fetch': last_fetch_time},
                                            demisto_params={})
    assert len(incidents) == len(expected_incidents)
    for actual, expected in zip(incidents, expected_incidents):
        for field in ('name', 'details', 'occurred', 'rawJSON'):
            assert actual[field] == expected[field]
    assert last_fetch == {'last_fetch': updated_fetch_time}
| |
import base64
import hmac
import time
import uuid
from django.conf import settings
from django.contrib.auth import authenticate
from django.core.exceptions import ImproperlyConfigured
from django.middleware.csrf import _sanitize_token, constant_time_compare
from django.utils.translation import ugettext as _
from tastypie.http import HttpUnauthorized
try:
from hashlib import sha1
except ImportError:
import sha
sha1 = sha.sha
try:
import python_digest
except ImportError:
python_digest = None
try:
import oauth2
except ImportError:
oauth2 = None
try:
import oauth_provider
except ImportError:
oauth_provider = None
# "Vendored" from https://github.com/django/django/blob/1.8.7/django/utils/http.py#L256-L266,
# which is removed in later versions of Django
def same_origin(url1, url2):
    """
    Checks if two URLs are 'same-origin'.

    Two URLs share an origin when their (scheme, hostname, port) triples
    match; a missing port falls back to the scheme's default (80 for http,
    443 for https).  Malformed URLs and unknown schemes are treated as NOT
    same-origin.
    """
    # ``urlparse`` and the default-port table were referenced here without
    # ever being imported/defined in this module, so every call raised
    # NameError.  Resolve both locally (Python 2 and 3 compatible) to keep
    # the vendored helper self-contained.
    try:
        from urllib.parse import urlparse  # Python 3
    except ImportError:
        from urlparse import urlparse  # Python 2
    protocol_to_port = {'http': 80, 'https': 443}
    p1, p2 = urlparse(url1), urlparse(url2)
    try:
        o1 = (p1.scheme, p1.hostname, p1.port or protocol_to_port[p1.scheme])
        o2 = (p2.scheme, p2.hostname, p2.port or protocol_to_port[p2.scheme])
        return o1 == o2
    except (ValueError, KeyError):
        return False
class Authentication(object):
    """
    Base class establishing the protocol for auth backends.

    The default implementation treats every request as authenticated.
    """
    def __init__(self, require_active=True):
        self.require_active = require_active

    def is_authenticated(self, request, **kwargs):
        """
        Decide whether the request may proceed.

        Returns ``True`` to allow, ``False`` to deny, or an ``HttpResponse``
        for a custom reply.  The base implementation always allows.
        """
        return True

    def get_identifier(self, request):
        """
        Provides a unique string identifier for the requestor, built from
        the request's IP address and hostname.
        """
        meta = request.META
        return "%s_%s" % (meta.get('REMOTE_ADDR', 'noaddr'), meta.get('REMOTE_HOST', 'nohost'))

    def check_active(self, user):
        """
        Ensures the user has an active account (always ``True`` when
        ``require_active`` is disabled).

        Optimized for the ``django.contrib.auth.models.User`` case.
        """
        if not self.require_active:
            # Active-account enforcement is switched off; let everyone in.
            return True
        return user.is_active
class BasicAuthentication(Authentication):
    """
    Handles HTTP Basic auth against a specific auth backend if provided,
    or against all configured authentication backends using the
    ``authenticate`` method from ``django.contrib.auth``.

    Optional keyword arguments:

    ``backend``
        If specified, use a specific ``django.contrib.auth`` backend instead
        of checking all backends specified in the ``AUTHENTICATION_BACKENDS``
        setting.
    ``realm``
        The realm to use in the ``HttpUnauthorized`` response. Default:
        ``django-tastypie``.
    """
    def __init__(self, backend=None, realm='django-tastypie', **kwargs):
        super(BasicAuthentication, self).__init__(**kwargs)
        self.backend = backend
        self.realm = realm

    def _unauthorized(self):
        """Build the 401 challenge response for this realm."""
        response = HttpUnauthorized()
        # FIXME: Sanitize realm.
        response['WWW-Authenticate'] = 'Basic Realm="%s"' % self.realm
        return response

    def is_authenticated(self, request, **kwargs):
        """
        Checks a user's basic auth credentials against the current
        Django auth backend.

        Should return either ``True`` if allowed, ``False`` if not or an
        ``HttpResponse`` if you need something custom.
        """
        auth_header = request.META.get('HTTP_AUTHORIZATION')
        if not auth_header:
            return self._unauthorized()
        try:
            (auth_type, data) = auth_header.split()
            if auth_type.lower() != 'basic':
                return self._unauthorized()
            user_pass = base64.b64decode(data)
        except:
            # Any parse/decode failure means malformed credentials.
            return self._unauthorized()
        bits = user_pass.split(':', 1)
        if len(bits) != 2:
            return self._unauthorized()
        if self.backend:
            user = self.backend.authenticate(username=bits[0], password=bits[1])
        else:
            user = authenticate(username=bits[0], password=bits[1])
        if user is None:
            return self._unauthorized()
        if not self.check_active(user):
            return False
        request.user = user
        return True

    def get_identifier(self, request):
        """
        Provides a unique string identifier for the requestor: the user's
        basic auth username.
        """
        return request.META.get('REMOTE_USER', 'nouser')
class ApiKeyAuthentication(Authentication):
    """
    Handles API key auth, in which a user provides a username & API key.

    Uses the ``ApiKey`` model that ships with tastypie. If you wish to use
    a different model, override the ``get_key`` method to perform the key
    check as suits your needs.
    """
    def _unauthorized(self):
        return HttpUnauthorized()

    def extract_credentials(self, request):
        """
        Pull ``(username, api_key)`` out of the request.

        Credentials come from an ``Authorization: ApiKey user:key`` header
        when present, otherwise from the ``username``/``api_key`` GET or
        POST parameters.  Raises ``ValueError`` on a malformed header.
        """
        if request.META.get('HTTP_AUTHORIZATION') and request.META['HTTP_AUTHORIZATION'].lower().startswith('apikey '):
            (auth_type, data) = request.META['HTTP_AUTHORIZATION'].split()
            if auth_type.lower() != 'apikey':
                raise ValueError("Incorrect authorization header.")
            username, api_key = data.split(':', 1)
        else:
            username = request.GET.get('username') or request.POST.get('username')
            api_key = request.GET.get('api_key') or request.POST.get('api_key')
        return username, api_key

    def is_authenticated(self, request, **kwargs):
        """
        Finds the user and checks their API key.

        Should return either ``True`` if allowed, ``False`` if not or an
        ``HttpResponse`` if you need something custom.
        """
        from django.contrib.auth.models import User
        try:
            username, api_key = self.extract_credentials(request)
        except ValueError:
            return self._unauthorized()
        if not username or not api_key:
            return self._unauthorized()
        try:
            user = User.objects.get(username=username)
        except (User.DoesNotExist, User.MultipleObjectsReturned):
            return self._unauthorized()
        if not self.check_active(user):
            return False
        request.user = user
        return self.get_key(user, api_key)

    def get_key(self, user, api_key):
        """
        Attempts to find the API key for the user. Uses ``ApiKey`` by default
        but can be overridden.
        """
        from tastypie.models import ApiKey
        try:
            # Fetch by user only (matching ``DigestAuthentication.get_key``)
            # and compare in constant time, so the key cannot be recovered
            # through a timing side channel on the lookup-by-value query.
            key = ApiKey.objects.get(user=user)
        except ApiKey.DoesNotExist:
            return self._unauthorized()
        if not constant_time_compare(str(key.key), str(api_key)):
            return self._unauthorized()
        return True

    def get_identifier(self, request):
        """
        Provides a unique string identifier for the requestor: the user's
        username (or ``'nouser'`` when none was supplied).
        """
        username, api_key = self.extract_credentials(request)
        return username or 'nouser'
class SessionAuthentication(Authentication):
    """
    An authentication mechanism that piggy-backs on Django sessions.
    This is useful when the API is talking to Javascript on the same site.
    Relies on the user being logged in through the standard Django login
    setup.
    Requires a valid CSRF token.
    """
    def is_authenticated(self, request, **kwargs):
        """
        Checks to make sure the user is logged in & has a Django session.

        Safe HTTP methods skip the CSRF check entirely; unsafe methods must
        present an ``X-CSRFToken`` header matching the CSRF cookie and, over
        HTTPS, a same-origin ``Referer`` header.
        """
        # Cargo-culted from Django 1.3/1.4's ``django/middleware/csrf.py``.
        # We can't just use what's there, since the return values will be
        # wrong.
        # We also can't risk accessing ``request.POST``, which will break with
        # the serialized bodies.
        # Read-only methods need no CSRF protection.
        if request.method in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
            return request.user.is_authenticated()
        # Tests/middleware can mark a request as CSRF-exempt.
        if getattr(request, '_dont_enforce_csrf_checks', False):
            return request.user.is_authenticated()
        csrf_token = _sanitize_token(request.COOKIES.get(settings.CSRF_COOKIE_NAME, ''))
        if request.is_secure():
            # Over HTTPS, require a same-origin referer to block
            # cross-domain replays (mirrors Django's CSRF middleware).
            referer = request.META.get('HTTP_REFERER')
            if referer is None:
                return False
            good_referer = 'https://%s/' % request.get_host()
            if not same_origin(referer, good_referer):
                return False
        # Compare header token to cookie token in constant time to avoid
        # leaking token contents through timing differences.
        request_csrf_token = request.META.get('HTTP_X_CSRFTOKEN', '')
        if not constant_time_compare(request_csrf_token, csrf_token):
            return False
        return request.user.is_authenticated()
    def get_identifier(self, request):
        """
        Provides a unique string identifier for the requestor.
        This implementation returns the user's username.
        """
        return request.user.username
class DigestAuthentication(Authentication):
    """
    Handles HTTP Digest auth against a specific auth backend if provided,
    or against all configured authentication backends using the
    ``authenticate`` method from ``django.contrib.auth``. However, instead of
    the user's password, their API key should be used.
    Optional keyword arguments:
    ``backend``
        If specified, use a specific ``django.contrib.auth`` backend instead
        of checking all backends specified in the ``AUTHENTICATION_BACKENDS``
        setting.
    ``realm``
        The realm to use in the ``HttpUnauthorized`` response. Default:
        ``django-tastypie``.
    """
    def __init__(self, backend=None, realm='django-tastypie', **kwargs):
        super(DigestAuthentication, self).__init__(**kwargs)
        self.backend = backend
        self.realm = realm
        # Fail fast at construction time if the required third-party
        # dependency was not importable at module load.
        if python_digest is None:
            raise ImproperlyConfigured("The 'python_digest' package could not be imported. It is required for use with the 'DigestAuthentication' class.")
    def _unauthorized(self):
        """Build a 401 response carrying a fresh digest challenge."""
        response = HttpUnauthorized()
        # ``opaque`` is a server-chosen value the client must echo back;
        # derive it via HMAC-SHA1 keyed with a random UUID.
        new_uuid = uuid.uuid4()
        opaque = hmac.new(str(new_uuid), digestmod=sha1).hexdigest()
        response['WWW-Authenticate'] = python_digest.build_digest_challenge(time.time(), getattr(settings, 'SECRET_KEY', ''), self.realm, opaque, False)
        return response
    def is_authenticated(self, request, **kwargs):
        """
        Finds the user and checks their API key.
        Should return either ``True`` if allowed, ``False`` if not or an
        ``HttpResponse`` if you need something custom.
        """
        if not request.META.get('HTTP_AUTHORIZATION'):
            return self._unauthorized()
        try:
            (auth_type, data) = request.META['HTTP_AUTHORIZATION'].split(' ', 1)
            if auth_type.lower() != 'digest':
                return self._unauthorized()
        except:
            # Malformed Authorization header.
            return self._unauthorized()
        digest_response = python_digest.parse_digest_credentials(request.META['HTTP_AUTHORIZATION'])
        # FIXME: Should the nonce be per-user?
        if not python_digest.validate_nonce(digest_response.nonce, getattr(settings, 'SECRET_KEY', '')):
            return self._unauthorized()
        user = self.get_user(digest_response.username)
        api_key = self.get_key(user)
        if user is False or api_key is False:
            return self._unauthorized()
        # Recompute the digest server-side from the user's API key and
        # compare it to what the client sent.
        # NOTE(review): this is a plain ``==`` comparison, not constant-time.
        expected = python_digest.calculate_request_digest(
            request.method,
            python_digest.calculate_partial_digest(digest_response.username, self.realm, api_key),
            digest_response)
        if not digest_response.response == expected:
            return self._unauthorized()
        if not self.check_active(user):
            return False
        request.user = user
        return True
    def get_user(self, username):
        """Return the ``User`` for ``username``, or ``False`` when not found
        (or ambiguous)."""
        from django.contrib.auth.models import User
        try:
            user = User.objects.get(username=username)
        except (User.DoesNotExist, User.MultipleObjectsReturned):
            return False
        return user
    def get_key(self, user):
        """
        Attempts to find the API key for the user. Uses ``ApiKey`` by default
        but can be overridden.
        Note that this behaves differently than the ``ApiKeyAuthentication``
        method of the same name.
        """
        from tastypie.models import ApiKey
        try:
            key = ApiKey.objects.get(user=user)
        except ApiKey.DoesNotExist:
            return False
        return key.key
    def get_identifier(self, request):
        """
        Provides a unique string identifier for the requestor.
        This implementation returns the user's username.
        """
        if hasattr(request, 'user'):
            if hasattr(request.user, 'username'):
                return request.user.username
        return 'nouser'
class OAuthAuthentication(Authentication):
    """
    Handles OAuth, which checks a user's credentials against a separate service.
    Currently verifies against OAuth 1.0a services.
    This does *NOT* provide OAuth authentication in your API, strictly
    consumption.

    Requires the ``python-oauth2`` and ``django-oauth-plus`` packages.
    """
    def __init__(self, **kwargs):
        super(OAuthAuthentication, self).__init__(**kwargs)
        # Fail fast if either OAuth dependency was not importable at
        # module load time.
        if oauth2 is None:
            raise ImproperlyConfigured("The 'python-oauth2' package could not be imported. It is required for use with the 'OAuthAuthentication' class.")
        if oauth_provider is None:
            raise ImproperlyConfigured("The 'django-oauth-plus' package could not be imported. It is required for use with the 'OAuthAuthentication' class.")
    def is_authenticated(self, request, **kwargs):
        """
        Verify the request's OAuth 1.0a signature and resolve the consumer
        and access token; on success attach the token's user to the request.
        Failures are answered with an OAuth error response.
        """
        from oauth_provider.store import store, InvalidTokenError
        if self.is_valid_request(request):
            oauth_request = oauth_provider.utils.get_oauth_request(request)
            consumer = store.get_consumer(request, oauth_request, oauth_request.get_parameter('oauth_consumer_key'))
            try:
                token = store.get_access_token(request, oauth_request, consumer, oauth_request.get_parameter('oauth_token'))
            except oauth_provider.store.InvalidTokenError:
                return oauth_provider.utils.send_oauth_error(oauth2.Error(_('Invalid access token: %s') % oauth_request.get_parameter('oauth_token')))
            try:
                self.validate_token(request, consumer, token)
            # NOTE(review): ``except ... , e`` is Python 2-only syntax.
            except oauth2.Error, e:
                return oauth_provider.utils.send_oauth_error(e)
            if consumer and token:
                if not self.check_active(token.user):
                    return False
                request.user = token.user
                return True
            return oauth_provider.utils.send_oauth_error(oauth2.Error(_('You are not allowed to access this resource.')))
        return oauth_provider.utils.send_oauth_error(oauth2.Error(_('Invalid request parameters.')))
    def is_in(self, params):
        """
        Checks to ensure that all the OAuth parameter names are in the
        provided ``params``.
        """
        from oauth_provider.consts import OAUTH_PARAMETERS_NAMES
        for param_name in OAUTH_PARAMETERS_NAMES:
            if param_name not in params:
                return False
        return True
    def is_valid_request(self, request):
        """
        Checks whether the required parameters are either in the HTTP
        ``Authorization`` header sent by some clients (the preferred method
        according to OAuth spec) or fall back to ``GET/POST``.
        """
        auth_params = request.META.get("HTTP_AUTHORIZATION", [])
        return self.is_in(auth_params) or self.is_in(request.REQUEST)
    def validate_token(self, request, consumer, token):
        """Verify the request signature for this consumer/token pair;
        raises ``oauth2.Error`` on failure."""
        oauth_server, oauth_request = oauth_provider.utils.initialize_server_request(request)
        return oauth_server.verify_request(oauth_request, consumer, token)
class MultiAuthentication(object):
    """
    An authentication backend that tries a number of backends in order.
    """
    def __init__(self, *backends, **kwargs):
        super(MultiAuthentication, self).__init__(**kwargs)
        self.backends = backends

    def is_authenticated(self, request, **kwargs):
        """
        Identifies if the user is authenticated to continue or not.

        Tries each backend in order; the first truthy answer that is not an
        ``HttpUnauthorized`` wins and its backend is remembered on the
        request.  Otherwise the first ``HttpUnauthorized`` collected is
        returned (or ``False`` when every backend simply declined).
        """
        unauthorized = False
        for backend in self.backends:
            verdict = backend.is_authenticated(request, **kwargs)
            if not verdict:
                continue
            if isinstance(verdict, HttpUnauthorized):
                # Keep the first 401 we saw in case nothing else succeeds.
                unauthorized = unauthorized or verdict
            else:
                request._authentication_backend = backend
                return verdict
        return unauthorized

    def get_identifier(self, request):
        """
        Provides a unique string identifier for the requestor, delegated to
        whichever backend authenticated the request (``'nouser'`` when none
        did).
        """
        try:
            return request._authentication_backend.get_identifier(request)
        except AttributeError:
            return 'nouser'
| |
#!/usr/bin/env python
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
GcloudCLI class that wraps the oc commands in a subprocess
'''
import atexit
import json
import os
import random
# Not all generated modules use this.
# pylint: disable=unused-import
import re
import shutil
import string
import subprocess
import tempfile
import yaml
# Not all generated modules use this.
# pylint: disable=unused-import
import copy
# pylint: disable=import-error
from apiclient.discovery import build
# pylint: disable=import-error
from oauth2client.client import GoogleCredentials
class GcloudCLIError(Exception):
    '''Raised when a gcloud CLI operation fails'''
# pylint: disable=too-few-public-methods
class GcloudCLI(object):
    ''' Class to wrap the gcloud command line tools '''
    def __init__(self, credentials=None, verbose=False):
        ''' Constructor for GcloudCLI

            credentials: optional dict of service-account credentials; when
                         omitted the application-default credentials are used.
            verbose: when True, echo each gcloud command and its output.
        '''
        self.scope = None
        if not credentials:
            self.credentials = GoogleCredentials.get_application_default()
        else:
            # Write the supplied credentials to a temp file so the google
            # client library can load them from disk.
            tmp = tempfile.NamedTemporaryFile()
            tmp.write(json.dumps(credentials))
            tmp.seek(0)
            self.credentials = GoogleCredentials.from_stream(tmp.name)
            tmp.close()
        self.scope = build('compute', 'beta', credentials=self.credentials)
        self.verbose = verbose

    def _create_image(self, image_name, image_info):
        '''create an image'''
        cmd = ['compute', 'images', 'create', image_name]
        for key, val in image_info.items():
            if val:
                cmd.extend(['--%s' % key, val])
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _delete_image(self, image_name):
        '''delete image by name '''
        # BUG FIX: the previous version appended ``describe``/``list``
        # arguments (copy-pasted from _list_images) after ``delete``,
        # producing an invalid gcloud invocation.  A delete only needs the
        # image name plus ``-q`` to skip the interactive confirmation.
        cmd = ['compute', 'images', 'delete', image_name, '-q']
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_images(self, image_name=None):
        '''list images.
           if name is supplied perform a describe and return
        '''
        cmd = ['compute', 'images']
        if image_name:
            cmd.extend(['describe', image_name])
        else:
            cmd.append('list')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_deployments(self, simple=True):
        '''list deployments'''
        cmd = ['deployment-manager', 'deployments', 'list']
        if simple:
            cmd.append('--simple-list')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _delete_deployment(self, dname):
        '''delete a deployment by name'''
        cmd = ['deployment-manager', 'deployments', 'delete', dname, '-q']
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _create_deployment(self, dname, config=None, opts=None):
        ''' create a deployment

            config: dict (serialized to a temp file) or path to a config file
            opts: extra ``--key=value`` options for gcloud
        '''
        cmd = ['deployment-manager', 'deployments', 'create', dname]
        if config:
            if isinstance(config, dict):
                config = Utils.create_file(dname, config)
            if isinstance(config, str) and os.path.exists(config):
                cmd.extend(['--config=%s' % config])
        if opts:
            for key, val in opts.items():
                cmd.append('--%s=%s' % (key, val))
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _update_deployment(self, dname, config=None, opts=None):
        ''' update a deployment

            config: dict (serialized to a temp file) or path to a config file
            opts: extra ``--key=value`` options for gcloud
        '''
        cmd = ['deployment-manager', 'deployments', 'update', dname]
        if config:
            if isinstance(config, dict):
                config = Utils.create_file(dname, config)
            if isinstance(config, str) and os.path.exists(config):
                cmd.extend(['--config=%s' % config])
        if opts:
            for key, val in opts.items():
                cmd.append('--%s=%s' % (key, val))
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_manifests(self, deployment, mname=None):
        ''' list manifests
            if a name is specified then perform a describe
        '''
        cmd = ['deployment-manager', 'manifests', '--deployment', deployment]
        if mname:
            cmd.extend(['describe', mname])
        else:
            cmd.append('list')
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _delete_address(self, aname):
        ''' delete an address by name '''
        cmd = ['compute', 'addresses', 'delete', aname, '-q']
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_addresses(self, aname=None):
        ''' list addresses
            if a name is specified then perform a describe
        '''
        cmd = ['compute', 'addresses']
        if aname:
            cmd.extend(['describe', aname])
        else:
            cmd.append('list')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _create_address(self, address_name, address_info, address=None, isglobal=False):
        ''' create an address '''
        cmd = ['compute', 'addresses', 'create', address_name]
        if address:
            cmd.append(address)
        if isglobal:
            cmd.append('--global')
        for key, val in address_info.items():
            if val:
                cmd.extend(['--%s' % key, val])
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_metadata(self):
        '''describe project metadata'''
        cmd = ['compute', 'project-info', 'describe']
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _delete_metadata(self, keys, remove_all=False):
        '''remove project metadata keys (or all metadata)'''
        cmd = ['compute', 'project-info', 'remove-metadata']
        if remove_all:
            cmd.append('--all')
        else:
            cmd.append('--keys')
            cmd.append(','.join(keys))
        cmd.append('-q')
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _create_metadata(self, metadata=None, metadata_from_file=None):
        '''add project metadata, either inline or from files'''
        cmd = ['compute', 'project-info', 'add-metadata']
        data = None
        if metadata_from_file:
            cmd.append('--metadata-from-file')
            data = metadata_from_file
        else:
            cmd.append('--metadata')
            data = metadata
        cmd.append(','.join(['%s=%s' % (key, val) for key, val in data.items()]))
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_service_accounts(self, sa_name=None):
        '''return service accounts
           if a name is specified then perform a describe
        '''
        cmd = ['iam', 'service-accounts']
        if sa_name:
            cmd.extend(['describe', sa_name])
        else:
            cmd.append('list')
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _delete_service_account(self, sa_name):
        '''delete service account '''
        cmd = ['iam', 'service-accounts', 'delete', sa_name, '-q']
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _create_service_account(self, sa_name, display_name=None):
        '''create service account '''
        cmd = ['iam', 'service-accounts', 'create', sa_name]
        if display_name:
            cmd.extend(['--display-name', display_name])
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _update_service_account(self, sa_name, display_name=None):
        '''update service account '''
        cmd = ['iam', 'service-accounts', 'update', sa_name]
        if display_name:
            cmd.extend(['--display-name', display_name])
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _delete_service_account_key(self, sa_name, key_id):
        '''delete service account key'''
        cmd = ['iam', 'service-accounts', 'keys', 'delete', key_id, '--iam-account', sa_name, '-q']
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_service_account_keys(self, sa_name):
        '''return service account keys '''
        cmd = ['iam', 'service-accounts', 'keys', 'list', '--iam-account', sa_name]
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _create_service_account_key(self, sa_name, outputfile, key_format='p12'):
        '''create service account key '''
        # Ensure we remove the key file when the process exits.
        atexit.register(Utils.cleanup, [outputfile])
        cmd = ['iam', 'service-accounts', 'keys', 'create', outputfile,
               '--iam-account', sa_name, '--key-file-type', key_format]
        return self.gcloud_cmd(cmd, output=True, output_type='raw')

    def _list_project_policy(self, project):
        '''get the IAM policy for a project'''
        cmd = ['projects', 'get-iam-policy', project]
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _add_project_policy(self, project, member, role):
        '''add an IAM policy binding to a project'''
        cmd = ['projects', 'add-iam-policy-binding', project, '--member', member, '--role', role]
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _remove_project_policy(self, project, member, role):
        '''remove an IAM policy binding from a project'''
        cmd = ['projects', 'remove-iam-policy-binding', project, '--member', member, '--role', role]
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _set_project_policy(self, project, policy_path):
        '''set the IAM policy for a project from a policy file'''
        cmd = ['projects', 'set-iam-policy', project, policy_path]
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _list_zones(self):
        ''' list zones '''
        cmd = ['compute', 'zones', 'list']
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _config_set(self, config_param, config_value, config_section):
        ''' set config params with gcloud config set '''
        param = config_section + '/' + config_param
        cmd = ['config', 'set', param, config_value]
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def _list_config(self):
        '''return config '''
        cmd = ['config', 'list']
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    def list_disks(self, zone=None, disk_name=None):
        '''return a list of disk objects in this project and zone'''
        cmd = ['beta', 'compute', 'disks']
        if disk_name and zone:
            cmd.extend(['describe', disk_name, '--zone', zone])
        else:
            cmd.append('list')
        cmd.extend(['--format', 'json'])
        return self.gcloud_cmd(cmd, output=True, output_type='json')

    # disabling too-many-arguments as these are all required for the disk labels
    # pylint: disable=too-many-arguments
    def _set_disk_labels(self, project, zone, dname, labels, finger_print):
        '''set labels on a disk via the compute API

           finger_print: the current labelFingerprint of the disk, required
           by the API for optimistic concurrency control.
        '''
        if labels is None:
            labels = {}
        self.scope = build('compute', 'beta', credentials=self.credentials)
        body = {'labels': labels, 'labelFingerprint': finger_print}
        result = self.scope.disks().setLabels(project=project,
                                              zone=zone,
                                              resource=dname,
                                              body=body,
                                             ).execute()
        return result

    def gcloud_cmd(self, cmd, output=False, output_type='json'):
        '''Base command for gcloud

           cmd: list of gcloud arguments (without the executable)
           output: when True, capture stdout into the returned results
           output_type: 'json' to parse stdout as JSON, 'raw' for plain text
           Returns a dict with returncode, results, cmd and, on failure,
           stderr/stdout details.
        '''
        cmds = ['/usr/bin/gcloud']
        cmds.extend(cmd)
        rval = {}
        results = ''
        err = None
        if self.verbose:
            # Single-argument print() is valid and identical on Python 2 and 3
            # (the old ``print x`` statement form was Python 2-only).
            print(' '.join(cmds))
        proc = subprocess.Popen(cmds,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env={})
        stdout, stderr = proc.communicate()
        rval = {"returncode": proc.returncode,
                "results": results,
                "cmd": ' '.join(cmds),
               }
        if proc.returncode == 0:
            if output:
                if output_type == 'json':
                    try:
                        rval['results'] = json.loads(stdout)
                    except ValueError as verr:
                        # ``err.message`` was Python 2-only; ``str()`` is the
                        # portable way to get the error text.
                        err = str(verr)
                elif output_type == 'raw':
                    rval['results'] = stdout
            if self.verbose:
                print(stdout)
                print(stderr)
            if err:
                rval.update({"err": err,
                             "stderr": stderr,
                             "stdout": stdout,
                             "cmd": cmds
                            })
        else:
            rval.update({"stderr": stderr,
                         "stdout": stdout,
                         "results": {},
                        })
        return rval
################################################################################
# utilities and helpers for generation
################################################################################
class Utils(object):
    ''' utilities for openshiftcli modules '''
    COMPUTE_URL_BASE = 'https://www.googleapis.com/compute/v1/'
    @staticmethod
    def create_file(rname, data, ftype='yaml'):
        ''' create a file in tmp with name and contents'''
        path = os.path.join('/tmp', rname)
        # Serialize the payload first, then write it out in a single call.
        if ftype == 'yaml':
            contents = yaml.safe_dump(data, default_flow_style=False)
        elif ftype == 'json':
            contents = json.dumps(data)
        else:
            contents = data
        with open(path, 'w') as fds:
            fds.write(contents)
        # Remove the temp file automatically when the module exits.
        atexit.register(Utils.cleanup, [path])
        return path
    @staticmethod
    def global_compute_url(project, collection, rname):
        '''build the global compute url for a resource'''
        return '{0}projects/{1}/global/{2}/{3}'.format(
            Utils.COMPUTE_URL_BASE, project, collection, rname)
    @staticmethod
    def zonal_compute_url(project, zone, collection, rname):
        '''build the zone compute url for a resource'''
        return '{0}projects/{1}/zones/{2}/{3}/{4}'.format(
            Utils.COMPUTE_URL_BASE, project, zone, collection, rname)
    @staticmethod
    def generate_random_name(size):
        '''generate a random string of lowercase and digits the length of size'''
        alphabet = string.ascii_lowercase + string.digits
        return ''.join(random.choice(alphabet) for _ in range(size))
    @staticmethod
    def cleanup(files):
        '''Clean up on exit '''
        for sfile in files:
            if not os.path.exists(sfile):
                continue
            if os.path.isdir(sfile):
                shutil.rmtree(sfile)
            elif os.path.isfile(sfile):
                os.remove(sfile)
# pylint: disable=too-many-instance-attributes
class GcloudComputeImage(GcloudCLI):
    ''' Class to wrap the gcloud compute images command'''
    # pylint allows 5
    # pylint: disable=too-many-arguments
    def __init__(self,
                 iname=None,
                 desc=None,
                 family=None,
                 licenses=None,
                 source_disk=None,
                 source_disk_zone=None,
                 source_uri=None,
                 verbose=False):
        ''' Constructor for gcloud resource '''
        super(GcloudComputeImage, self).__init__()
        self.image_name = iname            # image resource name
        self.desc = desc                   # human-readable description
        self.family = family               # image family
        self.licenses = licenses           # license URIs
        self.source_disk = source_disk     # disk to build the image from
        self.source_disk_zone = source_disk_zone
        self.source_uri = source_uri       # alternative GCS source tarball
        self.verbose = verbose
    def list_images(self, image_name=None):
        '''return a list of images'''
        results = self._list_images(image_name)
        if results['returncode'] == 0:
            # Drop the header row of the tabular gcloud output and split
            # the remainder into one line per image.
            results['results'] = results['results'].strip().split('\n')[1:]
        return results
    def exists(self):
        ''' return whether an image exists '''
        images = self.list_images()
        if images['returncode'] != 0:
            if 'was not found' in images['stderr']:
                # gcloud reported "not found"; treat the lookup as successful.
                # NOTE(review): this returns the (truthy) results dict rather
                # than False, so `if gimage.exists()` evaluates True here --
                # confirm this is the intended behavior.
                images['returncode'] = 0
                return images
            raise GcloudCLIError('Something went wrong. Results: %s' % images['stderr'])
        # Substring match: True if any listed line mentions the image name.
        return any([self.image_name in line for line in images['results']])
    def delete_image(self):
        '''delete an image'''
        return self._delete_image(self.image_name)
    def create_image(self):
        '''create an image'''
        # Collect the optional creation flags; None values are expected to be
        # filtered by the underlying _create_image helper.
        image_info = {}
        image_info['description'] = self.desc
        image_info['family'] = self.family
        image_info['licenses'] = self.licenses
        image_info['source-disk'] = self.source_disk
        image_info['source-disk-zone'] = self.source_disk_zone
        image_info['source-uri'] = self.source_uri
        return self._create_image(self.image_name, image_info)
# vim: expandtab:tabstop=4:shiftwidth=4
#pylint: disable=too-many-branches
def main():
    ''' ansible module for gcloud compute images'''
    module = AnsibleModule(
        argument_spec=dict(
            # credentials
            state=dict(default='present', type='str',
                       choices=['present', 'absent', 'list']),
            name=dict(default=None, type='str'),
            description=dict(default=None, type='str'),
            family=dict(default=None, type='str'),
            licenses=dict(default=None, type='list'),
            source_disk=dict(default=None, type='str'),
            source_disk_zone=dict(default=None, type='str'),
            source_uri=dict(default=None, type='str'),
        ),
        supports_check_mode=True,
    )
    gimage = GcloudComputeImage(module.params['name'],
                                module.params['description'],
                                module.params['family'],
                                module.params['licenses'],
                                module.params['source_disk'],
                                module.params['source_disk_zone'],
                                module.params['source_uri'])
    state = module.params['state']
    # Initial lookup; consumed by the 'list' state and reused as the default
    # api_rval for the unchanged 'present' exit below.
    api_rval = gimage.list_images(module.params['name'])
    #####
    # Get
    #####
    if state == 'list':
        if api_rval['returncode'] != 0:
            module.fail_json(msg=api_rval, state="list")
        module.exit_json(changed=False, results=api_rval['results'], state="list")
    ########
    # Delete
    ########
    if state == 'absent':
        if gimage.exists():
            if module.check_mode:
                module.exit_json(changed=False, msg='Would have performed a delete.')
            api_rval = gimage.delete_image()
            module.exit_json(changed=True, results=api_rval, state="absent")
        # Image already gone: nothing to do.
        module.exit_json(changed=False, state="absent")
    if state == 'present':
        ########
        # Create
        ########
        if not gimage.exists():
            if module.check_mode:
                module.exit_json(changed=False, msg='Would have performed a create.')
            # Create it here
            api_rval = gimage.create_image()
            if api_rval['returncode'] != 0:
                module.fail_json(msg=api_rval)
            module.exit_json(changed=True, results=api_rval, state="present")
        # Image already exists: report the lookup results unchanged.
        module.exit_json(changed=False, results=api_rval, state="present")
    # Unreachable unless argument_spec choices are extended without handling.
    module.exit_json(failed=True,
                     changed=False,
                     results='Unknown state passed. %s' % state,
                     state="unknown")
#if __name__ == '__main__':
#    gcloud = GcloudComputeImage('rhel-7-base-2016-06-10')
#    print gcloud.list_images()
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. This are required
from ansible.module_utils.basic import *
# Old-style Ansible modules are executed as scripts, so main() is invoked
# unconditionally (no __name__ == '__main__' guard by convention here).
main()
| |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import itertools
import pytest
import numpy as np
from numpy.testing import assert_allclose, assert_almost_equal
from astropy.convolution.convolve import convolve, convolve_fft
from astropy.convolution.kernels import (AiryDisk2DKernel, Box1DKernel, Box2DKernel, CustomKernel,
Gaussian1DKernel, Gaussian2DKernel, Kernel1D, Kernel2D,
Model1DKernel, Model2DKernel, RickerWavelet1DKernel,
RickerWavelet2DKernel, Ring2DKernel, Tophat2DKernel,
Trapezoid1DKernel, TrapezoidDisk2DKernel)
from astropy.convolution.utils import KernelSizeError
from astropy.modeling.models import Box2D, Gaussian1D, Gaussian2D
from astropy.utils.compat.optional_deps import HAS_SCIPY # noqa
from astropy.utils.exceptions import AstropyUserWarning
# Kernel sizes exercised by the parametrized tests.
WIDTHS_ODD = [3, 5, 7, 9]
WIDTHS_EVEN = [2, 4, 8, 16]
# Discretization modes accepted by the kernel constructors.
MODES = ['center', 'linear_interp', 'oversample', 'integrate']
KERNEL_TYPES = [Gaussian1DKernel, Gaussian2DKernel,
                Box1DKernel, Box2DKernel,
                Trapezoid1DKernel, TrapezoidDisk2DKernel,
                RickerWavelet1DKernel, Tophat2DKernel, AiryDisk2DKernel,
                Ring2DKernel]
# Scalar types used to exercise kernel * scalar multiplication.
NUMS = [1, 1., np.float32(1.), np.float64(1.)]
# Test data
# Single unit impulse centered in the array: convolving it with a kernel
# reproduces the kernel itself.
delta_pulse_1D = np.zeros(81)
delta_pulse_1D[40] = 1
delta_pulse_2D = np.zeros((81, 81))
delta_pulse_2D[40, 40] = 1
# NOTE(review): unseeded random data -- values differ between test runs.
random_data_1D = np.random.rand(61)
random_data_2D = np.random.rand(61, 61)
class TestKernels:
    """
    Test class for the built-in convolution kernels.
    """

    @pytest.mark.skipif('not HAS_SCIPY')
    @pytest.mark.parametrize(('width'), WIDTHS_ODD)
    def test_scipy_filter_gaussian(self, width):
        """
        Test GaussianKernel against SciPy ndimage gaussian filter.
        """
        from scipy.ndimage import gaussian_filter
        gauss_kernel_1D = Gaussian1DKernel(width)
        gauss_kernel_1D.normalize()
        gauss_kernel_2D = Gaussian2DKernel(width)
        gauss_kernel_2D.normalize()
        astropy_1D = convolve(delta_pulse_1D, gauss_kernel_1D, boundary='fill')
        astropy_2D = convolve(delta_pulse_2D, gauss_kernel_2D, boundary='fill')
        scipy_1D = gaussian_filter(delta_pulse_1D, width)
        scipy_2D = gaussian_filter(delta_pulse_2D, width)
        assert_almost_equal(astropy_1D, scipy_1D, decimal=12)
        assert_almost_equal(astropy_2D, scipy_2D, decimal=12)

    @pytest.mark.skipif('not HAS_SCIPY')
    @pytest.mark.parametrize(('width'), WIDTHS_ODD)
    def test_scipy_filter_gaussian_laplace(self, width):
        """
        Test RickerWavelet kernels against SciPy ndimage gaussian laplace filters.
        """
        from scipy.ndimage import gaussian_laplace
        ricker_kernel_1D = RickerWavelet1DKernel(width)
        ricker_kernel_2D = RickerWavelet2DKernel(width)
        astropy_1D = convolve(delta_pulse_1D, ricker_kernel_1D, boundary='fill', normalize_kernel=False)
        astropy_2D = convolve(delta_pulse_2D, ricker_kernel_2D, boundary='fill', normalize_kernel=False)
        # Normalizing a zero-sum kernel must be rejected.
        with pytest.raises(Exception) as exc:
            astropy_1D = convolve(delta_pulse_1D, ricker_kernel_1D, boundary='fill', normalize_kernel=True)
        assert 'sum is close to zero' in exc.value.args[0]
        with pytest.raises(Exception) as exc:
            astropy_2D = convolve(delta_pulse_2D, ricker_kernel_2D, boundary='fill', normalize_kernel=True)
        assert 'sum is close to zero' in exc.value.args[0]
        # The Laplace of Gaussian filter is an inverted Ricker Wavelet filter.
        scipy_1D = -gaussian_laplace(delta_pulse_1D, width)
        scipy_2D = -gaussian_laplace(delta_pulse_2D, width)
        # There is a slight deviation in the normalization. They differ by a
        # factor of ~1.0000284132604045. The reason is not known.
        assert_almost_equal(astropy_1D, scipy_1D, decimal=5)
        assert_almost_equal(astropy_2D, scipy_2D, decimal=5)

    @pytest.mark.parametrize(('kernel_type', 'width'), list(itertools.product(KERNEL_TYPES, WIDTHS_ODD)))
    def test_delta_data(self, kernel_type, width):
        """
        Test smoothing of an image with a single positive pixel
        """
        if kernel_type == AiryDisk2DKernel and not HAS_SCIPY:
            pytest.skip("Omitting AiryDisk2DKernel, which requires SciPy")
        # Ring2DKernel is the only type needing a second positional argument.
        if kernel_type is not Ring2DKernel:
            kernel = kernel_type(width)
        else:
            kernel = kernel_type(width, width * 0.2)
        if kernel.dimension == 1:
            c1 = convolve_fft(delta_pulse_1D, kernel, boundary='fill', normalize_kernel=False)
            c2 = convolve(delta_pulse_1D, kernel, boundary='fill', normalize_kernel=False)
            assert_almost_equal(c1, c2, decimal=12)
        else:
            c1 = convolve_fft(delta_pulse_2D, kernel, boundary='fill', normalize_kernel=False)
            c2 = convolve(delta_pulse_2D, kernel, boundary='fill', normalize_kernel=False)
            assert_almost_equal(c1, c2, decimal=12)

    @pytest.mark.parametrize(('kernel_type', 'width'), list(itertools.product(KERNEL_TYPES, WIDTHS_ODD)))
    def test_random_data(self, kernel_type, width):
        """
        Test smoothing of an image made of random noise
        """
        if kernel_type == AiryDisk2DKernel and not HAS_SCIPY:
            pytest.skip("Omitting AiryDisk2DKernel, which requires SciPy")
        if kernel_type is not Ring2DKernel:
            kernel = kernel_type(width)
        else:
            kernel = kernel_type(width, width * 0.2)
        if kernel.dimension == 1:
            c1 = convolve_fft(random_data_1D, kernel, boundary='fill', normalize_kernel=False)
            c2 = convolve(random_data_1D, kernel, boundary='fill', normalize_kernel=False)
            assert_almost_equal(c1, c2, decimal=12)
        else:
            c1 = convolve_fft(random_data_2D, kernel, boundary='fill', normalize_kernel=False)
            c2 = convolve(random_data_2D, kernel, boundary='fill', normalize_kernel=False)
            assert_almost_equal(c1, c2, decimal=12)

    @pytest.mark.parametrize(('width'), WIDTHS_ODD)
    def test_uniform_smallkernel(self, width):
        """
        Test smoothing of an image with a single positive pixel

        Instead of using kernel class, uses a simple, small kernel
        """
        kernel = np.ones([width, width])
        c2 = convolve_fft(delta_pulse_2D, kernel, boundary='fill')
        c1 = convolve(delta_pulse_2D, kernel, boundary='fill')
        assert_almost_equal(c1, c2, decimal=12)

    @pytest.mark.parametrize(('width'), WIDTHS_ODD)
    def test_smallkernel_vs_Box2DKernel(self, width):
        """
        Test smoothing of an image with a single positive pixel
        """
        kernel1 = np.ones([width, width]) / width ** 2
        kernel2 = Box2DKernel(width)
        c2 = convolve_fft(delta_pulse_2D, kernel2, boundary='fill')
        c1 = convolve_fft(delta_pulse_2D, kernel1, boundary='fill')
        assert_almost_equal(c1, c2, decimal=12)

    def test_convolve_1D_kernels(self):
        """
        Check if convolving two kernels with each other works correctly.
        """
        gauss_1 = Gaussian1DKernel(3)
        gauss_2 = Gaussian1DKernel(4)
        test_gauss_3 = Gaussian1DKernel(5)
        with pytest.warns(AstropyUserWarning, match=r'Both array and kernel '
                          r'are Kernel instances'):
            gauss_3 = convolve(gauss_1, gauss_2)
        assert np.all(np.abs((gauss_3 - test_gauss_3).array) < 0.01)

    def test_convolve_2D_kernels(self):
        """
        Check if convolving two kernels with each other works correctly.
        """
        gauss_1 = Gaussian2DKernel(3)
        gauss_2 = Gaussian2DKernel(4)
        test_gauss_3 = Gaussian2DKernel(5)
        with pytest.warns(AstropyUserWarning, match=r'Both array and kernel '
                          r'are Kernel instances'):
            gauss_3 = convolve(gauss_1, gauss_2)
        assert np.all(np.abs((gauss_3 - test_gauss_3).array) < 0.01)

    @pytest.mark.parametrize(('number'), NUMS)
    def test_multiply_scalar(self, number):
        """
        Check if multiplying a kernel with a scalar works correctly.
        """
        gauss = Gaussian1DKernel(3)
        gauss_new = number * gauss
        assert_almost_equal(gauss_new.array, gauss.array * number, decimal=12)

    @pytest.mark.parametrize(('number'), NUMS)
    def test_multiply_scalar_type(self, number):
        """
        Check if multiplying a kernel with a scalar works correctly.
        """
        gauss = Gaussian1DKernel(3)
        gauss_new = number * gauss
        assert type(gauss_new) is Gaussian1DKernel

    @pytest.mark.parametrize(('number'), NUMS)
    def test_rmultiply_scalar_type(self, number):
        """
        Check if multiplying a kernel with a scalar works correctly.
        """
        gauss = Gaussian1DKernel(3)
        gauss_new = gauss * number
        assert type(gauss_new) is Gaussian1DKernel

    def test_multiply_kernel1d(self):
        """Test that multiplying two 1D kernels raises an exception."""
        gauss = Gaussian1DKernel(3)
        with pytest.raises(Exception):
            gauss * gauss

    def test_multiply_kernel2d(self):
        """Test that multiplying two 2D kernels raises an exception."""
        gauss = Gaussian2DKernel(3)
        with pytest.raises(Exception):
            gauss * gauss

    def test_multiply_kernel1d_kernel2d(self):
        """
        Test that multiplying a 1D kernel with a 2D kernel raises an
        exception.
        """
        with pytest.raises(Exception):
            Gaussian1DKernel(3) * Gaussian2DKernel(3)

    def test_add_kernel_scalar(self):
        """Test that adding a scalar to a kernel raises an exception."""
        with pytest.raises(Exception):
            Gaussian1DKernel(3) + 1

    def test_model_1D_kernel(self):
        """
        Check Model1DKernel against Gaussian1Dkernel
        """
        stddev = 5.
        gauss = Gaussian1D(1. / np.sqrt(2 * np.pi * stddev**2), 0, stddev)
        model_gauss_kernel = Model1DKernel(gauss, x_size=21)
        gauss_kernel = Gaussian1DKernel(stddev, x_size=21)
        assert_almost_equal(model_gauss_kernel.array, gauss_kernel.array,
                            decimal=12)

    def test_model_2D_kernel(self):
        """
        Check Model2DKernel against Gaussian2Dkernel
        """
        stddev = 5.
        gauss = Gaussian2D(1. / (2 * np.pi * stddev**2), 0, 0, stddev, stddev)
        model_gauss_kernel = Model2DKernel(gauss, x_size=21)
        gauss_kernel = Gaussian2DKernel(stddev, x_size=21)
        assert_almost_equal(model_gauss_kernel.array, gauss_kernel.array,
                            decimal=12)

    def test_custom_1D_kernel(self):
        """
        Check CustomKernel against Box1DKernel.
        """
        # Define one dimensional array:
        array = np.ones(5)
        custom = CustomKernel(array)
        custom.normalize()
        box = Box1DKernel(5)
        c2 = convolve(delta_pulse_1D, custom, boundary='fill')
        c1 = convolve(delta_pulse_1D, box, boundary='fill')
        assert_almost_equal(c1, c2, decimal=12)

    def test_custom_2D_kernel(self):
        """
        Check CustomKernel against Box2DKernel.
        """
        # Define one dimensional array:
        array = np.ones((5, 5))
        custom = CustomKernel(array)
        custom.normalize()
        box = Box2DKernel(5)
        c2 = convolve(delta_pulse_2D, custom, boundary='fill')
        c1 = convolve(delta_pulse_2D, box, boundary='fill')
        assert_almost_equal(c1, c2, decimal=12)

    def test_custom_1D_kernel_list(self):
        """
        Check if CustomKernel works with lists.
        """
        custom = CustomKernel([1, 1, 1, 1, 1])
        assert custom.is_bool is True

    def test_custom_2D_kernel_list(self):
        """
        Check if CustomKernel works with lists.
        """
        custom = CustomKernel([[1, 1, 1],
                               [1, 1, 1],
                               [1, 1, 1]])
        assert custom.is_bool is True

    def test_custom_1D_kernel_zerosum(self):
        """
        Check if CustomKernel works when the input array/list
        sums to zero.
        """
        array = [-2, -1, 0, 1, 2]
        custom = CustomKernel(array)
        with pytest.warns(AstropyUserWarning, match=r'kernel cannot be '
                          r'normalized because it sums to zero'):
            custom.normalize()
        assert custom.truncation == 0.
        assert custom._kernel_sum == 0.

    def test_custom_2D_kernel_zerosum(self):
        """
        Check if CustomKernel works when the input array/list
        sums to zero.
        """
        array = [[0, -1, 0], [-1, 4, -1], [0, -1, 0]]
        custom = CustomKernel(array)
        with pytest.warns(AstropyUserWarning, match=r'kernel cannot be '
                          r'normalized because it sums to zero'):
            custom.normalize()
        assert custom.truncation == 0.
        assert custom._kernel_sum == 0.

    def test_custom_kernel_odd_error(self):
        """
        Check if CustomKernel raises if the array size is odd.
        """
        with pytest.raises(KernelSizeError):
            CustomKernel([1, 1, 1, 1])

    def test_add_1D_kernels(self):
        """
        Check if adding of two 1D kernels works.
        """
        box_1 = Box1DKernel(5)
        box_2 = Box1DKernel(3)
        box_3 = Box1DKernel(1)
        box_sum_1 = box_1 + box_2 + box_3
        box_sum_2 = box_2 + box_3 + box_1
        box_sum_3 = box_3 + box_1 + box_2
        ref = [1/5., 1/5. + 1/3., 1 + 1/3. + 1/5., 1/5. + 1/3., 1/5.]
        assert_almost_equal(box_sum_1.array, ref, decimal=12)
        assert_almost_equal(box_sum_2.array, ref, decimal=12)
        assert_almost_equal(box_sum_3.array, ref, decimal=12)
        # Assert that the kernels haven't changed
        assert_almost_equal(box_1.array, [0.2, 0.2, 0.2, 0.2, 0.2], decimal=12)
        assert_almost_equal(box_2.array, [1/3., 1/3., 1/3.], decimal=12)
        assert_almost_equal(box_3.array, [1], decimal=12)

    def test_add_2D_kernels(self):
        """
        Check if adding of two 2D kernels works.
        """
        box_1 = Box2DKernel(3)
        box_2 = Box2DKernel(1)
        box_sum_1 = box_1 + box_2
        box_sum_2 = box_2 + box_1
        ref = [[1 / 9., 1 / 9., 1 / 9.],
               [1 / 9., 1 + 1 / 9., 1 / 9.],
               [1 / 9., 1 / 9., 1 / 9.]]
        ref_1 = [[1 / 9., 1 / 9., 1 / 9.],
                 [1 / 9., 1 / 9., 1 / 9.],
                 [1 / 9., 1 / 9., 1 / 9.]]
        assert_almost_equal(box_2.array, [[1]], decimal=12)
        assert_almost_equal(box_1.array, ref_1, decimal=12)
        assert_almost_equal(box_sum_1.array, ref, decimal=12)
        assert_almost_equal(box_sum_2.array, ref, decimal=12)

    def test_Gaussian1DKernel_even_size(self):
        """
        Check if even size for GaussianKernel works.
        """
        gauss = Gaussian1DKernel(3, x_size=10)
        assert gauss.array.size == 10

    def test_Gaussian2DKernel_even_size(self):
        """
        Check if even size for GaussianKernel works.
        """
        gauss = Gaussian2DKernel(3, x_size=10, y_size=10)
        assert gauss.array.shape == (10, 10)

    # https://github.com/astropy/astropy/issues/3605
    def test_Gaussian2DKernel_rotated(self):
        gauss = Gaussian2DKernel(
            x_stddev=3, y_stddev=1.5, theta=0.7853981633974483,
            x_size=5, y_size=5)  # rotated 45 deg ccw
        ans = [[0.02267712, 0.02464785, 0.02029238, 0.01265463, 0.00597762],
               [0.02464785, 0.03164847, 0.03078144, 0.02267712, 0.01265463],
               [0.02029238, 0.03078144, 0.03536777, 0.03078144, 0.02029238],
               [0.01265463, 0.02267712, 0.03078144, 0.03164847, 0.02464785],
               [0.00597762, 0.01265463, 0.02029238, 0.02464785, 0.02267712]]
        assert_allclose(gauss, ans, rtol=0.001)  # Rough comparison at 0.1 %

    def test_normalize_peak(self):
        """
        Check if normalize works with peak mode.
        """
        custom = CustomKernel([1, 2, 3, 2, 1])
        custom.normalize(mode='peak')
        assert custom.array.max() == 1

    def test_check_kernel_attributes(self):
        """
        Check if kernel attributes are correct.
        """
        box = Box2DKernel(5)
        # Check truncation
        assert box.truncation == 0
        # Check model
        assert isinstance(box.model, Box2D)
        # Check center
        assert box.center == [2, 2]
        # Check normalization
        box.normalize()
        assert_almost_equal(box._kernel_sum, 1., decimal=12)
        # Check separability
        assert box.separable

    @pytest.mark.parametrize(('kernel_type', 'mode'), list(itertools.product(KERNEL_TYPES, MODES)))
    def test_discretize_modes(self, kernel_type, mode):
        """
        Check if the different modes result in kernels that work with convolve.
        Use only small kernel width, to make the test pass quickly.
        """
        if kernel_type == AiryDisk2DKernel and not HAS_SCIPY:
            pytest.skip("Omitting AiryDisk2DKernel, which requires SciPy")
        # Bug fix: 'mode' was previously never passed to the constructor,
        # so every parametrized case built an identical kernel.
        if kernel_type is not Ring2DKernel:
            kernel = kernel_type(3, mode=mode)
        else:
            kernel = kernel_type(3, 3 * 0.2, mode=mode)
        if kernel.dimension == 1:
            c1 = convolve_fft(delta_pulse_1D, kernel, boundary='fill', normalize_kernel=False)
            c2 = convolve(delta_pulse_1D, kernel, boundary='fill', normalize_kernel=False)
            assert_almost_equal(c1, c2, decimal=12)
        else:
            c1 = convolve_fft(delta_pulse_2D, kernel, boundary='fill', normalize_kernel=False)
            c2 = convolve(delta_pulse_2D, kernel, boundary='fill', normalize_kernel=False)
            assert_almost_equal(c1, c2, decimal=12)

    @pytest.mark.parametrize(('width'), WIDTHS_EVEN)
    def test_box_kernels_even_size(self, width):
        """
        Check if BoxKernel work properly with even sizes.
        """
        kernel_1D = Box1DKernel(width)
        assert kernel_1D.shape[0] % 2 != 0
        assert kernel_1D.array.sum() == 1.
        kernel_2D = Box2DKernel(width)
        assert np.all([_ % 2 != 0 for _ in kernel_2D.shape])
        assert kernel_2D.array.sum() == 1.

    def test_kernel_normalization(self):
        """
        Test that repeated normalizations do not change the kernel [#3747].
        """
        kernel = CustomKernel(np.ones(5))
        kernel.normalize()
        data = np.copy(kernel.array)
        kernel.normalize()
        assert_allclose(data, kernel.array)
        kernel.normalize()
        assert_allclose(data, kernel.array)

    def test_kernel_normalization_mode(self):
        """
        Test that an error is raised if mode is invalid.
        """
        with pytest.raises(ValueError):
            kernel = CustomKernel(np.ones(3))
            kernel.normalize(mode='invalid')

    def test_kernel1d_int_size(self):
        """
        Test that an error is raised if ``Kernel1D`` ``x_size`` is not
        an integer.
        """
        with pytest.raises(TypeError):
            Gaussian1DKernel(3, x_size=1.2)

    def test_kernel2d_int_xsize(self):
        """
        Test that an error is raised if ``Kernel2D`` ``x_size`` is not
        an integer.
        """
        with pytest.raises(TypeError):
            Gaussian2DKernel(3, x_size=1.2)

    def test_kernel2d_int_ysize(self):
        """
        Test that an error is raised if ``Kernel2D`` ``y_size`` is not
        an integer.
        """
        with pytest.raises(TypeError):
            Gaussian2DKernel(3, x_size=5, y_size=1.2)

    def test_kernel1d_initialization(self):
        """
        Test that an error is raised if an array or model is not
        specified for ``Kernel1D``.
        """
        with pytest.raises(TypeError):
            Kernel1D()

    def test_kernel2d_initialization(self):
        """
        Test that an error is raised if an array or model is not
        specified for ``Kernel2D``.
        """
        with pytest.raises(TypeError):
            Kernel2D()

    def test_array_keyword_not_allowed(self):
        """
        Regression test for issue #10439
        """
        # Bug fix: previously every constructor call shared a single
        # pytest.raises block, so only the first one ever executed -- the
        # rest were dead code (including a duplicated Trapezoid1DKernel
        # line).  Each call now gets its own context so all are checked.
        x = np.ones([10, 10])
        constructors = [
            lambda: AiryDisk2DKernel(2, array=x),
            lambda: Box1DKernel(2, array=x),
            lambda: Box2DKernel(2, array=x),
            lambda: Gaussian1DKernel(2, array=x),
            lambda: Gaussian2DKernel(2, array=x),
            lambda: RickerWavelet1DKernel(2, array=x),
            lambda: RickerWavelet2DKernel(2, array=x),
            lambda: Model1DKernel(Gaussian1D(1, 0, 2), array=x),
            lambda: Model2DKernel(Gaussian2D(1, 0, 0, 2, 2), array=x),
            lambda: Ring2DKernel(9, 8, array=x),
            lambda: Tophat2DKernel(2, array=x),
            lambda: Trapezoid1DKernel(2, array=x),
        ]
        for ctor in constructors:
            with pytest.raises(TypeError, match=r".* allowed .*"):
                ctor()
| |
from boto.exception import BotoServerError
class ResponseErrorFactory(BotoServerError):
    # Factory class: instantiating it actually returns an instance of the
    # exception class whose name matches the parsed error_code, falling back
    # to the generic ResponseError for unknown codes.

    def __new__(cls, *args, **kw):
        # Parse the server response once so we can read its error_code.
        error = BotoServerError(*args, **kw)
        newclass = globals().get(error.error_code, ResponseError)
        obj = newclass.__new__(newclass, *args, **kw)
        # Copy the already-parsed state onto the specialized instance.
        obj.__dict__.update(error.__dict__)
        return obj
class ResponseError(BotoServerError):
    """Undefined response error.
    """
    # Subclasses set retry=True when the failure is transient and the request
    # may safely be re-submitted.
    retry = False

    def __repr__(self):
        return '{}({}, {},\n\t{})'.format(self.__class__.__name__,
                                          self.status, self.reason,
                                          self.error_message)

    def __str__(self):
        # The class docstring doubles as the human-readable description here.
        return 'FPS Response Error: {0.status} {0.__class__.__name__} {1}\n' \
               '{2}\n' \
               '{0.error_message}'.format(self,
                                          self.retry and '(Retriable)' or '',
                                          self.__doc__.strip())
class RetriableResponseError(ResponseError):
    # Base class for transient failures: callers may retry the request.
    retry = True

# Concrete FPS error codes.  ResponseErrorFactory maps a response's
# error_code string to the class of the same name; each docstring doubles
# as the user-facing description rendered by ResponseError.__str__.

class AccessFailure(RetriableResponseError):
    """Account cannot be accessed.
    """

class AccountClosed(RetriableResponseError):
    """Account is not active.
    """

class AccountLimitsExceeded(RetriableResponseError):
    """The spending or receiving limit on the account is exceeded.
    """

class AmountOutOfRange(ResponseError):
    """The transaction amount is more than the allowed range.
    """

class AuthFailure(RetriableResponseError):
    """AWS was not able to validate the provided access credentials.
    """

class ConcurrentModification(RetriableResponseError):
    """A retriable error can happen when two processes try to modify the
    same data at the same time.
    """

class DuplicateRequest(ResponseError):
    """A different request associated with this caller reference already
    exists.
    """

class InactiveInstrument(ResponseError):
    """Payment instrument is inactive.
    """

class IncompatibleTokens(ResponseError):
    """The transaction could not be completed because the tokens have
    incompatible payment instructions.
    """

class InstrumentAccessDenied(ResponseError):
    """The external calling application is not the recipient for this
    postpaid or prepaid instrument.
    """

class InstrumentExpired(ResponseError):
    """The prepaid or the postpaid instrument has expired.
    """

class InsufficientBalance(RetriableResponseError):
    """The sender, caller, or recipient's account balance has
    insufficient funds to complete the transaction.
    """

class InternalError(RetriableResponseError):
    """A retriable error that happens due to some transient problem in
    the system.
    """

class InvalidAccountState(RetriableResponseError):
    """The account is either suspended or closed.
    """

class InvalidAccountState_Caller(RetriableResponseError):
    """The developer account cannot participate in the transaction.
    """

class InvalidAccountState_Recipient(RetriableResponseError):
    """Recipient account cannot participate in the transaction.
    """

class InvalidAccountState_Sender(RetriableResponseError):
    """Sender account cannot participate in the transaction.
    """

class InvalidCallerReference(ResponseError):
    """The Caller Reference does not have a token associated with it.
    """

class InvalidClientTokenId(ResponseError):
    """The AWS Access Key Id you provided does not exist in our records.
    """

class InvalidDateRange(ResponseError):
    """The end date specified is before the start date or the start date
    is in the future.
    """

class InvalidParams(ResponseError):
    """One or more parameters in the request is invalid.
    """

class InvalidPaymentInstrument(ResponseError):
    """The payment method used in the transaction is invalid.
    """

class InvalidPaymentMethod(ResponseError):
    """Specify correct payment method.
    """

class InvalidRecipientForCCTransaction(ResponseError):
    """This account cannot receive credit card payments.
    """

class InvalidSenderRoleForAccountType(ResponseError):
    """This token cannot be used for this operation.
    """

class InvalidTokenId(ResponseError):
    """You did not install the token that you are trying to cancel.
    """

class InvalidTokenId_Recipient(ResponseError):
    """The recipient token specified is either invalid or canceled.
    """

class InvalidTokenId_Sender(ResponseError):
    """The sender token specified is either invalid or canceled or the
    token is not active.
    """

class InvalidTokenType(ResponseError):
    """An invalid operation was performed on the token, for example,
    getting the token usage information on a single use token.
    """

class InvalidTransactionId(ResponseError):
    """The specified transaction could not be found or the caller did not
    execute the transaction or this is not a Pay or Reserve call.
    """

class InvalidTransactionState(ResponseError):
    """The transaction is not complete, or it has temporarily failed.
    """

class NotMarketplaceApp(RetriableResponseError):
    """This is not an marketplace application or the caller does not
    match either the sender or the recipient.
    """

class OriginalTransactionFailed(ResponseError):
    """The original transaction has failed.
    """

class OriginalTransactionIncomplete(RetriableResponseError):
    """The original transaction is still in progress.
    """

class PaymentInstrumentNotCC(ResponseError):
    """The payment method specified in the transaction is not a credit
    card. You can only use a credit card for this transaction.
    """

class PaymentMethodNotDefined(ResponseError):
    """Payment method is not defined in the transaction.
    """

class PrepaidFundingLimitExceeded(RetriableResponseError):
    """An attempt has been made to fund the prepaid instrument
    at a level greater than its recharge limit.
    """

class RefundAmountExceeded(ResponseError):
    """The refund amount is more than the refundable amount.
    """

class SameSenderAndRecipient(ResponseError):
    """The sender and receiver are identical, which is not allowed.
    """

class SameTokenIdUsedMultipleTimes(ResponseError):
    """This token is already used in earlier transactions.
    """

class SenderNotOriginalRecipient(ResponseError):
    """The sender in the refund transaction is not
    the recipient of the original transaction.
    """

class SettleAmountGreaterThanDebt(ResponseError):
    """The amount being settled or written off is
    greater than the current debt.
    """

class SettleAmountGreaterThanReserveAmount(ResponseError):
    """The amount being settled is greater than the reserved amount.
    """

class SignatureDoesNotMatch(ResponseError):
    """The request signature calculated by Amazon does not match the
    signature you provided.
    """

class TokenAccessDenied(ResponseError):
    """Permission to cancel the token is denied.
    """

class TokenNotActive(ResponseError):
    """The token is canceled.
    """

class TokenNotActive_Recipient(ResponseError):
    """The recipient token is canceled.
    """

class TokenNotActive_Sender(ResponseError):
    """The sender token is canceled.
    """

class TokenUsageError(ResponseError):
    """The token usage limit is exceeded.
    """

class TransactionDenied(ResponseError):
    """The transaction is not allowed.
    """

class TransactionFullyRefundedAlready(ResponseError):
    """The transaction has already been completely refunded.
    """

class TransactionTypeNotRefundable(ResponseError):
    """You cannot refund this transaction.
    """

class UnverifiedAccount_Recipient(ResponseError):
    """The recipient's account must have a verified bank account or a
    credit card before this transaction can be initiated.
    """

class UnverifiedAccount_Sender(ResponseError):
    """The sender's account must have a verified U.S. credit card or
    a verified U.S bank account before this transaction can be
    initiated.
    """

class UnverifiedBankAccount(ResponseError):
    """A verified bank account should be used for this transaction.
    """

class UnverifiedEmailAddress_Caller(ResponseError):
    """The caller account must have a verified email address.
    """

class UnverifiedEmailAddress_Recipient(ResponseError):
    """The recipient account must have a verified
    email address for receiving payments.
    """

class UnverifiedEmailAddress_Sender(ResponseError):
    """The sender account must have a verified
    email address for this payment.
    """
| |
# Copyright (c) 2015 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from rest_framework import serializers
from silver.api.serializers.common import CustomerUrl, PDFUrl
from silver.api.serializers.transaction_serializers import TransactionSerializer
from silver.models import DocumentEntry, Customer, Invoice, Proforma, BillingDocumentBase
class DocumentEntrySerializer(serializers.HyperlinkedModelSerializer):
    """Read-only serializer for a single invoice/proforma line item."""
    # Expose the related product code by its 'value' slug instead of a URL.
    product_code = serializers.SlugRelatedField(
        slug_field='value',
        read_only=True
    )

    class Meta:
        model = DocumentEntry
        fields = ('description', 'unit', 'unit_price', 'quantity', 'total',
                  'total_before_tax', 'start_date', 'end_date', 'prorated',
                  'product_code')
class DocumentUrl(serializers.HyperlinkedIdentityField):
    """Identity field that links to either the invoice or the proforma
    detail view, chosen per object from its ``kind`` attribute."""

    def __init__(self, proforma_view_name, invoice_view_name, *args, **kwargs):
        # HyperlinkedIdentityField requires a view_name at init time, but the
        # real one is only known per-object inside get_url; pass a dummy.
        kwargs['view_name'] = ''
        super(DocumentUrl, self).__init__(*args, **kwargs)
        self.invoice_view_name = invoice_view_name
        self.proforma_view_name = proforma_view_name

    def get_url(self, obj, view_name, request, format):
        # Pick the concrete view based on the document's kind.
        if obj.kind == 'invoice':
            view_name = self.invoice_view_name
        else:
            view_name = self.proforma_view_name
        lookup_value = getattr(obj, self.lookup_field)
        # Unsaved/blank lookup values cannot be reversed into a URL.
        if not lookup_value:
            return None
        url_kwargs = {self.lookup_url_kwarg: str(lookup_value)}
        return self.reverse(view_name, kwargs=url_kwargs,
                            request=request, format=format)
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
    """
    A read-only serializer for Proformas and Invoices.
    """
    customer = CustomerUrl(view_name='customer-detail',
                           queryset=Customer.objects.all())
    pdf_url = PDFUrl(view_name='pdf', source='*', read_only=True)
    # resolves to invoice-detail or proforma-detail depending on `kind`
    url = DocumentUrl(proforma_view_name='proforma-detail',
                      invoice_view_name='invoice-detail', )
    transactions = serializers.SerializerMethodField()

    def get_transactions(self, document):
        """Serialize the transactions of the underlying invoice/proforma.

        Returns an empty list for any kind other than 'invoice'/'proforma'.
        """
        if document.kind == 'invoice':
            transactions = document.invoice_transactions.all()
        elif document.kind == 'proforma':
            transactions = document.proforma_transactions.all()
        else:
            return []
        for transaction in transactions:
            # This is done to avoid prefetching already prefetched resources
            transaction.payment_method.customer = document.customer
            transaction.provider = document.provider
        return TransactionSerializer(transactions, many=True,
                                     context=self.context).data

    class Meta:
        model = BillingDocumentBase
        fields = ('id', 'url', 'kind', 'series', 'number', 'provider',
                  'customer', 'due_date', 'issue_date', 'paid_date',
                  'cancel_date', 'sales_tax_name', 'sales_tax_percent',
                  'transaction_currency', 'currency', 'state', 'total',
                  'total_in_transaction_currency', 'pdf_url', 'transactions')
        # everything is read-only: documents are mutated via their own views
        read_only_fields = fields
class InvoiceSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for ``Invoice`` documents, their entries and transactions.

    Direct ``state`` changes are rejected in :meth:`validate`; the dedicated
    state-transition endpoints must be used instead.
    """
    invoice_entries = DocumentEntrySerializer(many=True)
    pdf_url = PDFUrl(view_name='pdf', source='*', read_only=True)
    customer = CustomerUrl(view_name='customer-detail',
                           queryset=Customer.objects.all())
    transactions = TransactionSerializer(many=True, read_only=True)

    class Meta:
        model = Invoice
        fields = ('id', 'series', 'number', 'provider', 'customer',
                  'archived_provider', 'archived_customer', 'due_date',
                  'issue_date', 'paid_date', 'cancel_date', 'sales_tax_name',
                  'sales_tax_percent', 'currency', 'transaction_currency',
                  'transaction_xe_rate', 'transaction_xe_date', 'state', 'proforma',
                  'invoice_entries', 'total', 'total_in_transaction_currency',
                  'pdf_url', 'transactions')
        read_only_fields = ('archived_provider', 'archived_customer', 'total',
                            'total_in_transaction_currency')
        extra_kwargs = {
            'transaction_currency': {'required': False},
            'proforma': {'source': 'related_document', 'view_name': 'proforma-detail'}
        }

    def create(self, validated_data):
        """Create the invoice together with its entry lines."""
        # Guard against a missing/None entries key so an entry-less payload
        # doesn't crash with ``TypeError: 'NoneType' object is not iterable``.
        entries = validated_data.pop('invoice_entries', None) or []

        # Create the new invoice object
        invoice = Invoice.objects.create(**validated_data)

        # Add the invoice entries
        for entry in entries:
            DocumentEntry.objects.create(invoice=invoice, **entry)

        return invoice

    def update(self, instance, validated_data):
        """Apply the updateable fields to ``instance`` and save it."""
        # The provider has changed => force the generation of the correct
        # number corresponding to the count of the new provider.
        current_provider = instance.provider
        new_provider = validated_data.get('provider')
        if new_provider and new_provider != current_provider:
            instance.number = None

        for field_name in instance.updateable_fields:
            field_value = validated_data.get(field_name,
                                             getattr(instance, field_name))
            setattr(instance, field_name, field_value)

        instance.save()

        return instance

    def validate(self, data):
        """Run model-level validation and forbid direct state changes."""
        data = super(InvoiceSerializer, self).validate(data)

        if self.instance:
            self.instance.clean()

            # ``state`` may be absent on partial updates; only reject an
            # explicit attempt to change it (indexing data['state'] raised
            # KeyError for such requests).
            if 'state' in data and data['state'] != self.instance.state:
                msg = "Direct state modification is not allowed." \
                      " Use the corresponding endpoint to update the state."
                raise serializers.ValidationError(msg)

        return data
class ProformaSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for ``Proforma`` documents, their entries and transactions.

    Direct ``state`` changes are rejected in :meth:`validate`; the dedicated
    state-transition endpoints must be used instead.
    """
    proforma_entries = DocumentEntrySerializer(many=True)
    pdf_url = PDFUrl(view_name='pdf', source='*', read_only=True)
    customer = CustomerUrl(view_name='customer-detail',
                           queryset=Customer.objects.all())
    transactions = TransactionSerializer(many=True, read_only=True)

    class Meta:
        model = Proforma
        fields = ('id', 'series', 'number', 'provider', 'customer',
                  'archived_provider', 'archived_customer', 'due_date',
                  'issue_date', 'paid_date', 'cancel_date', 'sales_tax_name',
                  'sales_tax_percent', 'currency', 'transaction_currency',
                  'transaction_xe_rate', 'transaction_xe_date', 'state', 'invoice',
                  'proforma_entries', 'total', 'total_in_transaction_currency',
                  'pdf_url', 'transactions')
        read_only_fields = ('archived_provider', 'archived_customer', 'total',
                            'total_in_transaction_currency')
        extra_kwargs = {
            'transaction_currency': {'required': False},
            'invoice': {'source': 'related_document', 'view_name': 'invoice-detail'}
        }

    def create(self, validated_data):
        """Create the proforma together with its entry lines."""
        # Guard against a missing/None entries key so an entry-less payload
        # doesn't crash with ``TypeError: 'NoneType' object is not iterable``.
        entries = validated_data.pop('proforma_entries', None) or []

        proforma = Proforma.objects.create(**validated_data)

        for entry in entries:
            DocumentEntry.objects.create(proforma=proforma, **entry)

        return proforma

    def update(self, instance, validated_data):
        """Apply the updateable fields to ``instance`` and save it."""
        # The provider has changed => force the generation of the correct
        # number corresponding to the count of the new provider.
        current_provider = instance.provider
        new_provider = validated_data.get('provider')
        if new_provider and new_provider != current_provider:
            instance.number = None

        for field_name in instance.updateable_fields:
            field_value = validated_data.get(field_name,
                                             getattr(instance, field_name))
            setattr(instance, field_name, field_value)

        instance.save()

        return instance

    def validate(self, data):
        """Run model-level validation and forbid direct state changes."""
        data = super(ProformaSerializer, self).validate(data)

        if self.instance:
            self.instance.clean()

            # ``state`` may be absent on partial updates; only reject an
            # explicit attempt to change it (indexing data['state'] raised
            # KeyError for such requests).
            if 'state' in data and data['state'] != self.instance.state:
                msg = "Direct state modification is not allowed." \
                      " Use the corresponding endpoint to update the state."
                raise serializers.ValidationError(msg)

        return data
| |
'''gen_python_api.py generates a python.api file for SciTE
The generated api file includes
*) all Python keywords
*) all builtin functions
*) all module attributes
Module functions are represented by their docstring if available,
otherwise by the function definition from the source file.
Classes are represented by their constructor if available.
Usage:
Edit the list of modules which should be excluded. This list is located
some lines below. Look for excludemodulelist = [...]
Specify the modules whose contents should be added as global names
(i.e. from parrot import *). Look for addasgloballist = [...]
Start the script by typing 'python gen_python_api.py' in the shell.
Don't start it from within SciTE on Win32 systems, because some
modules need a TTY when they are imported.
Copy the generated python.api file into your SciTE directory and
add the following lines to your SciTEUser.properties file:
api.*.py=$(SciteDefaultHome)/python.api
api.*.pyw=$(SciteDefaultHome)/python.api
autocomplete.choose.single=1
autocomplete.python.ignorecase=1
autocomplete.python.start.characters=.
autocomplete.python.fillups=(
#autocompleteword.automatic
calltip.python.ignorecase=1
calltip.python.word.characters=._$(chars.alpha)$(chars.numeric)
Restart SciTE. Enjoy.
by Markus Gritsch (gritsch@iue.tuwien.ac.at)
'''
# ---- configuration -----------------------------------------------------------
# if one of these substrings is found in a specific sys.path directory,
# the modules in this particular directory are not processed
excludedirlist = ['lib-tk', 'idle', 'Lightflow', 'plat-linux-i386',
                  'win32', 'pythonwin', 'plat-win',
                  'test', 'distutils', 'encodings', 'OpenGL', 'gnome', 'pyglade',
                  'happydoc', 'pygame', 'mx',
                  'wxPython']

# list of modules which should be excluded
excludemodulelist = ['win32traceutil', 'win32pdh', 'perfmondata', 'tzparse',
                     'libqtcmodule-2.2', 'libqtc',
                     'win32com',
                     'GDK', 'GTK', 'GdkImlib', 'GtkExtra', 'Gtkinter', 'gtk', 'GTKconst',
                     'zip_it']

# switch for excluding modules whose names begin with _
exclude_modules = 1

# list of modules whose contents should be added as global names
# (i.e. as if 'from <module> import *' were used)
addasgloballist = ['qt']

# list of modules which are otherwise not accessible
# sourcefile-parsing is NOT done for these modules
# also activate the add_manual_modules-switch below
manuallist = []# ['os.path']

# import modules of the following type (the dot must be present!!)
moduletypes = ['.py', '.pyd', '.dll', '.so']

# some switches (1 = enabled, 0 = disabled)
add_keywords = 1        # e.g. print
add_builtins = 1        # e.g. open()
add_builtin_modules = 1 # e.g. sys
add_manual_modules = 1  # modules from manuallist
add_package_modules = 1 # modules which are directories with __init__.py files
add_other_modules = 1   # all the other modules
#------------------------------------------------------------------------------
import string, re, sys, os, types

# maps an api entry name to the full api line (ending in '\n')
api = {}
def processName(entryprefix, moduleprefix, name, ns):
    """Add an api entry for *name* (qualified by *moduleprefix*) to the
    global ``api`` dict.

    If the first docstring line looks like a signature ('name(...)') it is
    used verbatim; otherwise a placeholder entry tagged with the object's
    kind ([class]/[function]/[module]) is stored.

    ns is the namespace dict the module was imported into; the exec'd
    probes store their results (hasdoc, nametype, doc) back into it.

    NOTE(review): the exec'd expressions quote the dotted name, e.g.
    hasattr("sys.path", "__doc__") -- that probes the *string literal*,
    not the named object; presumably the quotes are unintended. Confirm
    before relying on the [doc:]/[class] classification.
    """
    exec 'hasdoc = hasattr("' + moduleprefix + name + '", "__doc__")' in ns
    exec 'nametype = type("' + moduleprefix + name + '")' in ns
    if ns['hasdoc']:
        exec 'doc = "' + moduleprefix + name + '".__doc__' in ns
        # signature-looking first docstring line: 'name( ... )'
        pattern = re.compile('^ *' + name + r' *\(.*?\)')
        if ns['doc'] and type(ns['doc']) == types.StringType: # 'and'-part added by Peter Schoen <schoen@ZTT.Fh-Worms.DE>
            if pattern.search(ns['doc']):
                if not api.has_key(entryprefix + name):
                    api[entryprefix + name] = entryprefix + string.strip(string.split(ns['doc'], '\n')[0]) + '\n'
                return
            else:
                if ns['nametype'] in [types.ClassType, types.FunctionType]:
                    api[entryprefix + name] = entryprefix + name + '(??) [doc: ' + string.strip(string.split(ns['doc'], '\n')[0]) + ']' + '\n'
    # fall-through: no usable docstring signature -- store a typed placeholder
    if not api.has_key(entryprefix + name):
        if ns['nametype'] == types.ClassType:
            api[entryprefix + name] = entryprefix + name + '(??) [class]\n'
        elif ns['nametype'] == types.FunctionType:
            api[entryprefix + name] = entryprefix + name + '(??) [function]\n'
        elif ns['nametype'] == types.ModuleType:
            api[entryprefix + name] = entryprefix + name + ':: [module]\n'
        else:
            api[entryprefix + name] = entryprefix + name + '\n'
def processModule(module, file=''):
    """Import *module*, add api entries for all its names, and -- when
    *file* is a .py source -- refine the entries by parsing the source for
    'def'/'class'/'__init__' signatures.

    Modules in excludemodulelist (or starting with '_' when the
    exclude_modules switch is on) are skipped.
    """
    print ' ', string.ljust(module, 22), ': importing ...',
    if module in excludemodulelist:
        print 'in exclude list'
        return
    if exclude_modules and module[0] == '_':
        print 'modulename begins with _'
        return
    #~ if module in addasgloballist:
    #~     entryprefix = ''
    #~ else:
    #~     entryprefix = module + '.'
    # modules under a package listed in addasgloballist lose that package
    # prefix in their api entries (they behave like global names)
    entryprefix = module + '.'
    for addasglobal in addasgloballist:
        if module[:len(addasglobal)] == addasglobal:
            entryprefix = module[len(addasglobal)+1:]
            break
    ns = {}
    try:
        exec 'import ' + module in ns
        print 'ok,',
    except:
        print sys.exc_info()[0]
        return
    print 'processing ...',
    try:
        exec 'names = dir(%s)' % module in ns
    except:
        print sys.exc_info()[0]
        return
    for name in ns['names']:
        processName(entryprefix, module + '.', name, ns)
    print 'ok,',
    # parse module source file if available
    if file[-3:] != '.py':
        print 'no source file'
        return
    print 'parsing ...',
    try:
        f = open(file, 'rt')
    except IOError:
        print sys.exc_info()[0]
        return
    contents = f.readlines()
    f.close()
    # top-level defs, top-level classes, and indented __init__ methods
    def_p = re.compile(r'^def (\w*)( *\(.*?\)):')
    class_p = re.compile(r'^class +(\w*)')
    init_p = re.compile(r'^[ \t]+def +__init__\(\w*, *(.*?)\):')
    inclass = 0
    classname = ''
    for line in contents:
        def_m = def_p.search(line)
        if def_m:
            name = def_m.group(1)
            if api.has_key(entryprefix + name):
                # keep an existing [doc: ...] tail, but replace the '(??)'
                # placeholder with the real parameter list from the source
                docindex = string.find(api[entryprefix + name], '[doc:')
                if docindex + 1:
                    doc = ' ' + api[entryprefix + name][docindex:] # trailing \n included
                    api[entryprefix + name] = entryprefix + name + def_m.group(2) + doc
                if api[entryprefix + name] == entryprefix + name + '(??) [function]\n':
                    api[entryprefix + name] = entryprefix + name + def_m.group(2) + '\n'
        if inclass:
            # represent a class by its constructor's parameter list
            init_m = init_p.search(line)
            if init_m:
                if api.has_key(entryprefix + classname):
                    docindex = string.find(api[entryprefix + classname], '[doc:')
                    if docindex + 1:
                        doc = ' ' + api[entryprefix + classname][docindex:] # trailing \n included
                        api[entryprefix + classname] = entryprefix + classname + '(' + init_m.group(1) + ')' + doc
                    if api[entryprefix + classname] == entryprefix + classname + '(??) [class]\n':
                        api[entryprefix + classname] = entryprefix + classname + '(' + init_m.group(1) + ')' + '\n'
                inclass = 0
        if not line[0] in ' \t\n':
            # any new top-level statement ends the current class body
            inclass = 0
            class_m = class_p.search(line)
            if class_m:
                inclass = 1
                classname = class_m.group(1)
    print 'ok'
def processFolder(folder, prefix=''):
    """Process every module and package found in *folder*.

    *prefix* is the dotted package path accumulated while recursing into
    packages (directories containing an __init__.py).  Folders matching
    excludedirlist are skipped entirely.
    """
    print 'processing', folder,
    for excludedir in excludedirlist:
        if string.find(folder, excludedir) + 1:
            print '... in exclude list',
            folder = ''
            break
    print
    if folder == '' or not os.path.isdir(folder):
        return
    entries = os.listdir(folder)
    for entry in entries:
        if add_package_modules and \
           os.path.isdir(folder + os.sep + entry) and \
           os.path.isfile(folder + os.sep + entry + os.sep + '__init__.py'):
            # package
            processFolder(folder + os.sep + entry, prefix=prefix+entry+'.')
            print '-done with', folder + os.sep + entry
        elif prefix and entry == '__init__.py':
            # modules which are directories with __init__.py files
            # The probing of 'prefix' is unfortunately necessary, because of
            # the incorrect behavior of some packages (e.g. PIL) which add
            # their directory to the searchpath via a .pth file AND are
            # packages because of an __init__.py file.
            module = prefix[:-1]
            file = folder + os.sep + entry
            processModule(module, file)
        elif add_other_modules:
            # normal file-modules
            root, ext = os.path.splitext(entry)
            if not ext in moduletypes:
                continue
            # 'foomodule.so' is imported as module 'foo'
            if entry[-9:] == 'module.so':
                module = prefix + entry[:-9]
            else:
                module = prefix + root
            file = folder + os.sep + entry
            processModule(module, file)
#------------------------------------------------------------------------------
# script body: collect keywords, builtins and modules, then write python.api

# keywords
if add_keywords:
    print '\nadding keywords ...',
    keywords = string.split('''and assert break class continue def del elif else
except exec finally for from global if import in is lambda None not or pass
print raise return try while''')
    for keyword in keywords:
        api[keyword] = keyword + '\n'
    print 'ok'

# __builtins__
if add_builtins:
    print '\nadding __builtins__ ...',
    for builtin in dir(__builtins__):
        processName(entryprefix = '', moduleprefix = '', name = builtin, ns = {})
    print 'ok'

# sys.builtin_module_names
if add_builtin_modules:
    print '\nprocessing builtin modules'
    for module in sys.builtin_module_names:
        processModule(module)

# modules specified in manuallist
if add_manual_modules:
    print '\nprocessing modules specified in manuallist'
    for module in manuallist:
        processModule(module)

# modules from sys.path
if add_package_modules or add_other_modules:
    print '\nprocessing searchpath'
    # avoid duplicated entries in sys.path
    folders = {}
    for folder in sys.path:
        folders[folder] = None
    for folder in folders.keys():
        # skip the current directory (the script's own folder)
        if folder != os.getcwd():
            processFolder(folder)

#------------------------------------------------------------------------------
# sorting
print 'sorting api file ...',
apilist = api.values()
apilist.sort()
print 'done'

# saving
print 'saving api file ...',
f = open('python.api', 'wt')
f.writelines(apilist)
f.close()
print 'done\n'
| |
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2007,, Frank Scholz <coherence@beebits.net>
import time
from coherence.extern.simple_plugin import Plugin
from coherence import log
import coherence.extern.louie as louie
from coherence.upnp.core.utils import getPage
from coherence.extern.et import parse_xml
from coherence.upnp.core import DIDLLite
from twisted.internet import defer, reactor
class Backend(log.Loggable, Plugin):
    """Base class for all backends.

    Any UPnP service actions that can't be handled by the service classes
    themselves, or that need backend-specific adjustments, are defined
    here -- e.g. an upnp_Browse for the CDS Browse action.
    """

    # the device classes provided by this backend,
    # e.g. ['BinaryLight'] or ['MediaServer', 'MediaRenderer']
    implements = []

    logCategory = 'backend'

    def __init__(self, server, **kwargs):
        """Initialize the backend; usually overwritten by subclasses.

        When initialization is done the backend sends a signal to its
        device; the device then sets itself up and announces itself, and
        afterwards calls the backend's upnp_init method.
        """
        self.config = kwargs
        self.server = server  # the UPnP device that's hosting this backend
        # A subclass would now pull whatever it needs out of the config
        # dict, maybe connect to an external data source, and start up.
        log.Loggable.__init__(self)
        Plugin.__init__(self)
        # The actual backend has to report readiness itself, possibly only
        # after an answer from an external data source arrived:
        #self.init_completed()

    def init_completed(self, *args, **kwargs):
        """Inform Coherence that this backend is ready for announcement.

        Accepts any form of arguments, as the circumstances under which it
        is called vary.
        """
        louie.send('Coherence.UPnP.Backend.init_completed',
                   None, backend=self)

    def upnp_init(self):
        """Hook called after the device is fired.

        All initialization of service-related state variables belongs
        here, as the services aren't available before that point.
        """
        pass
class BackendStore(Backend):
    """Base class for all MediaServer backend stores."""

    logCategory = 'backend_store'

    def __init__(self, server, *args, **kwargs):
        """Initialize a MediaServer backend; usually overwritten.

        When initialization is done, a signal is sent to the device; the
        device sets itself up and announces itself, then calls the
        backend's upnp_init method.
        """
        Backend.__init__(self, server, *args)
        self.config = kwargs
        self.server = server  # the UPnP device that's hosting this backend
        self.update_id = 0

        # In case something is served via the MediaServer web backend, a
        # BackendItem should hand an URI assembled from
        # urlbase + '/' + id to its DIDLLite.Resource.
        self.urlbase = kwargs.get('urlbase', '')
        if not self.urlbase.endswith('/'):
            self.urlbase += '/'

        # mapping for the hex-numbered containers 0x04 to 0x17
        # (XBox/Windows Media Connect browse roots)
        m = self.wmc_mapping = {
            '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9',
            'A': 'A', 'B': 'B', 'C': 'C', 'D': 'D', 'E': 'E', 'F': 'F',
            '10': '10', '11': '11',
            # 12 is missing
            '13': '13', '14': '14', '15': '15', '16': '16', '17': '17',
        }
        m['4'] = m['8'] = m['B'] = lambda: self._get_all_items(0)

        # when ready, the concrete backend sends out the signal itself:
        #louie.send('Coherence.UPnP.Backend.init_completed', None, backend=self)

    def release(self):
        """Clean up whatever needs it upon shutdown of this backend."""
        pass

    def _get_all_items(self, id):
        """Return every non-container item below *id*.

        A helper for the XBox 360 UPnP Search action; rarely used, as
        backends usually overwrite wmc_mapping with more appropriate
        methods.
        """
        found = []
        root = self.get_by_id(id)
        if root is not None:
            pending = [root]
            while pending:
                container = pending.pop()
                if container.mimetype not in ['root', 'directory']:
                    continue
                for child in container.get_children(0, 0):
                    if child.mimetype in ['root', 'directory']:
                        pending.append(child)
                    else:
                        found.append(child)
        return found

    def get_by_id(self, id):
        """Look an item up by id; called by the CDS or the MediaServer web.

        *id* is the id property of our DIDLLite item.  If this MediaServer
        implements containers that share their content ('all tracks',
        'album', 'album_of_artist', ...), the CDS may pass
        'id@container' or 'id@container@container...', so something like

            if isinstance(id, basestring):
                id = id.split('@',1)
                id = id[0]

        may be appropriate as the first thing to do here.

        Should return None when no matching item is found, a BackendItem,
        or a Deferred.
        """
        return None
class BackendItem(log.Loggable):
    """Base class for all MediaServer backend items."""

    logCategory = 'backend_item'

    def __init__(self, *args, **kwargs):
        """Collect the data for a UPnP ContentDirectoryService object.

        Subclasses usually instantiate here something like

            self.item = DIDLLite.Container(id, parent_id, name, ...)

        or

            self.item = DIDLLite.MusicTrack(id, parent_id, name, ...)

        and, to make it a valid UPnP CDS object, attach one or more
        DIDLLite.Resource(uri, protocolInfo) instances:

            self.item.res = []
            res = DIDLLite.Resource(url, 'http-get:*:%s:*' % mimetype)
            # url: the urlbase of our backend + '/' + our id
            res.size = size
            self.item.res.append(res)
        """
        log.Loggable.__init__(self)
        # basename of a file, the album title, the artist's name, ...;
        # expected to be unicode
        self.name = u'my_name'
        self.item = None
        # update id of this item; for a CDS Container this should be
        # incremented on every modification
        self.update_id = 0
        # filepath of our media file, or alternatively a FilePath or a
        # ReverseProxyResource object
        self.location = None
        # filepath of or link to album art, if we have any
        self.cover = None

    def get_children(self, start=0, end=0):
        """Called by the CDS and the MediaServer web.

        Should return children[start:end], or a Deferred; when end == 0
        the request is for all children after *start*.
        """
        pass

    def get_child_count(self):
        """Called by the CDS.

        Should return the number of children, or a Deferred.
        """

    def get_item(self):
        """Return an UPnP ContentDirectoryServer DIDLLite object, or a
        Deferred; called by the CDS and the MediaServer web."""
        return self.item

    def get_name(self):
        """Return the item's name (always expected to be unicode);
        called by the MediaServer web."""
        return self.name

    def get_path(self):
        """Return the filepath of the media file this item refers to;
        called by the MediaServer web."""
        return self.location

    def get_cover(self):
        """Return the filepath of the album art file; only needed when an
        albumArtURI property pointing back at us was created for this
        item."""
        return self.cover

    def __repr__(self):
        return "%s[%s]" % (self.__class__.__name__, self.get_name())
class BackendRssMixin:

    def update_data(self, rss_url, container=None, encoding="utf-8"):
        """Build a deferred chain that retrieves the rdf file, parses and
        extracts the metadata, and reschedules itself afterwards."""
        def fail(f):
            self.info("fail %r", f)
            self.debug(f.getTraceback())
            return f

        d = getPage(rss_url)
        d.addCallback(parse_xml, encoding=encoding)
        d.addErrback(fail)
        d.addCallback(self.parse_data, container)
        d.addErrback(fail)
        d.addBoth(self.queue_update, rss_url, container)
        return d

    def parse_data(self, xml_data, container):
        """Extract media info and create BackendItems; implemented by the
        concrete backend."""
        pass

    def queue_update(self, error_or_failure, rss_url, container):
        """Schedule the next update_data run after self.refresh seconds."""
        from twisted.internet import reactor
        reactor.callLater(self.refresh, self.update_data, rss_url, container)
class Container(BackendItem):
    """A BackendItem holding children, lazily kept sorted by name."""

    def __init__(self, parent, title):
        BackendItem.__init__(self)
        self.parent = parent
        if self.parent is not None:
            self.parent_id = self.parent.get_id()
        else:
            # -1 marks a container without parent (the root)
            self.parent_id = -1
        # store and storage_id are filled in by the store's append_item()
        self.store = None
        self.storage_id = None
        self.name = title
        self.mimetype = 'directory'
        self.children = []
        self.children_ids = {}
        self.children_by_external_id = {}
        self.update_id = 0
        self.item = None
        self.sorted = False

        def childs_sort(x, y):
            return cmp(x.name, y.name)

        self.sorting_method = childs_sort

    def register_child(self, child, external_id=None):
        # append_item hands out the storage id and sets child.store
        id = self.store.append_item(child)
        child.url = self.store.urlbase + str(id)
        child.parent = self
        if external_id is not None:
            child.external_id = external_id
            self.children_by_external_id[external_id] = child

    def add_child(self, child, external_id=None, update=True):
        # NOTE(review): register_child() has no return statement, so `id`
        # is always None here -- it is unused, though; confirm intended.
        id = self.register_child(child, external_id)
        if self.children is None:
            self.children = []
        self.children.append(child)
        # newly appended child invalidates the sort order
        self.sorted = False
        if update == True:
            self.update_id += 1

    def remove_child(self, child, external_id=None, update=True):
        self.children.remove(child)
        self.store.remove_item(child)
        if update == True:
            self.update_id += 1
        if external_id is not None:
            child.external_id = None
            del self.children_by_external_id[external_id]

    def get_children(self, start=0, end=0):
        # sort lazily, only when children changed since the last call
        if self.sorted == False:
            self.children.sort(cmp=self.sorting_method)
            self.sorted = True
        if end != 0:
            return self.children[start:end]
        return self.children[start:]

    def get_child_count(self):
        if self.children is None:
            return 0
        return len(self.children)

    def get_path(self):
        return self.store.urlbase + str(self.storage_id)

    def get_item(self):
        # build the DIDLLite container lazily and cache it;
        # childCount is refreshed on every call
        if self.item is None:
            self.item = DIDLLite.Container(self.storage_id, self.parent_id, self.name)
        self.item.childCount = len(self.children)
        return self.item

    def get_name(self):
        return self.name

    def get_id(self):
        return self.storage_id

    def get_update_id(self):
        return self.update_id
class LazyContainer(Container):
    """A Container whose children are fetched on demand through a
    childrenRetriever callable, optionally refreshed every *refresh*
    seconds and optionally retrieved page by page."""

    logCategory = 'lazyContainer'

    def __init__(self, parent, title, external_id=None, refresh=0, childrenRetriever=None, **kwargs):
        Container.__init__(self, parent, title)
        self.childrenRetrievingNeeded = True
        self.childrenRetrievingDeferred = None
        self.childrenRetriever = childrenRetriever
        self.children_retrieval_campaign_in_progress = False
        # remaining kwargs are forwarded to the retriever on every call
        self.childrenRetriever_params = kwargs
        self.childrenRetriever_params['parent'] = self
        # paged retrieval is enabled when the caller passed 'per_page'
        self.has_pages = (self.childrenRetriever_params.has_key('per_page'))
        self.external_id = None
        self.external_id = external_id
        # children collected during an in-progress retrieval campaign
        self.retrieved_children = {}
        # timestamp of the last successful retrieval campaign
        self.last_updated = 0
        # refresh period in seconds; 0 disables periodic refresh
        self.refresh = refresh

    def replace_by(self, item):
        """Return True when *item* may replace this container in place
        (same external id, or no ids to compare)."""
        if self.external_id is not None and item.external_id is not None:
            return (self.external_id == item.external_id)
        return True

    def add_child(self, child, external_id=None, update=True):
        # during a retrieval campaign, children are parked in
        # retrieved_children and merged in end_children_retrieval_campaign()
        if self.children_retrieval_campaign_in_progress is True:
            self.retrieved_children[external_id] = child
        else:
            Container.add_child(self, child, external_id=external_id, update=update)

    def update_children(self, new_children, old_children):
        """Merge a freshly retrieved children dict into the current one,
        removing, replacing or adding items as needed (both dicts are
        keyed by external id)."""
        children_to_be_removed = {}
        children_to_be_replaced = {}
        children_to_be_added = {}

        # Phase 1
        # let's classify the item between items to be removed,
        # to be updated or to be added
        self.debug("Refresh pass 1:%d %d", len(new_children), len(old_children))
        for id, item in old_children.items():
            children_to_be_removed[id] = item
        for id, item in new_children.items():
            if old_children.has_key(id):
                #print(id, "already there")
                children_to_be_replaced[id] = old_children[id]
                del children_to_be_removed[id]
            else:
                children_to_be_added[id] = new_children[id]

        # Phase 2
        # Now, we remove, update or add the relevant items
        # to the list of items
        self.debug("Refresh pass 2: %d %d %d", len(children_to_be_removed), len(children_to_be_replaced), len(children_to_be_added))
        # Remove relevant items from Container children
        for id, item in children_to_be_removed.items():
            self.remove_child(item, external_id=id, update=False)
        # Update relevant items from Container children
        for id, item in children_to_be_replaced.items():
            old_item = item
            new_item = new_children[id]
            replaced = False
            # NOTE(review): `if self.replace_by:` tests the truthiness of
            # the bound method, which is always True -- presumably meant as
            # a "replacement method available" check; confirm.
            if self.replace_by:
                #print "Replacement method available: Try"
                replaced = old_item.replace_by(new_item)
            if replaced is False:
                #print "No replacement possible: we remove and add the item again"
                self.remove_child(old_item, external_id=id, update=False)
                self.add_child(new_item, external_id=id, update=False)
        # Add relevant items to Container children
        for id, item in children_to_be_added.items():
            self.add_child(item, external_id=id, update=False)

        self.update_id += 1

    def start_children_retrieval_campaign(self):
        """Begin collecting retrieved children into a fresh dict."""
        #print "start_update_campaign"
        self.last_updated = time.time()
        self.retrieved_children = {}
        self.children_retrieval_campaign_in_progress = True

    def end_children_retrieval_campaign(self, success=True):
        """Finish the campaign; on success merge the collected children
        into the container."""
        #print "end_update_campaign"
        self.children_retrieval_campaign_in_progress = False
        if success is True:
            self.update_children(self.retrieved_children, self.children_by_external_id)
            self.update_id += 1
        self.last_updated = time.time()
        self.retrieved_children = {}

    def retrieve_children(self, start=0, page=0):
        """Invoke the childrenRetriever (page by page when has_pages);
        returns a Deferred firing with the retrieved-children dict."""
        def items_retrieved(result, page, start_offset):
            if self.childrenRetrievingNeeded is True:
                new_offset = len(self.retrieved_children)
                return self.retrieve_children(new_offset, page + 1)  # we try the next page
            return self.retrieved_children

        self.childrenRetrievingNeeded = False
        if self.has_pages is True:
            self.childrenRetriever_params['offset'] = start
            self.childrenRetriever_params['page'] = page
        d = self.childrenRetriever(**self.childrenRetriever_params)
        d.addCallback(items_retrieved, page, start)
        return d

    def retrieve_all_children(self, start=0, request_count=0):
        """Run a full retrieval campaign and return a Deferred firing with
        the (merged) children list."""
        def all_items_retrieved(result):
            #print "All children retrieved!"
            self.end_children_retrieval_campaign(True)
            return Container.get_children(self, start, request_count)

        def error_while_retrieving_items(error):
            #print "Error while retrieving all children!"
            self.end_children_retrieval_campaign(False)
            return Container.get_children(self, start, request_count)

        # if first retrieval and refresh required
        # we start a looping call to periodically update the children
        #if ((self.last_updated == 0) and (self.refresh > 0)):
        #    task.LoopingCall(self.retrieve_children,0,0).start(self.refresh, now=False)

        self.start_children_retrieval_campaign()
        if self.childrenRetriever is not None:
            d = self.retrieve_children(start)
            if start == 0:
                d.addCallbacks(all_items_retrieved, error_while_retrieving_items)
            return d
        else:
            self.end_children_retrieval_campaign()
            return self.children

    def get_children(self, start=0, request_count=0):
        # Check if an update is needed since last update
        current_time = time.time()
        delay_since_last_updated = current_time - self.last_updated
        period = self.refresh
        if (period > 0) and (delay_since_last_updated > period):
            self.info("Last update is older than %d s -> update data", period)
            self.childrenRetrievingNeeded = True
        if self.childrenRetrievingNeeded is True:
            #print "children Retrieving IS Needed (offset is %d)" % start
            return self.retrieve_all_children()
        else:
            return Container.get_children(self, start, request_count)
# fixed id of the root container every store exposes
ROOT_CONTAINER_ID = 0
# first id handed out by getnextID() for regular items
SEED_ITEM_ID = 1000
class AbstractBackendStore (BackendStore):
    """A BackendStore keeping its items in an in-memory dict keyed by
    storage id."""

    def __init__(self, server, **kwargs):
        BackendStore.__init__(self, server, **kwargs)
        self.next_id = SEED_ITEM_ID
        self.store = {}

    def len(self):
        """Number of items currently held by the store."""
        return len(self.store)

    def set_root_item(self, item):
        """Register *item* as the root container and return its id."""
        return self.append_item(item, storage_id=ROOT_CONTAINER_ID)

    def get_root_id(self):
        return ROOT_CONTAINER_ID

    def get_root_item(self):
        return self.get_by_id(ROOT_CONTAINER_ID)

    def append_item(self, item, storage_id=None):
        """Store *item* (allocating an id unless one is given) and return
        the storage id used."""
        if storage_id is None:
            storage_id = self.getnextID()
        self.store[storage_id] = item
        item.storage_id = storage_id
        item.store = self
        return storage_id

    def remove_item(self, item):
        """Drop *item* from the store and detach it."""
        del self.store[item.storage_id]
        item.storage_id = -1
        item.store = None

    def get_by_id(self, id):
        """Look an item up by id; tolerates 'id@container...' and
        'id.suffix' forms and returns None when nothing matches."""
        if isinstance(id, basestring):
            id = id.split('@', 1)[0].split('.')[0]
        try:
            return self.store[int(id)]
        except (ValueError, KeyError):
            return None

    def getnextID(self):
        """Hand out the next free storage id."""
        assigned = self.next_id
        self.next_id += 1
        return assigned

    def __repr__(self):
        return self.__class__.__name__
| |
# Copyright 2012-2013 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A stupid L3 switch
For each switch:
1) Keep a table that maps IP addresses to MAC addresses and switch ports.
Stock this table using information from ARP and IP packets.
2) When you see an ARP query, try to answer it using information in the table
from step 1. If the info in the table is old, just flood the query.
3) Flood all other ARPs.
4) When you see an IP packet, if you know the destination port (because it's
in the table from step 1), install a flow for it.
"""
from pox.core import core
import pox
log = core.getLogger()
from pox.lib.packet.ethernet import ethernet, ETHER_BROADCAST
from pox.lib.packet.ipv4 import ipv4
from pox.lib.packet.arp import arp
from pox.lib.addresses import IPAddr, EthAddr
from pox.lib.util import str_to_bool, dpid_to_str
from pox.lib.recoco import Timer
import pox.openflow.libopenflow_01 as of
from pox.lib.revent import *
import time
# Idle timeout for installed flows, in seconds
FLOW_IDLE_TIMEOUT = 600
# Timeout for learned ARP entries, in seconds (twice the flow timeout)
ARP_TIMEOUT = 600 * 2
# Maximum number of packets to buffer on a switch for an unknown IP
MAX_BUFFERED_PER_IP = 5
# Maximum time to hang on to a buffer for an unknown IP in seconds
MAX_BUFFER_TIME = 5
class Entry (object):
  """
  One learned (port, MAC) binding for an IP address.

  Not strictly an ARP entry:
   * the port tells us where to forward traffic for this IP,
   * the MAC lets us answer ARP requests ourselves,
   * the timeout makes us flood (rather than answer) ARP requests once
     the entry is older than ARP_TIMEOUT.
  """
  def __init__ (self, port, mac):
    self.timeout = time.time() + ARP_TIMEOUT
    self.port = port
    self.mac = mac

  def _as_pair (self):
    # Comparisons treat an Entry as its (port, mac) pair.
    return (self.port, self.mac)

  def __eq__ (self, other):
    if type(other) == tuple:
      return self._as_pair() == other
    return self._as_pair() == (other.port, other.mac)

  def __ne__ (self, other):
    return not self.__eq__(other)

  def isExpired (self):
    # Entries for fake gateways (port OFPP_NONE) never expire.
    return self.port != of.OFPP_NONE and time.time() > self.timeout
def dpid_to_mac (dpid):
  """Derive a MAC address from a datapath id (low 48 bits, hex-encoded)."""
  low48 = dpid & 0xffffffffffff
  return EthAddr("%012x" % (low48,))
class l3_switch (EventMixin):
  """
  A simple L3 learning switch.

  Per datapath we learn IP -> Entry(port, MAC) bindings from ARP and IP
  traffic, answer ARP requests when we can, install flows toward known
  destinations, and (optionally) ARP on behalf of senders for unknown
  destinations while buffering their packets.
  """

  def __init__ (self, fakeways = [], arp_for_unknowns = False):
    # These are "fake gateways" -- we'll answer ARPs for them with MAC
    # of the switch they're connected to.  (The default list is never
    # mutated, so the mutable default is harmless here.)
    self.fakeways = set(fakeways)

    # If this is true and we see a packet for an unknown
    # host, we'll ARP for it.
    self.arp_for_unknowns = arp_for_unknowns

    # (dpid,IP) -> expire_time
    # We use this to keep from spamming ARPs
    self.outstanding_arps = {}

    # (dpid,IP) -> [(expire_time,buffer_id,in_port), ...]
    # These are buffers we've gotten at this datapath for this IP which
    # we can't deliver because we don't know where they go.
    self.lost_buffers = {}

    # For each switch, we map IP addresses to Entries
    self.arpTable = {}

    # This timer handles expiring stuff
    self._expire_timer = Timer(5, self._handle_expiration, recurring=True)

    self.listenTo(core)

  def _handle_expiration (self):
    """Called by a timer so that we can remove old buffered packets."""
    empty = []
    for k,v in self.lost_buffers.iteritems():
      dpid,ip = k

      for item in list(v):
        expires_at,buffer_id,in_port = item
        if expires_at < time.time():
          # This packet is old.  Tell this switch to drop it.
          v.remove(item)
          po = of.ofp_packet_out(buffer_id = buffer_id, in_port = in_port)
          core.openflow.sendToDPID(dpid, po)
      if len(v) == 0: empty.append(k)

    # Remove empty buffer bins
    for k in empty:
      del self.lost_buffers[k]

  def _send_lost_buffers (self, dpid, ipaddr, macaddr, port):
    """
    We may have "lost" buffers -- packets we got but didn't know
    where to send at the time.  We may know now.  Try and see.
    """
    if (dpid,ipaddr) in self.lost_buffers:
      # Yup!
      bucket = self.lost_buffers[(dpid,ipaddr)]
      del self.lost_buffers[(dpid,ipaddr)]
      log.debug("Sending %i buffered packets to %s from %s"
                % (len(bucket),ipaddr,dpid_to_str(dpid)))
      for _,buffer_id,in_port in bucket:
        po = of.ofp_packet_out(buffer_id=buffer_id,in_port=in_port)
        po.actions.append(of.ofp_action_dl_addr.set_dst(macaddr))
        po.actions.append(of.ofp_action_output(port = port))
        core.openflow.sendToDPID(dpid, po)

  def _handle_GoingUpEvent (self, event):
    # Start receiving OpenFlow events once the core is up.
    self.listenTo(core.openflow)
    log.debug("Up...")

  def _handle_PacketIn (self, event):
    """Learn from, forward, or answer a single packet-in from a switch."""
    dpid = event.connection.dpid
    inport = event.port
    packet = event.parsed
    if not packet.parsed:
      log.warning("%i %i ignoring unparsed packet", dpid, inport)
      return

    if dpid not in self.arpTable:
      # New switch -- create an empty table
      self.arpTable[dpid] = {}
      for fake in self.fakeways:
        self.arpTable[dpid][IPAddr(fake)] = Entry(of.OFPP_NONE,
                                                  dpid_to_mac(dpid))

    if packet.type == ethernet.LLDP_TYPE:
      # Ignore LLDP packets
      return

    if isinstance(packet.next, ipv4):
      log.debug("%i %i IP %s => %s", dpid,inport,
                packet.next.srcip,packet.next.dstip)

      # Send any waiting packets...
      self._send_lost_buffers(dpid, packet.next.srcip, packet.src, inport)

      # Learn or update port/MAC info
      if packet.next.srcip in self.arpTable[dpid]:
        if self.arpTable[dpid][packet.next.srcip] != (inport, packet.src):
          log.info("%i %i RE-learned %s", dpid,inport,packet.next.srcip)
      else:
        log.debug("%i %i learned %s", dpid,inport,str(packet.next.srcip))
      self.arpTable[dpid][packet.next.srcip] = Entry(inport, packet.src)

      # Try to forward
      dstaddr = packet.next.dstip
      if dstaddr in self.arpTable[dpid]:
        # We have info about what port to send it out on...

        prt = self.arpTable[dpid][dstaddr].port
        mac = self.arpTable[dpid][dstaddr].mac
        if prt == inport:
          # BUGFIX: the original concatenated the format string with '+',
          # so '%' bound only to "input port" and raised TypeError.
          log.warning("%i %i not sending packet for %s back out of the "
                      "input port", dpid, inport, str(dstaddr))
        else:
          log.debug("%i %i installing flow for %s => %s out port %i"
                    % (dpid, inport, packet.next.srcip, dstaddr, prt))

          actions = []
          actions.append(of.ofp_action_dl_addr.set_dst(mac))
          actions.append(of.ofp_action_output(port = prt))
          match = of.ofp_match.from_packet(packet, inport)
          match.dl_src = None # Wildcard source MAC

          msg = of.ofp_flow_mod(command=of.OFPFC_ADD,
                                idle_timeout=FLOW_IDLE_TIMEOUT,
                                hard_timeout=of.OFP_FLOW_PERMANENT,
                                buffer_id=event.ofp.buffer_id,
                                actions=actions,
                                # BUGFIX: use the prepared match; the original
                                # rebuilt it here, silently discarding the
                                # dl_src wildcard set above.
                                match=match)
          event.connection.send(msg.pack())
      elif self.arp_for_unknowns:
        # We don't know this destination.
        # First, we track this buffer so that we can try to resend it later
        # if we learn the destination, second we ARP for the destination,
        # which should ultimately result in it responding and us learning
        # where it is

        # Add to tracked buffers
        if (dpid,dstaddr) not in self.lost_buffers:
          self.lost_buffers[(dpid,dstaddr)] = []
        bucket = self.lost_buffers[(dpid,dstaddr)]
        entry = (time.time() + MAX_BUFFER_TIME,event.ofp.buffer_id,inport)
        bucket.append(entry)
        while len(bucket) > MAX_BUFFERED_PER_IP: del bucket[0]

        # Expire things from our outstanding ARP list...
        self.outstanding_arps = {k:v for k,v in
         self.outstanding_arps.iteritems() if v > time.time()}

        # Check if we've already ARPed recently
        if (dpid,dstaddr) in self.outstanding_arps:
          # Oop, we've already done this one recently.
          return

        # And ARP...
        self.outstanding_arps[(dpid,dstaddr)] = time.time() + 4

        r = arp()
        r.hwtype = r.HW_TYPE_ETHERNET
        r.prototype = r.PROTO_TYPE_IP
        r.hwlen = 6
        r.protolen = r.protolen
        r.opcode = r.REQUEST
        r.hwdst = ETHER_BROADCAST
        r.protodst = dstaddr
        r.hwsrc = packet.src
        r.protosrc = packet.next.srcip
        e = ethernet(type=ethernet.ARP_TYPE, src=packet.src,
                     dst=ETHER_BROADCAST)
        e.set_payload(r)
        log.debug("%i %i ARPing for %s on behalf of %s" % (dpid, inport,
         str(r.protodst), str(r.protosrc)))
        msg = of.ofp_packet_out()
        msg.data = e.pack()
        msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
        msg.in_port = inport
        event.connection.send(msg)

    elif isinstance(packet.next, arp):
      a = packet.next
      log.debug("%i %i ARP %s %s => %s", dpid, inport,
       {arp.REQUEST:"request",arp.REPLY:"reply"}.get(a.opcode,
       'op:%i' % (a.opcode,)), str(a.protosrc), str(a.protodst))

      if a.prototype == arp.PROTO_TYPE_IP:
        if a.hwtype == arp.HW_TYPE_ETHERNET:
          if a.protosrc != 0:

            # Learn or update port/MAC info
            if a.protosrc in self.arpTable[dpid]:
              if self.arpTable[dpid][a.protosrc] != (inport, packet.src):
                log.info("%i %i RE-learned %s", dpid,inport,str(a.protosrc))
            else:
              log.debug("%i %i learned %s", dpid,inport,str(a.protosrc))
            self.arpTable[dpid][a.protosrc] = Entry(inport, packet.src)

            # Send any waiting packets...
            self._send_lost_buffers(dpid, a.protosrc, packet.src, inport)

            if a.opcode == arp.REQUEST:
              # Maybe we can answer

              if a.protodst in self.arpTable[dpid]:
                # We have an answer...

                if not self.arpTable[dpid][a.protodst].isExpired():
                  # .. and it's relatively current, so we'll reply ourselves

                  r = arp()
                  r.hwtype = a.hwtype
                  r.prototype = a.prototype
                  r.hwlen = a.hwlen
                  r.protolen = a.protolen
                  r.opcode = arp.REPLY
                  r.hwdst = a.hwsrc
                  r.protodst = a.protosrc
                  r.protosrc = a.protodst
                  r.hwsrc = self.arpTable[dpid][a.protodst].mac
                  e = ethernet(type=packet.type, src=dpid_to_mac(dpid),
                               dst=a.hwsrc)
                  e.set_payload(r)
                  log.debug("%i %i answering ARP for %s" % (dpid, inport,
                   str(r.protosrc)))
                  msg = of.ofp_packet_out()
                  msg.data = e.pack()
                  msg.actions.append(of.ofp_action_output(port =
                                                          of.OFPP_IN_PORT))
                  msg.in_port = inport
                  event.connection.send(msg)
                  return

      # Didn't know how to answer or otherwise handle this ARP, so just flood it
      log.debug("%i %i flooding ARP %s %s => %s" % (dpid, inport,
       {arp.REQUEST:"request",arp.REPLY:"reply"}.get(a.opcode,
       'op:%i' % (a.opcode,)), str(a.protosrc), str(a.protodst)))

      msg = of.ofp_packet_out(in_port = inport, data = event.ofp,
          action = of.ofp_action_output(port = of.OFPP_FLOOD))
      event.connection.send(msg)
def launch (fakeways="", arp_for_unknowns=None):
  """Component entry point: register an l3_switch with the POX core.

  *fakeways* is a comma- or space-separated list of gateway IPs.  When
  *arp_for_unknowns* is not given, it defaults to True exactly when fake
  gateways were configured.
  """
  gateway_ips = [IPAddr(token) for token in fakeways.replace(","," ").split()]
  if arp_for_unknowns is None:
    arp_for_unknowns = bool(gateway_ips)
  else:
    arp_for_unknowns = str_to_bool(arp_for_unknowns)
  core.registerNew(l3_switch, gateway_ips, arp_for_unknowns)
| |
import cv2
import face_recognition as fr
import logging
import numpy as np
import struct
import time
from tornado import gen
from gateway import net, face
from gateway.app import gateway
from gateway.camera.recognizor import recognize_face
from gateway.camera.tracker import track_object
from gateway.firebase import fcm
# Pan/tilt direction flags packed into the MOVE_REQUEST body ('!H').
# NOTE(review): distinct bits suggest they are combinable as a bitmask --
# confirm against the camera firmware protocol.
MOVE_TOP = 0x01
MOVE_BOTTOM = 0x02
MOVE_LEFT = 0x04
MOVE_RIGHT = 0x08
class CameraDevice(object):
    """A connected camera device.

    Wraps the camera's stream, fans frames out to watcher streams, and
    offloads object tracking / face recognition to an executor.  Detection
    hits trigger FCM notifications, throttled to one per minute per subject.
    """

    def __init__(self, stream, address, executor):
        self.__stream = stream
        self.address = address
        self.resolution = None       # (width, height), set after handshake
        self.framerate = None
        self.moving = False          # True once a MOVE_REQUEST was sent
        self.auto_mode = True        # auto-follow tracked objects
        self.__watchers = {}         # id(stream) -> subscribed stream
        self.__executor = executor
        self.__object_tracking_future = None
        self.__face_recognition_future = None
        # For object tracking: state threaded through track_object() calls
        self.__running_average_image = None
        self.__running_average_in_display_color_depth = None
        self.__difference = None
        self.__last_target_count = None
        self.__last_target_change_time = None
        self.__last_frame_entity_list = None
        # Notification throttling (name -> timestamp / last timestamp)
        self._last_notified_faces = {}
        self._last_notified_moving_objects = None

    def to_dict(self):
        """Serialize camera metadata for API consumers.

        NOTE(review): assumes ``resolution`` was already reported by the
        device -- raises TypeError if it is still None; confirm callers.
        """
        return {
            'id': id(self),
            'address': {
                'ip': self.address[0],
                'port': self.address[1]
            },
            'resolution': {
                'width': self.resolution[0],
                'height': self.resolution[1]
            },
            'framerate': self.framerate
        }

    @gen.coroutine
    def send(self, opcode, body=None):
        """Encode and write one protocol packet to the camera stream."""
        packet = net.encode_packet(opcode, body)
        yield self.__stream.write(packet)

    @gen.coroutine
    def broadcast_to_watchers(self, packet):
        """Forward *packet* to every subscribed watcher stream still open."""
        for k in self.__watchers.keys():
            stream = self.__watchers.get(k)
            if not stream.closed():
                yield stream.write(packet)

    def subscribe(self, stream):
        """Register *stream* to receive broadcasts (idempotent)."""
        if id(stream) not in self.__watchers:
            self.__watchers[id(stream)] = stream

    def unsubscribe(self, stream):
        """Remove *stream* from the watcher set, if present."""
        if id(stream) in self.__watchers:
            del self.__watchers[id(stream)]

    def move(self, direction):
        """Ask the camera to pan/tilt in *direction* (MOVE_* flag).

        Fire-and-forget: the coroutine returned by send() is not awaited.
        NOTE(review): ``moving`` is never reset here -- presumably cleared
        elsewhere on the device's response; confirm.
        """
        if not self.moving:
            self.moving = True
            body = struct.pack('!H', direction)
            self.send(net.Opcode.MOVE_REQUEST, body)

    def try_image_processing(self, frame):
        """Collect finished jobs, then start new ones if none is pending."""
        self.__fetch_object_tracking_result()
        self._fetch_face_recognition_result()
        is_object_trackable = self.__object_tracking_future is None
        is_face_recognizable = self.__face_recognition_future is None
        if is_object_trackable or is_face_recognizable:
            # Decode the JPEG byte buffer once for both pipelines.
            frame = np.frombuffer(frame, np.uint8)
            frame = cv2.imdecode(frame, 1)
            if is_object_trackable:
                self.__execute_object_tracking(frame)
            if is_face_recognizable:
                self._execute_face_recognition(frame)

    def _notify_moving_objects(self):
        """Send a motion notification, at most once every 60 seconds."""
        now = time.time()
        if self._last_notified_moving_objects is None \
                or (now - self._last_notified_moving_objects) > 60:
            # BUGFIX: record the time so the throttle actually engages;
            # the original never set it and notified on every detection.
            self._last_notified_moving_objects = now
            message_title = "Moving Object is detected"
            message_body = "Camera {} detect moving objects".format(id(self))
            result = fcm.notify_all(message_title, message_body)
            if result:
                logging.debug("Notify moving object result: {}".format(result))

    def _handle_object_tracking_result(self, point):
        """Notify about motion and, in auto mode, re-center toward *point*."""
        width, height = self.resolution
        x, y = point
        if self.auto_mode:
            self._notify_moving_objects()
            if y <= height * 0.3:
                logging.debug("[Camera {}] Move BOTTOM".format(id(self)))
                self.move(MOVE_BOTTOM)
            elif y >= height * 0.7:
                logging.debug("[Camera {}] Move TOP".format(id(self)))
                self.move(MOVE_TOP)
            if x <= width * 0.3:
                logging.debug("[Camera {}] Move RIGHT".format(id(self)))
                self.move(MOVE_RIGHT)
            elif x >= width * 0.7:
                logging.debug("[Camera {}] Move LEFT".format(id(self)))
                self.move(MOVE_LEFT)

    def __fetch_object_tracking_result(self):
        """Harvest a finished tracking job and carry its state forward."""
        if self.__object_tracking_future:
            if self.__object_tracking_future.done():
                point, \
                    running_average_image, \
                    running_average_in_display_color_depth, \
                    difference, \
                    last_target_count, \
                    last_target_change_time, \
                    last_frame_entity_list = self.__object_tracking_future.result()
                self.__running_average_image = running_average_image
                self.__running_average_in_display_color_depth = running_average_in_display_color_depth
                self.__difference = difference
                self.__last_target_count = last_target_count
                self.__last_target_change_time = last_target_change_time
                self.__last_frame_entity_list = last_frame_entity_list
                if point is not None:
                    self._handle_object_tracking_result(point)
                self.__object_tracking_future = None

    def __execute_object_tracking(self, frame):
        """Submit a tracking job for *frame* with the carried-over state."""
        self.__object_tracking_future = self.__executor.submit(track_object,
                                                               frame,
                                                               self.__running_average_image,
                                                               self.__running_average_in_display_color_depth,
                                                               self.__difference,
                                                               self.__last_target_count,
                                                               self.__last_target_change_time,
                                                               self.__last_frame_entity_list)

    def _handle_face_recognition_result(self, faces):
        """Notify about recognized faces.

        *faces* is an iterable of (name, distance) pairs.  Known names are
        throttled to one notification per minute; 'Unknown' gets no recorded
        timestamp, so it is reported on every detection.
        """
        now = time.time()
        to_notify_names = []
        for name, distance in faces:
            last = self._last_notified_faces.get(name)
            if last is None or (now - last) > 60:
                to_notify_names.append(name)
        if to_notify_names:
            if 'Unknown' in to_notify_names:
                message_title = "Warning"
                message_body = "Unknown people are detected"
            else:
                message_title = "People are detected"
                message_body = to_notify_names
            result = fcm.notify_all(message_title, message_body)
            if result:
                logging.debug('Notify result: {}'.format(result))
            # BUGFIX: compare with != (the original used ``is not`` on a str
            # literal, which relies on interning) and drop the redundant
            # del-before-assign.
            for name in to_notify_names:
                if name != 'Unknown':
                    self._last_notified_faces[name] = now

    def _fetch_face_recognition_result(self):
        """Harvest a finished recognition job and handle any hits."""
        if self.__face_recognition_future:
            if self.__face_recognition_future.done():
                faces = self.__face_recognition_future.result()
                if len(faces) > 0:
                    logging.debug('Face recognition result: %s', faces)
                    self._handle_face_recognition_result(faces)
                self.__face_recognition_future = None

    def _execute_face_recognition(self, frame):
        """Submit a recognition job against the gateway's known faces."""
        self.__face_recognition_future = self.__executor.submit(
            face.recognize_face, frame, gateway.faces)
| |
"""
Helper for views.py
"""
from base_handler import base_handler, REMOVED_TENANTS
import traceback
from flask import g, render_template
import datetime
import json
class netmon_handler(base_handler):
    """Sijax handler for the network-monitoring views.

    Every public method pushes its result to the browser via *obj_response*
    (HTML fragments or JS calls) and reports failures as 'danger'
    notifications.
    """

    def __init__(self):
        """
        Manages all the operations related with network monitoring
        :return:
        """
        try:
            self.cobra_apic_object = netmon_handler.init_connections()
            self.exception = None
        except Exception as e:
            self.exception = e
            # BUGFIX: was ``print traceback.print_exc()`` which printed the
            # traceback and then an extra "None" (print_exc returns None).
            traceback.print_exc()

    @staticmethod
    def _js_safe(text):
        """Strip quotes/newlines from *text* and truncate to 100 chars so it
        can be embedded safely in a single-quoted JS string literal."""
        return str(text).replace("'", "").replace('"', '').replace("\n", "")[0:100]

    def _notify_error(self, obj_response, title, error):
        """Show a 'danger' notification for *error* in the browser."""
        obj_response.script("create_notification('" + title + "', '" +
                            self._js_safe(error) + "', 'danger', 0)")

    def _check_connection(self, obj_response):
        """Report a stored connection problem; return False when unusable."""
        if self.exception is not None:
            self._notify_error(obj_response, 'Connection problem', self.exception)
            return False
        return True

    def network_list(self, obj_response):
        """
        Returns a list of networks grouped by tenant
        :param obj_response:
        :return:
        """
        if not self._check_connection(obj_response):
            return
        try:
            network_tree = []
            for tenant in self.cobra_apic_object.get_all_tenants():
                if tenant.name in REMOVED_TENANTS:
                    continue
                tenant_node = {
                    "text": tenant.name,
                    "selectable": False,
                    "color": "#FFFFFF",
                    "backColor": "#245580"
                }
                network_tree.append(tenant_node)
                network_aps = self.cobra_apic_object.get_ap_by_tenant(str(tenant.dn))
                if len(network_aps) > 0:
                    tenant_node["nodes"] = []
                for network_ap in network_aps:
                    ap_node = {
                        "text": network_ap.name,
                        "selectable": False,
                        "color": "#000000",
                        "backColor": "#FFFFFF"
                    }
                    tenant_node["nodes"].append(ap_node)
                    networks = self.cobra_apic_object.get_epg_by_ap(str(network_ap.dn))
                    if len(networks) > 0:
                        ap_node["nodes"] = [{"text": network.name} for network in networks]
            obj_response.script('$("#busy_indicator").hide()')
            data = json.dumps(network_tree, ensure_ascii=False)
            obj_response.script('set_network_tree(' + data + ')')
        except Exception as e:
            traceback.print_exc()
            self._notify_error(obj_response, 'Can not retrieve networks', e)
        finally:
            g.db.close()
            obj_response.html("#delete_network_response", '')

    def get_endpoints(self, obj_response, form_values):
        """
        Return a list of end points associated to an end point group
        :param obj_response:
        :param form_values:
        :return:
        """
        if not self._check_connection(obj_response):
            return
        try:
            epg_dn = 'uni/tn-%s/ap-%s/epg-%s' % (form_values['tenant'], form_values['ap'], form_values['network'])
            html_response = render_template('netmon/endpoint_list.html',
                                            endpoints=self.cobra_apic_object.get_endpoints(epg_dn),
                                            tenant=form_values['tenant'],
                                            ap=form_values['ap'],
                                            network=form_values['network'])
            obj_response.html("#endpoints", html_response)
            obj_response.script('$("#endpoints_busy_indicator").hide()')
        except Exception as e:
            traceback.print_exc()
            self._notify_error(obj_response, 'Can not retrieve end points', e)
        finally:
            g.db.close()

    def get_epg_health_score(self, obj_response, form_values):
        """
        Returns the health score of a specific end point group
        :param obj_response:
        :param form_values:
        :return:
        """
        if not self._check_connection(obj_response):
            return
        try:
            epg = self.cobra_apic_object.get_epg(form_values['tenant'], form_values['ap'], form_values['network'])
            score = self.cobra_apic_object.get_epg_health_score(str(epg.dn))
            html_response = render_template('netmon/epg_score.html',
                                            score=int(score))
            obj_response.html("#epg_score", html_response)
        except Exception as e:
            traceback.print_exc()
            self._notify_error(obj_response, 'Can not retrieve score', e)
        finally:
            g.db.close()

    def get_faults_history(self, obj_response, form_values):
        """
        Get the history of faults within an end point group
        :param obj_response:
        :param form_values:
        :return:
        """
        if not self._check_connection(obj_response):
            return
        try:
            epg = self.cobra_apic_object.get_epg(form_values['tenant'], form_values['ap'], form_values['network'])
            fault_list = self.cobra_apic_object.get_faults_history(str(epg.dn))
            html_response = render_template('netmon/fault_list.html',
                                            faults=fault_list)
            obj_response.html("#history", html_response)
        except Exception as e:
            traceback.print_exc()
            # BUGFIX: error title was copy-pasted as 'Can not retrieve score'.
            self._notify_error(obj_response, 'Can not retrieve faults history', e)
        finally:
            g.db.close()

    def get_traffic_chart(self, obj_response, form_values):
        """
        returns traffic statistics
        :param obj_response:
        :param form_values:
        :return:
        """
        if not self._check_connection(obj_response):
            return
        try:
            epg = self.cobra_apic_object.get_epg(form_values['tenant'], form_values['ap'], form_values['network'])
            traffic_list = self.cobra_apic_object.get_stats(str(epg.dn))
            labels = []
            data = []
            for traffic in traffic_list:
                # Drop the timezone/fraction tail before parsing interval end.
                date = datetime.datetime.strptime(traffic.repIntvEnd[:-13], "%Y-%m-%dT%H:%M")
                labels.append(date.strftime('%H:%M'))
                data.append(traffic.unicastPer)
            obj_response.script("load_traffic_chart(%s, %s)" % (labels, data))
        except Exception as e:
            traceback.print_exc()
            # BUGFIX: error title was copy-pasted as 'Can not retrieve score'.
            self._notify_error(obj_response, 'Can not retrieve traffic statistics', e)
        finally:
            g.db.close()

    def get_faults(self, obj_response, form_values):
        """
        Return the active faults within the epg
        :param obj_response:
        :param form_values:
        :return:
        """
        if not self._check_connection(obj_response):
            return
        try:
            epg = self.cobra_apic_object.get_epg(form_values['tenant'], form_values['ap'], form_values['network'])
            fault_list = self.cobra_apic_object.get_faults(str(epg.dn))
            html_response = render_template('netmon/fault_list.html',
                                            faults=fault_list)
            obj_response.html("#faults", html_response)
        except Exception as e:
            traceback.print_exc()
            # BUGFIX: error title was copy-pasted as 'Can not retrieve score'.
            self._notify_error(obj_response, 'Can not retrieve faults', e)
        finally:
            g.db.close()

    def get_endpoint_track(self, obj_response, form_values):
        """
        Shows the endpoint track
        This operation is not supported in cobra, we are using direct api calls
        :param obj_response:
        :param form_values:
        :return:
        """
        if not self._check_connection(obj_response):
            return
        try:
            epg = self.cobra_apic_object.get_epg(form_values['tenant'], form_values['ap'], form_values['network'])
            api_apic = self.create_api_apic()
            end_point_track_list = api_apic.get_endpoint_track(str(epg.dn) + '/cep-' + form_values['endpoint_mac'])
            html_response = render_template('netmon/endpoint_track_list.html',
                                            end_point_track_list=end_point_track_list)
            obj_response.html("#network_track", html_response)
        except Exception as e:
            traceback.print_exc()
            # BUGFIX: error title was copy-pasted as 'Can not retrieve score'.
            self._notify_error(obj_response, 'Can not retrieve endpoint track', e)
        finally:
            g.db.close()
| |
# Copyright (c) 2010 Christopher Rebert <code@rebertia.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
This module defines datatypes representing TritonLink Schedule of Classes data.
:copyright: (c) 2010 by Christopher Rebert.
:license: MIT, see :file:`LICENSE.txt` for more details.
"""
from __future__ import division
from warnings import simplefilter as _simplefilter, catch_warnings as _catch_warnings
from triton_scraper import config as _config
from triton_scraper.locations import UnknownLocation as _UnknownLocation
from triton_scraper.restriction_codes import restriction_code2description
class DaysOfWeekSet(frozenset):
"""A :class:`set` of days of the week.
Each individual day is represented using its corresponding entry in :attr:`DAYS_IN_ORDER`.
Iterating over the set yields the days in their conventional ordering; Sunday is considered the first day of the week."""
#: Normalized day of the week abbrevations used by :class:`DaysOfWeekSet`, in order, starting with Sunday. A tuple of strings.
DAYS_IN_ORDER = tuple('Sun Mon Tue Wed Thu Fri Sat'.split())
_DAYS_OF_WEEK_SET = set(DAYS_IN_ORDER)
#: Day of the week abbreviations used by TritonLink, in order, starting with Sunday. A tuple of strings.
UCSD_DAY_ABBEVIATIONS = tuple('Sun M Tu W Th F S'.split())
@classmethod
def from_ucsd_abbrevs(cls, ucsd_abbreviated):
"""Additional constructor.
:param ucsd_abbreviated: string representing a group of days of the week using TritonLink's day of the week abbreviations (see :attr:`UCSD_DAY_ABBEVIATIONS`; e.g. "TuTh")
"""
norm_days = set()
for index, name in enumerate(cls.UCSD_DAY_ABBEVIATIONS):
if ucsd_abbreviated.startswith(name):
ucsd_abbreviated = ucsd_abbreviated[len(name):]
norm_days.add(cls.DAYS_IN_ORDER[index])
if ucsd_abbreviated:
raise ValueError, "Unrecognized day name abbreviations: "+repr(ucsd_abbreviated)
return cls(norm_days)
def __init__(self, iterable):
"""Creates a new DaysOfWeekSet from an iterable containing strings from :attr:`DAYS_IN_ORDER`.
The ordering of the strings doesn't matter."""
with _catch_warnings(): # Suppress irrelevant DeprecationWarning
_simplefilter("ignore")
frozenset.__init__(self, iterable)
if self - self._DAYS_OF_WEEK_SET:
raise ValueError, "Non-day-names present"
def __iter__(self):
"""Yields day names in conventional order. A week is considered to start on Sunday."""
for day in self.DAYS_IN_ORDER:
if day in self:
yield day
def __repr__(self):
return "{%s}" % (", ".join(self))
# Meeting kinds for which CourseInstance gets an auto-generated list property
# (e.g. ``lectures``); see add_meeting_list_properties below.
_NORMAL_MEETING_TYPES = "lecture discussion lab tutorial seminar studio midterm problem_session review_session make_up_session film".split()
def _meeting_type_name2code(type_name):
    """Return the TritonLink code for *type_name*, read from config as ``<TYPE>_CODE``."""
    return getattr(_config, type_name.upper()+"_CODE")
def add_meeting_list_properties(klass):
    """Class decorator: add a read-only ``<type>s`` property (e.g. ``lectures``)
    for every normal meeting type, backed by ``_code2meeting_list``."""
    for type_name in _NORMAL_MEETING_TYPES:
        property_name = type_name+"s"
        # The type code is bound as a default argument so each generated
        # property captures its own value (avoids the late-binding pitfall).
        extractor = lambda self, type_code=_meeting_type_name2code(type_name): self._code2meeting_list[type_code]
        setattr(klass, property_name, property(extractor))
    return klass
@add_meeting_list_properties
class CourseInstance(object):
    """An instance of a course. Two instances of the same course typically have different instructors and/or lecture times."""
    _FORMAT = '{0.code} "{0.name}" ({0.units} units) with {0.instructor}\n\tPrerequisites: {0.prerequisites_url}'

    def __init__(self, subject_code, course_number, name, units, restriction_codes=None, prerequisites_url=None):
        """
        :param subject_code: code for course's subject; e.g. "CSE"
        :param course_number: course "number"; e.g. "15L"
        :param name: descriptive name of course
        :param units: number of credit units; NaN if variable
        :param restriction_codes: registration restriction codes, if any
        :param prerequisites_url: URL of the course's prerequisites page, if any
        """
        #: Code for course's subject; e.g. "CSE"
        #:
        #: :type: string
        self.subject_code = subject_code
        #: Course "number"; e.g. "15L"
        #:
        #: :type: string
        self.course_number = course_number
        #: Descriptive name of course.
        #:
        #: :type: string
        self.name = name
        if restriction_codes is None:
            restriction_codes = []
        #: Human-readable descriptions of registration restrictions applicable to course.
        #:
        #: :type: set of strings
        self.restrictions = set(restriction_code2description(restrict_code) for restrict_code in restriction_codes)
        #: Number of credit units; NaN if variable.
        #:
        #: :type: float
        self.units = units
        #: URL of page listing prerequisites for course.
        #:
        #: :type: string or None
        self.prerequisites_url = prerequisites_url
        # One meeting list per known meeting-type code; exposed through the
        # properties generated by @add_meeting_list_properties.
        self._code2meeting_list = dict( (_meeting_type_name2code(type_name), []) for type_name in _NORMAL_MEETING_TYPES)
        #: Final exam
        #:
        #: :type: :class:`OneShotEvent`
        self.final = None
        #: Course's instructor; derived from the first meeting that has one.
        self.instructor = None #FIXME: consider making this a proper property
    @property
    def code(self):
        """Full course code; e.g. "CSE 15L".

        :type: string
        """
        return "%s %s" % (self.subject_code, self.course_number)
    def __repr__(self):
        parts = [self._FORMAT.format(self)]
        if not self.restrictions:
            parts.append("\tUnrestricted")
        else:
            parts.append("\tRestrictions: " + ", ".join(self.restrictions))
        meeting_types = [("Seminars", self.seminars), ("Studios", self.studios), ("Lectures", self.lectures), ("Discussions", self.discussions), ("Labs", self.labs), ("Tutorials", self.tutorials), ("Films", self.films), ("Problem Sessions", self.problem_sessions), ("Review Sessions", self.review_sessions), ("Make-up Sessions", self.make_up_sessions), ("Midterms", self.midterms)]
        for name, meetings in meeting_types:
            if meetings:
                part = "\t%s:\n\t\t%s" % (name, "\n\t\t".join(str(meeting) for meeting in meetings))
                parts.append(part)
        parts.append("\tFinal: "+str(self.final))
        return "\n".join(parts)
    def add_meeting(self, meeting_type_code, meeting):
        """Record *meeting* under its meeting type.

        :param meeting_type_code: TritonLink meeting type code
        :type meeting_type_code: string
        :param meeting: the meeting to record
        :raises ValueError: if *meeting_type_code* is unrecognized
        """
        try:
            self._code2meeting_list[meeting_type_code].append(meeting)
        except KeyError:
            # BUGFIX: the original referenced an undefined ``mtg_type`` here,
            # raising NameError instead of the intended ValueError.
            raise ValueError("Unrecognized meeting type code: %s" % repr(meeting_type_code))
        else:
            # Adopt the first known (non-TBA) instructor seen on a meeting.
            if self.instructor is None and hasattr(meeting, 'instructor') and meeting.instructor is not None and not isinstance(meeting.instructor, InstructorTBA):
                self.instructor = meeting.instructor
    def __bool__(self):
        """True when the course has at least one scheduled meeting."""
        return any(meeting_list for meeting_list in self._code2meeting_list.values())
    # Python 2 uses __nonzero__ for truth testing; alias it for compatibility
    # with the rest of this (Python 2 era) module.
    __nonzero__ = __bool__
# The decorator has done its work; remove it from the module namespace.
del add_meeting_list_properties
# Placeholder name TritonLink shows when no instructor has been assigned yet.
_STAFF = "Staff"
class Instructor(object):
    """A known course instructor."""
    __FORMAT = "{0.first_name} {0.last_name} <{0.email}>"

    @classmethod
    def from_full_name(cls, full_name, email=None):
        """Alternate constructor. Parses out the instructor's first and last names from their full name.
        May return an :class:`InstructorTBA` if they are TBA.

        :param full_name: the instructor's full name (e.g. "Doe, John")
        :type full_name: string
        :param email: the instructor's email address
        :type email: string or None
        """
        if full_name.strip() == _STAFF:
            return InstructorTBA()
        # Collapse extra commas (e.g. suffixes) so exactly one separates the
        # surname from the given name before splitting.
        while full_name.count(",") > 1:
            index = full_name.rindex(",")
            full_name = full_name[:index]+full_name[index+1:]
        last, first = full_name.split(", ")
        return cls(last, first, email)

    def __init__(self, last, first, email=None):
        """
        :param last: instructor's surname
        :param first: instructor's given name
        :param email: instructor's email address, if known
        """
        #: Instructor's given name
        #:
        #: :type: string
        self.first_name = first
        #: Instructor's surname
        #:
        #: :type: string
        self.last_name = last
        #: Instructor's email address, if they have one.
        #:
        #: :type: string or None
        self.email = email

    def __repr__(self):
        "FirstName LastName <EmailAddress>"
        return self.__FORMAT.format(self)

    @property
    def __key(self):
        # Identity for equality/hashing: name only; email is ignored.
        return (self.first_name, self.last_name)

    def __eq__(self, other):
        """Instructors with the same first and last name are equal to each other"""
        # BUGFIX: the original unconditionally accessed ``other.__key`` and
        # raised AttributeError when compared to non-Instructor objects.
        if not isinstance(other, Instructor):
            return NotImplemented
        return self.__key == other.__key

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # Keep instances hashable (Python 3 drops the default __hash__ when
        # __eq__ is defined) and consistent with __eq__.
        return hash(self.__key)
class InstructorTBA(object):
    """An as-yet-unknown ("Staff") instructor."""

    def __repr__(self):
        return "(TBA)"

    def __eq__(self, other):
        """Unknown instructors are all equal to each other."""
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not self.__eq__(other)
| |
# -*- coding: utf-8 -*-
"""
(c) 2012-2021 Martin Wendt; see https://github.com/mar10/pyftpsync
Licensed under the MIT license: https://www.opensource.org/licenses/mit-license.php
"""
import os
import yaml
from ftpsync.cli_common import common_parser, creds_parser, verbose_parser
from ftpsync.synchronizers import CONFIG_FILE_NAME
from ftpsync.util import write
# Options that every task section in `pyftpsync.yaml` must define.
MANDATORY_TASK_ARGS = {"command", "remote"}

# Optional task options recognized in addition to the mandatory ones;
# anything else in a task section is rejected as a probable typo.
KNOWN_TASK_ARGS = {
    "case",
    "debug",
    "delete",
    "delete_unmatched",
    "dry_run",
    "exclude",
    "force",
    "ftp_active",
    "here",
    "local",
    "match",
    "no_color",
    "no_keyring",
    "no_netrc",
    "no_prompt",
    "no_verify_host_keys",
    "progress",
    "prompt",
    "resolve",
    "root",
    "verbose",
}

# Flag-style arguments that default to False: passing the flag on the
# command line (True) overrides the yaml entry.
OVERRIDABLE_BOOL_ARGS = {
    "dry_run",
    "force",
    "no_color",
    "no_keyring",
    "no_netrc",
    "no_prompt",
    "no_verify_host_keys",
    "progress",
    # "resolve",
}
def add_run_parser(subparsers):
    """Register the `run` sub-command and return its configured parser."""
    run_parser = subparsers.add_parser(
        "run",
        parents=[verbose_parser, common_parser, creds_parser],
        help="run pyftpsync with configuration from `pyftpsync.yaml` in current or parent folder",
    )
    run_parser.add_argument(
        "task",
        nargs="?",
        help="task to run (default: use `default_task` from `pyftpsync.yaml`)",
    )
    # --here and --root are mutually exclusive ways to pick the sync root.
    scope_group = run_parser.add_mutually_exclusive_group()
    scope_group.add_argument(
        "--here", action="store_true", help="use current folder as root"
    )
    scope_group.add_argument(
        "--root",
        action="store_true",
        help="use folder of nearest `pyftpsync.yaml` as root",
    )
    run_parser.set_defaults(command="run")
    return run_parser
def handle_run_command(parser, args):
    """Implement `run` sub-command.

    Locates `pyftpsync.yaml` in the current folder or one of its parents,
    merges the selected task's options from that file into ``args``
    (explicit command line flags win), and resolves the local/remote
    target paths.  All configuration errors are reported through
    ``parser.error()``, which prints the message and exits the process.

    :param parser: argparse parser of the `run` sub-command (for errors)
    :param args: parsed argument namespace; mutated in place
    """
    MAX_LEVELS = 15
    # --- Look for `pyftpsync.yaml` in current folder and parents ---
    cur_level = 0
    cur_folder = os.getcwd()
    config_path = None
    while cur_level < MAX_LEVELS:
        path = os.path.join(cur_folder, CONFIG_FILE_NAME)
        # print("Searching for {}...".format(path))
        if os.path.isfile(path):
            config_path = path
            break
        parent = os.path.dirname(cur_folder)
        if parent == cur_folder:
            # Reached the filesystem root without finding a config file.
            break
        cur_folder = parent
        cur_level += 1
    if not config_path:
        parser.error(
            "Could not locate `pyftpsync.yaml` in {} or {} parent folders.".format(
                os.getcwd(), cur_level
            )
        )
    # --- Parse `pyftpsync.yaml` and set `args` attributes ---
    try:
        with open(config_path, "rb") as f:
            config = yaml.safe_load(f)
    except Exception as e:
        parser.error("Error parsing {}: {}".format(config_path, e))
    # print(config)
    if "tasks" not in config:
        parser.error("Missing option `tasks` in {}".format(config_path))
    common_config = config.get("common_config", {})
    default_task = config.get("default_task", "default")
    task_name = args.task or default_task
    if task_name not in config["tasks"]:
        parser.error("Missing option `tasks.{}` in {}".format(task_name, config_path))
    task = config["tasks"][task_name]
    write("Running task '{}' from {}".format(task_name, config_path))
    # Merge: task-specific options override the shared `common_config`.
    common_config.update(task)
    task = common_config
    # write("task", task)
    # --- Check task syntax ---
    task_args = set(task.keys())
    missing_args = MANDATORY_TASK_ARGS.difference(task_args)
    if missing_args:
        parser.error(
            "Missing mandatory options: tasks.{}.{}".format(
                task_name, ", ".join(missing_args)
            )
        )
    allowed_args = KNOWN_TASK_ARGS.union(MANDATORY_TASK_ARGS)
    invalid_args = task_args.difference(allowed_args)
    if invalid_args:
        parser.error(
            "Invalid options: tasks.{}.{}".format(task_name, ", ".join(invalid_args))
        )
    # write("args", args)
    # Copy yaml values onto `args`, except where an explicit command line
    # argument should take precedence over the yaml entry.
    for name in allowed_args:
        val = task.get(name, None)  # default)
        if val is None:
            continue  # option not specified in yaml
        # Override yaml entry by command line
        cmd_val = getattr(args, name, None)
        # write("check --{}: {} => {}".format(name, val, cmd_val))
        if cmd_val != val:
            override = False
            if name in OVERRIDABLE_BOOL_ARGS and cmd_val:
                # Boolean flag was explicitly passed on the command line.
                override = True
            elif name in {"here", "root"} and (args.here or args.root):
                override = True
            elif name == "verbose" and cmd_val != 3:
                # NOTE(review): assumes 3 is the default verbosity, i.e.
                # any other value means -v/-q was passed -- confirm.
                override = True
            if override:
                write(
                    "Yaml entry overriden by --{}: {} => {}".format(name, val, cmd_val)
                )
                continue
        setattr(args, name, val)
    # --- Figure out local target path ---
    cur_folder = os.getcwd()
    root_folder = os.path.dirname(config_path)
    if task.get("local"):
        # Optional sub-folder of the project root to use as local target.
        root_folder = os.path.join(root_folder, task["local"])
    path_ofs = os.path.relpath(os.getcwd(), root_folder)
    if cur_level == 0 or args.root:
        # Config found in the current folder (or --root given):
        # synchronize the whole project.
        path_ofs = ""
        args.local = root_folder
    elif args.here:
        # Synchronize only the current sub-branch; shift the remote path
        # by the same offset as the local one.
        write("Using sub-branch {sub} of {root}".format(root=root_folder, sub=path_ofs))
        args.local = cur_folder
        args.remote = os.path.join(args.remote, path_ofs)
    else:
        parser.error(
            "`pyftpsync.yaml` configuration was found in a parent directory. "
            "Please pass an additional argument to clarify:\n"
            "    --root: synchronize whole project ({root})\n"
            "    --here: synchronize sub branch ({root}/{sub})".format(
                root=root_folder, sub=path_ofs
            )
        )
| |
# ============================================================================
# Group Allocator - allocates students to balanced groups
# ============================================================================
# The MIT License (MIT)
# Copyright (c) 2014 Michael Fairley (mfai035@aucklanduni.ac.nz)
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ============================================================================
# Authors : Oscar Dowson odow003@aucklanduni.ac.nz
# Michael Fairley mfai035@aucklanduni.ac.nz
# Date : 8 August 2014
#
# ============================================================================
from pulp import *
from math import ceil
from System import Array
import datetime
# LP model: minimise GPA imbalance between groups (objective built below).
problem = LpProblem('ENGGEN403', LpMinimize)
print('Creating model...')
# ============================================================================
# Pre-computed variables and constants
# NOTE(review): this script runs inside SolverStudio -- STUDENTS, gpa,
# gender, specialisation, ethnicity, groups, number_groups, chart_only,
# time_limit, the factor_* weights, NAMES, UPI and the Excel `Application`
# object are injected from the spreadsheet, not defined in this file.
# Weightings
gpa_mean_weight = factor_gpamean
gpa_variance_weight = factor_gpavar
specialisation_weight = factor_spec
gender_weight = factor_gender
ethnicity_weight = factor_eth
outstanding_gpa_weight = factor_out
# beta is the number to adjust the upper bound on number of
# type of people in a group
beta = 0
# The GPA of an 'oustanding' student
outstanding_gpa = 8.00
# Distinct specialisations, first-seen order ('Not Applicable' excluded).
SPECIALISATIONS = list()
for s in STUDENTS:
    if specialisation[s] != 'Not Applicable':
        if specialisation[s] not in SPECIALISATIONS:
            SPECIALISATIONS.append(specialisation[s])
# Distinct ethnicities, first-seen order ('Not Applicable' excluded).
ETHNICITIES = list()
for s in STUDENTS:
    if ethnicity[s] != 'Not Applicable':
        if ethnicity[s] not in ETHNICITIES:
            ETHNICITIES.append(ethnicity[s])
# Set for all groups
GROUPS = range(1, int(number_groups) + 1)
# Set for students assigned to group
STUDENT_GROUP = [(s, g) for s in STUDENTS for g in GROUPS]
# Number of students
number_students = len(STUDENTS)
# Group sizes: every group has either m1 or m1+1 members.
m1 = int(number_students / number_groups)
m2 = m1+1
# Number of each group size (j1 groups of size m1, j2 of size m2)
j2 = int(number_students - m1 * number_groups)
j1 = int(number_groups - j2)
# Sets for each group size
GROUPS1 = range(1, j1 + 1)
GROUPS2 = range(j1 + 1, int(number_groups) + 1)
# Average GPA of all students
gpa_mean = sum([gpa[i] for i in STUDENTS]) / number_students
# Total GPA variance (population variance over all students)
gpa_variance_total = (sum(pow(gpa[s] - gpa_mean, 2)
                          for s in STUDENTS)) \
    / number_students
# Minimum number of females and males in each group
number_males = sum([gender[i].lower() == 'male' for i in STUDENTS])
male_min = int(number_males / number_groups)
number_females = sum([gender[i].lower() == 'female' for i in STUDENTS])
female_min = int(number_females / number_groups)
# Number in each specialisation
specialisation_counts = {}
for s in SPECIALISATIONS:
    specialisation_counts[s] = sum([specialisation[i].lower()
                                    == s.lower() for i in STUDENTS])
# Minimum/maximum number of each specialisation in each group.
# BUG FIX: divide as floats before ceil() -- under IronPython 2
# (SolverStudio) `count / number_groups` is integer division when both
# operands are ints, which silently floored the quotient and made ceil()
# a no-op, collapsing the "max" bound onto the "min".  float() is a
# no-op under Python 3 true division, so this is safe either way.
specialisation_min = {}
specialisation_max = {}
for s in SPECIALISATIONS:
    specialisation_min[s] = int(specialisation_counts[s] / number_groups)
    specialisation_max[s] = int(ceil(float(specialisation_counts[s])
                                     / number_groups) + beta)
# Number from each ethnicity
ethnicity_counts = {}
for e in ETHNICITIES:
    ethnicity_counts[e] = sum([ethnicity[i].lower()
                               == e.lower() for i in STUDENTS])
# Minimum/maximum number of each ethnicity in each group (same float fix).
ethnicity_min = {}
ethnicity_max = {}
for e in ETHNICITIES:
    ethnicity_min[e] = int(ethnicity_counts[e] / number_groups)
    ethnicity_max[e] = int(ceil(float(ethnicity_counts[e]) / number_groups)
                           + beta)
# Number of outstanding students in each group
# (sic: "oustanding" spelling is kept consistently throughout the script)
oustanding_count = 0
for s in STUDENTS:
    if gpa[s] >= outstanding_gpa:
        oustanding_count += 1
oustanding_gpa_min = int(oustanding_count / number_groups)
# Artificial variables sets for specialisations and ethnicities in each group
# Penalises violating constraint for number of each in group
SPECIALISATION_ARTIFICIAL = [(s, g) for s in SPECIALISATIONS for g in GROUPS]
ETHNICITY_ARTIFICIAL = [(e, g) for e in ETHNICITIES for g in GROUPS]
# ============================================================================
# OPTIMISATION MODEL START
# The model is only built and solved when charting alone was not
# requested; in chart-only mode the existing `groups` assignment from the
# spreadsheet is reused further below.
if chart_only != "Yes":
    # ========================================================================
    # Decision Variables
    # x = 1 if student s is assigned to group g, else 0
    x = LpVariable.dicts('x', STUDENT_GROUP, None, None, LpBinary)
    # Artificial (slack) variables let the distribution constraints be
    # violated at a heavy cost in the objective (1e4 factor below).
    female_artificial = LpVariable.dicts('female_artificial',
                                         GROUPS,
                                         0, number_students)
    male_artificial = LpVariable.dicts('male_artificial',
                                       GROUPS,
                                       0, number_students)
    specialisation_artificial_min = LpVariable.dicts(('specialisation_'
                                                      'artificial_min'),
                                                     SPECIALISATION_ARTIFICIAL,
                                                     0, number_students)
    specialisation_artificial_max = LpVariable.dicts(('specialisation_'
                                                      'artificial_max'),
                                                     SPECIALISATION_ARTIFICIAL,
                                                     0, number_students)
    ethnicity_artificial_min = LpVariable.dicts('ethnicity_artificial_min',
                                                ETHNICITY_ARTIFICIAL,
                                                0, number_students)
    ethnicity_artificial_max = LpVariable.dicts('ethnicity_artificial_max',
                                                ETHNICITY_ARTIFICIAL,
                                                0, number_students)
    oustanding_gpa_artificial = LpVariable.dicts('oustanding_gpa_artificial',
                                                 GROUPS,
                                                 0, number_students)
    # Trackers for the lowest/highest per-group mean GPA and variance.
    gpa_min = LpVariable('gpa_min', 0, 9)
    gpa_max = LpVariable('gpa_max', 0, 9)
    gpa_variance_min = LpVariable('gpa_variance_min', 0, 25)
    gpa_variance_max = LpVariable('gpa_variance_max', 0, 25)
    # ========================================================================
    # Objective Function
    # Minimise the spread of mean GPA and GPA variance across groups; the
    # 1e4 multiplier makes any use of an artificial variable dominate.
    problem += gpa_mean_weight * (gpa_max - gpa_min) \
        + gpa_variance_weight * (gpa_variance_max - gpa_variance_min) \
        + 1e4 * (specialisation_weight *
                 lpSum([(specialisation_artificial_min[i]
                         + specialisation_artificial_max[i])
                        for i in SPECIALISATION_ARTIFICIAL])
                 + gender_weight * lpSum([female_artificial[i]
                                          + male_artificial[i]
                                          for i in GROUPS])
                 + ethnicity_weight *
                 lpSum([(ethnicity_artificial_min[i]
                         + ethnicity_artificial_max[i])
                        for i in ETHNICITY_ARTIFICIAL])
                 + outstanding_gpa_weight *
                 lpSum([oustanding_gpa_artificial[i]
                        for i in GROUPS])), 'objective'
    # ========================================================================
    # Constraints
    # Every student is assigned to exactly one group
    for s in STUDENTS:
        problem += lpSum([x[(s, g)] for g in GROUPS]) == 1, 'single_group_%s' % s
    # Constraint for first group size
    for g in GROUPS1:
        # Size if group is m1
        problem += lpSum([x[(s, g)] for s in STUDENTS]) == m1, 'size_g%d' % g
        # Minimum GPA is given by group with lowest GPA
        problem += lpSum([gpa[s] * x[(s, g)] for s in STUDENTS]) >= m1 * gpa_min, \
            'calculate_min_gpa_g%d' % g
        # Maximum GPA is given by group with highest GPA
        problem += lpSum([gpa[s] * x[(s, g)] for s in STUDENTS]) <= m1 * gpa_max, \
            'calculate_max_gpa_g%d' % g
        # Minimum variance of GPA is given by group with lowest variance
        problem += lpSum([pow(gpa[s] - gpa_mean, 2) * x[(s, g)] for s in STUDENTS]) \
            >= m1 * gpa_variance_min, 'calculate_gpa_variance_min_g%d' % g
        # Maximum variance of GPA is given by group with highest variance
        problem += lpSum([pow(gpa[s] - gpa_mean, 2) * x[(s, g)] for s in STUDENTS]) \
            <= m1 * gpa_variance_max, 'calculate_gpa_variance_max_g%d' % g
    # Constraint for second group size
    for g in GROUPS2:
        # Size if group is m2
        problem += lpSum([x[(s, g)] for s in STUDENTS]) == m2, 'size_g%d' % g
        # Minimum GPA is given by group with lowest GPA
        problem += lpSum([gpa[s] * x[(s, g)] for s in STUDENTS]) >= m2 * gpa_min, \
            'calculate_min_gpa_g%d' % g
        # Maximum GPA is given by group with highest GPA
        problem += lpSum([gpa[s] * x[(s, g)] for s in STUDENTS]) <= m2 * gpa_max, \
            'calculate_max_gpa_g%d' % g
        # Minimum variance of GPA is given by group with lowest variance
        problem += lpSum([pow(gpa[s] - gpa_mean, 2) * x[(s, g)] for s in STUDENTS]) \
            >= m2 * gpa_variance_min, 'calculate_gpa_variance_min_g%d' % g
        # Maximum variance of GPA is given by group with highest variance
        problem += lpSum([pow(gpa[s] - gpa_mean, 2) * x[(s, g)] for s in STUDENTS]) \
            <= m2 * gpa_variance_max, 'calculate_gpa_variance_max_g%d' % g
# Semi-relaxed constraints to enforce gender,
# specialisation and ethnicity distribution
for g in GROUPS:
# Gender must be at least minimum (relaxed)
problem += lpSum([x[(s, g)] for s in STUDENTS if gender[s].lower() == 'female']) \
+ female_artificial[g] >= female_min, \
'min_females_g%d' % g
problem += lpSum([x[(s, g)] for s in STUDENTS if gender[s].lower() == 'male']) \
+ male_artificial[g] >= male_min, \
'min_males_g%d' % g
# Number from each specialisation must be at least min (relaxed)
for k in SPECIALISATIONS:
problem += lpSum([x[(s, g)] for s in STUDENTS
if specialisation[s].lower() == k.lower()]) \
+ specialisation_artificial_min[(k, g)] >= specialisation_min[k], \
'min_spec%s_g%d' % (k, g)
# Number from each specialisation must be at most max (relaxed)
for k in SPECIALISATIONS:
problem += lpSum([x[(s, g)] for s in STUDENTS
if specialisation[s].lower() == k.lower()]) \
- specialisation_artificial_max[(k, g)] <= specialisation_max[k], \
'max_spec%s_g%d' % (k, g)
# Number from each ethnicity must be at least min (relaxed)
for e in ETHNICITIES:
problem += lpSum([x[(s, g)] for s in STUDENTS
if ethnicity[s].lower() == e.lower()]) \
+ ethnicity_artificial_min[(e, g)] >= ethnicity_min[e], \
'min_eth%s_g%d' % (e, g)
# Number from each ethnicity must be at most max (relaxed)
for e in ETHNICITIES:
problem += lpSum([x[(s, g)] for s in STUDENTS
if ethnicity[s].lower() == e.lower()]) \
- ethnicity_artificial_max[(e, g)] <= ethnicity_max[e], \
'max_eth%s_g%d' % (e, g)
# Number of oustanding students must be at least min (relaxed)
problem += lpSum([x[(s, g)] for s in STUDENTS
if gpa[s] >= outstanding_gpa]) \
+ oustanding_gpa_artificial[g] >= oustanding_gpa_min, \
'out_gpa%s_g%d' % (e, g)
    # ========================================================================
    # Solve
    print('Solving . . .')
    try:
        # SolverStudio version < 0.6 exposes the CBC solver via `solvers`.
        problem.solve(solvers.PULP_CBC_CMD(msg=1, maxSeconds=time_limit))
    except:
        # new version >= 0.6 exposes COIN_CMD at module level.
        # NOTE(review): bare except is the deliberate version-compat
        # fallback here; it also hides genuine solver failures.
        problem.solve(COIN_CMD(msg=1, maxSeconds=time_limit))
    # ========================================================================
    # Solution Post Processing and Display in Excel spreadsheet
    print('Finished Solving')
    # Write group number for each student and add to group list
    for s, g in STUDENT_GROUP:
        if x[(s, g)].value() == 1:
            groups[s] = g
    # Spread between the best- and worst-off groups.
    gpa_difference = gpa_max.value() - gpa_min.value()
    gpa_variance_difference = gpa_variance_max.value() - gpa_variance_min.value()
    print('\n')
    print('Biggest difference in mean GPA: %.2f'
          % gpa_difference)
    print('Biggest difference in GPA variance: %.2f'
          % gpa_variance_difference)
    print('\n')
    # Non-zero values below mean a distribution constraint was relaxed.
    print('Values of artificial variables for relaxation')
    print('Specialisations Min: %.0f' %
          sum([specialisation_artificial_min[i].value()
               for i in SPECIALISATION_ARTIFICIAL]))
    print('Specialisations Max: %.0f' %
          sum([specialisation_artificial_max[i].value()
               for i in SPECIALISATION_ARTIFICIAL]))
    print('Females Min: %.0f' % sum([female_artificial[i].value()
                                     for i in GROUPS]))
    print('Males Min: %.0f' % sum([male_artificial[i].value()
                                   for i in GROUPS]))
    print('Ethnicities Min: %.0f' %
          sum([ethnicity_artificial_min[i].value() for i in ETHNICITY_ARTIFICIAL]))
    print('Ethnicities Max: %.0f' %
          sum([ethnicity_artificial_max[i].value() for i in ETHNICITY_ARTIFICIAL]))
    print('Oustanding GPA Min: %.0f' %
          sum([oustanding_gpa_artificial[i].value() for i in GROUPS]))
    print('\n')
# ============================================================================
# OPTIMISATION MODEL END
# If charting only, code will continue here, using the `groups` mapping
# that already exists in the spreadsheet.
# Make list to hold groups
students_in_group = {}
for g in GROUPS:
    students_in_group[g] = list()
for s in STUDENTS:
    for g in GROUPS:
        if groups[s] == g:
            students_in_group[g].append(s)
# Perform calculations for each group
students_group = {}
males_group = {}
females_group = {}
gpa_total_group = {}
specialisations_group = {}
for s in SPECIALISATIONS:
    specialisations_group[s] = {}
ethnicities_group = {}
for e in ETHNICITIES:
    ethnicities_group[e] = {}
# Initialise per-group counters to zero
for g in GROUPS:
    students_group[g] = 0
    males_group[g] = 0
    females_group[g] = 0
    gpa_total_group[g] = float(0.00)
    for s in SPECIALISATIONS:
        specialisations_group[s][g] = 0
    for e in ETHNICITIES:
        ethnicities_group[e][g] = 0
# Count the number of males, females etc in each group
for s in STUDENTS:
    g = groups[s]
    if gender[s].lower() == 'female':
        females_group[g] += 1
    elif gender[s].lower() == 'male':
        males_group[g] += 1
    gpa_total_group[g] += gpa[s]
    for k in SPECIALISATIONS:
        if specialisation[s] == k:
            specialisations_group[k][g] += 1
    for e in ETHNICITIES:
        if ethnicity[s] == e:
            ethnicities_group[e][g] += 1
    students_group[g] += 1
# Compute average GPA for each group
gpa_mean_group = {}
for g in GROUPS:
    gpa_mean_group[g] = float(gpa_total_group[g]) / float(students_group[g])
# Compute GPA variance (population) for each group
gpa_variance_group = {}
for g in GROUPS:
    gpa_variance_group[g] = float((sum(pow(gpa[s] - gpa_mean_group[g], 2)
                                       for s in students_in_group[g]))) \
        / float(len(students_in_group[g]))
# Print data to spreadsheet
# Summary Results
ws = Application.Worksheets('Summary_Results')
ws.Cells.Clear()
# Results for each group
ws.Cells(1, 1).Value = 'Group'
ws.Cells(1, 1).Font.Bold = True
# Rows of table: 'Whole Class' first, then one row per group
ws.Cells(2, 1).Value = 'Whole Class'
cell_index = 2
for g in GROUPS:
    cell_index += 1
    ws.Cells(cell_index, 1).Value = g
# Columns of Table
ws.Cells(1, 2).Value = 'Students'
ws.Cells(1, 2).Font.Bold = True
ws.Cells(1, 3).Value = 'Males'
ws.Cells(1, 3).Font.Bold = True
ws.Cells(1, 4).Value = 'Females'
ws.Cells(1, 4).Font.Bold = True
ws.Cells(1, 5).Value = 'Mean GPA'
ws.Cells(1, 5).Font.Bold = True
ws.Cells(1, 6).Value = 'GPA Variance'
ws.Cells(1, 6).Font.Bold = True
# One header column per specialisation (theme colour 5), then ethnicity
# (theme colour 6).
cell_index = 6
for s in SPECIALISATIONS:
    cell_index += 1
    ws.Cells(1, cell_index).Value = s
    ws.Cells(1, cell_index).Font.Bold = True
    ws.Range(ws.Cells(1, cell_index),
             ws.Cells(1, cell_index)).Interior.ThemeColor = 5
for e in ETHNICITIES:
    cell_index += 1
    ws.Cells(1, cell_index).Value = e
    ws.Cells(1, cell_index).Font.Bold = True
    ws.Range(ws.Cells(1, cell_index),
             ws.Cells(1, cell_index)).Interior.ThemeColor = 6
# Fill in table data by column
# Whole Class first
ws.Cells(2, 2).Value = number_students
ws.Cells(2, 3).Value = number_males
ws.Cells(2, 4).Value = number_females
ws.Cells(2, 5).Value = '%.2f' % gpa_mean
ws.Cells(2, 6).Value = '%.2f' % gpa_variance_total
cell_index = 6
for s in SPECIALISATIONS:
    cell_index += 1
    ws.Cells(2, cell_index).Value = specialisation_counts[s]
for e in ETHNICITIES:
    cell_index += 1
    ws.Cells(2, cell_index).Value = ethnicity_counts[e]
# Each group
cell_index = 2
for g in GROUPS:
    cell_index += 1
    ws.Cells(cell_index, 2).Value = students_group[g]
    ws.Cells(cell_index, 3).Value = males_group[g]
    ws.Cells(cell_index, 4).Value = females_group[g]
    ws.Cells(cell_index, 5).Value = '%.2f' % gpa_mean_group[g]
    ws.Cells(cell_index, 6).Value = '%.2f' % gpa_variance_group[g]
    col_index = 6
    for s in SPECIALISATIONS:
        col_index += 1
        ws.Cells(cell_index, col_index).Value = specialisations_group[s][g]
    for e in ETHNICITIES:
        col_index += 1
        ws.Cells(cell_index, col_index).Value = ethnicities_group[e][g]
# Insert data for box plots
# (col_index survives the loop above and anchors the extra columns:
# five quartile columns plus the stacked differences Excel needs)
col_index += 1
ws.Cells(1, col_index + 1).Value = 'gpa_min'
ws.Cells(1, col_index + 2).Value = 'gpa_q1'
ws.Cells(1, col_index + 3).Value = 'gpa_median'
ws.Cells(1, col_index + 4).Value = 'gpa_q3'
ws.Cells(1, col_index + 5).Value = 'gpa_max'
ws.Cells(1, col_index + 6).Value = 'gpa_d_min'
ws.Cells(1, col_index + 7).Value = 'gpa_d_q1'
ws.Cells(1, col_index + 8).Value = 'gpa_d_median'
ws.Cells(1, col_index + 9).Value = 'gpa_d_q3'
ws.Cells(1, col_index + 10).Value = 'gpa_d_max'
# Create array for each group containing gpas
data_summary = {}
for g in GROUPS:
    # .NET array so Excel's worksheet functions can consume it.
    array = Array.CreateInstance(object, len(students_in_group[g]))
    for i in range(len(students_in_group[g])):
        student = students_in_group[g][i]
        array[i] = gpa[student]
    min_gpa = Application.WorksheetFunction.Min(array)
    q1_gpa = Application.WorksheetFunction.Quartile(array, 1)
    med_gpa = Application.WorksheetFunction.Median(array)
    q3_gpa = Application.WorksheetFunction.Quartile(array, 3)
    max_gpa = Application.WorksheetFunction.Max(array)
    data_summary[g] = [min_gpa, q1_gpa, med_gpa, q3_gpa, max_gpa]
row_index = 2
for g in GROUPS:
    row_index += 1
    ws.Cells(row_index, col_index + 1).Value = '%.2f' % data_summary[g][0]
    ws.Cells(row_index, col_index + 2).Value = '%.2f' % data_summary[g][1]
    ws.Cells(row_index, col_index + 3).Value = '%.2f' % data_summary[g][2]
    ws.Cells(row_index, col_index + 4).Value = '%.2f' % data_summary[g][3]
    ws.Cells(row_index, col_index + 5).Value = '%.2f' % data_summary[g][4]
    # Differences needed for charting (stacked-bar box plot)
    ws.Cells(row_index, col_index + 6).Value = '%.2f' \
        % data_summary[g][0]
    ws.Cells(row_index, col_index + 7).Value = '%.2f' \
        % (data_summary[g][1] - data_summary[g][0])
    ws.Cells(row_index, col_index + 8).Value = '%.2f' \
        % (data_summary[g][2] - data_summary[g][1])
    ws.Cells(row_index, col_index + 9).Value = '%.2f' \
        % (data_summary[g][3] - data_summary[g][2])
    ws.Cells(row_index, col_index + 10).Value = '%.2f' \
        % (data_summary[g][4] - data_summary[g][3])
# Autofit columns in Summary_Results
ws.Activate()
ws.Cells.Select()
Application.Selection.Columns.AutoFit()
ws.Range(ws.Cells(2, 5), ws.Cells(number_groups+2, 6)).NumberFormat = '0.00'
ws.Range(ws.Cells(2, 1),
         ws.Cells(2, 6 + len(SPECIALISATIONS) +
                  len(ETHNICITIES))).Style = 'Good'
# NOTE(review): lowercase `ws.cells` (vs `ws.Cells` everywhere else)
# relies on late-bound COM name resolution -- confirm it resolves in
# SolverStudio/IronPython.
ws.Range(ws.cells(1, 1),
         ws.Cells(number_groups+2, 1)).Borders(10).LineStyle = 1
ws.Range(ws.cells(1, 1),
         ws.Cells(number_groups+2, 1)).HorizontalAlignment = -4131
ws.Cells(number_groups+5, 1).Select()
# Generate graphs
wb = Application.ActiveWorkbook
# Delete all existing charts from a previous run
for sheet in Application.Charts:
    if sheet.Name.endswith('_Chart') is True:
        Application.DisplayAlerts = False
        sheet.Delete()
        Application.DisplayAlerts = True
# GPA Box Plot Chart: a stacked-bar chart (type 52) built from the
# difference columns, with most series hidden to fake a box plot.
print('Charting GPA . . .')
Application.Worksheets('Summary_Results').Shapes.AddChart().Select()
a = Application.ActiveChart
a.ChartType = 52
a.SetSourceData(Source=ws.Range(ws.Cells(3, col_index + 6),
                                ws.Cells(2 + number_groups, col_index + 10)))
a.SeriesCollection(1).Select()
Application.Selection.Format.Fill.Visible = 0
Application.Selection.Format.Line.Visible = 0
a.SeriesCollection(2).Select()
Application.Selection.Format.Fill.Visible = 0
Application.Selection.Format.Line.Visible = 0
a.SeriesCollection(2).HasErrorBars = True
# a.SeriesCollection(2).ErrorBars.Select()
a.SeriesCollection(2).ErrorBar(Direction=1, Include=3, Type=2, Amount=100)
a.SeriesCollection(5).Select()
Application.Selection.Format.Fill.Visible = 0
Application.Selection.Format.Line.Visible = 0
a.SeriesCollection(4).HasErrorBars = True
a = Application.ActiveChart
# Excel enumeration constants for ErrorBar
xlY = 1
xlPlusValues = 2
xlCustom = -4114
a.SeriesCollection(4).ErrorBar(Direction=xlY, Include=xlPlusValues,
                               Type=xlCustom, MinusValues="={0}",
                               Amount=ws.Range(ws.Cells(3, col_index + 10),
                                               ws.Cells(2 + number_groups,
                                                        col_index + 10)))
a.SeriesCollection(4).Select()
Application.Selection.Format.Fill.Visible = 0
Application.Selection.Format.Line.Visible = 1
Application.Selection.Format.Line.ForeColor.ObjectThemeColor = 13
a.SeriesCollection(3).Select()
Application.Selection.Format.Fill.Visible = 0
Application.Selection.Format.Line.Visible = 1
Application.Selection.Format.Line.ForeColor.ObjectThemeColor = 13
# SetElement codes: chart/axis title and legend layout elements
a.SetElement(2)
a.SetElement(306)
a.SetElement(301)
a.SetElement(102)
a.Legend.Select()
Application.Selection.Delete()
a.ChartTitle.Text = ('GPA spread per group'
                     ' (lines bottom to top = Min, Q1, Median, Q3, Max)')
a.Axes(1, 1).HasTitle = True
a.Axes(1, 1).AxisTitle.Text = 'Group'
a.Axes(2, 1).HasTitle = True
a.Axes(2, 1).AxisTitle.Text = 'GPA'
a.Axes(2).MaximumScale = 9
a.Axes(2).MinimumScale = 0
a.Location(Where=1, Name='GPA_Chart')
a.deselect()
# (A commented-out "GPA mean and variance" chart block was removed here;
# restore from version history if it is needed again.)
x_axis_range = ws.Range(ws.Cells(3, 1), ws.Cells(2 + number_groups, 1))
# Gender
print('Charting Gender . . .')
Application.Worksheets('Summary_Results').Activate()
ws.Cells(number_groups+5, 1).Select()
Application.Worksheets('Summary_Results').Shapes.AddChart(201, 54).Select()
a = Application.ActiveChart
a.SeriesCollection().NewSeries()
a.SeriesCollection(1).Name = 'Male'
a.SeriesCollection(1).Values = ws.Range(ws.Cells(3, 3),
                                        ws.Cells(2 + number_groups, 3))
a.SeriesCollection(1).XValues = x_axis_range
a.SeriesCollection().NewSeries()
a.SeriesCollection(2).Name = 'Female'
a.SeriesCollection(2).Values = ws.Range(ws.Cells(3, 4),
                                        ws.Cells(2 + number_groups, 4))
a.SeriesCollection(2).XValues = x_axis_range
a.SetElement(2)
a.SetElement(306)
a.SetElement(301)
a.SetElement(102)
a.ChartTitle.Text = 'Number of Males and Females in each group'
a.Axes(1, 1).HasTitle = True
a.Axes(1, 1).AxisTitle.Text = 'Group'
a.Axes(2, 1).HasTitle = True
a.Axes(2, 1).AxisTitle.Text = 'Number of Students'
a.Axes(2).MaximumScale = m2 + 1
a.Axes(2).MinimumScale = 0
a.Location(Where=1, Name='Gender_Chart')
a.deselect()
# Specialisations: one bar chart per specialisation
print('Charting Specialisations . . .')
col_index = 6
for s in SPECIALISATIONS:
    col_index += 1
    ws = Application.Worksheets('Summary_Results')
    y_axis_range = ws.Range(ws.Cells(3, col_index),
                            ws.Cells(2 + number_groups, col_index))
    ws.Activate()
    ws.Cells(number_groups+5, 1).Select()
    ws.Shapes.AddChart(201, 54).Select()
    a = Application.ActiveChart
    a.SeriesCollection().NewSeries()
    a.SeriesCollection(1).Name = s
    a.SeriesCollection(1).Values = y_axis_range
    a.SeriesCollection(1).XValues = x_axis_range
    a.SetElement(2)
    a.SetElement(306)
    a.SetElement(301)
    a.SetElement(102)
    a.ChartTitle.Text = '%s students in each group' % s.title()
    a.Axes(1, 1).HasTitle = True
    a.Axes(1, 1).AxisTitle.Text = 'Group'
    a.Axes(2, 1).HasTitle = True
    a.Axes(2, 1).AxisTitle.Text = 'Number of Students'
    a.Axes(2).MaximumScale = m2 + 1
    a.Axes(2).MinimumScale = 0
    title = '%s_Chart' % s
    a.Location(Where=1, Name=title)
    wb.Sheets(title).Tab.ThemeColor = 9
    wb.Sheets(title).Tab.TintAndShade = 0
    a.deselect()
# Ethnicities: one bar chart per ethnicity
print('Charting Ethnic Groups . . .')
for e in ETHNICITIES:
    col_index += 1
    ws = Application.Worksheets('Summary_Results')
    y_axis_range = ws.Range(ws.Cells(3, col_index),
                            ws.Cells(2 + number_groups, col_index))
    ws.Activate()
    ws.Cells(number_groups+5, 1).Select()
    ws.Shapes.AddChart(201, 54).Select()
    a = Application.ActiveChart
    a.SeriesCollection().NewSeries()
    a.SeriesCollection(1).Name = e
    a.SeriesCollection(1).Values = y_axis_range
    a.SeriesCollection(1).XValues = x_axis_range
    a.SetElement(2)
    a.SetElement(306)
    a.SetElement(301)
    a.SetElement(102)
    a.ChartTitle.Text = '%s students in each group' % e.title()
    a.Axes(1, 1).HasTitle = True
    a.Axes(1, 1).AxisTitle.Text = 'Group'
    a.Axes(2, 1).HasTitle = True
    a.Axes(2, 1).AxisTitle.Text = 'Number of Students'
    a.Axes(2).MaximumScale = m2 + 1
    a.Axes(2).MinimumScale = 0
    title = '%s(E)_Chart' % e
    a.Location(Where=1, Name=title)
    wb.Sheets(title).Tab.ThemeColor = 6
    wb.Sheets(title).Tab.TintAndShade = 0
    a.deselect()
# Keep the summary sheet next to the raw student data.
Application.Worksheets('Summary_Results').\
    Move(after=Application.Worksheets('Student_Data'))
# Group Lists in Separate Workbook
# Setup paths: output workbooks are timestamped and saved next to the
# active workbook.
now = datetime.datetime.now()
path = Application.ActiveWorkbook.path
append_string = now.strftime('%Y-%m-%d_%H.%M.%S')
instructor_workbook_name = 'Groups_InstructorView_%s.xlsx' % (append_string)
student_workbook_name = 'Groups_StudentView_%s.xlsx' % (append_string)
save_path_instructor = '%s\\%s' % (path, instructor_workbook_name)
save_path_student = '%s\\%s' % (path, student_workbook_name)
# Instructor View
# (A large commented-out "Instructor View" export -- the student columns
# plus GPA and ethnic group -- was removed here; restore from version
# history if the instructor workbook is needed again.)
# Student View
# Create new workbook
Application.Workbooks.Add()
Application.ActiveWorkbook.SaveAs(Filename=save_path_student)
wb = Application.Workbooks(student_workbook_name)
# All groups on a single sheet
wb.Sheets.Add()
wb.ActiveSheet.Name = 'All_Groups'
ws = wb.Worksheets('All_Groups')
# Headers (the student view deliberately omits gender, GPA and ethnicity)
ws.Cells(1, 1).Value = 'Group'
ws.Cells(1, 1).Font.Bold = True
ws.Cells(1, 2).Value = 'Name'
ws.Cells(1, 2).Font.Bold = True
ws.Cells(1, 3).Value = 'UPI'
ws.Cells(1, 3).Font.Bold = True
ws.Cells(1, 4).Value = 'Discipline'
ws.Cells(1, 4).Font.Bold = True
ws.Cells(1, 5).Value = 'UoA Email'
ws.Cells(1, 5).Font.Bold = True
# Data
row_index = 1
for g in GROUPS:
    for s in students_in_group[g]:
        row_index += 1
        ws.Cells(row_index, 1).Value = g
        ws.Cells(row_index, 2).Value = NAMES[s]
        ws.Cells(row_index, 3).Value = UPI[s]
        ws.Cells(row_index, 4).Value = specialisation[s]
        ws.Cells(row_index, 5).Value = '%s@aucklanduni.ac.nz' % UPI[s]
    # Space between each group
    #row_index += 1
ws.Activate()
ws.Cells.Select()
Application.Selection.Columns.AutoFit()
ws.Cells(1, 1).Select()
# Remove the default sheets created with the new workbook
for sheet in wb.Worksheets:
    if sheet.Name != 'All_Groups':
        Application.DisplayAlerts = False
        sheet.Delete()
        Application.DisplayAlerts = True
# Make a sheet for each group
for g in GROUPS:
    count = Application.Worksheets.Count
    wb.Sheets.Add(After=wb.Sheets(count))
    wb_name = 'Group_%s' % g
    wb.ActiveSheet.Name = wb_name
    ws = wb.Worksheets(wb_name)
    # Headers
    ws.Cells(1, 1).Value = 'Group'
    ws.Cells(1, 1).Font.Bold = True
    ws.Cells(1, 2).Value = 'Name'
    ws.Cells(1, 2).Font.Bold = True
    ws.Cells(1, 3).Value = 'UPI'
    ws.Cells(1, 3).Font.Bold = True
    ws.Cells(1, 4).Value = 'Discipline'
    ws.Cells(1, 4).Font.Bold = True
    ws.Cells(1, 5).Value = 'UoA Email'
    ws.Cells(1, 5).Font.Bold = True
    # Data
    row_index = 1
    for s in students_in_group[g]:
        row_index += 1
        ws.Cells(row_index, 1).Value = g
        ws.Cells(row_index, 2).Value = NAMES[s]
        ws.Cells(row_index, 3).Value = UPI[s]
        ws.Cells(row_index, 4).Value = specialisation[s]
        ws.Cells(row_index, 5).Value = '%s@aucklanduni.ac.nz' % UPI[s]
    # Activate and autofit
    ws.Activate()
    ws.Cells.Select()
    Application.Selection.Columns.AutoFit()
    ws.Cells(1, 1).Select()
wb.Worksheets('All_Groups').Activate()
wb.Save()
| |
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from models import ConflictException
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import StringMessage
from models import BooleanMessage
from models import Conference
from models import ConferenceForm
from models import ConferenceForms
from models import ConferenceQueryForm
from models import ConferenceQueryForms
from models import TeeShirtSize
from models import Session
from models import SessionForm
from models import SessionForms
from models import SessionsByType
from models import SessionsBySpeaker
from models import AddSessionToWishlist
from models import FindSessionByDatewithStartTimeRange
from models import SessionsBySpeakerOnSpecificDate
from models import Speaker
from models import SpeakerForm
from models import SpeakerForms
from settings import WEB_CLIENT_ID
from settings import ANDROID_CLIENT_ID
from settings import IOS_CLIENT_ID
from settings import ANDROID_AUDIENCE
from utils import getUserId
__author__ = 'wesc+api@google.com (Wesley Chun)'
# OAuth2 scope and client ID used when authorizing endpoint callers.
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
# Memcache keys under which announcement / featured-speaker text is cached.
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
MEMCACHE_FEATURED_SPEAKER_KEY = "FEATURED_SPEAKER"
# Template used by _cacheAnnouncement() for nearly-sold-out conferences.
ANNOUNCEMENT_TPL = ('Last chance to attend! The following conferences '
                    'are nearly sold out: %s')
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Default field values applied when a Conference is created without them.
DEFAULTS = {
    "city": "Default City",
    "maxAttendees": 0,
    "seatsAvailable": 0,
    "topics": ["Default", "Topic"],
}
# Mapping from query-form operator names to datastore filter operators.
OPERATORS = {
    'EQ': '=',
    'GT': '>',
    'GTEQ': '>=',
    'LT': '<',
    'LTEQ': '<=',
    'NE': '!='
}
# Mapping from query-form field names to Conference model property names.
FIELDS = {
    'CITY': 'city',
    'TOPIC': 'topics',
    'MONTH': 'month',
    'MAX_ATTENDEES': 'maxAttendees',
}
# Request wrapper: void body + required websafe conference key in the URL.
CONF_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1, required=True),
)
# Request wrapper: ConferenceForm body + optional websafe key in the URL.
CONF_POST_REQUEST = endpoints.ResourceContainer(
    ConferenceForm,
    websafeConferenceKey=messages.StringField(1),
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api(name='conference', version='v1', audiences=[ANDROID_AUDIENCE],
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID,
ANDROID_CLIENT_ID, IOS_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Conference objects - - - - - - - - - - - - - - - - -
def _copyConferenceToForm(self, conf, displayName):
"""Copy relevant fields from Conference to ConferenceForm."""
cf = ConferenceForm()
for field in cf.all_fields():
if hasattr(conf, field.name):
# convert Date to date string; just copy others
if field.name.endswith('Date'):
setattr(cf, field.name, str(getattr(conf, field.name)))
else:
setattr(cf, field.name, getattr(conf, field.name))
elif field.name == "websafeKey":
setattr(cf, field.name, conf.key.urlsafe())
if displayName:
setattr(cf, 'organizerDisplayName', displayName)
cf.check_initialized()
return cf
def _createConferenceObject(self, request):
"""Create or update Conference object,
returning ConferenceForm/request."""
# preload necessary data items
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
if not request.name:
raise endpoints.BadRequestException(
"Conference 'name' field required")
# copy ConferenceForm/ProtoRPC Message into dict
data = {field.name: getattr(request, field.name)
for field in request.all_fields()}
del data['websafeKey']
del data['organizerDisplayName']
# add default values for those missing
# (both data model & outbound Message)
for df in DEFAULTS:
if data[df] in (None, []):
data[df] = DEFAULTS[df]
setattr(request, df, DEFAULTS[df])
# convert dates from strings to Date objects;
# set month based on start_date
if data['startDate']:
data['startDate'] = datetime.strptime(
data['startDate'][:10], "%Y-%m-%d").date()
data['month'] = data['startDate'].month
else:
data['month'] = 0
if data['endDate']:
data['endDate'] = datetime.strptime(
data['endDate'][:10], "%Y-%m-%d").date()
# set seatsAvailable to be same as maxAttendees on creation
if data["maxAttendees"] > 0:
data["seatsAvailable"] = data["maxAttendees"]
# generate Profile Key based on user ID and Conference
# ID based on Profile key get Conference key from ID
p_key = ndb.Key(Profile, user_id)
c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
c_key = ndb.Key(Conference, c_id, parent=p_key)
data['key'] = c_key
data['organizerUserId'] = request.organizerUserId = user_id
# create Conference, send email to organizer confirming
# creation of Conference & return (modified) ConferenceForm
Conference(**data).put()
taskqueue.add(params={'email': user.email(),
'conferenceInfo': repr(request)},
url='/tasks/send_confirmation_email')
return request
@ndb.transactional()
def _updateConferenceObject(self, request):
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# copy ConferenceForm/ProtoRPC Message into dict
data = {field.name: getattr(request, field.name)
for field in request.all_fields()}
# update existing conference
try:
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
except:
conf = None
# check that conference exists
if not conf:
raise endpoints.NotFoundException(
'No conference found for key: %s' \
% request.websafeConferenceKey)
# check that user is owner
if user_id != conf.organizerUserId:
raise endpoints.ForbiddenException(
'Only the owner can update the conference.')
# Not getting all the fields, so don't create a new object; just
# copy relevant fields from ConferenceForm to Conference object
for field in request.all_fields():
data = getattr(request, field.name)
# only copy fields where we get data
if data not in (None, []):
# special handling for dates (convert string to Date)
if field.name in ('startDate', 'endDate'):
data = datetime.strptime(data, "%Y-%m-%d").date()
if field.name == 'startDate':
conf.month = data.month
# write to Conference object
setattr(conf, field.name, data)
conf.put()
prof = ndb.Key(Profile, user_id).get()
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(ConferenceForm, ConferenceForm, path='conference',
http_method='POST', name='createConference')
def createConference(self, request):
"""Create new conference."""
return self._createConferenceObject(request)
@endpoints.method(CONF_POST_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='PUT', name='updateConference')
def updateConference(self, request):
"""Update conference w/provided fields & return w/updated info."""
return self._updateConferenceObject(request)
@endpoints.method(CONF_GET_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='GET', name='getConference')
def getConference(self, request):
"""Return requested conference (by websafeConferenceKey)."""
# get Conference object from request; bail if not found
try:
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
except:
conf = None
if not conf:
raise endpoints.NotFoundException(
'No conference found for key: %s' \
% request.websafeConferenceKey)
prof = conf.key.parent().get()
# return ConferenceForm
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='getConferencesCreated',
http_method='POST', name='getConferencesCreated')
def getConferencesCreated(self, request):
"""Return conferences created by user."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# create ancestor query for all key matches for this user
confs = Conference.query(ancestor=ndb.Key(Profile, user_id))
prof = ndb.Key(Profile, user_id).get()
# return set of ConferenceForm objects per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(
conf, getattr(prof, 'displayName')) for conf in confs])
def _getQuery(self, request):
"""Return formatted query from the submitted filters."""
q = Conference.query()
inequality_filter, filters = self._formatFilters(request.filters)
# If exists, sort on inequality filter first
if not inequality_filter:
q = q.order(Conference.name)
else:
q = q.order(ndb.GenericProperty(inequality_filter))
q = q.order(Conference.name)
for filtr in filters:
if filtr["field"] in ["month", "maxAttendees"]:
filtr["value"] = int(filtr["value"])
formatted_query = ndb.query.FilterNode(
filtr["field"], filtr["operator"], filtr["value"])
q = q.filter(formatted_query)
return q
def _formatFilters(self, filters):
"""Parse, check validity and format user supplied filters."""
formatted_filters = []
inequality_field = None
for f in filters:
filtr = {field.name: getattr(f, field.name)
for field in f.all_fields()}
try:
filtr["field"] = FIELDS[filtr["field"]]
filtr["operator"] = OPERATORS[filtr["operator"]]
except KeyError:
raise endpoints.BadRequestException(
"Filter contains invalid field or operator.")
# Every operation except "=" is an inequality
if filtr["operator"] != "=":
# check if inequality operation has been used in previous
# filters disallow the filter if inequality was performed
# on a different field before track the field on which the
# inequality operation is performed
if inequality_field and inequality_field != filtr["field"]:
raise endpoints.BadRequestException(
"Inequality filter is allowed on only one field.")
else:
inequality_field = filtr["field"]
formatted_filters.append(filtr)
return (inequality_field, formatted_filters)
@endpoints.method(ConferenceQueryForms, ConferenceForms,
path='queryConferences',
http_method='POST',
name='queryConferences')
def queryConferences(self, request):
"""Query for conferences."""
conferences = self._getQuery(request)
# need to fetch organiser displayName from profiles
# get all keys and use get_multi for speed
organisers = [(ndb.Key(Profile, conf.organizerUserId))
for conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return individual ConferenceForm object per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(
conf, names[conf.organizerUserId]) for conf in
conferences])
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
def _copyProfileToForm(self, prof):
"""Copy relevant fields from Profile to ProfileForm."""
# copy relevant fields from Profile to ProfileForm
pf = ProfileForm()
for field in pf.all_fields():
if hasattr(prof, field.name):
# convert t-shirt string to Enum; just copy others
if field.name == 'teeShirtSize':
setattr(pf, field.name, getattr(
TeeShirtSize, getattr(prof, field.name)))
else:
setattr(pf, field.name, getattr(prof, field.name))
pf.check_initialized()
return pf
def _getProfileFromUser(self):
"""Return user Profile from datastore, creating new one
if non-existent."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# get Profile from datastore
user_id = getUserId(user)
p_key = ndb.Key(Profile, user_id)
profile = p_key.get()
# create new Profile if not there
if not profile:
profile = Profile(key=p_key,
displayName=user.nickname(),
mainEmail=user.email(),
teeShirtSize=str(TeeShirtSize.NOT_SPECIFIED),)
profile.put()
return profile # return Profile
def _doProfile(self, save_request=None):
"""Get user Profile and return to user, possibly updating it first."""
# get user Profile
prof = self._getProfileFromUser()
# if saveProfile(), process user-modifyable fields
if save_request:
for field in ('displayName', 'teeShirtSize'):
if hasattr(save_request, field):
val = getattr(save_request, field)
if val:
setattr(prof, field, str(val))
# if field == 'teeShirtSize':
# setattr(prof, field, str(val).upper())
# else:
# setattr(prof, field, val)
prof.put()
# return ProfileForm
return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
"""Return user profile."""
return self._doProfile()
@endpoints.method(ProfileMiniForm, ProfileForm,
path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
"""Update & return user profile."""
return self._doProfile(request)
# - - - Announcements - - - - - - - - - - - - - - - - - - - -
@staticmethod
def _cacheAnnouncement():
"""Create Announcement & assign to memcache; used by
memcache cron job & putAnnouncement().
"""
confs = Conference.query(ndb.AND(
Conference.seatsAvailable <= 5,
Conference.seatsAvailable > 0)
).fetch(projection=[Conference.name])
if confs:
# If there are almost sold out conferences,
# format announcement and set it in memcache
announcement = ANNOUNCEMENT_TPL % (
', '.join(conf.name for conf in confs))
memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
else:
# If there are no sold out conferences,
# delete the memcache announcements entry
announcement = ""
memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
return announcement
@staticmethod
def _setFeaturedSpeaker(conf_key, speaker_key):
"""Create Featured Speaker text and assign to memcache;
used by getFeaturedSpeaker().
"""
conf = ndb.Key(urlsafe=conf_key).get()
speaker = ndb.Key(urlsafe=speaker_key).get()
q = Session.query(ancestor=conf.key)
q = q.filter(Session.speaker == speaker.key).fetch()
# if number of sessions for this speaker is > 1
# then this is the featured speaker
if len(q) > 1:
# format announcement and set it in memcache
featured_speaker = "Our featured speaker for %s is: %s %s!" \
% (conf.name, speaker.firstName, speaker.lastName)
memcache.set(MEMCACHE_FEATURED_SPEAKER_KEY, featured_speaker)
else:
featured_speaker = None
return featured_speaker
@endpoints.method(message_types.VoidMessage, StringMessage,
path='conference/announcement/get',
http_method='GET', name='getAnnouncement')
def getAnnouncement(self, request):
"""Return Announcement from memcache."""
return StringMessage(data=memcache.get(
MEMCACHE_ANNOUNCEMENTS_KEY) or "")
@endpoints.method(message_types.VoidMessage, StringMessage,
path='conference/featured_speaker/get',
http_method='GET', name='getFeaturedSpeaker')
def getAnnouncement(self, request):
"""Return Featured Speaker from memcache."""
return StringMessage(data=memcache.get(
MEMCACHE_FEATURED_SPEAKER_KEY) or "")
# - - - Registration - - - - - - - - - - - - - - - - - - - -
@ndb.transactional(xg=True)
def _conferenceRegistration(self, request, reg=True):
"""Register or unregister user for selected conference."""
retval = None
prof = self._getProfileFromUser() # get user Profile
# check if conf exists given websafeConfKey
# get conference; check that it exists
wsck = request.websafeConferenceKey
try:
conf = ndb.Key(urlsafe=wsck).get()
except:
conf = None
if not conf:
raise endpoints.NotFoundException(
'No conference found for key: %s' % wsck)
# register
if reg:
# check if user already registered otherwise add
if wsck in prof.conferenceKeysToAttend:
raise ConflictException(
"You have already registered for this conference")
# check if seats avail
if conf.seatsAvailable <= 0:
raise ConflictException(
"There are no seats available.")
# register user, take away one seat
prof.conferenceKeysToAttend.append(wsck)
conf.seatsAvailable -= 1
retval = True
# unregister
else:
# check if user already registered
if wsck in prof.conferenceKeysToAttend:
# unregister user, add back one seat
prof.conferenceKeysToAttend.remove(wsck)
conf.seatsAvailable += 1
retval = True
else:
retval = False
# write things back to the datastore & return
prof.put()
conf.put()
return BooleanMessage(data=retval)
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='conferences/attending',
http_method='GET', name='getConferencesToAttend')
def getConferencesToAttend(self, request):
"""Get list of conferences that user has registered for."""
prof = self._getProfileFromUser() # get user Profile
conf_keys = [ndb.Key(urlsafe=wsck) for wsck in
prof.conferenceKeysToAttend]
conferences = ndb.get_multi(conf_keys)
# get organizers
organisers = [ndb.Key(Profile, conf.organizerUserId) for
conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return set of ConferenceForm objects per Conference
return ConferenceForms(items=[self._copyConferenceToForm(
conf, names[conf.organizerUserId]) for
conf in conferences])
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='POST', name='registerForConference')
def registerForConference(self, request):
"""Register user for selected conference."""
return self._conferenceRegistration(request)
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='DELETE', name='unregisterFromConference')
def unregisterFromConference(self, request):
"""Unregister user for selected conference."""
return self._conferenceRegistration(request, reg=False)
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='filterPlayground',
http_method='GET', name='filterPlayground')
def filterPlayground(self, request):
"""Filter Playground"""
q = Conference.query()
# field = "city"
# operator = "="
# value = "London"
# f = ndb.query.FilterNode(field, operator, value)
# q = q.filter(f)
q = q.filter(Conference.city == "London")
q = q.filter(Conference.topics == "Medical Innovations")
q = q.filter(Conference.month == 6)
return ConferenceForms(
items=[self._copyConferenceToForm(conf, "") for conf in q]
)
# - - - Sessions - - - - - - - - - - - - - - - - - - - -
def _copySessionToForm(self, sess):
"""Copy relevant fields from Session to SessionForm."""
sf = SessionForm()
for field in sf.all_fields():
if hasattr(sess, field.name):
# Convert Date to string
# Convert Time to string in HH:MM only
# else convert others as is
if field.name.endswith('date'):
setattr(sf, field.name, str(getattr(sess, field.name)))
elif field.name.endswith('Time'):
setattr(sf, field.name,
str(getattr(sess, field.name).strftime("%H:%M")))
else:
setattr(sf, field.name, getattr(sess, field.name))
elif field.name == "sessionWebSafeKey":
setattr(sf, field.name, sess.key.urlsafe())
elif field.name == "speakerName":
try:
speaker = sess.speaker.get()
speakerName = "%s %s" % (getattr(speaker, "firstName"),
getattr(speaker, "lastName"))
setattr(sf, 'speakerName', speakerName)
except:
pass
sf.check_initialized()
return sf
def _createSessionObject(self, request):
"""Create a Session, returning SessionForm/request."""
# preload necessary data items
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
if not request.name:
raise endpoints.BadRequestException(
"Session 'name' field required")
data = {field.name: getattr(request, field.name) for
field in request.all_fields()}
# get existing conference using web safe key
try:
conf = ndb.Key(urlsafe=data['conferenceWebSafeKey']).get()
except:
conf = None
# check that conference exists
if not conf:
raise endpoints.NotFoundException(
'No conference found for key: %s' \
% data['conferenceWebSafeKey'])
if user_id != conf.organizerUserId:
raise endpoints.ForbiddenException(
'Only the Conference owner can create sessions.')
# get speaker using web safe key
try:
speaker = ndb.Key(urlsafe=data['speakerWebSafeKey']).get()
data['speaker'] = speaker.key
# check parent of key to confirm Speaker is owned by user
speaker_parent = speaker.key.parent().pairs()
speaker_parent = speaker_parent[0][1]
except:
speaker = None
speaker_parent = None
if user_id != speaker_parent:
raise endpoints.ForbiddenException(
'Only the Speaker owner can use this speaker.')
# convert dates/times from strings to Date/Time objects
if data['date']:
data['date'] = datetime.strptime(
data['date'][:10], "%Y-%m-%d").date()
if data['startTime']:
data['startTime'] = datetime.strptime(
data['startTime'][:5], "%H:%M").time()
# generate Session ID based on Conf key, get Session key from ID
session_id = Session.allocate_ids(size=1, parent=conf.key)[0]
session_key = ndb.Key(Session, session_id, parent=conf.key)
data['key'] = session_key
del data['conferenceWebSafeKey']
del data['sessionWebSafeKey']
del data['speakerName']
del data['speakerWebSafeKey']
# create Session
Session(**data).put()
# add a task to see if this new session creates a featured speaker
taskqueue.add(params={'websafeConferenceKey': conf.key.urlsafe(),
'websafeSpeakerKey': speaker.key.urlsafe()},
url='/tasks/set_featured_speaker',
method='GET')
return request
def _SessionToWishList(self, request, add=True):
"""Register session to user's wishlist."""
retval = None
prof = self._getProfileFromUser() # get user Profile
# check if session exists given websafeConfKey
# get session; check that it exists
session_wsck = request.sessionWebSafeKey
try:
session = ndb.Key(urlsafe=session_wsck).get()
except:
session = None
if not session:
raise endpoints.NotFoundException(
'No session found for key: %s' % session_wsck)
# add
if add:
# check if user already has session in wishlist
if session_wsck in prof.sessionKeysToAttend:
raise ConflictException(
"You already have this session on your wishlist.")
# add session to wishlist
prof.sessionKeysToAttend.append(session_wsck)
retval = True
# remove
else:
# check if user already has session in wishlist
if session_wsck in prof.sessionKeysToAttend:
# remove session from wishlist
prof.sessionKeysToAttend.remove(session_wsck)
retval = True
else:
retval = False
# write things back to the datastore & return
prof.put()
return BooleanMessage(data=retval)
@endpoints.method(SessionForm, SessionForm,
path='conference/create_session',
http_method='POST', name='createSession')
def createSession(self, request):
"""Create new conference session."""
return self._createSessionObject(request)
@endpoints.method(CONF_GET_REQUEST, SessionForms,
path='conference/sessions',
http_method='GET', name='getConferenceSessions')
def getConferenceSessions(self, request):
"""Return sessions for a Conference (by websafeConferenceKey)."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
wsck = request.websafeConferenceKey
try:
conf = ndb.Key(urlsafe=wsck).get()
except:
conf = None
if not conf:
raise endpoints.NotFoundException(
'No conference found for key: %s' % wsck)
# create ancestor query for all key matches for this conference
sessions = Session.query(ancestor=conf.key)
# return set of SessionForm objects for conference
return SessionForms(
items=[self._copySessionToForm(session) for session in sessions]
)
@endpoints.method(SessionsByType, SessionForms,
path='conference/sessions_by_type',
http_method='GET', name='getConferenceSessionsByType')
def getConferenceSessionsByType(self, request):
"""Return sessions for a Conference by Type."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
wsck = request.websafeConferenceKey
try:
conf = ndb.Key(urlsafe=wsck).get()
except:
conf = None
if not conf:
raise endpoints.NotFoundException(
'No conference found for key: %s' % wsck)
# create ancestor query for all key matches for this conference
# then filter on typeOfSession
typeOfSession = request.typeOfSession
sessions = Session.query(ancestor=conf.key)
sessions = sessions.filter(Session.typeOfSession == typeOfSession)
# return set of SessionForm objects per Conference
return SessionForms(
items=[self._copySessionToForm(session) for session in sessions]
)
@endpoints.method(SessionsBySpeaker, SessionForms,
path='conference/sessions_by_speaker',
http_method='GET', name='getSessionsBySpeaker')
def getConferenceSessionsBySpeaker(self, request):
"""Return Conference sessions by Speaker."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
sp_lastName = request.lastName
sp_firstName = request.firstName
if sp_firstName:
# find by first and last name
speaker = Speaker.query(ndb.AND(
Speaker.lastName == sp_lastName,
Speaker.firstName == sp_firstName))
else:
# find by last name only
speaker = Speaker.query(Speaker.lastName == sp_lastName)
speaker_keys = [sp.key for sp in speaker]
# iterate over each key finding all sessions
all_sessions = []
for sp_k in speaker_keys:
sessions = Session.query(Session.speaker == sp_k)
for s in sessions:
all_sessions.append(s)
# return list of sessions that match each of the speaker_keys
return SessionForms(
items=[self._copySessionToForm(sess) for sess in all_sessions]
)
@endpoints.method(AddSessionToWishlist, BooleanMessage,
path='session/add_to_wishlist',
http_method='POST', name='addSessionToWishlist')
def addSessionToWishlist(self, request):
"""Add session to user's wishlist."""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
return self._SessionToWishList(request)
@endpoints.method(AddSessionToWishlist, BooleanMessage,
path='session/remove_from_wishlist',
http_method='DELETE', name='removeSessionFromWishlist')
def removeSessionFromWishlist(self, request):
"""Remove session to user's wishlist."""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
return self._SessionToWishList(request, add=False)
@endpoints.method(message_types.VoidMessage, SessionForms,
path='sessions/get_wishlist',
http_method='GET', name='getSessionsInWishlist')
def getSessionsInWishlist(self, request):
"""Get list of sessions that user has on their wishlist."""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
prof = self._getProfileFromUser() # get user Profile
session_keys = [ndb.Key(urlsafe=wsck) for wsck
in prof.sessionKeysToAttend]
sessions = ndb.get_multi(session_keys)
# return set of session objects in wishlist
return SessionForms(
items=[self._copySessionToForm(session) for session in sessions]
)
@endpoints.method(FindSessionByDatewithStartTimeRange, SessionForms,
path='session/find_by_date_and_start_time_range',
http_method='GET',
name='FindSessionByDatewithStartTimeRange')
def FindSessionByDatewithStartTimeRange(self, request):
"""Find Sessions By Date with Start Time Range"""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
sessions = Session.query()
theStartTime = datetime.strptime(
request.startTimeRangeBeginning, "%H:%M").time()
theEndTime = datetime.strptime(
request.startTimeRangeEnding, "%H:%M").time()
theDate = datetime.strptime(request.conferenceDate, "%Y-%m-%d").date()
sessions = sessions.filter(Session.startTime >= theStartTime)
sessions = sessions.filter(Session.startTime <= theEndTime)
sessions = sessions.filter(Session.date == theDate)
return SessionForms(
items=[self._copySessionToForm(session) for session in sessions]
)
@endpoints.method(SessionsBySpeakerOnSpecificDate, SessionForms,
path='session/find_by_speaker_on_specific_date',
http_method='GET',
name='SessionsBySpeakerOnSpecificDate')
def SessionsBySpeakerOnSpecificDate(self, request):
"""Return Conference sessions by Speaker on a specific date."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
sp_lastName = request.lastName
sp_firstName = request.firstName
theDate = datetime.strptime(request.conferenceDate, "%Y-%m-%d").date()
if sp_firstName:
# find by first and last name
speaker = Speaker.query(ndb.AND(
Speaker.lastName == sp_lastName,
Speaker.firstName == sp_firstName))
else:
# find by last name only
speaker = Speaker.query(Speaker.lastName == sp_lastName)
speaker_keys = [sp.key for sp in speaker]
# iterate over each key finding all sessions
all_sessions = []
for sp_k in speaker_keys:
sessions = Session.query(ndb.AND(
Session.speaker == sp_k,
Session.date == theDate))
for s in sessions:
all_sessions.append(s)
# return list of sessions that match each of the speaker_keys
return SessionForms(
items=[self._copySessionToForm(sess) for sess in all_sessions]
)
@endpoints.method(message_types.VoidMessage, SessionForms,
path='session/nonWorkshop_Sessions_Before_7pm',
http_method='GET', name='NonWorkshopSessionsBefore7pm')
def NonWorkshopSessionsBefore7pm(self, request):
"""Return Non-Workshop Sessions Before 7pm."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# user_id = getUserId(user)
theStartTime = datetime.strptime("19:00", "%H:%M").time()
# idea from reading answers from Tim Hoffman
# (http://stackoverflow.com/users/1201324/tim-hoffman)
# and Brent Washburne
# (http://stackoverflow.com/users/584846/brent-washburne)
# specifically Brent's answer here:
# https://stackoverflow.com/questions/33549573/combining-results-of-multiple-ndb-inequality-queries
# create two separate inequality queries and get the keys from each
# then use set.intersection method to get the
# intersection of the two sets
query1 = Session.query(Session.typeOfSession != "Workshop").fetch(
keys_only=True)
query2 = Session.query(Session.startTime < theStartTime).fetch(
keys_only=True)
sessions = ndb.get_multi(set(query1).intersection(query2))
# return set of SessionForm objects per Conference
return SessionForms(
items=[self._copySessionToForm(session) for session in sessions]
)
# - - - Speaker - - - - - - - - - - - - - - - - - - - -
def _copySpeakerToForm(self, speak):
"""Copy relevant fields from Speaker to SpeakerForm."""
sp = SpeakerForm()
for field in sp.all_fields():
if hasattr(speak, field.name):
setattr(sp, field.name, getattr(speak, field.name))
elif field.name == "speakerWebSafeKey":
setattr(sp, field.name, speak.key.urlsafe())
sp.check_initialized()
return sp
def _createSpeakerObject(self, request):
"""Create a Speaker object."""
# preload necessary data items
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
if not request.firstName:
raise endpoints.BadRequestException(
"Speaker 'firstName' field required")
if not request.lastName:
raise endpoints.BadRequestException(
"Speaker 'lastName' field required")
# copy SpeakerForm Message into dict
data = {field.name: getattr(request, field.name) for field
in request.all_fields()}
# generate Profile Key based on user ID and Speaker
# ID based on Profile key get Speaker key from ID
p_key = ndb.Key(Profile, user_id)
speaker_id = Speaker.allocate_ids(size=1, parent=p_key)[0]
speaker_key = ndb.Key(Speaker, speaker_id, parent=p_key)
data['key'] = speaker_key
del data['speakerWebSafeKey']
# creation Speaker entity
Speaker(**data).put()
return request
    @endpoints.method(SpeakerForm, SpeakerForm, path='speaker/create_speaker',
                      http_method='POST', name='createSpeaker')
    def createSpeaker(self, request):
        """Create a new Speaker entity; delegates to _createSpeakerObject.

        Returns the request message unchanged (the helper echoes it back).
        """
        return self._createSpeakerObject(request)
@endpoints.method(message_types.VoidMessage, SpeakerForms,
path='speaker/speakers',
http_method='GET', name='getSpeakersCreated')
def getSpeakersCreated(self, request):
"""Return speakers created by user."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# create ancestor query for all key matches for this user
speakers = Speaker.query(ancestor=ndb.Key(Profile, user_id))
# return set of Speaker objects
return SpeakerForms(
items=[self._copySpeakerToForm(speaker) for speaker in speakers]
)
api = endpoints.api_server([ConferenceApi])  # register the Conference API with the endpoints framework
| |
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import unittest
from django.core.checks import Error, Warning as DjangoWarning
from django.db import connection, models
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.timezone import now
from .base import IsolatedModelsTestCase
class AutoFieldTests(IsolatedModelsTestCase):

    def test_valid_case(self):
        # An explicit AutoField primary key passes all field checks.
        class Model(models.Model):
            id = models.AutoField(primary_key=True)

        self.assertEqual(Model._meta.get_field('id').check(), [])

    def test_primary_key(self):
        # primary_key must be True. Refs #12467.
        class Model(models.Model):
            field = models.AutoField(primary_key=False)
            # Prevent Django from autocreating `id` AutoField, which would
            # result in an error, because a model must have exactly one
            # AutoField.
            another = models.IntegerField(primary_key=True)

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                'AutoFields must set primary_key=True.',
                hint=None,
                obj=field,
                id='fields.E100',
            ),
        ])
class BooleanFieldTests(IsolatedModelsTestCase):

    def test_nullable_boolean_field(self):
        # BooleanField rejects null=True; NullBooleanField is the
        # supported nullable variant.
        class Model(models.Model):
            field = models.BooleanField(null=True)

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                'BooleanFields do not accept null values.',
                hint='Use a NullBooleanField instead.',
                obj=field,
                id='fields.E110',
            ),
        ])
class CharFieldTests(IsolatedModelsTestCase, TestCase):

    def _get_field(self, model):
        # Every test model below names its CharField 'field'.
        return model._meta.get_field('field')

    def _max_length_error(self, field):
        # Shared expectation for every invalid max_length value.
        return Error(
            "'max_length' must be a positive integer.",
            hint=None,
            obj=field,
            id='fields.E121',
        )

    def test_valid_field(self):
        class Model(models.Model):
            field = models.CharField(
                max_length=255,
                choices=[
                    ('1', 'item1'),
                    ('2', 'item2'),
                ],
                db_index=True)

        self.assertEqual(self._get_field(Model).check(), [])

    def test_missing_max_length(self):
        class Model(models.Model):
            field = models.CharField()

        field = self._get_field(Model)
        self.assertEqual(field.check(), [
            Error(
                "CharFields must define a 'max_length' attribute.",
                hint=None,
                obj=field,
                id='fields.E120',
            ),
        ])

    def test_negative_max_length(self):
        class Model(models.Model):
            field = models.CharField(max_length=-1)

        field = self._get_field(Model)
        self.assertEqual(field.check(), [self._max_length_error(field)])

    def test_bad_max_length_value(self):
        class Model(models.Model):
            field = models.CharField(max_length="bad")

        field = self._get_field(Model)
        self.assertEqual(field.check(), [self._max_length_error(field)])

    def test_str_max_length_value(self):
        # A numeric string is still not an integer.
        class Model(models.Model):
            field = models.CharField(max_length='20')

        field = self._get_field(Model)
        self.assertEqual(field.check(), [self._max_length_error(field)])

    def test_non_iterable_choices(self):
        class Model(models.Model):
            field = models.CharField(max_length=10, choices='bad')

        field = self._get_field(Model)
        self.assertEqual(field.check(), [
            Error(
                "'choices' must be an iterable (e.g., a list or tuple).",
                hint=None,
                obj=field,
                id='fields.E004',
            ),
        ])

    def test_choices_containing_non_pairs(self):
        class Model(models.Model):
            field = models.CharField(max_length=10, choices=[(1, 2, 3), (1, 2, 3)])

        field = self._get_field(Model)
        self.assertEqual(field.check(), [
            Error(
                "'choices' must be an iterable containing (actual value, human readable name) tuples.",
                hint=None,
                obj=field,
                id='fields.E005',
            ),
        ])

    def test_bad_db_index_value(self):
        class Model(models.Model):
            field = models.CharField(max_length=10, db_index='bad')

        field = self._get_field(Model)
        self.assertEqual(field.check(), [
            Error(
                "'db_index' must be None, True or False.",
                hint=None,
                obj=field,
                id='fields.E006',
            ),
        ])

    @unittest.skipUnless(connection.vendor == 'mysql',
                         "Test valid only for MySQL")
    def test_too_long_char_field_under_mysql(self):
        from django.db.backends.mysql.validation import DatabaseValidation

        class Model(models.Model):
            field = models.CharField(unique=True, max_length=256)

        field = self._get_field(Model)
        # MySQL-specific validation lives on the backend, not the field.
        validator = DatabaseValidation(connection=None)
        self.assertEqual(validator.check_field(field), [
            Error(
                'MySQL does not allow unique CharFields to have a max_length > 255.',
                hint=None,
                obj=field,
                id='mysql.E001',
            ),
        ])
class DateFieldTests(IsolatedModelsTestCase, TestCase):

    def test_auto_now_and_auto_now_add_raise_error(self):
        class Model(models.Model):
            field0 = models.DateTimeField(auto_now=True, auto_now_add=True, default=now)
            field1 = models.DateTimeField(auto_now=True, auto_now_add=False, default=now)
            field2 = models.DateTimeField(auto_now=False, auto_now_add=True, default=now)
            field3 = models.DateTimeField(auto_now=True, auto_now_add=True, default=None)

        checks = []
        expected = []
        # Any combination of two or more of auto_now / auto_now_add /
        # default must be reported as mutually exclusive.
        for index in range(4):
            field = Model._meta.get_field('field%d' % index)
            checks.extend(field.check())
            expected.append(Error(
                "The options auto_now, auto_now_add, and default "
                "are mutually exclusive. Only one of these options "
                "may be present.",
                hint=None,
                obj=field,
                id='fields.E160',
            ))
        self.assertEqual(checks, expected)

    def test_fix_default_value(self):
        class Model(models.Model):
            field_dt = models.DateField(default=now())
            field_d = models.DateField(default=now().date())
            field_now = models.DateField(default=now)

        def fixed_default_warning(field):
            # W161 fires when the default was evaluated at class-definition
            # time instead of being left as a callable.
            return DjangoWarning(
                'Fixed default value provided.',
                hint='It seems you set a fixed date / time / datetime '
                     'value as default for this field. This may not be '
                     'what you want. If you want to have the current date '
                     'as default, use `django.utils.timezone.now`',
                obj=field,
                id='fields.W161',
            )

        field_dt = Model._meta.get_field('field_dt')
        field_d = Model._meta.get_field('field_d')
        field_now = Model._meta.get_field('field_now')
        found = field_dt.check() + field_d.check() + field_now.check()
        # field_now has a callable default, so it contributes no warning.
        expected = [fixed_default_warning(field_dt),
                    fixed_default_warning(field_d)]
        previous_max_diff = self.maxDiff
        self.maxDiff = None
        self.assertEqual(found, expected)
        self.maxDiff = previous_max_diff

    @override_settings(USE_TZ=True)
    def test_fix_default_value_tz(self):
        # Re-run the same scenario with timezone support enabled.
        self.test_fix_default_value()
class DateTimeFieldTests(IsolatedModelsTestCase, TestCase):

    def test_fix_default_value(self):
        class Model(models.Model):
            field_dt = models.DateTimeField(default=now())
            field_d = models.DateTimeField(default=now().date())
            field_now = models.DateTimeField(default=now)

        def fixed_default_warning(field):
            # W161 fires when the default was evaluated at class-definition
            # time instead of being left as a callable.
            return DjangoWarning(
                'Fixed default value provided.',
                hint='It seems you set a fixed date / time / datetime '
                     'value as default for this field. This may not be '
                     'what you want. If you want to have the current date '
                     'as default, use `django.utils.timezone.now`',
                obj=field,
                id='fields.W161',
            )

        field_dt = Model._meta.get_field('field_dt')
        field_d = Model._meta.get_field('field_d')
        field_now = Model._meta.get_field('field_now')
        found = field_dt.check() + field_d.check() + field_now.check()
        # field_now has a callable default, so it contributes no warning.
        expected = [fixed_default_warning(field_dt),
                    fixed_default_warning(field_d)]
        previous_max_diff = self.maxDiff
        self.maxDiff = None
        self.assertEqual(found, expected)
        self.maxDiff = previous_max_diff

    @override_settings(USE_TZ=True)
    def test_fix_default_value_tz(self):
        # Re-run the same scenario with timezone support enabled.
        self.test_fix_default_value()
class DecimalFieldTests(IsolatedModelsTestCase):

    def _decimal_places_error(self, field):
        # Shared expectation for an invalid decimal_places value.
        return Error(
            "'decimal_places' must be a non-negative integer.",
            hint=None,
            obj=field,
            id='fields.E131',
        )

    def _max_digits_error(self, field):
        # Shared expectation for an invalid max_digits value.
        return Error(
            "'max_digits' must be a positive integer.",
            hint=None,
            obj=field,
            id='fields.E133',
        )

    def test_required_attributes(self):
        class Model(models.Model):
            field = models.DecimalField()

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                "DecimalFields must define a 'decimal_places' attribute.",
                hint=None,
                obj=field,
                id='fields.E130',
            ),
            Error(
                "DecimalFields must define a 'max_digits' attribute.",
                hint=None,
                obj=field,
                id='fields.E132',
            ),
        ])

    def test_negative_max_digits_and_decimal_places(self):
        class Model(models.Model):
            field = models.DecimalField(max_digits=-1, decimal_places=-1)

        field = Model._meta.get_field('field')
        self.assertEqual(
            field.check(),
            [self._decimal_places_error(field), self._max_digits_error(field)])

    def test_bad_values_of_max_digits_and_decimal_places(self):
        class Model(models.Model):
            field = models.DecimalField(max_digits="bad", decimal_places="bad")

        field = Model._meta.get_field('field')
        self.assertEqual(
            field.check(),
            [self._decimal_places_error(field), self._max_digits_error(field)])

    def test_decimal_places_greater_than_max_digits(self):
        class Model(models.Model):
            field = models.DecimalField(max_digits=9, decimal_places=10)

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                "'max_digits' must be greater or equal to 'decimal_places'.",
                hint=None,
                obj=field,
                id='fields.E134',
            ),
        ])

    def test_valid_field(self):
        # max_digits == decimal_places is the boundary of validity.
        class Model(models.Model):
            field = models.DecimalField(max_digits=10, decimal_places=10)

        self.assertEqual(Model._meta.get_field('field').check(), [])
class FileFieldTests(IsolatedModelsTestCase):

    def test_valid_case(self):
        class Model(models.Model):
            field = models.FileField(upload_to='somewhere')

        self.assertEqual(Model._meta.get_field('field').check(), [])

    def test_unique(self):
        # Even unique=False is rejected: the option is meaningless here.
        class Model(models.Model):
            field = models.FileField(unique=False, upload_to='somewhere')

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                "'unique' is not a valid argument for a FileField.",
                hint=None,
                obj=field,
                id='fields.E200',
            )
        ])

    def test_primary_key(self):
        # Even primary_key=False is rejected for the same reason.
        class Model(models.Model):
            field = models.FileField(primary_key=False, upload_to='somewhere')

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                "'primary_key' is not a valid argument for a FileField.",
                hint=None,
                obj=field,
                id='fields.E201',
            )
        ])
class FilePathFieldTests(IsolatedModelsTestCase):

    def test_forbidden_files_and_folders(self):
        # At least one of allow_files / allow_folders must stay True.
        class Model(models.Model):
            field = models.FilePathField(allow_files=False, allow_folders=False)

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.",
                hint=None,
                obj=field,
                id='fields.E140',
            ),
        ])
class GenericIPAddressFieldTests(IsolatedModelsTestCase):

    def test_non_nullable_blank(self):
        # blank=True stores empty values as NULL, which requires null=True.
        class Model(models.Model):
            field = models.GenericIPAddressField(null=False, blank=True)

        field = Model._meta.get_field('field')
        self.assertEqual(field.check(), [
            Error(
                ('GenericIPAddressFields cannot have blank=True if null=False, '
                 'as blank values are stored as nulls.'),
                hint=None,
                obj=field,
                id='fields.E150',
            ),
        ])
class ImageFieldTests(IsolatedModelsTestCase):

    def test_pillow_installed(self):
        try:
            from PIL import Image  # NOQA
        except ImportError:
            pillow_installed = False
        else:
            pillow_installed = True

        class Model(models.Model):
            field = models.ImageField(upload_to='somewhere')

        field = Model._meta.get_field('field')
        # The check only fires when Pillow is absent from the environment.
        if pillow_installed:
            expected = []
        else:
            expected = [
                Error(
                    'Cannot use ImageField because Pillow is not installed.',
                    hint=('Get Pillow at https://pypi.python.org/pypi/Pillow '
                          'or run command "pip install Pillow".'),
                    obj=field,
                    id='fields.E210',
                ),
            ]
        self.assertEqual(field.check(), expected)
class IntegerFieldTests(IsolatedModelsTestCase):

    def test_max_length_warning(self):
        class Model(models.Model):
            value = models.IntegerField(max_length=2)

        value_field = Model._meta.get_field('value')
        # max_length has no effect on integer columns, so a warning (not
        # an error) is raised — and at the model level via Model.check().
        self.assertEqual(Model.check(), [
            DjangoWarning(
                "'max_length' is ignored when used with IntegerField",
                hint="Remove 'max_length' from field",
                obj=value_field,
                id='fields.W122',
            )
        ])
class TimeFieldTests(IsolatedModelsTestCase, TestCase):

    def test_fix_default_value(self):
        """A fixed (pre-evaluated) default on a TimeField triggers fields.W161."""
        class Model(models.Model):
            field_dt = models.TimeField(default=now())
            field_t = models.TimeField(default=now().time())
            # Fixed: was models.DateField (a copy/paste slip in a TimeField
            # test class); a callable default never warns either way, so
            # the expected output below is unchanged.
            field_now = models.TimeField(default=now)

        field_dt = Model._meta.get_field('field_dt')
        field_t = Model._meta.get_field('field_t')
        field_now = Model._meta.get_field('field_now')
        errors = field_dt.check()
        errors.extend(field_t.check())
        errors.extend(field_now.check())  # doesn't raise a warning
        expected = [
            DjangoWarning(
                'Fixed default value provided.',
                hint='It seems you set a fixed date / time / datetime '
                     'value as default for this field. This may not be '
                     'what you want. If you want to have the current date '
                     'as default, use `django.utils.timezone.now`',
                obj=field_dt,
                id='fields.W161',
            ),
            DjangoWarning(
                'Fixed default value provided.',
                hint='It seems you set a fixed date / time / datetime '
                     'value as default for this field. This may not be '
                     'what you want. If you want to have the current date '
                     'as default, use `django.utils.timezone.now`',
                obj=field_t,
                id='fields.W161',
            )
        ]
        # Disable maxDiff so a mismatch prints the full diff, then restore.
        maxDiff = self.maxDiff
        self.maxDiff = None
        self.assertEqual(errors, expected)
        self.maxDiff = maxDiff

    @override_settings(USE_TZ=True)
    def test_fix_default_value_tz(self):
        """Same scenario with USE_TZ=True (timezone-aware values)."""
        self.test_fix_default_value()
| |
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from pybuilder.core import Logger
from pybuilder.errors import MissingTaskDependencyException, CircularTaskDependencyException, NoSuchTaskException, \
MissingActionDependencyException, InvalidNameException, RequiredTaskExclusionException
from pybuilder.execution import as_task_name_list, Action, Executable, ExecutionManager, Task, \
DependenciesNotResolvedException, Initializer, TaskDependency
from test_utils import Mock, ANY, call
class AsTaskNameList(unittest.TestCase):

    def test_should_return_list_of_strings_when_string_given(self):
        # A single name is wrapped in a one-element list.
        self.assertEqual(as_task_name_list("spam"), ["spam"])

    def test_should_return_list_of_strings_when_list_of_strings_given(self):
        self.assertEqual(as_task_name_list(["spam", "eggs"]),
                         ["spam", "eggs"])

    def test_should_return_list_of_strings_when_function_given(self):
        def spam():
            pass

        # Functions are converted to their names.
        self.assertEqual(as_task_name_list(spam), ["spam"])

    def test_should_return_list_of_strings_when_list_of_functions_given(self):
        def spam():
            pass

        def eggs():
            pass

        self.assertEqual(as_task_name_list([spam, eggs]), ["spam", "eggs"])
class ExecutableTest(unittest.TestCase):
    """Tests for Executable; local callables renamed so the builtin
    `callable` is no longer shadowed."""

    def test_should_raise_exception_when_passing_non_function_to_constructor(self):
        self.assertRaises(TypeError, Executable, "callable", "spam")

    def test_should_raise_exception_when_executable_name_is_invalid(self):
        def a_function():
            pass

        self.assertRaises(InvalidNameException, Executable, "a-b", a_function)
        self.assertRaises(InvalidNameException, Executable, "88aa", a_function)
        self.assertRaises(
            InvalidNameException, Executable, "l asd ll", a_function)
        self.assertRaises(InvalidNameException, Executable, "@", a_function)
        self.assertRaises(InvalidNameException, Executable, "$", a_function)
        self.assertRaises(InvalidNameException, Executable, "%", a_function)

    def test_should_execute_callable_without_arguments(self):
        def a_function():
            a_function.called = True

        a_function.called = False
        Executable("callable", a_function).execute({})
        self.assertTrue(a_function.called)

    def test_should_execute_callable_with_single_arguments(self):
        def a_function(spam):
            a_function.called = True
            a_function.spam = spam

        a_function.called = False
        Executable("callable", a_function).execute({"spam": "spam"})
        self.assertTrue(a_function.called)
        self.assertEqual("spam", a_function.spam)

    def test_should_raise_exception_when_callable_argument_cannot_be_satisfied(self):
        def a_function(spam):
            pass

        executable = Executable("callable", a_function)
        # No "spam" entry in the argument dict -> unsatisfiable parameter.
        self.assertRaises(ValueError, executable.execute, {})
class ActionTest(unittest.TestCase):

    def test_should_initialize_fields(self):
        # Renamed from `callable` to avoid shadowing the builtin.
        def a_function():
            pass

        action = Action("callable", a_function, "before", "after", "description")
        # Scalar before/after task names are normalized into lists.
        self.assertEqual(["before"], action.execute_before)
        self.assertEqual(["after"], action.execute_after)
        self.assertEqual("description", action.description)
class TaskTest(unittest.TestCase):
    """Tests for Task; local callables renamed so the builtin `callable`
    is no longer shadowed."""

    def test_should_sort_tasks_by_name(self):
        task_a = Task("a_name", lambda: None, "dependency", "description")
        task_b = Task("b_name", lambda: None, "dependency", "description")
        task_list = [task_b, task_a]
        self.assertEqual(["a_name", "b_name"], [
            task.name for task in sorted(task_list)])

    def test_should_initialize_fields(self):
        def a_function():
            pass

        task = Task("callable", a_function, "dependency", "description")
        # Scalar dependency/description values are normalized into lists.
        self.assertEqual(["dependency"], task.dependencies)
        self.assertEqual(["description"], task.description)

    def test_should_execute_callable_without_arguments(self):
        def a_function():
            a_function.called = True

        a_function.called = False
        Task("callable", a_function).execute(Mock(), {})
        self.assertTrue(a_function.called)

    def test_should_execute_callable_with_single_arguments(self):
        def a_function(spam):
            a_function.called = True
            a_function.spam = spam

        a_function.called = False
        Task("callable", a_function).execute(Mock(), {"spam": "spam"})
        self.assertTrue(a_function.called)
        self.assertEqual("spam", a_function.spam)

    def test_should_raise_exception_when_callable_argument_cannot_be_satisfied(self):
        def a_function(spam):
            pass

        task = Task("callable", a_function)
        # No "spam" entry in the argument dict -> unsatisfiable parameter.
        self.assertRaises(ValueError, task.execute, Mock(), {})
class TaskExtensionTest(unittest.TestCase):

    def test_should_extend_task_with_values_from_other_task(self):
        def base_callable():
            pass

        def extra_callable(param):
            pass

        task = Task("task", base_callable, "dependency", "description")
        replacement = Task("replacement", extra_callable,
                           "another_dependency", "replacement description")
        task.extend(replacement)

        # The original name wins; dependencies and descriptions merge.
        self.assertEqual("task", task.name)
        self.assertEqual(
            ["dependency", "another_dependency"], task.dependencies)
        self.assertEqual(
            ["description", "replacement description"], task.description)

    def test_should_execute_both_callables_when_extending_task(self):
        def first():
            first.called = True

        first.called = False

        def second(param):
            second.called = True

        second.called = False

        task_one = Task("task", first)
        task_two = Task("task", second)
        task_one.extend(task_two)
        task_one.execute(Mock(), {"param": "spam"})

        # Both the original and the extending callable must run.
        self.assertTrue(first.called)
        self.assertTrue(second.called)
class InitializerTest(unittest.TestCase):
    """Tests for Initializer.is_applicable environment matching.

    The shared fixture function is named `noop` rather than `callable`
    so the builtin is no longer shadowed.  (The "initialzer" label typo
    is kept: it is only a name string and irrelevant to the assertions.)
    """

    def setUp(self):
        def noop():
            pass

        self.noop = noop

    def test_should_return_true_when_invoking_is_applicable_without_environment_and_initializer_does_not_define_environments(
            self):
        initializer = Initializer("initialzer", self.noop)
        self.assertTrue(initializer.is_applicable())

    def test_should_return_true_when_invoking_is_applicable_with_environment_and_initializer_does_not_define_environments(
            self):
        initializer = Initializer("initialzer", self.noop)
        self.assertTrue(initializer.is_applicable("any_environment"))

    def test_should_return_true_when_invoking_is_applicable_with_environment_and_initializer_defines_environment(self):
        initializer = Initializer(
            "initialzer", self.noop, "any_environment")
        self.assertTrue(initializer.is_applicable("any_environment"))

    def test_should_return_true_when_invoking_is_applicable_with_environments_and_initializer_defines_environment(self):
        initializer = Initializer(
            "initialzer", self.noop, "any_environment")
        self.assertTrue(initializer.is_applicable(
            ["any_environment", "any_other_environment"]))

    def test_should_return_false_when_invoking_is_applicable_with_environment_and_initializer_defines_environment(self):
        initializer = Initializer(
            "initialzer", self.noop, "any_environment")
        self.assertFalse(initializer.is_applicable("any_other_environment"))

    def test_should_return_false_when_invoking_is_applicable_without_environment_and_initializer_defines_environment(
            self):
        initializer = Initializer(
            "initialzer", self.noop, "any_environment")
        self.assertFalse(initializer.is_applicable())

    def test_should_return_true_when_invoking_is_applicable_with_environment_and_initializer_defines_multiple_environments(
            self):
        initializer = Initializer(
            "initialzer", self.noop, ["any_environment", "any_other_environment"])
        self.assertTrue(initializer.is_applicable(["any_environment"]))
class ExecutionManagerTestBase(unittest.TestCase):
    # Shared fixture: every ExecutionManager* test case below starts from
    # a fresh ExecutionManager wired to a mocked Logger.
    def setUp(self):
        self.execution_manager = ExecutionManager(Mock(Logger))
class ExecutionManagerInitializerTest(ExecutionManagerTestBase):

    def test_ensure_that_initializer_is_added_when_calling_register_initializer(self):
        initializer = Mock()
        self.execution_manager.register_initializer(initializer)
        self.assertEqual(self.execution_manager.initializers, [initializer])

    def test_ensure_that_registered_initializers_are_executed_when_calling_execute_initializers(self):
        first = Mock()
        first.is_applicable.return_value = True
        self.execution_manager.register_initializer(first)
        second = Mock()
        second.is_applicable.return_value = True
        self.execution_manager.register_initializer(second)

        self.execution_manager.execute_initializers(a=1)

        # Both applicable initializers receive the keyword arguments.
        first.execute.assert_called_with({"a": 1})
        second.execute.assert_called_with({"a": 1})

    def test_ensure_that_registered_initializers_are_not_executed_when_environments_do_not_match(self):
        initializer = Mock()
        initializer.is_applicable.return_value = False
        self.execution_manager.register_initializer(initializer)

        environments = []
        self.execution_manager.execute_initializers(environments, a=1)

        # The environment filter was consulted, but nothing ran.
        initializer.is_applicable.assert_called_with(environments)
        initializer.execute.assert_not_called()
class ExecutionManagerTaskTest(ExecutionManagerTestBase):
    # Covers task registration/replacement, execution with keyword
    # arguments, before/after action ordering, and teardown-action
    # semantics when either the task or a regular after-action raises.
    def test_ensure_task_is_added_when_calling_register_task(self):
        task = Mock()
        self.execution_manager.register_task(task)
        self.assertEqual([task], self.execution_manager.tasks)
    def test_ensure_task_is_replaced_when_registering_two_tasks_with_same_name(self):
        # A second task with the same name must be merged into the first
        # via Task.extend rather than registered as a duplicate.
        original = Mock(name="spam")
        replacement = Mock(name="spam")
        self.execution_manager.register_task(original)
        self.execution_manager.register_task(replacement)
        original.extend.assert_called_with(replacement)
    def test_should_raise_exception_when_calling_execute_task_before_resolve_dependencies(self):
        self.assertRaises(DependenciesNotResolvedException,
                          self.execution_manager.execute_task,
                          Mock())
    def test_ensure_task_is_executed_when_calling_execute_task(self):
        task = Mock(name="spam", dependencies=[])
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        self.execution_manager.execute_task(task, a=1)
        # Keyword arguments are forwarded as the task's argument dict.
        task.execute.assert_called_with(ANY, {"a": 1})
    def test_ensure_before_action_is_executed_when_task_is_executed(self):
        task = Mock(name="task", dependencies=[])
        action = Mock(name="action", execute_before=["task"], execute_after=[])
        self.execution_manager.register_action(action)
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        self.execution_manager.execute_task(task)
        action.execute.assert_called_with({})
        task.execute.assert_called_with(ANY, {})
    def test_ensure_after_action_is_executed_when_task_is_executed(self):
        task = Mock(name="task", dependencies=[])
        action = Mock(name="action", execute_before=[], execute_after=["task"])
        self.execution_manager.register_action(action)
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        self.execution_manager.execute_task(task)
        action.execute.assert_called_with({})
        task.execute.assert_called_with(ANY, {})
    def test_ensure_after_action_teardown_is_executed_when_task_fails(self):
        # A teardown after-action must still run when the task raises,
        # and the task's original exception must propagate unchanged.
        task = Mock(name="task", dependencies=[])
        task.execute.side_effect = ValueError("simulated task error")
        action = Mock(name="action", execute_before=[], execute_after=["task"], teardown=True)
        self.execution_manager.register_action(action)
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        try:
            self.execution_manager.execute_task(task)
            self.assertTrue(False, "should not have reached here")
        except Exception as e:
            self.assertEqual(type(e), ValueError)
            self.assertEqual(str(e), "simulated task error")
        action.execute.assert_called_with({})
        task.execute.assert_called_with(ANY, {})
    def test_ensure_after_action_teardown_is_executed_when_action_fails(self):
        # When a regular (non-teardown) after-action fails, teardown
        # actions still run but later regular actions must not.
        task = Mock(name="task", dependencies=[])
        action_regular = Mock(name="action_regular", execute_before=[], execute_after=["task"], teardown=False)
        action_regular.name = "action_regular"
        action_regular.execute.side_effect = ValueError("simulated action error")
        action_teardown = Mock(name="action_teardown", execute_before=[], execute_after=["task"], teardown=True)
        action_after_teardown = Mock(name="action_after_teardown", execute_before=[], execute_after=["task"],
                                     teardown=False)
        self.execution_manager.register_action(action_regular)
        self.execution_manager.register_action(action_teardown)
        self.execution_manager.register_action(action_after_teardown)
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        try:
            self.execution_manager.execute_task(task)
            self.assertTrue(False, "should not have reached here")
        except Exception as e:
            self.assertEqual(type(e), ValueError)
            self.assertEqual(str(e), "simulated action error")
        task.execute.assert_called_with(ANY, {})
        action_regular.execute.assert_called_with({})
        action_teardown.execute.assert_called_with({})
        action_after_teardown.execute.assert_not_called()
    def test_ensure_after_action_teardown_suppression_works_when_action_fails(self):
        # NOTE(review): this body is byte-identical to the previous test,
        # so it does not actually exercise error *suppression* —
        # presumably a copy/paste that was meant to make a teardown
        # action raise. Confirm intent before consolidating.
        task = Mock(name="task", dependencies=[])
        action_regular = Mock(name="action_regular", execute_before=[], execute_after=["task"], teardown=False)
        action_regular.name = "action_regular"
        action_regular.execute.side_effect = ValueError("simulated action error")
        action_teardown = Mock(name="action_teardown", execute_before=[], execute_after=["task"], teardown=True)
        action_after_teardown = Mock(name="action_after_teardown", execute_before=[], execute_after=["task"],
                                     teardown=False)
        self.execution_manager.register_action(action_regular)
        self.execution_manager.register_action(action_teardown)
        self.execution_manager.register_action(action_after_teardown)
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        try:
            self.execution_manager.execute_task(task)
            self.assertTrue(False, "should not have reached here")
        except Exception as e:
            self.assertEqual(type(e), ValueError)
            self.assertEqual(str(e), "simulated action error")
        task.execute.assert_called_with(ANY, {})
        action_regular.execute.assert_called_with({})
        action_teardown.execute.assert_called_with({})
        action_after_teardown.execute.assert_not_called()
    def test_ensure_after_action_teardown_is_executed_and_suppresses(self):
        # A failing teardown action is logged and suppressed so the
        # task's original error is the one that propagates; remaining
        # teardown actions still run.
        task = Mock(name="task", dependencies=[])
        task.execute.side_effect = ValueError("simulated task error")
        action_teardown1 = Mock(name="action_teardown1", execute_before=[], execute_after=["task"], teardown=True,
                                source="task")
        action_teardown1.execute.side_effect = ValueError("simulated action error teardown1")
        action_teardown2 = Mock(name="action_teardown2", execute_before=[], execute_after=["task"], teardown=True,
                                source="task")
        self.execution_manager.register_action(action_teardown1)
        self.execution_manager.register_action(action_teardown2)
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        try:
            self.execution_manager.execute_task(task)
            self.assertTrue(False, "should not have reached here")
        except Exception as e:
            self.assertEqual(type(e), ValueError)
            self.assertEqual(str(e), "simulated task error")
        task.execute.assert_called_with(ANY, {})
        action_teardown1.execute.assert_called_with({})
        action_teardown2.execute.assert_called_with({})
        self.execution_manager.logger.error.assert_called_with(
            "Executing action '%s' from '%s' resulted in an error that was suppressed:\n%s", "action_teardown1",
            "task", ANY)
    def test_should_return_single_task_name(self):
        self.execution_manager.register_task(Mock(name="spam"))
        self.assertEqual(["spam"], self.execution_manager.task_names)
    def test_should_return_all_task_names(self):
        # task_names is expected to come back sorted alphabetically.
        self.execution_manager.register_task(
            Mock(name="spam"), Mock(name="eggs"))
        self.assertEqual(["eggs", "spam"], self.execution_manager.task_names)
class ExecutionManagerActionTest(ExecutionManagerTestBase):

    def test_ensure_action_is_registered(self):
        action = Mock(name="action")
        self.execution_manager.register_action(action)
        self.assertEqual(self.execution_manager._actions, {"action": action})

    def test_ensure_action_registered_for_two_tasks_is_executed_two_times(self):
        spam = Mock(name="spam", dependencies=[])
        eggs = Mock(name="eggs", dependencies=[])
        self.execution_manager.register_task(spam, eggs)

        # only_once=False: the after-action fires once per finished task.
        action = Mock(name="action",
                      execute_before=[],
                      execute_after=["spam", "eggs"],
                      only_once=False)
        self.execution_manager.register_action(action)
        self.execution_manager.resolve_dependencies()

        self.execution_manager.execute_execution_plan([spam, eggs])

        action.execute.assert_has_calls([call(ANY), call(ANY)])

    def test_ensure_action_registered_for_two_tasks_is_executed_only_once_if_single_attribute_is_present(self):
        spam = Mock(name="spam", dependencies=[])
        eggs = Mock(name="eggs", dependencies=[])
        self.execution_manager.register_task(spam, eggs)

        # only_once=True: the action runs a single time for the whole plan.
        action = Mock(name="action",
                      execute_before=[],
                      execute_after=["spam", "eggs"],
                      only_once=True)
        self.execution_manager.register_action(action)
        self.execution_manager.resolve_dependencies()

        self.execution_manager.execute_execution_plan([spam, eggs])

        action.execute.assert_called_with(ANY)
class ExecutionManagerResolveDependenciesTest(ExecutionManagerTestBase):
    """Tests for ExecutionManager.resolve_dependencies().

    Covers resolution with zero/one/many tasks, missing-dependency errors
    for both tasks and actions, de-duplication of repeated dependency
    declarations (with "required" winning over "optional"), and late
    (post-registration) dependencies.
    """
    def test_ensure_that_dependencies_are_resolved_when_no_task_is_given(self):
        self.execution_manager.resolve_dependencies()
        self.assertTrue(self.execution_manager._dependencies_resolved)
    def test_ensure_that_dependencies_are_resolved_when_single_task_is_given(self):
        task = Mock(dependencies=[])
        self.execution_manager.register_task(task)
        self.execution_manager.resolve_dependencies()
        self.assertTrue(self.execution_manager._dependencies_resolved)
    def test_should_raise_exception_when_task_depends_on_task_not_found(self):
        task = Mock(dependencies=[TaskDependency("not_found")])
        self.execution_manager.register_task(task)
        self.assertRaises(MissingTaskDependencyException,
                          self.execution_manager.resolve_dependencies)
    def test_should_raise_exception_when_before_action_depends_on_task_not_found(self):
        action = Mock(execute_before=["not_found"], execute_after=[])
        self.execution_manager.register_action(action)
        self.assertRaises(MissingActionDependencyException,
                          self.execution_manager.resolve_dependencies)
    def test_should_raise_exception_when_after_action_depends_on_task_not_found(self):
        action = Mock(execute_before=[], execute_after=["not_found"])
        self.execution_manager.register_action(action)
        self.assertRaises(MissingActionDependencyException,
                          self.execution_manager.resolve_dependencies)
    def test_ensure_that_dependencies_are_resolved_when_simple_dependency_is_found(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        # After resolution the string reference "one" has been replaced by a
        # TaskDependency wrapping the task object itself.
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one)], self.execution_manager._task_dependencies.get("two"))
    def test_ensure_that_dependencies_are_resolved_when_task_depends_on_multiple_tasks(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("one"), TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one)], self.execution_manager._task_dependencies.get("two"))
        self.assertEqual(
            [TaskDependency(one), TaskDependency(two)], self.execution_manager._task_dependencies.get("three"))
    def test_override_optional_dependency_with_required(self):
        # Duplicate declarations on the same target: required supersedes
        # an earlier optional one.
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one", True), TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one)], self.execution_manager._task_dependencies.get("two"))
    def test_ignore_required_override_with_optional_dependency(self):
        # An optional duplicate following a required one is ignored.
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one"), TaskDependency("one", True)])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one)], self.execution_manager._task_dependencies.get("two"))
    def test_ignore_second_optional_dependency(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one", True), TaskDependency("one", True)])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one, True)], self.execution_manager._task_dependencies.get("two"))
    def test_ignore_second_required_dependency(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one"), TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one)], self.execution_manager._task_dependencies.get("two"))
    def test_verify_late_dependency(self):
        # Dependencies may also be injected after registration via
        # register_late_task_dependencies and are merged during resolve.
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.register_task(three)
        self.execution_manager.register_late_task_dependencies({"two": [TaskDependency("three")]})
        self.execution_manager.resolve_dependencies()
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one), TaskDependency(three)],
                         self.execution_manager._task_dependencies.get("two"))
        self.assertEqual([TaskDependency(one)],
                         self.execution_manager._task_dependencies.get("three"))
    def test_verify_duplicate_late_dependency_removed(self):
        # Duplicate late declarations collapse to a single dependency.
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("one"), TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.register_task(three)
        self.execution_manager.register_late_task_dependencies(
            {"two": [TaskDependency("three"), TaskDependency("three")]})
        self.execution_manager.resolve_dependencies()
        self.assertEqual([], self.execution_manager._task_dependencies.get("one"))
        self.assertEqual([TaskDependency(one), TaskDependency(three)],
                         self.execution_manager._task_dependencies.get("two"))
        self.assertEqual([TaskDependency(one)],
                         self.execution_manager._task_dependencies.get("three"))
    def test_verify_error_unresolved_late_dependency(self):
        # A late dependency keyed on an unregistered task name is rejected.
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.register_task(three)
        self.execution_manager.register_late_task_dependencies(
            {"four": [TaskDependency("three")]})
        self.assertRaises(NoSuchTaskException, self.execution_manager.resolve_dependencies)
class ExecutionManagerBuildExecutionPlanTest(ExecutionManagerTestBase):
    """Tests for building execution plans.

    Covers transitive task collection, plan ordering (dependencies before
    dependents), de-duplication, circular-dependency detection, "shortest"
    plans that skip already-executed tasks, and exclusion of optional or
    force-excluded tasks/branches.
    """
    def test_should_collect_all_tasks_when_there_are_no_dependencies(self):
        one = Mock(name="one", dependencies=[])
        self.execution_manager.register_task(one)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(self.execution_manager.collect_all_transitive_tasks(["one"]), set([one]))
    def test_should_collect_all_tasks_when_there_is_a_simple_dependency(self):
        one = Mock(name="one", dependencies=[TaskDependency("two")])
        two = Mock(name="two", dependencies=[])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(self.execution_manager.collect_all_transitive_tasks(["one"]), set([one, two]))
    def test_should_collect_all_tasks_when_there_is_a_transitive_dependency(self):
        one = Mock(name="one", dependencies=[TaskDependency("two")])
        two = Mock(name="two", dependencies=[TaskDependency("three")])
        three = Mock(name="three", dependencies=[])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(self.execution_manager.collect_all_transitive_tasks(["one"]), set([one, two, three]))
    def test_should_collect_all_tasks_when_several_tasks_given(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("three")])
        three = Mock(name="three", dependencies=[])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(self.execution_manager.collect_all_transitive_tasks(["one", "two"]), set([one, two, three]))
    def test_should_only_collect_required_tasks(self):
        # "two" is registered but unreachable from "one" and must be omitted.
        one = Mock(name="one", dependencies=[TaskDependency("three")])
        two = Mock(name="two", dependencies=[])
        three = Mock(name="three", dependencies=[])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(self.execution_manager.collect_all_transitive_tasks(["one"]), set([one, three]))
    def test_should_raise_exception_when_building_execution_plan_and_dependencies_are_not_resolved(self):
        self.assertRaises(DependenciesNotResolvedException,
                          self.execution_manager.build_execution_plan, ("boom",))
    def test_should_raise_exception_when_building_execution_plan_for_task_not_found(self):
        self.execution_manager.resolve_dependencies()
        self.assertRaises(
            NoSuchTaskException, self.execution_manager.build_execution_plan, ("boom",))
    def test_should_return_execution_plan_with_single_task_when_single_task_is_to_be_executed(self):
        one = Mock(name="one", dependencies=[])
        self.execution_manager.register_task(one)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(
            [one], self.execution_manager.build_execution_plan(["one"]))
    def test_should_return_execution_plan_with_two_tasks_when_two_tasks_are_to_be_executed(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(
            [one, two], self.execution_manager.build_execution_plan(["one", "two"]))
    def test_ensure_that_dependencies_are_executed_before_root_task(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        # Although only "two" is requested, "one" precedes it in the plan.
        self.assertEqual(
            [one, two], self.execution_manager.build_execution_plan(["two"]))
    def test_ensure_that_tasks_are_not_executed_multiple_times(self):
        one = Mock(name="one", dependencies=[], )
        self.execution_manager.register_task(one)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(
            [one], self.execution_manager.build_execution_plan(["one", "one"]))
    def test_ensure_that_tasks_are_not_executed_multiple_times_when_being_dependencies(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertEqual(
            [one, two], self.execution_manager.build_execution_plan(["one", "two"]))
    def test_should_raise_exception_when_circular_reference_is_detected_on_single_task(self):
        # A task depending on itself is the smallest possible cycle.
        one = Mock(name="one", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one)
        self.execution_manager.resolve_dependencies()
        self.assertRaises(CircularTaskDependencyException,
                          self.execution_manager.build_execution_plan, ["one"])
    def test_should_raise_exception_when_circular_reference_is_detected_on_two_tasks(self):
        one = Mock(name="one", dependencies=[TaskDependency("two")])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies()
        self.assertRaises(CircularTaskDependencyException,
                          self.execution_manager.build_execution_plan, ["one"])
    def test_should_raise_exception_when_circular_reference_is_detected_on_three_tasks(self):
        one = Mock(name="one", dependencies=[TaskDependency("three")])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("one"), TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.assertRaises(CircularTaskDependencyException,
                          self.execution_manager.build_execution_plan, ["one"])
    def test_should_raise_exception_when_circular_reference_is_detected_on_indirect_required_tasks(self):
        # The cycle (two <-> three) is not on the requested task itself but
        # must still be detected.
        one = Mock(name="one", dependencies=[TaskDependency("two")])
        two = Mock(name="two", dependencies=[TaskDependency("three")])
        three = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.assertRaises(CircularTaskDependencyException,
                          self.execution_manager.build_execution_plan, ["one"])
    def test_shortest_execution_plan_is_shortest(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.assertEqual([one, two], self.execution_manager.build_execution_plan("two"))
        # Simulate that one and two already ran: the shortest plan for
        # "three" should contain only "three".
        self.execution_manager._tasks_executed.append(one)
        self.execution_manager._tasks_executed.append(two)
        self.assertEqual([three], self.execution_manager.build_shortest_execution_plan("three"))
    def test_shortest_execution_plan_always_executes_target(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.execution_manager._tasks_executed.append(one)
        self.execution_manager._tasks_executed.append(two)
        self.assertEqual([three], self.execution_manager.build_shortest_execution_plan("three"))
        # Even when the target itself already ran, it is re-planned.
        self.execution_manager._tasks_executed.append(three)
        self.assertEqual([three], self.execution_manager.build_shortest_execution_plan("three"))
    def test_shortest_execution_plan_checks_circularity(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.execution_manager._tasks_executed.append(one)
        # Planning "three" while "two" is the currently running task would
        # re-enter the running task -> circular error.
        self.execution_manager._current_task = two
        self.assertRaises(CircularTaskDependencyException, self.execution_manager.build_shortest_execution_plan,
                          ["three"])
    def test_shortest_execution_plan_reruns_on_demand(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies()
        self.execution_manager._tasks_executed.append(one)
        self.execution_manager._tasks_executed.append(two)
        # Explicitly requested tasks are re-run even if already executed.
        self.assertEqual([two, three], self.execution_manager.build_shortest_execution_plan(("two", "three")))
        self.assertEqual([two, three], self.execution_manager.build_shortest_execution_plan(("three", "two")))
        self.assertEqual([one, two, three], self.execution_manager.build_shortest_execution_plan(("three", "one")))
    def test_ensure_that_optional_tasks_are_excluded(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one", True)])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies(exclude_optional_tasks=["one"])
        execution_plan = self.execution_manager.build_execution_plan("two")
        self.assertEqual([two], execution_plan)
    def test_ensure_that_optional_branch_is_excluded(self):
        # Excluding "two" (an optional dependency of "three") drops its
        # whole branch, including "one".
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("two", True)])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies(exclude_optional_tasks=["two"])
        execution_plan = self.execution_manager.build_execution_plan("three")
        self.assertEqual([three], execution_plan)
    def test_ensure_that_required_tasks_are_force_excluded(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies(exclude_tasks=["one"])
        execution_plan = self.execution_manager.build_execution_plan("two")
        self.assertEqual([two], execution_plan)
    def test_ensure_that_required_branch_is_force_excluded(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies(exclude_tasks=["two"])
        execution_plan = self.execution_manager.build_execution_plan("three")
        self.assertEqual([three], execution_plan)
    def test_ensure_that_required_tasks_are_not_optionally_excluded(self):
        # Optionally excluding a *required* dependency is an error.
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        self.execution_manager.register_task(one, two)
        self.execution_manager.resolve_dependencies(exclude_optional_tasks=["one"])
        self.assertRaises(RequiredTaskExclusionException, self.execution_manager.build_execution_plan, "two")
    def test_ensure_that_required_tasks_branch_not_optionally_excluded(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies(exclude_optional_tasks=["one"])
        self.assertRaises(RequiredTaskExclusionException, self.execution_manager.build_execution_plan, "three")
    def test_is_task_in_current_execution_plan(self):
        one = Mock(name="one", dependencies=[])
        two = Mock(name="two", dependencies=[TaskDependency("one")])
        three = Mock(name="three", dependencies=[TaskDependency("one"), TaskDependency("two")])
        self.execution_manager.register_task(one, two, three)
        self.execution_manager.resolve_dependencies(exclude_all_optional=True)
        self.execution_manager._current_execution_plan = self.execution_manager.build_execution_plan("three")
        self.assertTrue(self.execution_manager.is_task_in_current_execution_plan("three"))
        self.assertTrue(self.execution_manager.is_task_in_current_execution_plan("two"))
        self.assertTrue(self.execution_manager.is_task_in_current_execution_plan("one"))
        self.assertFalse(self.execution_manager.is_task_in_current_execution_plan("four"))
class ExecutionManagerExecuteExecutionPlanTest(ExecutionManagerTestBase):
    """Tests covering execution of a previously built plan."""
    def test_should_raise_exception_when_dependencies_are_not_resolved(self):
        # Executing before resolve_dependencies() must fail loudly.
        self.assertRaises(DependenciesNotResolvedException,
                          self.execution_manager.execute_execution_plan, ["boom"])
    def test_ensure_tasks_are_executed(self):
        first = Mock(name="one", dependencies=[])
        second = Mock(name="two", dependencies=[])
        self.execution_manager.register_task(first, second)
        self.execution_manager.resolve_dependencies()
        self.execution_manager.execute_execution_plan([first, second])
        first.execute.assert_called_with(ANY, {})
        second.execute.assert_called_with(ANY, {})
    def test_shortest_execution_plan_executed(self):
        first = Mock(name="one", dependencies=[])
        second = Mock(name="two", dependencies=[TaskDependency("one")])
        third = Mock(name="three", dependencies=[TaskDependency("two")])
        self.execution_manager.register_task(first, second, third)
        self.execution_manager.resolve_dependencies()
        # Full plan up to "two": "three" must not have run yet.
        self.execution_manager.execute_execution_plan(self.execution_manager.build_execution_plan("two"))
        first.execute.assert_has_calls([call(ANY, {})])
        second.execute.assert_has_calls([call(ANY, {})])
        third.execute.assert_not_called()
        # Shortest plan to "three" reuses the tasks that already ran.
        self.execution_manager.execute_execution_plan(self.execution_manager.build_shortest_execution_plan("three"))
        first.execute.assert_has_calls([call(ANY, {})])
        second.execute.assert_has_calls([call(ANY, {})])
        third.execute.assert_has_calls([call(ANY, {})])
        # Requesting "three" again re-runs only the target itself.
        self.execution_manager.execute_execution_plan(self.execution_manager.build_shortest_execution_plan("three"))
        first.execute.assert_has_calls([call(ANY, {})])
        second.execute.assert_has_calls([call(ANY, {})])
        third.execute.assert_has_calls([call(ANY, {}), call(ANY, {})])
| |
# Copyright 2014-2015 Zuercher Hochschule fuer Angewandte Wissenschaften
# Copyright (c) 2013-2015, Intel Performance Learning Solutions Ltd, Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__author__ = 'andy'
from distutils import dir_util
import json
from mako.template import Template
import os
import random
import shutil
import tempfile
from threading import Thread
from urlparse import urlparse
import uuid
from occi.core_model import Resource, Link
from sm.config import CONFIG
from sm.log import LOG
from sm.retry_http import http_retriable_request
# Scheme prefix used when building SO URLs from a bare host.
HTTP = 'http://'
# Wait time and retry count for calls against the cloud controller.
# NOTE(review): CONFIG.get is called with a third positional argument that
# looks like a default value - this is the project's own config wrapper,
# not stdlib ConfigParser; confirm the fallback semantics there.
WAIT = int(CONFIG.get('cloud_controller', 'wait_time', 2000))
ATTEMPTS = int(CONFIG.get('cloud_controller', 'max_attempts', 5))
class ServiceParameters():
    """
    Holds per-lifecycle-phase service parameters loaded from the JSON file
    named by the service_manager/service_params config entry, and merges in
    parameters supplied by the client at instantiation time.
    """

    def __init__(self):
        # Maps lifecycle state name -> list of {'name','value','type'} dicts;
        # the special key 'client_params' holds client-supplied parameters.
        self.service_params = {}
        service_params_file_path = CONFIG.get('service_manager', 'service_params', '')
        if len(service_params_file_path) > 0:
            try:
                # 'with' closes the file; the explicit close() the original
                # code performed inside the block was redundant.
                with open(service_params_file_path) as svc_params_content:
                    self.service_params = json.load(svc_params_content)
            except ValueError:
                LOG.error("Invalid JSON sent as service config file")
            except IOError:
                LOG.error('Cannot find the specified parameters file: ' + service_params_file_path)
        else:
            LOG.warn("No service parameters file found in config file, setting internal params to empty.")

    def service_parameters(self, state='', content_type='text/occi'):
        """
        Render the parameters for *state* plus any client-supplied
        parameters as an X-OCCI-Attribute header fragment
        (e.g. 'a=1, b="x"').

        Returns the rendered string ('' when there are no parameters), or
        None when *content_type* is unsupported (an error is logged).
        """
        if content_type != 'text/occi':
            LOG.error('Content type not supported: ' + content_type)
            return None
        # Bug fix: the original aliased self.service_params[state] and then
        # appended the client params to it, permanently growing the stored
        # per-state list on every call. Work on a copy instead.
        params = list(self.service_params.get(state, []))
        if state not in self.service_params:
            LOG.warn('The requested states parameters are not available: "' + state + '"')
        if 'client_params' in self.service_params:
            params.extend(self.service_params['client_params'])
        else:
            LOG.info('No client params')
        parts = []
        for param in params:
            if param['type'] == 'string':
                value = '"' + param['value'] + '"'
            else:
                value = str(param['value'])
            parts.append(param['name'] + '=' + value)
        return ', '.join(parts)

    def add_client_params(self, params=None):
        """
        Record user-supplied parameters from the instantiation request of a
        service. Values wrapped in single or double quotes are stored as
        strings with the quotes stripped; everything else is typed 'number'.
        """
        # Bug fix: the original used a mutable default ({}), which is shared
        # across calls; use None as the sentinel instead.
        if params is None:
            params = {}
        client_params = []
        for k, v in params.items():
            param_type = 'number'
            if (v.startswith('"') or v.startswith('\'')) and (v.endswith('"') or v.endswith('\'')):
                param_type = 'string'
                v = v[1:-1]
            client_params.append({'name': k, 'value': v, 'type': param_type})
        self.service_params['client_params'] = client_params
if __name__ == '__main__':
    # Tiny manual smoke test: feed a numeric and a quoted (string) client
    # parameter through ServiceParameters and show the rendered
    # X-OCCI-Attribute fragment for the 'initialise' phase.
    sp = ServiceParameters()
    cp = {
        'test': '1',
        'test.test': '"astring"'
    }
    sp.add_client_params(cp)
    p = sp.service_parameters('initialise')
    # Bug fix: use print as a function - the bare Python-2 print statement
    # is a syntax error on Python 3; the parenthesised form works on both.
    print(p)
class AsychExe(Thread):
    """
    Background thread whose only purpose is to run the given tasks one after
    another, optionally pushing each task's resulting entity back into the
    registry.
    """

    def __init__(self, tasks, registry=None):
        super(AsychExe, self).__init__()
        self.registry = registry
        self.tasks = tasks

    def run(self):
        super(AsychExe, self).run()
        LOG.debug('Starting AsychExe thread')
        for pending in self.tasks:
            entity, extras = pending.run()
            if not self.registry:
                continue
            LOG.debug('Updating entity in registry')
            self.registry.add_resource(key=entity.identifier, resource=entity, extras=extras)
# XXX push common functionality here
class Task():
    """
    Base class for the service-orchestration lifecycle tasks (InitSO,
    ActivateSO, ...). Subclasses implement run(), which performs the
    lifecycle step and returns the (possibly updated) entity and extras.
    """

    def __init__(self, entity, extras, state):
        self.entity = entity    # the OCCI resource this task operates on
        self.extras = extras    # request context (token, tenant, srv_prms, ...)
        self.state = state      # lifecycle phase name, e.g. 'initialise'

    def run(self):
        # Bug fix: the original raised NotImplemented(), but NotImplemented
        # is a non-callable singleton, so calling it produced a TypeError
        # instead of signalling "abstract method" as intended.
        raise NotImplementedError()
class InitSO(Task):
    """
    Lifecycle task 'initialise': create a container ("app") on the cloud
    controller that will later host the Service Orchestrator (SO) bundle,
    ensure the service manager's SSH key is registered, and record the git
    repository URI the bundle must be pushed to.
    """
    def __init__(self, entity, extras):
        Task.__init__(self, entity, extras, state='initialise')
        # Northbound API endpoint of the cloud controller, trailing slash
        # stripped so paths can be concatenated below.
        self.nburl = CONFIG.get('cloud_controller', 'nb_api', '')
        if self.nburl[-1] == '/':
            self.nburl = self.nburl[0:-1]
        LOG.info('CloudController Northbound API: ' + self.nburl)
        if len(entity.attributes) > 0:
            LOG.info('Client supplied parameters: ' + entity.attributes.__repr__())
            #TODO check that these parameters are valid according to the kind specification
            self.extras['srv_prms'].add_client_params(entity.attributes)
        else:
            LOG.warn('No client supplied parameters.')
    def run(self):
        # Mark the lifecycle state, ensure the SSH key, then create the SO
        # container and remember its git repository URI in entity.extras.
        self.entity.attributes['mcn.service.state'] = 'initialise'
        LOG.debug('Ensuring SM SSH Key...')
        self.__ensure_ssh_key()
        # create an app for the new SO instance
        LOG.debug('Creating SO container...')
        if not self.entity.extras:
            self.entity.extras = {}
        self.entity.extras['repo_uri'] = self.__create_app()
        return self.entity, self.extras
    def __create_app(self):
        """Create the container app via the northbound API and return the
        git URI the SO bundle must be pushed to. Also rewrites the entity's
        identifier and occi.core.id to the newly assigned app path."""
        # name must be A-Za-z0-9 and <=32 chars
        app_name = self.entity.kind.term[0:4] + 'srvinst' + ''.join(random.choice('0123456789ABCDEF') for i in range(16))
        heads = {
            'Content-Type': 'text/occi',
            'Category': 'app; scheme="http://schemas.ogf.org/occi/platform#", '
                        'python-2.7; scheme="http://schemas.openshift.com/template/app#", '
                        'small; scheme="http://schemas.openshift.com/template/app#"',
            'X-OCCI-Attribute': 'occi.app.name=' + app_name
        }
        url = self.nburl + '/app/'
        LOG.debug('Requesting container to execute SO Bundle: ' + url)
        LOG.info('Sending headers: ' + heads.__repr__())
        r = http_retriable_request('POST', url, headers=heads, authenticate=True)
        loc = r.headers.get('Location', '')
        if loc == '':
            raise AttributeError("No OCCI Location attribute found in request")
        app_uri_path = urlparse(loc).path
        LOG.debug('SO container created: ' + app_uri_path)
        LOG.debug('Updating OCCI entity.identifier from: ' + self.entity.identifier + ' to: '
                  + app_uri_path.replace('/app/', self.entity.kind.location))
        self.entity.identifier = app_uri_path.replace('/app/', self.entity.kind.location)
        LOG.debug('Setting occi.core.id to: ' + app_uri_path.replace('/app/', ''))
        self.entity.attributes['occi.core.id'] = app_uri_path.replace('/app/', '')
        # get git uri. this is where our bundle is pushed to
        return self.__git_uri(app_uri_path)
    def __git_uri(self, app_uri_path):
        """Fetch the app's occi.app.repo attribute (its git URI) from the
        northbound API; raises AttributeError when it is absent."""
        url = self.nburl + app_uri_path
        headers = {'Accept': 'text/occi'}
        LOG.debug('Requesting container\'s git URL ' + url)
        LOG.info('Sending headers: ' + headers.__repr__())
        r = http_retriable_request('GET', url, headers=headers, authenticate=True)
        attrs = r.headers.get('X-OCCI-Attribute', '')
        if attrs == '':
            raise AttributeError("No occi attributes found in request")
        repo_uri = ''
        for attr in attrs.split(', '):
            if attr.find('occi.app.repo') != -1:
                repo_uri = attr.split('=')[1][1:-1]  # scrubs trailing wrapped quotes
                break
        if repo_uri == '':
            raise AttributeError("No occi.app.repo attribute found in request")
        LOG.debug('SO container repository: ' + repo_uri)
        return repo_uri
    def __ensure_ssh_key(self):
        """Register the service manager's public SSH key with the cloud
        controller if no key is registered yet."""
        url = self.nburl + '/public_key/'
        heads = {'Accept': 'text/occi'}
        resp = http_retriable_request('GET', url, headers=heads, authenticate=True)
        locs = resp.headers.get('x-occi-location', '')
        # Split on spaces, test if there is at least one key registered
        # NOTE(review): len(...) < 1 only matches an *empty* location list;
        # confirm a key can never be registered more than once elsewhere.
        if len(locs.split()) < 1:
            LOG.debug('No SM SSH registered. Registering default SM SSH key.')
            occi_key_name, occi_key_content = self.__extract_public_key()
            create_key_headers = {'Content-Type': 'text/occi',
                                  'Category': 'public_key; scheme="http://schemas.ogf.org/occi/security/credentials#"',
                                  'X-OCCI-Attribute':'occi.key.name="' + occi_key_name + '", occi.key.content="' +
                                                     occi_key_content + '"'
                                  }
            http_retriable_request('POST', url, headers=create_key_headers, authenticate=True)
        else:
            LOG.debug('Valid SM SSH is registered with OpenShift.')
    def __extract_public_key(self):
        """Read the RSA public key file named by the ssh_key_location config
        entry and return (key_name, key_content). The name defaults to
        'servicemanager' when the file has no trailing comment field."""
        ssh_key_file = CONFIG.get('service_manager', 'ssh_key_location', '')
        if ssh_key_file == '':
            raise Exception('No ssh_key_location parameter supplied in sm.cfg')
        LOG.debug('Using SSH key file: ' + ssh_key_file)
        with open(ssh_key_file, 'r') as content_file:
            content = content_file.read()
        content = content.split()
        if content[0] == 'ssh-dsa':
            raise Exception("The supplied key is not a RSA ssh key. Location: " + ssh_key_file)
        key_content = content[1]
        key_name = 'servicemanager'
        if len(content) == 3:
            key_name = content[2]
        return key_name, key_content
class ActivateSO(Task):
    """
    Lifecycle task 'activate': push the local SO bundle to the container's
    git repository (which triggers OpenShift's build/deploy hooks) and then
    ask the freshly started SO to initialise itself.
    """
    def __init__(self, entity, extras):
        Task.__init__(self, entity, extras, state='activate')
        self.repo_uri = self.entity.extras['repo_uri']
        # NOTE(review): assumes repo_uri is of the form scheme://user@host/...;
        # an URI without '@' would raise IndexError here - confirm upstream.
        self.host = urlparse(self.repo_uri).netloc.split('@')[1]
        # os.system returns the command's exit status; non-zero means the
        # git binary is not on PATH.
        if os.system('which git') != 0:
            raise EnvironmentError('Git is not available.')
    def run(self):
        # get the code of the bundle and push it to the git facilities
        # offered by OpenShift
        LOG.debug('Deploying SO Bundle to: ' + self.repo_uri)
        self.__deploy_app()
        LOG.debug('Activating the SO...')
        self.__init_so()
        self.entity.attributes['mcn.service.state'] = 'activate'
        return self.entity, self.extras
    def __deploy_app(self):
        """
        Deploy the local SO bundle
        assumption here
        - a git repo is returned
        - the bundle is not managed by git
        """
        # create temp dir...and clone the remote repo provided by OpS
        dir = tempfile.mkdtemp()
        LOG.debug('Cloning git repository: ' + self.repo_uri + ' to: ' + dir)
        cmd = ' '.join(['git', 'clone', self.repo_uri, dir])
        os.system(cmd)
        # Get the SO bundle
        bundle_loc = CONFIG.get('service_manager', 'bundle_location', '')
        if bundle_loc == '':
            raise Exception('No bundle_location parameter supplied in sm.cfg')
        LOG.debug('Bundle to add to repo: ' + bundle_loc)
        dir_util.copy_tree(bundle_loc, dir)
        self.__add_openshift_files(bundle_loc, dir)
        # add & push to OpenShift
        os.system(' '.join(['cd', dir, '&&', 'git', 'add', '-A']))
        os.system(' '.join(['cd', dir, '&&', 'git', 'commit', '-m', '"deployment of SO for tenant ' + \
                            self.extras['tenant_name'] + '"', '-a']))
        LOG.debug('Pushing new code to remote repository...')
        os.system(' '.join(['cd', dir, '&&', 'git', 'push']))
        # best-effort local cleanup of the temporary clone
        shutil.rmtree(dir)
    def __add_openshift_files(self, bundle_loc, dir):
        """Copy the OpenShift action hooks ('build' and a rendered
        'pre_start_python') from the bundle's support directory into the
        cloned repo and make all hooks executable."""
        # put OpenShift stuff in place
        # build and pre_start_python comes from 'support' directory in bundle
        LOG.debug('Adding OpenShift support files from: ' + bundle_loc + '/support')
        # TODO generate these files automatically - no need for end-users to manage them
        # 1. Write build
        LOG.debug('Writing build to: ' + os.path.join(dir, '.openshift', 'action_hooks', 'build'))
        shutil.copyfile(bundle_loc+'/support/build', os.path.join(dir, '.openshift', 'action_hooks', 'build'))
        # 1. Write pre_start_python
        LOG.debug('Writing pre_start_python to: ' + os.path.join(dir, '.openshift', 'action_hooks', 'pre_start_python'))
        # pre_start_python is a Mako template parameterised with the design
        # (CDB) URI from the config file.
        pre_start_template = Template(filename=bundle_loc+'/support/pre_start_python')
        design_uri = CONFIG.get('service_manager', 'design_uri', '')
        content = pre_start_template.render(design_uri=design_uri)
        LOG.debug('Writing pre_start_python content as: ' + content)
        pre_start_file = open(os.path.join(dir, '.openshift', 'action_hooks', 'pre_start_python'), "w")
        pre_start_file.write(content)
        pre_start_file.close()
        os.system(' '.join(['chmod', '+x', os.path.join(dir, '.openshift', 'action_hooks', '*')]))
    # example request to the SO
    # curl -v -X PUT http://localhost:8051/orchestrator/default \
    #   -H 'Content-Type: text/occi' \
    #   -H 'Category: orchestrator; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"' \
    #   -H 'X-Auth-Token: '$KID \
    #   -H 'X-Tenant-Name: '$TENANT
    def __init_so(self):
        """PUT against the SO's orchestrator endpoint to trigger its
        initialisation, forwarding auth token, tenant and any
        state-specific service parameters."""
        url = HTTP + self.host + '/orchestrator/default'
        heads = {
            'Category': 'orchestrator; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"',
            'Content-Type': 'text/occi',
            'X-Auth-Token': self.extras['token'],
            'X-Tenant-Name': self.extras['tenant_name'],
        }
        occi_attrs = self.extras['srv_prms'].service_parameters(self.state)
        if len(occi_attrs) > 0:
            LOG.info('Adding service-specific parameters to call... X-OCCI-Attribute: ' + occi_attrs)
            heads['X-OCCI-Attribute'] = occi_attrs
        LOG.debug('Initialising SO with: ' + url)
        LOG.info('Sending headers: ' + heads.__repr__())
        http_retriable_request('PUT', url, headers=heads)
class DeploySO(Task):
    """Task that triggers deployment of the uploaded SO bundle.

    Equivalent curl:
      curl -v -X POST http://<host>/orchestrator/default?action=deploy \
        -H 'Content-Type: text/occi' \
        -H 'Category: deploy; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"' \
        -H 'X-Auth-Token: '$KID -H 'X-Tenant-Name: '$TENANT
    """

    def __init__(self, entity, extras):
        Task.__init__(self, entity, extras, state='deploy')
        self.repo_uri = self.entity.extras['repo_uri']
        # host part of the repo URI, after the user@ prefix
        self.host = urlparse(self.repo_uri).netloc.split('@')[1]

    def run(self):
        """POST ?action=deploy to the SO and mark the entity as deployed."""
        # Deployment is done without any control by the client...
        # otherwise we won't be able to hand back a working service!
        LOG.debug('Deploying the SO bundle...')
        endpoint = HTTP + self.host + '/orchestrator/default'
        headers = {
            'Category': 'deploy; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"',
            'Content-Type': 'text/occi',
            'X-Auth-Token': self.extras['token'],
            'X-Tenant-Name': self.extras['tenant_name'],
        }
        occi_attrs = self.extras['srv_prms'].service_parameters(self.state)
        if len(occi_attrs) > 0:
            LOG.info('Adding service-specific parameters to call... X-OCCI-Attribute:' + occi_attrs)
            headers['X-OCCI-Attribute'] = occi_attrs
        LOG.debug('Deploying SO with: ' + endpoint)
        LOG.info('Sending headers: ' + headers.__repr__())
        http_retriable_request('POST', endpoint, headers=headers, params={'action': 'deploy'})
        self.entity.attributes['mcn.service.state'] = 'deploy'
        LOG.debug('SO Deployed ')
        return self.entity, self.extras
# TODO this can only be executed when heat has completed!!!!
# TODO that this is not an issue
# XXX workaround: the logic of checking state can be covered in the SO
class ProvisionSO(Task):
    """Task that triggers provisioning of the deployed SO instance."""

    # TODO this can only be executed when heat has completed!!!!
    # XXX workaround: the logic of checking state can be covered in the SO

    def __init__(self, entity, extras):
        Task.__init__(self, entity, extras, state='provision')
        self.repo_uri = self.entity.extras['repo_uri']
        # host part of the repo URI, after the user@ prefix
        self.host = urlparse(self.repo_uri).netloc.split('@')[1]

    def run(self):
        """POST ?action=provision to the SO and mark the entity accordingly."""
        endpoint = HTTP + self.host + '/orchestrator/default'
        headers = {
            'Category': 'provision; scheme="http://schemas.mobile-cloud-networking.eu/occi/service#"',
            'Content-Type': 'text/occi',
            'X-Auth-Token': self.extras['token'],
            'X-Tenant-Name': self.extras['tenant_name'],
        }
        occi_attrs = self.extras['srv_prms'].service_parameters(self.state)
        if len(occi_attrs) > 0:
            LOG.info('Adding service-specific parameters to call... X-OCCI-Attribute: ' + occi_attrs)
            headers['X-OCCI-Attribute'] = occi_attrs
        LOG.debug('Provisioning SO with: ' + endpoint)
        LOG.info('Sending headers: ' + headers.__repr__())
        http_retriable_request('POST', endpoint, headers=headers, params={'action': 'provision'})
        self.entity.attributes['mcn.service.state'] = 'provision'
        return self.entity, self.extras
class RetrieveSO(Task):
    """Task that GETs the SO instance's state and mirrors its OCCI attributes
    (and any composed service-instance endpoints) onto the local entity."""

    def __init__(self, entity, extras):
        Task.__init__(self, entity, extras, 'retrieve')
        repo_uri = self.entity.extras['repo_uri']
        # host part of the repo URI, after the user@ prefix
        self.host = urlparse(repo_uri).netloc.split('@')[1]
        self.registry = self.extras['registry']

    def run(self):
        """Fetch and mirror the SO's attributes; link composed instances."""
        # example request to the SO
        # curl -v -X GET http://localhost:8051/orchestrator/default \
        #   -H 'X-Auth-Token: '$KID \
        #   -H 'X-Tenant-Name: '$TENANT
        if self.entity.attributes['mcn.service.state'] in ['activate', 'deploy', 'provision', 'update']:
            heads = {
                'Content-Type': 'text/occi',
                'Accept': 'text/occi',
                'X-Auth-Token': self.extras['token'],
                'X-Tenant-Name': self.extras['tenant_name']}
            LOG.info('Getting state of service orchestrator with: ' + self.host + '/orchestrator/default')
            LOG.info('Sending headers: ' + heads.__repr__())
            r = http_retriable_request('GET', HTTP + self.host + '/orchestrator/default', headers=heads)
            # response attributes come back as 'key=value, key=value, ...'
            attrs = r.headers['x-occi-attribute'].split(', ')
            for attr in attrs:
                # NOTE(review): split('=') breaks values that themselves
                # contain '=' -- split('=', 1) would be safer; confirm the
                # SO never emits such values.
                kv = attr.split('=')
                if kv[0] != 'occi.core.id':
                    if kv[1].startswith('"') and kv[1].endswith('"'):
                        kv[1] = kv[1][1:-1]  # scrub off quotes
                    self.entity.attributes[kv[0]] = kv[1]
                    LOG.debug('OCCI Attribute: ' + kv[0] + ' --> ' + kv[1])
            # Assemble the SIG
            svcinsts = ''
            try:
                svcinsts = self.entity.attributes['mcn.so.svcinsts']
                del self.entity.attributes['mcn.so.svcinsts']  # remove this, not be be used anywhere else
            except KeyError:
                LOG.warn('There was no service instance endpoints - ignore if not a composition.')
                pass
            if self.registry is None:
                LOG.error('No registry!')
            if len(svcinsts) > 0:
                svcinsts = svcinsts.split()  # all instance EPs
                for svc_loc in svcinsts:
                    # TODO get the service instance resource representation
                    # source resource is self.entity
                    compos = svc_loc.split('/')
                    # presumably '/<kind>/<uuid>' of the target instance URL -- verify
                    key = '/' + compos[3] + '/' + compos[4]
                    target = Resource(key, Resource.kind, [])  # target resource
                    target.attributes['mcn.sm.endpoint'] = svc_loc
                    self.registry.add_resource(key, target, None)
                    # link the source entity to the composed instance
                    key = '/link/'+str(uuid.uuid4())
                    link = Link(key, Link.kind, [], self.entity, target)
                    self.registry.add_resource(key, link, None)
                    self.entity.links.append(link)
        else:
            LOG.debug('Cannot GET entity as it is not in the activated, deployed or provisioned, updated state')
        return self.entity, self.extras
class UpdateSO(Task):
    """Task that pushes updated attributes from the EEU down to the SO.

    Equivalent curl:
      curl -v -X POST http://<host>/orchestrator/default \
        -H 'Content-Type: text/occi' \
        -H 'X-Auth-Token: '$KID -H 'X-Tenant-Name: '$TENANT \
        -H 'X-OCCI-Attribute: occi.epc.attr_1="foo"'
    """

    def __init__(self, entity, extras, updated_entity):
        Task.__init__(self, entity, extras, state='update')
        self.repo_uri = self.entity.extras['repo_uri']
        # host part of the repo URI, after the user@ prefix
        self.host = urlparse(self.repo_uri).netloc.split('@')[1]
        # entity carrying the attributes the client wants to change
        self.new = updated_entity

    def run(self):
        """POST service-specific plus updated attributes to the SO instance."""
        url = HTTP + self.host + '/orchestrator/default'
        heads = {
            'Content-Type': 'text/occi',
            'X-Auth-Token': self.extras['token'],
            'X-Tenant-Name': self.extras['tenant_name']}
        occi_attrs = self.extras['srv_prms'].service_parameters(self.state)
        if len(occi_attrs) > 0:
            LOG.info('Adding service-specific parameters to call... X-OCCI-Attribute:' + occi_attrs)
            heads['X-OCCI-Attribute'] = occi_attrs
        if len(self.new.attributes) > 0:
            LOG.info('Adding updated parameters... X-OCCI-Attribute: ' + self.new.attributes.__repr__())
            # BUG FIX: the old string concatenation always prefixed ', ',
            # producing a header value with a spurious leading comma whenever
            # occi_attrs was empty. Join the pairs instead.
            pairs = [occi_attrs] if len(occi_attrs) > 0 else []
            for k, v in self.new.attributes.items():
                pairs.append(k + '=' + v)
                self.entity.attributes[k] = v
            occi_attrs = ', '.join(pairs)
            heads['X-OCCI-Attribute'] = occi_attrs
        LOG.debug('Provisioning SO with: ' + url)
        LOG.info('Sending headers: ' + heads.__repr__())
        http_retriable_request('POST', url, headers=heads)
        self.entity.attributes['mcn.service.state'] = 'update'
        return self.entity, self.extras
class DestroySO(Task):
    """Task that disposes the SO instance and then the container hosting it."""

    def __init__(self, entity, extras):
        Task.__init__(self, entity, extras, state='destroy')
        # north-bound API of the cloud controller, used to delete the container
        self.nburl = CONFIG.get('cloud_controller', 'nb_api', '')
        repo_uri = self.entity.extras['repo_uri']
        # host part of the repo URI, after the user@ prefix
        self.host = urlparse(repo_uri).netloc.split('@')[1]

    def run(self):
        """DELETE the SO instance, then DELETE its container via the CC."""
        # 1. dispose the active SO, essentially kills the STG/ITG
        # 2. dispose the resources used to run the SO
        # example request to the SO
        # curl -v -X DELETE http://localhost:8051/orchestrator/default \
        #   -H 'X-Auth-Token: '$KID \
        #   -H 'X-Tenant-Name: '$TENANT
        url = HTTP + self.host + '/orchestrator/default'
        heads = {'X-Auth-Token': self.extras['token'],
                 'X-Tenant-Name': self.extras['tenant_name']}
        occi_attrs = self.extras['srv_prms'].service_parameters(self.state)
        if len(occi_attrs) > 0:
            LOG.info('Adding service-specific parameters to call... X-OCCI-Attribute:' + occi_attrs)
            heads['X-OCCI-Attribute'] = occi_attrs
        LOG.info('Disposing service orchestrator with: ' + url)
        LOG.info('Sending headers: ' + heads.__repr__())
        http_retriable_request('DELETE', url, headers=heads)
        # map the entity's identifier onto the CC's /app/ namespace
        url = self.nburl + self.entity.identifier.replace('/' + self.entity.kind.term + '/', '/app/')
        heads = {'Content-Type': 'text/occi',
                 'X-Auth-Token': self.extras['token'],
                 'X-Tenant-Name': self.extras['tenant_name']}
        LOG.info('Disposing service orchestrator container via CC... ' + url)
        LOG.info('Sending headers: ' + heads.__repr__())
        http_retriable_request('DELETE', url, headers=heads, authenticate=True)
        return self.entity, self.extras
| |
# Copyright 2012, Google Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import errno
import logging
import re
import time
from net import gorpc
from vtdb import tablet2
from vtdb import dbexceptions
# NOTE(msolomon) this sketchy import allows upstream code to mostly interpret
# our exceptions as if they came from MySQLdb. Good for a cutover, probably
# bad long-term.
import MySQLdb as MySQLErrors
_errno_pattern = re.compile('\(errno (\d+)\)')
# NOTE(msolomon) This mapping helps us mimic the behavior of mysql errors
# even though the relationship between connections and failures is now quite
# different. In general, we map vtocc errors to DatabaseError, unless there
# is a pressing reason to be more precise. Otherwise, these errors can get
# misinterpreted futher up the call chain.
_mysql_error_map = {
    # duplicate key -> IntegrityError so upstream dup-key handling still works
    1062: MySQLErrors.IntegrityError,
}

# Errors fall into three classes based on recovery strategy.
#
# APP_LEVEL is for routine programmer errors (bad input etc) -- nothing can be
# done here, so just propagate the error upstream.
#
# RETRY means a simple reconnect (and immediate) reconnect to the same
# host will likely fix things. This is usually due vtocc restarting. In general
# this can be handled transparently unless the error is within a transaction.
#
# FATAL indicates that retrying an action on the host is likely to fail.
ERROR_APP_LEVEL = 'app_level'
ERROR_RETRY = 'retry'
ERROR_FATAL = 'fatal'

# seconds to sleep before redialing after a retryable failure
RECONNECT_DELAY = 0.002
# simple class to trap and re-export only variables referenced from the sql
# statement. bind dictionaries can be *very* noisy.
# this is by-product of converting the mysql %(name)s syntax to vtocc :name
class BindVarsProxy(object):
    """Mapping used with ``sql % proxy`` that rewrites ``%(name)s`` to
    ``:name`` while remembering which keys the statement actually used."""

    def __init__(self, bind_vars):
        self.bind_vars = bind_vars
        self.accessed_keys = set()

    def __getitem__(self, name):
        # Probe the underlying dict first so an unknown bind var still
        # raises KeyError, then record the access.
        self.bind_vars[name]
        self.accessed_keys.add(name)
        return ':{0}'.format(name)

    def export_bind_vars(self):
        """Return only the bind vars that the statement referenced."""
        return {key: self.bind_vars[key] for key in self.accessed_keys}
# Provide compatibility with the MySQLdb query param style and prune bind_vars
class VtOCCConnection(tablet2.TabletConnection):
    """Tablet connection that maps vtocc/gorpc failures onto MySQLdb-style
    exceptions and transparently redials once on retryable errors."""

    # one initial attempt plus at most one retry after a redial
    max_attempts = 2

    def dial(self):
        """Connect and obtain a vtocc session id for self.dbname."""
        tablet2.TabletConnection.dial(self)
        try:
            response = self.client.call('OccManager.GetSessionId', self.dbname)
            self.set_session_id(response.reply)
        except gorpc.GoRpcError, e:
            raise dbexceptions.OperationalError(*e.args)

    def _convert_error(self, exception, *error_hints):
        """Classify a failure and convert it to a MySQLdb exception.

        Returns (error_type, converted_exception) for retryable/app-level
        errors; raises MySQLErrors.OperationalError(2003, ...) directly for
        fatal ones. error_hints are appended to the exception args.
        """
        message = str(exception[0]).lower()
        # NOTE(msolomon) extract a mysql error code so we can push this up the code
        # stack. At this point, this is almost exclusively for handling integrity
        # errors from duplicate key inserts.
        match = _errno_pattern.search(message)
        if match:
            err = int(match.group(1))
        elif isinstance(exception[0], IOError):
            err = exception[0].errno
        else:
            err = -1
        if message.startswith('fatal'):
            # Force this error code upstream so MySQL code understands this as a
            # permanent failure on this host. Feels a little dirty, but probably the
            # most consistent way since this correctly communicates the recovery
            # strategy upstream.
            raise MySQLErrors.OperationalError(2003, str(exception), self.addr,
                                               *error_hints)
        elif message.startswith('retry'):
            # Retry means that a trivial redial of this host will fix things. This
            # is frequently due to vtocc being restarted independently of the mysql
            # instance behind it.
            error_type = ERROR_RETRY
        elif 'curl error 7' in message:
            # Client side error - sometimes the listener is unavailable for a few
            # milliseconds during a restart.
            error_type = ERROR_RETRY
        elif err in (errno.ECONNREFUSED, errno.EPIPE):
            error_type = ERROR_RETRY
        else:
            # Everything else is app level - just process the failure and continue
            # to use the existing connection.
            error_type = ERROR_APP_LEVEL
        if error_type == ERROR_RETRY and self.transaction_id:
            # With a transaction, you cannot retry, so just redial. The next action
            # will be successful. Masquerade as commands-out-of-sync - an operational
            # error that can be reattempted at the app level.
            error_type = ERROR_APP_LEVEL
            error_hints += ('cannot retry action within a transaction',)
            try:
                time.sleep(RECONNECT_DELAY)
                self.dial()
            except Exception, e:
                # If this fails now, the code will retry later as the session_id
                # won't be valid until the handshake finishes.
                logging.warning('error dialing vtocc %s (%s)', self.addr, e)
        exc_class = _mysql_error_map.get(err, MySQLErrors.DatabaseError)
        return error_type, exc_class(err, str(exception), self.addr,
                                     *error_hints)

    def begin(self):
        """Start a transaction, redialing once on a retryable failure."""
        attempt = 0
        while True:
            try:
                return tablet2.TabletConnection.begin(self)
            except dbexceptions.OperationalError, e:
                error_type, e = self._convert_error(e, 'begin')
                if error_type == ERROR_RETRY:
                    attempt += 1
                    if attempt < self.max_attempts:
                        # best-effort redial before the retry; failure here is
                        # logged and the begin is retried anyway
                        try:
                            time.sleep(RECONNECT_DELAY)
                            self.dial()
                        except dbexceptions.OperationalError, dial_error:
                            logging.warning('error dialing vtocc on begin %s (%s)',
                                            self.addr, dial_error)
                        continue
                    logging.warning('Failing with 2003 on begin')
                    raise MySQLErrors.OperationalError(2003, str(e), self.addr, 'begin')
                raise e

    def commit(self):
        """Commit the current transaction; no retry is attempted."""
        try:
            return tablet2.TabletConnection.commit(self)
        except dbexceptions.OperationalError, e:
            error_type, e = self._convert_error(e, 'commit')
            raise e

    def _execute(self, sql, bind_variables):
        """Execute sql after converting %(name)s params to :name style,
        pruning unused bind vars and retrying once on retryable errors."""
        bind_vars_proxy = BindVarsProxy(bind_variables)
        try:
            # convert bind style from %(name)s to :name
            sql = sql % bind_vars_proxy
        except KeyError, e:
            raise dbexceptions.InterfaceError(e[0], sql, bind_variables)
        # only the bind vars the statement actually referenced
        sane_bind_vars = bind_vars_proxy.export_bind_vars()
        attempt = 0
        while True:
            try:
                return tablet2.TabletConnection._execute(self, sql, sane_bind_vars)
            except dbexceptions.OperationalError, e:
                error_type, e = self._convert_error(e, sql, sane_bind_vars)
                if error_type == ERROR_RETRY:
                    attempt += 1
                    if attempt < self.max_attempts:
                        try:
                            time.sleep(RECONNECT_DELAY)
                            self.dial()
                        except dbexceptions.OperationalError, dial_error:
                            logging.warning('error dialing vtocc on execute %s (%s)',
                                            self.addr, dial_error)
                        continue
                    logging.warning('Failing with 2003 on %s: %s, %s', str(e), sql, sane_bind_vars)
                    raise MySQLErrors.OperationalError(2003, str(e), self.addr, sql, sane_bind_vars)
                raise e
def connect(addr, timeout, dbname=None):
    """Dial a vtocc server and return a ready-to-use VtOCCConnection."""
    connection = VtOCCConnection(addr, dbname, timeout)
    connection.dial()
    return connection
| |
import pprint
import argparse
import sys
import os
import json
import errno
import zk_utils
import cmd_pred
# Zookeeper path layout shared by this sub-command.
gdata = {
    'all_clients_node_path': "/all_clients",
}

# Config keys used to wire up an external (microservice) recommender.
CONFIG_MICROSERVICE_URL="io.seldon.algorithm.external.url"
CONFIG_MICROSERVICE_NAME="io.seldon.algorithm.external.name"
EXTERNAL_RECOMMENDER="externalItemRecommendationAlgorithm"
def pp(o):
    """Pretty-print o to stdout with a 4-space indent."""
    pprint.PrettyPrinter(indent=4).pprint(o)
def getOpts(args):
    """Parse the rec_alg sub-command arguments; returns the options namespace."""
    cli = argparse.ArgumentParser(prog='seldon-cli rec_alg', description='Seldon Cli')
    cli.add_argument('--action', help="the action to use", required=True, choices=['list','add','delete','show','commit','create'])
    cli.add_argument('--alg-type', help="type of algorithm", required=False, choices=['recommendation','prediction'], default='recommendation')
    cli.add_argument('--client-name', help="the name of the client", required=False)
    cli.add_argument('--recommender-name', help="the name of recommender", required=False)
    cli.add_argument('--config', help="algorithm specific config in the form x=y", required=False, action='append')
    cli.add_argument('-f','--json-file', help="the json file to use for creating algs or '-' for stdin", required=False)
    # anything left over (non-options) is collected here
    cli.add_argument('args', nargs=argparse.REMAINDER)
    return cli.parse_args(args)
def mkdir_p(path):
    """Create path and any missing parents; an existing directory is fine."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        # re-raise anything that is not "directory already exists"
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
def dict_to_json(d, expand=False):
    """Serialise d to JSON with sorted keys; expand=True pretty-prints,
    otherwise the output is fully compact."""
    if expand:
        return json.dumps(d, sort_keys=True, indent=4, separators=(',', ': '))
    return json.dumps(d, sort_keys=True, separators=(',', ':'))
def json_to_dict(json_data):
    """Parse a JSON string into the corresponding Python object."""
    parsed = json.loads(json_data)
    return parsed
def ensure_client_has_algs(zk_client):
    # NOTE(review): dead code. This one-argument definition references the
    # undefined globals client_name, zkroot and data_fpath (it would raise
    # NameError if called) and is shadowed by the three-argument
    # ensure_client_has_algs defined later in this module. Kept byte-for-byte;
    # consider deleting it.
    node_path = gdata["all_clients_node_path"]+"/"+client_name+"/algs"
    if zk_client.exists(node_path):
        write_node_value_to_file(zk_client, zkroot, node_path)
    else:
        default_alg_json = '{"algorithms":[{"config":[],"filters":[],"includers":[],"name":"recentItemsRecommender"}],"combiner":"firstSuccessfulCombiner"}'
        data = json_to_dict(default_alg_json)
        write_data_to_file(data_fpath, data)
        zk_utils.node_set(zk_client, node_path, dict_to_json(data))
def write_data_to_file(data_fpath, data):
    """Write data to data_fpath (dict -> pretty JSON, anything else -> str),
    creating parent directories as needed; a trailing newline is appended."""
    # NOTE(review): the local name 'json' shadows the imported json module
    # for the remainder of this function body.
    json = dict_to_json(data, True) if isinstance(data,dict) else str(data)
    mkdir_p(os.path.dirname(data_fpath))
    f = open(data_fpath,'w')
    f.write(json)
    f.write('\n')
    f.close()
    print "Writing data to file[{data_fpath}]".format(**locals())
def get_data_from_file(data_fpath):
    """Return the entire contents of data_fpath, or '' if it is not a file."""
    if not os.path.isfile(data_fpath):
        return ""
    fh = open(data_fpath)
    try:
        return fh.read()
    finally:
        fh.close()
def write_node_value_to_file(zk_client, zkroot, node_path):
    """Mirror a zookeeper node's value into <zkroot><node_path>/_data_.

    JSON-looking values are parsed to a dict before writing (so the file gets
    the pretty-printed form); anything else is written as a plain string.
    """
    raw = zk_utils.node_get(zk_client, node_path)
    raw = raw.strip()
    if zk_utils.is_json_data(raw):
        data = json_to_dict(raw) if raw != None and len(raw) > 0 else ""
    else:
        data = str(raw)
    write_data_to_file(zkroot + node_path + "/_data_", data)
def is_existing_client(zkroot, client_name):
    """Return True if client_name has a directory under the mirrored
    all-clients node below zkroot."""
    # Direct membership test; the old if/else returning True/False literals
    # was redundant.
    return client_name in os.listdir(zkroot + gdata["all_clients_node_path"])
def add_model_activate(zkroot, client_name, activate_path):
    """Append client_name to the comma-separated activation list stored in
    <zkroot><activate_path>/_data_, unless it is already present."""
    node_fpath = zkroot + activate_path + "/_data_"
    existing = get_data_from_file(node_fpath).rstrip()
    clients = existing.split(',') if len(existing) > 0 else []
    if client_name in clients:
        # already activated for this client - nothing to do
        return
    clients.append(client_name)
    write_data_to_file(node_fpath, ",".join(clients))
def remove_model_activate(zkroot,client_name,activate_path):
    """Remove client_name from the comma-separated activation list stored in
    <zkroot><activate_path>/_data_ and rewrite the file."""
    node_fpath = zkroot + activate_path + "/_data_"
    data = get_data_from_file(node_fpath).rstrip()
    print "data is ",data
    if len(data)>0:
        clients = data.split(',')
        for client in clients:
            print "looking at ",client
            # NOTE(review): removing from 'clients' while iterating it skips
            # the element that follows each removal; harmless only while the
            # list contains no duplicates.
            if client == client_name:
                clients.remove(client)
        data = ",".join(clients)
        write_data_to_file(node_fpath,data)
def show_algs(data):
    """Print an algs/alg_rectags structure as expanded, sorted JSON."""
    # NOTE(review): the local 'json' shadows the imported json module here.
    json = dict_to_json(data, True)
    print json
def ensure_client_has_algs(zkroot, zk_client, client_name):
    """Guarantee a local algs/_data_ mirror exists for client_name.

    If the file is missing, pull it from zookeeper when the node exists;
    otherwise seed both the file and zookeeper with the default algorithm.
    """
    data_fpath = "{zkroot}{all_clients_node_path}/{client_name}/algs/_data_".format(
        zkroot=zkroot,
        all_clients_node_path=gdata["all_clients_node_path"],
        client_name=client_name)
    if os.path.isfile(data_fpath):
        return
    node_path = gdata["all_clients_node_path"] + "/" + client_name + "/algs"
    if zk_client.exists(node_path):
        write_node_value_to_file(zk_client, zkroot, node_path)
        return
    default_alg_json = '{"algorithms":[{"config":[],"filters":[],"includers":[],"name":"recentItemsRecommender"}],"combiner":"firstSuccessfulCombiner"}'
    data = json_to_dict(default_alg_json)
    write_data_to_file(data_fpath, data)
    zk_utils.node_set(zk_client, node_path, dict_to_json(data))
def action_show(command_data, opts):
    """Print the locally mirrored alg_rectags definition for --client-name."""
    client_name = opts.client_name
    if client_name == None:
        print "Need client name to show the algs for"
        sys.exit(1)
    zkroot = command_data["zkdetails"]["zkroot"]
    if not is_existing_client(zkroot, client_name):
        print "Invalid client[{client_name}]".format(**locals())
        sys.exit(1)
    data_fpath = zkroot + gdata["all_clients_node_path"] + "/" + client_name + "/alg_rectags/_data_"
    if os.path.isfile(data_fpath):
        f = open(data_fpath)
        json = f.read()  # NOTE(review): shadows the json module locally
        f.close()
        data = json_to_dict(json)
        show_algs(data)
    else:
        print "Unable to show recommenders definition for client[{client_name}]".format(**locals())
def has_config(opts, name):
    """Return True if any --config entry is of the form ``name=<value>``.

    :param opts: parsed options namespace; opts.config is None or a list of
        'key=value' strings.
    :param name: the config key to look for.
    """
    if opts.config is None:  # idiom fix: was 'not opts.config is None' guard
        return False
    return any(nv.split('=')[0] == name for nv in opts.config)
def action_add(command_data, opts):
    """Append --recommender-name (with optional --config overrides) to a
    client's algs definition and activate its model node if it has one."""
    client_name = opts.client_name
    if client_name == None:
        print "Need client name to add algs for"
        sys.exit(1)
    recommender_name = opts.recommender_name
    if recommender_name == None:
        print "Need recommender name"
        sys.exit(1)
    zkroot = command_data["zkdetails"]["zkroot"]
    if not is_existing_client(zkroot, client_name):
        print "Invalid client[{client_name}]".format(**locals())
        sys.exit(1)
    default_algorithms = command_data["conf_data"]["default_algorithms"]
    recommenders = default_algorithms.keys()
    if recommender_name not in recommenders:
        print "Invalid recommender[{recommender_name}]".format(**locals())
        sys.exit(1)
    #if recommender_name == EXTERNAL_RECOMMENDER:
    #if not (has_config(opts,CONFIG_MICROSERVICE_URL) and has_config(opts,CONFIG_MICROSERVICE_NAME)):
    #    print "You must supply "+CONFIG_MICROSERVICE_URL+" and "+CONFIG_MICROSERVICE_NAME+" for "+EXTERNAL_RECOMMENDER
    #    sys.exit(1)
    zk_client = command_data["zkdetails"]["zk_client"]
    ensure_client_has_algs(zkroot, zk_client, client_name)
    data_fpath = zkroot + gdata["all_clients_node_path"] + "/" + client_name + "/algs/_data_"
    f = open(data_fpath)
    json = f.read()  # NOTE(review): shadows the json module locally
    f.close()
    data = json_to_dict(json)
    algorithms = data["algorithms"]
    # seed the new entry from the recommender's defaults
    includers = default_algorithms[recommender_name]["includers"] if default_algorithms[recommender_name].has_key("includers") else []
    recommender_data = {
        'filters':[],
        'includers': includers,
        'name': recommender_name,
        'config': default_algorithms[recommender_name]["config"]
    }
    # --config entries override/extend the default config as name/value pairs
    if not opts.config is None:
        for nv in opts.config:
            (name,value) = nv.split('=')
            recommender_data['config'].append({"name":name,"value":value})
    algorithms.append(recommender_data)
    write_data_to_file(data_fpath, data)
    if default_algorithms[recommender_name].has_key("zk_activate_node"):
        add_model_activate(zkroot,client_name,default_algorithms[recommender_name]["zk_activate_node"])
    print "Added [{recommender_name}]".format(**locals())
    show_algs(data)
def action_delete(command_data, opts):
    """Remove every occurrence of --recommender-name from a client's algs and
    deactivate its model entry when one exists."""
    client_name = opts.client_name
    if client_name == None:
        print "Need client name to add algs for"
        sys.exit(1)
    recommender_name = opts.recommender_name
    if recommender_name == None:
        print "Need recommender name"
        sys.exit(1)
    zkroot = command_data["zkdetails"]["zkroot"]
    if not is_existing_client(zkroot, client_name):
        print "Invalid client[{client_name}]".format(**locals())
        sys.exit(1)
    zk_client = command_data["zkdetails"]["zk_client"]
    ensure_client_has_algs(zkroot, zk_client, client_name)
    data_fpath = zkroot + gdata["all_clients_node_path"] + "/" + client_name + "/algs/_data_"
    f = open(data_fpath)
    json = f.read()  # NOTE(review): shadows the json module locally
    f.close()
    data = json_to_dict(json)
    default_algorithms = command_data["conf_data"]["default_algorithms"]
    recommenders = default_algorithms.keys()
    if recommender_name not in recommenders:
        print "Invalid recommender[{recommender_name}]".format(**locals())
        sys.exit(1)
    algorithms = data["algorithms"]
    length_before_removal = len(algorithms)
    def recommender_filter(item):
        # keep every algorithm whose name differs from the one being deleted
        if item["name"] == recommender_name:
            return False
        else:
            return True
    filtered_algorithms = filter(recommender_filter, algorithms)
    length_after_removal = len(filtered_algorithms)
    data["algorithms"] = filtered_algorithms
    # only persist (and deactivate) when something was actually removed
    if length_after_removal < length_before_removal:
        write_data_to_file(data_fpath, data)
        if default_algorithms[recommender_name].has_key("zk_activate_node"):
            remove_model_activate(zkroot,client_name,default_algorithms[recommender_name]["zk_activate_node"])
        print "Removed [{recommender_name}]".format(**locals())
def action_list(command_data, opts):
    """List the recommender names available in the default_algorithms config."""
    print "Default recommenders:"
    default_algorithms = command_data["conf_data"]["default_algorithms"]
    for recommender in default_algorithms:
        print "    {recommender}".format(**locals())
def action_commit(command_data, opts):
    """Push the locally mirrored algorithm definition for --client-name back
    into zookeeper and (re)activate any models the recommenders require.

    Prefers the alg_rectags mirror; falls back to the legacy algs node when
    no alg_rectags file exists locally.
    """
    client_name = opts.client_name
    if client_name == None:
        print "Need client name to commit data for"
        sys.exit(1)
    zkroot = command_data["zkdetails"]["zkroot"]
    if not is_existing_client(zkroot, client_name):
        print "Invalid client[{client_name}]".format(**locals())
        return
    zk_client = command_data["zkdetails"]["zk_client"]
    zkroot = command_data["zkdetails"]["zkroot"]
    # Preferred path: commit the alg_rectags mirror when present.
    data_fpath = zkroot + gdata["all_clients_node_path"] + "/" + client_name + "/alg_rectags/_data_"
    if os.path.isfile(data_fpath):
        f = open(data_fpath)
        data_json = f.read()
        f.close()
        zk_client = command_data["zkdetails"]["zk_client"]
        node_path = gdata["all_clients_node_path"] + "/" + client_name + "/alg_rectags"
        zk_utils.node_set(zk_client, node_path, data_json)
        # activate any required models
        data = json_to_dict(data_json)
        recommender_set = set()
        # collect recommender names from either a flat strategy or A/B variations
        if data["defaultStrategy"].has_key("algorithms"):
            for alg in data["defaultStrategy"]["algorithms"]:
                recommender_set.add(alg["name"])
        elif data["defaultStrategy"].has_key("variations"):
            for variation in data["defaultStrategy"]["variations"]:
                for alg in variation["config"]["algorithms"]:
                    recommender_set.add(alg["name"])
        default_algorithms = command_data["conf_data"]["default_algorithms"]
        for recommender_name in recommender_set:
            if default_algorithms[recommender_name].has_key("zk_activate_node"):
                print "activate",recommender_name
                node_path = default_algorithms[recommender_name]["zk_activate_node"]
                node_fpath = zkroot + node_path + "/_data_"
                data_models = get_data_from_file(node_fpath)
                zk_utils.node_set(zk_client, node_path, data_models)
        return
    #TODO remove the following once only using alg_rectags
    # Legacy path: commit the plain algs node instead.
    data_fpath = zkroot + gdata["all_clients_node_path"] + "/" + client_name + "/algs/_data_"
    if not os.path.isfile(data_fpath):
        print "Data to commit not found!!"
        sys.exit(1)
    f = open(data_fpath)
    data_json = f.read()
    f.close()
    zk_client = command_data["zkdetails"]["zk_client"]
    node_path = gdata["all_clients_node_path"] + "/" + client_name + "/algs"
    zk_utils.node_set(zk_client, node_path, data_json)
    # activate any required models
    default_algorithms = command_data["conf_data"]["default_algorithms"]
    data = json_to_dict(data_json)
    algorithms = data["algorithms"]
    print "algorithms:"
    for alg in algorithms:
        alg_name=alg["name"]
        if default_algorithms[alg_name].has_key("zk_activate_node"):
            node_path = default_algorithms[alg_name]["zk_activate_node"]
            node_fpath = zkroot + node_path + "/_data_"
            data_models = get_data_from_file(node_fpath)
            zk_utils.node_set(zk_client, node_path, data_models)
def action_create(command_data, opts):
    """Create a client's alg_rectags definition from a JSON file (or stdin
    when --json-file is '-') and activate any models it references."""
    zkroot = command_data["zkdetails"]["zkroot"]
    #check_valid_client_name
    client_name = opts.client_name
    if client_name == None:
        print "Need client name to create algs for"
        sys.exit(1)
    if not is_existing_client(zkroot, client_name):
        print "Invalid client[{client_name}]".format(**locals())
        sys.exit(1)
    #check_valid_json_file
    json_file_contents = ""
    json_file = opts.json_file
    if json_file == None:
        print "Need json-file to use for creating algs"
        sys.exit(1)
    if json_file == "-":
        json_file_contents = sys.stdin.read()
    else:
        if not os.path.isfile(json_file):
            print "Unable find file[{json_file}]".format(**locals())
            sys.exit(1)
        f = open(json_file)
        json_file_contents = f.read()
        f.close()
    # ensure valid data
    data = json_to_dict(json_file_contents)
    #do the model activate
    recommender_set = set()
    # collect recommender names from either a flat strategy or A/B variations
    if data["defaultStrategy"].has_key("algorithms"):
        for alg in data["defaultStrategy"]["algorithms"]:
            recommender_set.add(alg["name"])
    elif data["defaultStrategy"].has_key("variations"):
        for variation in data["defaultStrategy"]["variations"]:
            for alg in variation["config"]["algorithms"]:
                recommender_set.add(alg["name"])
    default_algorithms = command_data["conf_data"]["default_algorithms"]
    for recommender_name in recommender_set:
        if default_algorithms[recommender_name].has_key("zk_activate_node"):
            add_model_activate(zkroot,client_name,default_algorithms[recommender_name]["zk_activate_node"])
    #save to zkoot
    data_fpath = zkroot + gdata["all_clients_node_path"] + "/" + client_name + "/alg_rectags/_data_"
    write_data_to_file(data_fpath, data)
def cmd_alg(gopts,command_data, command_args):
    """Entry point for the rec_alg sub-command: parse command_args and
    dispatch to the matching action handler."""
    actions = {
        "list" : action_list,
        "show" : action_show,
        "add" : action_add,
        "delete" : action_delete,
        "commit" : action_commit,
        "create" : action_create,
    }
    opts = getOpts(command_args)
    action = opts.action
    if action == None:
        # NOTE(review): unreachable via getOpts (--action is required there),
        # and 'default' is not a key of actions, so this branch would raise
        # KeyError if it were ever taken.
        print "Running default list action"
        actions["default"](command_data, opts)
    else:
        if actions.has_key(action):
            actions[action](command_data, opts)
        else:
            print "Invalid action[{}]".format(action)
| |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple utility PTransforms.
"""
from __future__ import absolute_import
from __future__ import division
import collections
import contextlib
import random
import re
import sys
import time
import warnings
from builtins import filter
from builtins import object
from builtins import range
from builtins import zip
from typing import TYPE_CHECKING
from typing import Any
from typing import Iterable
from typing import List
from typing import Tuple
from typing import TypeVar
from typing import Union
from future.utils import itervalues
from past.builtins import long
from apache_beam import coders
from apache_beam import typehints
from apache_beam.metrics import Metrics
from apache_beam.portability import common_urns
from apache_beam.transforms import window
from apache_beam.transforms.combiners import CountCombineFn
from apache_beam.transforms.core import CombinePerKey
from apache_beam.transforms.core import DoFn
from apache_beam.transforms.core import FlatMap
from apache_beam.transforms.core import Flatten
from apache_beam.transforms.core import GroupByKey
from apache_beam.transforms.core import Map
from apache_beam.transforms.core import ParDo
from apache_beam.transforms.core import Windowing
from apache_beam.transforms.ptransform import PTransform
from apache_beam.transforms.ptransform import ptransform_fn
from apache_beam.transforms.timeutil import TimeDomain
from apache_beam.transforms.trigger import AccumulationMode
from apache_beam.transforms.trigger import AfterCount
from apache_beam.transforms.userstate import BagStateSpec
from apache_beam.transforms.userstate import CombiningValueStateSpec
from apache_beam.transforms.userstate import TimerSpec
from apache_beam.transforms.userstate import on_timer
from apache_beam.transforms.window import NonMergingWindowFn
from apache_beam.transforms.window import TimestampCombiner
from apache_beam.transforms.window import TimestampedValue
from apache_beam.utils import windowed_value
from apache_beam.utils.annotations import deprecated
from apache_beam.utils.annotations import experimental
if TYPE_CHECKING:
from apache_beam import pvalue
from apache_beam.runners.pipeline_context import PipelineContext
# Public API of this module; underscore-prefixed helpers below are internal.
__all__ = [
    'BatchElements',
    'CoGroupByKey',
    'Distinct',
    'Keys',
    'KvSwap',
    'Regex',
    'Reify',
    'RemoveDuplicates',
    'Reshuffle',
    'ToString',
    'Values',
    'WithKeys',
    'GroupIntoBatches'
    ]
# Type variables used by the typehint decorators below:
# K/V name the key and value halves of a KV pair, T an arbitrary element type.
K = TypeVar('K')
V = TypeVar('V')
T = TypeVar('T')
class CoGroupByKey(PTransform):
  """Groups results across several PCollections by key.
  Given an input dict of serializable keys (called "tags") to 0 or more
  PCollections of (key, value) tuples, it creates a single output PCollection
  of (key, value) tuples whose keys are the unique input keys from all inputs,
  and whose values are dicts mapping each tag to an iterable of whatever values
  were under the key in the corresponding PCollection, in this manner::
      ('some key', {'tag1': ['value 1 under "some key" in pcoll1',
                             'value 2 under "some key" in pcoll1',
                             ...],
                    'tag2': ... ,
                    ... })
  For example, given::
      {'tag1': pc1, 'tag2': pc2, 333: pc3}
  where::
      pc1 = [(k1, v1)]
      pc2 = []
      pc3 = [(k1, v31), (k1, v32), (k2, v33)]
  The output PCollection would be::
      [(k1, {'tag1': [v1], 'tag2': [], 333: [v31, v32]}),
       (k2, {'tag1': [], 'tag2': [], 333: [v33]})]
  CoGroupByKey also works for tuples, lists, or other flat iterables of
  PCollections, in which case the values of the resulting PCollections
  will be tuples whose nth value is the list of values from the nth
  PCollection---conceptually, the "tags" are the indices into the input.
  Thus, for this input::
      (pc1, pc2, pc3)
  the output would be::
      [(k1, ([v1], [], [v31, v32]),
       (k2, ([], [], [v33]))]
  Attributes:
    **kwargs: Accepts a single named argument "pipeline", which specifies the
      pipeline that "owns" this PTransform. Ordinarily CoGroupByKey can obtain
      this information from one of the input PCollections, but if there are none
      (or if there's a chance there may be none), this argument is the only way
      to provide pipeline information, and should be considered mandatory.
  """
  def __init__(self, **kwargs):
    super(CoGroupByKey, self).__init__()
    # 'pipeline' is the only accepted keyword argument; anything else is a
    # caller error and is rejected loudly rather than silently ignored.
    self.pipeline = kwargs.pop('pipeline', None)
    if kwargs:
      raise ValueError('Unexpected keyword arguments: %s' % list(kwargs.keys()))
  def _extract_input_pvalues(self, pvalueish):
    # Returns (original input, flat tuple of input PCollections) so the
    # pipeline machinery can find all inputs regardless of dict/tuple shape.
    try:
      # If this works, it's a dict.
      return pvalueish, tuple(itervalues(pvalueish))
    except AttributeError:
      pcolls = tuple(pvalueish)
      return pcolls, pcolls
  def expand(self, pcolls):
    """Performs CoGroupByKey on argument pcolls; see class docstring."""
    # For associating values in K-V pairs with the PCollections they came from.
    def _pair_tag_with_value(key_value, tag):
      (key, value) = key_value
      return (key, (tag, value))
    # Creates the key, value pairs for the output PCollection. Values are either
    # lists or dicts (per the class docstring), initialized by the result of
    # result_ctor(result_ctor_arg).
    def _merge_tagged_vals_under_key(key_grouped, result_ctor,
                                     result_ctor_arg):
      (key, grouped) = key_grouped
      result_value = result_ctor(result_ctor_arg)
      for tag, value in grouped:
        result_value[tag].append(value)
      return (key, result_value)
    try:
      # If pcolls is a dict, we turn it into (tag, pcoll) pairs for use in the
      # general-purpose code below. The result value constructor creates dicts
      # whose keys are the tags.  (A non-dict input falls through via the
      # AttributeError raised by .items().)
      result_ctor_arg = list(pcolls)
      result_ctor = lambda tags: dict((tag, []) for tag in tags)
      pcolls = pcolls.items()
    except AttributeError:
      # Otherwise, pcolls is a list/tuple, so we turn it into (index, pcoll)
      # pairs. The result value constructor makes tuples with len(pcolls) slots.
      pcolls = list(enumerate(pcolls))
      result_ctor_arg = len(pcolls)
      result_ctor = lambda size: tuple([] for _ in range(size))
    # Check input PCollections for PCollection-ness, and that they all belong
    # to the same pipeline.
    for _, pcoll in pcolls:
      self._check_pcollection(pcoll)
      if self.pipeline:
        assert pcoll.pipeline == self.pipeline
    return ([pcoll | 'pair_with_%s' % tag >> Map(_pair_tag_with_value, tag)
             for tag, pcoll in pcolls]
            | Flatten(pipeline=self.pipeline)
            | GroupByKey()
            | Map(_merge_tagged_vals_under_key, result_ctor, result_ctor_arg))
def Keys(label='Keys'):  # pylint: disable=invalid-name
  """Returns a PTransform that extracts the key of each 2-tuple element."""
  def _first(pair):
    return pair[0]
  return label >> Map(_first)
def Values(label='Values'):  # pylint: disable=invalid-name
  """Returns a PTransform that extracts the value of each 2-tuple element."""
  def _second(pair):
    return pair[1]
  return label >> Map(_second)
def KvSwap(label='KvSwap'):  # pylint: disable=invalid-name
  """Returns a PTransform that swaps the two halves of each 2-tuple element."""
  def _swap(pair):
    return (pair[1], pair[0])
  return label >> Map(_swap)
@ptransform_fn
def Distinct(pcoll):  # pylint: disable=invalid-name
  """Produces a PCollection with duplicate elements of the input removed."""
  # Key each element on itself, collapse each key's values to a single None,
  # then keep only the (now unique) keys.
  paired = pcoll | 'ToPairs' >> Map(lambda v: (v, None))
  deduped = paired | 'Group' >> CombinePerKey(lambda vs: None)
  return deduped | 'Distinct' >> Keys()
@deprecated(since='2.12', current='Distinct')
@ptransform_fn
def RemoveDuplicates(pcoll):
  """Produces a PCollection containing distinct elements of a PCollection.

  Deprecated alias kept for backwards compatibility; use ``Distinct`` instead.
  """
  return pcoll | 'RemoveDuplicates' >> Distinct()
class _BatchSizeEstimator(object):
  """Estimates the best size for batches given historical timing.

  Collects (batch_size, elapsed_seconds) samples via ``record_time`` and fits
  a linear cost model ``time = a + b * batch_size`` to propose the next batch
  size that meets the configured overhead/duration targets.
  """
  # Cap on retained samples; see _thin_data for how excess samples are culled.
  _MAX_DATA_POINTS = 100
  # A proposed batch size never grows by more than this factor per step.
  _MAX_GROWTH_FACTOR = 2
  def __init__(self,
               min_batch_size=1,
               max_batch_size=10000,
               target_batch_overhead=.05,
               target_batch_duration_secs=1,
               variance=0.25,
               clock=time.time,
               ignore_first_n_seen_per_batch_size=0):
    """Creates the estimator.

    Args:
      min_batch_size: lower bound on proposed batch sizes.
      max_batch_size: upper bound on proposed batch sizes.
      target_batch_overhead: target fraction fixed_cost / total_cost, in (0, 1].
      target_batch_duration_secs: target wall time per batch, in seconds.
      variance: relative jitter applied around the estimated ideal size.
      clock: time source, injectable for testing.
      ignore_first_n_seen_per_batch_size: number of timings to discard the
        first time(s) each distinct batch size is used (warm-up noise).

    Raises:
      ValueError: on inconsistent or out-of-range arguments.
    """
    if min_batch_size > max_batch_size:
      raise ValueError("Minimum (%s) must not be greater than maximum (%s)" % (
          min_batch_size, max_batch_size))
    if target_batch_overhead and not 0 < target_batch_overhead <= 1:
      raise ValueError("target_batch_overhead (%s) must be between 0 and 1" % (
          target_batch_overhead))
    if target_batch_duration_secs and target_batch_duration_secs <= 0:
      raise ValueError("target_batch_duration_secs (%s) must be positive" % (
          target_batch_duration_secs))
    if not (target_batch_overhead or target_batch_duration_secs):
      raise ValueError("At least one of target_batch_overhead or "
                       "target_batch_duration_secs must be positive.")
    if ignore_first_n_seen_per_batch_size < 0:
      raise ValueError('ignore_first_n_seen_per_batch_size (%s) must be non '
                       'negative' % (ignore_first_n_seen_per_batch_size))
    self._min_batch_size = min_batch_size
    self._max_batch_size = max_batch_size
    self._target_batch_overhead = target_batch_overhead
    self._target_batch_duration_secs = target_batch_duration_secs
    self._variance = variance
    self._clock = clock
    # List of (batch_size, elapsed_seconds) samples used for the regression.
    self._data = []
    self._ignore_next_timing = False
    self._ignore_first_n_seen_per_batch_size = (
        ignore_first_n_seen_per_batch_size)
    # Per-batch-size counter backing ignore_first_n_seen_per_batch_size.
    self._batch_size_num_seen = {}
    # When a timing was discarded, the same batch size is replayed once so a
    # clean sample for it can still be collected.
    self._replay_last_batch_size = None
    self._size_distribution = Metrics.distribution(
        'BatchElements', 'batch_size')
    self._time_distribution = Metrics.distribution(
        'BatchElements', 'msec_per_batch')
    # Beam distributions only accept integer values, so we use this to
    # accumulate under-reported values until they add up to whole milliseconds.
    # (Milliseconds are chosen because that's conventionally used elsewhere in
    # profiling-style counters.)
    self._remainder_msecs = 0
  def ignore_next_timing(self):
    """Call to indicate the next timing should be ignored.
    For example, the first emit of a ParDo operation is known to be anomalous
    due to setup that may occur.
    """
    self._ignore_next_timing = True
  @contextlib.contextmanager
  def record_time(self, batch_size):
    # Context manager: times the enclosed work for a batch of batch_size
    # elements, updates the metrics distributions, and (unless flagged to
    # ignore) appends a sample for the regression.
    start = self._clock()
    yield
    elapsed = self._clock() - start
    elapsed_msec = 1e3 * elapsed + self._remainder_msecs
    self._size_distribution.update(batch_size)
    self._time_distribution.update(int(elapsed_msec))
    # Carry the sub-millisecond remainder so repeated truncation to int does
    # not systematically under-report total time.
    self._remainder_msecs = elapsed_msec - int(elapsed_msec)
    # If we ignore the next timing, replay the batch size to get accurate
    # timing.
    if self._ignore_next_timing:
      self._ignore_next_timing = False
      self._replay_last_batch_size = batch_size
    else:
      self._data.append((batch_size, elapsed))
      if len(self._data) >= self._MAX_DATA_POINTS:
        self._thin_data()
  def _thin_data(self):
    # Make sure we don't change the parity of len(self._data)
    # As it's used below to alternate jitter.
    # (Two pops leave the parity unchanged; one sample is dropped from the
    # first quarter and one from the first half of the list.)
    self._data.pop(random.randrange(len(self._data) // 4))
    self._data.pop(random.randrange(len(self._data) // 2))
  @staticmethod
  def linear_regression_no_numpy(xs, ys):
    # Least squares fit for y = a + bx over all points.
    n = float(len(xs))
    xbar = sum(xs) / n
    ybar = sum(ys) / n
    if xbar == 0:
      return ybar, 0
    if all(xs[0] == x for x in xs):
      # Simply use the mean if all values in xs are same.
      return 0, ybar / xbar
    b = (sum([(x - xbar) * (y - ybar) for x, y in zip(xs, ys)])
         / sum([(x - xbar)**2 for x in xs]))
    a = ybar - b * xbar
    return a, b
  @staticmethod
  def linear_regression_numpy(xs, ys):
    # Fit for y = a + bx, with outlier rejection when enough data is present.
    # pylint: disable=wrong-import-order, wrong-import-position
    import numpy as np
    from numpy import sum
    n = len(xs)
    if all(xs[0] == x for x in xs):
      # If all values of xs are same then fallback to linear_regression_no_numpy
      return _BatchSizeEstimator.linear_regression_no_numpy(xs, ys)
    xs = np.asarray(xs, dtype=float)
    ys = np.asarray(ys, dtype=float)
    # First do a simple least squares fit for y = a + bx over all points.
    b, a = np.polyfit(xs, ys, 1)
    if n < 10:
      return a, b
    else:
      # Refine this by throwing out outliers, according to Cook's distance.
      # https://en.wikipedia.org/wiki/Cook%27s_distance
      sum_x = sum(xs)
      sum_x2 = sum(xs**2)
      errs = a + b * xs - ys
      s2 = sum(errs**2) / (n - 2)
      if s2 == 0:
        # It's an exact fit!
        return a, b
      h = (sum_x2 - 2 * sum_x * xs + n * xs**2) / (n * sum_x2 - sum_x**2)
      cook_ds = 0.5 / s2 * errs**2 * (h / (1 - h)**2)
      # Re-compute the regression, excluding those points with Cook's distance
      # greater than 0.5, and weighting by the inverse of x to give a more
      # stable y-intercept (as small batches have relatively more information
      # about the fixed overhead).
      weight = (cook_ds <= 0.5) / xs
      b, a = np.polyfit(xs, ys, 1, w=weight)
      return a, b
  # Bind the regression implementation at class-definition time, depending on
  # whether numpy is importable in this environment.
  try:
    # pylint: disable=wrong-import-order, wrong-import-position
    import numpy as np
    linear_regression = linear_regression_numpy
  except ImportError:
    linear_regression = linear_regression_no_numpy
  def _calculate_next_batch_size(self):
    # Proposes a batch size from the fitted cost model, bounded by the
    # configured min/max and by _MAX_GROWTH_FACTOR growth per step.
    if self._min_batch_size == self._max_batch_size:
      return self._min_batch_size
    elif len(self._data) < 1:
      return self._min_batch_size
    elif len(self._data) < 2:
      # Force some variety so we have distinct batch sizes on which to do
      # linear regression below.
      return int(max(
          min(self._max_batch_size,
              self._min_batch_size * self._MAX_GROWTH_FACTOR),
          self._min_batch_size + 1))
    # There tends to be a lot of noise in the top quantile, which also
    # has outsized influence in the regression. If we have enough data,
    # simply declare the top 20% to be outliers.
    trimmed_data = sorted(self._data)[:max(20, len(self._data) * 4 // 5)]
    # Linear regression for y = a + bx, where x is batch size and y is time.
    xs, ys = zip(*trimmed_data)
    a, b = self.linear_regression(xs, ys)
    # Avoid nonsensical or division-by-zero errors below due to noise.
    a = max(a, 1e-10)
    b = max(b, 1e-20)
    last_batch_size = self._data[-1][0]
    cap = min(last_batch_size * self._MAX_GROWTH_FACTOR, self._max_batch_size)
    target = self._max_batch_size
    if self._target_batch_duration_secs:
      # Solution to a + b*x = self._target_batch_duration_secs.
      target = min(target, (self._target_batch_duration_secs - a) / b)
    if self._target_batch_overhead:
      # Solution to a / (a + b*x) = self._target_batch_overhead.
      target = min(target, (a / b) * (1 / self._target_batch_overhead - 1))
    # Avoid getting stuck at a single batch size (especially the minimal
    # batch size) which would not allow us to extrapolate to other batch
    # sizes.
    # Jitter alternates between 0 and 1.
    jitter = len(self._data) % 2
    # Smear our samples across a range centered at the target.
    if len(self._data) > 10:
      target += int(target * self._variance * 2 * (random.random() - .5))
    return int(max(self._min_batch_size + jitter, min(target, cap)))
  def next_batch_size(self):
    """Returns the batch size to use for the next batch."""
    # Check if we should replay a previous batch size due to it not being
    # recorded.
    if self._replay_last_batch_size:
      result = self._replay_last_batch_size
      self._replay_last_batch_size = None
    else:
      result = self._calculate_next_batch_size()
    seen_count = self._batch_size_num_seen.get(result, 0) + 1
    # Discard the first N timings observed for each distinct batch size.
    if seen_count <= self._ignore_first_n_seen_per_batch_size:
      self.ignore_next_timing()
    self._batch_size_num_seen[result] = seen_count
    return result
class _GlobalWindowsBatchingDoFn(DoFn):
  """Batches globally-windowed elements, sizing batches via an estimator.

  Simpler sibling of _WindowAwareBatchingDoFn for the common case where all
  elements live in the single global window.
  """
  def __init__(self, batch_size_estimator):
    self._batch_size_estimator = batch_size_estimator
  def start_bundle(self):
    self._batch = []
    self._batch_size = self._batch_size_estimator.next_batch_size()
    # The first emit's timing is skewed by one-off setup work, so tell the
    # estimator to disregard it.
    self._batch_size_estimator.ignore_next_timing()
  def process(self, element):
    self._batch.append(element)
    if len(self._batch) < self._batch_size:
      return
    # Batch is full: time the downstream (fused) processing of it, then start
    # a fresh batch with a newly estimated size.
    with self._batch_size_estimator.record_time(self._batch_size):
      yield self._batch
    self._batch = []
    self._batch_size = self._batch_size_estimator.next_batch_size()
  def finish_bundle(self):
    if not self._batch:
      return
    # Flush the trailing partial batch, explicitly windowed into the global
    # window since finish_bundle outputs carry no implicit window.
    with self._batch_size_estimator.record_time(self._batch_size):
      yield window.GlobalWindows.windowed_value(self._batch)
    self._batch = None
    self._batch_size = self._batch_size_estimator.next_batch_size()
class _WindowAwareBatchingDoFn(DoFn):
  """Batches elements per window, sizing batches via an estimator.

  Keeps one in-progress batch per live window; bounds memory by evicting the
  fullest window's batch once more than _MAX_LIVE_WINDOWS windows are open.
  """
  # Maximum number of windows with an in-progress batch at any time.
  _MAX_LIVE_WINDOWS = 10
  def __init__(self, batch_size_estimator):
    self._batch_size_estimator = batch_size_estimator
  def start_bundle(self):
    # Maps window -> list of buffered elements for that window.
    self._batches = collections.defaultdict(list)
    self._batch_size = self._batch_size_estimator.next_batch_size()
    # The first emit often involves non-trivial setup.
    self._batch_size_estimator.ignore_next_timing()
  def process(self, element, window=DoFn.WindowParam):
    # NOTE: the 'window' parameter shadows the module-level 'window' import
    # within this method; here it is the element's window object.
    self._batches[window].append(element)
    if len(self._batches[window]) >= self._batch_size:
      # This window's batch is full: emit it, timestamped at the window end.
      with self._batch_size_estimator.record_time(self._batch_size):
        yield windowed_value.WindowedValue(
            self._batches[window], window.max_timestamp(), (window,))
      del self._batches[window]
      self._batch_size = self._batch_size_estimator.next_batch_size()
    elif len(self._batches) > self._MAX_LIVE_WINDOWS:
      # Too many live windows: evict the window holding the largest batch.
      window, _ = sorted(
          self._batches.items(),
          key=lambda window_batch: len(window_batch[1]),
          reverse=True)[0]
      with self._batch_size_estimator.record_time(self._batch_size):
        yield windowed_value.WindowedValue(
            self._batches[window], window.max_timestamp(), (window,))
      del self._batches[window]
      self._batch_size = self._batch_size_estimator.next_batch_size()
  def finish_bundle(self):
    # Flush every remaining non-empty per-window batch.
    for window, batch in self._batches.items():
      if batch:
        with self._batch_size_estimator.record_time(self._batch_size):
          yield windowed_value.WindowedValue(
              batch, window.max_timestamp(), (window,))
    self._batches = None
    self._batch_size = self._batch_size_estimator.next_batch_size()
@typehints.with_input_types(T)
@typehints.with_output_types(List[T])
class BatchElements(PTransform):
  """A Transform that batches elements for amortized processing.

  Intended to precede operations whose cost has the shape

      time = fixed_cost + num_elements * per_element_cost

  where the per-element cost is (often significantly) smaller than the fixed
  cost, so that the fixed cost can be amortized over a batch. Consumes a
  PCollection of T and produces a PCollection of List[T].

  The best batch size between the minimum and maximum parameters is searched
  for by profiling the time taken by (fused) downstream operations. For a
  fixed batch size, set the min and max to be equal.

  Elements are batched per-window, and each batch is emitted in the window
  its contents came from.

  Args:
    min_batch_size: (optional) the smallest number of elements per batch
    max_batch_size: (optional) the largest number of elements per batch
    target_batch_overhead: (optional) a target for fixed_cost / time,
        as used in the formula above
    target_batch_duration_secs: (optional) a target for total time per bundle,
        in seconds
    variance: (optional) the permitted (relative) amount of deviation from the
        (estimated) ideal batch size used to produce a wider base for
        linear interpolation
    clock: (optional) an alternative to time.time for measuring the cost of
        downstream operations (mostly for testing)
  """
  def __init__(self,
               min_batch_size=1,
               max_batch_size=10000,
               target_batch_overhead=.05,
               target_batch_duration_secs=1,
               variance=0.25,
               clock=time.time):
    # All tuning parameters are forwarded verbatim to the shared estimator.
    self._batch_size_estimator = _BatchSizeEstimator(
        min_batch_size=min_batch_size,
        max_batch_size=max_batch_size,
        target_batch_overhead=target_batch_overhead,
        target_batch_duration_secs=target_batch_duration_secs,
        variance=variance,
        clock=clock)
  def expand(self, pcoll):
    # Streaming requires stateful processing, which is not supported here yet.
    if getattr(pcoll.pipeline.runner, 'is_streaming', False):
      raise NotImplementedError("Requires stateful processing (BEAM-2687)")
    if pcoll.windowing.is_default():
      # Global windowing permits the simpler, cheaper batching DoFn; it has
      # the same logic as the window-aware one, minus the window bookkeeping.
      return pcoll | ParDo(
          _GlobalWindowsBatchingDoFn(self._batch_size_estimator))
    return pcoll | ParDo(_WindowAwareBatchingDoFn(self._batch_size_estimator))
class _IdentityWindowFn(NonMergingWindowFn):
  """WindowFn that keeps every element in the window it already occupies.

  Used internally by the Reshuffle transform. Raises when applied after a
  DoFn that returned TimestampedValue elements (the window is then unset).
  """
  def __init__(self, window_coder):
    """Creates a WindowFn compatible with the given window coder.

    Arguments:
      window_coder: coders.Coder object to be used on windows.
    """
    super(_IdentityWindowFn, self).__init__()
    if window_coder is None:
      raise ValueError('window_coder should not be None')
    self._window_coder = window_coder
  def assign(self, assign_context):
    current = assign_context.window
    # A missing window means the upstream DoFn produced TimestampedValues,
    # which strip windowing information; identity assignment is impossible.
    if current is None:
      raise ValueError(
          'assign_context.window should not be None. '
          'This might be due to a DoFn returning a TimestampedValue.')
    return [current]
  def get_window_coder(self):
    return self._window_coder
@typehints.with_input_types(Tuple[K, V])
@typehints.with_output_types(Tuple[K, V])
class ReshufflePerKey(PTransform):
  """PTransform that returns a PCollection equivalent to its input,
  but operationally provides some of the side effects of a GroupByKey,
  in particular preventing fusion of the surrounding transforms,
  checkpointing, and deduplication by id.
  ReshufflePerKey is experimental. No backwards compatibility guarantees.
  """
  def expand(self, pcoll):
    # The strategy: smuggle each element's timestamp (and, in the non-default
    # case, its window) through a GroupByKey as part of the value, then
    # restore them afterwards and reinstate the original windowing.
    windowing_saved = pcoll.windowing
    if windowing_saved.is_default():
      # In this (common) case we can use a trivial trigger driver
      # and avoid the (expensive) window param.
      globally_windowed = window.GlobalWindows.windowed_value(None)
      MIN_TIMESTAMP = window.MIN_TIMESTAMP
      def reify_timestamps(element, timestamp=DoFn.TimestampParam):
        key, value = element
        # MIN_TIMESTAMP marks "no meaningful timestamp"; encode it as None so
        # restore_timestamps can cheaply reuse a shared windowed value.
        if timestamp == MIN_TIMESTAMP:
          timestamp = None
        return key, (value, timestamp)
      def restore_timestamps(element):
        key, values = element
        return [
            globally_windowed.with_value((key, value))
            if timestamp is None
            else window.GlobalWindows.windowed_value((key, value), timestamp)
            for (value, timestamp) in values]
    else:
      def reify_timestamps(element,
                           timestamp=DoFn.TimestampParam,
                           window=DoFn.WindowParam):
        key, value = element
        # Transport the window as part of the value and restore it later.
        return key, windowed_value.WindowedValue(value, timestamp, [window])
      def restore_timestamps(element):
        key, windowed_values = element
        return [wv.with_value((key, wv.value)) for wv in windowed_values]
    ungrouped = pcoll | Map(reify_timestamps).with_output_types(Any)
    # TODO(BEAM-8104) Using global window as one of the standard window.
    # This is to mitigate the Dataflow Java Runner Harness limitation to
    # accept only standard coders.
    # NOTE: assigning to the private _windowing attribute rewrites the
    # windowing of the intermediate collection in place.
    ungrouped._windowing = Windowing(
        window.GlobalWindows(),
        triggerfn=AfterCount(1),
        accumulation_mode=AccumulationMode.DISCARDING,
        timestamp_combiner=TimestampCombiner.OUTPUT_AT_EARLIEST)
    result = (ungrouped
              | GroupByKey()
              | FlatMap(restore_timestamps).with_output_types(Any))
    # Reinstate the caller's original windowing on the output.
    result._windowing = windowing_saved
    return result
@typehints.with_input_types(T)
@typehints.with_output_types(T)
class Reshuffle(PTransform):
  """PTransform that returns a PCollection equivalent to its input,
  but operationally provides some of the side effects of a GroupByKey,
  in particular preventing fusion of the surrounding transforms,
  checkpointing, and deduplication by id.
  Reshuffle adds a temporary random key to each element, performs a
  ReshufflePerKey, and finally removes the temporary key.
  Reshuffle is experimental. No backwards compatibility guarantees.
  """
  def expand(self, pcoll):
    # type: (pvalue.PValue) -> pvalue.PCollection
    # The temporary key is a 32-bit random int; its typehint differs between
    # Python 2 (long) and Python 3 (int).
    if sys.version_info >= (3,):
      KeyedT = Tuple[int, T]
    else:
      KeyedT = Tuple[long, T]  # pylint: disable=long-builtin
    return (pcoll
            | 'AddRandomKeys' >> Map(lambda t: (random.getrandbits(32), t))
            .with_input_types(T).with_output_types(KeyedT)
            | ReshufflePerKey()
            | 'RemoveRandomKeys' >> Map(lambda t: t[1])
            .with_input_types(KeyedT).with_output_types(T))
  def to_runner_api_parameter(self, unused_context):
    # type: (PipelineContext) -> Tuple[str, None]
    # Reshuffle is a well-known composite; it serializes to its URN alone.
    return common_urns.composites.RESHUFFLE.urn, None
  @PTransform.register_urn(common_urns.composites.RESHUFFLE.urn, None)
  def from_runner_api_parameter(unused_parameter, unused_context):
    # Registered deserializer: reconstructs the transform from its URN.
    return Reshuffle()
@ptransform_fn
def WithKeys(pcoll, k):
  """Pairs each value in a PCollection with a key.

  ``k`` may be either a constant, producing ``(k, v)`` for every value ``v``,
  or a callable, producing ``(k(v), v)``.
  """
  if callable(k):
    def _pair_with_computed_key(v):
      return (k(v), v)
    return pcoll | Map(_pair_with_computed_key)
  def _pair_with_constant_key(v):
    return (k, v)
  return pcoll | Map(_pair_with_constant_key)
@experimental()
@typehints.with_input_types(Tuple[K, V])
class GroupIntoBatches(PTransform):
  """Buffers keyed elements and emits them in batches of the requested size.

  Elements are held in per-key, per-window state until ``batch_size`` of them
  have accumulated, at which point a batch is output. Windows are preserved
  (a batch never mixes elements from different windows).

  GroupIntoBatches is experimental; it only works on runners that support
  user state and timers.
  """
  def __init__(self, batch_size):
    """Creates the transform.

    Arguments:
      batch_size: (required) How many elements should be in a batch
    """
    warnings.warn('Use of GroupIntoBatches transform requires State/Timer '
                  'support from the runner')
    self.batch_size = batch_size
  def expand(self, pcoll):
    # The element coder is needed to declare the state cells backing the
    # stateful batching DoFn.
    element_coder = coders.registry.get_coder(pcoll)
    batching_dofn = _pardo_group_into_batches(self.batch_size, element_coder)
    return pcoll | ParDo(batching_dofn)
def _pardo_group_into_batches(batch_size, input_coder):
  """Builds the stateful DoFn backing GroupIntoBatches.

  Per key and window, elements accumulate in a bag state cell alongside a
  combining counter; a watermark timer at the window end flushes any
  incomplete batch.
  """
  ELEMENT_STATE = BagStateSpec('values', input_coder)
  # NOTE(review): the count state holds an integer but is declared with the
  # element coder (input_coder) — presumably an int-capable coder is expected
  # here; verify whether a varint coder was intended.
  COUNT_STATE = CombiningValueStateSpec('count', input_coder, CountCombineFn())
  EXPIRY_TIMER = TimerSpec('expiry', TimeDomain.WATERMARK)
  class _GroupIntoBatchesDoFn(DoFn):
    def process(self, element,
                window=DoFn.WindowParam,
                element_state=DoFn.StateParam(ELEMENT_STATE),
                count_state=DoFn.StateParam(COUNT_STATE),
                expiry_timer=DoFn.TimerParam(EXPIRY_TIMER)):
      # Allowed lateness not supported in Python SDK
      # https://beam.apache.org/documentation/programming-guide/#watermarks-and-late-data
      # (Re)arm the flush timer for the end of the window on every element.
      expiry_timer.set(window.end)
      element_state.add(element)
      count_state.add(1)
      count = count_state.read()
      if count >= batch_size:
        # Full batch: emit the buffered elements and reset both state cells.
        batch = [element for element in element_state.read()]
        yield batch
        element_state.clear()
        count_state.clear()
    @on_timer(EXPIRY_TIMER)
    def expiry(self, element_state=DoFn.StateParam(ELEMENT_STATE),
               count_state=DoFn.StateParam(COUNT_STATE)):
      # Window has expired: flush whatever partial batch remains buffered.
      batch = [element for element in element_state.read()]
      if batch:
        yield batch
        element_state.clear()
        count_state.clear()
  return _GroupIntoBatchesDoFn()
class ToString(object):
  """
  PTransform for converting a PCollection element, KV or PCollection Iterable
  to string.

  Serves purely as a namespace for the Kvs, Element and Iterables transforms.
  """
  class Kvs(PTransform):
    """
    Transforms each element of the PCollection to a string on the key followed
    by the specific delimiter and the value.
    """
    def __init__(self, delimiter=None):
      # Delimiter placed between key and value; defaults to a comma.
      self.delimiter = delimiter or ","
    def expand(self, pcoll):
      input_type = Tuple[Any, Any]
      output_type = str
      # Fix: transform label was previously misspelled 'KeyVaueToString'.
      return (pcoll | ('%s:KeyValueToString' % self.label >> (Map(
          lambda x: "{}{}{}".format(x[0], self.delimiter, x[1])))
                       .with_input_types(input_type)  # type: ignore[misc]
                       .with_output_types(output_type)))
  class Element(PTransform):
    """
    Transforms each element of the PCollection to a string.
    """
    def expand(self, pcoll):
      input_type = T
      output_type = str
      return (pcoll | ('%s:ElementToString' % self.label >> (Map(
          lambda x: str(x)))
                       .with_input_types(input_type)
                       .with_output_types(output_type)))
  class Iterables(PTransform):
    """
    Transforms each item in the iterable of the input of PCollection to a
    string. There is no trailing delimiter.
    """
    def __init__(self, delimiter=None):
      # Delimiter joined between successive items; defaults to a comma.
      self.delimiter = delimiter or ","
    def expand(self, pcoll):
      input_type = Iterable[Any]
      output_type = str
      return (pcoll | ('%s:IterablesToString' % self.label >> (
          Map(lambda x: self.delimiter.join(str(_x) for _x in x)))
                       .with_input_types(input_type)
                       .with_output_types(output_type)))
class Reify(object):
  """PTransforms for converting between explicit and implicit form of various
  Beam values.

  Serves as a namespace for the Timestamp, Window, TimestampInValue and
  WindowInValue transforms.
  """
  @typehints.with_input_types(T)
  @typehints.with_output_types(T)
  class Timestamp(PTransform):
    """PTransform to wrap a value in a TimestampedValue with its
    associated timestamp."""
    @staticmethod
    def add_timestamp_info(element, timestamp=DoFn.TimestampParam):
      yield TimestampedValue(element, timestamp)
    def expand(self, pcoll):
      return pcoll | ParDo(self.add_timestamp_info)
  @typehints.with_input_types(T)
  @typehints.with_output_types(T)
  class Window(PTransform):
    """PTransform to convert an element in a PCollection into a tuple of
    (element, timestamp, window), wrapped in a TimestampedValue with its
    associated timestamp."""
    @staticmethod
    def add_window_info(element, timestamp=DoFn.TimestampParam,
                        window=DoFn.WindowParam):
      yield TimestampedValue((element, timestamp, window), timestamp)
    def expand(self, pcoll):
      return pcoll | ParDo(self.add_window_info)
  @typehints.with_input_types(Tuple[K, V])
  @typehints.with_output_types(Tuple[K, V])
  class TimestampInValue(PTransform):
    """PTransform to wrap the Value in a KV pair in a TimestampedValue with
    the element's associated timestamp."""
    @staticmethod
    def add_timestamp_info(element, timestamp=DoFn.TimestampParam):
      # The key stays bare; only the value is wrapped.
      key, value = element
      yield (key, TimestampedValue(value, timestamp))
    def expand(self, pcoll):
      return pcoll | ParDo(self.add_timestamp_info)
  @typehints.with_input_types(Tuple[K, V])
  @typehints.with_output_types(Tuple[K, V])
  class WindowInValue(PTransform):
    """PTransform to convert the Value in a KV pair into a tuple of
    (value, timestamp, window), with the whole element being wrapped inside a
    TimestampedValue."""
    @staticmethod
    def add_window_info(element, timestamp=DoFn.TimestampParam,
                        window=DoFn.WindowParam):
      key, value = element
      yield TimestampedValue((key, (value, timestamp, window)), timestamp)
    def expand(self, pcoll):
      return pcoll | ParDo(self.add_window_info)
class Regex(object):
"""
PTransform to use Regular Expression to process the elements in a
PCollection.
"""
ALL = "__regex_all_groups"
@staticmethod
def _regex_compile(regex):
"""Return re.compile if the regex has a string value"""
if isinstance(regex, str):
regex = re.compile(regex)
return regex
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(str)
@ptransform_fn
def matches(pcoll, regex, group=0):
"""
Returns the matches (group 0 by default) if zero or more characters at the
beginning of string match the regular expression. To match the entire
string, add "$" sign at the end of regex expression.
Group can be integer value or a string value.
Args:
regex: the regular expression string or (re.compile) pattern.
group: (optional) name/number of the group, it can be integer or a string
value. Defaults to 0, meaning the entire matched string will be
returned.
"""
regex = Regex._regex_compile(regex)
def _process(element):
m = regex.match(element)
if m:
yield m.group(group)
return pcoll | FlatMap(_process)
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(List[str])
@ptransform_fn
def all_matches(pcoll, regex):
"""
Returns all matches (groups) if zero or more characters at the beginning
of string match the regular expression.
Args:
regex: the regular expression string or (re.compile) pattern.
"""
regex = Regex._regex_compile(regex)
def _process(element):
m = regex.match(element)
if m:
yield [m.group(ix) for ix in range(m.lastindex + 1)]
return pcoll | FlatMap(_process)
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(Tuple[str, str])
@ptransform_fn
def matches_kv(pcoll, regex, keyGroup, valueGroup=0):
"""
Returns the KV pairs if the string matches the regular expression, deriving
the key & value from the specified group of the regular expression.
Args:
regex: the regular expression string or (re.compile) pattern.
keyGroup: The Regex group to use as the key. Can be int or str.
valueGroup: (optional) Regex group to use the value. Can be int or str.
The default value "0" returns entire matched string.
"""
regex = Regex._regex_compile(regex)
def _process(element):
match = regex.match(element)
if match:
yield (match.group(keyGroup), match.group(valueGroup))
return pcoll | FlatMap(_process)
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(str)
@ptransform_fn
def find(pcoll, regex, group=0):
"""
Returns the matches if a portion of the line matches the Regex. Returns
the entire group (group 0 by default). Group can be integer value or a
string value.
Args:
regex: the regular expression string or (re.compile) pattern.
group: (optional) name of the group, it can be integer or a string value.
"""
regex = Regex._regex_compile(regex)
def _process(element):
r = regex.search(element)
if r:
yield r.group(group)
return pcoll | FlatMap(_process)
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(Union[List[str], Tuple[str, str]])
@ptransform_fn
def find_all(pcoll, regex, group=0, outputEmpty=True):
"""
Returns the matches if a portion of the line matches the Regex. By default,
list of group 0 will return with empty items. To get all groups, pass the
`Regex.ALL` flag in the `group` parameter which returns all the groups in
the tuple format.
Args:
regex: the regular expression string or (re.compile) pattern.
group: (optional) name of the group, it can be integer or a string value.
outputEmpty: (optional) Should empty be output. True to output empties
and false if not.
"""
regex = Regex._regex_compile(regex)
def _process(element):
matches = regex.finditer(element)
if group == Regex.ALL:
yield [(m.group(), m.groups()[0]) for m in matches if outputEmpty
or m.groups()[0]]
else:
yield [m.group(group) for m in matches if outputEmpty or m.group(group)]
return pcoll | FlatMap(_process)
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(Tuple[str, str])
@ptransform_fn
def find_kv(pcoll, regex, keyGroup, valueGroup=0):
"""
Returns the matches if a portion of the line matches the Regex. Returns the
specified groups as the key and value pair.
Args:
regex: the regular expression string or (re.compile) pattern.
keyGroup: The Regex group to use as the key. Can be int or str.
valueGroup: (optional) Regex group to use the value. Can be int or str.
The default value "0" returns entire matched string.
"""
regex = Regex._regex_compile(regex)
def _process(element):
matches = regex.finditer(element)
if matches:
for match in matches:
yield (match.group(keyGroup), match.group(valueGroup))
return pcoll | FlatMap(_process)
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(str)
@ptransform_fn
def replace_all(pcoll, regex, replacement):
  """
  Substitutes the replacement string for every match of the regex in
  each input line.

  Args:
    regex: the regular expression string or (re.compile) pattern.
    replacement: the string to be substituted for each match.
  """
  pattern = Regex._regex_compile(regex)

  def _substitute(line):
    return pattern.sub(replacement, line)

  return pcoll | Map(_substitute)
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(str)
@ptransform_fn
def replace_first(pcoll, regex, replacement):
  """
  Substitutes the replacement string for only the first match of the
  regex in each input line.

  Args:
    regex: the regular expression string or (re.compile) pattern.
    replacement: the string to be substituted for each match.
  """
  pattern = Regex._regex_compile(regex)
  # count=1 limits the substitution to the first occurrence.
  return pcoll | Map(lambda line: pattern.sub(replacement, line, 1))
@staticmethod
@typehints.with_input_types(str)
@typehints.with_output_types(List[str])
@ptransform_fn
def split(pcoll, regex, outputEmpty=False):
  """
  Splits each input line on the matches of the regular expression and
  yields the resulting list. Empty items are dropped by default.

  Args:
    regex: the regular expression string or (re.compile) pattern.
    outputEmpty: (optional) Should empty be output. True to output empties
      and false if not.
  """
  pattern = Regex._regex_compile(regex)
  keep_empty = bool(outputEmpty)

  def _split_line(line):
    pieces = pattern.split(line)
    if pieces and not keep_empty:
      pieces = [piece for piece in pieces if piece]
    yield pieces

  return pcoll | FlatMap(_split_line)
| |
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import signals
from djangotoolbox.fields import ListField
from djangotoolbox.utils import getattr_by_path
from copy import deepcopy
import re
import string
# Character classes used to strip punctuation from text before splitting
# into keywords. At indexing time '-', '_' and '#' are kept (default_splitter
# later combines hyphenated words); at search time only '_' and '#' survive,
# so '-' acts as a plain separator in queries.
_PUNCTUATION_REGEX = re.compile(
    '[' + re.escape(string.punctuation.replace('-', '').replace(
        '_', '').replace('#', '')) + ']')
_PUNCTUATION_SEARCH_REGEX = re.compile(
    '[' + re.escape(string.punctuation.replace('_', '').replace(
        '#', '')) + ']')
# Various base indexers
def startswith(words, indexing, **kwargs):
    """Allows for word prefix search."""
    if not indexing:
        # Search mode: the query terms are matched exactly.
        return words
    # Indexing mode: store every prefix of every word
    # ('h', 'he', ..., 'hello'), with stray hyphens trimmed off the ends.
    prefixes = []
    for word in words:
        for end in range(1, len(word) + 1):
            prefixes.append(word[:end].strip(u'-'))
    return prefixes
def porter_stemmer(words, language, **kwargs):
    """Stem every word with a Porter stemmer for the given language.

    The exact language code is tried first (e.g. 'pt-br'), then the base
    language ('pt'), then English as a last resort. Previously a bare
    ``except`` swallowed every error and, when no stemmer module could be
    imported at all, the final loop left ``stem`` undefined and the first
    use raised a NameError; now only import/lookup failures are caught and
    the words are returned unchanged when no stemmer is available.
    """
    languages = [language]
    if '-' in language:
        languages.append(language.split('-')[0])
    # Fall back to English
    languages.append('en')
    # Find a stemmer for this language
    stem = None
    for language in languages:
        try:
            stem = __import__('search.porter_stemmers.%s' % language,
                              {}, {}, ['']).stem
        except (ImportError, AttributeError):
            continue
        break
    if stem is None:
        # No stemmer module could be loaded: pass the words through
        # untouched instead of crashing.
        return list(words)
    return [stem(word) for word in words]
# Per-language stop-word sets, keyed by base language code. These terms are
# removed from search queries by non_stop() (indexing keeps them).
stop_words = {
    'en': set(('a', 'an', 'and', 'or', 'the', 'these', 'those', 'whose', 'to')),
    'de': set(('ein', 'eine', 'eines', 'einer', 'einem', 'einen', 'den',
               'der', 'die', 'das', 'dieser', 'dieses', 'diese', 'diesen',
               'deren', 'und', 'oder'))
}
def get_stop_words(language):
    """Return the stop-word set for a language code, falling back to the
    base language for regional variants (e.g. 'en-gb' -> 'en')."""
    key = language
    if key not in stop_words and '-' in key:
        key = key.split('-', 1)[0]
    return stop_words.get(key, set())
def non_stop(words, indexing, language, **kwargs):
    """Removes stop words from search query."""
    if indexing:
        # Stop words stay in the index; they are only dropped from queries.
        return words
    remaining = set(words) - get_stop_words(language)
    return list(remaining)
def porter_stemmer_non_stop(words, **kwargs):
    """Combines porter_stemmer with non_stop."""
    # Stop words are filtered first (query mode only), then the remaining
    # words are stemmed.
    return porter_stemmer(non_stop(words, **kwargs), **kwargs)
# Language handler
def site_language(instance, **kwargs):
    """Default language handler: derive the language from a 'language' or
    'lang' attribute on the model instance, falling back to the site-wide
    default language."""
    for attr in ('language', 'lang'):
        if hasattr(instance, attr):
            return getattr(instance, attr)
    # Fall back to default language
    return settings.LANGUAGE_CODE
def default_splitter(text, indexing=False, **kwargs):
    """
    Returns an array of keywords, that are included
    in query. All character besides of letters, numbers
    and '_' are split characters. The character '-' is a special
    case: two words separated by '-' create an additional keyword
    consisting of both words without separation (see example).

    Examples:
    - text='word1/word2 word3'
      returns ['word1', 'word2', word3]
    - text='word1/word2-word3'
      returns ['word1', 'word2', 'word3', 'word2word3']
    """
    if not text:
        return []
    lowered = text.lower()
    if not indexing:
        # Query mode: '-' is just another separator.
        return _PUNCTUATION_SEARCH_REGEX.sub(u' ', lowered).split()
    # Indexing mode: '-' survives the punctuation strip so that hyphenated
    # words can additionally be indexed as their joined combinations.
    keywords = []
    for word in set(_PUNCTUATION_REGEX.sub(u' ', lowered).split()):
        if not word:
            continue
        if '-' in word:
            keywords.extend(get_word_combinations(word))
        else:
            keywords.append(word)
    return keywords
def get_word_combinations(word):
    """
    'one-two-three'
    =>
    ['one', 'two', 'three', 'onetwo', 'twothree', 'onetwothree']
    """
    # Split on '-' and drop empty fragments (e.g. from leading/double dashes).
    parts = [piece for piece in word.split(u'-') if piece]
    total = len(parts)
    combos = []
    # Shorter runs first: all 1-part joins, then 2-part, and so on.
    for length in range(1, total + 1):
        for start in range(total - length + 1):
            combos.append(u''.join(parts[start:start + length]))
    return combos
# Wraps an object so that dict-style item access delegates to attribute
# access; used by IndexField.pre_save so should_index() can treat a model
# instance like the values dict it normally receives.
class DictEmu(object):
    def __init__(self, data):
        # data: the wrapped object (typically a model instance).
        self.data = data
    def __getitem__(self, key):
        return getattr(self.data, key)
# IndexField is a (String)ListField storing indexed fields of a model_instance
class IndexField(ListField):
    """(String)ListField holding the indexed keywords of a model instance,
    rebuilt from the manager's configuration on every save."""

    def __init__(self, search_manager, *args, **kwargs):
        self.search_manager = search_manager
        kwargs['item_field'] = models.CharField(max_length=500)
        kwargs['editable'] = False
        super(IndexField, self).__init__(*args, **kwargs)

    def pre_save(self, model_instance, add):
        manager = self.search_manager
        # Skip instances the manager's filters exclude from indexing.
        if manager.filters and not manager.should_index(DictEmu(model_instance)):
            return []
        language = manager.language
        if callable(language):
            language = language(model_instance, property=self)
        index = []
        for field_name in manager.fields_to_index:
            values = getattr_by_path(model_instance, field_name, None)
            # Normalize to a tuple so single values and lists index alike.
            if not values:
                values = ()
            elif not isinstance(values, (list, tuple)):
                values = (values,)
            for value in values:
                index.extend(manager.splitter(value, indexing=True,
                                              language=language))
        if manager.indexer:
            index = manager.indexer(index, indexing=True, language=language)
        # Sort index to make debugging easier
        setattr(model_instance, manager.search_list_field_name,
                sorted(set(index)))
        return index
class SearchManager(models.Manager):
    """
    Simple full-text manager adding a search function.

    If "relation_index" is True the index will be stored in a separate entity.
    With "integrate" you can add fields to your relation index,
    so they can be searched, too.
    With "filters" you can specify when a values index should be created.
    """
    def __init__(self, fields_to_index, indexer=None, splitter=default_splitter,
                 relation_index=True, integrate='*', filters=None,
                 language=site_language, **kwargs):
        # integrate should be specified when using the relation index otherwise
        # we duplicate the amount of data in the datastore and the relation
        # index makes no sense any more
        # TODO: filters has to be extended (maybe a function) to allow Django's
        # QuerySet methods like exclude
        if integrate is None:
            integrate = ()
        if integrate == '*' and not relation_index:
            integrate = ()
        if isinstance(fields_to_index, basestring):
            fields_to_index = (fields_to_index,)
        self.fields_to_index = fields_to_index
        if isinstance(integrate, basestring):
            integrate = (integrate,)
        # `filters` previously defaulted to a mutable `{}` shared between
        # every SearchManager; use None as the sentinel instead.
        self.filters = {} if filters is None else filters
        self.integrate = integrate
        self.splitter = splitter
        self.indexer = indexer
        self.language = language
        self.relation_index = relation_index
        if len(fields_to_index) == 0:
            raise ValueError('No fields specified for index!')
        # search_list_field_name will be set if no relation_index is used that
        # is for relation_index=False or for the relation_index_model itself
        self.search_list_field_name = ''
        super(SearchManager, self).__init__(**kwargs)

    def contribute_to_class(self, model, name):
        """Install the manager on the model and, when no relation index is
        used, also add the IndexField storing keywords on the model itself."""
        super(SearchManager, self).contribute_to_class(model, name)
        # set default_manager to None such that the default_manager will be
        # set to 'objects' via the class-prepared signal calling
        # ensure_default_manager
        # setattr(model, '_default_manager', None)
        self.name = name
        # add IndexField to the model if we do not use the relation_index
        if not self.relation_index:
            self.search_list_field_name = "%s_search_list_field" % name
            # Add field to class dynamically
            setattr(model, self.search_list_field_name, IndexField(self))
            getattr(model, self.search_list_field_name).contribute_to_class(
                model, self.search_list_field_name)

    def filter(self, values):
        """
        Returns a query for the given values (creates '=' filters for the
        IndexField). Additional filters can be applied afterwards via
        chaining.
        """
        if not isinstance(values, (tuple, list)):
            values = (values,)
        filtered = self.model.objects.all()
        for value in set(values):
            # Renamed from `filter` to avoid shadowing the builtin.
            lookup = {self.search_list_field_name: value}
            filtered = filtered.filter(**lookup)
        return filtered

    def _search(self, query, indexer=None, splitter=None,
                language=settings.LANGUAGE_CODE):
        """Split (and optionally post-process) the query, then filter the
        index on the resulting keywords."""
        if not splitter:
            splitter = default_splitter
        words = splitter(query, indexing=False, language=language)
        if indexer:
            words = indexer(words, indexing=False, language=language)
        # Optimize query
        words = set(words)
        if len(words) >= 4:
            words -= get_stop_words(language)
        # Don't allow empty queries
        if not words and query:
            # This query will never find anything.
            # NOTE(review): QuerySet.filter() normally takes **kwargs; confirm
            # that passing a dict positionally works with this backend.
            return self.filter(()).filter({self.search_list_field_name: ' '})
        return self.filter(sorted(words))

    def should_index(self, values):
        """Return True when the values dict passes every configured filter
        and should therefore be indexed."""
        # Check if filter doesn't match
        if not values:
            return False
        for filter_expr, value in self.filters.items():
            attr, op = filter_expr, 'exact'
            if '__' in filter_expr:
                attr, op = filter_expr.rsplit('__', 1)
            op = op.lower()
            if (op == 'exact' and values[attr] != value or
                    # op == '!=' and values[attr] == value or
                    op == 'in' and values[attr] not in value or
                    op == 'lt' and values[attr] >= value or
                    op == 'lte' and values[attr] > value or
                    op == 'gt' and values[attr] <= value or
                    op == 'gte' and values[attr] < value):
                return False
            elif op not in ('exact', 'in', 'lt', 'lte', 'gte', 'gt'):
                raise ValueError('Invalid search index filter: %s %s' %
                                 (filter_expr, value))
        return True

    # @commit_locked
    def update_relation_index(self, parent_pk, delete=False):
        """Create, update or delete the relation-index entity mirroring the
        parent instance identified by parent_pk."""
        relation_index_model = self._relation_index_model
        try:
            index = relation_index_model.objects.get(pk=parent_pk)
        except ObjectDoesNotExist:
            index = None
        # Initialized up-front so the should_index() call below can never hit
        # an undefined name, whichever branch runs.
        values = None
        if not delete:
            try:
                parent = self.model.objects.get(pk=parent_pk)
            except ObjectDoesNotExist:
                parent = None
            if parent:
                values = self.get_index_values(parent)
        # Remove index if it's not needed, anymore
        if delete or not self.should_index(values):
            if index:
                index.delete()
            return
        # Update/create index
        if not index:
            index = relation_index_model(pk=parent_pk, **values)
        # This guarantees that we also set virtual @properties
        for key, value in values.items():
            setattr(index, key, value)
        index.save()

    def create_index_model(self):
        """Dynamically build the RelationIndex_* model class holding the
        integrated fields plus the search index for this manager."""
        attrs = dict(__module__=self.__module__)
        # By default we integrate everything when using relation index;
        # the manager will add the IndexField to the relation index
        # automatically.
        if self.integrate == ('*',):
            self.integrate = tuple(field.name
                                   for field in self.model._meta.fields
                                   if not isinstance(field, IndexField))
        for field_name in self.integrate:
            field = self.model._meta.get_field_by_name(field_name)[0]
            field = deepcopy(field)
            attrs[field_name] = field
            if isinstance(field, models.ForeignKey):
                attrs[field_name].rel.related_name = '_sidx_%s_%s_%s_set_' % (
                    self.model._meta.object_name.lower(),
                    self.name, field_name,
                )
        owner = self

        def __init__(self, *args, **kwargs):
            # Save some space: don't copy the whole indexed text into the
            # relation index field unless the field gets integrated.
            field_names = [field.name for field in self._meta.fields]
            owner_field_names = [field.name
                                 for field in owner.model._meta.fields]
            # Iterate over a snapshot: deleting from kwargs while iterating
            # kwargs.items() directly raises RuntimeError on Python 3.
            for key, value in list(kwargs.items()):
                if key in field_names or key not in owner_field_names:
                    continue
                setattr(self, key, value)
                del kwargs[key]
            models.Model.__init__(self, *args, **kwargs)
        attrs['__init__'] = __init__
        self._relation_index_model = type(
            ('RelationIndex_%s_%s_%s' % (self.model._meta.app_label,
                                         self.model._meta.object_name,
                                         self.name)).encode('ascii', 'ignore'),
            (models.Model,), attrs)
        self._relation_index_model.add_to_class(self.name, SearchManager(
            self.fields_to_index, splitter=self.splitter, indexer=self.indexer,
            language=self.language, relation_index=False))

    def get_index_values(self, parent):
        """Collect the values of all indexed, integrated and filtered fields
        from the parent instance, keyed as the index model expects."""
        filters = []
        for filter_expr in self.filters.keys():
            if '__' in filter_expr:
                filters.append(filter_expr.rsplit('__')[0])
            else:
                filters.append(filter_expr)
        filters = tuple(filters)
        values = {}
        for field_name in set(self.fields_to_index + self.integrate + filters):
            field = self.model._meta.get_field_by_name(field_name)[0]
            if isinstance(field, models.ForeignKey):
                value = field.pre_save(parent, False)
            else:
                value = getattr(parent, field_name)
            # The primary indexed field may hold a list; store it sorted so
            # the index stays stable.
            if field_name == self.fields_to_index[0] and \
                    isinstance(value, (list, tuple)):
                value = sorted(value)
            if isinstance(field, models.ForeignKey):
                values[field.column] = value
            else:
                values[field_name] = value
        return values

    def search(self, query, language=settings.LANGUAGE_CODE):
        """Run a full-text search, going through the relation index when one
        is configured."""
        if self.relation_index:
            items = getattr(self._relation_index_model, self.name).search(
                query, language=language).values('pk')
            return RelationIndexQuery(self.model, items)
        return self._search(query, splitter=self.splitter,
                            indexer=self.indexer, language=language)
def load_backend():
    """Import and return the search backend module named in
    settings.SEARCH_BACKEND (immediate updates by default)."""
    path = getattr(settings, 'SEARCH_BACKEND',
                   'search.backends.immediate_update')
    # A dotted path needs a non-empty fromlist so __import__ returns the
    # leaf module instead of the top-level package.
    fromlist = [path.rsplit('.', 1)[1]] if '.' in path else []
    return __import__(path, globals(), locals(), fromlist)
def post(delete, sender, instance, **kwargs):
    """Forward a save/delete of *instance* to the backend for every
    relation-index-backed SearchManager on the sender model."""
    for counter, manager_name, manager in sender._meta.concrete_managers:
        if isinstance(manager, SearchManager) and manager.relation_index:
            load_backend().update_relation_index(manager, instance.pk, delete)
def post_save(sender, instance, **kwargs):
    # Signal receiver: refresh the relation index after an instance is saved.
    post(False, sender, instance, **kwargs)
def post_delete(sender, instance, **kwargs):
    # Signal receiver: drop the instance from the relation index on deletion.
    post(True, sender, instance, **kwargs)
def install_index_model(sender, **kwargs):
    """Create the relation-index models for a prepared model class and wire
    up the save/delete signal receivers when any manager needs them."""
    # what to do for abstract_managers?
    has_relation_index = False
    for counter, manager_name, manager in sender._meta.concrete_managers:
        if isinstance(manager, SearchManager) and manager.relation_index:
            manager.create_index_model()
            has_relation_index = True
    if has_relation_index:
        signals.post_save.connect(post_save, sender=sender)
        signals.post_delete.connect(post_delete, sender=sender)
#signals.class_prepared.connect(install_index_model)
class QueryTraits(object):
    """Mixin adding iteration, len() and get() on top of a subclass's
    slicing and count() support."""

    def __iter__(self):
        # Iteration is capped at the first 301 results.
        return iter(self[:301])

    def __len__(self):
        return self.count()

    def get(self, *args, **kwargs):
        matches = self[:1]
        if matches:
            return matches[0]
        raise ObjectDoesNotExist
class RelationIndexQuery(QueryTraits):
    """Query over the relation index that resolves the matching pks back
    into instances of the target model."""

    def __init__(self, model, query):
        self.model = model
        self.query = query

    def order_by(self, *args, **kwargs):
        self.query = self.query.order_by(*args, **kwargs)
        return self

    def filter(self, *args, **kwargs):
        self.query = self.query.filter(*args, **kwargs)
        return self

    def __getitem__(self, index):
        if isinstance(index, slice):
            window = index
        else:
            # Single position: fetch the prefix up to it, then pick it out
            # of the resolved queryset below.
            window = slice(None, index + 1, None)
        pks = []
        for entry in self.query[window]:
            pks.append(entry.pk if isinstance(entry, models.Model)
                       else entry['pk'])
        resolved = self.model.objects.filter(pk__in=pks)
        if isinstance(index, slice):
            return resolved[window]
        return resolved[index]
        # return [item for item in self.model.objects.filter(
        #     pk__in=pks) if item]

    def count(self):
        return self.query.count()

    # TODO: add keys_only query
    # def values(self, fields):
    #     pass
def search(model, query, language=settings.LANGUAGE_CODE,
           search_index='search_index'):
    # Convenience wrapper: run a search through the model's search manager
    # (the manager installed under `search_index`, 'search_index' by default).
    return getattr(model, search_index).search(query, language)
| |
import math
from mock import Mock, patch
from elasticmagic import agg, Params, Term, Document, DynamicDocument
from elasticmagic.types import Integer, Boolean, List
from elasticmagic.expression import Field
from .base import BaseTestCase
class AggregationTest(BaseTestCase):
def test_aggs(self):
f = DynamicDocument.fields
a = agg.AggExpression()
self.assertRaises(NotImplementedError, a.build_agg_result, {})
a = agg.Avg(f.price)
self.assert_expression(
a,
{
"avg": {"field": "price"}
}
)
res = a.build_agg_result({
'value': 75.3
})
self.assertAlmostEqual(res.value, 75.3)
res = a.build_agg_result({
'value': None
})
self.assertIs(res.value, None)
aa = a.clone()
self.assertIsNot(a, aa)
self.assertEqual(a.__visit_name__, aa.__visit_name__)
self.assertEqual(a.params, aa.params)
a = agg.Min(f.price)
self.assert_expression(
a,
{
"min": {"field": "price"}
}
)
res = a.build_agg_result({
'value': 38
})
self.assertAlmostEqual(res.value, 38)
res = a.build_agg_result({
'value': 1297167619690,
'value_as_string': '2011-02-08T12:20:19.690Z'
})
self.assertAlmostEqual(res.value, 1297167619690)
a = agg.Max(f.price)
self.assert_expression(
a,
{
"max": {"field": "price"}
}
)
res = a.build_agg_result({
'value': 45693.5
})
self.assertAlmostEqual(res.value, 45693.5)
a = agg.Stats(f.grade)
self.assert_expression(
a,
{
"stats": {"field": "grade"}
}
)
a = a.build_agg_result(
{
"count": 6,
"min": 60,
"max": 98,
"avg": 78.5,
"sum": 471
}
)
self.assertEqual(a.count, 6)
self.assertEqual(a.min, 60)
self.assertEqual(a.max, 98)
self.assertAlmostEqual(a.avg, 78.5)
self.assertEqual(a.sum, 471)
a = agg.ExtendedStats(f.grade)
self.assert_expression(
a,
{
"extended_stats": {"field": "grade"}
}
)
a = a.build_agg_result(
{
"count": 6,
"min": 72,
"max": 117.6,
"avg": 94.2,
"sum": 565.2,
"sum_of_squares": 54551.51999999999,
"variance": 218.2799999999976,
"std_deviation": 14.774302013969987
}
)
self.assertEqual(a.count, 6)
self.assertEqual(a.min, 72)
self.assertAlmostEqual(a.max, 117.6)
self.assertAlmostEqual(a.avg, 94.2)
self.assertAlmostEqual(a.sum, 565.2)
self.assertAlmostEqual(a.sum_of_squares, 54551.51999999999)
self.assertAlmostEqual(a.variance, 218.2799999999976)
self.assertAlmostEqual(a.std_deviation, 14.774302013969987)
percentiles_agg = agg.Percentiles(f.load_time, percents=[95, 99, 99.9])
self.assert_expression(
percentiles_agg,
{
"percentiles": {
"field": "load_time",
"percents": [95, 99, 99.9]
}
}
)
a = percentiles_agg.build_agg_result(
{
"values": {
"95.0": 60,
"99.0": 150,
"99.9": 153,
}
}
)
self.assertEqual(
a.values,
[(95.0, 60), (99.0, 150), (99.9, 153)],
)
self.assertEqual(a.get_value(95), 60)
self.assertEqual(a.get_value(95.0), 60)
self.assertEqual(a.get_value(99), 150)
self.assertEqual(a.get_value(99.0), 150)
self.assertEqual(a.get_value(99.9), 153)
a = percentiles_agg.build_agg_result(
{
"values": {
"95.0": 60,
"95.0_as_string": "60",
"99.0": 150,
"99.0_as_string": "150",
"99.9": 153,
"99.9_as_string": "153",
}
}
)
self.assertEqual(
a.values,
[(95.0, 60), (99.0, 150), (99.9, 153)],
)
self.assertEqual(a.get_value(95), 60)
self.assertEqual(a.get_value(95.0), 60)
self.assertEqual(a.get_value(99), 150)
self.assertEqual(a.get_value(99.0), 150)
self.assertEqual(a.get_value(99.9), 153)
percentiles_agg = agg.Percentiles(f.load_time, percents=[50])
self.assert_expression(
percentiles_agg,
{
"percentiles": {
"field": "load_time",
"percents": [50]
}
}
)
a = percentiles_agg.build_agg_result(
{
"values": {
"50.0": "NaN",
}
}
)
self.assertEqual(
len(a.values),
1
)
self.assertAlmostEqual(
a.values[0][0],
50.0
)
self.assertTrue(
math.isnan(a.values[0][1])
)
self.assertTrue(math.isnan(a.get_value(50)))
self.assertTrue(math.isnan(a.get_value(50.0)))
ranks_agg = agg.PercentileRanks(f.load_time, values=[14.8, 30])
self.assert_expression(
ranks_agg,
{
"percentile_ranks": {
"field": "load_time",
"values": [14.8, 30.0]
}
}
)
a = ranks_agg.build_agg_result(
{
"values": {
"14.8": 12.32,
"30": 100,
}
}
)
self.assertEqual(
a.values,
[(14.8, 12.32), (30.0, 100)],
)
self.assertEqual(
a.values,
[(14.8, 12.32), (30.0, 100)],
)
self.assertAlmostEqual(a.get_percent(14.8), 12.32)
self.assertAlmostEqual(a.get_percent(13.7 + 1.1), 12.32)
self.assertAlmostEqual(a.get_percent(30), 100.0)
self.assertAlmostEqual(a.get_percent(30.0), 100.0)
a = ranks_agg.build_agg_result(
{
"values": {
"14.8": 12.32,
"14.8_as_string": "12.32",
"30": 100,
"30_as_string": "100",
}
}
)
self.assertEqual(
a.values,
[(14.8, 12.32), (30.0, 100)],
)
self.assertEqual(
a.values,
[(14.8, 12.32), (30.0, 100)],
)
self.assertAlmostEqual(a.get_percent(14.8), 12.32)
self.assertAlmostEqual(a.get_percent(13.7 + 1.1), 12.32)
self.assertAlmostEqual(a.get_percent(30), 100.0)
self.assertAlmostEqual(a.get_percent(30.0), 100.0)
a = agg.Cardinality(f.author, precision_threshold=100)
self.assert_expression(
a,
{
"cardinality": {
"field": "author",
"precision_threshold": 100
}
}
)
a = a.build_agg_result(
{
"value": 184
}
)
self.assertEqual(a.value, 184)
a = agg.Global()
self.assert_expression(a, {"global": {}})
a = a.build_agg_result(
{"doc_count": 185}
)
self.assertEqual(a.doc_count, 185)
a = agg.Filter(f.company == 1)
self.assert_expression(a, {"filter": {"term": {"company": 1}}})
a2 = a.clone()
self.assertIsNot(a, a2)
self.assert_expression(a2, {"filter": {"term": {"company": 1}}})
a = a.build_agg_result(
{"doc_count": 148}
)
self.assertEqual(a.doc_count, 148)
a = agg.Terms(f.status)
self.assert_expression(
a,
{
"terms": {"field": "status"}
}
)
a1 = a.clone()
self.assertIsNot(a, a1)
a = a.build_agg_result(
{
'buckets': [
{'doc_count': 7353499, 'key': 0},
{'doc_count': 2267139, 'key': 1},
{'doc_count': 1036951, 'key': 4},
{'doc_count': 438384, 'key': 2},
{'doc_count': 9594, 'key': 3},
{'doc_count': 46, 'key': 5}
]
}
)
self.assertEqual(len(a.buckets), 6)
self.assertEqual(list(iter(a)), a.buckets)
self.assertEqual(a.buckets[0].key, 0)
self.assertEqual(a.buckets[0].doc_count, 7353499)
self.assertEqual(repr(a.buckets[0]), '<Bucket key=0 doc_count=7353499>')
self.assertIs(a.buckets[0], a.get_bucket(0))
self.assertEqual(a.buckets[1].key, 1)
self.assertEqual(a.buckets[1].doc_count, 2267139)
self.assertIs(a.buckets[1], a.get_bucket(1))
self.assertEqual(repr(a.buckets[1]), '<Bucket key=1 doc_count=2267139>')
self.assertEqual(a.buckets[2].key, 4)
self.assertEqual(a.buckets[2].doc_count, 1036951)
self.assertIs(a.buckets[2], a.get_bucket(4))
self.assertEqual(repr(a.buckets[2]), '<Bucket key=4 doc_count=1036951>')
self.assertEqual(a.buckets[3].key, 2)
self.assertEqual(a.buckets[3].doc_count, 438384)
self.assertIs(a.buckets[3], a.get_bucket(2))
self.assertEqual(repr(a.buckets[3]), '<Bucket key=2 doc_count=438384>')
self.assertEqual(a.buckets[4].key, 3)
self.assertEqual(a.buckets[4].doc_count, 9594)
self.assertIs(a.buckets[4], a.get_bucket(3))
self.assertEqual(repr(a.buckets[4]), '<Bucket key=3 doc_count=9594>')
self.assertEqual(a.buckets[5].key, 5)
self.assertEqual(a.buckets[5].doc_count, 46)
self.assertIs(a.buckets[5], a.get_bucket(5))
self.assertEqual(repr(a.buckets[5]), '<Bucket key=5 doc_count=46>')
a = agg.Terms(f.is_visible, type=Boolean)
self.assert_expression(
a,
{
"terms": {"field": "is_visible"}
}
)
a = a.build_agg_result(
{
'buckets': [
{'doc_count': 7, 'key': 'T'},
{'doc_count': 2, 'key': 'F'},
]
}
)
self.assertEqual(len(a.buckets), 2)
self.assertEqual(a.buckets[0].key, True)
self.assertEqual(a.buckets[0].doc_count, 7)
self.assertIs(a.buckets[0], a.get_bucket(True))
self.assertEqual(a.buckets[1].key, False)
self.assertEqual(a.buckets[1].doc_count, 2)
self.assertIs(a.buckets[1], a.get_bucket(False))
a = agg.Terms(f.category, type=List(Integer))
self.assert_expression(
a,
{
"terms": {"field": "category"}
}
)
a = a.build_agg_result(
{
'buckets': [
{'doc_count': 792, 'key': 28},
{'doc_count': 185, 'key': 3},
]
}
)
self.assertEqual(len(a.buckets), 2)
self.assertEqual(a.buckets[0].key, 28)
self.assertEqual(a.buckets[0].doc_count, 792)
self.assertIs(a.buckets[0], a.get_bucket(28))
self.assertEqual(a.buckets[1].key, 3)
self.assertEqual(a.buckets[1].doc_count, 185)
self.assertIs(a.buckets[1], a.get_bucket(3))
class ProductDocument(Document):
is_visible = Field(Boolean)
a = agg.Terms(ProductDocument.is_visible)
self.assert_expression(
a,
{
"terms": {"field": "is_visible"}
}
)
a = a.build_agg_result(
{
'buckets': [
{'doc_count': 7, 'key': 'T'},
{'doc_count': 2, 'key': 'F'},
]
}
)
self.assertEqual(len(a.buckets), 2)
self.assertEqual(a.buckets[0].key, True)
self.assertEqual(a.buckets[0].doc_count, 7)
self.assertIs(a.buckets[0], a.get_bucket(True))
self.assertEqual(a.buckets[1].key, False)
self.assertEqual(a.buckets[1].doc_count, 2)
self.assertIs(a.buckets[1], a.get_bucket(False))
a = agg.SignificantTerms(f.crime_type)
self.assert_expression(
a,
{
"significant_terms": {"field": "crime_type"}
}
)
a = a.build_agg_result(
{
"doc_count": 47347,
"buckets" : [
{
"key": "Bicycle theft",
"doc_count": 3640,
"score": 0.371,
"bg_count": 66799,
},
{
"key": "Mobile phone theft",
"doc_count": 27617,
"score": 0.0599,
"bg_count": 53182,
}
]
}
)
self.assertEqual(len(a.buckets), 2)
self.assertEqual(a.buckets[0].key, 'Bicycle theft')
self.assertEqual(a.buckets[0].doc_count, 3640)
self.assertAlmostEqual(a.buckets[0].score, 0.371)
self.assertEqual(a.buckets[0].bg_count, 66799)
self.assertIs(a.buckets[0], a.get_bucket('Bicycle theft'))
self.assertEqual(a.buckets[1].key, 'Mobile phone theft')
self.assertEqual(a.buckets[1].doc_count, 27617)
self.assertAlmostEqual(a.buckets[1].score, 0.0599)
self.assertEqual(a.buckets[1].bg_count, 53182)
self.assertIs(a.buckets[1], a.get_bucket('Mobile phone theft'))
a = agg.Range(
f.price,
ranges=[{'to': 200}, {'from': 200, 'to': 1000}, {'from': 1000}],
type=Integer,
)
self.assert_expression(
a,
{
"range": {
"field": "price",
"ranges": [
{"to": 200},
{"from": 200, "to": 1000},
{"from": 1000}
]
}
}
)
a1 = a.clone()
self.assertIsNot(a1, a)
a = a.build_agg_result(
{
"buckets": [
{
"to": 200,
"doc_count": 12
},
{
"from": 200,
"to": 1000,
"doc_count": 197
},
{
"from": 1000,
"doc_count": 8
}
]
}
)
self.assertEqual(len(a.buckets), 3)
self.assertEqual(a.buckets[0].doc_count, 12)
self.assertEqual(a.buckets[1].doc_count, 197)
self.assertEqual(a.buckets[2].doc_count, 8)
a = agg.Filters([Term(f.body, 'error'), Term(f.body, 'warning')])
self.assert_expression(
a,
{
"filters": {
"filters": [
{"term": {"body": "error"}},
{"term": {"body": "warning"}}
]
}
}
)
a = a.build_agg_result(
{
"buckets": [
{
"doc_count" : 34
},
{
"doc_count" : 439
},
]
}
)
self.assertEqual(len(a.buckets), 2)
self.assertIs(a.buckets[0].key, None)
self.assertEqual(a.buckets[0].doc_count, 34)
self.assertIs(a.buckets[1].key, None)
self.assertEqual(a.buckets[1].doc_count, 439)
self.assertIs(a.get_bucket(None), None)
a = agg.Filters(Params(errors=Term(f.body, 'error'), warnings=Term(f.body, 'warning')))
self.assert_expression(
a,
{
"filters": {
"filters": {
"errors": {"term": {"body": "error"}},
"warnings": {"term": {"body": "warning"}}
}
}
}
)
a = a.build_agg_result(
{
"buckets": {
"errors": {
"doc_count" : 34
},
"warnings": {
"doc_count" : 439
},
}
}
)
self.assertEqual(len(a.buckets), 2)
self.assertIs(a.buckets[0].key, 'errors')
self.assertEqual(a.buckets[0].doc_count, 34)
self.assertIs(a.buckets[0], a.get_bucket('errors'))
self.assertIs(a.buckets[1].key, 'warnings')
self.assertEqual(a.buckets[1].doc_count, 439)
self.assertIs(a.buckets[1], a.get_bucket('warnings'))
a = agg.Nested(f.resellers, aggs={'min_price': agg.Min(f.resellers.price)})
self.assert_expression(
a,
{
"nested": {"path": "resellers"},
"aggregations": {
"min_price": {"min": {"field": "resellers.price"}}
}
}
)
a = a.build_agg_result(
{
"min_price": {
"value" : 350
}
}
)
self.assertEqual(a.get_aggregation('min_price').value, 350)
a = agg.Sampler(shard_size=1000, aggs={'avg_price': agg.Avg(f.price)})
self.assert_expression(
a,
{
"sampler": {"shard_size": 1000},
"aggregations": {
"avg_price": {"avg": {"field": "price"}}
}
}
)
a = a.build_agg_result(
{
"doc_count": 1000,
"avg_price": {
"value" : 750
}
}
)
self.assertEqual(a.doc_count, 1000)
self.assertEqual(a.get_aggregation('avg_price').value, 750)
# complex aggregation with sub aggregations
a = agg.Global()
a = a.aggs({
'selling_type': agg.Terms(
f.selling_type,
aggs={
'price_avg': agg.Avg(f.price),
'price_min': agg.Min(f.price),
'price_max': agg.Max(f.price),
'price_hist': agg.Histogram(f.price, interval=50),
}
),
'price_avg': agg.Avg(f.price),
}
)
self.assert_expression(
a,
{
"global": {},
"aggregations": {
"selling_type": {
"terms": {"field": "selling_type"},
"aggregations": {
"price_avg": {"avg": {"field": "price"}},
"price_min": {"min": {"field": "price"}},
"price_max": {"max": {"field": "price"}},
"price_hist": {"histogram": {"field": "price", "interval": 50}},
}
},
"price_avg": {"avg": {"field": "price"}}
}
}
)
a = a.build_agg_result(
{
'doc_count': 100,
'selling_type': {
'buckets': [
{
'key': 'retail',
'doc_count': 70,
'price_avg': {'value': 60.5},
'price_min': {'value': 1.1},
'price_max': {'value': 83.4},
'price_hist': {
'buckets': [
{'key': 50, 'doc_count': 60},
{'key': 100, 'doc_count': 7},
{'key': 150, 'doc_count': 3},
]
},
},
{
'key': 'wholesale',
'doc_count': 30,
'price_avg': {'value': 47.9},
'price_min': {'value': 20.1},
'price_max': {'value': 64.8},
'price_hist': {
'buckets': [
{'key': 0, 'doc_count': 17},
{'key': 50, 'doc_count': 5},
{'key': 100, 'doc_count': 6},
{'key': 150, 'doc_count': 2},
]
},
},
],
},
'price_avg': {'value': 56.3},
}
)
self.assertEqual(a.doc_count, 100)
type_agg = a.get_aggregation('selling_type')
self.assertEqual(len(type_agg.buckets), 2)
self.assertEqual(type_agg.buckets[0].key, 'retail')
self.assertEqual(type_agg.buckets[0].doc_count, 70)
self.assertIs(type_agg.buckets[0], type_agg.get_bucket('retail'))
self.assertAlmostEqual(type_agg.buckets[0].get_aggregation('price_avg').value, 60.5)
self.assertAlmostEqual(type_agg.buckets[0].get_aggregation('price_min').value, 1.1)
self.assertAlmostEqual(type_agg.buckets[0].get_aggregation('price_max').value, 83.4)
price_hist_agg = type_agg.buckets[0].get_aggregation('price_hist')
self.assertEqual(price_hist_agg.buckets[0].key, 50)
self.assertEqual(price_hist_agg.buckets[0].doc_count, 60)
self.assertIs(price_hist_agg.buckets[0], price_hist_agg.get_bucket(50))
self.assertEqual(price_hist_agg.buckets[1].key, 100)
self.assertEqual(price_hist_agg.buckets[1].doc_count, 7)
self.assertIs(price_hist_agg.buckets[1], price_hist_agg.get_bucket(100))
self.assertEqual(price_hist_agg.buckets[2].key, 150)
self.assertEqual(price_hist_agg.buckets[2].doc_count, 3)
self.assertIs(price_hist_agg.buckets[2], price_hist_agg.get_bucket(150))
self.assertEqual(len(price_hist_agg.buckets), 3)
self.assertEqual(type_agg.buckets[1].key, 'wholesale')
self.assertEqual(type_agg.buckets[1].doc_count, 30)
self.assertIs(type_agg.buckets[1], type_agg.get_bucket('wholesale'))
self.assertAlmostEqual(type_agg.buckets[1].get_aggregation('price_avg').value, 47.9)
self.assertAlmostEqual(type_agg.buckets[1].get_aggregation('price_min').value, 20.1)
self.assertAlmostEqual(type_agg.buckets[1].get_aggregation('price_max').value, 64.8)
price_hist_agg = type_agg.buckets[1].get_aggregation('price_hist')
self.assertEqual(len(price_hist_agg.buckets), 4)
self.assertEqual(price_hist_agg.buckets[0].key, 0)
self.assertEqual(price_hist_agg.buckets[0].doc_count, 17)
self.assertIs(price_hist_agg.buckets[0], price_hist_agg.get_bucket(0))
self.assertEqual(price_hist_agg.buckets[1].key, 50)
self.assertEqual(price_hist_agg.buckets[1].doc_count, 5)
self.assertIs(price_hist_agg.buckets[1], price_hist_agg.get_bucket(50))
self.assertEqual(price_hist_agg.buckets[2].key, 100)
self.assertEqual(price_hist_agg.buckets[2].doc_count, 6)
self.assertIs(price_hist_agg.buckets[2], price_hist_agg.get_bucket(100))
self.assertEqual(price_hist_agg.buckets[3].key, 150)
self.assertEqual(price_hist_agg.buckets[3].doc_count, 2)
self.assertIs(price_hist_agg.buckets[3], price_hist_agg.get_bucket(150))
self.assertEqual(a.get_aggregation('price_avg').value, 56.3)
class QuestionDocument(DynamicDocument):
pass
class PaperDocument(DynamicDocument):
pass
question_mapper = Mock(
return_value={
'602679': Mock(id=602679, type='question'),
'602678': Mock(id=602678, type='question'),
}
)
paper_mapper = Mock(return_value={'602672': Mock(id=602672, type='paper')})
top_hits_agg = agg.Terms(
f.tags,
size=3,
aggs={
'top_tags_hits': agg.TopHits(
size=1,
sort=f.last_activity_date.desc(),
_source={'include': f.title},
instance_mapper={
QuestionDocument: question_mapper, PaperDocument: paper_mapper
},
)
}
)
self.assert_expression(
top_hits_agg,
{
"terms": {
"field": "tags",
"size": 3
},
"aggregations": {
"top_tags_hits": {
"top_hits": {
"sort": {
"last_activity_date": "desc"
},
"_source": {
"include": "title"
},
"size" : 1
}
}
}
}
)
a = top_hits_agg.build_agg_result(
{
"buckets": [
{
"key": "windows-7",
"doc_count": 25365,
"top_tags_hits": {
"hits": {
"total": 25365,
"max_score": 1,
"hits": [
{
"_index": "stack",
"_type": "question",
"_id": "602679",
"_score": 1,
"_source": {
"title": "Windows port opening"
},
"sort": [
1370143231177
]
}
]
}
}
},
{
"key": "linux",
"doc_count": 18342,
"top_tags_hits": {
"hits": {
"total": 18342,
"max_score": 1,
"hits": [
{
"_index": "stack",
"_type": "paper",
"_id": "602672",
"_score": 1,
"_source": {
"title": "Ubuntu RFID Screensaver lock-unlock"
},
"sort": [
1370143379747
]
}
]
}
}
},
{
"key": "windows",
"doc_count": 18119,
"top_tags_hits": {
"hits": {
"total": 18119,
"max_score": 1,
"hits": [
{
"_index": "stack",
"_type": "question",
"_id": "602678",
"_score": 1,
"_source": {
"title": "If I change my computers date / time, what could be affected?"
},
"sort": [
1370142868283
]
}
]
}
}
}
]
},
doc_cls_map={'question': QuestionDocument, 'paper': PaperDocument},
mapper_registry={},
)
self.assertEqual(len(a.buckets), 3)
self.assertEqual(a.buckets[0].doc_count, 25365)
self.assertEqual(a.buckets[0].key, 'windows-7')
top_tags_agg = a.buckets[0].get_aggregation('top_tags_hits')
self.assertEqual(top_tags_agg.total, 25365)
self.assertEqual(top_tags_agg.max_score, 1)
self.assertEqual(len(top_tags_agg.hits), 1)
self.assertIsInstance(top_tags_agg.hits[0], QuestionDocument)
self.assertEqual(top_tags_agg.hits[0]._index, 'stack')
self.assertEqual(top_tags_agg.hits[0]._type, 'question')
self.assertEqual(top_tags_agg.hits[0]._score, 1)
self.assertEqual(top_tags_agg.hits[0]._id, '602679')
self.assertEqual(top_tags_agg.hits[0].title, 'Windows port opening')
self.assertEqual(top_tags_agg.hits[0].instance.id, 602679)
self.assertEqual(top_tags_agg.hits[0].instance.type, 'question')
self.assertEqual(a.buckets[1].doc_count, 18342)
self.assertEqual(a.buckets[1].key, 'linux')
top_tags_agg = a.buckets[1].get_aggregation('top_tags_hits')
self.assertEqual(top_tags_agg.total, 18342)
self.assertEqual(top_tags_agg.max_score, 1)
self.assertEqual(len(top_tags_agg.hits), 1)
self.assertIsInstance(top_tags_agg.hits[0], PaperDocument)
self.assertEqual(top_tags_agg.hits[0]._index, 'stack')
self.assertEqual(top_tags_agg.hits[0]._type, 'paper')
self.assertEqual(top_tags_agg.hits[0]._score, 1)
self.assertEqual(top_tags_agg.hits[0]._id, '602672')
self.assertEqual(top_tags_agg.hits[0].title, 'Ubuntu RFID Screensaver lock-unlock')
self.assertEqual(top_tags_agg.hits[0].instance.id, 602672)
self.assertEqual(top_tags_agg.hits[0].instance.type, 'paper')
self.assertEqual(a.buckets[2].doc_count, 18119)
self.assertEqual(a.buckets[2].key, 'windows')
top_tags_agg = a.buckets[2].get_aggregation('top_tags_hits')
self.assertEqual(top_tags_agg.total, 18119)
self.assertEqual(top_tags_agg.max_score, 1)
self.assertEqual(len(top_tags_agg.hits), 1)
self.assertIsInstance(top_tags_agg.hits[0], DynamicDocument)
self.assertEqual(top_tags_agg.hits[0]._index, 'stack')
self.assertEqual(top_tags_agg.hits[0]._type, 'question')
self.assertEqual(top_tags_agg.hits[0]._score, 1)
self.assertEqual(top_tags_agg.hits[0]._id, '602678')
self.assertEqual(top_tags_agg.hits[0].title, 'If I change my computers date / time, what could be affected?')
self.assertEqual(top_tags_agg.hits[0].instance.id, 602678)
self.assertEqual(top_tags_agg.hits[0].instance.type, 'question')
self.assertEqual(question_mapper.call_count, 1)
self.assertEqual(paper_mapper.call_count, 1)
    def test_instance_mapper(self):
        """Buckets resolve their ``instance`` attribute through the
        ``instance_mapper`` callable passed to the aggregation, and the
        mapper is invoked only once per aggregation result — even when the
        mapped aggregation is nested inside other aggregations.
        """
        # Minimal stand-in for an application model object looked up by key.
        class _Gender(object):
            def __init__(self, key, title):
                self.key = key
                self.title = title
        Male = _Gender('m', 'Male')
        Female = _Gender('f', 'Female')
        # Mapping from raw bucket key -> model instance, returned by the mapper.
        GENDERS = {g.key: g for g in [Male, Female]}
        f = DynamicDocument.fields
        # Mock lets us count how many times the mapper is actually called.
        gender_mapper = Mock(return_value=GENDERS)
        a = agg.Terms(f.gender, instance_mapper=gender_mapper)
        a = a.build_agg_result(
            {
                "buckets": [
                    {
                        "key": "m",
                        "doc_count": 10
                    },
                    {
                        "key": "f",
                        "doc_count": 10
                    },
                ]
            }
        )
        self.assertEqual(len(a.buckets), 2)
        # Each bucket's instance comes from the mapper's key -> object dict.
        self.assertEqual(a.buckets[0].instance.title, 'Male')
        self.assertEqual(a.buckets[1].instance.title, 'Female')
        # A single mapper call resolves every bucket in the result.
        self.assertEqual(gender_mapper.call_count, 1)
        # Now nest the mapped Terms aggregation under Global and under each
        # Range bucket; the same mapper instance serves both placements.
        gender_mapper = Mock(return_value=GENDERS)
        a = agg.Global(
            aggs={
                'all_genders': agg.Terms(f.gender, instance_mapper=gender_mapper),
                'all_salary': agg.Range(
                    f.month_salary,
                    ranges=[
                        {'to': 1000},
                        {'from': 1000, 'to': 2000},
                        {'from': 2000, 'to': 3000},
                        {'from': 3000},
                    ],
                    aggs={
                        'gender': agg.Terms(f.gender, instance_mapper=gender_mapper)
                    }
                )
            }
        )
        a = a.build_agg_result(
            {
                "doc_count": 1819,
                "all_genders": {
                    "buckets": [
                        {
                            "key": "m",
                            "doc_count": 1212
                        },
                        {
                            "key": "f",
                            "doc_count": 607
                        }
                    ]
                },
                "all_salary": {
                    "buckets": [
                        {
                            "to": 1000,
                            "doc_count": 183,
                            "gender": {
                                "buckets": [
                                    {
                                        "key": "f",
                                        "doc_count": 101
                                    },
                                    {
                                        "key": "m",
                                        "doc_count": 82
                                    }
                                ]
                            }
                        },
                        {
                            "from": 1000,
                            "to": 2000,
                            "doc_count": 456,
                            "gender": {
                                "buckets": [
                                    {
                                        "key": "f",
                                        "doc_count": 231
                                    },
                                    {
                                        "key": "m",
                                        "doc_count": 225
                                    }
                                ]
                            }
                        },
                        {
                            "from": 2000,
                            "to": 3000,
                            "doc_count": 1158,
                            "gender": {
                                "buckets": [
                                    {
                                        "key": "m",
                                        "doc_count": 894
                                    },
                                    {
                                        "key": "f",
                                        "doc_count": 264
                                    }
                                ]
                            }
                        },
                        {
                            "from": 3000,
                            "doc_count": 22,
                            "gender": {
                                "buckets": [
                                    {
                                        "key": "m",
                                        "doc_count": 11
                                    },
                                    {
                                        "key": "f",
                                        "doc_count": 11
                                    }
                                ]
                            }
                        },
                    ]
                }
            },
            mapper_registry={}
        )
        self.assertEqual(a.doc_count, 1819)
        all_genders_agg = a.get_aggregation('all_genders')
        self.assertEqual(len(all_genders_agg.buckets), 2)
        self.assertEqual(all_genders_agg.buckets[0].key, 'm')
        self.assertEqual(all_genders_agg.buckets[0].doc_count, 1212)
        self.assertEqual(all_genders_agg.buckets[0].instance.title, 'Male')
        self.assertEqual(all_genders_agg.buckets[1].key, 'f')
        self.assertEqual(all_genders_agg.buckets[1].doc_count, 607)
        self.assertEqual(all_genders_agg.buckets[1].instance.title, 'Female')
        all_salary_agg = a.get_aggregation('all_salary')
        self.assertEqual(len(all_salary_agg.buckets), 4)
        # First range bucket has no lower bound, so from_ is None.
        self.assertIs(all_salary_agg.buckets[0].from_, None)
        self.assertEqual(all_salary_agg.buckets[0].to, 1000)
        self.assertEqual(all_salary_agg.buckets[0].doc_count, 183)
        gender_agg = all_salary_agg.buckets[0].get_aggregation('gender')
        self.assertEqual(len(gender_agg.buckets), 2)
        self.assertEqual(gender_agg.buckets[0].key, 'f')
        self.assertEqual(gender_agg.buckets[0].doc_count, 101)
        self.assertEqual(gender_agg.buckets[0].instance.title, 'Female')
        self.assertEqual(gender_agg.buckets[1].key, 'm')
        self.assertEqual(gender_agg.buckets[1].doc_count, 82)
        self.assertEqual(gender_agg.buckets[1].instance.title, 'Male')
        self.assertEqual(all_salary_agg.buckets[1].from_, 1000)
        self.assertEqual(all_salary_agg.buckets[1].to, 2000)
        self.assertEqual(all_salary_agg.buckets[1].doc_count, 456)
        gender_agg = all_salary_agg.buckets[1].get_aggregation('gender')
        self.assertEqual(len(gender_agg.buckets), 2)
        self.assertEqual(gender_agg.buckets[0].key, 'f')
        self.assertEqual(gender_agg.buckets[0].doc_count, 231)
        self.assertEqual(gender_agg.buckets[0].instance.title, 'Female')
        self.assertEqual(gender_agg.buckets[1].key, 'm')
        self.assertEqual(gender_agg.buckets[1].doc_count, 225)
        self.assertEqual(gender_agg.buckets[1].instance.title, 'Male')
        # Even with nested placements the mapper still runs exactly once.
        self.assertEqual(gender_mapper.call_count, 1)
| |
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import sys
from mixbox.binding_utils import *
from . import cybox_common
from . import memory_object
from . import process_object
from . import win_handle_object
from . import win_thread_object
class MemorySectionListType(GeneratedsSuper):
    """The MemorySectionListType type specifies a list of memory sections
    used by the process.

    Generated binding class: wraps a list of memory_object.MemoryObjectType
    children and knows how to serialize itself to XML (``export``) and
    populate itself from a parsed XML node (``build``).
    """
    subclass = None
    superclass = None
    def __init__(self, Memory_Section=None):
        # Create a fresh list per instance; never share a mutable default.
        if Memory_Section is None:
            self.Memory_Section = []
        else:
            self.Memory_Section = Memory_Section
    def factory(*args_, **kwargs_):
        # Generated hook: a registered subclass overrides instantiation.
        if MemorySectionListType.subclass:
            return MemorySectionListType.subclass(*args_, **kwargs_)
        else:
            return MemorySectionListType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Memory_Section(self): return self.Memory_Section
    def set_Memory_Section(self, Memory_Section): self.Memory_Section = Memory_Section
    def add_Memory_Section(self, value): self.Memory_Section.append(value)
    def insert_Memory_Section(self, index, value):
        # FIX: previously this assigned ``self.Memory_Section[index] = value``,
        # which *replaced* the element at ``index`` (and raised IndexError for
        # an out-of-range index) instead of inserting as the name promises.
        self.Memory_Section.insert(index, value)
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        if (
            self.Memory_Section
        ):
            return True
        else:
            return False
    def export(self, lwrite, level, namespace_='WinProcessObj:', name_='MemorySectionListType', namespacedef_='', pretty_print=True):
        """Write this element (and its children) as XML via ``lwrite``."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(lwrite, level, pretty_print)
        lwrite('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(lwrite, level, already_processed, namespace_, name_='MemorySectionListType')
        if self.hasContent_():
            lwrite('>%s' % (eol_, ))
            self.exportChildren(lwrite, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(lwrite, level, pretty_print)
            lwrite('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            lwrite('/>%s' % (eol_, ))
    def exportAttributes(self, lwrite, level, already_processed, namespace_='WinProcessObj:', name_='MemorySectionListType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, lwrite, level, namespace_='WinProcessObj:', name_='MemorySectionListType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Memory_Section_ in self.Memory_Section:
            Memory_Section_.export(lwrite, level, 'WinProcessObj:', name_='Memory_Section', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree-style ``node``."""
        self.__sourcenode__ = node
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace from the child tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Memory_Section':
            obj_ = memory_object.MemoryObjectType.factory()
            obj_.build(child_)
            self.Memory_Section.append(obj_)
# end class MemorySectionListType
class StartupInfoType(GeneratedsSuper):
    """The StartupInfoType type encapsulates the information contained in
    the STARTUPINFO struct for the process.

    Generated binding class: each field mirrors one STARTUPINFO member and
    holds a cybox_common property object (or a win_handle_object handle for
    the hStd* members), serialized/deserialized via export()/build().
    """
    subclass = None
    superclass = None
    def __init__(self, lpDesktop=None, lpTitle=None, dwX=None, dwY=None, dwXSize=None, dwYSize=None, dwXCountChars=None, dwYCountChars=None, dwFillAttribute=None, dwFlags=None, wShowWindow=None, hStdInput=None, hStdOutput=None, hStdError=None):
        self.lpDesktop = lpDesktop
        self.lpTitle = lpTitle
        self.dwX = dwX
        self.dwY = dwY
        self.dwXSize = dwXSize
        self.dwYSize = dwYSize
        self.dwXCountChars = dwXCountChars
        self.dwYCountChars = dwYCountChars
        self.dwFillAttribute = dwFillAttribute
        self.dwFlags = dwFlags
        self.wShowWindow = wShowWindow
        self.hStdInput = hStdInput
        self.hStdOutput = hStdOutput
        self.hStdError = hStdError
    def factory(*args_, **kwargs_):
        # Generated hook: a registered subclass overrides instantiation.
        if StartupInfoType.subclass:
            return StartupInfoType.subclass(*args_, **kwargs_)
        else:
            return StartupInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors and no-op validators ---------------------------
    def get_lpDesktop(self): return self.lpDesktop
    def set_lpDesktop(self, lpDesktop): self.lpDesktop = lpDesktop
    def validate_StringObjectPropertyType(self, value):
        # Validate type cybox_common.StringObjectPropertyType, a restriction on None.
        pass
    def get_lpTitle(self): return self.lpTitle
    def set_lpTitle(self, lpTitle): self.lpTitle = lpTitle
    def get_dwX(self): return self.dwX
    def set_dwX(self, dwX): self.dwX = dwX
    def validate_IntegerObjectPropertyType(self, value):
        # Validate type cybox_common.IntegerObjectPropertyType, a restriction on None.
        pass
    def get_dwY(self): return self.dwY
    def set_dwY(self, dwY): self.dwY = dwY
    def get_dwXSize(self): return self.dwXSize
    def set_dwXSize(self, dwXSize): self.dwXSize = dwXSize
    def validate_PositiveIntegerObjectPropertyType(self, value):
        # Validate type cybox_common.PositiveIntegerObjectPropertyType, a restriction on None.
        pass
    def get_dwYSize(self): return self.dwYSize
    def set_dwYSize(self, dwYSize): self.dwYSize = dwYSize
    def get_dwXCountChars(self): return self.dwXCountChars
    def set_dwXCountChars(self, dwXCountChars): self.dwXCountChars = dwXCountChars
    def get_dwYCountChars(self): return self.dwYCountChars
    def set_dwYCountChars(self, dwYCountChars): self.dwYCountChars = dwYCountChars
    def get_dwFillAttribute(self): return self.dwFillAttribute
    def set_dwFillAttribute(self, dwFillAttribute): self.dwFillAttribute = dwFillAttribute
    def get_dwFlags(self): return self.dwFlags
    def set_dwFlags(self, dwFlags): self.dwFlags = dwFlags
    def get_wShowWindow(self): return self.wShowWindow
    def set_wShowWindow(self, wShowWindow): self.wShowWindow = wShowWindow
    def get_hStdInput(self): return self.hStdInput
    def set_hStdInput(self, hStdInput): self.hStdInput = hStdInput
    def get_hStdOutput(self): return self.hStdOutput
    def set_hStdOutput(self, hStdOutput): self.hStdOutput = hStdOutput
    def get_hStdError(self): return self.hStdError
    def set_hStdError(self, hStdError): self.hStdError = hStdError
    def hasContent_(self):
        # True when any child element is set; drives full vs. self-closing tag.
        if (
            self.lpDesktop is not None or
            self.lpTitle is not None or
            self.dwX is not None or
            self.dwY is not None or
            self.dwXSize is not None or
            self.dwYSize is not None or
            self.dwXCountChars is not None or
            self.dwYCountChars is not None or
            self.dwFillAttribute is not None or
            self.dwFlags is not None or
            self.wShowWindow is not None or
            self.hStdInput is not None or
            self.hStdOutput is not None or
            self.hStdError is not None
        ):
            return True
        else:
            return False
    def export(self, lwrite, level, namespace_='WinProcessObj:', name_='StartupInfoType', namespacedef_='', pretty_print=True):
        """Write this element (and its children) as XML via ``lwrite``."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(lwrite, level, pretty_print)
        lwrite('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(lwrite, level, already_processed, namespace_, name_='StartupInfoType')
        if self.hasContent_():
            lwrite('>%s' % (eol_, ))
            self.exportChildren(lwrite, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(lwrite, level, pretty_print)
            lwrite('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            lwrite('/>%s' % (eol_, ))
    def exportAttributes(self, lwrite, level, already_processed, namespace_='WinProcessObj:', name_='StartupInfoType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, lwrite, level, namespace_='WinProcessObj:', name_='StartupInfoType', fromsubclass_=False, pretty_print=True):
        # Children are written in schema order; unset fields are skipped.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.lpDesktop is not None:
            self.lpDesktop.export(lwrite, level, 'WinProcessObj:', name_='lpDesktop', pretty_print=pretty_print)
        if self.lpTitle is not None:
            self.lpTitle.export(lwrite, level, 'WinProcessObj:', name_='lpTitle', pretty_print=pretty_print)
        if self.dwX is not None:
            self.dwX.export(lwrite, level, 'WinProcessObj:', name_='dwX', pretty_print=pretty_print)
        if self.dwY is not None:
            self.dwY.export(lwrite, level, 'WinProcessObj:', name_='dwY', pretty_print=pretty_print)
        if self.dwXSize is not None:
            self.dwXSize.export(lwrite, level, 'WinProcessObj:', name_='dwXSize', pretty_print=pretty_print)
        if self.dwYSize is not None:
            self.dwYSize.export(lwrite, level, 'WinProcessObj:', name_='dwYSize', pretty_print=pretty_print)
        if self.dwXCountChars is not None:
            self.dwXCountChars.export(lwrite, level, 'WinProcessObj:', name_='dwXCountChars', pretty_print=pretty_print)
        if self.dwYCountChars is not None:
            self.dwYCountChars.export(lwrite, level, 'WinProcessObj:', name_='dwYCountChars', pretty_print=pretty_print)
        if self.dwFillAttribute is not None:
            self.dwFillAttribute.export(lwrite, level, 'WinProcessObj:', name_='dwFillAttribute', pretty_print=pretty_print)
        if self.dwFlags is not None:
            self.dwFlags.export(lwrite, level, 'WinProcessObj:', name_='dwFlags', pretty_print=pretty_print)
        if self.wShowWindow is not None:
            self.wShowWindow.export(lwrite, level, 'WinProcessObj:', name_='wShowWindow', pretty_print=pretty_print)
        if self.hStdInput is not None:
            self.hStdInput.export(lwrite, level, 'WinProcessObj:', name_='hStdInput', pretty_print=pretty_print)
        if self.hStdOutput is not None:
            self.hStdOutput.export(lwrite, level, 'WinProcessObj:', name_='hStdOutput', pretty_print=pretty_print)
        if self.hStdError is not None:
            self.hStdError.export(lwrite, level, 'WinProcessObj:', name_='hStdError', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree-style ``node``."""
        self.__sourcenode__ = node
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace from the child tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each child element name to the matching binding type.
        if nodeName_ == 'lpDesktop':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_lpDesktop(obj_)
        elif nodeName_ == 'lpTitle':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_lpTitle(obj_)
        elif nodeName_ == 'dwX':
            obj_ = cybox_common.IntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwX(obj_)
        elif nodeName_ == 'dwY':
            obj_ = cybox_common.IntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwY(obj_)
        elif nodeName_ == 'dwXSize':
            obj_ = cybox_common.PositiveIntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwXSize(obj_)
        elif nodeName_ == 'dwYSize':
            obj_ = cybox_common.PositiveIntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwYSize(obj_)
        elif nodeName_ == 'dwXCountChars':
            obj_ = cybox_common.PositiveIntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwXCountChars(obj_)
        elif nodeName_ == 'dwYCountChars':
            obj_ = cybox_common.PositiveIntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwYCountChars(obj_)
        elif nodeName_ == 'dwFillAttribute':
            obj_ = cybox_common.IntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwFillAttribute(obj_)
        elif nodeName_ == 'dwFlags':
            obj_ = cybox_common.IntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_dwFlags(obj_)
        elif nodeName_ == 'wShowWindow':
            obj_ = cybox_common.IntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_wShowWindow(obj_)
        elif nodeName_ == 'hStdInput':
            obj_ = win_handle_object.WindowsHandleObjectType.factory()
            obj_.build(child_)
            self.set_hStdInput(obj_)
        elif nodeName_ == 'hStdOutput':
            obj_ = win_handle_object.WindowsHandleObjectType.factory()
            obj_.build(child_)
            self.set_hStdOutput(obj_)
        elif nodeName_ == 'hStdError':
            obj_ = win_handle_object.WindowsHandleObjectType.factory()
            obj_.build(child_)
            self.set_hStdError(obj_)
# end class StartupInfoType
class WindowsProcessObjectType(process_object.ProcessObjectType):
    """The WindowsProcessObjectType type is intended to characterize
    Windows processes.The aslr_enabled field specifies whether
    Address Space Layout Randomization (ASLR) is enabled for the
    process.The dep_enabled field specifies whether Data Execution
    Prevention (DEP) is enabled for the process.

    Generated binding class extending the generic ProcessObjectType with
    Windows-specific attributes (aslr_enabled, dep_enabled) and child
    elements (Handle_List, Startup_Info, Thread, ...).
    """
    subclass = None
    superclass = process_object.ProcessObjectType
    # NOTE(review): ``xsi_type`` is accepted below but never stored or
    # forwarded to the superclass __init__ — confirm against the generator's
    # intent before relying on it.
    def __init__(self, object_reference=None, Custom_Properties=None, xsi_type=None, is_hidden=None, PID=None, Name=None, Creation_Time=None, Parent_PID=None, Child_PID_List=None, Image_Info=None, Argument_List=None, Environment_Variable_List=None, Kernel_Time=None, Port_List=None, Network_Connection_List=None, Start_Time=None, Status=None, Username=None, User_Time=None, Extracted_Features=None, aslr_enabled=None, dep_enabled=None, Handle_List=None, Priority=None, Section_List=None, Security_ID=None, Startup_Info=None, Security_Type=None, Window_Title=None, Thread=None):
        super(WindowsProcessObjectType, self).__init__(object_reference, Custom_Properties, is_hidden, PID, Name, Creation_Time, Parent_PID, Child_PID_List, Image_Info, Argument_List, Environment_Variable_List, Kernel_Time, Port_List, Network_Connection_List, Start_Time, Status, Username, User_Time, Extracted_Features, )
        # XML attributes are coerced to bool (None stays None via _cast).
        self.aslr_enabled = _cast(bool, aslr_enabled)
        self.dep_enabled = _cast(bool, dep_enabled)
        self.Handle_List = Handle_List
        self.Priority = Priority
        self.Section_List = Section_List
        self.Security_ID = Security_ID
        self.Startup_Info = Startup_Info
        self.Security_Type = Security_Type
        self.Window_Title = Window_Title
        # Thread is a repeatable element: default to a fresh list.
        if not Thread:
            self.Thread = []
        else:
            self.Thread = Thread
    def factory(*args_, **kwargs_):
        # Generated hook: a registered subclass overrides instantiation.
        if WindowsProcessObjectType.subclass:
            return WindowsProcessObjectType.subclass(*args_, **kwargs_)
        else:
            return WindowsProcessObjectType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors and no-op validators ---------------------------
    def get_Handle_List(self): return self.Handle_List
    def set_Handle_List(self, Handle_List): self.Handle_List = Handle_List
    def get_Priority(self): return self.Priority
    def set_Priority(self, Priority): self.Priority = Priority
    def validate_StringObjectPropertyType(self, value):
        # Validate type cybox_common.StringObjectPropertyType, a restriction on None.
        pass
    def get_Section_List(self): return self.Section_List
    def set_Section_List(self, Section_List): self.Section_List = Section_List
    def get_Security_ID(self): return self.Security_ID
    def set_Security_ID(self, Security_ID): self.Security_ID = Security_ID
    def get_Startup_Info(self): return self.Startup_Info
    def set_Startup_Info(self, Startup_Info): self.Startup_Info = Startup_Info
    def get_Security_Type(self): return self.Security_Type
    def set_Security_Type(self, Security_Type): self.Security_Type = Security_Type
    def validate_SIDType(self, value):
        # Validate type cybox_common.SIDType, a restriction on None.
        pass
    def get_Window_Title(self): return self.Window_Title
    def set_Window_Title(self, Window_Title): self.Window_Title = Window_Title
    def get_Thread(self): return self.Thread
    def set_Thread(self, Thread): self.Thread = Thread
    def add_Thread(self, Thread): self.Thread.append(Thread)
    def get_aslr_enabled(self): return self.aslr_enabled
    def set_aslr_enabled(self, aslr_enabled): self.aslr_enabled = aslr_enabled
    def get_dep_enabled(self): return self.dep_enabled
    def set_dep_enabled(self, dep_enabled): self.dep_enabled = dep_enabled
    def hasContent_(self):
        # True when any local child is set or the superclass has content.
        if (
            self.Handle_List is not None or
            self.Priority is not None or
            self.Section_List is not None or
            self.Security_ID is not None or
            self.Startup_Info is not None or
            self.Security_Type is not None or
            self.Window_Title is not None or
            self.Thread or
            super(WindowsProcessObjectType, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, lwrite, level, namespace_='WinProcessObj:', name_='WindowsProcessObjectType', namespacedef_='', pretty_print=True):
        """Write this element (and its children) as XML via ``lwrite``."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(lwrite, level, pretty_print)
        lwrite('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(lwrite, level, already_processed, namespace_, name_='WindowsProcessObjectType')
        if self.hasContent_():
            lwrite('>%s' % (eol_, ))
            self.exportChildren(lwrite, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(lwrite, level, pretty_print)
            lwrite('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            lwrite('/>%s' % (eol_, ))
    def exportAttributes(self, lwrite, level, already_processed, namespace_='WinProcessObj:', name_='WindowsProcessObjectType'):
        # Inherited attributes first, then the two Windows-specific booleans.
        super(WindowsProcessObjectType, self).exportAttributes(lwrite, level, already_processed, namespace_, name_='WindowsProcessObjectType')
        if self.aslr_enabled is not None:
            lwrite(' aslr_enabled="%s"' % self.gds_format_boolean(self.aslr_enabled, input_name='aslr_enabled'))
        if self.dep_enabled is not None:
            lwrite(' dep_enabled="%s"' % self.gds_format_boolean(self.dep_enabled, input_name='dep_enabled'))
    def exportChildren(self, lwrite, level, namespace_='WinProcessObj:', name_='WindowsProcessObjectType', fromsubclass_=False, pretty_print=True):
        # Inherited children first, then the Windows-specific ones in order.
        super(WindowsProcessObjectType, self).exportChildren(lwrite, level, 'WinProcessObj:', name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Handle_List is not None:
            self.Handle_List.export(lwrite, level, 'WinProcessObj:', name_='Handle_List', pretty_print=pretty_print)
        if self.Priority is not None:
            self.Priority.export(lwrite, level, 'WinProcessObj:', name_='Priority', pretty_print=pretty_print)
        if self.Section_List is not None:
            self.Section_List.export(lwrite, level, 'WinProcessObj:', name_='Section_List', pretty_print=pretty_print)
        if self.Security_ID is not None:
            self.Security_ID.export(lwrite, level, 'WinProcessObj:', name_='Security_ID', pretty_print=pretty_print)
        if self.Startup_Info is not None:
            self.Startup_Info.export(lwrite, level, 'WinProcessObj:', name_='Startup_Info', pretty_print=pretty_print)
        if self.Security_Type is not None:
            self.Security_Type.export(lwrite, level, 'WinProcessObj:', name_='Security_Type', pretty_print=pretty_print)
        if self.Window_Title is not None:
            self.Window_Title.export(lwrite, level, 'WinProcessObj:', name_='Window_Title', pretty_print=pretty_print)
        for Thread_ in self.Thread:
            Thread_.export(lwrite, level, 'WinProcessObj:', name_='Thread', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree-style ``node``."""
        self.__sourcenode__ = node
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip the namespace from the child tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Parse xs:boolean attributes; anything but true/1/false/0 is an error.
        value = find_attr_value_('aslr_enabled', node)
        if value is not None:
            if value in ('true', '1'):
                self.aslr_enabled = True
            elif value in ('false', '0'):
                self.aslr_enabled = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('dep_enabled', node)
        if value is not None:
            if value in ('true', '1'):
                self.dep_enabled = True
            elif value in ('false', '0'):
                self.dep_enabled = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(WindowsProcessObjectType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch local children; fall through to the superclass for the rest.
        if nodeName_ == 'Handle_List':
            obj_ = win_handle_object.WindowsHandleListType.factory()
            obj_.build(child_)
            self.set_Handle_List(obj_)
        elif nodeName_ == 'Priority':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_Priority(obj_)
        elif nodeName_ == 'Section_List':
            obj_ = MemorySectionListType.factory()
            obj_.build(child_)
            self.set_Section_List(obj_)
        elif nodeName_ == 'Security_ID':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_Security_ID(obj_)
        elif nodeName_ == 'Startup_Info':
            obj_ = StartupInfoType.factory()
            obj_.build(child_)
            self.set_Startup_Info(obj_)
        elif nodeName_ == 'Security_Type':
            obj_ = cybox_common.SIDType.factory()
            obj_.build(child_)
            self.set_Security_Type(obj_)
        elif nodeName_ == 'Window_Title':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_Window_Title(obj_)
        elif nodeName_ == 'Thread':
            obj_ = win_thread_object.WindowsThreadObjectType.factory()
            obj_.build(child_)
            self.Thread.append(obj_)
        super(WindowsProcessObjectType, self).buildChildren(child_, node, nodeName_, True)
# end class WindowsProcessObjectType
# Maps XML element local names to the binding classes that parse them.
# Consulted by get_root_tag() to pick a class for a document's root tag.
# NOTE(review): this table looks machine-generated (generateDS style) —
# prefer regenerating the bindings over hand-editing entries.
GDSClassesMapping = {
    'Build_Utility': cybox_common.BuildUtilityType,
    'Errors': cybox_common.ErrorsType,
    'Accept_Charset': cybox_common.StringObjectPropertyType,
    'Time': cybox_common.TimeType,
    'Certificate_Issuer': cybox_common.StringObjectPropertyType,
    'Identifier': cybox_common.PlatformIdentifierType,
    'Max_Forwards': cybox_common.IntegerObjectPropertyType,
    'Proxy_Authorization': cybox_common.StringObjectPropertyType,
    'Metadata': cybox_common.MetadataType,
    'Hash': cybox_common.HashType,
    'Entry_Type': cybox_common.StringObjectPropertyType,
    'PID': cybox_common.UnsignedIntegerObjectPropertyType,
    'lpDesktop': cybox_common.StringObjectPropertyType,
    'Argument_List': process_object.ArgumentListType,
    'Information_Source_Type': cybox_common.ControlledVocabularyStringType,
    'Path': cybox_common.StringObjectPropertyType,
    'Internal_Strings': cybox_common.InternalStringsType,
    'Byte_Run': cybox_common.ByteRunType,
    'SubDatum': cybox_common.MetadataType,
    'Record_Name': cybox_common.StringObjectPropertyType,
    'Segment_Hash': cybox_common.HashValueType,
    'Digital_Signature': cybox_common.DigitalSignatureInfoType,
    'X_Forwarded_Proto': cybox_common.StringObjectPropertyType,
    'Region_Start_Address': cybox_common.HexBinaryObjectPropertyType,
    'Code_Snippets': cybox_common.CodeSnippetsType,
    'Value': cybox_common.AnyURIObjectPropertyType,
    'Length': cybox_common.PositiveIntegerObjectPropertyType,
    'Expect': cybox_common.StringObjectPropertyType,
    'If_Range': cybox_common.StringObjectPropertyType,
    'TE': cybox_common.StringObjectPropertyType,
    'Parent_PID': cybox_common.UnsignedIntegerObjectPropertyType,
    'Encoding': cybox_common.ControlledVocabularyStringType,
    'Internationalization_Settings': cybox_common.InternationalizationSettingsType,
    'Image_Offset': cybox_common.IntegerObjectPropertyType,
    'Status_Code': cybox_common.PositiveIntegerObjectPropertyType,
    'File_System_Offset': cybox_common.IntegerObjectPropertyType,
    'Warning': cybox_common.StringObjectPropertyType,
    'Memory_Region': memory_object.MemoryObjectType,
    'Object_Address': cybox_common.UnsignedLongObjectPropertyType,
    'Memory_Section': memory_object.MemoryObjectType,
    'Segments': cybox_common.HashSegmentsType,
    'Content_Length': cybox_common.IntegerObjectPropertyType,
    'X_UA_Compatible': cybox_common.StringObjectPropertyType,
    'Functions': cybox_common.FunctionsType,
    'X_Powered_By': cybox_common.StringObjectPropertyType,
    'String_Value': cybox_common.StringObjectPropertyType,
    'Pointer_Count': cybox_common.UnsignedLongObjectPropertyType,
    'Build_Utility_Platform_Specification': cybox_common.PlatformSpecificationType,
    'Compiler_Informal_Description': cybox_common.CompilerInformalDescriptionType,
    'Start_Time': cybox_common.DateTimeObjectPropertyType,
    'System': cybox_common.ObjectPropertiesType,
    'Priority': cybox_common.StringObjectPropertyType,
    'Platform': cybox_common.PlatformSpecificationType,
    'Version': cybox_common.StringObjectPropertyType,
    'Usage_Context_Assumptions': cybox_common.UsageContextAssumptionsType,
    'Accept_Language': cybox_common.StringObjectPropertyType,
    'Import': cybox_common.StringObjectPropertyType,
    'Raw_Header': cybox_common.StringObjectPropertyType,
    'Type': cybox_common.ControlledVocabularyStringType,
    'Compilers': cybox_common.CompilersType,
    'Username': cybox_common.StringObjectPropertyType,
    'Tool_Type': cybox_common.ControlledVocabularyStringType,
    'String': cybox_common.ExtractedStringType,
    'lpTitle': cybox_common.StringObjectPropertyType,
    'Tool': cybox_common.ToolInformationType,
    'Refresh': cybox_common.IntegerObjectPropertyType,
    'Build_Information': cybox_common.BuildInformationType,
    'hStdOutput': win_handle_object.WindowsHandleObjectType,
    'Link': cybox_common.StringObjectPropertyType,
    'Tool_Hashes': cybox_common.HashListType,
    'TTL': cybox_common.IntegerObjectPropertyType,
    'X_Frame_Options': cybox_common.StringObjectPropertyType,
    'Age': cybox_common.IntegerObjectPropertyType,
    'Message_Body': cybox_common.StringObjectPropertyType,
    'Address_Value': cybox_common.StringObjectPropertyType,
    'Error_Instances': cybox_common.ErrorInstancesType,
    'Data_Segment': cybox_common.StringObjectPropertyType,
    'dwFlags': cybox_common.IntegerObjectPropertyType,
    'Access_Control_Allow_Origin': cybox_common.StringObjectPropertyType,
    'Range': cybox_common.StringObjectPropertyType,
    'Certificate_Subject': cybox_common.StringObjectPropertyType,
    'Content_Location': cybox_common.StringObjectPropertyType,
    'Retry_After': cybox_common.IntegerObjectPropertyType,
    'Property': cybox_common.PropertyType,
    'Strings': cybox_common.ExtractedStringsType,
    'WWW_Authenticate': cybox_common.StringObjectPropertyType,
    'Via': cybox_common.StringObjectPropertyType,
    'X_Requested_For': cybox_common.StringObjectPropertyType,
    'Contributors': cybox_common.PersonnelType,
    'Simple_Hash_Value': cybox_common.SimpleHashValueType,
    'Transfer_Encoding': cybox_common.StringObjectPropertyType,
    'Security_Type': cybox_common.SIDType,
    'Reference_Description': cybox_common.StructuredTextType,
    'Server': cybox_common.StringObjectPropertyType,
    'User_Account_Info': cybox_common.ObjectPropertiesType,
    'Child_PID': cybox_common.UnsignedIntegerObjectPropertyType,
    'Configuration_Settings': cybox_common.ConfigurationSettingsType,
    'Compiler_Platform_Specification': cybox_common.PlatformSpecificationType,
    'wShowWindow': cybox_common.IntegerObjectPropertyType,
    'Byte_String_Value': cybox_common.HexBinaryObjectPropertyType,
    'User_Time': cybox_common.DurationObjectPropertyType,
    'dwYSize': cybox_common.PositiveIntegerObjectPropertyType,
    'Reason_Phrase': cybox_common.StringObjectPropertyType,
    'Handle_List': win_handle_object.WindowsHandleListType,
    'Record_Type': cybox_common.StringObjectPropertyType,
    'Instance': cybox_common.ObjectPropertiesType,
    'dwXSize': cybox_common.PositiveIntegerObjectPropertyType,
    'Access_Mask': cybox_common.UnsignedLongObjectPropertyType,
    'dwFillAttribute': cybox_common.IntegerObjectPropertyType,
    'Authorization': cybox_common.StringObjectPropertyType,
    'Accept_Encoding': cybox_common.StringObjectPropertyType,
    'Status': process_object.ProcessStatusType,
    'Window_Title': cybox_common.StringObjectPropertyType,
    'Current_Directory': cybox_common.StringObjectPropertyType,
    'Tool_Specific_Data': cybox_common.ToolSpecificDataType,
    'Execution_Environment': cybox_common.ExecutionEnvironmentType,
    'If_Modified_Since': cybox_common.DateTimeObjectPropertyType,
    'X_Content_Type_Options': cybox_common.StringObjectPropertyType,
    'hStdError': win_handle_object.WindowsHandleObjectType,
    'Search_Distance': cybox_common.IntegerObjectPropertyType,
    'Child_PID_List': process_object.ChildPIDListType,
    'Dependencies': cybox_common.DependenciesType,
    'Segment_Count': cybox_common.IntegerObjectPropertyType,
    'Offset': cybox_common.IntegerObjectPropertyType,
    'Date': cybox_common.DateTimeObjectPropertyType,
    'Cookie': cybox_common.StringObjectPropertyType,
    'Hashes': cybox_common.HashListType,
    'Strict_Transport_Security': cybox_common.StringObjectPropertyType,
    'Content_Disposition': cybox_common.StringObjectPropertyType,
    'dwX': cybox_common.IntegerObjectPropertyType,
    'User_Agent': cybox_common.StringObjectPropertyType,
    'Address_Class': cybox_common.StringObjectPropertyType,
    'hStdInput': win_handle_object.WindowsHandleObjectType,
    'Command_Line': cybox_common.StringObjectPropertyType,
    'Language': cybox_common.StringObjectPropertyType,
    'Creation_Time': cybox_common.DateTimeObjectPropertyType,
    'Usage_Context_Assumption': cybox_common.StructuredTextType,
    'Block_Hash': cybox_common.FuzzyHashBlockType,
    'Dependency': cybox_common.DependencyType,
    'Connection': cybox_common.StringObjectPropertyType,
    'X_Requested_With': cybox_common.StringObjectPropertyType,
    'Kernel_Time': cybox_common.DurationObjectPropertyType,
    'Error': cybox_common.ErrorType,
    'P3P': cybox_common.StringObjectPropertyType,
    'If_Unmodified_Since': cybox_common.DateTimeObjectPropertyType,
    'Trigger_Point': cybox_common.HexBinaryObjectPropertyType,
    'Environment_Variable': cybox_common.EnvironmentVariableType,
    'dwXCountChars': cybox_common.PositiveIntegerObjectPropertyType,
    'English_Translation': cybox_common.StringObjectPropertyType,
    'Tool_Configuration': cybox_common.ToolConfigurationType,
    'Process': process_object.ProcessObjectType,
    'Imports': cybox_common.ImportsType,
    'Library': cybox_common.LibraryType,
    'Cache_Control': cybox_common.StringObjectPropertyType,
    'References': cybox_common.ToolReferencesType,
    'Service_Used': cybox_common.StringObjectPropertyType,
    'Image_Info': process_object.ImageInfoType,
    'X_XSS_Protection': cybox_common.StringObjectPropertyType,
    'Windows_Handle': win_handle_object.WindowsHandleObjectType,
    'Block_Hash_Value': cybox_common.HashValueType,
    'Trailer': cybox_common.StringObjectPropertyType,
    'Fuzzy_Hash_Structure': cybox_common.FuzzyHashStructureType,
    'File_Name': cybox_common.StringObjectPropertyType,
    'Configuration_Setting': cybox_common.ConfigurationSettingType,
    'dwY': cybox_common.IntegerObjectPropertyType,
    'Argument': cybox_common.StringObjectPropertyType,
    'Libraries': cybox_common.LibrariesType,
    'QClass': cybox_common.StringObjectPropertyType,
    'Content_Language': cybox_common.StringObjectPropertyType,
    'Content_MD5': cybox_common.StringObjectPropertyType,
    'Security_ID': cybox_common.StringObjectPropertyType,
    'Function': cybox_common.StringObjectPropertyType,
    'Handle': win_handle_object.WindowsHandleObjectType,
    'Description': cybox_common.StructuredTextType,
    'Code_Snippet': cybox_common.ObjectPropertiesType,
    'Build_Configuration': cybox_common.BuildConfigurationType,
    'Extracted_Features': cybox_common.ExtractedFeaturesType,
    'Expires': cybox_common.DateTimeObjectPropertyType,
    'VLAN_Name': cybox_common.StringObjectPropertyType,
    'Content_Range': cybox_common.StringObjectPropertyType,
    'X_ATT_DeviceId': cybox_common.StringObjectPropertyType,
    'Content_Encoding': cybox_common.StringObjectPropertyType,
    'Pragma': cybox_common.StringObjectPropertyType,
    'Search_Within': cybox_common.IntegerObjectPropertyType,
    'Segment': cybox_common.HashSegmentType,
    'Port_Value': cybox_common.PositiveIntegerObjectPropertyType,
    'Compiler': cybox_common.CompilerType,
    'Name': cybox_common.StringObjectPropertyType,
    'Set_Cookie': cybox_common.StringObjectPropertyType,
    'Network_Connection_List': process_object.NetworkConnectionListType,
    'Accept_Datetime': cybox_common.StringObjectPropertyType,
    'Environment_Variable_List': cybox_common.EnvironmentVariableListType,
    'Last_Modified': cybox_common.DateTimeObjectPropertyType,
    'Flags': cybox_common.HexBinaryObjectPropertyType,
    'Port_List': process_object.PortListType,
    'Content_Type': cybox_common.StringObjectPropertyType,
    'Signature_Description': cybox_common.StringObjectPropertyType,
    'Block_Size': cybox_common.IntegerObjectPropertyType,
    'ID': cybox_common.UnsignedIntegerObjectPropertyType,
    'Proxy_Authenticate': cybox_common.StringObjectPropertyType,
    'If_None_Match': cybox_common.StringObjectPropertyType,
    'Accept_Ranges': cybox_common.StringObjectPropertyType,
    'Region_Size': cybox_common.UnsignedLongObjectPropertyType,
    'Data_Length': cybox_common.IntegerObjectPropertyType,
    'Fuzzy_Hash_Value': cybox_common.FuzzyHashValueType,
    'Accept': cybox_common.StringObjectPropertyType,
    'Data_Size': cybox_common.DataSizeType,
    'dwYCountChars': cybox_common.PositiveIntegerObjectPropertyType,
    'Dependency_Description': cybox_common.StructuredTextType,
    'ETag': cybox_common.StringObjectPropertyType,
    'Date_Ran': cybox_common.DateTimeObjectPropertyType,
    'Contributor': cybox_common.ContributorType,
    'If_Match': cybox_common.StringObjectPropertyType,
    'Tools': cybox_common.ToolsInformationType,
    'Custom_Properties': cybox_common.CustomPropertiesType,
    'VLAN_Num': cybox_common.IntegerObjectPropertyType,
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
print(USAGE_TEXT)
sys.exit(1)
def get_root_tag(node):
    """Return (local tag name, binding class or None) for an XML root node.

    The class is looked up first in GDSClassesMapping, then among this
    module's globals; None means the caller must supply a default.
    """
    local_tag = Tag_pattern_.match(node.tag).groups()[-1]
    binding_class = GDSClassesMapping.get(local_tag) or globals().get(local_tag)
    return local_tag, binding_class
def parse(inFileName):
    """Parse an XML file into a binding object.

    Falls back to WindowsProcessObjectType when the root tag is not
    recognized. Returns the built root object.
    """
    doc = parsexml_(inFileName)
    root_node = doc.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        root_tag = 'Windows_Process'
        root_class = WindowsProcessObjectType
    root_obj = root_class.factory()
    root_obj.build(root_node)
    doc = None  # release the parsed DOM so it can be garbage-collected
    return root_obj
def parseEtree(inFileName):
    """Parse an XML file, echo it to stdout, and return the result.

    Returns (root binding object, etree element). Falls back to
    WindowsProcessObjectType when the root tag is not recognized.
    """
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'Windows_Process'
        rootClass = WindowsProcessObjectType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    rootElement = rootObj.to_etree(None, name_=rootTag)
    content = etree_.tostring(rootElement, pretty_print=True,
        xml_declaration=True, encoding="utf-8")
    # Bug fix: tostring() returns *bytes* when an encoding is given, and
    # writing bytes to the text-mode sys.stdout raises TypeError on
    # Python 3. Decode first; on Python 2 content is already a str.
    if isinstance(content, bytes):
        content = content.decode("utf-8")
    sys.stdout.write(content)
    sys.stdout.write('\n')
    return rootObj, rootElement
def parseString(inString):
    """Parse XML held in a string into a binding object.

    Falls back to WindowsProcessObjectType when the root tag is not
    recognized. Returns the built root object.
    """
    from mixbox.vendor.six import StringIO
    doc = parsexml_(StringIO(inString))
    root_node = doc.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        root_tag = 'Windows_Process'
        root_class = WindowsProcessObjectType
    root_obj = root_class.factory()
    root_obj.build(root_node)
    doc = None  # release the parsed DOM so it can be garbage-collected
    return root_obj
def main():
    """Entry point: parse the single XML file named on the command line."""
    cli_args = sys.argv[1:]
    if len(cli_args) != 1:
        usage()  # prints usage and exits with status 1
        return
    parse(cli_args[0])
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Names exported by `from <module> import *`.
__all__ = [
    "WindowsProcessObjectType",
    "MemorySectionListType",
    "StartupInfoType"
]
| |
#!/usr/bin/env python
# encoding: utf-8
#
# The MIT License
#
# Copyright (c) 2008 William T. Katz
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# --- Significant portions of the code was taken from Google App Engine SDK
# --- which is licensed under Apache 2.0
#
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import getopt
import datetime
import getpass
import mimetypes
import os
import re
import sha
import sys
import time
import httplib
import urllib
import urlparse
import socket
import string
from utils.external import textile
help_message = """
First argument must be an authentication cookie that can be cut & pasted after
logging in with a browser. Cookies can be easily viewed by using the Web
Developer plugin with Firefox.
For example, for uploading data into the local datastore, you'd do something
like this:
drupal_uploader.py 'dev_appserver_login="root@example.com:True"'
(or you could skip the first argument and use the -r or --root options)
For uploading data into a Google AppEngine-hosted app, the cookie would begin
with ACSID:
drupal_uploader.py 'ACSID=AJXUWfE-aefkae...'
Options:
-D, --dbtype = database type (default is 'mysql')
-t, --prefix = table prefix, not supported by all blogtypes (default is '')
-b, --blogtype = type of blog to import from (default is 'drupal')
-r, --root sets authorization cookie for local dev admin
-d, --dbhostname = hostname of MySQL server (default is 'localhost')
-p, --dbport = port of MySQL server (default is '3306')
-u, --dbuserpwd = user:passwd for MySQL server (e.g., 'johndoe:mypasswd')
-n, --dbname = name of Drupal database name (default is 'drupal')
-l, --url = the url (web location) of the Bloog app
-a, --articles = only upload this many articles (for testing)
-R, --static_redirect = generate redirects for static content by adding this
prefix. Not supported by all blogtypes.
"""
DB_ENCODING = 'latin-1'
# List the ASCII chars that are OK for our pages
NEWLINE_CHARS = [ord(x) for x in ['\n', '\t', '\r']]
OK_CHARS = range(32,126) + [ord(x) for x in ['\n', '\t', '\r']]
OK_TITLE = range(32,126)
# Registry of available database drivers: name -> connect() factory.
# Each driver is registered only if its client module can be imported.
db_types = {}
try:
    import MySQLdb
    def mysql_connect(dbuser, dbpasswd, dbhostname, dbport, dbname):
        """Open a MySQL connection (default port 3306)."""
        if not dbport:
            dbport = 3306
        return MySQLdb.connect(user=dbuser,
                               passwd=dbpasswd,
                               host=dbhostname,
                               port=dbport,
                               db=dbname)
    db_types['mysql'] = mysql_connect
except ImportError:
    pass
try:
    import psycopg2
    def postgres_connect(dbuser, dbpasswd, dbhostname, dbport, dbname):
        """Open a PostgreSQL connection (default port 5432)."""
        if not dbport:
            dbport = 5432
        return psycopg2.connect(
            "user='%s' password='%s' host='%s' port='%s' dbname='%s'" %
            (dbuser, dbpasswd, dbhostname, dbport, dbname))
    db_types['postgres'] = postgres_connect
except ImportError:
    pass
def clean_multiline(raw_string):
    """Drop characters outside the allowed set (printable ASCII + \\n \\t \\r)."""
    return ''.join(ch for ch in raw_string if ord(ch) in OK_CHARS)
def force_singleline(raw_string):
    """Collapse a string onto one line by removing \\n, \\t and \\r."""
    return ''.join(ch for ch in raw_string if ord(ch) not in NEWLINE_CHARS)
def fix_string(str_from_db):
    """Pass a database string through unchanged.

    Placeholder for an encoding-conversion hook: Bloog accepts latin-1
    and converts to utf-8 itself, so no transformation is needed here.
    """
    return str_from_db
def fix_thread_string(tstr):
    """Normalize a Drupal comment-thread string.

    Input numbers are separated by periods and may carry a trailing '/';
    the output zero-pads each number to three digits, e.g.
    '1.12/' -> '001.012'.
    """
    padded = []
    for part in tstr.split('.'):
        if part[-1] == '/':
            part = part[:-1]
        padded.append("%03d" % int(part))
    return '.'.join(padded)
class Error(Exception):
    """Base-class for exceptions in this module.

    Catching Error traps every failure raised by this uploader.
    """
class UsageError(Error):
    """Raised for bad command-line usage; .msg holds the text to print."""
    def __init__(self, msg):
        self.msg = msg
class HTTPConnectError(Error):
    """An error has occurred while trying to connect to the Bloog app."""
class RequestError(Error):
    """An error occurred while trying a HTTP request to the Bloog app."""
class UnsupportedSchemeError(Error):
    """Tried to access url with unsupported scheme (not http or https)."""
    # Raised by HttpRESTClient.connect().
class HttpRESTClient(object):
@staticmethod
def connect(scheme, netloc):
if scheme == 'http':
return httplib.HTTPConnection(netloc)
if scheme == 'https':
return httplib.HTTPSConnection(netloc)
raise UnsupportedSchemeError()
def __init__(self, auth_cookie):
self.auth_cookie = auth_cookie
def do_request(self, url, verb, headers, body=''):
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
print("Trying %s to %s (%s) using %s" % (verb, netloc, path, scheme))
try:
connection = HttpRESTClient.connect(scheme, netloc)
try:
connection.request(verb, path+'?'+query, body, headers)
response = connection.getresponse()
status = response.status
reason = response.reason
content = response.read()
tuple_headers = response.getheaders()
print('Received response code %d: %s\n%s' %
(status, reason, content))
if status != httplib.OK:
raise RequestError('Request error, code %d: %s\n%s' %
(status, reason, content))
return status, reason, content, tuple_headers
finally:
connection.close()
except (IOError, httplib.HTTPException, socket.error), e:
print('Encountered exception accessing HTTP server: %s', e)
raise HTTPConnectError(e)
def get(self, url):
headers = {}
headers['Cookie'] = self.auth_cookie
print "Cookie:", self.auth_cookie
self.do_request(url, 'GET', headers)
def post(self, url, body_dict):
body = urllib.urlencode(body_dict)
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': len(body),
'Cookie': self.auth_cookie
}
status, reason, content, tuple_headers = \
self.do_request(url, 'POST', headers, body)
# Our app expects POSTs to return the new post's URL.
if status != 200:
raise RequestError('Unexpected response from web app: '
'%s, %s, %s' % (status, reason, content))
return content
class BlogConverter(object):
    """Base class for importing a legacy blog into a Bloog app via REST.

    Subclasses supply the database-specific extraction (get_articles,
    get_article_tags, get_article_comments, get_redirects); go() drives
    the upload through an HttpRESTClient.
    """
    def __init__(self, auth_cookie, conn, app_url, table_prefix='',
                 static_redirect=None):
        # conn is an already-open DB-API connection (see db_types).
        self.webserver = HttpRESTClient(auth_cookie)
        self.app_url = app_url
        self.table_prefix = table_prefix
        self.conn = conn
        self.cursor = self.conn.cursor()
        self.static_redirect = static_redirect
    def close(self):
        """Release the database cursor and connection."""
        self.cursor.close()
        self.conn.close()
    def go(self, num_articles = None):
        """Upload articles, their comments, and legacy redirects.

        num_articles, when set, caps how many articles are uploaded
        (useful for testing). Also writes legacy_aliases.py mapping old
        URLs to the new permalinks collected during upload.
        """
        # Get all articles
        self.redirect = {} # Keys are legacy IDs and maps to permalink
        article_count = 0
        for article in self.get_articles():
            article = self.get_article_tags(article)
            # Store the article by posting to either root (if "page")
            # or blog month (if "blog" entry)
            print('Posting article with title "%s" to %s' %
                  (article['title'], article['post_url']))
            entry_permalink = self.webserver.post(
                self.app_url + article['post_url'],
                article)
            if article['legacy_id']:
                self.redirect[article['legacy_id']] = entry_permalink
            print('Bloog successfully stored at %s' % (entry_permalink))
            comment_posting_url = self.app_url + entry_permalink
            for comment in self.get_article_comments(article):
                print ("Posting comment '%s' to %s"
                       % (comment['title'], comment_posting_url))
                self.webserver.post(comment_posting_url, comment)
            article_count += 1
            if num_articles and article_count >= num_articles:
                break
        # create_python_routing from url_alias table
        f = open('legacy_aliases.py', 'w')
        print >>f, "redirects = {"
        for src, dest in self.get_redirects():
            print >>f, "    '%s': '%s'," % \
                (src.lower(), dest)
        print >>f, "}"
        f.close()
    def get_articles(self):
        """Returns an iterable of blog articles to be imported."""
        raise NotImplementedError()
    def get_article_tags(self, article):
        """Annotates an article with tags."""
        return article
    def get_article_comments(self, article):
        """Returns an iterable of comments associated with an article."""
        return []
    def get_redirects(self):
        """Returns an iterable of (src, dest) redirect tuples."""
        return []
class SerendipityConverter(BlogConverter):
    """Imports articles, tags, comments and image redirects from a
    Serendipity (s9y) database.

    NOTE(review): SQL is built by string interpolation (table_prefix and
    legacy_id); acceptable for a one-shot migration of one's own DB, but
    not safe against untrusted values.
    """
    def get_articles(self):
        """Yield article dicts for every non-draft entry."""
        self.cursor.execute("SELECT id, title, timestamp, last_modified, body"
                            " FROM %sentries WHERE NOT isdraft"
                            % (self.table_prefix,))
        rows = self.cursor.fetchall()
        for row in rows:
            article = {}
            article['legacy_id'] = row[0]
            article['title'] = force_singleline(row[1])
            article['format'] = None
            article['body'] = re.sub('\n', '<br />', row[4])
            article['html'] = article['body']
            article['format'] = 'html'
            published = datetime.datetime.fromtimestamp(row[2])
            last_modified = datetime.datetime.fromtimestamp(row[3])
            article['published'] = str(published)
            article['updated'] = str(last_modified)
            article['post_url'] = '/%s/%s/' % (published.year, published.month)
            yield article
    def get_article_tags(self, article):
        """Attach tag names, walking each category up to the root.

        Relies on self.tags having been loaded by go().
        """
        article_tags = set()
        self.cursor.execute("SELECT categoryid FROM %sentrycat"
                            " WHERE entryid = %s"
                            % (self.table_prefix, article['legacy_id']))
        rows = self.cursor.fetchall()
        for row in rows:
            tag = self.tags.get(row[0], None)
            while tag:
                article_tags.add(tag['name'])
                tag = self.tags.get(tag['parent'], None)
        article['tags'] = ','.join(article_tags)
        return article
    def get_article_comments(self, article):
        """Yield comment dicts with thread strings reflecting nesting.

        Builds an in-memory parent->children tree (key 0 is the virtual
        root), then walks it depth-first with an explicit stack; each
        comment's 'thread' is the dotted path of 3-digit thread ids.
        """
        self.cursor.execute("SELECT entry_id, id, parent_id, title, body, "
                            "timestamp, author, email, url FROM %scomments "
                            "WHERE entry_id = %s ORDER BY entry_id,parent_id,id"
                            % (self.table_prefix, article['legacy_id']))
        rows = self.cursor.fetchall()
        comments = {0: { 'children': []}}
        thread_id_ctr = 0
        for row in rows:
            comments[row[1]] = {
                'data': row,
                'children': [],
                'thread_id': thread_id_ctr
            }
            comments[row[2]]['children'].append(row[1])
            thread_id_ctr += 1
        stack = []
        for i in comments[0]['children']:
            stack.append(((comments[i]['thread_id'],), comments[i]))
        while stack:
            thread, entry = stack.pop()
            data = entry['data']
            yield {
                'title': data[3],
                'body': re.sub('\n', '<br />', data[4]),
                'published': str(datetime.datetime.fromtimestamp(data[5])),
                'thread': '.'.join('%03d' % x for x in thread),
                'name': data[6],
                'email': data[7],
                'homepage': data[8],
            }
            for i in comments[data[1]]['children']:
                stack.append((thread + (comments[i]['thread_id'],),
                              comments[i]))
    def get_redirects(self):
        """Yield (old_path, redirected_url) pairs for uploaded images
        (and their thumbnails) when a static_redirect prefix is set."""
        if self.static_redirect:
            self.cursor.execute("SELECT name, extension, thumbnail_name "
                                "FROM %simages" % (self.table_prefix,))
            rows = self.cursor.fetchall()
            for row in rows:
                path = "uploads/%s.%s" % row[0:2]
                yield (path, self.static_redirect + path)
                if row[2]:
                    thumbpath = "uploads/%s.%s.%s" % (row[0], row[2], row[1])
                    yield (thumbpath, self.static_redirect + thumbpath)
    def go(self, num_articles=None):
        """Load the category tree into self.tags, then run the base import."""
        self.cursor.execute("SELECT categoryid, parentid, category_name"
                            " FROM %scategory"
                            % (self.table_prefix,))
        rows = self.cursor.fetchall()
        self.tags = {}
        for row in rows:
            self.tags[row[0]] = {
                'parent': row[1],
                'name': row[2],
            }
        super(SerendipityConverter, self).go(num_articles)
class DrupalConverter(BlogConverter):
    """
    Makes remote connection to MySQL database for Drupal 4.* blog.
    Uses data in the following tables to initialize a Bloog app:
    - comments
    - node
    - term_data
    - term_hierarchy
    - term_node
    - url_alias
    Uploading data to the Bloog app is done solely through RESTful calls.
    """
    # Maps Drupal's numeric input-format ids (0-4) to markup names for
    # get_html(). Index 2 (PHP code) is deliberately unsupported.
    drupal_format_description = [
        None,
        "filtered html",
        None, # php code which we'll reject
        "html", # full html
        "textile"
    ]
    def get_html(self, raw_body, markup_type):
        """ Convert various Drupal formats to html """
        utf8_body = fix_string(raw_body)
        def repl(tmatch):
            # Regex-substitution callback: render the [textile] payload.
            if tmatch: # Assume latin-1. Will be converted by Bloog.
                return textile.textile(tmatch.group(1),
                                       encoding='latin-1', output='latin-1')
        # Because Drupal textile formatting allows use of [textile][/textile]
        # delimeters, remove them.
        if markup_type == 'textile':
            pattern = re.compile('\[textile\](.*)\[/textile\]',
                                 re.MULTILINE | re.IGNORECASE | re.DOTALL)
            body = re.sub(pattern, repl, utf8_body)
        elif markup_type == 'filtered html':
            body = re.sub('\n', '<br />', utf8_body)
        else:
            body = raw_body
        return body
    def get_articles(self):
        """Yield article dicts from Drupal's node table.

        Only 'page' and 'blog' node types are imported; blog entries are
        POSTed to their /year/month/ URL, pages to the root.
        """
        self.cursor.execute("SELECT * FROM node")
        rows = self.cursor.fetchall()
        for row in rows:
            article = {}
            ntype = row[1]
            if ntype in ['page', 'blog']:
                article['legacy_id'] = row[0]
                article['title'] = force_singleline(row[2])
                article['format'] = None
                # row[14] is the node's input-format id; only 0-4 are known.
                if row[14] >= 0 and row[14] <= 4:
                    cur_format = self.drupal_format_description[row[14]]
                    article['body'] = self.get_html(raw_body=row[11],
                                                    markup_type=cur_format)
                    article['html'] = article['body']
                    # Because Drupal lets you intermix textile with other
                    # markup, just convert it all to HTML
                    article['format'] = 'html'
                published = datetime.datetime.fromtimestamp(row[5])
                article['published'] = str(published)
                article['updated'] = \
                    str(datetime.datetime.fromtimestamp(row[6]))
                # Determine where to POST this article if it's a
                # article or a blog entry
                if ntype == 'blog':
                    article['post_url'] = '/' + str(published.year) + \
                        '/' + str(published.month) + "/"
                else:
                    article['post_url'] = '/'
                yield article
            else:
                # NOTE(review): article['title'] is never assigned on this
                # path, so this print raises KeyError for any node that is
                # not a page/blog; it presumably meant row[2]. The message
                # text ("bad format") is also misleading — rejection is by
                # node type here.
                print "Rejected article with title (", \
                    article['title'], ") because bad format."
    def get_article_tags(self, article):
        """Attach tag names from term_node, walking up term_hierarchy.

        Relies on self.tags having been loaded by go().
        """
        # Add tags to each article by looking at term_node table
        sql = "SELECT d.tid FROM term_data d, term_node n " \
              "WHERE d.tid = n.tid AND n.nid = " + \
              str(article['legacy_id'])
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        tag_names = set()
        for row in rows:
            tid = row[0]
            # Walk up the term tree and add all tags along path to root
            while tid:
                tag_names.update([self.tags[tid]['name']])
                tid = self.tags[tid]['parent']
        article['tags'] = ','.join(tag_names)
        return article
    def get_article_comments(self, article):
        """Yield comment dicts for one article from Drupal's comments table."""
        # Store comments associated with the article
        sql = "SELECT subject, comment, timestamp, thread, name, mail, " \
              "homepage FROM comments WHERE nid = " + \
              str(article['legacy_id'])
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        for row in rows:
            # Store comment associated with article by POST to
            # article entry url
            comment = {
                'title': force_singleline(row[0]),
                'body': fix_string(row[1]),
                'published': str(datetime.datetime.fromtimestamp(row[2])),
                'thread': fix_thread_string(force_singleline(row[3])),
                'name': force_singleline(row[4]),
                'email': force_singleline(row[5]),
                'homepage': force_singleline(row[6])
            }
            yield comment
    def get_redirects(self):
        """Yield (alias, new_permalink) pairs from Drupal's url_alias table.

        Only aliases pointing at node/<id> entries that were actually
        uploaded (tracked in self.redirect by go()) are emitted.
        """
        self.cursor.execute("SELECT * FROM url_alias")
        rows = self.cursor.fetchall()
        for row in rows:
            nmatch = re.match('node/(\d+)', row[1])
            if nmatch:
                legacy_id = string.atoi(nmatch.group(1))
                if legacy_id in self.redirect:
                    yield (row[2], self.redirect[legacy_id])
    def go(self, num_articles=None):
        """Load term (tag) names and hierarchy into self.tags, then import."""
        # Get all the term (tag) data and the hierarchy pattern
        self.cursor.execute("SELECT tid, name FROM term_data")
        rows = self.cursor.fetchall()
        self.tags = {}
        for row in rows:
            tid = row[0]
            self.tags[tid] = {'name': row[1]}
        self.cursor.execute("SELECT tid, parent FROM term_hierarchy")
        rows = self.cursor.fetchall()
        for row in rows:
            self.tags[row[0]]['parent'] = row[1]
        super(DrupalConverter, self).go(num_articles)
# Registry mapping --blogtype option values to converter classes.
blog_types = {
    'serendipity': SerendipityConverter,
    'drupal': DrupalConverter,
}
def main(argv):
    """Parse options, connect to the legacy blog DB, upload to Bloog.

    argv is the full sys.argv (argv[0] is the program name; argv[1] is
    the auth cookie unless -r/--root is given). Returns 1 for bad
    option values, 2 for usage errors, None on success.
    """
    try:
        try:
            opts, args = getopt.gnu_getopt(argv, 'hrd:p:u:n:l:a:vD:t:b:R:',
                                           ["help", "root", "dbhostname=",
                                            "dbport=", "dbuserpwd=", "dbname=",
                                            "url=", "articles=", "dbtype=",
                                            "prefix=", "blogtype=",
                                            "static_redirect="])
        except getopt.error, msg:
            raise UsageError(msg)
        # Option defaults.
        blogtype = 'drupal'
        dbtype = 'mysql'
        table_prefix = ''
        dbhostname = 'localhost'
        dbport = None
        dbname = None
        dbuser = ''
        dbpasswd = ''
        app_url = 'http://localhost:8080'
        num_articles = None
        static_redirect = None
        # option processing
        local_admin = None
        for option, value in opts:
            print "Looking at option:", str(option), str(value)
            if option == "-v":
                # NOTE(review): 'verbose' is assigned but never used.
                verbose = True
            if option in ("-h", "--help"):
                raise UsageError(help_message)
            if option in ("-r", "--root"):
                # Dev-server admin cookie; skips the positional cookie arg.
                local_admin = 'dev_appserver_login="root@example.com:True"'
            if option in ("-D", "--dbtype"):
                if value not in db_types:
                    print "-D, --dbtype must be one of %r" % db_types.keys()
                    return 1
                dbtype = value
            if option in ("-t", "--prefix"):
                table_prefix = value
            if option in ("-b", "--blogtype"):
                if value not in blog_types:
                    print "-b, --blogtype must be one of %r" % blog_types.keys()
                    return 1
                blogtype = value
            if option in ("-d", "--dbhostname"):
                dbhostname = value
            if option in ("-p", "--dbport"):
                dbport = value
            if option in ("-u", "--dbuserpwd"):
                userpwd = value.split(":")
                try:
                    dbuser = userpwd[0]
                    dbpasswd = userpwd[1]
                # NOTE(review): bare except; an IndexError is what is
                # actually expected when the ':' is missing.
                except:
                    print "-u, --dbuserpwd should be followed by " \
                          "'username:passwd' with colon separating " \
                          "required information"
            if option in ("-n", "--dbname"):
                dbname = value
            if option in ("-a", "--articles"):
                num_articles = string.atoi(value)
            if option in ("-l", "--url"):
                print "Got url:", value
                app_url = value
                # Normalize: ensure an http scheme, strip trailing slash.
                if app_url[:4] != 'http':
                    app_url = 'http://' + app_url
                if app_url[-1] == '/':
                    app_url = app_url[:-1]
            if option in ("-R", "--static_redirect"):
                static_redirect = value
        if not dbname:
            # Database name defaults to the blog type ('drupal', ...).
            dbname = blogtype
        if len(args) < 2 and not local_admin:
            raise UsageError("Please specify the authentication cookie string"
                             " as first argument.")
        else:
            auth_cookie = local_admin or args[1]
            #TODO - Use mechanize module to programmatically login
            #email = raw_input("E-mail: ")
            #passwd = getpass.getpass("Password: ")
            print dbuser, dbpasswd, dbhostname, dbport, dbname
            conn = db_types[dbtype](dbuser, dbpasswd, dbhostname, dbport,
                                    dbname)
            converter = blog_types[blogtype](auth_cookie=auth_cookie,
                                             conn=conn,
                                             app_url=app_url,
                                             table_prefix=table_prefix,
                                             static_redirect=static_redirect)
            converter.go(num_articles)
            converter.close()
    except UsageError, err:
        print >> sys.stderr, sys.argv[0].split("/")[-1] + ": " + str(err.msg)
        print >> sys.stderr, "\t for help use --help"
        return 2
# Script entry point: exit status propagates the converter's return code.
if __name__ == "__main__":
    sys.exit(main(sys.argv))
| |
#!/usr/bin/env python
#This tool allow users to plot SVM-prob ROC curve from data
from svmutil import *
from sys import argv, platform
from os import path, popen
from random import randrange , seed
from operator import itemgetter
from time import sleep
#search path for gnuplot executable
#be careful on using windows LONG filename, surround it with double quotes.
#and leading 'r' to make it raw string, otherwise, repeat \\.
gnuplot_exe_list = [r'"C:\Program Files\gnuplot\pgnuplot.exe"', "/usr/bin/gnuplot","/usr/local/bin/gnuplot"]
def get_pos_deci(train_y, train_x, test_y, test_x, param):
	"""Train an SVM on the training set and return (decision_values, model).

	Decision values are sign-normalised so that a positive value always
	votes for label +1, regardless of libsvm's internal label ordering.
	"""
	model = svm_train(train_y, train_x, param)
	labels = model.get_labels()
	_, _, raw_deci = svm_predict(test_y, test_x, model)
	# val[0] is the decision value w.r.t. labels[0]; multiplying by
	# labels[0] (+1 or -1) makes deci > 0 mean "predicted positive".
	return [labels[0] * val[0] for val in raw_deci], model
def get_cv_deci(prob_y, prob_x, param, nr_fold):
	"""Return cross-validated decision values for every instance.

	With nr_fold of 0 or 1 the model is trained and evaluated on the full
	data set.  Otherwise prob_x/prob_y are shuffled IN PLACE and each fold
	is predicted by a model trained on the remaining folds.  Call seed(0)
	beforehand for a reproducible shuffle.
	"""
	if nr_fold == 1 or nr_fold == 0:
		# Degenerate case: train and test on the same data.
		return get_pos_deci(prob_y, prob_x, prob_y, prob_x, param)[0]
	total = len(prob_y)
	# In-place Fisher-Yates style shuffle (keeps x and y aligned).
	for i in range(total):
		j = randrange(i, total)
		prob_x[i], prob_x[j] = prob_x[j], prob_x[i]
		prob_y[i], prob_y[j] = prob_y[j], prob_y[i]
	all_deci = []
	# Train on everything outside the fold, then predict the fold itself.
	for fold in range(nr_fold):
		lo = fold * total // nr_fold
		hi = (fold + 1) * total // nr_fold
		fold_deci, _ = get_pos_deci(prob_y[:lo] + prob_y[hi:],
		                            prob_x[:lo] + prob_x[hi:],
		                            prob_y[lo:hi], prob_x[lo:hi], param)
		all_deci += fold_deci
	return all_deci
# A simple pipe-based gnuplot driver.
class gnuplot:
	"""Write gnuplot commands to a gnuplot subprocess.

	NOTE: __setattr__ is overloaded to forward ``obj.attr = "text"`` as the
	gnuplot command ``set attr "text"``, so all internal state must be
	stored via self.__dict__ to avoid triggering it.
	"""
	def __init__(self, term='onscreen'):
		# -persist leaves the plot window on screen after gnuplot terminates.
		if platform == 'win32':
			cmdline = gnuplot_exe
			self.__dict__['screen_term'] = 'windows'
		else:
			cmdline = gnuplot_exe + ' -persist'
			self.__dict__['screen_term'] = 'x11'
		self.__dict__['iface'] = popen(cmdline,'w')
		self.set_term(term)
	def set_term(self, term):
		"""Select the output terminal: 'onscreen', '*.ps' or '*.png'."""
		# Remember the requested terminal for __repr__.  Stored directly in
		# __dict__ so the __setattr__ overload below is not triggered.
		self.__dict__['term'] = term
		if term=='onscreen':
			self.writeln("set term %s" % self.screen_term)
		else:
			#term must be either x.ps or x.png
			if term.find('.ps')>0:
				self.writeln("set term postscript eps color 22")
			elif term.find('.png')>0:
				self.writeln("set term png")
			else:
				print("You must set term to either *.ps or *.png")
				raise SystemExit
			# Routed through __setattr__: emits 'set output "<term>"'.
			self.output = term
	def writeln(self,cmdline):
		# Send one command line to the gnuplot subprocess.
		self.iface.write(cmdline + '\n')
	def __setattr__(self, attr, val):
		# Attribute assignment doubles as a gnuplot 'set' command.
		if type(val) == str:
			self.writeln('set %s \"%s\"' % (attr, val))
		else:
			print("Unsupport format:", attr, val)
			raise SystemExit
	#terminate gnuplot
	def __del__(self):
		self.writeln("quit")
		self.iface.flush()
		self.iface.close()
	def __repr__(self):
		# BUG FIX: the original referenced an undefined global 'term'
		# (NameError); report the terminal recorded by set_term instead.
		return "<gnuplot instance: output=%s>" % self.term
	#data is a list of [x,y]
	def plotline(self, data):
		self.writeln("plot \"-\" notitle with lines linewidth 1")
		for i in range(len(data)):
			self.writeln("%f %f" % (data[i][0], data[i][1]))
			sleep(0) #delay
		self.writeln("e")
		if platform=='win32':
			sleep(3)
#processing argv and set some global variables
def proc_argv(argv = argv):
	"""Split the command line into (libsvm_options, cv_fold, train_file, test_file).

	Expected form:
	    plotroc.py [-v cv_fold | -T testing_file] [libsvm-options] training_file
	The last argument is always the training file; -v and -T each consume
	one value, and every other token is passed through to libsvm.
	"""
	train_file = argv[-1]
	test_file = None
	fold = 5  # default number of cross-validation folds
	passthrough = []
	idx = 1
	while idx < len(argv) - 1:
		arg = argv[idx]
		if arg == '-T':
			test_file = argv[idx + 1]
			idx += 1
		elif arg == '-v':
			fold = int(argv[idx + 1])
			idx += 1
		else:
			passthrough.append(arg)
		idx += 1
	return ' '.join(passthrough), fold, train_file, test_file
def plot_roc(deci, label, output, title):
	"""Plot an ROC curve from decision values, to file *output* and on screen.

	deci and label are parallel lists; label > 0 marks a positive
	instance.  The area under the curve is computed for the plot title.
	"""
	# Pair each decision value with its true label and count the classes.
	db = [[d, l] for d, l in zip(deci, label)]
	pos = sum(1 for l in label if l > 0)
	neg = len(label) - pos
	# Sweep thresholds from the most confident prediction downwards.
	db.sort(key=itemgetter(0), reverse=True)
	xy_arr = []
	tp, fp = 0., 0.  # floats so the divisions below are exact
	for _, lab in db:
		if lab > 0:  # positive instance
			tp += 1
		else:
			fp += 1
		xy_arr.append([fp / neg, tp / pos])
	# Riemann sum of the curve gives the area under it.
	aoc = 0.
	prev_x = 0
	for x, y in xy_arr:
		if x != prev_x:
			aoc += (x - prev_x) * y
			prev_x = x
	if title == None:
		title = output
	# Render twice: once to the requested output file, once on screen.
	for destination in (output, 'onscreen'):
		g = gnuplot(destination)
		g.xlabel = "False Positive Rate"
		g.ylabel = "True Positive Rate"
		g.title = "ROC curve of %s (AUC = %.4f)" % (title, aoc)
		g.plotline(xy_arr)
def check_gnuplot_exe():
	"""Locate a usable gnuplot binary and store it in the global gnuplot_exe.

	Aborts the script when none of the candidate paths exists.
	"""
	global gnuplot_exe
	gnuplot_exe = None
	for candidate in gnuplot_exe_list:
		# Quotes only matter when spawning; strip them for the existence test.
		if path.exists(candidate.replace('"', '')):
			gnuplot_exe = candidate
			break
	else:
		print("You must add correct path of 'gnuplot' into gnuplot_exe_list")
		raise SystemExit
def main():
	"""Entry point: parse argv, compute decision values, and plot the ROC curve."""
	check_gnuplot_exe()
	if len(argv) <= 1:
		print("Usage: %s [-v cv_fold | -T testing_file] [libsvm-options] training_file" % argv[0])
		raise SystemExit
	param,fold,train_file,test_file = proc_argv()
	output_file = path.split(train_file)[1] + '-roc.png'
	#read data
	train_y, train_x = svm_read_problem(train_file)
	if set(train_y) != set([1,-1]):
		print("ROC is only applicable to binary classes with labels 1, -1")
		raise SystemExit
	#get decision value, with positive = label+
	seed(0) #reset random seed so CV shuffling is reproducible
	# With a test file, train once on the full training set; otherwise
	# fall back to cross-validation on the training data alone.
	if test_file: #go with test_file
		output_title = "%s on %s" % (path.split(test_file)[1], path.split(train_file)[1])
		test_y, test_x = svm_read_problem(test_file)
		if set(test_y) != set([1,-1]):
			print("ROC is only applicable to binary classes with labels 1, -1")
			raise SystemExit
		deci,model = get_pos_deci(train_y, train_x, test_y, test_x, param)
		plot_roc(deci, test_y, output_file, output_title)
	else: #single file -> CV
		output_title = path.split(train_file)[1]
		deci = get_cv_deci(train_y, train_x, param, fold)
		plot_roc(deci, train_y, output_file, output_title)
# Script entry point.
if __name__ == '__main__':
	main()
| |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for dsub.lib.param_util."""
from __future__ import absolute_import
from __future__ import print_function
import datetime
import doctest
import os
import re
import unittest
from dsub.lib import job_util
from dsub.lib import param_util
import parameterized
PL = param_util.P_LOCAL
PG = param_util.P_GCS
class ParamUtilTest(unittest.TestCase):
  """Tests for the basic parameter classes, task-file parsing, and age strings."""
  def testEnvParam(self):
    """EnvParam stores its name/value pair unchanged."""
    env_param = param_util.EnvParam('my_name', 'my_value')
    self.assertEqual('my_name', env_param.name)
    self.assertEqual('my_value', env_param.value)
  def testLabelParam(self):
    """LabelParam accepts valid label syntax and rejects invalid names/values."""
    good_labels = [('genre', 'jazz'), ('underscores_are',
                                       'totally_ok'), ('dashes-are', 'also-ok'),
                   ('num_123', 'good_456'), ('final_underscore_', 'ok_too_'),
                   ('final-dash', 'no-problem-'), ('optional_value',
                                                   ''), ('a' * 63,
                                                         'not_too_long')]
    for name, value in good_labels:
      label_param = param_util.LabelParam(name, value)
      self.assertEqual(name, label_param.name)
      self.assertEqual(value, label_param.value)
    bad_labels = [('WHATS',
                   'updog'), ('1', 'initial_number'), ('initial_number', '1'),
                  ('-', 'initial_dash'), ('initial_dash', '-'),
                  ('spaces bad', ''), ('midCaps', 'bad'), ('bad', 'midCaps'),
                  ('a' * 64, 'too_long'), ('', 'name_required'), ('too_long',
                                                                  'a' * 64)]
    for name, value in bad_labels:
      with self.assertRaises(ValueError):
        param_util.LabelParam(name, value)
  def testFileParam(self):
    """FileParam keeps name, docker path, uri, and the recursive flag."""
    file_param = param_util.FileParam(
        'my_name',
        'my_value',
        'my_docker_path',
        'my_remote_uri',
        recursive=True,
        file_provider=param_util.P_GCS)
    self.assertEqual('my_name', file_param.name)
    self.assertEqual('my_docker_path', file_param.docker_path)
    self.assertEqual('my_remote_uri', file_param.uri)
    self.assertTrue(file_param.recursive)
  def testParseTasksFileHeader(self):
    """A tab-separated header row is parsed into env/input/output params."""
    header = '--env SAMPLE_ID\t--input VCF_FILE\t--output-recursive OUTPUT_PATH'
    header = header.split('\t')
    input_file_param_util = param_util.InputFileParamUtil('input')
    output_file_param_util = param_util.OutputFileParamUtil('output')
    job_params = param_util.parse_tasks_file_header(
        header, input_file_param_util, output_file_param_util)
    self.assertEqual(3, len(job_params))
    # The first one is the SAMPLE env param.
    self.assertTrue(isinstance(job_params[0], param_util.EnvParam))
    self.assertEqual('SAMPLE_ID', job_params[0].name)
    self.assertTrue(isinstance(job_params[1], param_util.InputFileParam))
    self.assertEqual('VCF_FILE', job_params[1].name)
    self.assertFalse(job_params[1].recursive)
    self.assertTrue(isinstance(job_params[2], param_util.OutputFileParam))
    self.assertEqual('OUTPUT_PATH', job_params[2].name)
    self.assertTrue(job_params[2].recursive)
  def testTasksFileToJobData(self):
    """Each row of the tasks TSV becomes one job with env/input/output params."""
    expected_tsv_file = 'test/testdata/params_tasks.tsv'
    input_file_param_util = param_util.InputFileParamUtil('input')
    output_file_param_util = param_util.OutputFileParamUtil('output')
    all_job_data = param_util.tasks_file_to_job_data({
        'path': expected_tsv_file
    }, input_file_param_util, output_file_param_util)
    self.assertEqual(4, len(all_job_data))
    for i in range(4):
      job_data = all_job_data[i]
      self.assertEqual('SAMPLE_ID', job_data['envs'][0].name)
      self.assertEqual('sid-00%d' % i, job_data['envs'][0].value)
      self.assertEqual('VCF_FILE', job_data['inputs'][0].name)
      self.assertEqual('input/gs/inputs/sid-00%d.vcf' % i,
                       job_data['inputs'][0].docker_path)
      self.assertEqual('OUTPUT_PATH', job_data['outputs'][0].name)
      self.assertEqual('output/gs/outputs/results-00%d/' % i,
                       job_data['outputs'][0].docker_path)
  # Fixed reference point for age_to_create_time (evaluated at class
  # creation so it can be used inside the parameterized.expand tables).
  fixed_time = datetime.datetime(2017, 1, 1)
  fixed_time_utc = int(
      (fixed_time - datetime.datetime.utcfromtimestamp(0)).total_seconds())
  @parameterized.parameterized.expand([
      ('simple_second', '1s', fixed_time_utc - 1),
      ('simple_minute', '1m', fixed_time_utc - (1 * 60)),
      ('simple_hour', '1h', fixed_time_utc - (1 * 60 * 60)),
      ('simple_day', '1d', fixed_time_utc - (24 * 60 * 60)),
      ('simple_week', '1w', fixed_time_utc - (7 * 24 * 60 * 60)),
      ('simple_now', str(fixed_time_utc), fixed_time_utc),
  ])
  def test_compute_create_time(self, unused_name, age, expected):
    """Age strings (1s/1m/1h/1d/1w or a raw epoch) convert to timestamps."""
    result = param_util.age_to_create_time(age, self.fixed_time)
    self.assertEqual(expected, result)
  @parameterized.parameterized.expand([
      ('bad_units', '1second'),
      ('overflow', '100000000w'),
  ])
  def test_compute_create_time_fail(self, unused_name, age):
    """Malformed or overflowing age strings raise ValueError."""
    with self.assertRaisesRegexp(ValueError, 'Unable to parse age string'):
      _ = param_util.age_to_create_time(age)
class FileParamUtilTest(unittest.TestCase):
  """Tests for URI parsing and docker-path rewriting in file param utils."""
  @parameterized.parameterized.expand([
      ('lf', False, 'file:///tmp/myfile', 'file/tmp/myfile', PL),
      ('lf', False, '/tmp/myfile', 'file/tmp/myfile', PL),
      ('lf', False, '../../myfile', 'file/_dotdot_/_dotdot_/myfile', PL),
      ('lf', False, '~/tmp/myfile', 'file/_home_/tmp/myfile', PL),
      ('gf', False, 'gs://tmp/myfile', 'gs/tmp/myfile', PG),
      ('gf', False, 'gs://tmp/myfile', 'gs/tmp/myfile', PG),
      ('gf', False, 'gs://bucket/../myfile', 'gs/bucket/../myfile', PG),
      # Recursive tests for local and google
      ('lr', True, 'file:///tmp/myfile/', 'file/tmp/myfile/', PL),
      ('lr', True, '/tmp/myfile', 'file/tmp/myfile/', PL),
      ('lr', True, '../../myfile/', 'file/_dotdot_/_dotdot_/myfile/', PL),
      ('lr', True, '~/tmp/myfile', 'file/_home_/tmp/myfile/', PL),
      ('gr', True, 'gs://tmp/myfile/', 'gs/tmp/myfile/', PG),
      ('gr', True, 'gs://tmp/myfile', 'gs/tmp/myfile/', PG),
      ('gr', True, 'gs://bucket/../myfile', 'gs/bucket/../myfile/', PG),
      # wildcard tests for local and google.
      ('wc', False, 'gs://bucket/f/*.txt', 'gs/bucket/f/*.txt', PG),
      ('wc', False, 'gs://bucket/f/*', 'gs/bucket/f/*', PG),
      ('wc', False, '*.bam', 'file/*.bam', PL),
      ('wc', False, '../*', 'file/_dotdot_/*', PL),
  ])
  def test_input_file_docker_rewrite(self, _, recursive, uri, docker, provider):
    """Input URIs are rewritten to docker paths under the input directory."""
    docker = os.path.join('input', docker)
    file_param_util = param_util.InputFileParamUtil('input')
    param = file_param_util.make_param('TEST', uri, recursive)
    self.assertIsInstance(param, param_util.InputFileParam)
    self.assertEqual('TEST', param.name)
    self.assertEqual(docker, param.docker_path)
    self.assertEqual(provider, param.file_provider)
  @parameterized.parameterized.expand([
      # Non-recursive tests for local and google
      ('lf', False, 'file:///tmp/myfile', 'file/tmp/myfile', PL),
      ('lf', False, '/tmp/myfile', 'file/tmp/myfile', PL),
      ('lf', False, '../../myfile', 'file/_dotdot_/_dotdot_/myfile', PL),
      ('lf', False, '~/tmp/myfile', 'file/_home_/tmp/myfile', PL),
      ('lf', False, '/a../myfile', 'file/a../myfile', PL),
      ('lf', False, '../myfile', 'file/_dotdot_/myfile', PL),
      ('gf', False, 'gs://tmp/myfile', 'gs/tmp/myfile', PG),
      ('gf', False, 'gs://tmp/myfile', 'gs/tmp/myfile', PG),
      ('gf', False, 'gs://bucket/../myfile', 'gs/bucket/../myfile', PG),
      # Recursive tests for local and google
      ('lr', True, 'file:///tmp/myfile/', 'file/tmp/myfile/', PL),
      ('lr', True, '/tmp/myfile', 'file/tmp/myfile/', PL),
      ('lr', True, '../../myfile/', 'file/_dotdot_/_dotdot_/myfile/', PL),
      ('lr', True, '~/tmp/myfile', 'file/_home_/tmp/myfile/', PL),
      ('gr', True, 'gs://tmp/myfile/', 'gs/tmp/myfile/', PG),
      ('gr', True, 'gs://tmp/myfile', 'gs/tmp/myfile/', PG),
      ('gr', True, 'gs://bucket/../myfile', 'gs/bucket/../myfile/', PG),
      # wildcard tests for local and google.
      ('wc', False, 'gs://bucket/f/*.txt', 'gs/bucket/f/*.txt', PG),
      ('wc', False, 'gs://bucket/f/*', 'gs/bucket/f/*', PG),
      ('wc', False, '*.bam', 'file/*.bam', PL),
      ('wc', False, '../*', 'file/_dotdot_/*', PL),
  ])
  def test_out_file_docker_rewrite(self, _, recursive, uri, docker, provider):
    """Output URIs are rewritten to docker paths under the output directory."""
    docker = os.path.join('output', docker)
    file_param_util = param_util.OutputFileParamUtil('output')
    param = file_param_util.make_param('TEST', uri, recursive)
    self.assertIsInstance(param, param_util.OutputFileParam)
    self.assertEqual('TEST', param.name)
    self.assertEqual(docker, param.docker_path)
    self.assertEqual(provider, param.file_provider)
  @parameterized.parameterized.expand([
      # Non-recursive tests for local and google
      ('gf', False, 'gs://tmp/myfile', 'gs://tmp/', 'myfile', PG),
      ('gf', False, 'gs://buc/../myfile', 'gs://buc/../', 'myfile', PG),
      ('lf', False, 'file:///tmp/myfile', '/tmp/', 'myfile', PL),
      ('lf', False, '../myfile', '../', 'myfile', PL),
      # Tests with wildcards.
      ('gfwc', False, 'gs://tmp/*.bam', 'gs://tmp/', '*.bam', PG),
      ('gfwc', False, 'gs://tmp/*', 'gs://tmp/', '*', PG),
      ('gfwc', False, 'gs://bucket/../*', 'gs://bucket/../', '*', PG),
      ('lfwc', False, '../tmp/*.bam', '../tmp/', '*.bam', PL),
      ('lfwc', False, './*', './', '*', PL),
      ('localroot', False, '/*', '/', '*', PL),
      ('lfwc', False, '/tmp/*', '/tmp/', '*', PL),
      ('lfwc', False, '/bucket/*', '/bucket/', '*', PL),
      # Recursive tests for local and google
      ('lr', True, '/tmp/myfile/', '/tmp/myfile/', '', PL),
      ('lr', True, '../myfile', '../myfile/', '', PL),
      ('lr', True, './', './', '', PL),
      ('gr', True, 'gs://t/myfile/', 'gs://t/myfile/', '', PG),
      ('gr', True, 'gs://t/myfile', 'gs://t/myfile/', '', PG),
      ('gr', True, 'gs://buc/../myfile', 'gs://buc/../myfile/', '', PG),
  ])
  def test_uri_rewrite_out(self, _, recursive, raw_uri, path, bn, provider):
    """Output URIs split into (path, basename); local paths are absolutized."""
    # Prepare the path if local.
    if provider == PL:
      path = os.path.abspath(path).rstrip('/') + '/'
    out_util = param_util.OutputFileParamUtil('')
    out_param = out_util.make_param('TEST', raw_uri, recursive=recursive)
    self.assertEqual(path, out_param.uri.path)
    self.assertEqual(bn, out_param.uri.basename)
    self.assertEqual(path + bn, out_param.uri)
    self.assertEqual(provider, out_param.file_provider)
  @parameterized.parameterized.expand([
      # Non-recursive tests for local and google
      ('gf', False, 'gs://tmp/myfile', 'gs://tmp/', 'myfile', PG),
      ('gf', False, 'gs://buc/../myfile', 'gs://buc/../', 'myfile', PG),
      ('lf', False, 'file:///tmp/myfile', '/tmp/', 'myfile', PL),
      ('lf', False, '../myfile', '../', 'myfile', PL),
      # Tests with wildcards.
      ('gfwc', False, 'gs://tmp/*.bam', 'gs://tmp/', '*.bam', PG),
      ('gfwc', False, 'gs://tmp/*', 'gs://tmp/', '*', PG),
      ('gfwc', False, 'gs://bucket/../*', 'gs://bucket/../', '*', PG),
      ('lfwc', False, '../tmp/*.bam', '../tmp/', '*.bam', PL),
      ('lfwc', False, './*', './', '*', PL),
      ('localroot', False, '/*', '/', '*', PL),
      ('lfwc', False, '/tmp/*', '/tmp/', '*', PL),
      ('lfwc', False, '/bucket/*', '/bucket/', '*', PL),
      # Recursive tests for local and google
      ('lr', True, '/tmp/myfile/', '/tmp/myfile/', '', PL),
      ('lr', True, '../myfile', '../myfile/', '', PL),
      ('lr', True, './', './', '', PL),
      ('gr', True, 'gs://t/myfile/', 'gs://t/myfile/', '', PG),
      ('gr', True, 'gs://t/myfile', 'gs://t/myfile/', '', PG),
      ('gr', True, 'gs://buc/../myfile', 'gs://buc/../myfile/', '', PG),
  ])
  def test_uri_rewrite_in(self, _, recursive, uri_raw, path, bn, provider):
    """Input URIs split the same way as outputs (shared parsing logic)."""
    # Prepare the path if local.
    if provider == PL:
      path = os.path.abspath(path).rstrip('/') + '/'
    in_util = param_util.InputFileParamUtil('')
    in_param = in_util.make_param('TEST', uri_raw, recursive=recursive)
    self.assertEqual(path, in_param.uri.path)
    self.assertEqual(bn, in_param.uri.basename)
    self.assertEqual(path + bn, in_param.uri)
    self.assertEqual(provider, in_param.file_provider)
  @parameterized.parameterized.expand([
      ('cant_use_wc', True, 'gs://tmp/myfile/*', 'only supported for files'),
      ('dir_wc', False, 'gs://b/yfile/*/*', 'only supported for files'),
      ('question', False, 'gs://b/myfile/?', 'Question mark'),
      ('recursive_wc', False, 'gs://b/myfile/**', 'Recursive'),
      ('no_filename_l', False, '../myfile/', 'not recursive must reference'),
      ('no_filename_g', False, 'gs://myfile/', 'not recursive must reference'),
  ])
  def test_output_val_err(self, unused_name, recursive, uri, regex):
    """Unsupported output URI shapes raise ValueError with a telling message."""
    file_param_util = param_util.OutputFileParamUtil('output')
    with self.assertRaisesRegexp(ValueError, regex):
      file_param_util.parse_uri(uri, recursive)
  @parameterized.parameterized.expand([
      ('s3', 's3://b/myfile/', 'not supported: s3://'),
      ('gluster', 'gluster+tcp://myfile/', r'supported: gluster\+tcp://'),
      ('ftp', 'ftp://myfile/', 'not supported: ftp://'),
  ])
  def test_file_provider_err(self, unused_name, uri, regex):
    """Unknown URI schemes raise ValueError naming the offending scheme."""
    file_param_util = param_util.OutputFileParamUtil('output')
    with self.assertRaisesRegexp(ValueError, regex):
      file_param_util.parse_file_provider(uri)
  @parameterized.parameterized.expand([
      ('l', '/tmp/mydir/inner', '/tmp/mydir/inner/', PL),
      ('l_log', '/tmp/mydir/data.log', '/tmp/mydir/data.log', PL),
      ('l_indir', '/tmp/mydir/extra/../runner', '/tmp/mydir/runner/', PL),
      ('g', 'gs://bucket/mydir', 'gs://bucket/mydir/', PG),
      ('glog', 'gs://bucket/my.log', 'gs://bucket/my.log', PG),
  ])
  def test_logging_param_maker(self, unused_name, uri, expected_out, provider):
    """build_logging_param normalizes directories and keeps .log files as-is."""
    param = param_util.build_logging_param(
        uri, util_class=param_util.OutputFileParamUtil)
    self.assertEqual(param.uri, expected_out)
    self.assertEqual(param.file_provider, provider)
class TestSubmitValidator(unittest.TestCase):
  """Tests for validate_submit_args_or_fail provider whitelisting."""
  def setUp(self):
    # One task with a GCS input and one with a GCS output.
    self.task_data = [
        {
            'inputs': [
                param_util.FileParam('IN', uri='gs://in/*', file_provider=PG)
            ]
        },
        {
            'outputs': [
                param_util.FileParam('OUT', uri='gs://out/*', file_provider=PG)
            ]
        },
    ]
  def test_submit_validator_passes(self):
    """Validation succeeds when every path's provider is whitelisted."""
    resources = job_util.JobResources(logging=param_util.LoggingParam(
        'gs://buck/logs', PG))
    param_util.validate_submit_args_or_fail(
        job_resources=resources,
        all_task_data=self.task_data,
        provider_name='MYPROVIDER',
        input_providers=[PG],
        output_providers=[PG],
        logging_providers=[PG])
  @parameterized.parameterized.expand([
      ('input', 'gs://in/*', [PL], [PG], [PG]),
      ('output', 'gs://out/*', [PG], [PL], [PG]),
      ('logging', 'gs://buck/logs', [PG], [PG], [PL]),
  ])
  def test_submit_validator_fails(self, name, path, inwl, outwl, logwl):
    """Validation raises ValueError naming the first non-whitelisted path."""
    resources = job_util.JobResources(logging=param_util.LoggingParam(
        'gs://buck/logs', PG))
    err_expected = 'Unsupported %s path (%s) for provider' % (name, path)
    with self.assertRaisesRegexp(ValueError, re.escape(err_expected)):
      param_util.validate_submit_args_or_fail(
          job_resources=resources,
          all_task_data=self.task_data,
          provider_name='MYPROVIDER',
          input_providers=inwl,
          output_providers=outwl,
          logging_providers=logwl)
class TestParamUtilDocs(unittest.TestCase):
  """Run the doctests embedded in param_util as a regular unit test."""
  def test_doctest(self):
    # testmod returns TestResults(failed, attempted).
    failures, _ = doctest.testmod(param_util, report=True)
    self.assertEqual(0, failures)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| |
import toml
import copy
import pytest
import os
import sys
from decimal import Decimal
from toml.decoder import InlineTableDict
TEST_STR = """
[a]\r
b = 1\r
c = 2
"""
TEST_DICT = {"a": {"b": 1, "c": 2}}
def test_bug_148():
    """Regression test (#148): backslash escaping when dumping strings."""
    assert 'a = "\\u0064"\n' == toml.dumps({'a': '\\x64'})
    assert 'a = "\\\\x64"\n' == toml.dumps({'a': '\\\\x64'})
    assert 'a = "\\\\\\u0064"\n' == toml.dumps({'a': '\\\\\\x64'})
def test_bug_144():
    """Regression test (#144): byte-string values round-trip on Python 2 only."""
    if sys.version_info >= (3,):
        # Python 3 has no implicit bytes->unicode coercion; nothing to check.
        return
    bug_dict = {'username': '\xd7\xa9\xd7\x9c\xd7\x95\xd7\x9d'}
    round_trip_bug_dict = toml.loads(toml.dumps(bug_dict))
    unicoded_bug_dict = {'username': bug_dict['username'].decode('utf-8')}
    assert round_trip_bug_dict == unicoded_bug_dict
    assert bug_dict['username'] == (round_trip_bug_dict['username']
                                    .encode('utf-8'))
def test_bug_196():
    """Regression test (#196): datetime values survive a dump/load round trip."""
    import datetime
    d = datetime.datetime.now()
    bug_dict = {'x': d}
    round_trip_bug_dict = toml.loads(toml.dumps(bug_dict))
    assert round_trip_bug_dict == bug_dict
    assert round_trip_bug_dict['x'] == bug_dict['x']
def test_valid_tests():
    """Every file in the toml-test 'valid' suite loads and re-dumps cleanly.

    NOTE(review): assumes the toml-test repository is checked out in the
    current working directory.
    """
    valid_dir = "toml-test/tests/valid/"
    for f in os.listdir(valid_dir):
        if not f.endswith("toml"):
            continue
        with open(os.path.join(valid_dir, f)) as fh:
            toml.dumps(toml.load(fh))
def test_circular_ref():
    """Dumping self-referencing structures raises ValueError instead of looping."""
    a = {}
    b = {}
    b['c'] = 4
    b['self'] = b
    a['b'] = b
    with pytest.raises(ValueError):
        toml.dumps(a)
    with pytest.raises(ValueError):
        toml.dumps(b)
def test__dict():
    """loads honors a custom dict subclass passed via _dict."""
    class TestDict(dict):
        pass
    assert isinstance(toml.loads(
        TEST_STR, _dict=TestDict), TestDict)
def test_dict_decoder():
    """A TomlDecoder built with a custom dict class produces that class."""
    class TestDict(dict):
        pass
    test_dict_decoder = toml.TomlDecoder(TestDict)
    assert isinstance(toml.loads(
        TEST_STR, decoder=test_dict_decoder), TestDict)
def test_inline_dict():
    """InlineTableDict values round-trip with TomlPreserveInlineDictEncoder."""
    class TestDict(dict, InlineTableDict):
        pass
    encoder = toml.TomlPreserveInlineDictEncoder()
    t = copy.deepcopy(TEST_DICT)
    t['d'] = TestDict()
    t['d']['x'] = "abc"
    o = toml.loads(toml.dumps(t, encoder=encoder))
    assert o == toml.loads(toml.dumps(o, encoder=encoder))
def test_array_sep():
    """Arrays dumped with a custom separator still parse back identically."""
    encoder = toml.TomlArraySeparatorEncoder(separator=",\t")
    d = {"a": [1, 2, 3]}
    o = toml.loads(toml.dumps(d, encoder=encoder))
    assert o == toml.loads(toml.dumps(o, encoder=encoder))
def test_numpy_floats():
    """Numpy float arrays of every precision round-trip via TomlNumpyEncoder.

    The original repeated the identical dump/load check three times; the
    loop states the shared invariant once per dtype.
    """
    np = pytest.importorskip('numpy')
    encoder = toml.TomlNumpyEncoder()
    for dtype in (np.float64, np.float32, np.float16):
        d = {'a': np.array([1, .3], dtype=dtype)}
        o = toml.loads(toml.dumps(d, encoder=encoder))
        assert o == toml.loads(toml.dumps(o, encoder=encoder))
def test_numpy_ints():
    """Numpy integer arrays of every width round-trip via TomlNumpyEncoder.

    The original repeated the identical dump/load check three times; the
    loop states the shared invariant once per dtype.
    """
    np = pytest.importorskip('numpy')
    encoder = toml.TomlNumpyEncoder()
    for dtype in (np.int64, np.int32, np.int16):
        d = {'a': np.array([1, 3], dtype=dtype)}
        o = toml.loads(toml.dumps(d, encoder=encoder))
        assert o == toml.loads(toml.dumps(o, encoder=encoder))
def test_ordered():
    """Ordered encoder/decoder pair round-trips while preserving key order."""
    from toml import ordered as toml_ordered
    encoder = toml_ordered.TomlOrderedEncoder()
    decoder = toml_ordered.TomlOrderedDecoder()
    o = toml.loads(toml.dumps(TEST_DICT, encoder=encoder), decoder=decoder)
    assert o == toml.loads(toml.dumps(TEST_DICT, encoder=encoder),
                           decoder=decoder)
def test_tuple():
    """Tuples are dumped as TOML arrays and round-trip (coming back as lists)."""
    d = {"a": (3, 4)}
    o = toml.loads(toml.dumps(d))
    assert o == toml.loads(toml.dumps(o))
def test_decimal():
    """Decimal values round-trip with at least four decimal places intact."""
    PLACES = Decimal(10) ** -4
    d = {"a": Decimal("0.1")}
    o = toml.loads(toml.dumps(d))
    assert o == toml.loads(toml.dumps(o))
    assert Decimal(o["a"]).quantize(PLACES) == d["a"].quantize(PLACES)
def test_invalid_tests():
    """Every file in the toml-test 'invalid' suite raises TomlDecodeError.

    NOTE(review): assumes the toml-test repository is checked out in the
    current working directory.
    """
    invalid_dir = "toml-test/tests/invalid/"
    for f in os.listdir(invalid_dir):
        if not f.endswith("toml"):
            continue
        with pytest.raises(toml.TomlDecodeError):
            with open(os.path.join(invalid_dir, f)) as fh:
                toml.load(fh)
def test_exceptions():
    """Non-string/non-file inputs raise TypeError; missing files raise FNF/IOError."""
    with pytest.raises(TypeError):
        toml.loads(2)
    with pytest.raises(TypeError):
        toml.load(2)
    try:
        FNFError = FileNotFoundError
    except NameError:
        # py2: FileNotFoundError does not exist; IOError is raised instead.
        FNFError = IOError
    with pytest.raises(FNFError):
        toml.load([])
class FakeFile(object):
    """In-memory file stub: write() appends text, read() returns all text written."""
    def __init__(self):
        self.written = ""
    def write(self, text):
        # Accumulate everything written so it can be read back verbatim.
        self.written += text
    def read(self):
        return self.written
def test_dump():
    """dump/load through file-like objects is stable across repeated cycles."""
    from collections import OrderedDict
    f = FakeFile()
    g = FakeFile()
    h = FakeFile()
    toml.dump(TEST_DICT, f)
    toml.dump(toml.load(f, _dict=OrderedDict), g)
    toml.dump(toml.load(g, _dict=OrderedDict), h)
    assert g.written == h.written
def test_paths():
    """load accepts str, bytes and pathlib.Path filenames.

    NOTE(review): assumes 'test.toml' exists in the working directory.
    """
    toml.load("test.toml")
    toml.load(b"test.toml")
    import sys
    if (3, 4) <= sys.version_info:
        import pathlib
        p = pathlib.Path("test.toml")
        toml.load(p)
def test_warnings():
    """Loading a list of files warns (once) about each nonexistent entry."""
    # Expect 1 warning for the non existent toml file
    with pytest.warns(UserWarning):
        toml.load(["test.toml", "nonexist.toml"])
def test_commutativity():
    """dumps(loads(x)) is a fixed point: one round trip equals two."""
    o = toml.loads(toml.dumps(TEST_DICT))
    assert o == toml.loads(toml.dumps(o))
def test_pathlib():
    """pathlib.Path values are dumped as plain path strings (Python 3.4+)."""
    if (3, 4) <= sys.version_info:
        import pathlib
        o = {"root": {"path": pathlib.Path("/home/edgy")}}
        test_str = """[root]
path = "/home/edgy"
"""
        assert test_str == toml.dumps(o, encoder=toml.TomlPathlibEncoder())
def test_comment_preserve_decoder_encoder():
    """Comments survive a load/dump cycle with the comment-preserving pair.

    Output ordering may differ, so the result is compared as a multiset of
    characters rather than literally.
    """
    test_str = """[[products]]
name = "Nail"
sku = 284758393
# This is a comment
color = "gray" # Hello World
# name = { first = 'Tom', last = 'Preston-Werner' }
# arr7 = [
# 1, 2, 3
# ]
# lines = '''
# The first newline is
# trimmed in raw strings.
# All other whitespace
# is preserved.
# '''
[animals]
color = "gray" # col
fruits = "apple" # a = [1,2,3]
a = 3
b-comment = "a is 3"
"""
    s = toml.dumps(toml.loads(test_str,
                              decoder=toml.TomlPreserveCommentDecoder()),
                   encoder=toml.TomlPreserveCommentEncoder())
    assert len(s) == len(test_str) and sorted(test_str) == sorted(s)
def test_deepcopy_timezone():
    """deepcopy of a parsed offset-datetime copies (not aliases) the value."""
    import copy
    o = toml.loads("dob = 1979-05-24T07:32:00-08:00")
    o2 = copy.deepcopy(o)
    assert o2["dob"] == o["dob"]
    assert o2["dob"] is not o["dob"]
| |
import wx
import sys
import time
import threading
import math
import wx.lib.colourselect as csel
import os
from manta import *
from pycallback import *
from MantaCapture import *
# from MantaPlot import *
import FloatSpin as FS
###############################################################################
# Manta Image Sequence Capture Frame.
# This class ports functionality of FMantaCapture dialog to wxManta.
class MantaCameraPathPanel(wx.Panel):
    def __init__(self, parent, engine, channel=0 ):
        """Build the camera-path panel: path list, capture, benchmark and controls.

        parent  -- enclosing wx window.
        engine  -- Manta rendering engine the camera paths drive.
        channel -- Manta channel index used for frame capture (default 0).
        """
        wx.Panel.__init__(self, parent )
        self.engine = engine
        self.channel = channel
        self.parent = parent
        # Spacing (parameter step and wall-clock step) of the path being recorded.
        self.record_delta_t = 0.0
        self.record_delta_time = 0.0
        # Load/Create Path.
        self.load_path_button = wx.Button( self, -1, "Load Path" )
        self.new_path_button = wx.Button( self, -1, "New Path" )
        self.save_path_button = wx.Button( self, -1, "Save Path" )
        self.record_interval = wx.SpinCtrl( self, -1 )
        self.record_interval.SetRange( 1, 60000 )
        self.record_interval.SetValue( 500 )
        self.record_timer = wx.Timer( self )
        # Counter used to generate unique default names for new paths.
        self.new_path_counter = 0
        # List of available paths.
        self.path_list = wx.ListCtrl( self, -1, size=(400,-1),style=wx.LC_REPORT|wx.LC_EDIT_LABELS )
        self.path_list.InsertColumn( 0, "Path Name" )
        self.path_list.InsertColumn( 1, "Key Frames" )
        self.path_list.InsertColumn( 2, "Delta time" )
        self.path_list.InsertColumn( 3, "Delta t" )
        self.path_list.SetColumnWidth(0, 100)
        self.path_list.SetColumnWidth(1, 100)
        self.path_list.SetColumnWidth(2, 100)
        self.path_list.SetColumnWidth(3, 100)
        self.path_loop_check = wx.CheckBox( self, -1, "Play in a loop." )
        self.automator = []; # Loaded CameraPathAutomator instances, parallel to path_list rows.
        # Capture frames from a path.
        self.capture_frames_check = wx.CheckBox( self, -1, "Capture frames from path" )
        self.capture_panel = MantaCapturePanel( self, engine, channel );
        self.capture_panel.Disable()
        self.capture_separate_check = wx.CheckBox( self, -1, "Record in multiple passes." )
        self.capture_pass = 0
        # Run a benchmark using a path.
        self.benchmark_check = wx.CheckBox( self, -1, "Benchmark path" )
        self.benchmark_list = wx.ListCtrl( self, -1, size=(400,-1),style=wx.LC_REPORT|wx.LC_EDIT_LABELS )
        self.benchmark_list.InsertColumn( 0, "Path Name" )
        self.benchmark_list.InsertColumn( 1, "Total Samples" )
        self.benchmark_list.InsertColumn( 2, "Average FPS" )
        self.benchmark_list.SetColumnWidth(0, 100)
        self.benchmark_list.SetColumnWidth(1, 100)
        self.benchmark_list.SetColumnWidth(2, 100)
        self.benchmarks = []; # Collected benchmark results.
        # Playback controls.
        self.start_button = wx.Button( self, -1, "Start" )
        self.pause_button = wx.Button( self, -1, "Pause" )
        self.stop_button = wx.Button( self, -1, "Stop" )
        # Arrange gui components.
        vsizer = wx.BoxSizer(wx.VERTICAL);
        # Load, New Buttons.
        hsizer = wx.BoxSizer(wx.HORIZONTAL);
        hsizer.Add( self.load_path_button, wx.ALIGN_CENTER )
        hsizer.Add( self.new_path_button, wx.ALIGN_CENTER )
        vsizer.Add( hsizer, wx.ALIGN_CENTER )
        hsizer = wx.BoxSizer(wx.HORIZONTAL);
        hsizer.Add( wx.StaticText( self, -1, "Record Interval " ), wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_CENTER)
        hsizer.Add( self.record_interval, 0, wx.ALIGN_CENTER )
        hsizer.Add( wx.StaticText( self, -1, "ms" ), wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_CENTER)
        hsizer.Add( self.save_path_button, wx.ALIGN_CENTER )
        vsizer.Add( hsizer, wx.ALIGN_CENTER )
        # Path List.
        vsizer.Add( self.path_list, 0, wx.EXPAND )
        vsizer.Add( self.path_loop_check, 0, wx.ALIGN_LEFT )
        self.path_list.Bind( wx.EVT_RIGHT_UP, self.OnPathRightClick )
        # Capture.
        hsizer = wx.BoxSizer(wx.HORIZONTAL);
        hsizer.Add( self.capture_frames_check, wx.ALIGN_CENTER )
        hsizer.Add( self.capture_separate_check, wx.ALIGN_CENTER )
        vsizer.Add( hsizer, wx.ALIGN_CENTER )
        vsizer.Add( self.capture_panel, wx.EXPAND )
        # Benchmark.
        hsizer = wx.BoxSizer(wx.HORIZONTAL);
        hsizer.Add( self.benchmark_check, wx.ALIGN_CENTER )
        vsizer.Add( hsizer, wx.ALIGN_CENTER )
        vsizer.Add( self.benchmark_list, 0, wx.EXPAND )
        # self.benchmark_list.Bind( wx.EVT_RIGHT_UP, self.OnListRightClick )
        # Controls.
        hsizer = wx.BoxSizer(wx.HORIZONTAL);
        hsizer.Add( self.start_button, wx.ALIGN_CENTER )
        hsizer.Add( self.pause_button, wx.ALIGN_CENTER )
        hsizer.Add( self.stop_button, wx.ALIGN_CENTER )
        vsizer.Add( hsizer, wx.ALIGN_CENTER )
        self.SetSizerAndFit(vsizer)
        # Bind events.
        self.Bind(wx.EVT_BUTTON, self.OnLoadPathButton, self.load_path_button)
        self.Bind(wx.EVT_BUTTON, self.OnNewPathButton, self.new_path_button)
        self.Bind(wx.EVT_BUTTON, self.OnSavePathButton, self.save_path_button)
        self.Bind(wx.EVT_BUTTON, self.OnStartButton, self.start_button)
        self.Bind(wx.EVT_BUTTON, self.OnStopButton, self.stop_button)
        self.Bind(wx.EVT_CHECKBOX, self.OnCaptureFramesCheck, self.capture_frames_check)
        self.Bind(wx.EVT_TIMER, self.OnRecordTimer )
###########################################################################
# Gui interactions.
def OnLoadPathButton( self, event ):
# Get the filename.
dialog = wx.FileDialog( self,
message="Choose path file",
defaultDir=os.getcwd(),
defaultFile="",
wildcard="Text files (*.txt)|*.txt|All Files (*.*)|*.*",
style=wx.OPEN|wx.CHANGE_DIR )
# Determine the file to open.
if (dialog.ShowModal() == wx.ID_OK):
# Attempt to add a path per file selected.
files = dialog.GetPaths()
index = -1
for name in files:
# Parse the file.
try:
automator = manta_new(CameraPathAutomator( self.engine, 0, 0, str(name) ))
self.AddAutomator( automator, name )
except:
print "Failed to load: " + name
# Check to see if any were successful.
if (index >= 0):
# Enable Buttons.
self.start_button.Enable()
self.path_list.SetItemState( index, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED )
def AddAutomator( self, automator, name ):
# Specifiy that the automator should wait for additional commands after running.
automator.set_automator_mode( AutomatorUI.AUTOMATOR_KEEPALIVE )
cbArgs = ( automator, )
automator.set_terminate_callback( manta_new(createMantaTransaction(self.MantaCameraPathComplete, cbArgs )))
# Add the automator the list.
index = self.path_list.InsertStringItem( sys.maxint, name )
self.path_list.SetStringItem( index, 1, str(automator.get_total_points()) );
self.path_list.SetStringItem( index, 2, str(automator.get_delta_time()) );
self.path_list.SetStringItem( index, 3, str(automator.get_delta_t()) );
# Store the automator
self.automator.append(automator)
    def OnNewPathButton( self, event ):
        """Toggle camera-path recording.

        First press: start the record timer (see OnRecordTimer) sampling at
        the interval from the record_interval control, and relabel the
        button "Done".  Second press: stop the timer, wrap the recorded
        samples in a new CameraPathAutomator, and add it to the path list.
        """
        if (self.record_timer.IsRunning()):
            # Stop recording.
            self.record_timer.Stop()
            # Toggle buttons.
            self.new_path_button.SetLabel( "New Path" )
            # Create a new automator from the recorded samples and the
            # per-frame deltas measured on the last timer tick.
            automator = CameraPathAutomator( self.engine, 0, 0,
                                             self.new_path_data,
                                             self.record_delta_t,
                                             self.record_delta_time )
            self.AddAutomator( automator, "NewPath" + str(self.new_path_counter) + ".txt" )
            self.new_path_counter += 1
            # Remove temporary storage.
            self.new_path_data.Clear()
        else:
            # Determine the interval between samples (milliseconds).
            interval = self.record_interval.GetValue()
            # Create a camera data list to accumulate samples into.
            self.new_path_data = CameraPathDataVector()
            # Add a timer to record camera position information.
            self.record_timer.Start( interval )
            # Baseline frame number and wall-clock time for delta tracking.
            self.record_prev_frame = self.engine.getFrameState().frameSerialNumber
            self.record_prev_time = time.time()
            # Toggle buttons.
            self.new_path_button.SetLabel( "Done" )
def OnRecordTimer( self, event ):
# Record camera position.
camera_data = self.engine.getCamera( self.channel ).getBasicCameraData()
# Compute elapse time and frames.
current_frame = self.engine.getFrameState().frameSerialNumber
current_time = time.time()
elapse_frames = current_frame - self.record_prev_frame;
elapse_time = current_time - self.record_prev_time;
if (elapse_frames > 0):
# Compute delta t and time.
self.record_delta_t = 1.0 / elapse_frames;
self.record_delta_time = elapse_time / elapse_frames;
# Add the position to the current path.
self.new_path_data.PushBack( camera_data )
self.record_prev_frame = current_frame;
self.record_prev_time = current_time;
def OnSavePathButton( self, event ):
# Determine the currently selected automator.
index = self.path_list.GetNextItem( -1, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED )
if (index >= 0):
automator = self.automator[index]
filename = self.path_list.GetItemText( index )
dialog = wx.FileDialog( self,
message = "Save path as...",
defaultDir = os.getcwd(),
defaultFile = filename,
wildcard="Text files (*.txt)|*.txt|All Files (*.*)|*.*",
style=wx.SAVE|wx.CHANGE_DIR )
if (dialog.ShowModal() == wx.ID_OK):
# Get the pathname.
filename = dialog.GetPath()
# Write to a file.
automator.write_path( str(filename) )
    def OnStartButton( self, event ):
        """Begin playback of the selected camera path.

        Configures loop behavior, benchmarking sync, and (optionally) frame
        capture before restarting the automator thread.  With "separate
        passes" capture enabled, a two-pass protocol is driven by
        self.capture_pass: pass 1 collects performance data, pass 2
        captures images (the hand-off happens in MantaCameraPathComplete).
        """
        # Determine the currently selected automator.
        index = self.path_list.GetNextItem( -1, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED )
        if (index >= 0):
            automator = self.automator[index]
            # Set the loop behavior.
            if (self.path_loop_check.GetValue()):
                automator.set_loop_behavior( CameraPathAutomator.PATH_LOOP )
            else:
                automator.set_loop_behavior( CameraPathAutomator.PATH_STOP )
            # Initialize benchmarking.
            if (self.benchmark_check.IsChecked()):
                # Synchronize the automator thread with Manta every frame.
                automator.set_sync_frames( 1 )
                automator.set_sync_quiet( True )
            else:
                automator.set_sync_frames( 0 )
            # Start frame capture if necessary.
            if (self.capture_frames_check.IsChecked()):
                # Check if multiple passes should be used.
                if (self.capture_separate_check.IsChecked()):
                    # Controlled capture mode: benchmarking is forced off and
                    # the pass counter advanced (1 = performance, 2 = images).
                    self.benchmark_check.SetValue( False )
                    self.capture_pass += 1
                    if (self.capture_pass == 1):
                        # Pass 1 collect performance data.
                        automator.set_sync_frames( 1 )
                        automator.set_sync_quiet( True )
                        self.parent.statusbar.SetStatusText( "Pass 1 collect performance data." )
                    if(self.capture_pass == 2):
                        # Pass 2 capture frame images.
                        automator.set_sync_frames( 1 )
                        automator.set_sync_quiet( True )
                        self.parent.statusbar.SetStatusText( "Pass 2 capture frame images." )
                        self.capture_panel.OnStartButton(())
                else:
                    # Normal capture mode: single pass with image capture on.
                    self.parent.statusbar.SetStatusText( "Capturing frame images." )
                    self.capture_panel.OnStartButton(())
            # Toggle buttons while the path plays.
            self.load_path_button.Disable()
            self.new_path_button.Disable()
            self.start_button.Disable()
            self.stop_button.Enable()
            # Start the asynchronous thread.
            automator.restart()
        else:
            self.parent.statusbar.SetStatusText( "Select a Path" )
def OnStopButton( self, event ):
# Determine the currently selected automator.
index = self.path_list.GetNextItem( -1, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED )
if (index >= 0):
# Access the automator.
automator = self.automator[index]
# Start camera path if necessary.
if (self.capture_frames_check.IsChecked()):
self.capture_panel.OnStopButton(())
# Cause the automator to abort and call its termination callback.
automator.set_loop_behavior ( CameraPathAutomator.PATH_ABORT )
automator.set_automator_mode( AutomatorUI.AUTOMATOR_EXIT )
def OnCaptureFramesCheck( self, event ):
if (self.capture_frames_check.IsChecked()):
self.capture_panel.Enable()
else:
self.capture_panel.Disable()
def OnPathRightClick( self, event ):
# Popup menu options.
self.POPUP_GLYPH = wx.NewId()
self.Bind(wx.EVT_MENU, self.OnPopupGlyphCheck, id=self.POPUP_GLYPH )
menu = wx.Menu()
menu.Append( self.POPUP_GLYPH, "Toggle Glyphs" )
self.path_list.PopupMenu( menu )
menu.Destroy()
    def OnPopupGlyphCheck( self, event ):
        """Add sphere glyphs marking the selected path's control points.

        Builds a BVH over one sphere per control point (shaded by index)
        and splices it into the Manta scene via a transaction.
        """
        # Get the selected automator.
        index = self.path_list.GetFirstSelected()
        automator = self.automator[index]
        # Create a Group containing glyphs for each control point.
        group = manta_new(Group())
        total = automator.get_total_points()
        for i in range(0,total):
            # Place a sphere glyph at the control point's eye position,
            # colored from dark red to bright red with increasing index.
            c = automator.GetControlPoint( i )
            material = manta_new(Flat(Color(RGBColor((float(i)/float(total)),0.1,0.1))))
            glyph = manta_new(Sphere( material, c.eye, 15.0 ))
            # NOTE(review): glyph is already wrapped by manta_new above; the
            # second manta_new here looks redundant -- confirm against the
            # manta bindings.
            group.add(manta_new( glyph ))
        # Build an acceleration structure for the group.
        self.glyph_bvh = manta_new( DynBVH() )
        self.glyph_bvh.rebuild( group )
        # Add the group to manta in a transaction (see MantaAddGlyph).
        cbArgs = ( self.glyph_bvh, )
        self.engine.addTransaction("Manta Add Glyph",
                                   manta_new(createMantaTransaction(self.MantaAddGlyph, cbArgs)))
def MantaAddGlyph(self, glyph_bvh ):
scene = self.engine.getScene()
new_world = manta_new( Group() )
new_world.add( glyph_bvh )
new_world.add( scene.getObject() )
scene.setObject( new_world )
def OnListRightClick( self, event ):
# Popup menu options.
self.POPUP_RENAME = wx.NewId()
self.POPUP_HISTOGRAM = wx.NewId()
self.POPUP_PLOT = wx.NewId()
self.Bind(wx.EVT_MENU, self.OnPopupRename, id=self.POPUP_RENAME )
self.Bind(wx.EVT_MENU, self.OnPopupHistogram, id=self.POPUP_HISTOGRAM )
self.Bind(wx.EVT_MENU, self.OnPopupPlot, id=self.POPUP_PLOT )
menu = wx.Menu()
# menu.Append( self.POPUP_RENAME, "Rename" )
menu.Append( self.POPUP_HISTOGRAM, "Histogram" )
menu.Append( self.POPUP_PLOT, "Plot" )
self.benchmark_list.PopupMenu( menu )
menu.Destroy()
    def OnPopupRename( self, event ):
        """Context-menu stub: renaming benchmark rows is not implemented."""
        # Lookup selected item.
        # NOTE(review): index is computed but unused by the stub below.
        index = self.benchmark_list.GetFirstSelected()
        print "Unimplemented."
def OnPopupPlot( self, event ):
# Look up the performance vector.
index = self.benchmark_list.GetFirstSelected()
plotable = self.benchmarks[index];
# Look up the path name.
plotname = self.path_list.GetItemText( index )
# Fps Histogram
plot_frame = PlotFrame( self, plotable, plotname );
plot_frame.Show()
def OnPopupHistogram( self, event ):
# Look up the performance vector.
index = self.benchmark_list.GetFirstSelected()
plotable = self.benchmarks[index];
# Look up the path name.
plotname = self.path_list.GetItemText( index )
# Fps Histogram
plot_frame = HistogramFrame( self, plotable, plotname );
plot_frame.Show()
###########################################################################
# Manta Transactions
    def MantaCameraPathComplete( self, automator ):
        """Termination callback invoked when a camera path run finishes.

        Registered via set_terminate_callback in AddAutomator.  Finalizes
        any frame capture (including the pass-1 -> pass-2 hand-off of the
        two-pass protocol started in OnStartButton), records benchmark
        results, and re-enables the GUI controls.
        """
        # Stop capturing frames.
        if (self.capture_frames_check.IsChecked()):
            # Check if multiple passes should be used.
            if (self.capture_separate_check.IsChecked()):
                # Controlled capture mode.
                if (self.capture_pass == 1):
                    # Pass 1 collected performance data; keep it and rerun
                    # the same path for pass 2 (image capture).
                    self.parent.statusbar.SetStatusText( "First Pass Complete" )
                    self.capture_performance = automator.get_performance()
                    wx.CallAfter( self.OnStartButton, () )
                if (self.capture_pass == 2):
                    # Pass 2 captured frame images.
                    self.capture_pass = 0
                    self.capture_panel.OnStopButton(())
                    # Reprocess the images using the pass-1 performance data.
                    self.parent.statusbar.SetStatusText( "Resampling frames." )
                    wx.CallAfter( ResampleCapturedFrames, self.capture_panel.GetPrefix(),
                                  self.capture_performance )
                    # Cleanup.
                    self.capture_performance = ()
            else:
                # Normal capture mode.
                self.capture_panel.OnStopButton(())
        # Add results to benchmark table.
        if (self.benchmark_check.IsChecked()):
            # Copy the performance data.
            performance = automator.get_performance()
            self.benchmarks.append( performance )
            # Lookup the path name from the automator's row.
            index = self.automator.index( automator )
            filename = self.path_list.GetItemText( index )
            # Add a row to the benchmark table.
            index = self.benchmark_list.InsertStringItem( sys.maxint, filename )
            self.benchmark_list.SetStringItem( index, 1, str(performance.size()) );
            self.benchmark_list.SetStringItem( index, 2, str(automator.get_average_fps()) );
            self.parent.statusbar.SetStatusText( "Right click to plot performance." )
        # Re-enable buttons now that playback has finished.
        self.load_path_button.Enable()
        self.new_path_button.Enable()
        self.start_button.Enable()
        self.stop_button.Disable()
class MantaCameraPathFrame(wx.Frame):
    """Top-level frame hosting a MantaCameraPathPanel.

    The frame hides (rather than destroys) itself on close or Escape so it
    can be re-shown cheaply.
    """
    def __init__(self, parent, engine, channel=0 ):
        wx.Frame.__init__(self, parent=parent, title="Camera Paths")
        panel = wx.lib.scrolledpanel.ScrolledPanel(self, -1, style=wx.TAB_TRAVERSAL)
        # Create the camera path panel plus a close button inside a
        # scrolled panel.
        sizer = wx.BoxSizer(wx.VERTICAL)
        self.panel = MantaCameraPathPanel( panel, engine, channel )
        self.statusbar = self.CreateStatusBar()
        sizer.Add(self.panel, 0, wx.ALIGN_CENTER|wx.ALL, 0)
        closeButton = wx.Button(panel, wx.ID_CLOSE)
        sizer.Add(closeButton, 0, wx.ALIGN_CENTER|wx.ALL, 0)
        self.Bind(wx.EVT_BUTTON, self.OnCloseWindow, closeButton)
        panel.SetSizer(sizer)
        panel.Layout()
        self.Layout()
        # Leave extra vertical room (50px) for the status bar.
        self.SetClientSize(self.panel.GetSize()+(0,50))
        self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
        panel.SetFocus()
        panel.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
    def OnKeyDown(self, evt):
        """Hide the frame on Escape; pass every key on to other handlers."""
        keycode = evt.GetKeyCode()
        if (keycode == wx.WXK_ESCAPE):
            self.Show(False)
        evt.Skip()
    def OnCloseWindow(self, event):
        """Hide instead of destroying so the frame can be reopened."""
        self.Show( False )
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for database migrations.
There are "opportunistic" tests which allows testing against all 3 databases
(sqlite in memory, mysql, pg) in a properly configured unit test environment.
For the opportunistic testing you need to set up db's named 'openstack_citest'
with user 'openstack_citest' and password 'openstack_citest' on localhost. The
test will then use that db and u/p combo to run the tests.
For postgres on Ubuntu this can be done with the following commands::
| sudo -u postgres psql
| postgres=# create user openstack_citest with createdb login password
| 'openstack_citest';
| postgres=# create database openstack_citest with owner openstack_citest;
"""
import os
from migrate.versioning import repository
import mock
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import test_migrations
from oslo_db.sqlalchemy import utils as db_utils
import sqlalchemy
from sqlalchemy.engine import reflection
from nova.db import migration
from nova.db.sqlalchemy.api_migrations import migrate_repo
from nova.db.sqlalchemy import api_models
from nova.db.sqlalchemy import migration as sa_migration
from nova import test
from nova.tests import fixtures as nova_fixtures
class NovaAPIModelsSync(test_migrations.ModelsMigrationsSync):
    """Test that the models match the database after migrations are run."""

    def db_sync(self, engine):
        # Force the migration machinery to run against this test's engine.
        with mock.patch.object(sa_migration, 'get_engine',
                               return_value=engine):
            sa_migration.db_sync(database='api')

    @property
    def migrate_engine(self):
        return self.engine

    def get_engine(self, context=None):
        return self.migrate_engine

    def get_metadata(self):
        return api_models.API_BASE.metadata

    def include_object(self, object_, name, type_, reflected, compare_to):
        # migrate_version is a sqlalchemy-migrate control table and isn't
        # included in the model, so don't compare it.
        if type_ == 'table' and name == 'migrate_version':
            return False
        return True

    def filter_metadata_diff(self, diff):
        """Drop diffs that are known/expected and shouldn't fail the sync."""
        # ForeignKeys that exist on the model but not in the database.
        # They will be removed from the model at a later time.
        fkey_whitelist = {'build_requests': ['request_spec_id']}
        # Columns that will be removed from the DB at a later release and
        # aren't on a model anymore.
        column_whitelist = {
            'build_requests': ['vm_state', 'instance_metadata',
                'display_name', 'access_ip_v6', 'access_ip_v4', 'key_name',
                'locked_by', 'image_ref', 'progress', 'request_spec_id',
                'info_cache', 'user_id', 'task_state', 'security_groups',
                'config_drive']
        }

        def _ignorable(element):
            # modify_nullable diffs arrive as lists and are always kept.
            if isinstance(element, list):
                return False
            # Otherwise the element is a tuple whose first entry names the
            # action; different actions have different tuple structures.
            action = element[0]
            if action == 'add_fk':
                fkey = element[1]
                table = fkey.table.name
                return (table in fkey_whitelist and
                        fkey.column_keys == fkey_whitelist[table])
            if action == 'remove_column':
                table = element[2]
                column = element[3]
                return (table in column_whitelist and
                        column.name in column_whitelist[table])
            return False

        return [element for element in diff if not _ignorable(element)]
class TestNovaAPIMigrationsSQLite(NovaAPIModelsSync,
                                  test_base.DbTestCase,
                                  test.NoDBTestCase):
    """Model/migration sync check run against in-memory SQLite."""
    pass
class TestNovaAPIMigrationsMySQL(NovaAPIModelsSync,
                                 test_base.MySQLOpportunisticTestCase,
                                 test.NoDBTestCase):
    """Model/migration sync check run opportunistically against MySQL."""
    pass
class TestNovaAPIMigrationsPostgreSQL(NovaAPIModelsSync,
        test_base.PostgreSQLOpportunisticTestCase, test.NoDBTestCase):
    """Model/migration sync check run opportunistically against PostgreSQL."""
    pass
class NovaAPIMigrationsWalk(test_migrations.WalkVersionsMixin):
    """Walk every API database migration in order, verifying each schema.

    Each migration version N is expected to provide a companion
    ``_check_%03d`` method (and optionally ``_pre_upgrade_%03d`` to seed
    data) unless it appears in ``_skippable_migrations``.
    """

    def setUp(self):
        # NOTE(sdague): the oslo_db base test case completely
        # invalidates our logging setup, we actually have to do that
        # before it is called to keep this from vomiting all over our
        # test output.
        self.useFixture(nova_fixtures.StandardLogging())
        super(NovaAPIMigrationsWalk, self).setUp()

    @property
    def INIT_VERSION(self):
        return migration.db_initial_version('api')

    @property
    def REPOSITORY(self):
        return repository.Repository(
            os.path.abspath(os.path.dirname(migrate_repo.__file__)))

    @property
    def migration_api(self):
        return sa_migration.versioning_api

    @property
    def migrate_engine(self):
        return self.engine

    def _skippable_migrations(self):
        # Placeholder migrations reserved per release, plus migrations with
        # nothing schema-visible to verify.
        mitaka_placeholders = list(range(8, 13))
        newton_placeholders = list(range(21, 26))
        special_cases = [
            30,  # Enforcement migration, no changes to test
        ]
        return mitaka_placeholders + newton_placeholders + special_cases

    def migrate_up(self, version, with_data=False):
        if with_data:
            check = getattr(self, '_check_%03d' % version, None)
            # Every non-placeholder migration must ship a _check method.
            if version not in self._skippable_migrations():
                self.assertIsNotNone(check,
                                     ('API DB Migration %i does not have a '
                                      'test. Please add one!') % version)
        super(NovaAPIMigrationsWalk, self).migrate_up(version, with_data)

    def test_walk_versions(self):
        self.walk_versions(snake_walk=False, downgrade=False)

    # Assertion helpers shared by the per-migration checks below.

    def assertColumnExists(self, engine, table_name, column):
        self.assertTrue(db_utils.column_exists(engine, table_name, column),
                        'Column %s.%s does not exist' % (table_name, column))

    def assertIndexExists(self, engine, table_name, index):
        self.assertTrue(db_utils.index_exists(engine, table_name, index),
                        'Index %s on table %s does not exist' %
                        (index, table_name))

    def assertUniqueConstraintExists(self, engine, table_name, columns):
        inspector = reflection.Inspector.from_engine(engine)
        constrs = inspector.get_unique_constraints(table_name)
        constr_columns = [constr['column_names'] for constr in constrs]
        self.assertIn(columns, constr_columns)

    def assertTableNotExists(self, engine, table_name):
        self.assertRaises(sqlalchemy.exc.NoSuchTableError,
                          db_utils.get_table, engine, table_name)

    def _check_001(self, engine, data):
        for column in ['created_at', 'updated_at', 'id', 'uuid', 'name',
                       'transport_url', 'database_connection']:
            self.assertColumnExists(engine, 'cell_mappings', column)
        self.assertIndexExists(engine, 'cell_mappings', 'uuid_idx')
        self.assertUniqueConstraintExists(engine, 'cell_mappings',
                                          ['uuid'])

    def _check_002(self, engine, data):
        for column in ['created_at', 'updated_at', 'id', 'instance_uuid',
                       'cell_id', 'project_id']:
            self.assertColumnExists(engine, 'instance_mappings', column)
        for index in ['instance_uuid_idx', 'project_id_idx']:
            self.assertIndexExists(engine, 'instance_mappings', index)
        self.assertUniqueConstraintExists(engine, 'instance_mappings',
                                          ['instance_uuid'])
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('instance_mappings')[0]
        self.assertEqual('cell_mappings', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])
        self.assertEqual(['cell_id'], fk['constrained_columns'])

    def _check_003(self, engine, data):
        for column in ['created_at', 'updated_at', 'id',
                       'cell_id', 'host']:
            self.assertColumnExists(engine, 'host_mappings', column)
        self.assertIndexExists(engine, 'host_mappings', 'host_idx')
        self.assertUniqueConstraintExists(engine, 'host_mappings',
                                          ['host'])
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('host_mappings')[0]
        self.assertEqual('cell_mappings', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])
        self.assertEqual(['cell_id'], fk['constrained_columns'])

    def _check_004(self, engine, data):
        columns = ['created_at', 'updated_at', 'id', 'instance_uuid', 'spec']
        for column in columns:
            self.assertColumnExists(engine, 'request_specs', column)
        self.assertUniqueConstraintExists(engine, 'request_specs',
                                          ['instance_uuid'])
        self.assertIndexExists(engine, 'request_specs',
                               'request_spec_instance_uuid_idx')

    def _check_005(self, engine, data):
        # flavors
        for column in ['created_at', 'updated_at', 'name', 'id', 'memory_mb',
                       'vcpus', 'swap', 'vcpu_weight', 'flavorid',
                       'rxtx_factor', 'root_gb', 'ephemeral_gb', 'disabled',
                       'is_public']:
            self.assertColumnExists(engine, 'flavors', column)
        self.assertUniqueConstraintExists(engine, 'flavors',
                                          ['flavorid'])
        self.assertUniqueConstraintExists(engine, 'flavors',
                                          ['name'])
        # flavor_extra_specs
        for column in ['created_at', 'updated_at', 'id', 'flavor_id', 'key',
                       'value']:
            self.assertColumnExists(engine, 'flavor_extra_specs', column)
        self.assertIndexExists(engine, 'flavor_extra_specs',
                               'flavor_extra_specs_flavor_id_key_idx')
        self.assertUniqueConstraintExists(engine, 'flavor_extra_specs',
                                          ['flavor_id', 'key'])
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('flavor_extra_specs')[0]
        self.assertEqual('flavors', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])
        self.assertEqual(['flavor_id'], fk['constrained_columns'])
        # flavor_projects
        for column in ['created_at', 'updated_at', 'id', 'flavor_id',
                       'project_id']:
            self.assertColumnExists(engine, 'flavor_projects', column)
        self.assertUniqueConstraintExists(engine, 'flavor_projects',
                                          ['flavor_id', 'project_id'])
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('flavor_projects')[0]
        self.assertEqual('flavors', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])
        self.assertEqual(['flavor_id'], fk['constrained_columns'])

    def _check_006(self, engine, data):
        for column in ['id', 'request_spec_id', 'project_id', 'user_id',
                       'display_name', 'instance_metadata', 'progress',
                       'vm_state', 'image_ref', 'access_ip_v4',
                       'access_ip_v6', 'info_cache', 'security_groups',
                       'config_drive', 'key_name', 'locked_by']:
            self.assertColumnExists(engine, 'build_requests', column)
        self.assertIndexExists(engine, 'build_requests',
                               'build_requests_project_id_idx')
        self.assertUniqueConstraintExists(engine, 'build_requests',
                                          ['request_spec_id'])
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('build_requests')[0]
        self.assertEqual('request_specs', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])
        self.assertEqual(['request_spec_id'], fk['constrained_columns'])

    def _check_007(self, engine, data):
        map_table = db_utils.get_table(engine, 'instance_mappings')
        self.assertTrue(map_table.columns['cell_id'].nullable)
        # Ensure the foreign key still exists
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('instance_mappings')[0]
        self.assertEqual('cell_mappings', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])
        self.assertEqual(['cell_id'], fk['constrained_columns'])

    def _check_013(self, engine, data):
        for column in ['instance_uuid', 'instance']:
            self.assertColumnExists(engine, 'build_requests', column)
        self.assertIndexExists(engine, 'build_requests',
                               'build_requests_instance_uuid_idx')
        self.assertUniqueConstraintExists(engine, 'build_requests',
                                          ['instance_uuid'])

    def _check_014(self, engine, data):
        for column in ['name', 'public_key']:
            self.assertColumnExists(engine, 'key_pairs', column)
        self.assertUniqueConstraintExists(engine, 'key_pairs',
                                          ['user_id', 'name'])

    def _check_015(self, engine, data):
        build_requests_table = db_utils.get_table(engine, 'build_requests')
        for column in ['request_spec_id', 'user_id', 'security_groups',
                       'config_drive']:
            self.assertTrue(build_requests_table.columns[column].nullable)
        inspector = reflection.Inspector.from_engine(engine)
        constrs = inspector.get_unique_constraints('build_requests')
        constr_columns = [constr['column_names'] for constr in constrs]
        self.assertNotIn(['request_spec_id'], constr_columns)

    def _check_016(self, engine, data):
        self.assertColumnExists(engine, 'resource_providers', 'id')
        self.assertIndexExists(engine, 'resource_providers',
                               'resource_providers_name_idx')
        self.assertIndexExists(engine, 'resource_providers',
                               'resource_providers_uuid_idx')
        self.assertColumnExists(engine, 'inventories', 'id')
        self.assertIndexExists(engine, 'inventories',
                               'inventories_resource_class_id_idx')
        self.assertColumnExists(engine, 'allocations', 'id')
        self.assertColumnExists(engine, 'resource_provider_aggregates',
                                'aggregate_id')

    def _check_017(self, engine, data):
        # aggregate_metadata
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'aggregate_id',
                       'key',
                       'value']:
            self.assertColumnExists(engine, 'aggregate_metadata', column)
        self.assertUniqueConstraintExists(engine, 'aggregate_metadata',
                                          ['aggregate_id', 'key'])
        self.assertIndexExists(engine, 'aggregate_metadata',
                               'aggregate_metadata_key_idx')
        # aggregate_hosts
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'host',
                       'aggregate_id']:
            self.assertColumnExists(engine, 'aggregate_hosts', column)
        self.assertUniqueConstraintExists(engine, 'aggregate_hosts',
                                          ['host', 'aggregate_id'])
        # aggregates
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'name']:
            self.assertColumnExists(engine, 'aggregates', column)
        self.assertIndexExists(engine, 'aggregates',
                               'aggregate_uuid_idx')
        self.assertUniqueConstraintExists(engine, 'aggregates', ['name'])

    def _check_018(self, engine, data):
        # instance_groups
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'user_id',
                       'project_id',
                       'uuid',
                       'name']:
            self.assertColumnExists(engine, 'instance_groups', column)
        self.assertUniqueConstraintExists(engine, 'instance_groups', ['uuid'])
        # instance_group_policy
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'policy',
                       'group_id']:
            self.assertColumnExists(engine, 'instance_group_policy', column)
        self.assertIndexExists(engine, 'instance_group_policy',
                               'instance_group_policy_policy_idx')
        # Ensure the foreign key still exists
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('instance_group_policy')[0]
        self.assertEqual('instance_groups', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])
        # instance_group_member
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'instance_uuid',
                       'group_id']:
            self.assertColumnExists(engine, 'instance_group_member', column)
        self.assertIndexExists(engine, 'instance_group_member',
                               'instance_group_member_instance_idx')

    def _check_019(self, engine, data):
        self.assertColumnExists(engine, 'build_requests',
                                'block_device_mappings')

    def _pre_upgrade_020(self, engine):
        # Seed a row so _check_020 can verify data survives the column
        # type change.
        build_requests = db_utils.get_table(engine, 'build_requests')
        fake_build_req = {'id': 2020,
                          'project_id': 'fake_proj_id',
                          'block_device_mappings': 'fake_BDM'}
        build_requests.insert().execute(fake_build_req)

    def _check_020(self, engine, data):
        build_requests = db_utils.get_table(engine, 'build_requests')
        if engine.name == 'mysql':
            # Migration 020 widened block_device_mappings to MEDIUMTEXT on
            # MySQL; other backends have no size-limited TEXT to check.
            self.assertIsInstance(build_requests.c.block_device_mappings.type,
                                  sqlalchemy.dialects.mysql.MEDIUMTEXT)
        fake_build_req = build_requests.select(
            build_requests.c.id == 2020).execute().first()
        self.assertEqual('fake_BDM', fake_build_req.block_device_mappings)

    def _check_026(self, engine, data):
        self.assertColumnExists(engine, 'resource_classes', 'id')
        self.assertColumnExists(engine, 'resource_classes', 'name')

    def _check_027(self, engine, data):
        # quota_classes
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'class_name',
                       'resource',
                       'hard_limit']:
            self.assertColumnExists(engine, 'quota_classes', column)
        self.assertIndexExists(engine, 'quota_classes',
                               'quota_classes_class_name_idx')
        # quota_usages
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'project_id',
                       'resource',
                       'in_use',
                       'reserved',
                       'until_refresh',
                       'user_id']:
            self.assertColumnExists(engine, 'quota_usages', column)
        self.assertIndexExists(engine, 'quota_usages',
                               'quota_usages_project_id_idx')
        self.assertIndexExists(engine, 'quota_usages',
                               'quota_usages_user_id_idx')
        # quotas
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'project_id',
                       'resource',
                       'hard_limit']:
            self.assertColumnExists(engine, 'quotas', column)
        self.assertUniqueConstraintExists(engine, 'quotas',
                                          ['project_id', 'resource'])
        # project_user_quotas
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'user_id',
                       'project_id',
                       'resource',
                       'hard_limit']:
            self.assertColumnExists(engine, 'project_user_quotas', column)
        self.assertUniqueConstraintExists(engine, 'project_user_quotas',
                                          ['user_id', 'project_id',
                                           'resource'])
        self.assertIndexExists(engine, 'project_user_quotas',
                               'project_user_quotas_project_id_idx')
        self.assertIndexExists(engine, 'project_user_quotas',
                               'project_user_quotas_user_id_idx')
        # reservations
        for column in ['created_at',
                       'updated_at',
                       'id',
                       'uuid',
                       'usage_id',
                       'project_id',
                       'resource',
                       'delta',
                       'expire',
                       'user_id']:
            self.assertColumnExists(engine, 'reservations', column)
        self.assertIndexExists(engine, 'reservations',
                               'reservations_project_id_idx')
        self.assertIndexExists(engine, 'reservations',
                               'reservations_uuid_idx')
        self.assertIndexExists(engine, 'reservations',
                               'reservations_expire_idx')
        self.assertIndexExists(engine, 'reservations',
                               'reservations_user_id_idx')
        # Ensure the foreign key still exists
        inspector = reflection.Inspector.from_engine(engine)
        # There should only be one foreign key here
        fk = inspector.get_foreign_keys('reservations')[0]
        self.assertEqual('quota_usages', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])

    def _pre_upgrade_028(self, engine):
        # Seed a row so _check_028 can verify data survives the column
        # type change.
        build_requests = db_utils.get_table(engine, 'build_requests')
        fake_build_req = {'id': 2021,
                          'project_id': 'fake_proj_id',
                          'instance': '{"uuid": "foo", "name": "bar"}'}
        build_requests.insert().execute(fake_build_req)

    def _check_028(self, engine, data):
        build_requests = db_utils.get_table(engine, 'build_requests')
        if engine.name == 'mysql':
            # Migration 028 widened build_requests.instance to MEDIUMTEXT.
            # (Bug fix: the original asserted on block_device_mappings, a
            # copy/paste from _check_020, which migration 020 had already
            # converted -- so 028's change went unverified.)
            self.assertIsInstance(build_requests.c.instance.type,
                                  sqlalchemy.dialects.mysql.MEDIUMTEXT)
        fake_build_req = build_requests.select(
            build_requests.c.id == 2021).execute().first()
        self.assertEqual('{"uuid": "foo", "name": "bar"}',
                         fake_build_req.instance)

    def _check_029(self, engine, data):
        for column in ['created_at', 'updated_at', 'id', 'uuid']:
            self.assertColumnExists(engine, 'placement_aggregates', column)
class TestNovaAPIMigrationsWalkSQLite(NovaAPIMigrationsWalk,
                                      test_base.DbTestCase,
                                      test.NoDBTestCase):
    """Migration walk run against in-memory SQLite."""
    pass
class TestNovaAPIMigrationsWalkMySQL(NovaAPIMigrationsWalk,
                                     test_base.MySQLOpportunisticTestCase,
                                     test.NoDBTestCase):
    """Migration walk run opportunistically against MySQL."""
    pass
class TestNovaAPIMigrationsWalkPostgreSQL(NovaAPIMigrationsWalk,
        test_base.PostgreSQLOpportunisticTestCase, test.NoDBTestCase):
    """Migration walk run opportunistically against PostgreSQL."""
    pass
| |
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Networks that take as input global and per-arm features, and output rewards."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Callable, Optional, Sequence, Text
import gin
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.bandits.specs import utils as bandit_spec_utils
from tf_agents.networks import encoding_network
from tf_agents.networks import network
from tf_agents.networks import q_network
from tf_agents.specs import tensor_spec
from tf_agents.typing import types
def _remove_num_actions_dim_from_spec(
    observation_spec: types.NestedTensorSpec) -> types.NestedTensorSpec:
  """Removes the extra `num_actions` dimension from the observation spec.

  The per-arm feature spec loses its outermost dimension; the global
  features (and the optional `num_actions` feature, when present) are
  carried over unchanged.
  """
  global_spec = observation_spec[bandit_spec_utils.GLOBAL_FEATURE_KEY]
  arm_spec = tensor_spec.remove_outer_dims_nest(
      observation_spec[bandit_spec_utils.PER_ARM_FEATURE_KEY], 1)
  stripped_spec = {
      bandit_spec_utils.GLOBAL_FEATURE_KEY: global_spec,
      bandit_spec_utils.PER_ARM_FEATURE_KEY: arm_spec,
  }
  num_actions_key = bandit_spec_utils.NUM_ACTIONS_FEATURE_KEY
  if num_actions_key in observation_spec:
    stripped_spec[num_actions_key] = observation_spec[num_actions_key]
  return stripped_spec
@gin.configurable
def create_feed_forward_common_tower_network(
    observation_spec: types.NestedTensorSpec,
    global_layers: Sequence[int],
    arm_layers: Sequence[int],
    common_layers: Sequence[int],
    output_dim: int = 1,
    global_preprocessing_combiner: Optional[Callable[..., types.Tensor]] = None,
    arm_preprocessing_combiner: Optional[Callable[..., types.Tensor]] = None,
    activation_fn: Callable[[types.Tensor],
                            types.Tensor] = tf.keras.activations.relu,
    name: Optional[str] = None) -> types.Network:
  """Builds a common tower network with feedforward global/arm towers.

  The resulting network can serve either `GreedyRewardPredictionPolicy`
  (requires `output_dim=1`; the common tower is then a `QNetwork` used as a
  reward predictor) or `NeuralLinUCBPolicy` (the common tower is an encoding
  network and `output_dim` is the encoding dimension consumed by a reward
  layer or a LinUCB method).

  Args:
    observation_spec: A nested tensor spec containing the specs for global as
      well as per-arm observations.
    global_layers: Iterable of ints. Specifies the layers of the global tower.
    arm_layers: Iterable of ints. Specifies the layers of the arm tower.
    common_layers: Iterable of ints. Specifies the layers of the common tower.
    output_dim: The output dimension of the network. If 1, the common tower
      will be a QNetwork. Otherwise, the common tower will be an encoding
      network with the specified output dimension.
    global_preprocessing_combiner: Preprocessing combiner for global features.
    arm_preprocessing_combiner: Preprocessing combiner for the arm features.
    activation_fn: A keras activation, specifying the activation function used
      in all layers. Defaults to relu.
    name: The network name to use. Shows up in Tensorboard losses.

  Returns:
    A network that takes observations adhering observation_spec and outputs
    reward estimates for every action.
  """
  spec = _remove_num_actions_dim_from_spec(observation_spec)
  global_spec = spec[bandit_spec_utils.GLOBAL_FEATURE_KEY]
  arm_spec = spec[bandit_spec_utils.PER_ARM_FEATURE_KEY]

  global_tower = encoding_network.EncodingNetwork(
      input_tensor_spec=global_spec,
      fc_layer_params=global_layers,
      activation_fn=activation_fn,
      preprocessing_combiner=global_preprocessing_combiner)
  arm_tower = encoding_network.EncodingNetwork(
      input_tensor_spec=arm_spec,
      fc_layer_params=arm_layers,
      activation_fn=activation_fn,
      preprocessing_combiner=arm_preprocessing_combiner)

  # An encoding network with no fc layers passes its input through unchanged,
  # so in that case the tower's output width is the width of its input spec.
  if global_layers:
    global_out_dim = global_layers[-1]
  else:
    global_out_dim = global_spec.shape[-1]
  if arm_layers:
    arm_out_dim = arm_layers[-1]
  else:
    arm_out_dim = arm_spec.shape[-1]

  common_input_spec = tensor_spec.TensorSpec(
      shape=(global_out_dim + arm_out_dim,), dtype=tf.float32)
  if output_dim == 1:
    # Dummy scalar action spec: the QNetwork is only used to predict rewards.
    common_tower = q_network.QNetwork(
        input_tensor_spec=common_input_spec,
        action_spec=tensor_spec.BoundedTensorSpec(
            shape=(), minimum=0, maximum=0, dtype=tf.int32),
        fc_layer_params=common_layers,
        activation_fn=activation_fn)
  else:
    common_tower = encoding_network.EncodingNetwork(
        input_tensor_spec=common_input_spec,
        fc_layer_params=list(common_layers) + [output_dim],
        activation_fn=activation_fn)

  return GlobalAndArmCommonTowerNetwork(
      spec,
      global_tower,
      arm_tower,
      common_tower,
      name=name)
def create_feed_forward_dot_product_network(
    observation_spec: types.NestedTensorSpec,
    global_layers: Sequence[int],
    arm_layers: Sequence[int],
    activation_fn: Callable[[types.Tensor],
                            types.Tensor] = tf.keras.activations.relu
) -> types.Network:
  """Builds a dot-product network with feedforward global/arm towers.

  Args:
    observation_spec: A nested tensor spec containing the specs for global as
      well as per-arm observations.
    global_layers: Iterable of ints. Specifies the layers of the global tower.
    arm_layers: Iterable of ints. Specifies the layers of the arm tower. The
      last element of arm_layers has to be equal to that of global_layers.
    activation_fn: A keras activation, specifying the activation function used
      in all layers. Defaults to relu.

  Returns:
    A dot product network that takes observations adhering observation_spec
    and outputs reward estimates for every action.

  Raises:
    ValueError: If the last arm layer does not match the last global layer.
  """
  # Both towers must encode into the same dimension for the dot product.
  if global_layers[-1] != arm_layers[-1]:
    raise ValueError('Last layer size of global and arm layers should match.')

  spec = _remove_num_actions_dim_from_spec(observation_spec)
  global_tower = encoding_network.EncodingNetwork(
      input_tensor_spec=spec[bandit_spec_utils.GLOBAL_FEATURE_KEY],
      fc_layer_params=global_layers,
      activation_fn=activation_fn)
  arm_tower = encoding_network.EncodingNetwork(
      input_tensor_spec=spec[bandit_spec_utils.PER_ARM_FEATURE_KEY],
      fc_layer_params=arm_layers,
      activation_fn=activation_fn)
  return GlobalAndArmDotProductNetwork(spec, global_tower, arm_tower)
@gin.configurable
class GlobalAndArmCommonTowerNetwork(network.Network):
  """A network that takes global and arm observations and outputs rewards.

  This network takes the output of the global and per-arm networks, and leads
  them through a common network, that in turn outputs reward estimates.
  """

  def __init__(self,
               observation_spec: types.NestedTensorSpec,
               global_network: types.Network,
               arm_network: types.Network,
               common_network: types.Network,
               name='GlobalAndArmCommonTowerNetwork') -> types.Network:
    """Initializes an instance of `GlobalAndArmCommonTowerNetwork`.

    The network architecture contains networks for both the global and the arm
    features. The outputs of these networks are concatenated and led through a
    third (common) network which in turn outputs reward estimates.

    Args:
      observation_spec: The observation spec for the policy that uses this
        network.
      global_network: The network that takes the global features as input.
      arm_network: The network that takes the arm features as input.
      common_network: The network that takes as input the concatenation of the
        outputs of the global and the arm networks.
      name: The name of this instance of `GlobalAndArmCommonTowerNetwork`.
    """
    super(GlobalAndArmCommonTowerNetwork, self).__init__(
        input_tensor_spec=observation_spec, state_spec=(), name=name)
    self._global_network = global_network
    self._arm_network = arm_network
    self._common_network = common_network

  def call(self, observation, step_type=None, network_state=()):
    """Runs the observation through the network.

    Returns a `(output, state)` pair; when the common network is a `QNetwork`
    the trailing singleton reward dimension is squeezed away.
    """
    global_obs = observation[bandit_spec_utils.GLOBAL_FEATURE_KEY]
    arm_obs = observation[bandit_spec_utils.PER_ARM_FEATURE_KEY]
    arm_output, arm_state = self._arm_network(
        arm_obs, step_type=step_type, network_state=network_state)
    # Reshape arm output to rank 3 tensor.
    arm_output_shape = tf.shape(arm_output)
    batch_size = arm_output_shape[0]
    inner_dim = 1
    outer_dim = arm_output_shape[-1]
    if arm_output.shape.rank > 2:
      # Cannot have undefined inner dimension in arm output shape.
      # (np.prod below needs concrete ints to compute the flattened size.)
      inner_dims = arm_output.shape[1:-1]
      if any(d is None for d in inner_dims):
        raise ValueError('inner dimensions of arm output cannot be unknown; '
                         f'arm_output.shape: {arm_output.shape}')
      inner_dim = np.prod(inner_dims)
    # All middle dimensions are flattened into a single per-arm axis, giving
    # a [batch, num_arms, features] tensor.
    arm_output = tf.reshape(
        arm_output, shape=[batch_size, inner_dim, outer_dim])
    global_output, global_state = self._global_network(
        global_obs, step_type=step_type, network_state=network_state)
    # Broadcast the global encoding across the arm axis so it can be
    # concatenated with every arm's encoding.
    num_actions = tf.shape(arm_output)[1]
    global_output = tf.tile(
        tf.expand_dims(global_output, axis=1), [1, num_actions, 1])
    common_input = tf.concat([global_output, arm_output], axis=-1)
    output, state = self._common_network(common_input,
                                         (global_state, arm_state))
    if isinstance(self._common_network, q_network.QNetwork):
      output = tf.squeeze(output, axis=-1)
    return output, state
@gin.configurable
class GlobalAndArmDotProductNetwork(network.Network):
  """A network that takes global and arm observations and outputs rewards.

  The reward estimate for every arm is the dot product of that arm's encoding
  with the global encoding.
  """

  def __init__(self,
               observation_spec: types.NestedTensorSpec,
               global_network: types.Network,
               arm_network: types.Network,
               name: Optional[Text] = 'GlobalAndArmDotProductNetwork'):
    """Initializes an instance of `GlobalAndArmDotProductNetwork`.

    Args:
      observation_spec: The observation spec for the policy that uses this
        network.
      global_network: The network that takes the global features as input.
      arm_network: The network that takes the arm features as input.
      name: The name of this instance of `GlobalAndArmDotProductNetwork`.
    """
    super(GlobalAndArmDotProductNetwork, self).__init__(
        input_tensor_spec=observation_spec, state_spec=(), name=name)
    self._global_network = global_network
    self._arm_network = arm_network

  def call(self, observation, step_type=None, network_state=()):
    """Runs the observation through the network."""
    global_features = observation[bandit_spec_utils.GLOBAL_FEATURE_KEY]
    per_arm_features = observation[bandit_spec_utils.PER_ARM_FEATURE_KEY]
    global_encoding, global_state = self._global_network(
        global_features, step_type=step_type, network_state=network_state)
    arm_encoding, arm_state = self._arm_network(
        per_arm_features, step_type=step_type, network_state=network_state)
    # matvec contracts the global encoding against each arm's encoding,
    # producing one scalar reward estimate per arm.
    rewards = tf.linalg.matvec(arm_encoding, global_encoding)
    return rewards, global_state + arm_state
| |
from __future__ import unicode_literals
import io
import os
import subprocess
import time
from .common import AudioConversionError, PostProcessor
from ..compat import (
compat_subprocess_get_DEVNULL,
)
from ..utils import (
encodeArgument,
encodeFilename,
get_exe_version,
is_outdated_version,
PostProcessingError,
prepend_extension,
shell_quote,
subtitles_filename,
dfxp2srt,
ISO639Utils,
)
class FFmpegPostProcessorError(PostProcessingError):
    """Raised when an ffmpeg/avconv invocation fails or no converter exists."""
    pass
class FFmpegPostProcessor(PostProcessor):
    """Base class for postprocessors that shell out to ffmpeg/avconv.

    On construction it probes for the four executables avprobe/avconv/
    ffmpeg/ffprobe (honoring the downloader's 'ffmpeg_location' and
    'prefer_ffmpeg' params) and records their paths and versions, so
    subclasses can build command lines via run_ffmpeg() /
    run_ffmpeg_multiple_files().
    """

    def __init__(self, downloader=None):
        PostProcessor.__init__(self, downloader)
        self._determine_executables()

    def check_version(self):
        # Raises if neither ffmpeg nor avconv was found; only warns (via the
        # downloader, when present) if the detected version is outdated.
        if not self.available:
            raise FFmpegPostProcessorError('ffmpeg or avconv not found. Please install one.')
        required_version = '10-0' if self.basename == 'avconv' else '1.0'
        if is_outdated_version(
                self._versions[self.basename], required_version):
            warning = 'Your copy of %s is outdated, update %s to version %s or newer if you encounter any errors.' % (
                self.basename, self.basename, required_version)
            if self._downloader:
                self._downloader.report_warning(warning)

    @staticmethod
    def get_versions(downloader=None):
        # Convenience accessor: probe and return the {program: version} map.
        return FFmpegPostProcessor(downloader)._versions

    def _determine_executables(self):
        """Locate the converter/probe executables and cache paths + versions.

        Populates self._paths, self._versions, self.basename (converter) and
        self.probe_basename (prober); the basenames stay None when nothing
        usable is found.
        """
        programs = ['avprobe', 'avconv', 'ffmpeg', 'ffprobe']
        prefer_ffmpeg = False
        self.basename = None
        self.probe_basename = None
        self._paths = None
        self._versions = None
        if self._downloader:
            prefer_ffmpeg = self._downloader.params.get('prefer_ffmpeg', False)
            location = self._downloader.params.get('ffmpeg_location')
            if location is not None:
                if not os.path.exists(location):
                    self._downloader.report_warning(
                        'ffmpeg-location %s does not exist! '
                        'Continuing without avconv/ffmpeg.' % (location))
                    self._versions = {}
                    return
                elif not os.path.isdir(location):
                    # A file was given: derive the program name from the
                    # basename and treat its directory as the search location.
                    basename = os.path.splitext(os.path.basename(location))[0]
                    if basename not in programs:
                        self._downloader.report_warning(
                            'Cannot identify executable %s, its basename should be one of %s. '
                            'Continuing without avconv/ffmpeg.' %
                            (location, ', '.join(programs)))
                        self._versions = {}
                        return None
                    location = os.path.dirname(os.path.abspath(location))
                    if basename in ('ffmpeg', 'ffprobe'):
                        prefer_ffmpeg = True
                self._paths = dict(
                    (p, os.path.join(location, p)) for p in programs)
                self._versions = dict(
                    (p, get_exe_version(self._paths[p], args=['-version']))
                    for p in programs)
        if self._versions is None:
            # No explicit location: rely on the system PATH.
            self._versions = dict(
                (p, get_exe_version(p, args=['-version'])) for p in programs)
            self._paths = dict((p, p) for p in programs)
        if prefer_ffmpeg:
            prefs = ('ffmpeg', 'avconv')
        else:
            prefs = ('avconv', 'ffmpeg')
        for p in prefs:
            if self._versions[p]:
                self.basename = p
                break
        if prefer_ffmpeg:
            prefs = ('ffprobe', 'avprobe')
        else:
            prefs = ('avprobe', 'ffprobe')
        for p in prefs:
            if self._versions[p]:
                self.probe_basename = p
                break

    @property
    def available(self):
        # True when a converter (ffmpeg or avconv) was found.
        return self.basename is not None

    @property
    def executable(self):
        return self._paths[self.basename]

    @property
    def probe_available(self):
        # True when a prober (ffprobe or avprobe) was found.
        return self.probe_basename is not None

    @property
    def probe_executable(self):
        return self._paths[self.probe_basename]

    def run_ffmpeg_multiple_files(self, input_paths, out_path, opts):
        """Run the converter on input_paths producing out_path with opts.

        Raises FFmpegPostProcessorError (with the last stderr line) on a
        non-zero exit; preserves the oldest input mtime on the output.
        """
        self.check_version()
        oldest_mtime = min(
            os.stat(encodeFilename(path)).st_mtime for path in input_paths)
        opts += self._configuration_args()
        files_cmd = []
        for path in input_paths:
            files_cmd.extend([
                encodeArgument('-i'),
                encodeFilename(self._ffmpeg_filename_argument(path), True)
            ])
        cmd = ([encodeFilename(self.executable, True), encodeArgument('-y')] +
               files_cmd +
               [encodeArgument(o) for o in opts] +
               [encodeFilename(self._ffmpeg_filename_argument(out_path), True)])
        if self._downloader.params.get('verbose', False):
            self._downloader.to_screen('[debug] ffmpeg command line: %s' % shell_quote(cmd))
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            stderr = stderr.decode('utf-8', 'replace')
            msg = stderr.strip().split('\n')[-1]
            raise FFmpegPostProcessorError(msg)
        self.try_utime(out_path, oldest_mtime, oldest_mtime)

    def run_ffmpeg(self, path, out_path, opts):
        # Single-input convenience wrapper.
        self.run_ffmpeg_multiple_files([path], out_path, opts)

    def _ffmpeg_filename_argument(self, fn):
        # Always use 'file:' because the filename may contain ':' (ffmpeg
        # interprets that as a protocol) or can start with '-' (-- is broken in
        # ffmpeg, see https://ffmpeg.org/trac/ffmpeg/ticket/2127 for details)
        return 'file:' + fn
class FFmpegExtractAudioPP(FFmpegPostProcessor):
    """Extracts the audio track from a downloaded file.

    Copies the stream losslessly when the source codec already matches the
    preference (or 'best' is requested), otherwise transcodes to the
    preferred codec/quality.
    """

    def __init__(self, downloader=None, preferredcodec=None, preferredquality=None, nopostoverwrites=False):
        FFmpegPostProcessor.__init__(self, downloader)
        if preferredcodec is None:
            preferredcodec = 'best'
        # 'best' means: keep the source codec (stream copy) when possible.
        self._preferredcodec = preferredcodec
        # Interpreted as a VBR quality when < 10, else as a bitrate in kbit/s.
        self._preferredquality = preferredquality
        self._nopostoverwrites = nopostoverwrites

    def get_audio_codec(self, path):
        """Return the codec name of the file's audio stream, or None.

        Uses ffprobe/avprobe '-show_streams' output; returns None when the
        probe fails or no audio stream is found.
        """
        if not self.probe_available:
            raise PostProcessingError('ffprobe or avprobe not found. Please install one.')
        try:
            cmd = [
                encodeFilename(self.probe_executable, True),
                encodeArgument('-show_streams'),
                encodeFilename(self._ffmpeg_filename_argument(path), True)]
            if self._downloader.params.get('verbose', False):
                self._downloader.to_screen('[debug] %s command line: %s' % (self.basename, shell_quote(cmd)))
            handle = subprocess.Popen(cmd, stderr=compat_subprocess_get_DEVNULL(), stdout=subprocess.PIPE, stdin=subprocess.PIPE)
            output = handle.communicate()[0]
            if handle.wait() != 0:
                return None
        except (IOError, OSError):
            return None
        audio_codec = None
        for line in output.decode('ascii', 'ignore').split('\n'):
            # codec_name appears before codec_type inside a stream section,
            # so remember the last codec seen and report it once the section
            # turns out to be an audio stream.
            if line.startswith('codec_name='):
                audio_codec = line.split('=')[1].strip()
            elif line.strip() == 'codec_type=audio' and audio_codec is not None:
                return audio_codec
        return None

    def run_ffmpeg(self, path, out_path, codec, more_opts):
        # Wraps the base runner, translating converter failures into
        # AudioConversionError for run() to report.
        if codec is None:
            acodec_opts = []
        else:
            acodec_opts = ['-acodec', codec]
        opts = ['-vn'] + acodec_opts + more_opts
        try:
            FFmpegPostProcessor.run_ffmpeg(self, path, out_path, opts)
        except FFmpegPostProcessorError as err:
            raise AudioConversionError(err.msg)

    def run(self, information):
        """Extract/convert the audio of information['filepath'].

        Returns (files_to_delete, updated_info); the original file is listed
        for deletion unless the output path would collide with it.
        """
        path = information['filepath']
        filecodec = self.get_audio_codec(path)
        if filecodec is None:
            raise PostProcessingError('WARNING: unable to obtain file audio codec with ffprobe')
        more_opts = []
        if self._preferredcodec == 'best' or self._preferredcodec == filecodec or (self._preferredcodec == 'm4a' and filecodec == 'aac'):
            if filecodec == 'aac' and self._preferredcodec in ['m4a', 'best']:
                # Lossless, but in another container
                acodec = 'copy'
                extension = 'm4a'
                more_opts = ['-bsf:a', 'aac_adtstoasc']
            elif filecodec in ['aac', 'mp3', 'vorbis', 'opus']:
                # Lossless if possible
                acodec = 'copy'
                extension = filecodec
                if filecodec == 'aac':
                    more_opts = ['-f', 'adts']
                if filecodec == 'vorbis':
                    extension = 'ogg'
            else:
                # MP3 otherwise.
                acodec = 'libmp3lame'
                extension = 'mp3'
                more_opts = []
                if self._preferredquality is not None:
                    if int(self._preferredquality) < 10:
                        more_opts += ['-q:a', self._preferredquality]
                    else:
                        more_opts += ['-b:a', self._preferredquality + 'k']
        else:
            # We convert the audio (lossy)
            acodec = {'mp3': 'libmp3lame', 'aac': 'aac', 'm4a': 'aac', 'opus': 'opus', 'vorbis': 'libvorbis', 'wav': None}[self._preferredcodec]
            extension = self._preferredcodec
            more_opts = []
            if self._preferredquality is not None:
                # The opus codec doesn't support the -aq option
                if int(self._preferredquality) < 10 and extension != 'opus':
                    more_opts += ['-q:a', self._preferredquality]
                else:
                    more_opts += ['-b:a', self._preferredquality + 'k']
            if self._preferredcodec == 'aac':
                more_opts += ['-f', 'adts']
            if self._preferredcodec == 'm4a':
                more_opts += ['-bsf:a', 'aac_adtstoasc']
            if self._preferredcodec == 'vorbis':
                extension = 'ogg'
            if self._preferredcodec == 'wav':
                extension = 'wav'
                more_opts += ['-f', 'wav']
        prefix, sep, ext = path.rpartition('.')  # not os.path.splitext, since the latter does not work on unicode in all setups
        new_path = prefix + sep + extension
        # If we download foo.mp3 and convert it to... foo.mp3, then don't delete foo.mp3, silly.
        if (new_path == path or
                (self._nopostoverwrites and os.path.exists(encodeFilename(new_path)))):
            self._downloader.to_screen('[ffmpeg] Post-process file %s exists, skipping' % new_path)
            return [], information
        try:
            self._downloader.to_screen('[ffmpeg] Destination: ' + new_path)
            self.run_ffmpeg(path, new_path, acodec, more_opts)
        except AudioConversionError as e:
            raise PostProcessingError(
                'audio conversion failed: ' + e.msg)
        except Exception:
            raise PostProcessingError('error running ' + self.basename)
        # Try to update the date time for extracted audio file.
        if information.get('filetime') is not None:
            self.try_utime(
                new_path, time.time(), information['filetime'],
                errnote='Cannot update utime of audio file')
        information['filepath'] = new_path
        information['ext'] = extension
        return [path], information
class FFmpegVideoConvertorPP(FFmpegPostProcessor):
    """Converts a downloaded video into the preferred container format."""

    def __init__(self, downloader=None, preferedformat=None):
        super(FFmpegVideoConvertorPP, self).__init__(downloader)
        self._preferedformat = preferedformat

    def run(self, information):
        source_path = information['filepath']
        if information['ext'] == self._preferedformat:
            # Already in the target format; nothing to convert.
            self._downloader.to_screen('[ffmpeg] Not converting video file %s - already is in target format %s' % (source_path, self._preferedformat))
            return [], information
        # AVI needs an explicit codec and fourcc tag; other targets use
        # ffmpeg's defaults for the container.
        options = ['-c:v', 'libxvid', '-vtag', 'XVID'] if self._preferedformat == 'avi' else []
        stem, dot, _old_ext = source_path.rpartition('.')
        outpath = stem + dot + self._preferedformat
        self._downloader.to_screen('[' + 'ffmpeg' + '] Converting video from %s to %s, Destination: ' % (information['ext'], self._preferedformat) + outpath)
        self.run_ffmpeg(source_path, outpath, options)
        information['filepath'] = outpath
        information['format'] = self._preferedformat
        information['ext'] = self._preferedformat
        return [source_path], information
class FFmpegEmbedSubtitlePP(FFmpegPostProcessor):
    """Muxes previously downloaded subtitle files into the media container."""

    def run(self, information):
        if information['ext'] not in ['mp4', 'mkv']:
            self._downloader.to_screen('[ffmpeg] Subtitles can only be embedded in mp4 or mkv files')
            return [], information
        subtitles = information.get('requested_subtitles')
        if not subtitles:
            self._downloader.to_screen('[ffmpeg] There aren\'t any subtitles to embed')
            return [], information

        filename = information['filepath']
        sub_langs = list(subtitles.keys())
        sub_filenames = [subtitles_filename(filename, lang, sub_info['ext'])
                         for lang, sub_info in subtitles.items()]

        # Keep every stream of the source but drop its existing subtitle
        # streams — this postprocessor may already have run on the file once.
        opts = ['-map', '0', '-c', 'copy', '-map', '-0:s']
        if information['ext'] == 'mp4':
            opts += ['-c:s', 'mov_text']
        for i, lang in enumerate(sub_langs):
            # Input 0 is the video; subtitle inputs start at index 1.
            opts.extend(['-map', '%d:0' % (i + 1)])
            lang_code = ISO639Utils.short2long(lang)
            if lang_code is not None:
                opts.extend(['-metadata:s:s:%d' % i, 'language=%s' % lang_code])

        temp_filename = prepend_extension(filename, 'temp')
        self._downloader.to_screen('[ffmpeg] Embedding subtitles in \'%s\'' % filename)
        self.run_ffmpeg_multiple_files([filename] + sub_filenames, temp_filename, opts)
        os.remove(encodeFilename(filename))
        os.rename(encodeFilename(temp_filename), encodeFilename(filename))
        return sub_filenames, information
class FFmpegMetadataPP(FFmpegPostProcessor):
    """Writes metadata tags (title, date, artist, ...) from the info dict."""

    def run(self, info):
        metadata = {}
        if info.get('title') is not None:
            metadata['title'] = info['title']
        if info.get('upload_date') is not None:
            metadata['date'] = info['upload_date']
        # Prefer an explicit artist, then the uploader, then the uploader id.
        for artist_key in ('artist', 'uploader', 'uploader_id'):
            if info.get(artist_key) is not None:
                metadata['artist'] = info[artist_key]
                break
        if info.get('description') is not None:
            metadata['description'] = info['description']
            metadata['comment'] = info['description']
        if info.get('webpage_url') is not None:
            metadata['purl'] = info['webpage_url']
        if info.get('album') is not None:
            metadata['album'] = info['album']

        if not metadata:
            self._downloader.to_screen('[ffmpeg] There isn\'t any metadata to add')
            return [], info

        filename = info['filepath']
        temp_filename = prepend_extension(filename, 'temp')
        # m4a gets '-vn -acodec copy' instead of a blanket '-c copy'.
        options = ['-vn', '-acodec', 'copy'] if info['ext'] == 'm4a' else ['-c', 'copy']
        for tag, value in metadata.items():
            options.extend(['-metadata', '%s=%s' % (tag, value)])

        self._downloader.to_screen('[ffmpeg] Adding metadata to \'%s\'' % filename)
        self.run_ffmpeg(filename, temp_filename, options)
        os.remove(encodeFilename(filename))
        os.rename(encodeFilename(temp_filename), encodeFilename(filename))
        return [], info
class FFmpegMergerPP(FFmpegPostProcessor):
    """Muxes separately downloaded video and audio streams into one file."""

    def run(self, info):
        target = info['filepath']
        merged_tmp = prepend_extension(target, 'temp')
        # Stream-copy the first input's video and the second input's audio.
        opts = ['-c', 'copy', '-map', '0:v:0', '-map', '1:a:0']
        self._downloader.to_screen('[ffmpeg] Merging formats into "%s"' % target)
        self.run_ffmpeg_multiple_files(info['__files_to_merge'], merged_tmp, opts)
        os.rename(encodeFilename(merged_tmp), encodeFilename(target))
        return info['__files_to_merge'], info

    def can_merge(self):
        # TODO: figure out merge-capable ffmpeg version
        if self.basename != 'avconv':
            return True

        required_version = '10-0'
        if not is_outdated_version(
                self._versions[self.basename], required_version):
            return True
        warning = ('Your copy of %s is outdated and unable to properly mux separate video and audio files, '
                   'youtube-dl will download single file media. '
                   'Update %s to version %s or newer to fix this.') % (
                       self.basename, self.basename, required_version)
        if self._downloader:
            self._downloader.report_warning(warning)
        return False
class FFmpegFixupStretchedPP(FFmpegPostProcessor):
    """Rewrites the display aspect ratio for entries with a stretched_ratio."""

    def run(self, info):
        ratio = info.get('stretched_ratio')
        if ratio in (None, 1):
            # No stretching to correct.
            return [], info

        path = info['filepath']
        fixed_tmp = prepend_extension(path, 'temp')
        # Only the container-level aspect is changed; streams are copied.
        options = ['-c', 'copy', '-aspect', '%f' % ratio]
        self._downloader.to_screen('[ffmpeg] Fixing aspect ratio in "%s"' % path)
        self.run_ffmpeg(path, fixed_tmp, options)
        os.remove(encodeFilename(path))
        os.rename(encodeFilename(fixed_tmp), encodeFilename(path))
        return [], info
class FFmpegFixupM4aPP(FFmpegPostProcessor):
    """Re-muxes m4a DASH segments into a proper mp4 container."""

    def run(self, info):
        if info.get('container') != 'm4a_dash':
            # Only m4a DASH downloads need their container corrected.
            return [], info

        path = info['filepath']
        fixed_tmp = prepend_extension(path, 'temp')
        # Stream copy into an mp4 container; no transcoding.
        options = ['-c', 'copy', '-f', 'mp4']
        self._downloader.to_screen('[ffmpeg] Correcting container in "%s"' % path)
        self.run_ffmpeg(path, fixed_tmp, options)
        os.remove(encodeFilename(path))
        os.rename(encodeFilename(fixed_tmp), encodeFilename(path))
        return [], info
class FFmpegSubtitlesConvertorPP(FFmpegPostProcessor):
    """Converts downloaded subtitle files into the requested format.

    dfxp/ttml subtitles are first converted to srt in Python (ffmpeg cannot
    read them), then to the target format with ffmpeg when needed.
    """

    def __init__(self, downloader=None, format=None):
        super(FFmpegSubtitlesConvertorPP, self).__init__(downloader)
        # Target subtitle extension, e.g. 'srt' or 'vtt'.
        self.format = format

    def run(self, info):
        subs = info.get('requested_subtitles')
        filename = info['filepath']
        new_ext = self.format
        # ffmpeg's muxer for vtt is called 'webvtt'.
        new_format = new_ext
        if new_format == 'vtt':
            new_format = 'webvtt'
        if subs is None:
            self._downloader.to_screen('[ffmpeg] There aren\'t any subtitles to convert')
            return [], info
        self._downloader.to_screen('[ffmpeg] Converting subtitles')
        for lang, sub in subs.items():
            ext = sub['ext']
            if ext == new_ext:
                # BUG FIX: the two literals used to concatenate to
                # "requestedformat" — a space was missing.
                self._downloader.to_screen(
                    '[ffmpeg] Subtitle file for %s is already in the requested '
                    'format' % new_ext)
                continue
            new_file = subtitles_filename(filename, lang, new_ext)

            if ext == 'dfxp' or ext == 'ttml':
                self._downloader.report_warning(
                    'You have requested to convert dfxp (TTML) subtitles into another format, '
                    'which results in style information loss')

                dfxp_file = subtitles_filename(filename, lang, ext)
                srt_file = subtitles_filename(filename, lang, 'srt')

                with io.open(dfxp_file, 'rt', encoding='utf-8') as f:
                    srt_data = dfxp2srt(f.read())
                with io.open(srt_file, 'wt', encoding='utf-8') as f:
                    f.write(srt_data)

                ext = 'srt'
                subs[lang] = {
                    'ext': 'srt',
                    'data': srt_data
                }

                if new_ext == 'srt':
                    continue

            self.run_ffmpeg(
                subtitles_filename(filename, lang, ext),
                new_file, ['-f', new_format])

            with io.open(new_file, 'rt', encoding='utf-8') as f:
                subs[lang] = {
                    # BUG FIX: was 'ext': ext, which recorded the stale source
                    # extension even though the data now is in new_ext format.
                    'ext': new_ext,
                    'data': f.read(),
                }

        return [], info
| |
# NOTE: DO NOT UPDATE THIS FILE - Create a new views_common.py and place any views there if you need it. then reference
# your new one in the urls.py file (which should be edited by you)
import os
import logging
import mimetypes
import importlib.util
import json
import codecs
from django.conf import settings
# from django.template import Context
from django.template import Template, Origin, RequestContext
from django.http import HttpResponse, FileResponse, JsonResponse, HttpResponseForbidden, HttpResponseRedirect
from django.views.generic import View
from django.views.decorators.csrf import csrf_protect
from django.core import serializers
# from pprint import pprint
# from django.shortcuts import redirect
from django.shortcuts import render
from django.contrib import messages
from django.contrib.auth import authenticate, login
import base64
from .forms import LoginForm
from .models import Content
log = logging.getLogger("cms.views")
# This view is called from DocrootFallbackMiddleware.process_response
# when a 404 is raised and we are not working with a template (we want to look for a static file in the docroot)
def static(request):
    """Serve a static file from DOCROOT_ROOT for the requested path.

    Returns a FileResponse when the file exists, HttpResponseForbidden for
    forbidden extensions/file names (when USE_STATIC_FORBIDDEN is on) or for
    paths that escape the docroot, and None when nothing matches so the
    middleware can fall through to a normal 404.
    """
    docroot_dir = getattr(settings, "DOCROOT_ROOT", "")
    use_static = getattr(settings, "USE_STATIC_FORBIDDEN", False)
    log.debug("docroot dir: " + docroot_dir)
    path = request.path_info
    if path.startswith("/"):
        path = path[1:]
    log.debug("path: " + path)
    file = os.path.join(docroot_dir, path)
    log.debug("file: " + file)
    # SECURITY FIX: the path comes straight from the request, so a crafted
    # "../" sequence (or a symlink) could otherwise serve files from outside
    # the docroot. Resolve the final path and refuse anything that escapes.
    if docroot_dir:
        docroot_real = os.path.realpath(docroot_dir)
        if not os.path.realpath(file).startswith(docroot_real + os.sep):
            return HttpResponseForbidden()
    if os.path.isfile(file):
        # for various reasons we don't want to serve up various file extensions. Let's look at a setting containing
        # extensions to ignore
        # USE CASE: Apache at root passes .htaccess, .dt and .py files through we don't want to show these static files
        if use_static:
            forbidden_extensions = getattr(settings, "STATIC_FORBIDDEN_EXTENSIONS", [])
            forbidden_file_names = getattr(settings, "STATIC_FORBIDDEN_FILE_NAMES", [])
            filename, ext = os.path.splitext(file)
            if ext in forbidden_extensions:
                return HttpResponseForbidden()
            elif os.path.basename(filename) in forbidden_file_names:
                return HttpResponseForbidden()
        log.debug("found static file: " + file)
        log.debug("downloading...")
        response = FileResponse(open(file, 'rb'), content_type=mimetypes.guess_type(path)[0])
        return response
    else:
        return None
# This view is called from DocrootFallbackMiddleware.process_response
# when a 404 is raised, which often means CsrfViewMiddleware.process_view
# has not been called even if CsrfViewMiddleware is installed. So we need
# to use @csrf_protect, in case the template needs {% csrf_token %}.
# However, we can't just wrap this view; if no matching page exists,
# or a redirect is required for authentication, the 404 needs to be returned
# without any CSRF checks. Therefore, we only
# CSRF protect the internal implementation (render page).
def page(request):
    """Resolve the requested url to a .dt template and render it (or None)."""
    # TemplateMeta does the url-to-template resolution; render() only invokes
    # the (CSRF-protected) render_page when a template was actually found.
    template_meta = TemplateMeta(request)
    return template_meta.render()
# This view is called from DocrootFallbackMiddleware.process_response
# when a 404 is raised. We do not need to use @csrf_protect since a web service should never contain input forms.
def api(request):
    """Resolve the requested url to an api data file and render its result."""
    return ApiMeta(request).render()
class TemplateMeta:
"""
encapsulates the core atts and methods to get a valid template
"""
def __init__(self, request):
# setup our basic attributes for the meta-data we will use for validation and template creation
self.is_found = False
self.request = request
self.docroot_dir = getattr(settings, "DOCROOT_ROOT", "")
log.debug("docroot dir: " + self.docroot_dir)
self.original_path = request.path_info.strip()
self.path = self.original_path
if self.path.startswith("/"):
self.path = self.path[1:]
self.file_name = os.path.join(self.docroot_dir, self.path)
log.debug("file: " + str(self.file_name))
self.module_name = self.path
self.template_name = self.path
# try and modify urls for logic on how to pull the correct template
self.find_template()
# get our settings needed for processing defaulting if they are not there
ignore_lanaguage_prefix = getattr(settings, 'IGNORE_LANGUAGE_PREFIX', False)
append_slash = getattr(settings, 'APPEND_SLASH', False)
if not ignore_lanaguage_prefix:
# if not found lets try and make sure it is not because of language
if not self.is_found and request.LANGUAGE_CODE:
# new code to make us django language aware (strip language code when looking for a template)
# print('request lang: ' + str(request.LANGUAGE_CODE))
# print('settings lang: ' + str(settings.LANGUAGE_CODE))
# print('languages: ' + str(settings.LANGUAGES))
lang = '/' + request.LANGUAGE_CODE + "/"
if self.original_path.startswith(lang):
self.path = self.original_path[len(lang):]
self.file_name = os.path.join(self.docroot_dir, self.path)
log.debug("language stripped file: " + str(self.file_name))
self.module_name = self.path
self.template_name = self.path
# re-try and modify urls for logic on how to pull the correct template
self.find_template()
# finally if we still don't have a template and ends with / and APPEND_SLASH is set and False strip it
if not self.is_found and request.LANGUAGE_CODE and append_slash and self.original_path.endswith('/'):
lang = '/' + request.LANGUAGE_CODE + "/"
if self.original_path.startswith(lang):
# same as above except now try to strip the last slash
self.path = self.original_path[len(lang):len(self.original_path) - 1]
self.file_name = os.path.join(self.docroot_dir, self.path)
log.debug("language stripped file: " + str(self.file_name))
self.module_name = self.path
self.template_name = self.path
# re-try and modify urls for logic on how to pull the correct template
self.find_template()
    # contains the logic for taking a request url and attempting to locate a page template for it
    def find_template(self):
        """Resolve ``self.file_name`` to an on-disk ``.dt`` template.

        Mutates ``file_name`` / ``template_name`` / ``module_name`` in place
        and sets ``self.is_found`` when a matching file exists.  Three URL
        shapes are handled:

        * ``.../page.html`` -> look for ``.../page.dt``
        * ``.../dir/``      -> look for ``.../dir/index.dt``
        * ``.../page``      -> look for ``.../page.dt``
        """
        # if the url ends in .html then try to load a corresponding template from the docroot/files directory
        if self.file_name.endswith(".html"):
            # our url will request .html but we want to look for a .dt file (required for template processing)
            # slicing off the last 4 chars ("html") keeps the trailing dot,
            # so appending "dt" yields "<name>.dt"
            self.file_name = self.file_name[:-4]
            self.file_name += "dt"
            self.template_name = self.template_name[:-4]
            self.template_name += "dt"
            if os.path.isfile(self.file_name):
                log.debug("found file: " + self.file_name)
                self.is_found = True
        elif self.file_name.endswith('/'):
            # directory-style URL: fall back to the directory's index template
            self.file_name += "index.dt"
            if os.path.isfile(self.file_name):
                log.debug("found file: " + self.file_name)
                self.module_name += "index.html"
                self.template_name += "index.dt"
                self.is_found = True
        else:
            # extensionless URL: assume a template of the same base name
            self.file_name += ".dt"
            if os.path.isfile(self.file_name):
                log.debug("found file: " + self.file_name)
                self.module_name += ".html"
                self.template_name += ".dt"
                self.is_found = True
def render(self):
if self.is_found:
log.debug("opening file: " + self.file_name)
fp = codecs.open(self.file_name, "r", encoding='utf-8')
log.debug("loading template...")
# template = Template(fp.read().encode('utf-8'), Origin(self.file_name), self.template_name)
# sas django 2.2 no longer reqiures bytes so we can go back to just reading it in
# if this has problems with utf-8 content then do a decode afterwards instead
template = Template(fp.read(), Origin(self.file_name), self.template_name)
log.debug("closing file")
fp.close()
if template:
log.debug("attempting to load context and render the template...")
return render_page(self.request, template, self.module_name)
else:
return None
    def is_found(self):
        # NOTE(review): this method is shadowed by the instance attribute
        # ``self.is_found`` assigned in __init__, so ``obj.is_found`` yields
        # the bool and ``obj.is_found()`` raises TypeError — the method is
        # effectively unreachable; consider renaming one of the two.
        return self.is_found
    def __str__(self):
        # a page meta object stringifies to its resolved template file path
        return self.file_name
class ApiMeta:
    """
    encapsulates the core atts and methods to get a valid api data file and process it

    Resolves a request path such as ``/foo.json`` to a companion
    ``foo.data.py`` module under ``settings.DOCROOT_ROOT`` and dispatches the
    HTTP method to a same-named function (GET/POST/...) in that module.
    """
    def __init__(self, request):
        # HTTP verbs a data module may implement; matched against the
        # module's top-level names to build the Allow response header
        self.ALL_OPTIONS = ['GET', 'POST', 'PUT', 'TRACE', 'DELETE', 'HEAD', 'PATCH']
        self.options = []
        # setup our basic attributes for the meta-data we will use for validation and api creation
        self.is_found = False
        self.request = request
        self.docroot_dir = getattr(settings, "DOCROOT_ROOT", "")
        log.debug("docroot dir: " + self.docroot_dir)
        self.original_path = request.path_info.strip()
        self.path = self.original_path
        if self.path.startswith("/"):
            self.path = self.path[1:]
        self.file_name = os.path.join(self.docroot_dir, self.path)
        log.debug("file: " + str(self.file_name))
        self.api_name = self.path
        # try and modify urls for logic on how to pull the correct template
        self.find_api()
        # get our settings needed for processing defaulting if they are not there
        ignore_lanaguage_prefix = getattr(settings, 'IGNORE_LANGUAGE_PREFIX', False)
        append_slash = getattr(settings, 'APPEND_SLASH', False)
        if not ignore_lanaguage_prefix:
            # if not found lets try and make sure it is not because of language
            if not self.is_found and request.LANGUAGE_CODE:
                # new code to make us django language aware (strip language code when looking for a template)
                # print('request lang: ' + str(request.LANGUAGE_CODE))
                # print('settings lang: ' + str(settings.LANGUAGE_CODE))
                # print('languages: ' + str(settings.LANGUAGES))
                lang = '/' + request.LANGUAGE_CODE + "/"
                if self.original_path.startswith(lang):
                    self.path = self.original_path[len(lang):]
                    self.file_name = os.path.join(self.docroot_dir, self.path)
                    log.debug("language stripped file: " + str(self.file_name))
                    self.api_name = self.path
                    # re-try and modify urls for logic on how to pull the correct template
                    self.find_api()
            # finally if we still don't have a template and ends with / and APPEND_SLASH is set and False strip it
            if not self.is_found and request.LANGUAGE_CODE and append_slash and self.original_path.endswith('/'):
                lang = '/' + request.LANGUAGE_CODE + "/"
                if self.original_path.startswith(lang):
                    # same as above except now try to strip the last slash
                    self.path = self.original_path[len(lang):len(self.original_path) - 1]
                    self.file_name = os.path.join(self.docroot_dir, self.path)
                    log.debug("language stripped file: " + str(self.file_name))
                    self.api_name = self.path
                    # re-try and modify urls for logic on how to pull the correct template
                    self.find_api()
    # contains the logic for taking a request url and attempting to locate an api for it
    def find_api(self):
        """Map a ``*.json`` request path onto an on-disk ``*.data.py`` module.

        Sets ``self.is_found`` when the data module exists.
        """
        # if the url ends in .json look for the companion data module
        if self.file_name.endswith(".json"):
            # slicing off "json" keeps the trailing dot, so appending
            # "data.py" yields "<name>.data.py"
            self.file_name = self.file_name[:-4]
            self.file_name += "data.py"
            if os.path.isfile(self.file_name):
                log.debug("found file: " + self.file_name)
                self.is_found = True
    def render(self):
        """Execute the data module for this request.

        :returns: an ``HttpResponse`` (JSON payload, pass-through response
            or a 405), or ``None`` when no data module was found or it
            failed to import.
        """
        # return none if not found
        if self.is_found:
            # try to load a data file if it is there in order to get the context
            # all data files should support get_context() or a context property
            try:
                log.debug("attempting to load data_file...")
                spec = importlib.util.spec_from_file_location(self.api_name, self.file_name)
                data = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(data)
                # datafile = imp.load_source(module_name, datafile_name)
                # note changing datafile below to data
            except Exception as ex:
                logging.error(str(ex))
                return None
            if data:
                # advertise every HTTP verb the module implements via Allow
                methods = dir(data)
                for method in methods:
                    if method in self.ALL_OPTIONS:
                        self.options.append(method)
                # figure out the proper method to call (get, post trace etc) return method not supported if not there
                request_method = self.request.method
                try:
                    initmethod = getattr(data, request_method)
                except AttributeError:
                    initmethod = None
                if initmethod:
                    # we may want to return something like a redirect so if is response then return it; else use for
                    # data!
                    content = initmethod(self.request)
                    if isinstance(content, HttpResponse):
                        return content
                    else:
                        response = HttpResponse(content)
                        response['Allow'] = ",".join(self.options)
                        response['Content-Type'] = "application/json"
                        return response
                else:
                    log.error(
                        "Found datafile [" + self.file_name + "] but didn't find method [" + self.request.method + "]!")
                    response = HttpResponse("Method Not Supported [" + self.request.method + "]!", status=405)
                    response['Allow'] = ",".join(self.options)
                    response['Content-Type'] = "application/json"
                    return response
    def is_found(self):
        # NOTE(review): shadowed by the instance attribute ``self.is_found``
        # set in __init__ — this method is unreachable on instances
        return self.is_found
    def __str__(self):
        # an api meta object stringifies to its resolved data-module path
        return self.file_name
@csrf_protect
def render_page(request, template, module_name):
    """
    Internal interface to the dev page view.

    Loads the template's companion ``<name>.data.py`` module (if any) to
    build the template context — via a ``get_context(request)`` callable or
    a module-level ``context`` attribute — then renders ``template``.

    :returns: the rendered ``HttpResponse``, the response returned by
        ``get_context`` (e.g. a redirect), or ``None`` when the data module
        file does not exist.
    """
    context = {}
    log.debug("template name: " + template.name)
    log.debug("module_name: " + module_name)
    datafile_name = template.origin.name
    # strip the trailing "dt" (keeping the dot) and look for "<name>.data.py"
    if datafile_name.endswith('dt'):
        datafile_name = datafile_name[:-2] + 'data.py'
        log.debug("datafilename: " + datafile_name)
    # try to load a data file if it is there in order to get the context;
    # all data files should support get_context() or a context property
    try:
        log.debug("attempting to load data_file...")
        spec = importlib.util.spec_from_file_location(module_name, datafile_name)
        data = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(data)
    except FileNotFoundError:
        # NOTE(review): a template without a companion data file renders
        # nothing at all (None propagates to the caller) — confirm this is
        # the intended contract rather than rendering with an empty context
        return None
    except Exception:
        data = None
        if settings.DEBUG:
            # bare raise preserves the original traceback while developing
            raise
    if data:
        initmethod = getattr(data, 'get_context', None)
        if initmethod:
            # the data module may return a full response (e.g. a redirect);
            # pass it straight through instead of using it as context
            context = initmethod(request)
            if isinstance(context, HttpResponse):
                return context
        else:
            context = getattr(data, 'context', {})
    template_context = RequestContext(request)
    if context:
        template_context.push(context)
    return HttpResponse(template.render(template_context))
# class based view for getting and putting cms content
class ContentApi(View):
    """JSON endpoint for reading and writing CMS content fragments."""

    def get(self, request):
        """Return all content records matching the ``uri`` query parameter.

        :returns: a JSON array of ``{uri, element_id, content}`` objects;
            empty when ``uri`` is missing or has no records (the original
            crashed with ``TypeError: list(None)`` when ``uri`` was absent).
        """
        records = []
        # get the requested uri
        uri = request.GET.get('uri', None)
        if uri:
            records = list(
                Content.objects.filter(uri=uri).values('uri', 'element_id', 'content')
            )
        return JsonResponse(records, safe=False)

    def post(self, request):
        """Create or update the content record for (uri, element_id).

        :returns: the serialized record as JSON, or a 204 response when no
            ``content`` value was supplied.
        """
        received_json_data = json.loads(request.body.decode("utf-8"))
        content = received_json_data.get('content', None)
        if not content:
            return HttpResponse(status=204)
        uri = received_json_data.get('uri', '')
        element_id = received_json_data.get('element_id', None)
        # look up the record for this uri/element_id pair, creating a fresh
        # one when it does not exist yet
        try:
            db_content = Content.objects.get(uri=uri, element_id=element_id)
        except Content.DoesNotExist:
            db_content = Content()
            db_content.uri = uri
            db_content.element_id = element_id
        db_content.content = content
        db_content.save()
        serialized_object = serializers.serialize('json', [db_content, ])
        return JsonResponse(serialized_object or [], safe=False)
# # AUTHENTICATION VIEWS
#
class LoginFormView(View):
    """Render the login form and, on success, set the ``nginxauth`` cookie
    that the nginx auth subsystem reads for subsequent requests."""

    form_class = LoginForm
    template_name = 'login.dt'

    def get(self, request, *args, **kwargs):
        """Render the login form, pre-filling the post-login target URL."""
        # try to get the target location from the header, falling back to
        # the referrer (debug output now goes through the module logger
        # instead of print)
        target = self.request.META.get('HTTP_X_TARGET')
        auth_message = self.request.META.get('HTTP_X_AUTH_MESSAGE')
        log.debug("target from header: %s", target)
        if not target:
            target = request.META.get('HTTP_REFERER')
            log.debug("target from referrer: %s", target)
        form = self.form_class(initial={'target': target})
        return render(request, self.template_name, {'form': form, 'auth_message': auth_message})

    def post(self, request, *args, **kwargs):
        """Validate the form and redirect to the target with the auth cookie set."""
        form = self.form_class(request.POST)
        if form.is_valid():
            login = form.cleaned_data['login']
            password = form.cleaned_data['password']
            target = form.cleaned_data['target'] or '/'
            log.debug("final target: %s", target)
            # if we have all our values, set the cookie and redirect
            if login and password:
                # NOTE(review): credentials are only base64 encoded, not
                # encrypted — this cookie must only ever travel over TLS,
                # and secure=True should probably be set here
                auth_str = str(login) + ":" + str(password)
                auth_cookie = base64.urlsafe_b64encode(auth_str.encode("utf-8"))
                response = HttpResponseRedirect(target)
                response.set_cookie('nginxauth', auth_cookie.decode("utf-8"), httponly=True)
                return response
            else:
                messages.add_message(request, messages.ERROR, 'Login and password must not be blank!')
        return render(request, self.template_name, {'form': form})
class LogoutView(View):
    """
    NOTE(review): despite the name, this view is a byte-for-byte copy of
    LoginFormView — it renders the login form and on POST *sets* the
    ``nginxauth`` cookie rather than deleting it.  Confirm whether a real
    logout (cookie delete / session flush) was intended here.
    """
    form_class = LoginForm
    # initial = {'key': 'value'}
    template_name = 'login.dt'
    def get(self, request, *args, **kwargs):
        # try to get target location from header
        target = self.request.META.get('HTTP_X_TARGET')
        print("form get")
        print(f"target from header: {target}")
        if not target:
            target = request.META.get('HTTP_REFERER')
            print(f"target from referrer: {target}")
        # if not target:
        #     return HttpResponseForbidden()
        # form = self.form_class(initial=self.initial)
        form = self.form_class(initial={'target': target})
        return render(request, self.template_name, {'form': form})
    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        if form.is_valid():
            login = form.cleaned_data['login']
            password = form.cleaned_data['password']
            target = form.cleaned_data['target']
            print(f"target from form: {target}")
            if not target:
                target = '/'
            print(f"final target: {str(form.cleaned_data['target'])}")
            # if we have all our values set the header
            if login and password:
                # NOTE(review): "ascii" will raise UnicodeEncodeError for
                # non-ascii passwords (LoginFormView uses utf-8), and the
                # cookie value here is a bytes object rather than a decoded
                # str — confirm both are intentional
                auth_cookie = base64.urlsafe_b64encode((str(login) + ":" + str(password)).encode("ascii"))
                response = HttpResponseRedirect(target)
                response.set_cookie('nginxauth', auth_cookie, httponly=True)
                return response
            else:
                messages.add_message(request, messages.ERROR, 'Login and password must not be blank!')
        return render(request, self.template_name, {'form': form})
class AuthenticateView(View):
    """nginx ``auth_request`` backend.

    Returns a 401 with a ``WWW-Authenticate`` challenge when no usable
    Basic-auth credentials are present (header or cookie), and an empty 200
    carrying the downstream auth headers once the user authenticates against
    Django's user store.
    """

    form_class = LoginForm
    template_name = 'login.dt'

    def get(self, request, *args, **kwargs):
        """Authenticate the request from its Basic auth header / cookie."""
        # get our header variables; the LDAP settings are passed through by
        # nginx and are currently unused (kept for the planned LDAP backend)
        realm = request.META.get('HTTP_X_LDAP_REALM', 'Restricted')
        url = request.META.get('HTTP_X_LDAP_URL')
        start_tls = request.META.get('HTTP_X_LDAP_STARTTLS', 'false')
        disable_referrals = request.META.get('HTTP_X_LDAP_DISABLEREFERRALS', 'false')
        base_dn = request.META.get('HTTP_X_LDAP_BASEDN')
        template = request.META.get('HTTP_X_LDAP_TEMPLATE', '(cn=%(username)s)')
        bind_dn = request.META.get('HTTP_X_LDAP_BINDDN', '')
        bind_password = request.META.get('HTTP_X_LDAP_BINDPASS', '')
        cookie_name = request.META.get('HTTP_X_LDAP_COOKIENAME', 'nginxauth')
        target = self.request.META.get('HTTP_X_TARGET')
        authn_header = self.request.META.get('HTTP_WWW_AUTHENTICATE')
        auth_header = self.request.META.get('HTTP_AUTHORIZATION')
        auth_cookie = self.request.COOKIES.get(cookie_name)
        log.debug("target from header: %s", target)
        if not target:
            target = request.META.get('HTTP_REFERER')
            log.debug("target from referrer: %s", target)
        # if the authorization header is blank but we have a cookie, use the
        # cookie as the authorization header to achieve auto-login
        if not auth_header and auth_cookie:
            auth_header = "Basic " + auth_cookie
        # still no authorization header: tell nginx to have the user log in
        if auth_header is None or not auth_header.lower().startswith('basic '):
            log.debug('no auth_header so telling nginx to authenticate...')
            return self._challenge(realm, "Please log in")
        # we have an auth header so try to authenticate the stored credentials
        # TODO: add check for expired credentials and to try again (session timeout)
        username, password = self._decode_credentials(auth_header)
        # NOTE: using django for now; change to ldap later
        # TODO: change to ldap?
        user = authenticate(username=username, password=password)
        if user is not None and user.is_active:
            login(request, user)
        else:
            # give an error message and re-challenge so the login page shows it
            if user is None:
                msg = "Login or Password is incorrect."
            else:
                msg = "Account has been de-activated."
            messages.add_message(request, messages.ERROR, msg)
            return self._challenge(realm, msg)
        msg = "Authenticated"
        # todo: add processing here to determine if the url is protected and we have rights (authorization)
        # todo: add processing to determine what headers to pack in auth headers and send for this url (post authz)
        # if we got this far we have a logged in user; emit the global header
        response = HttpResponse()
        response['X-Auth-Headers'] = 'sm_constitid:3074952'
        response['X-Auth-Message'] = msg
        return response

    @staticmethod
    def _challenge(realm, msg):
        """Build the 401 response that makes nginx show the login prompt."""
        response = HttpResponse('Unauthorized', status=401)
        response['Cache-Control'] = 'no-cache'
        response['WWW-Authenticate'] = 'Basic realm="' + realm + '"'
        response['X-Auth-Message'] = msg
        return response

    @staticmethod
    def _decode_credentials(auth_token):
        """Decode a ``Basic <token>`` header into (username, password).

        Security: deliberately does NOT log the decoded credentials — the
        original printed the plaintext password to stdout.
        """
        # strip the "Basic " prefix, then url-safe base64 decode the rest
        auth_str = base64.urlsafe_b64decode(auth_token[6:].encode("utf-8")).decode("utf-8")
        username, password = auth_str.split(':', 1)
        return username, password

    def get_username(self, auth_token):
        """Return the username part of a Basic auth token."""
        username, _password = self._decode_credentials(auth_token)
        return username

    def get_password(self, auth_token):
        """Return the password part of a Basic auth token."""
        _username, password = self._decode_credentials(auth_token)
        return password
# # HELPER API VIEWS (move?)
# Idea is to use this for links enhancement in ckeditor (gives immediate feedback when creating a link)
# redo this after we figure out about the requests requirement
# class UrlValidationApi(View):
# def get(self, request):
# # we will attempt to validate every URL parameter passed in
#
# is_valid = True
#
# dict_buffer = {}
# # list_dicts = []
#
# items = request.GET.items()
# if not items:
# items = request.POST.items()
# for _key, _val in items:
# # build a json list and return it
# print(_key, _val)
# check_response = False
# check_status = 500
#
# try:
# r = requests.head(_val, allow_redirects=True)
#
# if r.status_code >= 200 and r.status_code < 400:
# check_status = r.status_code
# # if we were intercepted by a login screen then we treat that as a forbidden instead of success
#                 if r.request.path_url.lower().startswith('/en/admin/login/') or \
#                         r.request.path_url.lower().startswith('/admin/login/'):
# # print('starts with login')
# check_response = False
# is_valid = False
# check_status = 403
# else:
# check_response = True
# else:
# check_response = False
# is_valid = False
#
# dict_buffer[_key] = {}
# dict_buffer[_key]["response"] = check_response
# dict_buffer[_key]["status_code"] = check_status
# # list_dicts.append(dict_buffer)
#
# except Exception as e:
# is_valid = False
# dict_buffer[_key] = {}
# dict_buffer[_key]["response"] = False
# dict_buffer[_key]["status_code"] = 500
# # list_dicts.append(dict_buffer)
#
# # myurl = request.GET.get("myurl", None)
# # # try and do a lookup for the url passed
# # myresponse = False
# # mystatus = None
# # try:
# # r = requests.head(myurl)
# # if r.status_code >= 200 and r.status_code < 400:
# # myresponse = True
# # mystatus=r.status_code
# # except Exception as e:
# # mystatus=500
# return JsonResponse({'valid': is_valid, 'results': dict_buffer})
| |
# -*- coding: utf-8 -*-
"""
github3.pulls
=============
This module contains all the classes relating to pull requests.
"""
from __future__ import unicode_literals
from json import dumps
from uritemplate import URITemplate
from . import models
from . import users
from .decorators import requires_auth
from .issues import Issue
from .issues.comment import IssueComment
from .repos.commit import RepoCommit
from .repos.contents import Contents
class PullDestination(models.GitHubCore):
    """The :class:`PullDestination <PullDestination>` object.

    Represents one side (head or base) of a pull request merge.

    See also: http://developer.github.com/v3/pulls/#get-a-single-pull-request
    """

    def __init__(self, dest, direction):
        super(PullDestination, self).__init__(dest)
        from .repos.repo import Repository
        #: Direction of the merge with respect to this destination
        self.direction = direction
        #: Full reference string of the object
        self.ref = dest.get('ref')
        #: label of the destination
        self.label = dest.get('label')
        #: :class:`User <github3.users.User>` representing the owner
        user_payload = dest.get('user')
        self.user = users.ShortUser(user_payload, None) if user_payload else None
        #: SHA of the commit at the head
        self.sha = dest.get('sha')
        self._repo_name = ''
        self._repo_owner = ''
        repo_payload = dest.get('repo')
        if repo_payload:
            self._repo_name = repo_payload.get('name')
            self._repo_owner = repo_payload['owner'].get('login')
            # note: the ``repository`` attribute only exists when the
            # payload carried a repo object
            self.repository = Repository(repo_payload, self)
        #: Tuple of (owner login, repository name)
        self.repo = (self._repo_owner, self._repo_name)

    def _repr(self):
        return '<{} [{}]>'.format(self.direction, self.label)
class PullFile(models.GitHubCore):
    """The :class:`PullFile <PullFile>` object.

    One changed file within a pull request.

    See also: http://developer.github.com/v3/pulls/#list-pull-requests-files
    """

    def _update_attributes(self, pfile):
        """Populate the attributes from the file JSON payload."""
        get = self._get_attribute
        #: SHA of the commit
        self.sha = get(pfile, 'sha')
        #: Name of the file
        self.filename = get(pfile, 'filename')
        #: Status of the file, e.g., 'added'
        self.status = get(pfile, 'status')
        #: Number of additions on this file
        self.additions_count = get(pfile, 'additions')
        #: Number of deletions on this file
        self.deletions_count = get(pfile, 'deletions')
        #: Number of changes made to this file
        self.changes_count = get(pfile, 'changes')
        #: URL to view the blob for this file
        self.blob_url = get(pfile, 'blob_url')
        #: URL to view the raw diff of this file
        self.raw_url = get(pfile, 'raw_url')
        #: Patch generated by this pull request
        self.patch = get(pfile, 'patch')
        #: URL to JSON object with content and metadata
        self.contents_url = get(pfile, 'contents_url')

    def _repr(self):
        return '<Pull Request File [{}]>'.format(self.filename)

    def contents(self):
        """Return the contents of the file.

        :returns: :class:`Contents <github3.repos.contents.Contents>`
        """
        response = self._get(self.contents_url)
        json = self._json(response, 200)
        return self._instance_or_null(Contents, json)
class PullRequest(models.GitHubCore):
    """The :class:`PullRequest <PullRequest>` object.
    Two pull request instances can be checked like so::
        p1 == p2
        p1 != p2
    And is equivalent to::
        p1.id == p2.id
        p1.id != p2.id
    See also: http://developer.github.com/v3/pulls/
    """
    def _update_attributes(self, pull):
        """Populate this object's attributes from the pull request JSON."""
        self._api = self._get_attribute(pull, 'url')
        #: Base of the merge
        self.base = self._class_attribute(
            pull, 'base', PullDestination, 'Base'
        )
        #: Body of the pull request message
        self.body = self._get_attribute(pull, 'body')
        #: Body of the pull request as HTML
        self.body_html = self._get_attribute(pull, 'body_html')
        #: Body of the pull request as plain text
        self.body_text = self._get_attribute(pull, 'body_text')
        #: Number of additions on this pull request
        self.additions_count = self._get_attribute(pull, 'additions')
        #: Number of deletions on this pull request
        self.deletions_count = self._get_attribute(pull, 'deletions')
        #: datetime object representing when the pull was closed
        self.closed_at = self._strptime_attribute(pull, 'closed_at')
        #: Number of comments
        self.comments_count = self._get_attribute(pull, 'comments')
        #: Comments url (not a template)
        self.comments_url = self._get_attribute(pull, 'comments_url')
        #: Number of commits
        self.commits_count = self._get_attribute(pull, 'commits')
        #: GitHub.com url of commits in this pull request
        self.commits_url = self._get_attribute(pull, 'commits_url')
        #: datetime object representing when the pull was created
        self.created_at = self._strptime_attribute(pull, 'created_at')
        #: URL to view the diff associated with the pull
        self.diff_url = self._get_attribute(pull, 'diff_url')
        #: The new head after the pull request
        self.head = self._class_attribute(
            pull, 'head', PullDestination, 'Head'
        )
        #: The URL of the pull request
        self.html_url = self._get_attribute(pull, 'html_url')
        #: The unique id of the pull request
        self.id = self._get_attribute(pull, 'id')
        #: The URL of the associated issue
        self.issue_url = self._get_attribute(pull, 'issue_url')
        #: Statuses URL
        self.statuses_url = self._get_attribute(pull, 'statuses_url')
        #: Dictionary of _links. Changed in 1.0
        self.links = self._get_attribute(pull, '_links', {})
        #: Boolean representing whether the pull request has been merged
        self.merged = self._get_attribute(pull, 'merged')
        #: datetime object representing when the pull was merged
        self.merged_at = self._strptime_attribute(pull, 'merged_at')
        #: Whether the pull is deemed mergeable by GitHub
        self.mergeable = self._get_attribute(pull, 'mergeable', False)
        #: Whether it would be a clean merge or not
        self.mergeable_state = self._get_attribute(pull, 'mergeable_state')
        #: :class:`User <github3.users.User>` who merged this pull
        self.merged_by = self._class_attribute(
            pull, 'merged_by', users.ShortUser, self,
        )
        #: Number of the pull/issue on the repository
        self.number = self._get_attribute(pull, 'number')
        #: The URL of the patch
        self.patch_url = self._get_attribute(pull, 'patch_url')
        #: Review comment URL Template. Expands with ``number``
        self.review_comment_url = self._class_attribute(
            pull, 'review_comment_url', URITemplate
        )
        #: Number of review comments on the pull request
        self.review_comments_count = self._get_attribute(
            pull, 'review_comments'
        )
        #: GitHub.com url for review comments (not a template)
        self.review_comments_url = self._get_attribute(
            pull, 'review_comments_url'
        )
        #: Returns ('owner', 'repository') this issue was filed on.
        self.repository = self.base
        if self.repository:
            # base is a PullDestination; expose its (owner, name) tuple
            self.repository = self.base.repo
        #: The state of the pull
        self.state = self._get_attribute(pull, 'state')
        #: The title of the request
        self.title = self._get_attribute(pull, 'title')
        #: datetime object representing the last time the object was changed
        self.updated_at = self._strptime_attribute(pull, 'updated_at')
        #: :class:`User <github3.users.User>` object representing the creator
        #: of the pull request
        self.user = self._class_attribute(pull, 'user', users.ShortUser, self)
        #: :class:`User <github3.users.User>` object representing the assignee
        #: of the pull request
        self.assignee = self._class_attribute(
            pull, 'assignee', users.ShortUser, self,
        )
    def _repr(self):
        return '<Pull Request [#{0}]>'.format(self.number)
    @requires_auth
    def close(self):
        """Close this Pull Request without merging.
        :returns: bool
        """
        return self.update(self.title, self.body, 'closed')
    @requires_auth
    def create_comment(self, body):
        """Create a comment on this pull request's issue.
        :param str body: (required), comment body
        :returns: :class:`IssueComment <github3.issues.comment.IssueComment>`
        """
        url = self.comments_url
        json = None
        if body:
            json = self._json(self._post(url, data={'body': body}), 201)
        return self._instance_or_null(IssueComment, json)
    @requires_auth
    def create_review_comment(self, body, commit_id, path, position):
        """Create a review comment on this pull request.
        All parameters are required by the GitHub API.
        :param str body: The comment text itself
        :param str commit_id: The SHA of the commit to comment on
        :param str path: The relative path of the file to comment on
        :param int position: The line index in the diff to comment on.
        :returns: The created review comment.
        :rtype: :class:`~github3.pulls.ReviewComment`
        """
        url = self._build_url('comments', base_url=self._api)
        data = {'body': body, 'commit_id': commit_id, 'path': path,
                'position': int(position)}
        json = self._json(self._post(url, data=data), 201)
        return self._instance_or_null(ReviewComment, json)
    def diff(self):
        """Return the diff.
        :returns: bytestring representation of the diff.
        """
        # the diff media type makes the pull request endpoint return a diff
        resp = self._get(self._api,
                         headers={'Accept': 'application/vnd.github.diff'})
        return resp.content if self._boolean(resp, 200, 404) else b''
    def is_merged(self):
        """Check to see if the pull request was merged.
        :returns: bool
        """
        # trust the cached flag when set; otherwise ask the merge endpoint
        # (204 = merged, 404 = not merged)
        if self.merged:
            return self.merged
        url = self._build_url('merge', base_url=self._api)
        return self._boolean(self._get(url), 204, 404)
    def issue(self):
        """Retrieve the issue associated with this pull request.
        :returns: :class:`~github3.issues.Issue`
        """
        json = self._json(self._get(self.issue_url), 200)
        return self._instance_or_null(Issue, json)
    def commits(self, number=-1, etag=None):
        r"""Iterate over the commits on this pull request.
        :param int number: (optional), number of commits to return. Default:
            -1 returns all available commits.
        :param str etag: (optional), ETag from a previous request to the same
            endpoint
        :returns: generator of
            :class:`RepoCommit <github3.repos.commit.RepoCommit>`\ s
        """
        url = self._build_url('commits', base_url=self._api)
        return self._iter(int(number), url, RepoCommit, etag=etag)
    def files(self, number=-1, etag=None):
        r"""Iterate over the files associated with this pull request.
        :param int number: (optional), number of files to return. Default:
            -1 returns all available files.
        :param str etag: (optional), ETag from a previous request to the same
            endpoint
        :returns: generator of :class:`PullFile <PullFile>`\ s
        """
        url = self._build_url('files', base_url=self._api)
        return self._iter(int(number), url, PullFile, etag=etag)
    def issue_comments(self, number=-1, etag=None):
        r"""Iterate over the issue comments on this pull request.
        :param int number: (optional), number of comments to return. Default:
            -1 returns all available comments.
        :param str etag: (optional), ETag from a previous request to the same
            endpoint
        :returns: generator of :class:`IssueComment <IssueComment>`\ s
        """
        # prefer the comments href from _links; fall back to deriving the
        # issues endpoint from the pulls API url
        comments = self.links.get('comments', {})
        url = comments.get('href')
        if not url:
            url = self._build_url(
                'comments', base_url=self._api.replace('pulls', 'issues')
            )
        return self._iter(int(number), url, IssueComment, etag=etag)
    @requires_auth
    def merge(self, commit_message=None, sha=None, squash=False):
        """Merge this pull request.
        :param str commit_message: (optional), message to be used for the
            merge commit
        :param str sha: (optional), SHA that pull request head must match
            to merge.
        :param bool squash: (optional), commit a single commit to the
            head branch.
        :returns: bool
        """
        # squash is always sent; sha/commit_message only when provided
        parameters = {'squash': squash}
        if sha:
            parameters['sha'] = sha
        if commit_message is not None:
            parameters['commit_message'] = commit_message
        url = self._build_url('merge', base_url=self._api)
        json = self._json(self._put(url, data=dumps(parameters)), 200)
        if not json:
            return False
        return json['merged']
    def patch(self):
        """Return the patch.
        :returns: bytestring representation of the patch
        """
        resp = self._get(self._api,
                         headers={'Accept': 'application/vnd.github.patch'})
        return resp.content if self._boolean(resp, 200, 404) else b''
    @requires_auth
    def reopen(self):
        """Re-open a closed Pull Request.
        :returns: bool
        """
        return self.update(self.title, self.body, 'open')
    def review_comments(self, number=-1, etag=None):
        r"""Iterate over the review comments on this pull request.
        :param int number: (optional), number of comments to return. Default:
            -1 returns all available comments.
        :param str etag: (optional), ETag from a previous request to the same
            endpoint
        :returns: generator of :class:`ReviewComment <ReviewComment>`\ s
        """
        url = self._build_url('comments', base_url=self._api)
        return self._iter(int(number), url, ReviewComment, etag=etag)
    def reviews(self, number=-1, etag=None):
        r"""Iterate over the reviews associated with this pull request.
        :param int number: (optional), number of reviews to return. Default:
            -1 returns all available files.
        :param str etag: (optional), ETag from a previous request to the same
            endpoint
        :returns: generator of :class:`PullReview <PullReview>`\ s
        """
        # Accept the preview headers for reviews
        headers = {'Accept': 'application/vnd.github.black-cat-preview+json'}
        url = self._build_url('reviews', base_url=self._api)
        return self._iter(int(number), url, PullReview, etag=etag,
                          headers=headers)
    @requires_auth
    def update(self, title=None, body=None, state=None):
        """Update this pull request.
        :param str title: (optional), title of the pull
        :param str body: (optional), body of the pull request
        :param str state: (optional), ('open', 'closed')
        :returns: bool
        """
        # only send the fields the caller actually supplied
        data = {'title': title, 'body': body, 'state': state}
        json = None
        self._remove_none(data)
        if data:
            json = self._json(self._patch(self._api, data=dumps(data)), 200)
        if json:
            self._update_attributes(json)
            return True
        return False
class PullReview(models.GitHubCore):
    """The :class:`PullReview <PullReview>` object.

    A single review (approve / request-changes / comment) on a pull request.

    See also: https://developer.github.com/v3/pulls/reviews/
    """

    def _update_attributes(self, preview):
        """Populate the attributes from the review JSON payload."""
        get = self._get_attribute
        #: ID of the review
        self.id = get(preview, 'id')
        #: SHA of the commit the review is on
        self.commit_id = get(preview, 'commit_id')
        #: :class:`User <github3.users.User>` who made the comment
        self.user = self._class_attribute(preview, 'user', users.ShortUser, self)
        #: State of the review
        self.state = get(preview, 'state')
        #: datetime object representing when the event was created.
        self.created_at = self._strptime_attribute(preview, 'created_at')
        #: Body text of the review
        self.body = get(preview, 'body')
        #: API URL for the Pull Request
        self.pull_request_url = get(preview, 'pull_request_url')

    def _repr(self):
        return '<Pull Request Review [{}]>'.format(self.id)
class ReviewComment(models.BaseComment):
    """A comment made on the diff of a pull request.

    Two comment instances can be checked like so::

        c1 == c2
        c1 != c2

    And is equivalent to::

        c1.id == c2.id
        c1.id != c2.id

    See also: http://developer.github.com/v3/pulls/comments/
    """

    def _update_attributes(self, comment):
        super(ReviewComment, self)._update_attributes(comment)
        get = self._get_attribute
        #: :class:`User <github3.users.User>` who made the comment
        self.user = self._class_attribute(comment, 'user', users.ShortUser,
                                          self)
        #: Original position inside the file
        self.original_position = get(comment, 'original_position')
        #: Path to the file
        self.path = get(comment, 'path')
        #: Position within the commit
        self.position = get(comment, 'position')
        #: SHA of the commit the comment is on
        self.commit_id = get(comment, 'commit_id')
        #: The diff hunk
        self.diff_hunk = get(comment, 'diff_hunk')
        #: Original commit SHA
        self.original_commit_id = get(comment, 'original_commit_id')
        #: API URL for the Pull Request
        self.pull_request_url = get(comment, 'pull_request_url')

    def _repr(self):
        return '<Review Comment [{0}]>'.format(self.user.login)

    @requires_auth
    def reply(self, body):
        """Reply to this review comment with a new review comment.

        :param str body: The text of the comment.
        :returns: The created review comment.
        :rtype: :class:`~github3.pulls.ReviewComment`
        """
        url = self._build_url('comments', base_url=self.pull_request_url)
        # the comment id is the final path segment of this comment's API URL
        in_reply_to = self._api.rsplit('/', 1)[-1]
        json = self._json(self._post(url, data={
            'body': body, 'in_reply_to': in_reply_to
        }), 201)
        return self._instance_or_null(ReviewComment, json)
| |
#!/usr/bin/python
'''
creates node in multi scaffold graph
'''
### imports ###
import sys
import os
import logging
import networkx as nx
import numpy as np
import subprocess
from operator import itemgetter
# hack for metis.
#os.environ['METIS_DLL'] = '/usr/local/lib/libmetis.so'
#import metis
import helpers.io as io
import optimize.orient as orient
import helpers.graphs as graphs
import helpers.misc as misc
logging.basicConfig(level=logging.DEBUG, format='[%(levelname)s] (%(threadName)-10s) %(message)s', )
### definitions ###
# components larger than this trigger the next, deeper level of decomposition.
DECOMP_BOUND = 500
# sink for child-process stdout; NOTE(review): opened for the life of the
# module and never closed.
EXT_OUT = open("/dev/null")
# external decomposition binaries, located in bin/ two directories above
# this file.
DECOMP_0_PROG = "%s/bin/decomp0" % '/'.join(os.path.realpath(__file__).split("/")[0:-2])
DECOMP_1_PROG = "%s/bin/decomp1" % '/'.join(os.path.realpath(__file__).split("/")[0:-2])
### functions ###
def test_bi():
    ''' creates a bi-connected component '''
    # same edge set as before, added in one call.
    edges = [
        (0, 1), (0, 3), (0, 2), (1, 3), (3, 2), (2, 4), (4, 5),
        (5, 2), (3, 7), (6, 7), (6, 8), (9, 8), (9, 7),
    ]
    G = nx.Graph()
    G.add_edges_from(edges)
    return G
def test_tri():
    ''' creates a tri-connected component '''
    # three dense gadgets plus connector nodes 0 and 9, added in one call.
    edges = [
        (1, 2), (1, 3), (2, 4), (3, 4), (5, 4), (5, 6), (5, 7),
        (6, 8), (7, 8), (2, 6), (3, 7),
        (10, 11), (10, 12), (13, 12), (13, 11), (13, 14), (15, 14),
        (16, 14), (16, 17), (15, 17), (11, 15), (12, 16),
        (18, 19), (18, 20), (21, 20), (21, 19), (21, 22), (23, 22),
        (24, 22), (24, 25), (23, 25), (23, 19), (20, 24),
        (0, 1), (0, 10), (0, 18),
        (9, 8), (9, 17), (9, 25),
    ]
    G = nx.Graph()
    G.add_edges_from(edges)
    return G
def make_subg(G, active):
    ''' returns the subgraph induced on *active*, relabelled 0..n-1,
    plus forward (original -> new) and reverse (new -> original) lookups '''
    nodes = list(active)
    NG = nx.Graph()
    nluf = dict()
    nlur = dict()
    for idx, node in enumerate(nodes):
        NG.add_node(idx)
        nluf[node] = idx
        nlur[idx] = node
    # copy only the edges whose endpoints are both active.
    for p, q in G.edges():
        if p in active and q in active:
            NG.add_edge(nluf[p], nluf[q])
    return NG, nluf, nlur
def load_decomp(in_file, nlu, prefix):
    ''' load decomposition results produced by the external tools '''
    # slurp the file and tokenize every line once.
    fin = open(in_file, "rb")
    try:
        tokens = [line.strip().split() for line in fin.readlines()]
    finally:
        fin.close()
    DG = nx.DiGraph()
    # first pass: node records ("N <id> <members...>").
    for token in tokens:
        if token[0] != "N":
            continue
        comp = frozenset(nlu[int(x)] for x in token[2::])
        idx = "%s_%i" % (prefix, int(token[1]))
        DG.add_node(idx, comp=comp, graph=None)
    # second pass: edge records ("E <src> <dst> <cut...>").
    for token in tokens:
        if token[0] != "E":
            continue
        s = "%s_%i" % (prefix, int(token[1]))
        t = "%s_%i" % (prefix, int(token[2]))
        cut = frozenset(nlu[int(x)] for x in token[3::])
        DG.add_edge(s, t, cut=cut)
    return DG
def write_graph(G, out_file):
    ''' writes graph in simple format'''
    # header (node and edge counts) followed by one edge per line.
    lines = ["%i\t%i\n" % (G.number_of_nodes(), G.number_of_edges())]
    lines.extend("%i\t%i\n" % (p, q) for p, q in G.edges())
    with open(out_file, "wb") as fout:
        fout.writelines(lines)
def decomp0(G, tmp1_file, tmp2_file, msize=None):
    ''' level-0 decomposition: one node per connected component '''
    DC = nx.DiGraph()
    for idx, comp in enumerate(nx.connected_components(G)):
        comp = frozenset(comp)
        # recurse into components that are still too large.
        dg = None
        if len(comp) > DECOMP_BOUND:
            dg = decomp1(G, comp, tmp1_file, tmp2_file, msize=msize)
        DC.add_node("con_%i" % idx, comp=comp, graph=dg)
    return DC
def decomp1(G, comp, tmp1_file, tmp2_file, msize=None):
    ''' bi-connected decomposition via the external decomp0 binary '''
    # project the component onto a relabelled subgraph and dump it to disk.
    subg, nluf, nlur = make_subg(G, comp)
    write_graph(subg, tmp1_file)
    # run the external biconnected decomposition tool.
    if subprocess.call([DECOMP_0_PROG, tmp1_file, tmp2_file], stdout=EXT_OUT) != 0:
        logging.error("error in biconnected decomposition")
        sys.exit(1)
    DC = load_decomp(tmp2_file, nlur, "bicon")
    # recurse into any resulting component that is still too large.
    for n in DC.nodes():
        comp = DC.node[n]['comp']
        dg = None
        if len(comp) > DECOMP_BOUND:
            dg = decomp2(G, comp, tmp1_file, tmp2_file, msize=msize)
        DC.node[n]['graph'] = dg
    return DC
def decomp2(G, comp, tmp1_file, tmp2_file, msize=None):
    ''' tri-connected decomposition via the external decomp1 binary

    Projects *comp* onto a relabelled subgraph, runs the external tool on
    it and loads the result. When the largest resulting component still
    exceeds *msize*, the edge-removal heuristic is applied to G and
    G.graph['redo'] is set so the caller knows to start over.
    '''
    # create active subgraph and serialize it to disk for the tool.
    subg, nluf, nlur = make_subg(G, comp)
    write_graph(subg, tmp1_file)
    # execute decomposition.
    cmd = [DECOMP_1_PROG, tmp1_file, tmp2_file]
    if subprocess.call(cmd, stdout=EXT_OUT) != 0:
        logging.error("error in triconnected decomposition")
        sys.exit(1)
    # create decomposition graph.
    DC = load_decomp(tmp2_file, nlur, "tricon")
    # find (and report) the largest component.
    largest = -1
    lcomp = None
    for n in DC.nodes():
        if len(DC.node[n]['comp']) > largest:
            largest = len(DC.node[n]['comp'])
            lcomp = DC.node[n]['comp']
    # lazy %-style args instead of eager formatting.
    logging.info("largest component: %d", largest)
    # engage heuristic if necessary; guard against an empty decomposition,
    # which previously crashed on len(None).
    if msize is not None and lcomp is not None and len(lcomp) > msize:
        # break the graph more.
        _heuristic(G, comp)
        # note heuristic was applied so the caller re-runs decomposition.
        G.graph['redo'] = True
    return DC
def _heuristic(G, comp):
    ''' breaks component by increasing bundle size at dense cores

    Mutates G in place: around the most highly connected nodes of *comp*,
    finds the minimum non-zero bundle count, raises the threshold by one,
    and removes edges left with no supporting bundles.
    '''
    # build subgraph over the component.
    subg = G.subgraph(comp)
    #G.remove_edges_from(subg.edges())
    # break using balanced cuts.
    #(edgecuts, parts) = metis.part_graph(subg, 2)
    #print edgecuts
    #sys.exit()
    # rank nodes by degree.
    nranks = dict()
    for n in subg.nodes():
        nranks[n] = len(subg.neighbors(n))
    nranks = sorted(nranks.items(), key=itemgetter(1))
    # take the top 5% most connected nodes (comment previously said 10%).
    tcut = int(len(nranks) * .05) + 1
    totrim = set([x[0] for x in nranks[-tcut::]])
    # grow the set to include direct neighbours of the dense core.
    for n in list(totrim):
        for q in subg.neighbors(n):
            totrim.add(q)
    totrim = list(totrim)
    # restrict further to the dense-core subgraph.
    subg2 = subg.subgraph(totrim)
    # find the minimum non-zero bundle count over the 4 orientation slots.
    mbs = 10000
    for p, q in subg2.edges():
        for i in range(4):
            b = subg2[p][q]['bcnts'][i]
            if b != 0 and b < mbs:
                mbs = b
    # raise the threshold by 1 so the weakest bundles get zeroed.
    mbs += 1
    # check filter
    toremove = list()
    for p, q in subg2.edges():
        # zero out weak bundles.
        for i in range(4):
            if subg2[p][q]['bcnts'][i] < mbs:
                subg2[p][q]['bcnts'][i] = 0
        # check for edge removal: no orientation has support left.
        if sum(subg2[p][q]['bcnts']) < 1:
            toremove.append((p,q))
    # remove bad edges from the original graph.
    G.remove_edges_from(toremove)
    logging.info("removed %i edges in heuristic mode" % len(toremove))
def _compact_inner(DG):
    ''' merges components if possible

    Recursively compacts nested decomposition graphs first, then
    repeatedly merges parent/child pairs whose combined component stays
    within DECOMP_BOUND. Mutates DG in place.
    '''
    # recursive call to compact nested decomposition graphs first.
    for n in DG.nodes():
        if DG.node[n]['graph'] != None:
            _compact_inner(DG.node[n]['graph'])
    # repeat until a full pass performs no merges.
    while 1 == 1:
        # loop over each edge and check for a mergeable pair.
        merged = 0
        for p, q in DG.edges():
            # nodes may have been removed by an earlier merge this pass.
            if DG.has_node(p) == False or DG.has_node(q) == False:
                continue
            # don't compact a node with, or adjacent to, a subgraph.
            if DG.node[p]['graph'] != None or DG.node[q]['graph'] != None:
                continue
            # get both components.
            pcomp = set(DG.node[p]['comp'])
            qcomp = set(DG.node[q]['comp'])
            # only merge when the union stays within the bound.
            if len(pcomp) + len(qcomp) > DECOMP_BOUND:
                continue
            # merge the child into the parent.
            DG.node[p]['comp'] = frozenset(pcomp.union(qcomp))
            # reattach the child's children to the parent, recomputing cuts.
            grandkids = DG.successors(q)
            for grandkid in grandkids:
                cut = DG.node[p]['comp'].intersection(DG.node[grandkid]['comp'])
                DG.add_edge(p, grandkid, cut=cut)
            # remove node; its edges go with it.
            DG.remove_node(q)
            merged += 1
        # stop once stable.
        if merged == 0:
            break
def _compact_outter(DG):
    ''' merges singleton components if possible

    Collects isolated, small, non-recursed nodes and greedily packs their
    components together up to DECOMP_BOUND, removing the absorbed nodes.
    Mutates DG in place.
    '''
    # identify singletons eligible for packing.
    singles = list()
    for n in DG.nodes():
        comp = DG.node[n]['comp']
        # skip connected, oversized and recursed nodes.
        if len(DG.neighbors(n)) > 0: continue
        if len(comp) > DECOMP_BOUND: continue
        if DG.node[n]['graph'] != None: continue
        singles.append((n, len(comp)))
    # nothing to pack: previously this fell through to an empty pop()
    # and raised IndexError.
    if len(singles) == 0:
        return
    # sort descending by size so pop() always yields the smallest remaining.
    singles = sorted(singles, key=itemgetter(1), reverse=True)
    # seed the current keeper with the smallest singleton.
    n, comp = singles.pop()
    curname = n
    DG.node[curname]['comp'] = set(DG.node[curname]['comp'])
    while len(singles) > 0:
        # pop the next smallest.
        n, lcomp = singles.pop()
        curset = DG.node[curname]['comp']
        nowset = set(DG.node[n]['comp'])
        if len(curset) + len(nowset) < DECOMP_BOUND:
            # absorb into the current keeper and drop the node.
            DG.node[curname]['comp'] = DG.node[curname]['comp'].union(nowset)
            DG.remove_node(n)
        else:
            # freeze the filled keeper and start a new one.
            DG.node[curname]['comp'] = frozenset(DG.node[curname]['comp'])
            DG.node[curname]['graph'] = None
            curname = n
            DG.node[curname]['comp'] = set(DG.node[curname]['comp'])
    # freeze the last keeper.
    DG.node[curname]['comp'] = frozenset(DG.node[curname]['comp'])
    DG.node[curname]['graph'] = None
def _validate_comp(RG):
    ''' validates connection at a certain level

    Recursively checks that every parent/child edge's stored cut equals
    the intersection of the two components; exits the process on failure.
    '''
    # use topological sort to find root (first node in topological order).
    root = nx.topological_sort(RG)[0]
    # visit children before parents.
    for n in nx.dfs_postorder_nodes(RG, source=root):
        # dive into any nested decomposition graph first.
        if RG.node[n]['graph'] != None:
            _validate_comp(RG.node[n]['graph'])
        # check for parent.
        parent = RG.predecessors(n)
        # a parentless node must be the root itself.
        if len(parent) == 0:
            if n != root:
                logging.error("bad root, no cookie")
                sys.exit()
            continue
        parent = parent[0]
        # get components.
        pcomp = RG.node[parent]['comp']
        ccomp = RG.node[n]['comp']
        # the stored cut must equal the component intersection.
        cutGIVEN = RG[parent][n]['cut']
        cutTEST = pcomp.intersection(ccomp)
        # test cut.
        if cutGIVEN != cutTEST:
            print "bad cut"
            print cutGIVEN, cutTEST
            print n, parent
            sys.exit()
def decompose(paths, args):
    """ runs decomposition

    Loads the bundle graph, repeatedly decomposes it (re-running whenever
    the heuristic in decomp2 modified the graph), compacts and validates
    the result, then writes both graphs back to disk.

    Parameters
    ----------
    paths.bundle_file : file
    paths.tmp1_file : file
    paths.tmp2_file : file
    paths.decomp_file : file
    args.msize : integer
    """
    # load the bundle graph.
    logging.info("loading info")
    BG = nx.read_gpickle(paths.bundle_file)
    #BG = test_bi()
    #BG = test_tri()
    # run decomposition until no heuristic rewrite happened.
    BG.graph['redo'] = False
    while 1 == 1:
        # decomposition.
        DC = decomp0(BG, paths.tmp1_file, paths.tmp2_file, msize=args.msize)
        # stop when msize is disabled or no heuristic rewrite occurred.
        if args.msize == None or BG.graph['redo'] == False:
            break
        elif BG.graph['redo'] == True:
            BG.graph['redo'] = False
    # remove temp files.
    if os.path.isfile(paths.tmp1_file) == True:
        subprocess.call(["rm","-f",paths.tmp1_file])
    if os.path.isfile(paths.tmp2_file) == True:
        subprocess.call(["rm","-f",paths.tmp2_file])
    # compact decomposition.
    _compact_outter(DC)
    for subcc in nx.weakly_connected_component_subgraphs(DC):
        # NOTE(review): this passes DC, not subcc, so the whole graph is
        # re-compacted once per component — verify whether subcc was intended.
        _compact_inner(DC)
    # verify decomposition.
    for subcc in nx.weakly_connected_component_subgraphs(DC):
        # check its consistency.
        _validate_comp(subcc)
    # write to disk.
    nx.write_gpickle(DC, paths.decomp_file)
    nx.write_gpickle(BG, paths.bundle_file)
| |
"""Loads configurations from .yaml files and expands environment variables.
"""
import copy
import collections
import glob
import math
import os
import sys
import yaml
import toolz as tz
class CmdNotFound(Exception):
    """Raised when a configured program cannot be found on disk or $PATH."""
    pass
# ## Generalized configuration
def update_w_custom(config, lane_info):
    """Update the configuration for this lane if a custom analysis is specified.

    Returns a deep copy of *config*; the input is left untouched.
    """
    name_remaps = {"variant": ["SNP calling", "variant", "variant2"],
                   "SNP calling": ["SNP calling", "variant", "variant2"],
                   "variant2": ["SNP calling", "variant", "variant2"]}
    config = copy.deepcopy(config)
    base_name = lane_info.get("analysis")
    config.setdefault("algorithm", {})
    # fold in any custom algorithm settings registered for this analysis.
    for analysis_type in name_remaps.get(base_name, [base_name]):
        custom = config.get("custom_algorithms", {}).get(analysis_type)
        if not custom:
            continue
        for key, val in custom.iteritems():
            config["algorithm"][key] = val
    # lane-level algorithm settings win over the custom ones.
    for key, val in lane_info.get("algorithm", {}).iteritems():
        config["algorithm"][key] = val
    # lane-level resource settings are merged per program.
    for prog, pkvs in lane_info.get("resources", {}).iteritems():
        prog_resources = config["resources"].setdefault(prog, {})
        for key, val in pkvs.iteritems():
            prog_resources[key] = val
    return config
# ## Retrieval functions
def load_system_config(config_file=None, work_dir=None):
    """Load bcbio_system.yaml configuration file, handling standard defaults.

    Looks for the configuration file in the default location within the
    final base directory of a standard installation. Handles both standard
    installs (galaxy/bcbio_system.yaml) and docker installs
    (config/bcbio_system.yaml).
    """
    docker_config = _get_docker_config()
    if config_file is None:
        config_file = "bcbio_system.yaml"
    # fall back to the galaxy directory inside the installation.
    if not os.path.exists(config_file):
        base_dir = get_base_installdir()
        test_config = os.path.join(base_dir, "galaxy", config_file)
        if not os.path.exists(test_config):
            raise ValueError("Could not find input system configuration file %s, "
                             "including inside standard directory %s" %
                             (config_file, os.path.join(base_dir, "galaxy")))
        config_file = test_config
    config = load_config(config_file)
    # merge docker-internal settings into the host configuration.
    if docker_config:
        assert work_dir is not None, "Need working directory to merge docker config"
        merged_name = "%s-merged%s" % os.path.splitext(os.path.basename(config_file))
        config_file = os.path.join(work_dir, merged_name)
        config = _merge_system_configs(config, docker_config, config_file)
    config.setdefault("algorithm", {})
    config["bcbio_system"] = config_file
    return config, config_file
def get_base_installdir():
    """Return the installation root: three directories above the interpreter."""
    exe_dir = os.path.realpath(sys.executable)
    up_three = os.path.join(exe_dir, os.pardir, os.pardir, os.pardir)
    return os.path.normpath(up_three)
def _merge_system_configs(host_config, container_config, out_file=None):
    """Create a merged system configuration from external and internal specification.

    Starts from a deep copy of container_config and selectively overlays
    values from host_config; optionally serializes the merged result to
    out_file. Returns the merged dict.
    """
    out = copy.deepcopy(container_config)
    for k, v in host_config.iteritems():
        # galaxy_config is taken from the host wholesale.
        if k in set(["galaxy_config"]):
            out[k] = v
        elif k == "resources":
            for pname, resources in v.iteritems():
                # NOTE(review): when resources is not a dict but pname is
                # already in out[k], the else branch calls iteritems() on a
                # non-dict — confirm inputs can never hit that combination.
                if not isinstance(resources, dict) and pname not in out[k]:
                    out[k][pname] = resources
                else:
                    for rname, rval in resources.iteritems():
                        # only selected resource keys / programs are overlaid.
                        if (rname in set(["cores", "jvm_opts", "memory"])
                              or pname in set(["gatk", "mutect"])):
                            if pname not in out[k]:
                                out[k][pname] = {}
                            out[k][pname][rname] = rval
    # Ensure final file is relocatable by mapping back to reference directory
    if "bcbio_system" in out and ("galaxy_config" not in out or not os.path.isabs(out["galaxy_config"])):
        out["galaxy_config"] = os.path.normpath(os.path.join(os.path.dirname(out["bcbio_system"]),
                                                             os.pardir, "galaxy",
                                                             "universe_wsgi.ini"))
    if out_file:
        with open(out_file, "w") as out_handle:
            yaml.safe_dump(out, out_handle, default_flow_style=False, allow_unicode=False)
    return out
def _get_docker_config():
    """Return the docker-install system configuration, or None if absent."""
    docker_configfile = os.path.join(get_base_installdir(), "config",
                                     "bcbio_system.yaml")
    if not os.path.exists(docker_configfile):
        return None
    return load_config(docker_configfile)
def merge_resources(args):
    """Merge docker local resources and global resource specification in a set of arguments.

    Finds the `data` object within the passed arguments and updates its
    resources from a local docker configuration if one is present.
    """
    docker_config = _get_docker_config()
    if not docker_config:
        # no docker install: nothing to merge.
        return args

    def _update_resources(config):
        config["resources"] = _merge_system_configs(config, docker_config)["resources"]
        return config

    return _update_config(args, _update_resources)
def load_config(config_file):
    """Load YAML config file, replacing environmental variables.

    Returns the parsed dict with path-like values expanded and resource
    names lower-cased for back-compatibility.
    """
    with open(config_file) as in_handle:
        # NOTE(review): yaml.load without an explicit SafeLoader can execute
        # arbitrary YAML tags; prefer yaml.safe_load if configuration files
        # can ever come from untrusted sources.
        config = yaml.load(in_handle)
    config = _expand_paths(config)
    # lowercase resource names, the preferred way to specify, for back-compatibility
    newr = {}
    for k, v in config["resources"].iteritems():
        if k.lower() != k:
            newr[k.lower()] = v
    config["resources"].update(newr)
    return config
def _expand_paths(config):
    """Recursively expand environment variables in every leaf value."""
    for field, setting in config.items():
        if isinstance(setting, dict):
            # descend into nested sections.
            config[field] = _expand_paths(setting)
        else:
            config[field] = expand_path(setting)
    return config
def expand_path(path):
    """ Combines os.path.expandvars with replacing ~ with $HOME.

    Values without string methods (None, numbers) are returned unchanged.
    """
    try:
        home_expanded = path.replace("~", "$HOME")
    except AttributeError:
        # not string-like; leave untouched.
        return path
    return os.path.expandvars(home_expanded)
def get_resources(name, config):
    """Retrieve resources for a program, pulling from multiple config sources.

    Falls back to the "default" resource section, then to an empty dict.
    """
    fallback = tz.get_in(["resources", "default"], config, {})
    return tz.get_in(["resources", name], config, fallback)
def get_program(name, config, ptype="cmd", default=None):
    """Retrieve program information from the configuration.

    This handles back-compatible location specification in input YAML.
    The preferred location for program information is in `resources`,
    but the older `program` tag is also supported.
    """
    try:
        pconfig = config.get("resources", {})[name]
    except KeyError:
        # fall back to the legacy top-level "program" section.
        pconfig = {}
        old_config = config.get("program", {}).get(name, None)
        if old_config:
            for key in ["dir", "cmd"]:
                if key not in pconfig:
                    pconfig[key] = old_config
    if ptype == "cmd":
        return _get_program_cmd(name, pconfig, default)
    if ptype == "dir":
        return _get_program_dir(name, pconfig)
    raise ValueError("Don't understand program type: %s" % ptype)
def _get_check_program_cmd(fn):
    """Decorator: verify the command returned by *fn* is an executable file.

    Checks the returned path directly, then each directory on $PATH;
    raises CmdNotFound when no executable match is found.
    """
    def wrap(name, config, default):
        program = expand_path(fn(name, config, default))
        # executable regular-file predicate.
        is_ok = lambda f: os.path.isfile(f) and os.access(f, os.X_OK)
        if is_ok(program): return program
        for adir in os.environ['PATH'].split(":"):
            if is_ok(os.path.join(adir, program)):
                return os.path.join(adir, program)
        else:
            # for/else: loop exhausted without returning.
            # NOTE(review): fn.func_name is Python 2 only (fn.__name__ in 3).
            raise CmdNotFound(" ".join(map(repr, (fn.func_name, name, config, default))))
    return wrap
@_get_check_program_cmd
def _get_program_cmd(name, config, default):
    """Retrieve commandline of a program.

    Resolution order: explicit string config, "cmd" entry, supplied
    default, then the bare program name.
    """
    if config is None:
        return name
    if isinstance(config, basestring):
        return config
    if "cmd" in config:
        return config["cmd"]
    return default if default is not None else name
def _get_program_dir(name, config):
    """Retrieve directory for a program (local installs/java jars).
    """
    if isinstance(config, basestring):
        return config
    if config is not None and "dir" in config:
        return expand_path(config["dir"])
    # covers both a missing config and one without a "dir" entry.
    raise ValueError("Could not find directory in config for %s" % name)
def get_jar(base_name, dname):
    """Retrieve a jar in the provided directory

    Raises ValueError when zero or multiple matching jars are found.
    """
    pattern = os.path.join(expand_path(dname), "%s*.jar" % base_name)
    jars = glob.glob(pattern)
    if len(jars) == 1:
        return jars[0]
    if len(jars) > 1:
        raise ValueError("Found multiple jars for %s in %s. Need single jar: %s" %
                         (base_name, dname, jars))
    raise ValueError("Could not find java jar %s in %s" %
                     (base_name, dname))
# ## Retrieval and update to configuration from arguments
def is_std_config_arg(x):
    """True when x looks like a bare config dict rather than a sample dict."""
    if not isinstance(x, dict):
        return False
    return "algorithm" in x and "resources" in x and "files" not in x
def is_nested_config_arg(x):
    """True when x is a dict wrapping a std config dict under "config"."""
    if not isinstance(x, dict):
        return False
    return "config" in x and is_std_config_arg(x["config"])
def get_algorithm_config(xs):
    """Flexibly extract algorithm configuration for a sample from any function arguments.
    """
    for item in xs:
        # bare configuration dictionary.
        if is_std_config_arg(item):
            return item["algorithm"]
        # data dictionary wrapping a configuration.
        if is_nested_config_arg(item):
            return item["config"]["algorithm"]
        # sequence whose first element wraps a configuration.
        if isinstance(item, (list, tuple)) and is_nested_config_arg(item[0]):
            return item[0]["config"]["algorithm"]
    raise ValueError("Did not find algorithm configuration in items: {0}"
                     .format(xs))
def get_dataarg(args):
    """Retrieve the world 'data' argument from a set of input parameters.

    Returns (index, data-like dict); bare configs are wrapped under
    the "config" key.
    """
    for idx, item in enumerate(args):
        if is_nested_config_arg(item):
            return idx, item
        if is_std_config_arg(item):
            return idx, {"config": item}
        if isinstance(item, (list, tuple)) and is_nested_config_arg(item[0]):
            return idx, item[0]
    raise ValueError("Did not find configuration or data object in arguments: %s" % args)
def add_cores_to_config(args, cores_per_job, parallel=None):
    """Add information about available cores for a job to configuration.

    Ugly hack to update core information in a configuration dictionary.
    Returns a copy of *args* with algorithm.num_cores set on the embedded
    config (and parallel metadata attached when supplied).
    """
    def _update_cores(config):
        config["algorithm"]["num_cores"] = int(cores_per_job)
        if parallel:
            # drop the "view" entry — presumably not serializable; confirm.
            parallel.pop("view", None)
            config["parallel"] = parallel
        return config
    return _update_config(args, _update_cores)
def _update_config(args, update_fn):
    """Update configuration, nested in argument list, with the provided update function.

    Returns a copy of *args* where the first configuration-bearing element
    has been replaced by update_fn applied to a deep copy of its config.

    :raises ValueError: when no configuration is found in *args*.
    """
    new_i = None
    # locate the first argument carrying configuration.
    for i, arg in enumerate(args):
        if (is_std_config_arg(arg) or is_nested_config_arg(arg) or
              (isinstance(arg, (list, tuple)) and is_nested_config_arg(arg[0]))):
            new_i = i
            break
    if new_i is None:
        raise ValueError("Could not find configuration in args: %s" % str(args))
    new_arg = args[new_i]
    if is_nested_config_arg(new_arg):
        new_arg["config"] = update_fn(copy.deepcopy(new_arg["config"]))
    elif is_std_config_arg(new_arg):
        new_arg = update_fn(copy.deepcopy(new_arg))
    # fix: previously tested the leaked loop variable `arg`, which only
    # worked because the break left it aliased to args[new_i].
    elif isinstance(new_arg, (list, tuple)) and is_nested_config_arg(new_arg[0]):
        new_arg_first = new_arg[0]
        new_arg_first["config"] = update_fn(copy.deepcopy(new_arg_first["config"]))
        new_arg = [new_arg_first] + new_arg[1:]
    else:
        raise ValueError("Unexpected configuration dictionary: %s" % new_arg)
    args = list(args)[:]
    args[new_i] = new_arg
    return args
def convert_to_bytes(mem_str):
    """Convert a memory specification, potentially with an M or G suffix,
    into a plain integer.

    NOTE(review): despite the name, "G" multiplies by 1024**2 and "M" by
    1024 — i.e. the result is in kilobytes, not bytes. Callers depend on
    these multipliers, so they are preserved as-is.
    """
    text = str(mem_str)
    suffix = text[-1].upper()
    if suffix == "G":
        return int(text[:-1]) * 1024 * 1024
    if suffix == "M":
        return int(text[:-1]) * 1024
    # no recognised unit suffix: treat as a bare integer.
    return int(mem_str)
def adjust_memory(val, magnitude, direction="increase", out_modifier=""):
    """Adjust a memory string based on the number of cores utilized.

    :param val: amount with a one-letter unit suffix, e.g. "750M", "2G"
    :param magnitude: scaling factor (typically the core count)
    :param direction: "increase" or "decrease"
    :param out_modifier: optionally force the output unit ("M" or "G")
    :returns: adjusted memory specification string
    :raises ValueError: on a decrease that cannot be represented
    """
    modifier = val[-1:]
    amount = int(val[:-1])
    if direction == "decrease":
        new_amount = amount / float(magnitude)
        # dealing with a specifier like 1G: need to scale down to Mb
        if new_amount < 1:
            if modifier.upper().startswith("G"):
                new_amount = (amount * 1024) / magnitude
                modifier = "M" + modifier[1:]
            else:
                raise ValueError("Unexpected decrease in memory: %s by %s" % (val, magnitude))
        amount = int(new_amount)
    elif direction == "increase":
        # for increases with multiple cores, leave small percentage of
        # memory for system to maintain process running resource and
        # avoid OOM killers
        adjuster = 0.91
        amount = int(math.ceil(amount * (adjuster * magnitude)))
    # convert units when the caller requests a specific output modifier.
    if out_modifier.upper().startswith("G") and modifier.upper().startswith("M"):
        modifier = out_modifier
        amount = int(math.floor(amount / 1024.0))
    if out_modifier.upper().startswith("M") and modifier.upper().startswith("G"):
        modifier = out_modifier
        # bug fix: previously assigned amount * 1024 to `modifier`,
        # producing strings like "22048"; scale the amount instead.
        amount = int(amount * 1024)
    return "{amount}{modifier}".format(amount=amount, modifier=modifier)
def adjust_opts(in_opts, config):
    """Establish JVM opts, adjusting memory for the context if needed.

    This allows using less or more memory for highly parallel or multicore
    supporting processes, respectively.
    """
    memory_adjust = config["algorithm"].get("memory_adjust", {})
    out_opts = []
    for opt in in_opts:
        is_max = opt.startswith("-Xmx")
        is_min_decrease = (opt.startswith("-Xms") and
                           memory_adjust.get("direction") == "decrease")
        # -Xmx is always rescaled; -Xms only when shrinking memory.
        if is_max or is_min_decrease:
            flag = opt[:4]
            new_val = adjust_memory(opt[4:],
                                    memory_adjust.get("magnitude", 1),
                                    memory_adjust.get("direction"))
            opt = "{arg}{val}".format(arg=flag, val=new_val)
        out_opts.append(opt)
    return out_opts
# specific program usage
def use_vqsr(algs):
    """Processing uses GATK's Variant Quality Score Recalibration.
    """
    vqsr_callers = set(["gatk", "gatk-haplotype"])
    vqsr_sample_thresh = 50
    vqsr_supported = collections.defaultdict(int)
    coverage_intervals = set([])
    for alg in algs:
        callers = alg.get("variantcaller", "gatk")
        if isinstance(callers, basestring):
            callers = [callers]
        # skip samples without variant calling or with VQSR disabled.
        if not callers or "vqsr" in alg.get("tools_off", []):
            continue
        for c in callers:
            if c in vqsr_callers:
                vqsr_supported[c] += 1
                coverage_intervals.add(alg.get("coverage_interval", "exome").lower())
    if not vqsr_supported:
        return False
    # VQSR needs whole-genome data or a large enough sample count.
    num_samples = max(vqsr_supported.values())
    return "genome" in coverage_intervals or num_samples >= vqsr_sample_thresh
def use_snpeff(algs):
    """Processing uses snpEff. Avoids memory requirements if not used.
    """
    for alg in algs:
        effects = alg.get("effects", "snpeff")
        # snpEff is in play when it's the effects tool and calling happens.
        if effects == "snpeff" and alg.get("variantcaller"):
            return True
    return False
def use_bcbio_variation_recall(algs):
    """Processing uses bcbio-variation-recall. Avoids core requirement if not used.
    """
    # joint callers that do NOT require bcbio-variation-recall.
    skip_callers = set(["gatk-haplotype-joint", None, False])
    for alg in algs:
        jointcaller = alg.get("jointcaller", [])
        if not isinstance(jointcaller, (tuple, list)):
            jointcaller = [jointcaller]
        if any(caller not in skip_callers for caller in jointcaller):
            return True
    return False
## functions for navigating through the standard galaxy directory of files
def get_rRNA_interval(genome_dir):
    """Return the path of the rRNA interval list under the genome directory."""
    return os.path.join(genome_dir, "rnaseq", "rRNA.interval_list")
def get_transcript_refflat(genome_dir):
    """Return the path of the refFlat transcript annotation under the genome directory."""
    return os.path.join(genome_dir, "rnaseq", "ref-transcripts.refFlat")
def get_rRNA_sequence(genome_dir):
    """Return the path of the rRNA FASTA sequence under the genome directory."""
    return os.path.join(genome_dir, "rnaseq", "rRNA.fa")
| |
from __future__ import absolute_import, division, print_function
import copy
import logging
import six
import numpy as np
from numpy.testing import (assert_equal, assert_array_almost_equal,
assert_array_equal)
from nose.tools import assert_true, raises, assert_raises
from skbeam.core.fitting.base.parameter_data import get_para, e_calibration
from skbeam.core.fitting.xrf_model import (
ModelSpectrum, ParamController, linear_spectrum_fitting,
construct_linear_model, trim, sum_area, compute_escape_peak,
register_strategy, update_parameter_dict, _set_parameter_hint,
fit_pixel_multiprocess_nnls, _STRATEGY_REGISTRY, calculate_area
)
# module-level logging setup for the test run; NOTE(review): filemode has
# no effect without a filename argument — kept as-is.
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                    level=logging.INFO,
                    filemode='w')
def synthetic_spectrum():
    """Build a synthetic spectrum by summing the linear model's columns."""
    param = get_para()
    energies = np.arange(2000)
    pileup_peak = ['Si_Ka1-Si_Ka1', 'Si_Ka1-Ce_La1']
    user_peak = ['user_peak1']
    elemental_lines = (['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'] + pileup_peak +
                       user_peak)
    # only the model matrix is used; element list and areas are discarded.
    _, matv, _ = construct_linear_model(energies, param, elemental_lines,
                                        default_area=1e5)
    # In case that y0 might be zero at certain points.
    return np.sum(matv, 1)
def test_param_controller_fail():
    """Adding an area parameter for an unknown element must raise."""
    PC = ParamController(get_para(), [])
    assert_raises(ValueError, PC._add_area_param, 'Ar')
def test_parameter_controller():
    """Bound types set via update_element_prop must appear in the params."""
    param = get_para()
    pileup_peak = ['Si_Ka1-Si_Ka1', 'Si_Ka1-Ce_La1']
    elemental_lines = ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'] + pileup_peak
    PC = ParamController(param, elemental_lines)
    set_opt = dict(pos='hi', width='lohi', area='hi', ratio='lo')
    PC.update_element_prop(['Fe_K', 'Ce_L', pileup_peak[0]], **set_opt)
    PC.set_strategy('linear')
    pileup_key = 'pileup_' + pileup_peak[0].replace('-', '_')

    def _assert_bounds(k, v):
        """Shared boundary-value checks for one parameter entry."""
        if 'ratio' in k:
            assert_equal(str(v['bound_type']), set_opt['ratio'])
        if 'center' in k:
            assert_equal(str(v['bound_type']), set_opt['pos'])
        elif 'area' in k:
            assert_equal(str(v['bound_type']), set_opt['area'])
        elif 'sigma' in k:
            assert_equal(str(v['bound_type']), set_opt['width'])

    # check boundary values for the updated element and pileup peak.
    for k, v in six.iteritems(PC.params):
        if 'Fe' in k:
            _assert_bounds(k, v)
        elif pileup_key in k:
            _assert_bounds(k, v)
def test_fit():
    """Fitted areas should recover the area used to synthesize the data."""
    param = get_para()
    pileup_peak = ['Si_Ka1-Si_Ka1', 'Si_Ka1-Ce_La1']
    user_peak = ['user_peak1']
    elemental_lines = (['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'] + pileup_peak +
                       user_peak)
    default_area = 1e5
    y0 = synthetic_spectrum()
    x, y = trim(np.arange(2000), y0, 100, 1300)
    MS = ModelSpectrum(param, elemental_lines)
    MS.assemble_models()
    result = MS.model_fit(x, y, weights=1/np.sqrt(y+1), maxfev=200)
    # every fitted area matches the synthetic default within 1e-6.
    for k, v in six.iteritems(result.values):
        if 'area' in k:
            assert_true(abs(v - default_area)/default_area < 1e-6)
    # summed areas cover multiple peaks, so exceed a single peak's area.
    for element in ('Fe_K', 'Ce_L', 'Pt_M'):
        assert_true(sum_area(element, result) > default_area)
    # updating a fresh parameter dict must propagate the fitted areas.
    new_params = ParamController(param, elemental_lines).params
    update_parameter_dict(new_params, result)
    for k, v in six.iteritems(new_params):
        if 'area' in k:
            assert_equal(v['value'], result.values[k])
def test_register():
    """Registering strategies grows the registry as expected."""
    # re-adding the same mapping under its own name keeps the count at 5.
    register_strategy('e_calibration', e_calibration, overwrite=False)
    assert_equal(len(_STRATEGY_REGISTRY), 5)
    # a modified copy under a new name adds a sixth entry.
    modified = copy.deepcopy(e_calibration)
    modified['coherent_sct_amplitude'] = 'fixed'
    register_strategy('new_strategy', modified)
    assert_equal(len(_STRATEGY_REGISTRY), 6)
@raises(RuntimeError)
def test_register_error():
    """Overwriting an existing strategy without overwrite=True must fail."""
    modified = copy.deepcopy(e_calibration)
    modified['coherent_sct_amplitude'] = 'fixed'
    register_strategy('e_calibration', modified, overwrite=False)
def test_pre_fit():
    """Linear fitting (with and without weights) finds the expected items
    and reconstructs the spectrum reasonably well (r^2 > 0.85)."""
    # No pre-defined elements: use all possible elements activated at
    # the given energy.
    y0 = synthetic_spectrum()
    x0 = np.arange(len(y0))
    expected_items = ['Ar_K', 'Fe_K', 'compton', 'elastic']
    param = get_para()
    # exercise both the unweighted and the weighted code paths.
    for weights in (None, 1/np.sqrt(y0+1)):
        x, y_total, area_v = linear_spectrum_fitting(x0, y0, param,
                                                     weights=weights)
        for item in expected_items:
            assert_true(item in y_total)
        fitted = np.sum([v for v in y_total.values()], axis=0)
        # r squared as a goodness-of-fit measurement.
        rsq = 1 - np.sum((fitted-y0)**2)/np.sum((y0-np.mean(y0))**2)
        assert_true(rsq > 0.85)
def test_escape_peak():
    """The escape peak should carry the requested fraction of intensity."""
    y0 = synthetic_spectrum()
    ratio = 0.01
    xnew, ynew = compute_escape_peak(y0, ratio, get_para())
    # the intensity ratio should match to three decimals.
    assert_array_almost_equal(np.sum(ynew)/np.sum(y0), ratio, decimal=3)
def test_set_param_hint():
    """Every bound type except 'fixed' leaves the parameter free to vary."""
    param = get_para()
    MS = ModelSpectrum(param, ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'])
    MS.assemble_models()
    # the compton component is the first in the composite model
    compton = MS.mod.components[0]
    for bound in ('none', 'lohi', 'fixed', 'lo', 'hi'):
        hint = {'bound_type': bound, 'max': 13.0, 'min': 9.0, 'value': 11.0}
        _set_parameter_hint('coherent_sct_energy', hint, compton)
        params = compton.make_params()
        # only 'fixed' pins the parameter
        assert_equal(params['coherent_sct_energy'].vary, bound != 'fixed')
@raises(ValueError)
def test_set_param():
    """An unrecognized bound type must raise ValueError."""
    param = get_para()
    MS = ModelSpectrum(param, ['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'])
    MS.assemble_models()
    # the compton component is the first in the composite model
    compton = MS.mod.components[0]
    bad_hint = {'bound_type': 'other', 'max': 13.0, 'min': 9.0,
                'value': 11.0}
    _set_parameter_hint('coherent_sct_energy', bad_hint, compton)
def test_pixel_fit_multiprocess():
    """End-to-end check of the multiprocess per-pixel NNLS fit.

    Two identical pixels (a 2 x 1 map built from the same synthetic
    spectrum) are fit against a linear model; the two pixels must produce
    identical results, and the fitted peak areas must recover the model's
    default_area to within 1%.
    """
    param = get_para()
    y0 = synthetic_spectrum()
    x = np.arange(len(y0))
    pileup_peak = ['Si_Ka1-Si_Ka1', 'Si_Ka1-Ce_La1']
    user_peak = ['user_peak1']
    elemental_lines = (['Ar_K', 'Fe_K', 'Ce_L', 'Pt_M'] + pileup_peak +
                       user_peak)
    default_area = 1e5
    elist, matv, area_v = construct_linear_model(x, param, elemental_lines,
                                                 default_area=default_area)
    # build a 2 x 1 pixel map where both pixels hold the same spectrum
    exp_data = np.zeros([2, 1, len(y0)])
    for i in range(exp_data.shape[0]):
        exp_data[i, 0, :] = y0
    results = fit_pixel_multiprocess_nnls(exp_data, matv, param,
                                          use_snip=True)
    # output area of dict
    result_map = calculate_area(elist, matv, results,
                                param, first_peak_area=True)
    # compare input list and output elemental list
    assert_array_equal(elist, elemental_lines+['compton', 'elastic'])
    # Total len includes all the elemental list, compton, elastic and
    # two more items, which are summed area of background and r-squared
    total_len = len(elist) + 2
    assert_array_equal(results.shape, [2, 1, total_len])
    # same exp data should output same results
    assert_array_equal(results[0, :, :], results[1, :, :])
    for k, v in six.iteritems(result_map):
        assert_equal(v[0, 0], v[1, 0])
        if k in ['snip_bkg', 'r_squared']:
            # bkg is not a fitting parameter, and r_squared is just a
            # statistical output.
            # Only compare the fitting parameters, such as area of each peak.
            continue
        # compare with default value 1e5, and get difference < 1%
        # NOTE(review): the fitted area is scaled by 0.01 before comparison,
        # implying outputs come back 100x the model default_area — presumably
        # an internal scaling convention; confirm against calculate_area.
        assert_true(abs(v[0, 0] * 0.01 - default_area) / default_area < 1e-2)
| |
# -*- coding: utf-8 -*-
#
# Spec work for ibride.com documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 17 17:18:01 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Spec work for ibride.com'
copyright = u'2015, Evgeny V. Generalov'
author = u'Evgeny V. Generalov'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'ru'

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# NOTE: this is overridden to 'sphinx_rtd_theme' further down when building
# outside Read the Docs.
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'Specworkforibridecomdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',

    # Latex figure (float) alignment
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Specworkforibridecom.tex', u'Spec work for ibride.com Documentation',
     u'Evgeny V. Generalov', 'manual'),
]

# -- Options for manual page output ---------------------------------------
# (The commented latex_* options below/above keep Sphinx defaults.)

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'specworkforibridecom', u'Spec work for ibride.com Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Specworkforibridecom', u'Spec work for ibride.com Documentation',
     author, 'Specworkforibridecom', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False

# on_rtd is whether we are on readthedocs.org
# Use Read The Docs Theme
# NOTE(review): this import is redundant -- os is already imported at the top
# of this file.
import os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Auto-documenting Django Models with Sphinx (https://djangosnippets.org/snippets/2533/)
import inspect
from django.utils.html import strip_tags
from django.utils.encoding import force_text
def process_docstring(app, what, name, obj, options, lines):
    """autodoc-process-docstring hook: document Django model fields.

    For every class that inherits from ``django.db.models.Model``, append
    ``:param:`` and ``:type:`` entries for each model field to the class
    docstring, so the fields show up in the generated documentation.
    """
    # This causes import errors if left outside the function
    from django.db import models

    if not (inspect.isclass(obj) and issubclass(obj, models.Model)):
        return lines

    # Grab the field list from the meta class
    for field in obj._meta.fields:
        # Prefer the field's help text (html-stripped); fall back to the
        # capitalized verbose name when no help text is set.
        description = strip_tags(force_text(field.help_text))
        if not description:
            description = force_text(field.verbose_name).capitalize()
        lines.append(u':param %s: %s' % (field.attname, description))

        # Add the field's type to the docstring
        field_type_name = type(field).__name__
        if isinstance(field, models.ForeignKey):
            target = field.rel.to
            lines.append(u':type %s: %s to :class:`~%s.%s`' % (
                field.attname, field_type_name, target.__module__, target.__name__))
        else:
            lines.append(u':type %s: %s' % (field.attname, field_type_name))

    # Return the extended docstring
    return lines
def setup(app):
    """Sphinx extension hook: attach the Django-model docstring processor."""
    # Register the docstring processor with sphinx
    app.connect('autodoc-process-docstring', process_docstring)


# Load Django project settings
# NOTE(review): this runs at conf.py import time and assumes
# DJANGO_SETTINGS_MODULE is already configured in the environment -- confirm.
import django
django.setup()
| |
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Generate template values for methods.
Extends IdlArgument with property |default_cpp_value|.
Extends IdlTypeBase and IdlUnionType with property |union_arguments|.
Design doc: http://www.chromium.org/developers/design-documents/idl-compiler
"""
from idl_definitions import IdlArgument, IdlOperation
from idl_types import IdlTypeBase, IdlUnionType, inherits_interface
from v8_globals import includes
import v8_types
import v8_utilities
from v8_utilities import (has_extended_attribute_value, is_unforgeable,
is_legacy_interface_type_checking)
# Methods with any of these require custom method registration code in the
# interface's configure*Template() function.
# (Consulted by method_context() and custom_registration() below.)
CUSTOM_REGISTRATION_EXTENDED_ATTRIBUTES = frozenset([
    'DoNotCheckSecurity',
])
def method_is_visible(method, interface_is_partial):
    """Return whether |method| (a template-context dict) should be installed.

    Overloaded methods are represented by a single aggregate entry; they are
    visible when the overload set is visible, unless the overloads live in a
    partial interface and we are generating that partial interface.
    Non-overloaded methods are visible when marked visible and not a member
    of an overload set (no 'overload_index').
    """
    try:
        overloads = method['overloads']
    except KeyError:
        return method['visible'] and 'overload_index' not in method
    hidden_for_partial = overloads['has_partial_overloads'] and interface_is_partial
    return overloads['visible'] and not hidden_for_partial
def conditionally_exposed(method):
    """Return the truthy exposure condition of |method|, if any.

    Uses the aggregate [Exposed]/[SecureContext] tests for overload sets,
    the per-method tests otherwise; the [Exposed] test wins when both are
    present (``or`` short-circuit).
    """
    if 'overloads' in method:
        overloads = method['overloads']
        exposed = overloads['exposed_test_all']
        secure_context = overloads['secure_context_test_all']
    else:
        exposed = method['exposed_test']
        secure_context = method['secure_context_test']
    return exposed or secure_context
def filter_conditionally_exposed(methods, interface_is_partial):
    """Return the visible methods that have an exposure condition."""
    selected = []
    for method in methods:
        if (method_is_visible(method, interface_is_partial) and
                conditionally_exposed(method)):
            selected.append(method)
    return selected
def custom_registration(method):
    """Return whether |method| needs hand-written registration code.

    True for methods flagged with a custom-registration extended attribute,
    overload sets whose length is only known at runtime, and
    runtime-enabled methods that have no exposure condition.
    """
    if 'overloads' in method:
        overloads = method['overloads']
        return (overloads['has_custom_registration_all'] or
                overloads['runtime_determined_lengths'] or
                (overloads['runtime_enabled_function_all'] and
                 not conditionally_exposed(method)))
    return (method['has_custom_registration'] or
            (method['runtime_enabled_function'] and
             not conditionally_exposed(method)))
def filter_custom_registration(methods, interface_is_partial):
    """Return the visible methods that require custom registration code."""
    selected = []
    for method in methods:
        if (method_is_visible(method, interface_is_partial) and
                custom_registration(method)):
            selected.append(method)
    return selected
def filter_method_configuration(methods, interface_is_partial):
    """Return the methods installed through the static configuration table:
    visible, script-exposed, not origin-trial-gated, unconditionally exposed,
    and not custom-registered."""
    configured = []
    for method in methods:
        if not (method_is_visible(method, interface_is_partial) and
                method['should_be_exposed_to_script']):
            continue
        if method['origin_trial_feature_name']:
            continue
        if conditionally_exposed(method) or custom_registration(method):
            continue
        configured.append(method)
    return configured
def method_for_origin_trial_feature(methods, feature_name, interface_is_partial):
    """Filters the list of methods, and returns those defined for the named origin trial feature."""
    selected = []
    for method in methods:
        if not (method_is_visible(method, interface_is_partial) and
                method['should_be_exposed_to_script']):
            continue
        if method['origin_trial_feature_name'] != feature_name:
            continue
        if conditionally_exposed(method) or custom_registration(method):
            continue
        selected.append(method)
    return selected
def method_filters():
    """Return the Jinja filters this module contributes to method templates."""
    filters = {}
    filters['conditionally_exposed'] = filter_conditionally_exposed
    filters['custom_registration'] = filter_custom_registration
    filters['has_method_configuration'] = filter_method_configuration
    filters['method_for_origin_trial_feature'] = method_for_origin_trial_feature
    return filters
def use_local_result(method):
    """Return whether the generated binding stores the impl call's result in
    a local |result| variable before converting it to a V8 value (needed for
    [CallWith=ScriptState], private script, [NewObject], [RaisesException],
    union returns and explicitly nullable returns)."""
    attributes = method.extended_attributes
    return_type = method.idl_type
    return (has_extended_attribute_value(method, 'CallWith', 'ScriptState') or
            'ImplementedInPrivateScript' in attributes or
            'NewObject' in attributes or
            'RaisesException' in attributes or
            return_type.is_union_type or
            return_type.is_explicit_nullable)
def method_context(interface, method, is_visible=True):
    """Build the Jinja template context dict for one IDL operation.

    Inspects the method's extended attributes, records the C++ headers the
    generated code will need (via the global |includes| set), and returns the
    flat dict of values consumed by the method templates.

    Args:
        interface: the IdlInterface that declares |method|.
        method: the IdlOperation being generated.
        is_visible: False for hidden overload set members; skips include
            collection for the return type.
    """
    arguments = method.arguments
    extended_attributes = method.extended_attributes
    idl_type = method.idl_type
    is_static = method.is_static
    name = method.name

    if is_visible:
        idl_type.add_includes_for_type(extended_attributes)

    # C++ expression invoking the implementation with all arguments.
    this_cpp_value = cpp_value(interface, method, len(arguments))

    is_implemented_in_private_script = 'ImplementedInPrivateScript' in extended_attributes
    if is_implemented_in_private_script:
        includes.add('bindings/core/v8/PrivateScriptRunner.h')
        includes.add('core/frame/LocalFrame.h')
        includes.add('platform/ScriptForbiddenScope.h')

    # [OnlyExposedToPrivateScript]
    is_only_exposed_to_private_script = 'OnlyExposedToPrivateScript' in extended_attributes

    is_call_with_script_arguments = has_extended_attribute_value(method, 'CallWith', 'ScriptArguments')
    if is_call_with_script_arguments:
        includes.update(['bindings/core/v8/ScriptCallStack.h',
                         'core/inspector/ScriptArguments.h'])
    is_call_with_script_state = has_extended_attribute_value(method, 'CallWith', 'ScriptState')
    is_call_with_this_value = has_extended_attribute_value(method, 'CallWith', 'ThisValue')
    if is_call_with_script_state or is_call_with_this_value:
        includes.add('bindings/core/v8/ScriptState.h')

    # [CheckSecurity]
    is_do_not_check_security = 'DoNotCheckSecurity' in extended_attributes
    # Receiver checks come from the interface and can be opted out of
    # per-method with [DoNotCheckSecurity].
    is_check_security_for_receiver = (
        has_extended_attribute_value(interface, 'CheckSecurity', 'Receiver') and
        not is_do_not_check_security)
    is_check_security_for_return_value = (
        has_extended_attribute_value(method, 'CheckSecurity', 'ReturnValue'))
    if is_check_security_for_receiver or is_check_security_for_return_value:
        includes.add('bindings/core/v8/BindingSecurity.h')

    # [CEReactions] / [CustomElementCallbacks]
    is_ce_reactions = 'CEReactions' in extended_attributes
    if is_ce_reactions:
        includes.add('core/dom/custom/CEReactionsScope.h')
    is_custom_element_callbacks = 'CustomElementCallbacks' in extended_attributes
    if is_custom_element_callbacks:
        includes.add('core/dom/custom/V0CustomElementProcessingStack.h')

    is_raises_exception = 'RaisesException' in extended_attributes
    is_custom_call_prologue = has_extended_attribute_value(method, 'Custom', 'CallPrologue')
    is_custom_call_epilogue = has_extended_attribute_value(method, 'Custom', 'CallEpilogue')
    is_post_message = 'PostMessage' in extended_attributes
    if is_post_message:
        includes.add('bindings/core/v8/SerializedScriptValueFactory.h')
        includes.add('bindings/core/v8/Transferables.h')

    if 'LenientThis' in extended_attributes:
        raise Exception('[LenientThis] is not supported for operations.')

    # Per-argument sub-contexts, in declaration order.
    argument_contexts = [
        argument_context(interface, method, argument, index, is_visible=is_visible)
        for index, argument in enumerate(arguments)]

    return {
        'activity_logging_world_list': v8_utilities.activity_logging_world_list(method),  # [ActivityLogging]
        'arguments': argument_contexts,
        'argument_declarations_for_private_script':
            argument_declarations_for_private_script(interface, method),
        'cpp_type': (v8_types.cpp_template_type('Nullable', idl_type.cpp_type)
                     if idl_type.is_explicit_nullable else idl_type.cpp_type),
        'cpp_value': this_cpp_value,
        'cpp_type_initializer': idl_type.cpp_type_initializer,
        'custom_registration_extended_attributes':
            CUSTOM_REGISTRATION_EXTENDED_ATTRIBUTES.intersection(
                extended_attributes.iterkeys()),
        'deprecate_as': v8_utilities.deprecate_as(method),  # [DeprecateAs]
        'do_not_test_new_object': 'DoNotTestNewObject' in extended_attributes,
        'exposed_test': v8_utilities.exposed(method, interface),  # [Exposed]
        # TODO(yukishiino): Retire has_custom_registration flag. Should be
        # replaced with V8DOMConfiguration::PropertyLocationConfiguration.
        'has_custom_registration':
            v8_utilities.has_extended_attribute(
                method, CUSTOM_REGISTRATION_EXTENDED_ATTRIBUTES),
        'has_exception_state':
            is_raises_exception or
            is_check_security_for_receiver or
            any(argument for argument in arguments
                if (argument.idl_type.name == 'SerializedScriptValue' or
                    argument_conversion_needs_exception_state(method, argument))),
        'has_optional_argument_without_default_value':
            any(True for argument_context in argument_contexts
                if argument_context['is_optional_without_default_value']),
        'idl_type': idl_type.base_type,
        'is_call_with_execution_context': has_extended_attribute_value(method, 'CallWith', 'ExecutionContext'),
        'is_call_with_script_arguments': is_call_with_script_arguments,
        'is_call_with_script_state': is_call_with_script_state,
        'is_call_with_this_value': is_call_with_this_value,
        'is_ce_reactions': is_ce_reactions,
        'is_check_security_for_receiver': is_check_security_for_receiver,
        'is_check_security_for_return_value': is_check_security_for_return_value,
        'is_custom': 'Custom' in extended_attributes and
                     not (is_custom_call_prologue or is_custom_call_epilogue),
        'is_custom_call_prologue': is_custom_call_prologue,
        'is_custom_call_epilogue': is_custom_call_epilogue,
        'is_custom_element_callbacks': is_custom_element_callbacks,
        'is_do_not_check_security': is_do_not_check_security,
        'is_explicit_nullable': idl_type.is_explicit_nullable,
        'is_implemented_in_private_script': is_implemented_in_private_script,
        'is_new_object': 'NewObject' in extended_attributes,
        'is_partial_interface_member':
            'PartialInterfaceImplementedAs' in extended_attributes,
        'is_per_world_bindings': 'PerWorldBindings' in extended_attributes,
        'is_post_message': is_post_message,
        'is_raises_exception': is_raises_exception,
        'is_static': is_static,
        'is_unforgeable': is_unforgeable(interface, method),
        'is_variadic': arguments and arguments[-1].is_variadic,
        'measure_as': v8_utilities.measure_as(method, interface),  # [MeasureAs]
        'name': name,
        'number_of_arguments': len(arguments),
        'number_of_required_arguments': len([
            argument for argument in arguments
            if not (argument.is_optional or argument.is_variadic)]),
        'number_of_required_or_variadic_arguments': len([
            argument for argument in arguments
            if not argument.is_optional]),
        'on_instance': v8_utilities.on_instance(interface, method),
        'on_interface': v8_utilities.on_interface(interface, method),
        'on_prototype': v8_utilities.on_prototype(interface, method),
        'only_exposed_to_private_script': is_only_exposed_to_private_script,
        'origin_trial_enabled_function': v8_utilities.origin_trial_enabled_function_name(method),  # [OriginTrialEnabled]
        'origin_trial_feature_name': v8_utilities.origin_trial_feature_name(method),  # [OriginTrialEnabled]
        'private_script_v8_value_to_local_cpp_value': idl_type.v8_value_to_local_cpp_value(
            extended_attributes, 'v8Value', 'cppValue', isolate='scriptState->isolate()', bailout_return_value='false'),
        'property_attributes': property_attributes(interface, method),
        'returns_promise': method.returns_promise,
        'runtime_enabled_function': v8_utilities.runtime_enabled_function_name(method),  # [RuntimeEnabled]
        'secure_context_test': v8_utilities.secure_context(method, interface),  # [SecureContext]
        'should_be_exposed_to_script': not (is_implemented_in_private_script and is_only_exposed_to_private_script),
        'use_output_parameter_for_result': idl_type.use_output_parameter_for_result,
        'use_local_result': use_local_result(method),
        'v8_set_return_value': v8_set_return_value(interface.name, method, this_cpp_value),
        'v8_set_return_value_for_main_world': v8_set_return_value(interface.name, method, this_cpp_value, for_main_world=True),
        'visible': is_visible,
        'world_suffixes': ['', 'ForMainWorld'] if 'PerWorldBindings' in extended_attributes else [''],  # [PerWorldBindings],
    }
def argument_context(interface, method, argument, index, is_visible=True):
    """Build the Jinja template context dict for one argument of |method|.

    Args:
        interface: the IdlInterface declaring the method.
        method: the IdlOperation the argument belongs to.
        argument: the IdlArgument being generated.
        index: zero-based position of the argument in the signature.
        is_visible: False for hidden overload set members; skips include
            collection for the argument type.
    """
    extended_attributes = argument.extended_attributes
    idl_type = argument.idl_type
    if is_visible:
        idl_type.add_includes_for_type(extended_attributes)
    # Impl call expression truncated at this argument (used when trailing
    # optional arguments are omitted).
    this_cpp_value = cpp_value(interface, method, index)
    is_variadic_wrapper_type = argument.is_variadic and idl_type.is_wrapper_type

    # [LegacyInterfaceTypeChecking]
    has_type_checking_interface = (
        not is_legacy_interface_type_checking(interface, method) and
        idl_type.is_wrapper_type)

    if ('ImplementedInPrivateScript' in extended_attributes and
            not idl_type.is_wrapper_type and
            not idl_type.is_basic_type):
        raise Exception('Private scripts supports only primitive types and DOM wrappers.')

    set_default_value = argument.set_default_value
    this_cpp_type = idl_type.cpp_type_args(extended_attributes=extended_attributes,
                                           raw_type=True,
                                           used_as_variadic_argument=argument.is_variadic)
    context = {
        'cpp_type': (
            v8_types.cpp_template_type('Nullable', this_cpp_type)
            if idl_type.is_explicit_nullable and not argument.is_variadic
            else this_cpp_type),
        'cpp_value': this_cpp_value,
        # FIXME: check that the default value's type is compatible with the argument's
        'set_default_value': set_default_value,
        'enum_type': idl_type.enum_type,
        'enum_values': idl_type.enum_values,
        'handle': '%sHandle' % argument.name,
        # FIXME: remove once [Default] removed and just use argument.default_value
        'has_default': 'Default' in extended_attributes or set_default_value,
        'has_type_checking_interface': has_type_checking_interface,
        # Dictionary is special-cased, but arrays and sequences shouldn't be
        'idl_type': idl_type.base_type,
        'idl_type_object': idl_type,
        'index': index,
        'is_callback_function': idl_type.is_callback_function,
        'is_callback_interface': idl_type.is_callback_interface,
        # FIXME: Remove generic 'Dictionary' special-casing
        'is_dictionary': idl_type.is_dictionary or idl_type.base_type == 'Dictionary',
        'is_explicit_nullable': idl_type.is_explicit_nullable,
        'is_nullable': idl_type.is_nullable,
        'is_optional': argument.is_optional,
        'is_variadic': argument.is_variadic,
        'is_variadic_wrapper_type': is_variadic_wrapper_type,
        'is_wrapper_type': idl_type.is_wrapper_type,
        'name': argument.name,
        'private_script_cpp_value_to_v8_value': idl_type.cpp_value_to_v8_value(
            argument.name, isolate='scriptState->isolate()',
            creation_context='scriptState->context()->Global()'),
        'use_permissive_dictionary_conversion': 'PermissiveDictionaryConversion' in extended_attributes,
        'v8_set_return_value': v8_set_return_value(interface.name, method, this_cpp_value),
        'v8_set_return_value_for_main_world': v8_set_return_value(interface.name, method, this_cpp_value, for_main_world=True),
        'v8_value_to_local_cpp_value': v8_value_to_local_cpp_value(method, argument, index),
    }
    # Derived flag; computed after the dict so it can reuse entries above.
    context.update({
        'is_optional_without_default_value':
            context['is_optional'] and
            not context['has_default'] and
            not context['is_dictionary'] and
            not context['is_callback_interface'],
    })
    return context
def argument_declarations_for_private_script(interface, method):
    """Build the C++ parameter declaration list for a private-script method:
    the frame, the holder impl, every IDL argument, and — for non-void
    methods — an out-parameter named |result|."""
    declarations = ['LocalFrame* frame', '%s* holderImpl' % interface.name]
    for argument in method.arguments:
        arg_cpp_type = argument.idl_type.cpp_type_args(used_as_rvalue_type=True)
        declarations.append('%s %s' % (arg_cpp_type, argument.name))
    if method.idl_type.name != 'void':
        declarations.append('%s* %s' % (method.idl_type.cpp_type, 'result'))
    return declarations
################################################################################
# Value handling
################################################################################
def cpp_value(interface, method, number_of_arguments):
    """Return the C++ expression that calls the implementation of |method|
    passing its first |number_of_arguments| arguments.

    The argument list is assembled in the order the implementation expects:
    private-script frame/impl, [CallWith] values, *impl for partial-interface
    instance members, the IDL arguments themselves, then trailing
    out-parameters (&result / exceptionState / result) as required.
    """
    # Truncate omitted optional arguments
    arguments = method.arguments[:number_of_arguments]
    cpp_arguments = []
    if 'ImplementedInPrivateScript' in method.extended_attributes:
        cpp_arguments.append('toLocalFrame(toFrameIfNotDetached(info.GetIsolate()->GetCurrentContext()))')
        cpp_arguments.append('impl')

    # Constructors take [ConstructorCallWith] from the interface; regular
    # operations take [CallWith] from the method itself.
    if method.is_constructor:
        call_with_values = interface.extended_attributes.get('ConstructorCallWith')
    else:
        call_with_values = method.extended_attributes.get('CallWith')
    cpp_arguments.extend(v8_utilities.call_with_arguments(call_with_values))

    # Members of IDL partial interface definitions are implemented in C++ as
    # static member functions, which for instance members (non-static members)
    # take *impl as their first argument
    if ('PartialInterfaceImplementedAs' in method.extended_attributes and
            'ImplementedInPrivateScript' not in method.extended_attributes and
            not method.is_static):
        cpp_arguments.append('*impl')
    cpp_arguments.extend(argument.name for argument in arguments)

    if 'ImplementedInPrivateScript' in method.extended_attributes:
        if method.idl_type.name != 'void':
            cpp_arguments.append('&result')
    elif ('RaisesException' in method.extended_attributes or
          (method.is_constructor and
           has_extended_attribute_value(interface, 'RaisesException', 'Constructor'))):
        cpp_arguments.append('exceptionState')

    # If a method returns an IDL dictionary or union type, the return value is
    # passed as an argument to impl classes.
    idl_type = method.idl_type
    if idl_type and idl_type.use_output_parameter_for_result:
        cpp_arguments.append('result')

    if method.name == 'Constructor':
        base_name = 'create'
    elif method.name == 'NamedConstructor':
        base_name = 'createForJSConstructor'
    elif 'ImplementedInPrivateScript' in method.extended_attributes:
        base_name = '%sMethod' % method.name
    else:
        base_name = v8_utilities.cpp_name(method)

    cpp_method_name = v8_utilities.scoped_name(interface, method, base_name)
    return '%s(%s)' % (cpp_method_name, ', '.join(cpp_arguments))
def v8_set_return_value(interface_name, method, cpp_value, for_main_world=False):
    """Return the expression that writes the method's result back to V8.

    Returns None for constructors and void methods, which have no return
    value to set.
    """
    return_type = method.idl_type
    attrs = method.extended_attributes
    # Constructors and void methods don't have a return type.
    if not return_type or return_type.name == 'void':
        return None
    if ('ImplementedInPrivateScript' in attrs and
            not return_type.is_wrapper_type and
            not return_type.is_basic_type):
        raise Exception('Private scripts supports only primitive types and DOM wrappers.')
    # [CallWith=ScriptState], [RaisesException]
    if use_local_result(method):
        # The call wrote into a local 'result'; unwrap Nullable<T> if needed.
        cpp_value = 'result.get()' if return_type.is_explicit_nullable else 'result'
    if inherits_interface(interface_name, 'Node'):
        script_wrappable = 'impl'
    else:
        script_wrappable = ''
    return return_type.v8_set_return_value(cpp_value, attrs, script_wrappable=script_wrappable, for_main_world=for_main_world, is_static=method.is_static)
def v8_value_to_local_cpp_variadic_value(method, argument, index, return_promise):
    """Build the conversion record for a variadic argument.

    The remaining JS arguments (from |index| on) are gathered at once into
    a vector via toImplArguments<>.
    """
    assert argument.is_variadic
    arg_type = argument.idl_type
    element_cpp_type = arg_type.cpp_type
    # Promise-returning methods report conversion failures through the
    # exception state instead of throwing immediately.
    if method.returns_promise:
        failure_check = 'exceptionState.hadException()'
    else:
        failure_check = 'exceptionState.throwIfNeeded()'
    # Dictionary and union element types require HeapVector; everything else
    # uses a plain Vector.
    container = 'HeapVector' if (arg_type.is_dictionary or arg_type.is_union_type) else 'Vector'
    return {
        'assign_expression': 'toImplArguments<%s<%s>>(info, %s, exceptionState)' % (container, element_cpp_type, index),
        'check_expression': failure_check,
        'cpp_type': element_cpp_type,
        'cpp_name': argument.name,
        'declare_variable': False,
    }
def v8_value_to_local_cpp_value(method, argument, index, return_promise=False, restricted_float=False):
    """Build the conversion record turning info[index] into a C++ value."""
    if argument.is_variadic:
        # Variadic arguments collect all remaining JS arguments at once.
        return v8_value_to_local_cpp_variadic_value(method, argument, index, return_promise)
    return argument.idl_type.v8_value_to_local_cpp_value(
        argument.extended_attributes, 'info[%s]' % index,
        argument.name, index=index, declare_variable=False,
        use_exception_state=method.returns_promise,
        restricted_float=restricted_float)
################################################################################
# Auxiliary functions
################################################################################
# [NotEnumerable], [Unforgeable]
def property_attributes(interface, method):
    """Collect the v8::PropertyAttribute flags to install for this method."""
    attrs = method.extended_attributes
    flags = []
    if 'NotEnumerable' in attrs:
        flags.append('v8::DontEnum')
    if is_unforgeable(interface, method):
        flags.extend(['v8::ReadOnly', 'v8::DontDelete'])
    return flags
def argument_set_default_value(argument):
    """Return the C++ statement initializing |argument| to its IDL default.

    Returns None when there is no default value (or none is needed), a
    placeholder comment for empty-sequence / null-union defaults, and a
    'name = value' or 'name.setX(value)' statement otherwise.

    Raises:
        Exception: if the default value is invalid for the argument's type.
    """
    idl_type = argument.idl_type
    default_value = argument.default_value
    if not default_value:
        return None
    if idl_type.is_dictionary:
        if not argument.default_value.is_null:
            raise Exception('invalid default value for dictionary type')
        return None
    if idl_type.is_array_or_sequence_type:
        if default_value.value != '[]':
            raise Exception('invalid default value for sequence type: %s' % default_value.value)
        # Nothing to do when we set an empty sequence as default value, but we
        # need to return non-empty value so that we don't generate method calls
        # without this argument.
        return '/* Nothing to do */'
    if idl_type.is_union_type:
        if argument.default_value.is_null:
            if not idl_type.includes_nullable_type:
                raise Exception('invalid default value for union type: null for %s'
                                % idl_type.name)
            # Union container objects are "null" initially.
            return '/* null default value */'
        # Check bool BEFORE (int, float): bool is a subclass of int in Python,
        # so the numeric test would otherwise capture boolean defaults and
        # pick numeric_member_type instead of boolean_member_type.
        if isinstance(default_value.value, bool):
            member_type = idl_type.boolean_member_type
        elif isinstance(default_value.value, basestring):
            member_type = idl_type.string_member_type
        elif isinstance(default_value.value, (int, float)):
            member_type = idl_type.numeric_member_type
        else:
            member_type = None
        if member_type is None:
            raise Exception('invalid default value for union type: %r for %s'
                            % (default_value.value, idl_type.name))
        # Nullable members set through the inner type's name.
        member_type_name = (member_type.inner_type.name
                            if member_type.is_nullable else
                            member_type.name)
        return '%s.set%s(%s)' % (argument.name, member_type_name,
                                 member_type.literal_cpp_value(default_value))
    return '%s = %s' % (argument.name,
                        idl_type.literal_cpp_value(default_value))
# Expose the default-value initializer expression as a read-only property
# on IdlArgument.
IdlArgument.set_default_value = property(argument_set_default_value)
def method_returns_promise(method):
    """True when the method's declared return type is a Promise.

    Returns None when the method has no return type at all (preserving the
    short-circuit value of the original `and` expression).
    """
    return_type = method.idl_type
    return return_type and return_type.name == 'Promise'
# Expose Promise-return detection as a read-only property on IdlOperation.
IdlOperation.returns_promise = property(method_returns_promise)
def argument_conversion_needs_exception_state(method, argument):
    """True when converting this argument from V8 may leave an exception:
    the type conversion itself can throw, variadic gathering can throw, or
    a string conversion feeding a Promise-returning method must be trapped.
    """
    arg_type = argument.idl_type
    if arg_type.v8_conversion_needs_exception_state:
        return True
    if argument.is_variadic:
        return True
    return method.returns_promise and arg_type.is_string_type
| |
#!/usr/bin/env python
# Purpose : Python Boot Camp - Basemap Teaching Program 2.
# Ensure that environment variable PYTHONUNBUFFERED=yes
# This allows STDOUT and STDERR to both be logged in chronological order
import sys # platform, args, run tools
import os # platform, args, run tools
import argparse # For parsing command line
import datetime # For date/time processing
import numpy as np
import h5py
import matplotlib as mpl
mpl.use('Agg', warn=False)
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure, show, subplots
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.basemap import cm as bm_cm
import matplotlib.cm as mpl_cm
################################################
#########################################################################
# Command Line Parameters Class
#########################################################################
class Bcbm2CP():
    """Command-line parser for the Basemap teaching program.

    After a successful call to bcbm2_cp(), the parsed namespace is stored
    on self.args.
    """

    def bcbm2_cp(self, bcbm2_cmd_line):
        """Parse |bcbm2_cmd_line| (a list of argv tokens) into self.args.

        Returns 0 on success; argparse exits the process on bad input.
        """
        parser = argparse.ArgumentParser(
            description="Python Boot Camp - Basemap Teaching Program 2")
        parser.add_argument('input_file_name',
                            metavar='input_file_name',
                            help="Input file name")
        parser.add_argument("-v", "--verbose",
                            dest="verbose",
                            action="store_true",
                            default=False,
                            help="Display processing messages to STDOUT (DEFAULT=NO)")
        parser.add_argument("-t", "--test_mode",
                            dest="test_mode",
                            action="store_true",
                            default=False,
                            help="Run program in test mode (DEFAULT=NO)")
        self.args = parser.parse_args(bcbm2_cmd_line)
        if (self.args.verbose):
            sys.stdout.write("BCBM2 : bcbm2_cmd_line = " + str(bcbm2_cmd_line) + "\n")
        # Return
        return(0)
#########################################################################
# Main Program
#########################################################################
class Bcbm2():
    """Python Boot Camp - Basemap Teaching Program 2.

    Parses the command line, draws a Mercator map of the Earth with
    Basemap, and provides (currently commented-out) helpers that overlay
    OMPS satellite ground tracks read from an HDF5 file.
    """

    def bcbm2(self, bcbm2_cmd_line):
        """Top-level driver: parse args, draw the map, report timing.

        Returns 0 on success, or the first non-zero status code from a
        helper.
        """
        # Start time
        self.start_time = datetime.datetime.today()
        # Parse input parameters from cmd line
        bcbm2_cp1 = Bcbm2CP()
        bcbm2_cp1_ret = bcbm2_cp1.bcbm2_cp(bcbm2_cmd_line)
        self.bcbm2_cmd_line = bcbm2_cmd_line
        if (len(self.bcbm2_cmd_line) == 0):
            self.bcbm2_cmd_line = " "
        if (bcbm2_cp1_ret):
            return(bcbm2_cp1_ret)
        self.verbose = bcbm2_cp1.args.verbose
        self.test_mode = bcbm2_cp1.args.test_mode
        self.input_file_name = bcbm2_cp1.args.input_file_name
        if (self.test_mode):
            # Fixed timestamp keeps test-mode output reproducible.
            self.timestamp = "Test Mode Date/Time Stamp"
            if (self.verbose):
                sys.stdout.write("BCBM2 : Running in test mode\n")
                sys.stdout.write("BCBM2 : sys.version = " + str(sys.version) + "\n")
        else:
            self.timestamp = datetime.datetime.today().strftime("%Y-%m-%d %H:%M:%S")
            if (self.verbose):
                sys.stdout.write("BCBM2 : Program started : " + str(self.start_time) + "\n")
                sys.stdout.write("BCBM2 : sys.version = " + str(sys.version) + "\n")
        # NOTE(review): sys.version is echoed again below, so verbose runs
        # print it twice — looks unintentional; confirm before removing.
        if (self.verbose):
            sys.stdout.write("BCBM2 : sys.version = " + str(sys.version) + "\n")
            sys.stdout.write("BCBM2 : self.verbose = " + str(self.verbose) + "\n")
            sys.stdout.write("BCBM2 : self.test_mode = " + str(self.test_mode) + "\n")
            sys.stdout.write("BCBM2 : self.input_file_name = " + str(self.input_file_name) + "\n")
        # Call functions
        bcbm2_f11_ret = self.make_mercator_projection()
        if (bcbm2_f11_ret):
            return(bcbm2_f11_ret)
        # End program
        self.end_time = datetime.datetime.today()
        self.run_time = self.end_time - self.start_time
        if (self.verbose):
            if (self.test_mode):
                # Wall-clock times are suppressed in test mode: they are
                # not reproducible between runs.
                pass
            else:
                sys.stdout.write("BCBM2 : Program ended : " + str(self.end_time) + "\n")
                sys.stdout.write("BCBM2 : Run time : " + str(self.run_time) + "\n")
        if (self.verbose):
            sys.stdout.write("BCBM2 : Program completed normally\n")
        return(0)

    # Define functions
    #------------------------------------------------------------------------------
    def make_mercator_projection(self):
        """Draw a Mercator world map and save it to 'bcbm2_plot1'.

        Returns 0 on success.
        """
        if (self.verbose):
            sys.stdout.write("BCBM2 : make_mercator_projection ACTIVATED\n")
        # Set up figure in Matplotlib
        self.current_figure = mpl.pyplot.figure(1, figsize=(14.0, 10.0))
        self.current_figure.suptitle("Basemap - Mercator Map\n" +
                                     self.timestamp)
        self.current_figure.text(0.05, 0.95, "A Mercator Projection of the Earth")
        self.current_figure.subplots_adjust(left=0.05,
                                            right=0.95,
                                            top=0.80,
                                            bottom=0.05,
                                            wspace=0.2,
                                            hspace=0.4)
        self.current_plot = self.current_figure.add_subplot(1, 1, 1)
        # Plot figure
        # Mercator projection clipped to +/-80 deg latitude; 'c' requests
        # crude (lowest) coastline resolution.
        self.map = Basemap(projection='merc',
                           lat_0=0,
                           lon_0=0,
                           llcrnrlat=-80,
                           urcrnrlat=80,
                           llcrnrlon=-180,
                           urcrnrlon=180,
                           resolution='c')
        #self.map.drawmapboundary(fill_color='aqua')
        #self.map.fillcontinents(color='coral',lake_color='aqua')
        self.map.drawcoastlines()
        #self.map.drawcountries()
        #self.map.drawrivers()
        #self.map.drawstates()
        # Graticule every 20 degrees.
        self.map.drawparallels(np.arange( -90.0, 90.0, 20.0))
        self.map.drawmeridians(np.arange(-180.0, 181.0, 20.0))
        # Display day and night shading
        #self.date = datetime.datetime.utcnow()
        #self.map_nightshade = self.map.nightshade(self.date)
        # Write the output to a graphic file
        self.current_figure.savefig("bcbm2_plot1")
        mpl.pyplot.close(self.current_figure)
        # Read data from input file
        #self.read_omps_data()
        # Plot satellite tracks
        #self.plot_satellite_tracks_dots()
        #self.plot_satellite_tracks_lines()
        return(0)
    #------------------------------------------------------------------------------
    def read_omps_data(self):
        """Load OMPS ozone / geolocation datasets from the input HDF5 file.

        Populates self.o3 / self.lats / self.lons / self.orbit_num and the
        centre-slit slices (column 1) used by the plotting helpers.
        Returns 0 on success.
        """
        if (self.verbose):
            sys.stdout.write("BCBM2 : read_omps_data ACTIVATED\n")
        # Open input HDF5 file
        # NOTE(review): the h5py File is never closed; the datasets below are
        # lazy handles that stay valid only while the file is open.
        self.input_file = h5py.File(self.input_file_name, "r")
        sys.stdout.write("BCBM2 : self.input_file = " + str(self.input_file) + "\n")
        self.o3 = self.input_file["DataFields/O3CombinedValue"]
        self.lats = self.input_file["GeolocationFields/Latitude"]
        self.lons = self.input_file["GeolocationFields/Longitude"]
        self.orbit_num = self.input_file["GeolocationFields/OrbitNumber"]
        # Convert from Numpy objects to list arrays
        # Select only centre slit data
        self.lat_cs = self.lats[:,1]
        self.lon_cs = self.lons[:,1]
        self.orbit_num_cs = self.orbit_num[:,1]
        sys.stdout.write("BCBM2 : self.lat_cs = " + str(self.lat_cs) + "\n")
        sys.stdout.write("BCBM2 : self.lon_cs = " + str(self.lon_cs) + "\n")
        sys.stdout.write("BCBM2 : self.orbit_num_cs = " + str(self.orbit_num_cs) + "\n")
        return(0)
    #------------------------------------------------------------------------------
    def plot_satellite_tracks_dots(self):
        """Scatter the centre-slit ground track onto the current map and
        save it to 'bcbm2_plot2'. Requires read_omps_data() first.
        Returns 0 on success.
        """
        if (self.verbose):
            sys.stdout.write("BCBM2 : plot_satellite_tracks_dots ACTIVATED\n")
        # Set up mesh for plotting
        # Basemap instances are callable: (lon, lat) -> map projection x/y.
        self.xmesh, self.ymesh = self.map(self.lon_cs, self.lat_cs)
        self.map_scatter = self.map.scatter(self.xmesh,
                                            self.ymesh,
                                            1,
                                            marker='o',
                                            color='r',
                                            label="OMPS"
                                            )
        # Write the output to a graphic file
        self.current_figure.savefig("bcbm2_plot2")
        mpl.pyplot.close(self.current_figure)
        return(0)
    #------------------------------------------------------------------------------
    def plot_satellite_tracks_lines(self):
        """Draw one polyline per orbit for the centre-slit ground track and
        save it to 'bcbm2_plot3'. Requires read_omps_data() first.
        Returns 0 on success.
        """
        if (self.verbose):
            sys.stdout.write("BCBM2 : plot_satellite_tracks_lines ACTIVATED\n")
        # Make unique list of orbit numbers
        self.orbit_num_cs_unique = np.unique(self.orbit_num_cs)
        sys.stdout.write("BCBM2 : self.orbit_num_cs_unique = " +
                         str(self.orbit_num_cs_unique) + "\n")
        # Loop on unique orbit numbers
        for self.orbit_num in self.orbit_num_cs_unique:
            #sys.stdout.write("BCBM2 : self.orbit_num = " + str(self.orbit_num) + "\n")
            # Find the data for just that orbit
            self.lat_cs_orbit = self.lat_cs[np.where(self.orbit_num_cs == self.orbit_num)]
            self.lon_cs_orbit = self.lon_cs[np.where(self.orbit_num_cs == self.orbit_num)]
            #sys.stdout.write("BCBM2 : len(self.lat_cs_orbit) = " + str(len(self.lat_cs_orbit)) + "\n")
            #sys.stdout.write("BCBM2 : len(self.lon_cs_orbit) = " + str(len(self.lon_cs_orbit)) + "\n")
            # Set up mesh for plotting
            self.xmesh_orbit, self.ymesh_orbit = self.map(self.lon_cs_orbit, self.lat_cs_orbit)
            self.map_plot = self.map.plot(self.xmesh_orbit,
                                          self.ymesh_orbit,
                                          "-",
                                          #marker='o',
                                          color='r',
                                          label="OMPS"
                                          )
        # Write the output to a graphic file
        self.current_figure.savefig("bcbm2_plot3")
        mpl.pyplot.close(self.current_figure)
        return(0)
    #------------------------------------------------------------------------------
#------------------------------------------------------------------------------
####################################################
def main(argv=None):  # When run as a script
    """Script entry point.

    Runs Bcbm2 on |argv| (defaults to sys.argv[1:] when None) and returns
    its status code so sys.exit(main()) reflects failures. Previously a
    non-None |argv| left the command line unbound (NameError) and the
    status code was discarded.
    """
    if argv is None:
        argv = sys.argv[1:]
    bcbm2 = Bcbm2()
    return bcbm2.bcbm2(argv)
# Run as a script: exit with the value returned by main().
if __name__ == '__main__':
    sys.exit(main())
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
def __getitem__(self, name):
return super().__getitem__(name.upper())
def __getattr__(cls, name):
"""Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
properties for enum members (which live in the class' __dict__) and
enum members themselves.
"""
try:
return cls._member_map_[name.upper()]
except KeyError:
raise AttributeError(name)
class Access(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Indicates whether the traffic is allowed or denied.
    """

    ALLOW = "Allow"
    DENY = "Deny"

class ApplicationGatewayBackendHealthServerHealth(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Health of backend server.
    """

    UNKNOWN = "Unknown"
    UP = "Up"
    DOWN = "Down"
    PARTIAL = "Partial"
    DRAINING = "Draining"

class ApplicationGatewayCookieBasedAffinity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Cookie based affinity.
    """

    ENABLED = "Enabled"
    DISABLED = "Disabled"

class ApplicationGatewayFirewallMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Web application firewall mode.
    """

    DETECTION = "Detection"
    PREVENTION = "Prevention"

class ApplicationGatewayOperationalState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Operational state of the application gateway resource.
    """

    STOPPED = "Stopped"
    STARTING = "Starting"
    RUNNING = "Running"
    STOPPING = "Stopping"

class ApplicationGatewayProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Protocol.
    """

    HTTP = "Http"
    HTTPS = "Https"

class ApplicationGatewayRedirectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Redirect type.
    """

    PERMANENT = "Permanent"
    FOUND = "Found"
    SEE_OTHER = "SeeOther"
    TEMPORARY = "Temporary"

class ApplicationGatewayRequestRoutingRuleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Rule type.
    """

    BASIC = "Basic"
    PATH_BASED_ROUTING = "PathBasedRouting"

class ApplicationGatewaySkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of an application gateway SKU.
    """

    STANDARD_SMALL = "Standard_Small"
    STANDARD_MEDIUM = "Standard_Medium"
    STANDARD_LARGE = "Standard_Large"
    WAF_MEDIUM = "WAF_Medium"
    WAF_LARGE = "WAF_Large"

class ApplicationGatewaySslCipherSuite(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Ssl cipher suites enums.
    """

    TLS_ECDHE_RSA_WITH_AES256_CBC_SHA384 = "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384"
    TLS_ECDHE_RSA_WITH_AES128_CBC_SHA256 = "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256"
    TLS_ECDHE_RSA_WITH_AES256_CBC_SHA = "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA"
    TLS_ECDHE_RSA_WITH_AES128_CBC_SHA = "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA"
    TLS_DHE_RSA_WITH_AES256_GCM_SHA384 = "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384"
    TLS_DHE_RSA_WITH_AES128_GCM_SHA256 = "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256"
    TLS_DHE_RSA_WITH_AES256_CBC_SHA = "TLS_DHE_RSA_WITH_AES_256_CBC_SHA"
    TLS_DHE_RSA_WITH_AES128_CBC_SHA = "TLS_DHE_RSA_WITH_AES_128_CBC_SHA"
    TLS_RSA_WITH_AES256_GCM_SHA384 = "TLS_RSA_WITH_AES_256_GCM_SHA384"
    TLS_RSA_WITH_AES128_GCM_SHA256 = "TLS_RSA_WITH_AES_128_GCM_SHA256"
    TLS_RSA_WITH_AES256_CBC_SHA256 = "TLS_RSA_WITH_AES_256_CBC_SHA256"
    TLS_RSA_WITH_AES128_CBC_SHA256 = "TLS_RSA_WITH_AES_128_CBC_SHA256"
    TLS_RSA_WITH_AES256_CBC_SHA = "TLS_RSA_WITH_AES_256_CBC_SHA"
    TLS_RSA_WITH_AES128_CBC_SHA = "TLS_RSA_WITH_AES_128_CBC_SHA"
    TLS_ECDHE_ECDSA_WITH_AES256_GCM_SHA384 = "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384"
    TLS_ECDHE_ECDSA_WITH_AES128_GCM_SHA256 = "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256"
    TLS_ECDHE_ECDSA_WITH_AES256_CBC_SHA384 = "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384"
    TLS_ECDHE_ECDSA_WITH_AES128_CBC_SHA256 = "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256"
    TLS_ECDHE_ECDSA_WITH_AES256_CBC_SHA = "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA"
    TLS_ECDHE_ECDSA_WITH_AES128_CBC_SHA = "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA"
    TLS_DHE_DSS_WITH_AES256_CBC_SHA256 = "TLS_DHE_DSS_WITH_AES_256_CBC_SHA256"
    TLS_DHE_DSS_WITH_AES128_CBC_SHA256 = "TLS_DHE_DSS_WITH_AES_128_CBC_SHA256"
    TLS_DHE_DSS_WITH_AES256_CBC_SHA = "TLS_DHE_DSS_WITH_AES_256_CBC_SHA"
    TLS_DHE_DSS_WITH_AES128_CBC_SHA = "TLS_DHE_DSS_WITH_AES_128_CBC_SHA"
    TLS_RSA_WITH3_DES_EDE_CBC_SHA = "TLS_RSA_WITH_3DES_EDE_CBC_SHA"
    TLS_DHE_DSS_WITH3_DES_EDE_CBC_SHA = "TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA"
    TLS_ECDHE_RSA_WITH_AES128_GCM_SHA256 = "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"
    TLS_ECDHE_RSA_WITH_AES256_GCM_SHA384 = "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384"

class ApplicationGatewaySslPolicyName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Ssl predefined policy name enums.
    """

    APP_GW_SSL_POLICY20150501 = "AppGwSslPolicy20150501"
    APP_GW_SSL_POLICY20170401 = "AppGwSslPolicy20170401"
    APP_GW_SSL_POLICY20170401_S = "AppGwSslPolicy20170401S"

class ApplicationGatewaySslPolicyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Type of Ssl Policy.
    """

    PREDEFINED = "Predefined"
    CUSTOM = "Custom"

class ApplicationGatewaySslProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Ssl protocol enums.
    """

    TL_SV1_0 = "TLSv1_0"
    TL_SV1_1 = "TLSv1_1"
    TL_SV1_2 = "TLSv1_2"
class ApplicationGatewayTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Tier of an application gateway.
    """

    STANDARD = "Standard"
    WAF = "WAF"

class AssociationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The association type of the child resource to the parent resource.
    """

    ASSOCIATED = "Associated"
    CONTAINS = "Contains"

class AuthenticationMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """VPN client Authentication Method. Possible values are: 'EAPTLS' and 'EAPMSCHAPv2'.
    """

    EAPTLS = "EAPTLS"
    EAPMSCHA_PV2 = "EAPMSCHAPv2"

class AuthorizationUseStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
    """

    AVAILABLE = "Available"
    IN_USE = "InUse"

class BgpPeerState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The BGP peer state.
    """

    UNKNOWN = "Unknown"
    STOPPED = "Stopped"
    IDLE = "Idle"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"

class ConnectionState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The connection state.
    """

    REACHABLE = "Reachable"
    UNREACHABLE = "Unreachable"
    UNKNOWN = "Unknown"

class ConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The connection status.
    """

    UNKNOWN = "Unknown"
    CONNECTED = "Connected"
    DISCONNECTED = "Disconnected"
    DEGRADED = "Degraded"

class DhGroup(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The DH Groups used in IKE Phase 1 for initial SA.
    """

    NONE = "None"
    DH_GROUP1 = "DHGroup1"
    DH_GROUP2 = "DHGroup2"
    DH_GROUP14 = "DHGroup14"
    DH_GROUP2048 = "DHGroup2048"
    ECP256 = "ECP256"
    ECP384 = "ECP384"
    DH_GROUP24 = "DHGroup24"

class Direction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The direction of the packet represented as a 5-tuple.
    """

    INBOUND = "Inbound"
    OUTBOUND = "Outbound"

class EffectiveRouteSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Who created the route. Possible values are: 'Unknown', 'User', 'VirtualNetworkGateway', and
    'Default'.
    """

    UNKNOWN = "Unknown"
    USER = "User"
    VIRTUAL_NETWORK_GATEWAY = "VirtualNetworkGateway"
    DEFAULT = "Default"

class EffectiveRouteState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The value of effective route. Possible values are: 'Active' and 'Invalid'.
    """

    ACTIVE = "Active"
    INVALID = "Invalid"

class EffectiveSecurityRuleProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The network protocol this rule applies to. Possible values are: 'Tcp', 'Udp', and 'All'.
    """

    TCP = "Tcp"
    UDP = "Udp"
    ALL = "All"

class EvaluationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Connectivity analysis evaluation state.
    """

    NOT_STARTED = "NotStarted"
    IN_PROGRESS = "InProgress"
    COMPLETED = "Completed"

class ExpressRouteCircuitPeeringAdvertisedPublicPrefixState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """AdvertisedPublicPrefixState of the Peering resource. Possible values are 'NotConfigured',
    'Configuring', 'Configured', and 'ValidationNeeded'.
    """

    NOT_CONFIGURED = "NotConfigured"
    CONFIGURING = "Configuring"
    CONFIGURED = "Configured"
    VALIDATION_NEEDED = "ValidationNeeded"

class ExpressRouteCircuitPeeringState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The state of peering. Possible values are: 'Disabled' and 'Enabled'.
    """

    DISABLED = "Disabled"
    ENABLED = "Enabled"

class ExpressRouteCircuitPeeringType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The PeeringType. Possible values are: 'AzurePublicPeering', 'AzurePrivatePeering', and
    'MicrosoftPeering'.
    """

    AZURE_PUBLIC_PEERING = "AzurePublicPeering"
    AZURE_PRIVATE_PEERING = "AzurePrivatePeering"
    MICROSOFT_PEERING = "MicrosoftPeering"

class ExpressRouteCircuitSkuFamily(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The family of the SKU. Possible values are: 'UnlimitedData' and 'MeteredData'.
    """

    UNLIMITED_DATA = "UnlimitedData"
    METERED_DATA = "MeteredData"

class ExpressRouteCircuitSkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The tier of the SKU. Possible values are 'Standard' and 'Premium'.
    """

    STANDARD = "Standard"
    PREMIUM = "Premium"

class IkeEncryption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IKE encryption algorithm (IKE phase 2).
    """

    DES = "DES"
    DES3 = "DES3"
    AES128 = "AES128"
    AES192 = "AES192"
    AES256 = "AES256"

class IkeIntegrity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IKE integrity algorithm (IKE phase 2).
    """

    MD5 = "MD5"
    SHA1 = "SHA1"
    SHA256 = "SHA256"
    SHA384 = "SHA384"

class IPAllocationMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """PrivateIP allocation method.
    """

    STATIC = "Static"
    DYNAMIC = "Dynamic"

class IpsecEncryption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IPSec encryption algorithm (IKE phase 1).
    """

    NONE = "None"
    DES = "DES"
    DES3 = "DES3"
    AES128 = "AES128"
    AES192 = "AES192"
    AES256 = "AES256"
    GCMAES128 = "GCMAES128"
    GCMAES192 = "GCMAES192"
    GCMAES256 = "GCMAES256"

class IpsecIntegrity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The IPSec integrity algorithm (IKE phase 1).
    """

    MD5 = "MD5"
    SHA1 = "SHA1"
    SHA256 = "SHA256"
    GCMAES128 = "GCMAES128"
    GCMAES192 = "GCMAES192"
    GCMAES256 = "GCMAES256"

class IPVersion(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Available from Api-Version 2016-03-30 onwards, it represents whether the specific
    ipconfiguration is IPv4 or IPv6. Default is taken as IPv4. Possible values are: 'IPv4' and
    'IPv6'.
    """

    I_PV4 = "IPv4"
    I_PV6 = "IPv6"

class IssueType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of issue.
    """

    UNKNOWN = "Unknown"
    AGENT_STOPPED = "AgentStopped"
    GUEST_FIREWALL = "GuestFirewall"
    DNS_RESOLUTION = "DnsResolution"
    SOCKET_BIND = "SocketBind"
    NETWORK_SECURITY_RULE = "NetworkSecurityRule"
    USER_DEFINED_ROUTE = "UserDefinedRoute"
    PORT_THROTTLED = "PortThrottled"
    PLATFORM = "Platform"

class LoadBalancerSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of a load balancer SKU.
    """

    BASIC = "Basic"
    STANDARD = "Standard"

class LoadDistribution(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The load distribution policy for this rule. Possible values are 'Default', 'SourceIP', and
    'SourceIPProtocol'.
    """

    DEFAULT = "Default"
    SOURCE_IP = "SourceIP"
    SOURCE_IP_PROTOCOL = "SourceIPProtocol"

class NetworkOperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Status of the Azure async operation. Possible values are: 'InProgress', 'Succeeded', and
    'Failed'.
    """

    IN_PROGRESS = "InProgress"
    SUCCEEDED = "Succeeded"
    FAILED = "Failed"
class NextHopType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Next hop type.
    """

    INTERNET = "Internet"
    VIRTUAL_APPLIANCE = "VirtualAppliance"
    VIRTUAL_NETWORK_GATEWAY = "VirtualNetworkGateway"
    VNET_LOCAL = "VnetLocal"
    HYPER_NET_GATEWAY = "HyperNetGateway"
    NONE = "None"

class Origin(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The origin of the issue.
    """

    LOCAL = "Local"
    INBOUND = "Inbound"
    OUTBOUND = "Outbound"

class PcError(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Error codes reported for a packet capture session.
    """

    INTERNAL_ERROR = "InternalError"
    AGENT_STOPPED = "AgentStopped"
    CAPTURE_FAILED = "CaptureFailed"
    LOCAL_FILE_FAILED = "LocalFileFailed"
    STORAGE_FAILED = "StorageFailed"

class PcProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Protocol to be filtered on.
    """

    TCP = "TCP"
    UDP = "UDP"
    ANY = "Any"

class PcStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The status of the packet capture session.
    """

    NOT_STARTED = "NotStarted"
    RUNNING = "Running"
    STOPPED = "Stopped"
    ERROR = "Error"
    UNKNOWN = "Unknown"

class PfsGroup(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The DH Groups used in IKE Phase 2 for new child SA.
    """

    NONE = "None"
    PFS1 = "PFS1"
    PFS2 = "PFS2"
    PFS2048 = "PFS2048"
    ECP256 = "ECP256"
    ECP384 = "ECP384"
    PFS24 = "PFS24"

class ProbeProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The protocol of the end point. Possible values are: 'Http' or 'Tcp'. If 'Tcp' is specified, a
    received ACK is required for the probe to be successful. If 'Http' is specified, a 200 OK
    response from the specifies URI is required for the probe to be successful.
    """

    HTTP = "Http"
    TCP = "Tcp"

class ProcessorArchitecture(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """VPN client Processor Architecture. Possible values are: 'AMD64' and 'X86'.
    """

    AMD64 = "Amd64"
    X86 = "X86"

class Protocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Protocol to be verified on.
    """

    TCP = "TCP"
    UDP = "UDP"

class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The provisioning state of the resource.
    """

    SUCCEEDED = "Succeeded"
    UPDATING = "Updating"
    DELETING = "Deleting"
    FAILED = "Failed"

class PublicIPAddressSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Name of a public IP address SKU.
    """

    BASIC = "Basic"
    STANDARD = "Standard"

class RouteFilterRuleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The rule type of the rule. Valid value is: 'Community'.
    """

    COMMUNITY = "Community"

class RouteNextHopType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of Azure hop the packet should be sent to. Possible values are:
    'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'.
    """

    VIRTUAL_NETWORK_GATEWAY = "VirtualNetworkGateway"
    VNET_LOCAL = "VnetLocal"
    INTERNET = "Internet"
    VIRTUAL_APPLIANCE = "VirtualAppliance"
    NONE = "None"

class SecurityRuleAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Whether network traffic is allowed or denied. Possible values are: 'Allow' and 'Deny'.
    """

    ALLOW = "Allow"
    DENY = "Deny"

class SecurityRuleDirection(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The direction of the rule. Possible values are: 'Inbound' and 'Outbound'.
    """

    INBOUND = "Inbound"
    OUTBOUND = "Outbound"

class SecurityRuleProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and '*'.
    """

    TCP = "Tcp"
    UDP = "Udp"
    ASTERISK = "*"

class ServiceProviderProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The ServiceProviderProvisioningState state of the resource. Possible values are
    'NotProvisioned', 'Provisioning', 'Provisioned', and 'Deprovisioning'.
    """

    NOT_PROVISIONED = "NotProvisioned"
    PROVISIONING = "Provisioning"
    PROVISIONED = "Provisioned"
    DEPROVISIONING = "Deprovisioning"

class Severity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The severity of the issue.
    """

    ERROR = "Error"
    WARNING = "Warning"

class TransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The transport protocol for the endpoint. Possible values are 'Udp', 'Tcp', or 'All'.
    """

    UDP = "Udp"
    TCP = "Tcp"
    ALL = "All"

class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """An enum describing the unit of measurement.
    """

    COUNT = "Count"

class VirtualNetworkGatewayConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Virtual network Gateway connection status.
    """

    UNKNOWN = "Unknown"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"
    NOT_CONNECTED = "NotConnected"

class VirtualNetworkGatewayConnectionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Gateway connection type. Possible values are: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', and
    'VPNClient'.
    """

    I_PSEC = "IPsec"
    VNET2_VNET = "Vnet2Vnet"
    EXPRESS_ROUTE = "ExpressRoute"
    VPN_CLIENT = "VPNClient"

class VirtualNetworkGatewaySkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Gateway SKU name.
    """

    BASIC = "Basic"
    HIGH_PERFORMANCE = "HighPerformance"
    STANDARD = "Standard"
    ULTRA_PERFORMANCE = "UltraPerformance"
    VPN_GW1 = "VpnGw1"
    VPN_GW2 = "VpnGw2"
    VPN_GW3 = "VpnGw3"

class VirtualNetworkGatewaySkuTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Gateway SKU tier.
    """

    BASIC = "Basic"
    HIGH_PERFORMANCE = "HighPerformance"
    STANDARD = "Standard"
    ULTRA_PERFORMANCE = "UltraPerformance"
    VPN_GW1 = "VpnGw1"
    VPN_GW2 = "VpnGw2"
    VPN_GW3 = "VpnGw3"

class VirtualNetworkGatewayType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of this virtual network gateway. Possible values are: 'Vpn' and 'ExpressRoute'.
    """

    VPN = "Vpn"
    EXPRESS_ROUTE = "ExpressRoute"

class VirtualNetworkPeeringState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The status of the virtual network peering. Possible values are 'Initiated', 'Connected', and
    'Disconnected'.
    """

    INITIATED = "Initiated"
    CONNECTED = "Connected"
    DISCONNECTED = "Disconnected"

class VpnClientProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """VPN client protocol enabled for the virtual network gateway.
    """

    IKE_V2 = "IkeV2"
    SSTP = "SSTP"

class VpnType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of this virtual network gateway. Possible values are: 'PolicyBased' and 'RouteBased'.
    """

    POLICY_BASED = "PolicyBased"
    ROUTE_BASED = "RouteBased"
| |
import feedparser
import os.path
import sys, getopt, time, socket, os, csv, re, json
import requests
import xml.etree.ElementTree as ET
import zipfile, zlib
import argparse
import subprocess
import itertools
import pandas as pd
import urllib2
from urllib2 import urlopen
from urllib2 import URLError
from urllib2 import HTTPError
from os import listdir
from os.path import isfile, join
from collections import Counter
from bs4 import BeautifulSoup
from elasticsearch import Elasticsearch
from elasticsearch.helpers import streaming_bulk, scan
from datetime import datetime
from datetime import date, timedelta
# --
# cli
parser = argparse.ArgumentParser(description='grab_new_filings')
# backfill every quarter since 2009 vs. only the latest completed quarter
parser.add_argument("--from-scratch", action = 'store_true')
parser.add_argument("--most-recent", action = 'store_true')
parser.add_argument("--config-path", type=str, action='store')
args = parser.parse_args()
# --
# config
config_path = args.config_path
# config supplies the ES endpoint plus the target index/doc-type names
config = json.load(open(config_path))
# --
# es connection
client = Elasticsearch([{
    'host' : config["es"]["host"],
    'port' : config["es"]["port"]
}], timeout = 60000)
# --
# functions
def ingest_section(sec, period, base_dir='/home/ubuntu/data/XBRL_AQFS/'):
    """Parse one tab-separated section file of an SEC AQFS quarterly dump.

    sec      -- section name: 'sub', 'num', 'pre' or 'tag'
    period   -- quarter label, e.g. '2015q1'; selects <base_dir><period>/
    base_dir -- root of the extracted archives (parameterized so the function
                is testable; the default preserves the original behavior)

    Returns a list of rows (lists of strings). Only the last populated
    column (index 35 for 'sub', 8 for the others) has its trailing newline
    stripped, matching the original parsing behavior.

    Fixes: the original bound ``name = 'lst_' + sec`` and immediately
    overwrote it (dead code), and never closed the file handle.
    """
    # 'sub' rows carry 36 tab-separated fields; the other sections carry 9.
    end = 35 if sec == 'sub' else 8
    rows = []
    # context manager guarantees the handle is closed (original leaked it)
    with open(base_dir + period + '/' + sec + '.txt', 'r') as f:
        for line in f:
            row = line.split('\t')
            row[end] = row[end].replace('\n', '')
            rows.append(row)
    return rows
def build_df(sec_list):
    """Turn a list of rows (first row = column names) into a DataFrame."""
    frame = pd.DataFrame(sec_list)
    header = frame.iloc[0]
    # drop the header row, then label the remaining columns with it
    frame = frame.drop(frame.index[0])
    frame.columns = header
    return frame
def to_dict(df):
    """Convert a DataFrame into a list of per-row dicts keyed by column name."""
    keys = list(df.columns.values)
    return [dict(zip(keys, row)) for row in df.values.tolist()]
def run():
    """Build the list of quarterly periods to load, then ingest each one.

    --from-scratch : every quarter from 2009q1 through the last completed
                     quarter of the current year
    --most-recent  : only the most recently completed quarter

    Fix: quarter arithmetic now uses floor division (//). The original used
    '/', which yields a float under Python 3 and breaks both range() and the
    'YYYYqN' label.
    """
    periods = []
    today = date.today()
    if args.from_scratch:
        for yr in range(2009, today.year + 1):
            if yr < today.year:
                qtrs = range(1, 5)
            else:
                # only quarters that have fully elapsed this year
                qtrs = range(1, (today.month // 3) + 1)
            for qtr in qtrs:
                periods.append(str(yr) + 'q' + str(qtr))
    elif args.most_recent:
        # NOTE: months 1-2 yield 'q0', as in the original -- confirm intent
        periods.append(str(today.year) + 'q' + str(today.month // 3))
    for period in periods:
        print('___ ingesting ___' + period)
        ingest(period)
def ingest(period):
    """Download one quarterly AQFS archive from sec.gov, join its sections,
    and index one Elasticsearch document per submission.

    period -- quarter label, e.g. '2015q1'

    Fixes relative to the original:
    - ``to_dict(self, sub)`` raised NameError: ``self`` is undefined in a
      module-level function; the call is now ``to_dict(sub)``.
    - the zip payload is written in binary mode ('wb'), not text mode.
    - the redundant ``inf.close()`` inside the ``with`` block is gone.
    - the progress marker fires every 100 submissions; the original tested
      ``counter % 100 == True`` which fired at 1, 101, 201, ...
    - the five-level try/except pyramid is replaced by a setdefault chain
      with identical first-write-wins semantics.
    """
    response = urllib2.urlopen('https://www.sec.gov/data/financial-statements/' + str(period) + '.zip')
    aqfs = response.read()
    # binary mode: the payload is raw zip data
    with open('/home/ubuntu/data/XBRL_AQFS/' + str(period) + '.zip', 'wb') as inf:
        inf.write(aqfs)
    with zipfile.ZipFile('/home/ubuntu/data/XBRL_AQFS/' + str(period) + '.zip', 'r') as z:
        z.extractall('/home/ubuntu/data/XBRL_AQFS/' + str(period) + '/')
    # parse the four section files and join facts with their tag/presentation
    num = build_df(ingest_section('num', period))
    sub = build_df(ingest_section('sub', period))
    pre = build_df(ingest_section('pre', period))
    tag = build_df(ingest_section('tag', period))
    numTag = pd.merge(num, tag, on=['tag', 'version'])
    numTagPre = pd.merge(numTag, pre, on=['tag', 'adsh', 'version'])
    _dict = to_dict(numTagPre)
    # BUG FIX: original called to_dict(self, sub); 'self' is undefined here.
    _head = to_dict(sub)
    counter = 0
    for submission in _head:
        counter += 1
        if counter % 100 == 0:
            print(counter)
        doc = {'submission': submission, 'facts': {}}
        # facts for this submission's period, excluding co-registrant,
        # abstract and custom-taxonomy rows
        facts = [f for f in _dict
                 if f['adsh'] == submission['adsh']
                 and f['ddate'] == submission['period']
                 and f['coreg'] == ''
                 and f['abstract'] == '0'
                 and f['custom'] == '0']
        for i in facts:
            doc_core = {
                'line': i['line'],
                'uom': i['uom'],
                'value': i['value'],
                'iord': i['iord'],
                'crdr': i['crdr'],
                'tlabel': i['tlabel'],
                'stmt': i['stmt'],
                'inpth': i['inpth'],
                'plabel': i['plabel'],
            }
            # Build facts[tag][version][qtrs][report][line_uom]; the first
            # fact written for a slot wins, matching the original pyramid.
            slot = (doc['facts']
                    .setdefault(i['tag'], {})
                    .setdefault(i['version'], {})
                    .setdefault(i['qtrs'], {})
                    .setdefault(i['report'], {}))
            key = str(i['line']) + '_' + i['uom']
            if key not in slot:
                slot[key] = doc_core
        client.index(index=config['xbrl_aqfs']['index'], doc_type=config['xbrl_aqfs']['_type'],
                     body=doc, id=doc['submission']['adsh'])
# --
# run
run()  # executed at import time: this module is a CLI script, not a library
| |
from __future__ import absolute_import, print_function
import pytz
import six
from croniter import croniter
from datetime import datetime, timedelta
from dateutil import rrule
from django.db import models
from django.db.models import Q
from django.utils import timezone
from uuid import uuid4
from sentry.constants import ObjectStatus
from sentry.db.models import (
Model,
BoundedPositiveIntegerField,
EncryptedJsonField,
UUIDField,
sane_repr,
)
# Maps the user-facing interval unit name to the dateutil rrule frequency
# constant consumed by get_next_schedule().
SCHEDULE_INTERVAL_MAP = {
    "year": rrule.YEARLY,
    "month": rrule.MONTHLY,
    "week": rrule.WEEKLY,
    "day": rrule.DAILY,
    "hour": rrule.HOURLY,
    "minute": rrule.MINUTELY,
}
def generate_secret():
    """Return a 64-character random hex secret (two concatenated UUID4s)."""
    parts = [uuid4().hex for _ in range(2)]
    return "".join(parts)
def get_next_schedule(base_datetime, schedule_type, schedule):
    """Compute the next scheduled datetime strictly after ``base_datetime``.

    schedule_type -- ScheduleType.CRONTAB (``schedule`` is a crontab string)
                     or ScheduleType.INTERVAL (``schedule`` is (count, unit))
    Raises NotImplementedError for any other schedule type.
    """
    if schedule_type == ScheduleType.CRONTAB:
        return croniter(schedule, base_datetime).get_next(datetime)
    if schedule_type == ScheduleType.INTERVAL:
        count, unit_name = schedule
        # rrule's freq/interval naming is inverted relative to our
        # (count, unit) pair; ask for two occurrences so we can skip one
        # that lands exactly on base_datetime
        occurrences = rrule.rrule(
            freq=SCHEDULE_INTERVAL_MAP[unit_name],
            interval=count,
            dtstart=base_datetime,
            count=2,
        )
        first = occurrences[0]
        return first if first > base_datetime else occurrences[1]
    raise NotImplementedError("unknown schedule_type")
def get_monitor_context(monitor):
    """Serialize a Monitor into the "monitor" event-context dict.

    The integer ``schedule_type`` entry is replaced with its display name so
    the rendered context is human readable.
    """
    config = monitor.config.copy()
    if "schedule_type" in config:
        config["schedule_type"] = monitor.get_schedule_type_display()
    return {
        "id": six.text_type(monitor.guid),
        "name": monitor.name,
        # BUG FIX: return the transformed copy; the original returned
        # monitor.config, silently discarding the schedule_type rewrite.
        "config": config,
        "status": monitor.get_status_display(),
        "type": monitor.get_type_display(),
    }
class MonitorStatus(ObjectStatus):
    # Extends the base ObjectStatus lifecycle states (active/disabled/
    # pending_deletion/deletion_in_progress) with check-in outcomes.
    OK = 4
    ERROR = 5
    @classmethod
    def as_choices(cls):
        # Django choices: (db value, label) pairs
        return (
            (cls.ACTIVE, u"active"),
            (cls.DISABLED, u"disabled"),
            (cls.PENDING_DELETION, u"pending_deletion"),
            (cls.DELETION_IN_PROGRESS, u"deletion_in_progress"),
            (cls.OK, u"ok"),
            (cls.ERROR, u"error"),
        )
class MonitorType(object):
    """Integer enum describing what kind of monitor a row represents."""
    UNKNOWN = 0
    HEALTH_CHECK = 1
    HEARTBEAT = 2
    CRON_JOB = 3
    @classmethod
    def as_choices(cls):
        """Return (value, label) pairs suitable for a Django choices list."""
        return (
            (cls.UNKNOWN, "unknown"),
            (cls.HEALTH_CHECK, "health_check"),
            (cls.HEARTBEAT, "heartbeat"),
            (cls.CRON_JOB, "cron_job"),
        )
    @classmethod
    def get_name(cls, value):
        """Map an integer monitor type back to its label string."""
        labels = dict(cls.as_choices())
        return labels[value]
class MonitorFailure(object):
    """Failure-reason strings attached to monitor failure events."""
    UNKNOWN = "unknown"
    MISSED_CHECKIN = "missed_checkin"
    DURATION = "duration"
class ScheduleType(object):
    """Integer enum for how a monitor's schedule is expressed."""
    UNKNOWN = 0
    CRONTAB = 1
    INTERVAL = 2
    @classmethod
    def as_choices(cls):
        """Return (value, label) pairs suitable for a Django choices list."""
        return ((cls.UNKNOWN, "unknown"), (cls.CRONTAB, "crontab"), (cls.INTERVAL, "interval"))
    @classmethod
    def get_name(cls, value):
        """Map an integer schedule type back to its label string."""
        labels = dict(cls.as_choices())
        return labels[value]
class Monitor(Model):
    """A scheduled job ("cron monitor") whose check-ins Sentry tracks."""
    # Core model: participates in Sentry's export/import machinery.
    __core__ = True
    guid = UUIDField(unique=True, auto_add=True)
    organization_id = BoundedPositiveIntegerField(db_index=True)
    project_id = BoundedPositiveIntegerField(db_index=True)
    name = models.CharField(max_length=128)
    status = BoundedPositiveIntegerField(
        default=MonitorStatus.ACTIVE, choices=MonitorStatus.as_choices()
    )
    type = BoundedPositiveIntegerField(
        default=MonitorType.UNKNOWN,
        choices=[(k, six.text_type(v)) for k, v in MonitorType.as_choices()],
    )
    # schedule, schedule_type, timezone, checkin_margin, ... stored encrypted
    config = EncryptedJsonField(default=dict)
    next_checkin = models.DateTimeField(null=True)
    last_checkin = models.DateTimeField(null=True)
    date_added = models.DateTimeField(default=timezone.now)
    class Meta:
        app_label = "sentry"
        db_table = "sentry_monitor"
        index_together = (("type", "next_checkin"),)
    __repr__ = sane_repr("guid", "project_id", "name")
    def get_schedule_type_display(self):
        """Return the human-readable name of the configured schedule type."""
        return ScheduleType.get_name(self.config.get("schedule_type", ScheduleType.CRONTAB))
    def get_audit_log_data(self):
        """Return the fields recorded in audit-log entries for this monitor."""
        return {"name": self.name, "type": self.type, "status": self.status, "config": self.config}
    def get_next_scheduled_checkin(self, last_checkin=None):
        """Compute when the next check-in is due, in the monitor's timezone.

        NOTE(review): assumes last_checkin (or self.last_checkin) is not
        None; a never-checked-in monitor would raise here -- confirm callers.
        """
        if last_checkin is None:
            last_checkin = self.last_checkin
        tz = pytz.timezone(self.config.get("timezone") or "UTC")
        schedule_type = self.config.get("schedule_type", ScheduleType.CRONTAB)
        base_datetime = last_checkin.astimezone(tz)
        next_checkin = get_next_schedule(base_datetime, schedule_type, self.config["schedule"])
        # checkin_margin: grace period (minutes) before a check-in is "missed"
        return next_checkin + timedelta(minutes=int(self.config.get("checkin_margin") or 0))
    def mark_failed(self, last_checkin=None, reason=MonitorFailure.UNKNOWN):
        """Move the monitor to ERROR and emit a synthetic failure event.

        Returns False when a newer check-in already advanced the row (the
        guarded UPDATE below matches nothing); True otherwise.
        """
        # imported locally to avoid circular imports at module load time
        from sentry.coreapi import insert_data_to_database_legacy
        from sentry.event_manager import EventManager
        from sentry.models import Project
        from sentry.signals import monitor_failed
        if last_checkin is None:
            # no explicit check-in supplied: schedule from "now"
            next_checkin_base = timezone.now()
            last_checkin = self.last_checkin or timezone.now()
        else:
            next_checkin_base = last_checkin
        # Guarded update: only applies when our check-in is not older than the
        # stored one, preventing races between concurrent failure marks.
        affected = (
            type(self)
            .objects.filter(
                Q(last_checkin__lte=last_checkin) | Q(last_checkin__isnull=True), id=self.id
            )
            .update(
                next_checkin=self.get_next_scheduled_checkin(next_checkin_base),
                status=MonitorStatus.ERROR,
                last_checkin=last_checkin,
            )
        )
        if not affected:
            return False
        # Synthesize an error event, grouped by (monitor guid, reason).
        event_manager = EventManager(
            {
                "logentry": {"message": "Monitor failure: %s (%s)" % (self.name, reason)},
                "contexts": {"monitor": get_monitor_context(self)},
                "fingerprint": ["monitor", six.text_type(self.guid), reason],
            },
            project=Project(id=self.project_id),
        )
        event_manager.normalize()
        data = event_manager.get_data()
        insert_data_to_database_legacy(data)
        monitor_failed.send(monitor=self, sender=type(self))
        return True
| |
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import platform
import time
import tinctest
from gppylib.commands.base import Command
from tinctest.lib import run_shell_command
class GPFDISTError(Exception):
    """Raised when a gpfdist process cannot be started or stopped."""
    pass
class GPFDIST:
    """Manage a gpfdist (Greenplum file distribution) server instance.

    All process control runs through ``gpssh`` against ``hostname``; note
    the stop/kill paths currently assume the local host (see method notes).
    """
    def __init__(self, port, hostname, directory=None):
        """Record server coordinates; no process is started here.

        @param port: port (string) gpfdist listens on
        @param hostname: host where gpfdist runs / will run
        @param directory: directory to serve; defaults to the CWD
        """
        self.port = port
        self.hostname = hostname
        self.secure = False
        self.ssl_cert = ""
        if directory is None:
            directory = os.getcwd()
        self.directory = directory
        self.gphome = os.environ.get("GPHOME")
        # Ensure we use compatible ps command on Solaris platform
        self.ps_command = 'ps'
        if platform.system() in ['SunOS']:
            self.ps_command = '/bin/ps'
    def gethost(self):
        """Return the configured hostname."""
        return self.hostname
    def getport(self):
        """Return the configured port."""
        return self.port
    def getdir(self):
        """Return the directory being served."""
        return self.directory
    def startGpfdist(self, options="", port=None, raise_assert=True, ssl=None):
        """
        start hosting the data
        @comment: Why do we need to ssh to a host that is localhost
        killGpfdist does not support kill process on other host
        @note: If we are to use ssh subprocess, we will go to the home folder,
        let's revisit this with remote command so that it works for starting
        gpfdist on remote host
        """
        if port is None:
            port = self.port
        else:
            # NOTE(review): only an explicitly passed port is coerced to str;
            # self.port is used as-is, so construct with a string port
            port = str(port)
        if ssl is None:
            ssl = ""
        else:
            # remember the cert so later curl checks switch to https
            self.secure = True
            self.ssl_cert = ssl
            ssl = "--ssl %s" % self.ssl_cert
        directory = self.directory
        gpfdist_cmd = "gpfdist -p %s -d %s %s %s" % (port, directory, options, ssl)
        cmd = "gpssh -h %s 'source %s/greenplum_path.sh; %s > /dev/null &'" % (self.hostname, self.gphome, gpfdist_cmd)
        res = {'rc':0, 'stderr':'', 'stdout':''}
        run_shell_command(cmd, 'gpfdist', res)
        if res['rc'] > 0:
            raise Exception("Failed to start gpfdist on host %s and port %s with non-zero rc" %(self.hostname, port))
        return self.check_gpfdist_process(port=port, raise_assert=raise_assert)
    def check_gpfdist_process(self, wait=60, port=None, raise_assert=True):
        """
        Check for the gpfdist process
        Wait at least 60s until gpfdist starts, else raise an exception
        Returns True once the process answers; returns None when
        raise_assert is False and it never comes up.
        """
        if port is None:
            port = self.port
        process_started = False
        count = 0
        while (not process_started and count<wait):
            # [g]pfdist trick keeps grep from matching its own command line
            cmd_str = " | ".join([
                self.ps_command + ' -ef',
                'grep \"[g]pfdist -p %s\"' % (port)])
            cmd = "gpssh -h %s '%s'" %(self.hostname, cmd_str)
            res = {'rc':0, 'stderr':'', 'stdout':''}
            run_shell_command(cmd, 'gpfdist process check', res)
            content = res['stdout']
            if len(content)>0:
                # find() > 0 (not >= 0): gpssh prefixes each line with the
                # hostname, so a real match never starts at offset 0
                if content.find("gpfdist -p %s" % port)>0:
                    process_started = self.is_gpfdist_connected(port)
                    if process_started:
                        return True
            count = count + 1
            time.sleep(1)
        if raise_assert:
            raise GPFDISTError("Could not start gpfdist process")
        else:
            tinctest.logger.warning("Could not start gpfdist process")
    def is_gpfdist_connected(self, port=None):
        """
        Check gpfdist by connecting after starting process
        @return: True or False
        @todo: Need the absolute path
        """
        if port is None:
            port = self.port
        url = "http://%s:%s" % (self.hostname, port)
        if self.secure:
            # -k: accept the self-signed test certificate
            url = url.replace("http:", "https:") + " -k"
        cmd_str = "curl %s" %url
        res = {'rc':0, 'stderr':'', 'stdout':''}
        run_shell_command(cmd_str, 'gpfdist process check', res)
        content = res['stdout']
        # curl reports "couldn't connect ..." on connection failure
        if content.find("couldn't")>=0:
            return False
        return True
    def is_port_released(self, port=None):
        """
        Check whether the port is released after stopping gpfdist
        @return: True or False
        """
        if port is None:
            port = self.port
        cmd_str = "netstat -an | grep %s" % port
        cmd = "gpssh -h %s '%s'" %(self.hostname, cmd_str)
        res = {'rc':0, 'stderr':'', 'stdout':''}
        run_shell_command(cmd, 'gpfdist port check', res)
        content = res['stdout']
        # strip hostname prefix from gpssh output
        content = content.replace(self.hostname, '').strip('[]').strip()
        if len(content)>0:
            return False
        return True
    def is_gpfdist_killed(self, port=None, wait=1):
        """
        Check whether the gpfdist process is killed
        Polls up to ``wait`` seconds; True when no process remains.
        """
        if port is None:
            port = self.port
        process_killed = False
        count = 0
        while (not process_killed and count < wait):
            cmd_str = " | ".join([
                self.ps_command + ' -ef',
                'grep \"[g]pfdist -p %s\"' % (port)])
            cmd = "gpssh -h %s '%s'" %(self.hostname, cmd_str)
            res = {'rc':0, 'stderr':'', 'stdout':''}
            run_shell_command(cmd, 'gpfdist process check', res)
            content = res['stdout']
            # strip hostname prefix from gpssh output
            content = content.replace(self.hostname, '').strip('[]').strip()
            if len(content)>0 or content.find("gpfdist -p %s" %port) > 0:
                tinctest.logger.warning("gpfdist process still exists on %s:%s" %(self.hostname, self.port))
            else:
                return True
            count = count + 1
            time.sleep(1)
        tinctest.logger.warning("gpfdist process not killed on %s:%s" %(self.hostname, self.port))
        return False
    def killGpfdist(self, wait=60, port=None):
        """
        kill the gpfdist process
        @change: Johnny Soedomo, check from netstat whether the system has released the process rather than waiting a flat 10s
        @todo: Support for stopping gpfdist process on remote host
        """
        if port is None:
            port = self.port
        # the '"'"' sequences re-quote the awk program so it survives the
        # extra shell layer introduced by gpssh
        cmd_str = ' | '.join([self.ps_command + " -ef",
                            "grep \"[g]pfdist -p %s\"" % (port),
                            "awk '\"'\"'{print $2}'\"'\"'",
                            "xargs kill"])
        cmd = "gpssh -h %s '%s'" %(self.hostname, cmd_str)
        res = {'rc':0, 'stderr':'', 'stdout':''}
        run_shell_command(cmd, 'kill gpfdist', res)
        if not self.is_gpfdist_killed():
            raise GPFDISTError("Could not kill gpfdist process on %s:%s" %(self.hostname, self.port))
        # Make sure the port is released
        is_released = False
        count = 0
        while (not is_released and count < wait):
            is_released = self.is_port_released()
            count = count + 1
            time.sleep(1)
| |
# -*- coding: utf-8 -*-
# See LICENSE file for copyright and license details
''' Test 'misc' module. '''
import unittest
from misery import (
misc,
)
class TestDiff(unittest.TestCase):
    ''' Test misc.diff func. '''
    def test_lists(self):
        ''' Diff lists. '''
        list1 = [1, 2, 3, 4, 5]
        list2 = [1, 2, 0, 0, 5]
        real_output = misc.diff(list1, list2)
        # Expected value is a unified diff of the two pretty-printed lists;
        # the literal must stay byte-exact (leading spaces are significant).
        expected_output = (
            '--- \n'
            '+++ \n'
            '@@ -1,7 +1,7 @@\n'
            ' [\n'
            '     1,\n'
            '     2,\n'
            '-    3,\n'
            '-    4,\n'
            '+    0,\n'
            '+    0,\n'
            '     5,\n'
            ' ]'
        )
        self.assertEqual(expected_output, real_output)
class TestPrettyPrinter(unittest.TestCase):
    ''' Test misc.pretty_print func. '''
    # Each test pins the exact rendering for one input shape; the expected
    # literals must stay byte-exact.
    def test_none(self):
        ''' Print None. '''
        input_data = None
        expected_output = '<None>'
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_empty_list(self):
        ''' Print empty list. '''
        input_data = []
        expected_output = '[]'
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_simple_list(self):
        ''' Print simple list. '''
        input_data = [1, 2, 3]
        expected_output = (
            '[\n'
            '    1,\n'
            '    2,\n'
            '    3,\n'
            ']'
        )
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_empty_map(self):
        ''' Print empty map. '''
        input_data = {}
        expected_output = '{\n}'
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_map_1(self):
        ''' Print map with one field. '''
        input_data = {'field': 1}
        expected_output = (
            '{\n'
            '    field: 1\n'
            '}'
        )
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_map_2(self):
        ''' Print map with two fields. '''
        input_data = {'1': 1, '2': 2}
        expected_output = (
            '{\n'
            '    1: 1\n'
            '    2: 2\n'
            '}'
        )
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_empty_tuple(self):
        ''' Print empty tuple. '''
        input_data = ()
        expected_output = (
            '<TUPLE>(\n'
            ')'
        )
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_simple_tuple(self):
        ''' Print simple tuple. '''
        input_data = (1, 2, 3)
        expected_output = (
            '<TUPLE>(\n'
            '    1,\n'
            '    2,\n'
            '    3,\n'
            ')'
        )
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_string(self):
        ''' Print string. '''
        input_data = 'hi'
        expected_output = '\"hi\"'
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_float(self):
        ''' Print floating point number. '''
        input_data = 1.1
        expected_output = '1.1'
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_empty_object(self):
        ''' Print object without fields. '''
        class TestClass:
            def __init__(self):
                pass
        input_data = TestClass()
        expected_output = 'TestClass()'
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_object_with_one_field(self):
        ''' Print object without fields. '''
        class TestClass:
            def __init__(self):
                self.field = 0
        input_data = TestClass()
        input_data.field = 1
        expected_output = 'TestClass(field=1)'
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
    def test_object_with_two_field(self):
        ''' Print object without fields. '''
        class TestClass:
            def __init__(self):
                self.field1 = 0
                self.field2 = 0
        input_data = TestClass()
        input_data.field1 = 1
        input_data.field2 = 'hi'
        # multi-field objects render one field per line
        expected_output = (
            'TestClass(\n'
            '    field1=1,\n'
            '    field2=\"hi\",\n'
            ')'
        )
        real_output = misc.pretty_print(input_data)
        self.assertEqual(expected_output, real_output)
class TestCaseMock:
    """Minimal stand-in for unittest.TestCase used to test assert_equal."""
    def __init__(self):
        # flips to False once fail() is invoked
        self.is_ok = True
    def assertEqual(self, expected, real):
        """Compare without recording anything (mirrors the real mock)."""
        return expected == real
    def fail(self, msg):
        """Record a failure; the message must be non-empty."""
        assert msg != ''
        self.is_ok = False
class TestAssertEqual(unittest.TestCase):
    ''' Test misc.assert_equal func. '''
    def test_failed(self):
        ''' A mismatch must flip the mock into the failed state. '''
        case = TestCaseMock()
        misc.assert_equal(case, 1, 2)
        self.assertEqual(False, case.is_ok)
    def test_passed(self):
        ''' Equal values leave the mock ok. '''
        case = TestCaseMock()
        misc.assert_equal(case, 1, 1)
        self.assertEqual(True, case.is_ok)
    def test_passed_2(self):
        ''' The mock's own assertEqual never records a failure. '''
        case = TestCaseMock()
        case.assertEqual(1, 1)
        self.assertEqual(True, case.is_ok)
class TestGetCallerFuncName(unittest.TestCase):
    # NOTE: relies on call-stack introspection -- the helper must be called
    # directly from the test method so the reported caller name matches.
    def test_check_caller_name(self):
        def helper_func():
            self.assertEqual(
                'test_check_caller_name',
                misc.get_caller_func_name(),
            )
        helper_func()
class TestFlattenTree(unittest.TestCase):
    ''' TestSuite for misc.flatten_tree() func. '''
    def test_simple(self):
        ''' Basic misc.flatten_tree() test. '''
        nested = [
            ['1', '2'],
            [[['3']]],
            '4',
        ]
        flattened = misc.flatten_tree(nested)
        misc.assert_equal(self, ['1', '2', '3', '4'], flattened)
    def test_bad_node_type_error(self):
        ''' A non-string, non-list node must raise. '''
        bad_tree = ['1', '2', 3, '4']
        self.assertRaisesRegexp(
            Exception,
            'Bad node type: .*int',
            misc.flatten_tree,
            bad_tree,
        )
class TestRemoveQuotationMarks(unittest.TestCase):
    ''' Test misc.remove_quotation_marks func. '''
    def test_1(self):
        ''' Surrounding double quotes are stripped. '''
        stripped = misc.remove_quotation_marks('"hi"')
        self.assertEqual(stripped, 'hi')
class TestTolist(unittest.TestCase):
    ''' Test misc.tolist func. '''
    def test_1(self):
        ''' None becomes an empty list. '''
        self.assertEqual([], misc.tolist(None))
    def test_2(self):
        ''' An empty list passes through. '''
        self.assertEqual([], misc.tolist([]))
    def test_3(self):
        ''' A populated list passes through unchanged. '''
        self.assertEqual([1, 2], misc.tolist([1, 2]))
    def test_obj_to_list(self):
        ''' A scalar is wrapped in a single-element list. '''
        self.assertEqual([1], misc.tolist(1))
# vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:
| |
# Copyright 2016 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""S3 Resource Manager
Filters:
The generic Values filters (jmespath) expression and Or filter are
available with all resources, including buckets, we include several
additional bucket data (Tags, Replication, Acl, Policy) as keys within
a bucket representation.
Actions:
encrypt-keys
Scan all keys in a bucket and optionally encrypt them in place.
global-grants
Check bucket acls for global grants
encryption-policy
Attach an encryption required policy to a bucket, this will break
applications that are not using encryption, including aws log
delivery.
"""
from botocore.client import Config
from botocore.exceptions import ClientError
from botocore.vendored.requests.exceptions import SSLError
from concurrent.futures import as_completed
import functools
import json
import itertools
import logging
import math
import os
import time
import ssl
from c7n import executor
from c7n.actions import ActionRegistry, BaseAction, AutoTagUser
from c7n.filters import FilterRegistry, Filter, CrossAccountAccessFilter
from c7n.manager import resources
from c7n.query import QueryResourceManager, ResourceQuery
from c7n.tags import Tag
from c7n.utils import chunks, local_session, set_annotation, type_schema
"""
TODO:
- How does replication status effect in place encryption.
- Test glacier support
"""
log = logging.getLogger('custodian.s3')
# Per-resource registries; filters/actions below self-register into these.
filters = FilterRegistry('s3.filters')
actions = ActionRegistry('s3.actions')
actions.register('auto-tag-user', AutoTagUser)
# 2 GiB; relevant to in-place copy operations on large keys
MAX_COPY_SIZE = 1024 * 1024 * 1024 * 2
@resources.register('s3')
class S3(QueryResourceManager):
    """Resource manager for S3 buckets, augmented with per-bucket config."""
    class resource_type(ResourceQuery.resolve("aws.s3.bucket")):
        # CloudWatch dimension used for bucket metrics
        dimension = 'BucketName'
    executor_factory = executor.ThreadPoolExecutor
    filter_registry = filters
    action_registry = actions
    def __init__(self, ctx, data):
        super(S3, self).__init__(ctx, data)
        self.log_dir = ctx.log_dir
    def augment(self, buckets):
        # Fetch extra config state (see S3_AUGMENT_TABLE) for each bucket.
        # NOTE(review): an empty bucket list yields max_workers=0, which
        # ThreadPoolExecutor rejects -- confirm callers never pass [].
        with self.executor_factory(
            max_workers=min((10, len(buckets)))) as w:
            results = w.map(
                assemble_bucket,
                zip(itertools.repeat(self.session_factory), buckets))
            results = filter(None, results)
            return results
# Each entry: (s3 client method, bucket key to store under,
#              default when the attribute is absent, sub-key to select
#              from the response, or None for the whole response)
S3_AUGMENT_TABLE = (
    ('get_bucket_location', 'Location', None, None),
    ('get_bucket_tagging', 'Tags', [], 'TagSet'),
    ('get_bucket_policy', 'Policy', None, 'Policy'),
    ('get_bucket_acl', 'Acl', None, None),
    ('get_bucket_replication', 'Replication', None, None),
    ('get_bucket_versioning', 'Versioning', None, None),
    ('get_bucket_website', 'Website', None, None),
    ('get_bucket_logging', 'Logging', None, 'LoggingEnabled'),
    ('get_bucket_notification_configuration', 'Notification', None, None)
    # ('get_bucket_lifecycle', 'Lifecycle', None, None),
    # ('get_bucket_cors', 'Cors'),
)
def assemble_bucket(item):
    """Assemble a document representing all the config state around a bucket.

    item -- (session_factory, bucket dict) pair; returns the bucket dict
    augmented with one key per S3_AUGMENT_TABLE row, or None when an
    unexpected client error makes the bucket unusable.
    """
    factory, b = item
    s = factory()
    c = s.client('s3')
    # Bucket Location, Current Client Location, Default Location
    b_location = c_location = location = "us-east-1"
    # copy so redirected calls can be re-appended (see PermanentRedirect);
    # appending while iterating a list extends the iteration -- intentional
    methods = list(S3_AUGMENT_TABLE)
    for m, k, default, select in methods:
        try:
            method = getattr(c, m)
            v = method(Bucket=b['Name'])
            v.pop('ResponseMetadata')
            if select is not None and select in v:
                v = v[select]
        except (ssl.SSLError, SSLError) as e:
            # Proxy issues? i assume
            log.warning("Bucket ssl error %s: %s %s",
                        b['Name'], b.get('Location', 'unknown'),
                        e)
            continue
        except ClientError as e:
            code = e.response['Error']['Code']
            if code.startswith("NoSuch") or "NotFound" in code:
                # attribute simply not configured on this bucket
                v = default
            elif code == 'PermanentRedirect':
                s = factory()
                c = bucket_client(s, b)
                # Requeue with the correct region given location constraint
                methods.append((m, k, default, select))
                continue
            else:
                log.warning(
                    "Bucket:%s unable to invoke method:%s error:%s " % (
                        b['Name'], m, e.response['Error']['Message']))
                return None
        # As soon as we learn location (which generally works)
        if k == 'Location' and v is not None:
            b_location = v.get('LocationConstraint')
            # NOTE(review): this compares the whole location response dict
            # to the region string c_location -- always unequal for any
            # non-empty dict; presumably b_location was intended. Confirm
            # before changing, behavior has been this way historically.
            if v and v != c_location:
                c = s.client('s3', region_name=b_location)
            elif c_location != location:
                c = s.client('s3', region_name=location)
        b[k] = v
    return b
def bucket_client(session, b, kms=False):
    """Return an s3 client pinned to the bucket's home region.

    kms=True forces SigV4 signing, which aws:kms-encrypted objects require.
    """
    location = b.get('Location')
    region = 'us-east-1'
    if location is not None:
        # an empty LocationConstraint means the classic us-east-1 region
        region = location['LocationConstraint'] or 'us-east-1'
    if kms:
        # Need v4 signature for aws:kms crypto
        client_config = Config(signature_version='s3v4', read_timeout=200)
    else:
        client_config = Config(read_timeout=200)
    return session.client('s3', region_name=region, config=client_config)
@filters.register('cross-account')
class S3CrossAccountFilter(CrossAccountAccessFilter):
    """Flag bucket policies granting access outside the allowed accounts."""
    def get_accounts(self):
        """add in elb access by default
        ELB Accounts by region http://goo.gl/a8MXxd
        """
        accounts = super(S3CrossAccountFilter, self).get_accounts()
        # AWS-owned ELB log-delivery account ids, one per region; these are
        # expected cross-account principals and should not trip the filter
        return accounts.union(
            ['127311923021',  # us-east-1
             '797873946194',  # us-west-2
             '027434742980',  # us-west-1
             '156460612806',  # eu-west-1
             '054676820928',  # eu-central-1
             '114774131450',  # ap-southeast-1
             '582318560864',  # ap-northeast-1
             '783225319266',  # ap-southeast-2
             '600734575887',  # ap-northeast-2
             '507241528517',  # sa-east-1
             '048591011584',  # gov-cloud-1
             ])
@filters.register('global-grants')
class GlobalGrantsFilter(Filter):
    """Match buckets whose ACL grants permissions to AllUsers or
    AuthenticatedUsers; matched permissions are annotated on the bucket.
    """
    schema = type_schema('global-grants', permissions={
        'type': 'array', 'items': {
            'type': 'string', 'enum': [
                'READ', 'WRITE', 'WRITE_ACP', 'READ', 'READ_ACP']}})
    GLOBAL_ALL = "http://acs.amazonaws.com/groups/global/AllUsers"
    AUTH_ALL = "http://acs.amazonaws.com/groups/global/AuthenticatedUsers"
    def process(self, buckets, event=None):
        with self.executor_factory(max_workers=5) as w:
            results = w.map(self.process_bucket, buckets)
            results = filter(None, list(results))
            return results
    def process_bucket(self, b):
        """Return the bucket annotated with its global grants, else None."""
        acl = b.get('Acl', {'Grants': []})
        if not acl or not acl['Grants']:
            return
        results = []
        # empty 'permissions' in policy data means "match any permission"
        perms = self.data.get('permissions', [])
        for grant in acl['Grants']:
            if 'URI' not in grant.get("Grantee", {}):
                continue
            if grant['Grantee']['URI'] not in [self.AUTH_ALL, self.GLOBAL_ALL]:
                continue
            # website-hosting buckets legitimately grant global READ
            if grant['Permission'] == 'READ' and b['Website']:
                continue
            if not perms or (perms and grant['Permission'] in perms):
                results.append(grant['Permission'])
        # FIX: removed an unused bucket_client() call here -- it created
        # (and immediately discarded) a new session and client per bucket.
        if results:
            set_annotation(b, 'GlobalPermissions', results)
            return b
class BucketActionBase(BaseAction):
    """Base for bucket actions; subclasses declare ``permissions``."""
    def get_permissions(self):
        return self.permissions
@filters.register('has-statement')
class HasStatementFilter(Filter):
    """Find buckets with set of named policy statements."""
    schema = type_schema(
        'has-statement',
        statement_ids={'type': 'array', 'items': {'type': 'string'}})
    def process(self, buckets, event=None):
        return filter(None, map(self.process_bucket, buckets))
    def process_bucket(self, b):
        """Return the bucket when every required Sid is present, else None.

        A bucket with no policy at all is treated as a match, preserving
        the long-standing behavior of this filter.
        """
        policy = b.get('Policy')
        if policy is None:
            return b
        document = json.loads(policy)
        # copy so we can tick off each Sid as we encounter it
        missing = list(self.data.get('statement_ids', []))
        for statement in document.get('Statement', []):
            sid = statement.get('Sid')
            if sid in missing:
                missing.remove(sid)
        if missing:
            return None
        return b
@filters.register('missing-statement')
@filters.register('missing-policy-statement')
class MissingPolicyStatementFilter(Filter):
    """Find buckets missing a set of named policy statements."""
    schema = type_schema(
        'missing-policy-statement',
        aliases=('missing-statement',),
        statement_ids={'type': 'array', 'items': {'type': 'string'}})
    def __call__(self, b):
        # NOTE(review): mixed return types -- the bucket dict (truthy) when
        # no policy exists, True when statements are missing, False when all
        # are present. Only truthiness matters to the filter machinery.
        p = b.get('Policy')
        if p is None:
            return b
        p = json.loads(p)
        # each required Sid found in the policy is ticked off the list
        required = list(self.data.get('statement_ids', []))
        statements = p.get('Statement', [])
        for s in list(statements):
            if s.get('Sid') in required:
                required.remove(s['Sid'])
        if not required:
            return False
        return True
@actions.register('no-op')
class NoOp(BucketActionBase):
    """Do nothing; useful as an explicit placeholder action in policies."""
    schema = type_schema('no-op')
    def process(self, buckets):
        return None
@actions.register('remove-statements')
class RemovePolicyStatement(BucketActionBase):
    """Remove named statements (by Sid) from bucket policies.

    Deletes the policy outright when removal would leave it empty, since S3
    rejects policies with an empty Statement array.
    """
    schema = type_schema(
        'remove-statements',
        statement_ids={'type': 'array', 'items': {'type': 'string'}})
    def process(self, buckets):
        with self.executor_factory(max_workers=3) as w:
            results = w.map(self.process_bucket, buckets)
            return filter(None, list(results))
    def process_bucket(self, bucket):
        """Strip matching statements; returns a result record or None."""
        p = bucket.get('Policy')
        if p is None:
            return
        else:
            p = json.loads(p)
        statements = p.get('Statement', [])
        found = []
        # iterate a copy since we mutate 'statements' in place
        for s in list(statements):
            # FIX: use .get('Sid') -- statements without a Sid are legal and
            # previously raised KeyError here
            if s.get('Sid') in self.data['statement_ids']:
                found.append(s)
                statements.remove(s)
        if not found:
            return
        s3 = local_session(self.manager.session_factory).client('s3')
        if not statements:
            s3.delete_bucket_policy(Bucket=bucket['Name'])
        else:
            s3.put_bucket_policy(Bucket=bucket['Name'], Policy=json.dumps(p))
        return {'Name': bucket['Name'], 'State': 'PolicyRemoved', 'Statements': found}
@actions.register('attach-encrypt')
class AttachLambdaEncrypt(BucketActionBase):
    """Attach an s3crypt lambda (via bucket notifications) so new objects
    are encrypted on write.
    """
    schema = type_schema(
        'attach-encrypt', role={'type': 'string'})
    def __init__(self, data=None, manager=None):
        self.data = data or {}
        self.manager = manager
    def validate(self):
        """Require a lambda execution role from policy data or global config."""
        if not self.data.get('role', self.manager.config.assume_role):
            raise ValueError(
                # FIX: added the missing space ("either" + "via" previously
                # rendered as "eithervia")
                "attach-encrypt: role must be specified either "
                "via assume or in config")
        return self
    def process(self, buckets):
        from c7n.mu import LambdaManager
        from c7n.ufuncs.s3crypt import get_function
        func = get_function(
            None, self.data.get('role', self.manager.config.assume_role))
        # Publish function to all of our buckets regions
        region_funcs = {}
        regions = set([
            b.get('LocationConstraint', 'us-east-1') for b in buckets])
        for r in regions:
            lambda_mgr = LambdaManager(
                functools.partial(self.manager.session_factory, region=r))
            lambda_mgr.publish(func)
            region_funcs[r] = func
        with self.executor_factory(max_workers=3) as w:
            results = []
            futures = []
            for b in buckets:
                futures.append(
                    w.submit(
                        self.process_bucket,
                        region_funcs[b.get('LocationConstraint', 'us-east-1')],
                        b))
            for f in as_completed(futures):
                if f.exception():
                    log.exception(
                        "Error attaching lambda-encrypt %s" % (f.exception()))
                    # FIX: skip failed futures -- calling f.result() on a
                    # failed future re-raises and aborted the whole batch
                    continue
                results.append(f.result())
            return filter(None, results)
    def process_bucket(self, f, b):
        """Wire the published lambda into this bucket's notifications."""
        from c7n.mu import BucketNotification
        source = BucketNotification({}, self.manager.session_factory, b)
        return source.add(f)
@actions.register('encryption-policy')
class EncryptionRequiredPolicy(BucketActionBase):
    """Attach a deny policy so unencrypted PutObject requests are rejected.

    Note: this breaks writers that do not request server-side encryption,
    including AWS log delivery (see module docstring).
    """
    permissions = ("s3:GetBucketPolicy", "s3:PutBucketPolicy")
    schema = type_schema('encryption-policy')
    def __init__(self, data=None, manager=None):
        self.data = data or {}
        self.manager = manager
    def process(self, buckets):
        with self.executor_factory(max_workers=3) as w:
            results = w.map(self.process_bucket, buckets)
            results = filter(None, list(results))
            return results
    def process_bucket(self, b):
        """Ensure the bucket policy carries the required-encryption deny."""
        p = b['Policy']
        if p is None:
            log.info("No policy found, creating new")
            p = {'Version': "2012-10-17", "Statement": []}
        else:
            p = json.loads(p)
        encryption_sid = "RequiredEncryptedPutObject"
        encryption_statement = {
            'Sid': encryption_sid,
            'Effect': 'Deny',
            'Principal': '*',
            'Action': 's3:PutObject',
            "Resource": "arn:aws:s3:::%s/*" % b['Name'],
            "Condition": {
                # AWS Managed Keys or KMS keys, note policy language
                # does not support custom kms (todo add issue)
                "StringNotEquals": {
                    "s3:x-amz-server-side-encryption": ["AES256", "aws:kms"]}}}
        statements = p.get('Statement', [])
        # FIX: removed the unused local 'found = False' (assigned, never read)
        for s in list(statements):
            if s['Sid'] == encryption_sid:
                log.debug("Bucket:%s Found extant encrypt policy", b['Name'])
                if s != encryption_statement:
                    log.info(
                        "Bucket:%s updating extant encrypt policy", b['Name'])
                    statements.remove(s)
                else:
                    # identical statement already present; nothing to do
                    return
        session = self.manager.session_factory()
        s3 = bucket_client(session, b)
        statements.append(encryption_statement)
        p['Statement'] = statements
        log.info('Bucket:%s attached encryption policy' % b['Name'])
        try:
            s3.put_bucket_policy(
                Bucket=b['Name'],
                Policy=json.dumps(p))
        except ClientError as e:
            # bucket deleted out from under us: treat as a no-op
            if e.response['Error']['Code'] == 'NoSuchBucket':
                return
            self.log.exception(
                "Error on bucket:%s putting policy\n%s error:%s",
                b['Name'],
                json.dumps(statements, indent=2), e)
            raise
        return {'Name': b['Name'], 'State': 'PolicyAttached'}
class BucketScanLog(object):
    """Offload remediated key ids to a disk file in batches

    A bucket keyspace is effectively infinite, we need to store partial
    results out of memory, this class provides for a json log on disk
    with partial write support.

    json output format:
     - [list_of_serialized_keys],
     - [] # Empty list of keys at end when we close the buffer
    """

    def __init__(self, log_dir, name):
        self.log_dir = log_dir
        self.name = name
        self.fh = None      # open file handle while inside the context
        self.count = 0      # total number of keys recorded via add()

    @property
    def path(self):
        return os.path.join(self.log_dir, "%s.json" % self.name)

    def __enter__(self):
        # Don't require output directories
        if self.log_dir is None:
            # Still hand back self so callers can use .add()/.count;
            # previously None was returned, breaking `with ... as key_log`
            # users when no log dir was configured. Writes are simply
            # skipped when there is no file handle.
            return self
        self.fh = open(self.path, 'w')
        self.fh.write("[\n")
        return self

    def __exit__(self, exc_type=None, exc_value=None, exc_frame=None):
        if self.fh is None:
            return
        # we need an empty marker list at end to avoid trailing commas
        self.fh.write("[]")
        # and close the surrounding list
        self.fh.write("\n]")
        self.fh.close()
        # Nothing was logged: remove the (structurally empty) file.
        if not self.count:
            os.remove(self.fh.name)
        self.fh = None
        return False

    def add(self, keys):
        """Record a batch of remediated keys; appends one json list row."""
        self.count += len(keys)
        if self.fh is None:
            return
        self.fh.write(json.dumps(keys))
        self.fh.write(",\n")
class ScanBucket(BucketActionBase):
    """Base action walking every key (or version) of a set of buckets.

    Subclasses implement process_chunk / process_key / process_version to
    remediate individual keys; remediated key ids are appended to a per
    bucket BucketScanLog.
    """

    permissions = ("s3:ListBucket",)

    # Paginator / response-key / key-processor triples for plain vs
    # versioned buckets.
    bucket_ops = {
        'standard': {
            'iterator': 'list_objects',
            'contents_key': 'Contents',
            'key_processor': 'process_key'
        },
        'versioned': {
            'iterator': 'list_object_versions',
            'contents_key': 'Versions',
            'key_processor': 'process_version'
        }
    }

    def __init__(self, data, manager=None):
        super(ScanBucket, self).__init__(data, manager)
        self.denied_buckets = []

    def get_bucket_style(self, b):
        # 'versioned' when bucket versioning is enabled, else 'standard'.
        return (
            b.get('Versioning', {'Status': ''}).get('Status') == 'Enabled'
            and 'versioned' or 'standard')

    def get_bucket_op(self, b, op_name):
        bucket_style = self.get_bucket_style(b)
        op = self.bucket_ops[bucket_style][op_name]
        if op_name == 'key_processor':
            return getattr(self, op)
        return op

    def process(self, buckets):
        results = []
        with self.executor_factory(max_workers=3) as w:
            futures = {}
            for b in buckets:
                futures[w.submit(self.process_bucket, b)] = b
            for f in as_completed(futures):
                # Resolve the bucket belonging to this future. Previously
                # the stale loop variable `b` (the last submitted bucket)
                # was used here, misattributing errors and denied-bucket
                # records to the wrong bucket.
                b = futures[f]
                if f.exception():
                    self.log.error(
                        "Error on bucket:%s region:%s policy:%s error: %s",
                        b['Name'], b.get('Location', 'unknown'),
                        self.manager.data.get('name'), f.exception())
                    self.denied_buckets.append(b['Name'])
                    continue
                result = f.result()
                if result:
                    results.append(result)

        # Persist the list of buckets we could not scan for later review.
        if self.denied_buckets and self.manager.log_dir:
            with open(
                    os.path.join(
                        self.manager.log_dir, 'denied.json'), 'w') as fh:
                json.dump(self.denied_buckets, fh, indent=2)
            self.denied_buckets = []
        return results

    def process_bucket(self, b):
        log.info(
            "Scanning bucket:%s visitor:%s style:%s" % (
                b['Name'], self.__class__.__name__, self.get_bucket_style(b)))
        s = self.manager.session_factory()
        s3 = bucket_client(s, b)

        # The bulk of _process_bucket function executes inline in
        # calling thread/worker context, neither paginator nor
        # bucketscan log should be used across worker boundary.
        p = s3.get_paginator(
            self.get_bucket_op(b, 'iterator')).paginate(Bucket=b['Name'])
        with BucketScanLog(self.manager.log_dir, b['Name']) as key_log:
            with self.executor_factory(max_workers=10) as w:
                try:
                    return self._process_bucket(b, p, key_log, w)
                except ClientError as e:
                    if e.response['Error']['Code'] == 'NoSuchBucket':
                        log.warning(
                            "Bucket:%s removed while scanning" % b['Name'])
                        return
                    if e.response['Error']['Code'] == 'AccessDenied':
                        log.warning(
                            "Access Denied Bucket:%s while scanning" % b['Name'])
                        self.denied_buckets.append(b['Name'])
                        return
                    log.exception(
                        "Error processing bucket:%s paginator:%s" % (
                            b['Name'], p))

    __call__ = process_bucket

    def _process_bucket(self, b, p, key_log, w):
        """Walk pages of keys, fanning remediation out in 100-key chunks."""
        content_key = self.get_bucket_op(b, 'contents_key')
        count = 0

        for key_set in p:
            count += len(key_set.get(content_key, []))

            # Empty bucket check
            if content_key not in key_set and not key_set['IsTruncated']:
                b['KeyScanCount'] = count
                b['KeyRemediated'] = key_log.count
                return {'Bucket': b['Name'],
                        'Remediated': key_log.count,
                        'Count': count}

            futures = []
            for batch in chunks(key_set.get(content_key, []), size=100):
                if not batch:
                    continue
                futures.append(w.submit(self.process_chunk, batch, b))

            for f in as_completed(futures):
                if f.exception():
                    log.exception("Exception Processing bucket:%s key batch %s" % (
                        b['Name'], f.exception()))
                    continue
                r = f.result()
                if r:
                    key_log.add(r)

            # Log completion at info level, progress at debug level
            if key_set['IsTruncated']:
                log.debug('Scan progress bucket:%s keys:%d remediated:%d ...',
                          b['Name'], count, key_log.count)
            else:
                log.info('Scan Complete bucket:%s keys:%d remediated:%d',
                         b['Name'], count, key_log.count)

        b['KeyScanCount'] = count
        b['KeyRemediated'] = key_log.count
        return {
            'Bucket': b['Name'], 'Remediated': key_log.count, 'Count': count}

    def process_chunk(self, batch, bucket):
        raise NotImplementedError()

    def process_key(self, s3, key, bucket_name, info=None):
        raise NotImplementedError()

    def process_version(self, s3, bucket, key):
        raise NotImplementedError()
@actions.register('encrypt-keys')
class EncryptExtantKeys(ScanBucket):
    """Scan all keys in a bucket and server-side encrypt the unencrypted ones.

    Options: report-only (no writes), glacier (restore + re-encrypt glacier
    objects), large (multipart-copy objects over MAX_COPY_SIZE), crypto
    (AES256 default, or aws:kms).
    """

    permissions = (
        "s3:GetObject",
        "s3:PutObject",
        "s3:DeleteObjectVersion",
        "s3:RestoreObject",
    ) + ScanBucket.permissions

    schema = {
        'type': 'object',
        # NOTE(review): 'additonalProperties' is misspelled, so extra keys
        # are not rejected by validation; fixing it would tighten existing
        # policies — confirm before changing.
        'additonalProperties': False,
        'properties': {
            'report-only': {'type': 'boolean'},
            'glacier': {'type': 'boolean'},
            'large': {'type': 'boolean'},
            'crypto': {'enum': ['AES256', 'aws:kms']}
        }
    }

    metrics = [
        ('Total Keys', {'Scope': 'Account'}),
        ('Unencrypted', {'Scope': 'Account'})]

    def process(self, buckets):
        t = time.time()
        results = super(EncryptExtantKeys, self).process(buckets)
        run_time = time.time() - t
        remediated_count = object_count = 0
        for r in results:
            object_count += r['Count']
            remediated_count += r['Remediated']
            self.manager.ctx.metrics.put_metric(
                "Unencrypted", r['Remediated'], "Count", Scope=r['Bucket'],
                buffer=True)

        self.manager.ctx.metrics.put_metric(
            "Unencrypted", remediated_count, "Count", Scope="Account",
            buffer=True
        )
        self.manager.ctx.metrics.put_metric(
            "Total Keys", object_count, "Count", Scope="Account",
            buffer=True
        )
        self.manager.ctx.metrics.flush()

        log.info(
            ("EncryptExtant Complete keys:%d "
             "remediated:%d rate:%0.2f/s time:%0.2fs"),
            object_count,
            remediated_count,
            float(object_count) / run_time,
            run_time)
        return results

    def process_chunk(self, batch, bucket):
        crypto_method = self.data.get('crypto', 'AES256')
        s3 = bucket_client(
            local_session(self.manager.session_factory), bucket,
            kms=(crypto_method == 'aws:kms'))
        b = bucket['Name']
        results = []
        key_processor = self.get_bucket_op(bucket, 'key_processor')
        for key in batch:
            r = key_processor(s3, key, b)
            if r:
                results.append(r)
        return results

    def process_key(self, s3, key, bucket_name, info=None):
        """Encrypt a single key in place; returns the key id if remediated."""
        k = key['Key']
        if info is None:
            info = s3.head_object(Bucket=bucket_name, Key=k)

        # Already encrypted: nothing to do.
        if 'ServerSideEncryption' in info:
            return False

        if self.data.get('report-only'):
            return k

        storage_class = info.get('StorageClass', 'STANDARD')

        if storage_class == 'GLACIER':
            if not self.data.get('glacier'):
                return False
            if 'Restore' not in info:
                # This takes multiple hours, we let the next c7n
                # run take care of followups.
                s3.restore_object(
                    Bucket=bucket_name,
                    Key=k,
                    RestoreRequest={'Days': 30})
                return False
            elif not restore_complete(info['Restore']):
                return False
            # Restored glacier object is copied back as STANDARD storage.
            # (Was `storage_class == 'STANDARD'`, a no-op comparison.)
            storage_class = 'STANDARD'

        crypto_method = self.data.get('crypto', 'AES256')
        # Note on copy we lose individual object acl grants
        params = {'Bucket': bucket_name,
                  'Key': k,
                  'CopySource': "/%s/%s" % (bucket_name, k),
                  'MetadataDirective': 'COPY',
                  'StorageClass': storage_class,
                  'ServerSideEncryption': crypto_method}

        if key['Size'] > MAX_COPY_SIZE and self.data.get('large', True):
            return self.process_large_file(s3, bucket_name, key, info, params)

        s3.copy_object(**params)
        return k

    def process_version(self, s3, key, bucket_name):
        info = s3.head_object(
            Bucket=bucket_name,
            Key=key['Key'],
            VersionId=key['VersionId'])

        if 'ServerSideEncryption' in info:
            return False

        if self.data.get('report-only'):
            return key['Key'], key['VersionId']

        if key['IsLatest']:
            r = self.process_key(s3, key, bucket_name, info)
            # Glacier request processing, wait till we have the restored object
            if not r:
                return r
        s3.delete_object(
            Bucket=bucket_name,
            Key=key['Key'],
            VersionId=key['VersionId'])
        return key['Key'], key['VersionId']

    def process_large_file(self, s3, bucket_name, key, info, params):
        """For objects over 5gb, use multipart upload to copy"""
        part_size = MAX_COPY_SIZE - (1024 ** 2)
        # Round up so a trailing partial part is still copied; plain
        # integer division here previously truncated under py2.
        num_parts = int(math.ceil(key['Size'] / float(part_size)))
        source = params.pop('CopySource')
        params.pop('MetadataDirective')
        if 'Metadata' in info:
            params['Metadata'] = info['Metadata']

        upload_id = s3.create_multipart_upload(**params)['UploadId']

        # Rebuild params for the per-part copy calls. The previous dict
        # literal spelled 'CopySource' twice; the first entry was dead.
        params = {'Bucket': bucket_name,
                  'Key': key['Key'],
                  'UploadId': upload_id,
                  'CopySource': source,
                  'CopySourceIfMatch': key['ETag']}

        def upload_part(part_num):
            # Each part copies its byte range of the source object.
            part_params = dict(params)
            part_params['CopySourceRange'] = "bytes=%d-%d" % (
                part_size * (part_num - 1),
                min(part_size * part_num - 1, key['Size'] - 1))
            part_params['PartNumber'] = part_num
            response = s3.upload_part_copy(**part_params)
            return {'ETag': response['CopyPartResult']['ETag'],
                    'PartNumber': part_num}

        try:
            with self.executor_factory(max_workers=2) as w:
                parts = list(w.map(upload_part, range(1, num_parts + 1)))
        except Exception:
            log.warning(
                "Error during large key copy bucket: %s key: %s, "
                "aborting upload", bucket_name, key, exc_info=True)
            s3.abort_multipart_upload(
                Bucket=bucket_name, Key=key['Key'], UploadId=upload_id)
            raise
        s3.complete_multipart_upload(
            Bucket=bucket_name, Key=key['Key'], UploadId=upload_id,
            MultipartUpload={'Parts': parts})
        return key['Key']
def restore_complete(restore):
    """Return True when an s3 Restore header reports the restore finished.

    Only the first comma-delimited clause (ongoing-request="...") is
    inspected; a value of "false" means the restore is complete.
    """
    ongoing_clause = restore.split(',', 1)[0]
    return 'false' in ongoing_clause
@filters.register('is-log-target')
class LogTarget(Filter):
    """Filter buckets that are log destinations.

    Not suitable for use in lambda on large accounts, This is a api
    heavy process to detect scan all possible log sources.

    Sources:
      - elb (Access Log)
      - s3 (Access Log)
      - cfn (Template writes)
      - cloudtrail
    """

    schema = type_schema('is-log-target', value={'type': 'boolean'})
    executor_factory = executor.MainThreadExecutor

    def process(self, buckets, event=None):
        # Collect the set of bucket names used as log destinations, then
        # select (value: true, default) or exclude (value: false) them.
        log_buckets = set()
        count = 0
        for bucket, _ in self.get_elb_bucket_locations():
            log_buckets.add(bucket)
            count += 1
        self.log.debug("Found %d elb log targets" % count)

        count = 0
        for bucket, _ in self.get_s3_bucket_locations(buckets):
            count += 1
            log_buckets.add(bucket)
        self.log.debug('Found %d s3 log targets' % count)

        for bucket, _ in self.get_cloud_trail_locations(buckets):
            log_buckets.add(bucket)

        self.log.info("Found %d log targets for %d buckets" % (
            len(log_buckets), len(buckets)))
        if self.data.get('value', True):
            return [b for b in buckets if b['Name'] in log_buckets]
        else:
            return [b for b in buckets if b['Name'] not in log_buckets]

    @staticmethod
    def get_s3_bucket_locations(buckets):
        """return (bucket_name, prefix) for all s3 logging targets"""
        for b in buckets:
            if b['Logging']:
                yield (b['Logging']['TargetBucket'],
                       b['Logging']['TargetPrefix'])
            # cloudformation stores its templates in these buckets.
            if b['Name'].startswith('cf-templates-'):
                yield (b['Name'], '')

    def get_cloud_trail_locations(self, buckets):
        """Yield (bucket, prefix) for trails that log into one of `buckets`."""
        session = local_session(self.manager.session_factory)
        client = session.client('cloudtrail')
        names = set([b['Name'] for b in buckets])
        for t in client.describe_trails().get('trailList', ()):
            if t.get('S3BucketName') in names:
                yield (t['S3BucketName'], t.get('S3KeyPrefix', ''))

    def get_elb_bucket_locations(self):
        """Yield (bucket, prefix) for all elb access-log destinations."""
        session = local_session(self.manager.session_factory)
        client = session.client('elb')

        # Try to use the cache if it exists
        elbs = self.manager._cache.get(
            {'region': self.manager.config.region, 'resource': 'elb'})

        # Sigh, post query refactor reuse, we can't save our cache here
        # as that resource manager does extra lookups on tags. Not
        # worth paginating, since with cache usage we have full set in
        # mem.
        if elbs is None:
            p = client.get_paginator('describe_load_balancers')
            results = p.paginate()
            elbs = results.build_full_result().get(
                'LoadBalancerDescriptions', ())
            self.log.info("Queried %d elbs", len(elbs))
        else:
            self.log.info("Using %d cached elbs", len(elbs))

        get_elb_attrs = functools.partial(
            _query_elb_attrs, self.manager.session_factory)

        # Attribute lookups are one call per elb; fan them out in batches.
        with self.executor_factory(max_workers=2) as w:
            futures = []
            for elb_set in chunks(elbs, 100):
                futures.append(w.submit(get_elb_attrs, elb_set))
            for f in as_completed(futures):
                if f.exception():
                    log.error("Error while scanning elb log targets: %s" % (
                        f.exception()))
                    continue
                for tgt in f.result():
                    yield tgt
def _query_elb_attrs(session_factory, elb_set):
    """Return (bucket, prefix) pairs for elbs with access logging enabled."""
    client = local_session(session_factory).client('elb')
    log_targets = []
    for elb in elb_set:
        elb_name = elb['LoadBalancerName']
        try:
            attrs = client.describe_load_balancer_attributes(
                LoadBalancerName=elb_name)[
                    'LoadBalancerAttributes']
            if 'AccessLog' in attrs and attrs['AccessLog']['Enabled']:
                access_log = attrs['AccessLog']
                log_targets.append(
                    (access_log['S3BucketName'], access_log['S3BucketPrefix']))
        except Exception as err:
            # Best effort: an elb may disappear mid-scan; log and move on.
            log.warning(
                "Could not retrieve load balancer %s: %s" % (
                    elb_name, err))
    return log_targets
@actions.register('delete-global-grants')
class DeleteGlobalGrants(BucketActionBase):
    """Remove global (AllUsers / AuthenticatedUsers) grants from bucket acls.

    The grantee URIs to remove can be customized via `grantees`; READ
    grants on website buckets are preserved.
    """

    schema = type_schema(
        'delete-global-grants',
        grantees={'type': 'array', 'items': {'type': 'string'}})

    def process(self, buckets):
        with self.executor_factory(max_workers=5) as w:
            return filter(None, list(w.map(self.process_bucket, buckets)))

    def process_bucket(self, b):
        grantees = self.data.get(
            'grantees', [
                GlobalGrantsFilter.AUTH_ALL, GlobalGrantsFilter.GLOBAL_ALL])

        log.info(b)

        acl = b.get('Acl', {'Grants': []})
        if not acl or not acl['Grants']:
            return
        new_grants = []
        for grant in acl['Grants']:
            grantee = grant.get('Grantee', {})
            if not grantee:
                continue
            # Yuck, 'get_bucket_acl' doesn't return the grantee type.
            if 'URI' in grantee:
                grantee['Type'] = 'Group'
            else:
                grantee['Type'] = 'CanonicalUser'
            if ('URI' in grantee and
                    grantee['URI'] in grantees and not
                    (grant['Permission'] == 'READ' and b['Website'])):
                # Remove this grantee.
                pass
            else:
                new_grants.append(grant)

        log.info({'Owner': acl['Owner'], 'Grants': new_grants})

        # Single client for the acl update; a second, unused bucket_client
        # call earlier in this method has been removed.
        c = bucket_client(self.manager.session_factory(), b)
        try:
            c.put_bucket_acl(
                Bucket=b['Name'],
                AccessControlPolicy={
                    'Owner': acl['Owner'], 'Grants': new_grants})
        except ClientError as e:
            if e.response['Error']['Code'] == 'NoSuchBucket':
                return
            # NOTE(review): other ClientErrors are swallowed and the
            # bucket is still reported as processed — confirm intended.
        return b
@actions.register('tag')
class BucketTag(Tag):
    """Apply tags to buckets, merging with each bucket's existing tag set.

    Incoming tags win on key collision; pre-existing tags with other keys
    are preserved (put_bucket_tagging replaces the whole set).
    """

    def process_resource_set(self, resource_set, tags):
        client = local_session(self.manager.session_factory).client('s3')
        for r in resource_set:
            # all the tag marshalling back and forth is a bit gross :-(
            new_tags = {t['Key']: t['Value'] for t in tags}
            for t in r.get('Tags', ()):
                if t['Key'] not in new_tags:
                    new_tags[t['Key']] = t['Value']
            tag_set = [{'Key': k, 'Value': v} for k, v in new_tags.items()]
            try:
                client.put_bucket_tagging(
                    Bucket=r['Name'], Tagging={'TagSet': tag_set})
            except ClientError as e:
                # Log before re-raising; previously the log call sat after
                # a bare `raise` and was unreachable dead code.
                self.log.exception(
                    "Error while tagging bucket %s err: %s" % (
                        r['Name'], e))
                raise
| |
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: david@reciprocitylabs.com
from ggrc import db
from ggrc.models.mixins import Identifiable
from ggrc.models.mixins import Mapping
from sqlalchemy import or_, and_
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import validates
from sqlalchemy.orm.collections import attribute_mapped_collection
from werkzeug.exceptions import BadRequest
import functools
import inspect
class Relationship(Mapping, db.Model):
    """Generic many-to-many link between any two model instances.

    Source and destination are stored as (type, id) pairs rather than
    real foreign keys, so a relationship can join arbitrary model types.
    """

    __tablename__ = 'relationships'
    source_id = db.Column(db.Integer, nullable=False)
    source_type = db.Column(db.String, nullable=False)
    destination_id = db.Column(db.Integer, nullable=False)
    destination_type = db.Column(db.String, nullable=False)

    # Relationships created automatically from another relationship point
    # back at the triggering row; SET NULL keeps them alive if the
    # triggering relationship is deleted.
    automapping_id = db.Column(
        db.Integer,
        db.ForeignKey('relationships.id', ondelete='SET NULL'),
        nullable=True,
    )
    automapping = db.relationship(
        lambda: Relationship,
        remote_side=lambda: Relationship.id
    )

    # Extended attributes keyed by attr_name, eagerly loaded with the row.
    relationship_attrs = db.relationship(
        lambda: RelationshipAttr,
        collection_class=attribute_mapped_collection("attr_name"),
        lazy='joined',  # eager loading
        cascade='all, delete-orphan'
    )
    # Dict-like proxy: rel.attrs['name'] maps to RelationshipAttr rows.
    attrs = association_proxy(
        "relationship_attrs", "attr_value",
        creator=lambda k, v: RelationshipAttr(attr_name=k, attr_value=v)
    )

    @property
    def source_attr(self):
        # Name of the per-type backref attribute, e.g. 'Program_source'.
        return '{0}_source'.format(self.source_type)

    @property
    def source(self):
        return getattr(self, self.source_attr)

    @source.setter
    def source(self, value):
        self.source_id = value.id if value is not None else None
        self.source_type = value.__class__.__name__ if value is not None else None
        return setattr(self, self.source_attr, value)

    @property
    def destination_attr(self):
        # Name of the per-type backref attribute, e.g. 'Program_destination'.
        return '{0}_destination'.format(self.destination_type)

    @property
    def destination(self):
        return getattr(self, self.destination_attr)

    @destination.setter
    def destination(self, value):
        self.destination_id = value.id if value is not None else None
        self.destination_type = value.__class__.__name__ if value is not None \
            else None
        return setattr(self, self.destination_attr, value)

    @validates('relationship_attrs')
    def _validate_attr(self, key, attr):
        """
        Only white-listed attributes can be stored, so users don't use this
        for storing arbitrary data.
        """
        RelationshipAttr.validate_attr(self.source, self.destination,
                                       self.attrs, attr)
        return attr

    @classmethod
    def find_related(cls, object1, object2):
        """First relationship linking the two objects, in either direction."""
        return cls.get_related_query(object1, object2).first()

    @classmethod
    def get_related_query(cls, object1, object2):
        def predicate(src, dst):
            # A null id acts as a wildcard matching any row of that type.
            return and_(
                Relationship.source_type == src.type,
                or_(Relationship.source_id == src.id, src.id == None),  # noqa
                Relationship.destination_type == dst.type,
                or_(Relationship.destination_id == dst.id, dst.id == None),  # noqa
            )
        return Relationship.query.filter(
            or_(predicate(object1, object2), predicate(object2, object1))
        )

    @classmethod
    def update_attributes(cls, object1, object2, new_attrs):
        """Validate and merge `new_attrs` into the relationship's attrs."""
        r = cls.find_related(object1, object2)
        for attr_name, attr_value in new_attrs.iteritems():
            attr = RelationshipAttr(attr_name=attr_name, attr_value=attr_value)
            attr = RelationshipAttr.validate_attr(r.source, r.destination,
                                                  r.attrs, attr)
            r.attrs[attr.attr_name] = attr.attr_value
        return r

    @staticmethod
    def _extra_table_args(cls):
        # Uniqueness on the full endpoint tuple plus lookup indexes for
        # each direction.
        return (
            db.UniqueConstraint(
                'source_id', 'source_type', 'destination_id', 'destination_type'),
            db.Index(
                'ix_relationships_source',
                'source_type', 'source_id'),
            db.Index(
                'ix_relationships_destination',
                'destination_type', 'destination_id'),
        )

    _publish_attrs = [
        'source',
        'destination',
        'attrs',
    ]
    # Publish the attrs dict values as-is in the JSON API.
    attrs.publish_raw = True

    def _display_name(self):
        return "{}:{} <-> {}:{}".format(self.source_type, self.source_id,
                                        self.destination_type, self.destination_id)

    def log_json(self):
        json = super(Relationship, self).log_json()
        # manually add attrs since the base log_json only captures table columns
        json["attrs"] = self.attrs.copy()  # copy in order to detach from orm
        return json
class Relatable(object):
    """Mixin adding relationship-backed related_sources /
    related_destinations collections to a model."""

    @declared_attr
    def related_sources(cls):
        # Relationships where this model instance is the destination.
        joinstr = 'and_(remote(Relationship.destination_id) == {type}.id, '\
                  'remote(Relationship.destination_type) == "{type}")'
        joinstr = joinstr.format(type=cls.__name__)
        return db.relationship(
            'Relationship',
            primaryjoin=joinstr,
            foreign_keys='Relationship.destination_id',
            backref='{0}_destination'.format(cls.__name__),
            cascade='all, delete-orphan')

    @declared_attr
    def related_destinations(cls):
        # Relationships where this model instance is the source.
        joinstr = 'and_(remote(Relationship.source_id) == {type}.id, '\
                  'remote(Relationship.source_type) == "{type}")'
        joinstr = joinstr.format(type=cls.__name__)
        return db.relationship(
            'Relationship',
            primaryjoin=joinstr,
            foreign_keys='Relationship.source_id',
            backref='{0}_source'.format(cls.__name__),
            cascade='all, delete-orphan')

    _publish_attrs = [
        'related_sources',
        'related_destinations'
    ]
    _include_links = []

    @classmethod
    def eager_query(cls):
        from sqlalchemy import orm

        query = super(Relatable, cls).eager_query()
        # NOTE(review): uses Relatable._include_links rather than
        # cls._include_links, so subclass overrides are ignored here —
        # confirm this is intended.
        return cls.eager_inclusions(query, Relatable._include_links).options(
            orm.subqueryload('related_sources'),
            orm.subqueryload('related_destinations'))
class RelationshipAttr(Identifiable, db.Model):
    """
    Extended attributes for relationships. Used to store relations meta-data
    so the Relationship table can be used in place of join-tables that carry
    extra information
    """
    __tablename__ = 'relationship_attrs'
    relationship_id = db.Column(
        db.Integer,
        db.ForeignKey('relationships.id'),
        primary_key=True
    )
    attr_name = db.Column(db.String, nullable=False)
    attr_value = db.Column(db.String, nullable=False)

    # Per-model-class validator cache, populated lazily by _get_validators.
    _validators = {}

    @classmethod
    def validate_attr(cls, source, destination, attrs, attr):
        """
        Checks both source and destination type (with mixins) for
        defined validators _validate_relationship_attr
        """
        attr_name = attr.attr_name
        attr_value = attr.attr_value
        validators = cls._get_validators(source) + cls._get_validators(destination)
        for validator in validators:
            validated_value = validator(source, destination, attrs,
                                        attr_name, attr_value)
            if validated_value is not None:
                # First validator accepting the value wins.
                attr.attr_value = validated_value
                return attr
        # No validator accepted the attribute -> reject the request.
        raise BadRequest("Invalid attribute {}: {}".format(attr_name, attr_value))

    @classmethod
    def _get_validators(cls, obj):
        target_class = type(obj)
        if target_class not in cls._validators:
            cls._validators[target_class] = cls._gather_validators(target_class)
        return cls._validators[target_class]

    @staticmethod
    def _gather_validators(target_class):
        # Collect _validate_relationship_attr from every class in the MRO,
        # binding each validator to the concrete target class.
        validators = set(getattr(cls, "_validate_relationship_attr", None)
                         for cls in inspect.getmro(target_class))
        validators.discard(None)
        return [functools.partial(v, target_class) for v in validators]
| |
import time
import pandas as pd
from warnings import warn
from carto.auth import APIKeyAuthClient
from carto.datasets import DatasetManager
from carto.exceptions import CartoException, CartoRateLimitException
from carto.sql import SQLClient, BatchSQLClient, CopySQLClient
from pyrestcli.exceptions import NotFoundException
from ..dataset_info import DatasetInfo
from ... import __version__
from ...auth.defaults import get_default_credentials
from ...utils.logger import log
from ...utils.geom_utils import encode_geometry_ewkb
from ...utils.utils import (is_sql_query, check_credentials, encode_row, map_geom_type, PG_NULL, double_quote,
create_tmp_name)
from ...utils.columns import (get_dataframe_columns_info, get_query_columns_info, obtain_converters, date_columns_names,
normalize_name)
# Number of attempts for COPY reads that hit the SQL API rate limit.
DEFAULT_RETRY_TIMES = 3
# Payload size above which queries are routed through the Batch SQL API.
# NOTE(review): exact units/semantics not visible here; confirm at callers.
BATCH_API_PAYLOAD_THRESHOLD = 12000


def retry_copy(func):
    """Decorator retrying `func` when the SQL API rate-limits the call.

    The wrapped callable honors an optional `retry_times` keyword
    (default DEFAULT_RETRY_TIMES); between attempts it sleeps for the
    interval advertised by the rate-limit response.
    """
    import functools

    # functools.wraps preserves the wrapped function's metadata
    # (previously lost, which confused introspection and tracebacks).
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        m_retry_times = kwargs.get('retry_times', DEFAULT_RETRY_TIMES)
        while m_retry_times >= 1:
            try:
                return func(*args, **kwargs)
            except CartoRateLimitException as err:
                m_retry_times -= 1
                if m_retry_times <= 0:
                    warn(('Read call was rate-limited. '
                          'This usually happens when there are multiple queries being read at the same time.'))
                    raise err
                warn('Read call rate limited. Waiting {s} seconds'.format(s=err.retry_after))
                time.sleep(err.retry_after)
                warn('Retrying...')
        # Reachable only when retry_times < 1: one unguarded attempt.
        return func(*args, **kwargs)
    return wrapper
def not_found(func):
    """Decorator translating Carto 'not found' errors into a plain Exception.

    Any other CartoException is re-raised unchanged.
    """
    import functools

    @functools.wraps(func)
    def decorator_func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except CartoException as e:
            # The carto client wraps the underlying NotFoundException in
            # the CartoException's args; detect it for a friendlier error.
            # isinstance replaces the exact `type(...) ==` check, and the
            # `e.args` truthiness guard avoids IndexError on empty args.
            if hasattr(e, 'args') and isinstance(e.args, (list, tuple)) \
                    and e.args and isinstance(e.args[0], NotFoundException):
                raise Exception('Resource not found') from None
            else:
                raise e
    return decorator_func
class ContextManager:
    def __init__(self, credentials):
        """Build SQL / COPY / Batch SQL clients for the given credentials.

        Falls back to the default credentials when `credentials` is falsy;
        raises (via check_credentials) when they are invalid.
        """
        self.credentials = credentials or get_default_credentials()
        check_credentials(self.credentials)
        self.auth_client = _create_auth_client(self.credentials)
        self.sql_client = SQLClient(self.auth_client)
        self.copy_client = CopySQLClient(self.auth_client)
        self.batch_sql_client = BatchSQLClient(self.auth_client)
    @not_found
    def execute_query(self, query, parse_json=True, do_post=True, format=None, **request_args):
        """Run `query` through the SQL API and return the (parsed) response."""
        return self.sql_client.send(query.strip(), parse_json, do_post, format, **request_args)
    @not_found
    def execute_long_running_query(self, query):
        """Run `query` through the Batch SQL API, blocking until completion."""
        return self.batch_sql_client.create_and_wait_for_completion(query.strip())
    def copy_to(self, source, schema=None, limit=None, retry_times=DEFAULT_RETRY_TIMES):
        """Download a table or query from CARTO via COPY TO.

        `source` may be a table name or a SQL query; `limit` caps the rows.
        """
        query = self.compute_query(source, schema)
        columns = self._get_query_columns_info(query)
        copy_query = self._get_copy_query(query, columns, limit)
        return self._copy_to(copy_query, columns, retry_times)
    def copy_from(self, gdf, table_name, if_exists='fail', cartodbfy=True,
                  retry_times=DEFAULT_RETRY_TIMES):
        """Upload a dataframe to `table_name` via COPY FROM.

        `if_exists` is one of 'fail' | 'replace' | 'append'. Returns the
        normalized table name.
        """
        schema = self.get_schema()
        table_name = self.normalize_table_name(table_name)
        df_columns = get_dataframe_columns_info(gdf)
        if self.has_table(table_name, schema):
            if if_exists == 'replace':
                table_query = self._compute_query_from_table(table_name, schema)
                table_columns = self._get_query_columns_info(table_query)
                if self._compare_columns(df_columns, table_columns):
                    # Equal columns: truncate table
                    self._truncate_table(table_name, schema)
                else:
                    # Diff columns: truncate table and drop + add columns
                    self._truncate_and_drop_add_columns(
                        table_name, schema, df_columns, table_columns)
            elif if_exists == 'fail':
                raise Exception('Table "{schema}.{table_name}" already exists in your CARTO account. '
                                'Please choose a different `table_name` or use '
                                'if_exists="replace" to overwrite it.'.format(
                                    table_name=table_name, schema=schema))
            else:  # 'append'
                # Appending to an existing table: skip cartodbfy, the table
                # is assumed to have been cartodbfied when it was created.
                cartodbfy = False
        else:
            self._create_table_from_columns(table_name, schema, df_columns)
        self._copy_from(gdf, table_name, df_columns, retry_times)
        if cartodbfy is True:
            cartodbfy_query = _cartodbfy_query(table_name, schema)
            self.execute_long_running_query(cartodbfy_query)
        return table_name
    def create_table_from_query(self, query, table_name, if_exists, cartodbfy=True):
        """Materialize `query` into `table_name` (fail/replace/append).

        Returns the normalized table name.
        """
        schema = self.get_schema()
        table_name = self.normalize_table_name(table_name)
        if self.has_table(table_name, schema):
            if if_exists == 'replace':
                # TODO: review logic copy_from
                self._drop_create_table_from_query(table_name, schema, query)
            elif if_exists == 'fail':
                raise Exception('Table "{schema}.{table_name}" already exists in your CARTO account. '
                                'Please choose a different `table_name` or use '
                                'if_exists="replace" to overwrite it.'.format(
                                    table_name=table_name, schema=schema))
            else:  # 'append'
                pass
        else:
            self._drop_create_table_from_query(table_name, schema, query)
        if cartodbfy is True:
            cartodbfy_query = _cartodbfy_query(table_name, schema)
            self.execute_long_running_query(cartodbfy_query)
        return table_name
    def list_tables(self, schema=None):
        """Return a single-column DataFrame of dataset names, newest first."""
        # All the show_* flags are disabled to keep the listing request light.
        datasets = DatasetManager(self.auth_client).filter(
            show_table_size_and_row_count='false',
            show_table='false',
            show_stats='false',
            show_likes='false',
            show_liked='false',
            show_permission='false',
            show_uses_builder_features='false',
            show_synchronization='false',
            load_totals='false'
        )
        datasets.sort(key=lambda x: x.updated_at, reverse=True)
        return pd.DataFrame([dataset.name for dataset in datasets], columns=['tables'])
    def has_table(self, table_name, schema=None):
        """Return True when `table_name` exists (checked with a probe query)."""
        query = self.compute_query(table_name, schema)
        return self._check_exists(query)
    def delete_table(self, table_name):
        """Drop `table_name`; True unless the server noticed it didn't exist."""
        query = _drop_table_query(table_name)
        output = self.execute_query(query)
        # A 'does not exist' notice (rather than an error) signals the
        # drop was a no-op.
        return not('notices' in output and 'does not exist' in output['notices'][0])
    def _delete_function(self, function_name):
        """Drop a server-side function; returns its name for chaining."""
        query = _drop_function_query(function_name)
        self.execute_query(query)
        return function_name
    def _create_function(self, schema, statement,
                         function_name=None, columns_types=None, return_value='VOID', language='plpgsql'):
        """Create a server-side function wrapping `statement`.

        Generates a temporary name when none is given; returns the
        schema-qualified function name.
        """
        function_name = function_name or create_tmp_name(base='tmp_func')
        safe_schema = double_quote(schema)
        query, qualified_func_name = _create_function_query(
            schema=safe_schema,
            function_name=function_name,
            statement=statement,
            columns_types=columns_types or '',
            return_value=return_value,
            language=language)
        self.execute_query(query)
        return qualified_func_name
    def rename_table(self, table_name, new_table_name, if_exists='fail'):
        """Rename `table_name` to `new_table_name` (fail/replace semantics).

        Returns the normalized new table name.
        """
        new_table_name = self.normalize_table_name(new_table_name)
        if table_name == new_table_name:
            raise ValueError('Table names are equal. Please choose a different table name.')
        if not self.has_table(table_name):
            raise Exception('Table "{table_name}" does not exist in your CARTO account.'.format(
                table_name=table_name))
        if self.has_table(new_table_name):
            if if_exists == 'replace':
                log.debug('Removing table "{}"'.format(new_table_name))
                self.delete_table(new_table_name)
            elif if_exists == 'fail':
                raise Exception('Table "{new_table_name}" already exists in your CARTO account. '
                                'Please choose a different `new_table_name` or use '
                                'if_exists="replace" to overwrite it.'.format(
                                    new_table_name=new_table_name))
        self._rename_table(table_name, new_table_name)
        return new_table_name
    def update_privacy_table(self, table_name, privacy=None):
        """Set the privacy of `table_name` via the dataset-info endpoint."""
        DatasetInfo(self.auth_client, table_name).update_privacy(privacy)
    def get_privacy(self, table_name):
        """Return the current privacy setting of `table_name`."""
        return DatasetInfo(self.auth_client, table_name).privacy
    def get_schema(self):
        """Get user schema from current credentials"""
        query = 'SELECT current_schema()'
        # do_post=False — presumably issues the request as a GET; confirm
        # against the carto SQLClient.send implementation.
        result = self.execute_query(query, do_post=False)
        schema = result['rows'][0]['current_schema']
        log.debug('schema: {}'.format(schema))
        return schema
def get_geom_type(self, query):
"""Fetch geom type of a remote table or query"""
distict_query = '''
SELECT distinct ST_GeometryType(the_geom) AS geom_type
FROM ({}) q
LIMIT 5
'''.format(query)
response = self.execute_query(distict_query, do_post=False)
if response and response.get('rows') and len(response.get('rows')) > 0:
st_geom_type = response.get('rows')[0].get('geom_type')
if st_geom_type:
return map_geom_type(st_geom_type[3:])
return None
    def get_num_rows(self, query):
        """Get the number of rows in the query"""
        # Wrap the query so COUNT applies to its full result set.
        result = self.execute_query('SELECT COUNT(*) FROM ({query}) _query'.format(query=query))
        return result.get('rows')[0].get('count')
    def get_bounds(self, query):
        """Return [[xmin, ymin], [xmax, ymax]] for the query's geometries,
        or None when no bounds are available."""
        extent_query = '''
            SELECT ARRAY[
                ARRAY[st_xmin(geom_env), st_ymin(geom_env)],
                ARRAY[st_xmax(geom_env), st_ymax(geom_env)]
            ] bounds FROM (
                SELECT ST_Extent(the_geom) geom_env
                FROM ({}) q
            ) q;
        '''.format(query)
        response = self.execute_query(extent_query, do_post=False)
        if response and response.get('rows') and len(response.get('rows')) > 0:
            return response.get('rows')[0].get('bounds')
        return None
    def get_column_names(self, source, schema=None, exclude=None):
        """Column names of a table or query, optionally minus `exclude`."""
        query = self.compute_query(source, schema)
        columns = [c.name for c in self._get_query_columns_info(query)]
        if exclude and isinstance(exclude, list):
            # Set difference: note the returned order is not preserved.
            columns = list(set(columns) - set(exclude))
        return columns
def is_public(self, query):
    """Return True when *query* is runnable with the public API key."""
    # Privacy only applies to tables, so probe the query anonymously:
    # if EXPLAIN succeeds with the public key, every table it reads is public.
    public_auth_client = _create_auth_client(self.credentials, public=True)
    public_sql_client = SQLClient(public_auth_client)
    try:
        public_sql_client.send('EXPLAIN {}'.format(query), do_post=False)
    except CartoException:
        return False
    return True
def get_table_names(self, query):
    """Return the (schema-stripped) names of tables referenced by a query."""
    tables_query = 'SELECT CDB_QueryTablesText($q${}$q$) as tables'.format(query)
    result = self.execute_query(tables_query)
    if result['total_rows'] > 0 and result['rows'][0]['tables']:
        # Dataset_info only works with tables without schema
        return [t.split('.')[1] if '.' in t else t
                for t in result['rows'][0]['tables']]
    return []
def _compare_columns(self, a, b):
    """Compare two column lists, ignoring CARTO's reserved columns."""
    filtered_a = sorted(col for col in a if _not_reserved(col.name))
    filtered_b = sorted(col for col in b if _not_reserved(col.name))
    return filtered_a == filtered_b
def _drop_create_table_from_query(self, table_name, schema, query):
    """Replace *table_name* with the result of *query* in one transaction."""
    log.debug('DROP + CREATE table "{}"'.format(table_name))
    batch_query = 'BEGIN; {drop}; {create}; COMMIT;'.format(
        drop=_drop_table_query(table_name),
        create=_create_table_from_query_query(table_name, query))
    self.execute_long_running_query(batch_query)
def _create_table_from_columns(self, table_name, schema, columns):
    """Create an empty table with the given column definitions."""
    log.debug('CREATE table "{}"'.format(table_name))
    create_query = 'BEGIN; {create}; COMMIT;'.format(
        create=_create_table_from_columns_query(table_name, columns))
    self.execute_query(create_query)
def _truncate_table(self, table_name, schema):
    """Delete every row of *table_name* inside a transaction."""
    log.debug('TRUNCATE table "{}"'.format(table_name))
    truncate_query = 'BEGIN; {truncate}; COMMIT;'.format(
        truncate=_truncate_table_query(table_name))
    self.execute_query(truncate_query)
def _truncate_and_drop_add_columns(self, table_name, schema, df_columns, table_columns):
    # Replace the full column set of an existing table while keeping the
    # table object itself in place: truncate, then DROP the old columns
    # and ADD the new ones in a single ALTER TABLE inside one transaction.
    log.debug('TRUNCATE AND DROP + ADD columns table "{}"'.format(table_name))
    drop_columns = _drop_columns_query(table_name, table_columns)
    add_columns = _add_columns_query(table_name, df_columns)
    drop_add_columns = 'ALTER TABLE {table_name} {drop_columns},{add_columns};'.format(
        table_name=table_name, drop_columns=drop_columns, add_columns=add_columns)
    query = '{regenerate}; BEGIN; {truncate}; {drop_add_columns}; COMMIT;'.format(
        regenerate=_regenerate_table_query(table_name, schema) if self._check_regenerate_table_exists() else '',
        truncate=_truncate_table_query(table_name),
        drop_add_columns=drop_add_columns)
    # The Batch SQL API rejects very large payloads: when the inline
    # ALTER TABLE would push the query over the limit, wrap it in a
    # temporary server-side function and SELECT that instead.
    query_length_over_threshold = len(query) > BATCH_API_PAYLOAD_THRESHOLD
    if query_length_over_threshold:
        qualified_func_name = self._create_function(
            schema=schema, statement=drop_add_columns)
        drop_add_func_sql = 'SELECT {}'.format(qualified_func_name)
        query = '''
            {regenerate};
            BEGIN;
            {truncate};
            {drop_add_func_sql};
            COMMIT;'''.format(
            regenerate=_regenerate_table_query(
                table_name, schema) if self._check_regenerate_table_exists() else '',
            truncate=_truncate_table_query(table_name),
            drop_add_func_sql=drop_add_func_sql)
    try:
        self.execute_long_running_query(query)
    finally:
        # Always drop the helper function, even when the query fails.
        if query_length_over_threshold:
            self._delete_function(qualified_func_name)
def compute_query(self, source, schema=None):
    """Return a SQL query for *source* (either a query string or a table name)."""
    if is_sql_query(source):
        return source
    return self._compute_query_from_table(source, schema or self.get_schema())
def _compute_query_from_table(self, table_name, schema):
return 'SELECT * FROM "{schema}"."{table_name}"'.format(
schema=schema or 'public',
table_name=table_name
)
def _check_exists(self, query):
    """Return True when *query* is runnable (probed with EXPLAIN)."""
    try:
        self.execute_query('EXPLAIN {}'.format(query), do_post=False)
    except CartoException:
        return False
    return True
def _check_regenerate_table_exists(self):
    """Check whether cartodb.cdb_regeneratetable is installed server-side."""
    catalog_query = '''
        SELECT 1
        FROM pg_catalog.pg_proc p
        LEFT JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
        WHERE p.proname = 'cdb_regeneratetable' AND n.nspname = 'cartodb';
    '''
    result = self.execute_query(catalog_query)
    return len(result['rows']) > 0
def _get_query_columns_info(self, query):
    """Fetch column metadata for *query* without transferring any rows."""
    probe_query = 'SELECT * FROM ({}) _q LIMIT 0'.format(query)
    table_info = self.execute_query(probe_query)
    return get_query_columns_info(table_info['fields'])
def _get_copy_query(self, query, columns, limit):
query_columns = [
double_quote(column.name) for column in columns
if (column.name != 'the_geom_webmercator')
]
query = 'SELECT {columns} FROM ({query}) _q'.format(
query=query,
columns=','.join(query_columns))
if limit is not None:
if isinstance(limit, int) and (limit >= 0):
query += ' LIMIT {limit}'.format(limit=limit)
else:
raise ValueError("`limit` parameter must an integer >= 0")
return query
@retry_copy
def _copy_to(self, query, columns, retry_times=DEFAULT_RETRY_TIMES):
    """Stream a query result through COPY TO and parse it into a DataFrame."""
    log.debug('COPY TO')
    copy_query = "COPY ({0}) TO stdout WITH (FORMAT csv, HEADER true, NULL '{1}')".format(query, PG_NULL)
    raw_result = self.copy_client.copyto_stream(copy_query)
    return pd.read_csv(
        raw_result,
        converters=obtain_converters(columns),
        parse_dates=date_columns_names(columns))
@retry_copy
def _copy_from(self, dataframe, table_name, columns, retry_times=DEFAULT_RETRY_TIMES):
    """Stream a dataframe into *table_name* through COPY FROM stdin."""
    log.debug('COPY FROM')
    column_names = ','.join(double_quote(column.dbname) for column in columns)
    query = """
        COPY {table_name}({columns}) FROM stdin WITH (FORMAT csv, DELIMITER '|', NULL '{null}');
    """.format(table_name=table_name, null=PG_NULL, columns=column_names).strip()
    self.copy_client.copyfrom(query, _compute_copy_data(dataframe, columns))
def _rename_table(self, table_name, new_table_name):
    """Execute ALTER TABLE ... RENAME for *table_name*."""
    self.execute_query(_rename_table_query(table_name, new_table_name))
def normalize_table_name(self, table_name):
    """Normalize a table name, logging when it differs from the input."""
    normalized = normalize_name(table_name)
    if normalized == table_name:
        return table_name
    log.debug('Table name normalized: "{}"'.format(normalized))
    return normalized
def _drop_table_query(table_name, if_exists=True):
return 'DROP TABLE {if_exists} {table_name}'.format(
table_name=table_name,
if_exists='IF EXISTS' if if_exists else '')
def _drop_function_query(function_name, columns_types=None, if_exists=True):
if columns_types and not isinstance(columns_types, dict):
raise ValueError('The columns_types parameter should be a dictionary of column names and types.')
columns_types = columns_types or {}
columns = ['{0} {1}'.format(cname, ctype) for cname, ctype in columns_types.items()]
columns_str = ','.join(columns)
return 'DROP FUNCTION {if_exists} {function_name}{columns_str_call}'.format(
function_name=function_name,
if_exists='IF EXISTS' if if_exists else '',
columns_str_call='({columns_str})'.format(columns_str=columns_str) if columns else '')
def _truncate_table_query(table_name):
return 'TRUNCATE TABLE {table_name}'.format(
table_name=table_name)
def _create_function_query(schema, function_name, statement, columns_types, return_value, language):
if columns_types and not isinstance(columns_types, dict):
raise ValueError('The columns_types parameter should be a dictionary of column names and types.')
columns_types = columns_types or {}
columns = ['{0} {1}'.format(cname, ctype) for cname, ctype in columns_types.items()]
columns_str = ','.join(columns) if columns else ''
function_query = '''
CREATE FUNCTION {schema}.{function_name}({columns_str})
RETURNS {return_value} AS $$
BEGIN
{statement}
END;
$$ LANGUAGE {language}
'''.format(schema=schema,
function_name=function_name,
statement=statement,
columns_str=columns_str,
return_value=return_value,
language=language)
qualified_func_name = '{schema}.{function_name}({columns_str})'.format(
schema=schema, function_name=function_name, columns_str=columns_str)
return function_query, qualified_func_name
def _drop_columns_query(table_name, columns):
    """Build the DROP COLUMN clauses of an ALTER TABLE (reserved cols skipped)."""
    return ','.join(
        'DROP COLUMN {name}'.format(name=double_quote(column.dbname))
        for column in columns if _not_reserved(column.dbname))
def _add_columns_query(table_name, columns):
    """Build the ADD COLUMN clauses of an ALTER TABLE (reserved cols skipped)."""
    return ','.join(
        'ADD COLUMN {name} {type}'.format(name=double_quote(column.dbname),
                                          type=column.dbtype)
        for column in columns if _not_reserved(column.dbname))
def _not_reserved(column):
RESERVED_COLUMNS = ['cartodb_id', 'the_geom', 'the_geom_webmercator']
return column not in RESERVED_COLUMNS
def _create_table_from_columns_query(table_name, columns):
    """Build a CREATE TABLE statement from (dbname, dbtype) column specs."""
    column_defs = ','.join(
        '{name} {type}'.format(name=double_quote(c.dbname), type=c.dbtype)
        for c in columns)
    return 'CREATE TABLE {table_name} ({columns})'.format(
        table_name=table_name, columns=column_defs)
def _create_table_from_query_query(table_name, query):
return 'CREATE TABLE {table_name} AS ({query})'.format(table_name=table_name, query=query)
def _cartodbfy_query(table_name, schema):
return "SELECT CDB_CartodbfyTable('{schema}', '{table_name}')".format(
schema=schema, table_name=table_name)
def _regenerate_table_query(table_name, schema):
return "SELECT CDB_RegenerateTable('{schema}.{table_name}'::regclass)".format(
schema=schema, table_name=table_name)
def _rename_table_query(table_name, new_table_name):
return 'ALTER TABLE {table_name} RENAME TO {new_table_name};'.format(
table_name=table_name, new_table_name=new_table_name)
def _create_auth_client(credentials, public=False):
    """Build a CARTO auth client; with public=True use the public API key."""
    api_key = 'default_public' if public else credentials.api_key
    agent = 'cartoframes_{}'.format(__version__)
    return APIKeyAuthClient(
        base_url=credentials.base_url,
        api_key=api_key,
        session=credentials.session,
        client_id=agent,
        user_agent=agent)
def _compute_copy_data(df, columns):
    """Yield '|'-separated CSV byte rows of *df* for COPY FROM stdin."""
    for index in df.index:
        encoded_cells = []
        for column in columns:
            value = df.at[index, column.name]
            if column.is_geom:
                value = encode_geometry_ewkb(value)
            encoded_cells.append(encode_row(value))
        yield b'|'.join(encoded_cells) + b'\n'
| |
#coding=utf-8
import math
import triangle
import matplotlib.pyplot as plt
class TriAxisCenter:
    """
    Axis-based center extraction for a simple polygon.

    The polygon is constrained-Delaunay triangulated (via the ``triangle``
    package); edge midpoints and triangle mass centers are linked into an
    "axis" skeleton graph, and a reduced topology graph over its branch and
    ear vertices is used to rank candidate center points.

    Fixes applied: ``dict.has_key()``, ``dict.iteritems()`` and
    tuple-unpacking lambda parameters are Python-2-only; replaced with
    forms that run on both Python 2 and 3 (behavior unchanged).
    """
    def __init__(self, points):
        '''
        initiate the triaxiscenter with points list without duplicate point
        points: [[x0,y0],[x1,y1],...,[xn,yn]]
        '''
        # initiate polygon points, and point number
        self.vertices = points
        self.ptnum = len(points)
        # calculate pslg, and cdt, then the topology axis graph
        self._cdt()
        self._top_axis_graph()
    def betweenness_center(self, top=1):
        '''
        rank branch (degree-3) vertices of the topology axis graph by how
        many ear-to-ear paths pass through them; return the `top` best
        '''
        ears = self.top_axis_graph.findVertex(1)
        results = {}
        for branch in self.top_axis_graph.findVertex(3):
            results[branch] = {}
            results[branch]["xy"] = self.axisverts[branch]
            results[branch]["degree"] = 0
        for ear in ears:
            for path in self.top_axis_graph.paths_to_ear(ear):
                for k in results.keys():
                    if k in path:
                        results[k]["degree"] += 1
        # items() + explicit key instead of iteritems() + lambda (k, v)
        ranked = sorted(results.items(),
                        key=lambda item: (item[1]["degree"], item[0]),
                        reverse=True)
        return ranked[0:top]
    def closeness_center(self, weight="length", top=1):
        '''
        rank non-ear axis vertices by the standard deviation of their path
        lengths to every ear (smaller spread first); return the `top` best
        '''
        results = {}
        for v in self.axis.vertices():
            if self.axis.degree(v) > 1:
                plen = []
                for path in self.axis.paths_to_ear(v):
                    length = self.axis.path_length(path, weight)
                    plen.append(length)
                mean = sum(plen) * 1.0 / len(plen)
                sq_sum = 0.0
                for l in plen:
                    sq_sum += ((l - mean) * (l - mean))
                std = math.sqrt(sq_sum / len(plen))
                results[v] = {}
                results[v]["degree"] = std
                results[v]["xy"] = self.axisverts[v]
        return sorted(results.items(),
                      key=lambda item: (item[1]["degree"], item[0]))[0:top]
    def _next(self, i):
        '''
        for circulate the points list
        '''
        return (i + 1) % self.ptnum
    def _tcenter(self, tri):
        '''
        mass center of a triangle
        '''
        pi, pj, pk = [self.vertices[t] for t in tri]
        cx = (pi[0] + pj[0] + pk[0]) / 3.0
        cy = (pi[1] + pj[1] + pk[1]) / 3.0
        return [cx, cy]
    def _tarea(self, tri):
        '''
        area of a triangle (Heron's formula)
        '''
        pi, pj, pk = [self.vertices[t] for t in tri]
        a = math.sqrt((pi[0]-pj[0])*(pi[0]-pj[0]) + (pi[1]-pj[1])*(pi[1]-pj[1]))
        b = math.sqrt((pi[0]-pk[0])*(pi[0]-pk[0]) + (pi[1]-pk[1])*(pi[1]-pk[1]))
        c = math.sqrt((pk[0]-pj[0])*(pk[0]-pj[0]) + (pk[1]-pj[1])*(pk[1]-pj[1]))
        s = 0.5 * (a + b + c)
        area = math.sqrt(s*(s-a)*(s-b)*(s-c))
        return area
    def _has_pair(self, e, segs):
        '''
        decide if an edge pair in segments list
        '''
        if [e[0], e[1]] in segs or [e[1], e[0]] in segs:
            return True
        else:
            return False
    def _cdt(self):
        '''
        build the CDT, Dual Graph
        '''
        data = {"vertices": [], "segments": []}
        ptnum = self.ptnum
        for i in range(self.ptnum):
            data["vertices"].append(self.vertices[i])
            data["segments"].append([i, (i+1) % ptnum])
        # triangulate the polygon
        cdt = triangle.triangulate(data, "p")
        self.cdt = {
            "vertices": cdt["vertices"],
            "segments": data["segments"],
            "triangles": cdt["triangles"],
            "ttype": []
        }
        # update triangle's type and edges list in cdt
        edge_tri = {}       # edge - triangle relation dict, for building dual graph
        self.tcenters = []  # mass center of triangles
        self.axisverts = [] # vertices of axis graph
        self.dual = Graph() # dual graph initiate
        self.axis = Graph() # axis graph initiate
        vidx = 0            # axis graph vertex index counter
        vnames = {}         # vertex names, for filtering duplicate diagonal edge center point
        for ti, tri in enumerate(cdt["triangles"]):
            cpt = self._tcenter(tri)  # calculate mass center of triangle
            self.tcenters.append(cpt)
            area = self._tarea(tri)   # area of triangle
            # initiate triangle type to 3
            ttype = 3
            # initiate triangle edge to type 1, the edge of polygon, if is diagonal, set to 0
            isside = [1, 1, 1]
            # save center point index of triangle edge in axis graph vertices list [axisverts]
            edge_cpt_idx = []
            # loop on edges of the triangle
            for i in range(3):
                # get an edge index of the triangle
                e = [tri[i], tri[(i+1) % 3]]
                # update the segments list of CDT
                if self._has_pair(e, self.cdt["segments"]) is False:
                    self.cdt["segments"].append(e)
                # build dual graph ('in' instead of Python-2-only has_key)
                ename = "-".join([str(ev) for ev in sorted(e)])
                if ename in edge_tri:
                    self.dual.addEdge(edge_tri[ename][0], ti)
                else:
                    edge_tri[ename] = [ti]
                # build the axis graph
                if self._next(e[0]) == e[1] or self._next(e[1]) == e[0]:
                    ttype -= 1  # if the edge is edge of the polygon , decreate the type
                else:
                    # else, set the edge to diagonal, 0
                    isside[i] = 0
                    # middle point of the edge, name, if it is calculated, pass and store into edge_cpt_idx
                    vn = "-".join([str(vi) for vi in sorted(e)])
                    if vn not in vnames:
                        x = (self.cdt["vertices"][e[0]][0] + self.cdt["vertices"][e[1]][0]) / 2.0
                        y = (self.cdt["vertices"][e[0]][1] + self.cdt["vertices"][e[1]][1]) / 2.0
                        self.axisverts.append([x, y])
                        edge_cpt_idx.append(vidx)
                        vnames[vn] = vidx
                        vidx += 1
                    else:
                        edge_cpt_idx.append(vnames[vn])
            if ttype == 1:  # type I triangle
                diag_idx = isside.index(0)              # diagonal edge index
                ear_vert_idx = tri[(diag_idx + 2) % 3]  # ear vertex index
                ear_vert = self.cdt["vertices"][ear_vert_idx]  # ear vertex
                # add a vertex
                self.axisverts.append(ear_vert)
                x, y = self.axisverts[edge_cpt_idx[0]]
                length = math.sqrt(pow(ear_vert[0] - x, 2) + pow(ear_vert[1] - y, 2))
                width = area / length
                self.axis.addEdge(vidx, edge_cpt_idx[0], weights={"length": length, "width": width, "area": area})
                vidx += 1
            elif ttype == 2:  # type II triangle, add edge
                x1, y1 = self.axisverts[edge_cpt_idx[0]]
                x2, y2 = self.axisverts[edge_cpt_idx[1]]
                length = math.sqrt(pow(x1 - x2, 2) + pow(y1 - y2, 2))
                width = area / length
                self.axis.addEdge(edge_cpt_idx[0], edge_cpt_idx[1], weights={"length": length, "width": width, "area": area})
            elif ttype == 3:  # type III triangle, add 3 edges, and a vertex, the mass center
                self.axisverts.append(cpt)
                cidx = vidx
                for eidx in edge_cpt_idx:
                    x1, y1 = self.axisverts[eidx]
                    length = math.sqrt(pow(x1 - cpt[0], 2) + pow(y1 - cpt[1], 2))
                    width = area / length
                    self.axis.addEdge(cidx, eidx, weights={"length": length, "width": width, "area": area})
                vidx += 1
            # save triangle type information into CDT
            self.cdt["ttype"].append(ttype)
    def _top_axis_graph(self):
        '''
        build the topology axis graph
        '''
        # init the graph
        self.top_axis_graph = Graph()
        # find vertices has degree 3
        deg3v = self.axis.findVertex(3)
        # if no branch vertices, the topology graph is a single ear-to-ear edge
        if len(deg3v) == 0:
            deg1v = self.axis.findVertex(1)
            self.top_axis_graph.addEdge(deg1v[0], deg1v[1])
            return
        # traverse the axis graph from a vertex with degree of 3
        start = deg3v[0]
        curr = start  # current vertex, a cursor
        # stack for traverse, to lambda function: pushStack, topStack, popStack
        stack = []
        pushStack = lambda li, x: li.append(x)
        topStack = lambda li: li[-1]
        popStack = lambda li: li.pop()
        pushStack(stack, start)
        # visited flag
        visited = [0] * self.axis.V
        visited[start] = 1
        while len(stack) > 0:
            dead = 1  # if no neighbor to go, this vertex is dead, means I, III vertex
            for i in self.axis.neighbor(curr):
                if visited[i] == 0:
                    curr = i
                    visited[i] = 1
                    dead = 0
                    break
            if dead:
                # if it is a dead vertex, find a branch, and change current vertex
                topN = popStack(stack)
                if len(stack):
                    self.top_axis_graph.addEdge(topN, topStack(stack))
                    curr = topStack(stack)
            else:
                curr_deg = self.axis.degree(curr)
                if curr_deg == 2:
                    # go further
                    continue
                else:
                    # I, III vertex, push to stack
                    pushStack(stack, curr)
class Graph:
    """
    Minimal undirected graph: adjacency dict, optional per-edge weight
    dicts, vertex/edge counters, and ear-path traversal helpers.

    Fix applied: ``dict.has_key()`` was removed in Python 3; the ``in``
    operator is used instead (identical behavior on Python 2).
    """
    def __init__(self):
        '''
        initiate undirected graph, adjacent dict, vertex and edge list,
        '''
        self.adj = {}      # adjacency: vertex -> list of neighbor vertices
        self.weights = {}  # edge name "a-b" (a < b) -> weight dict
        self.edges = []    # edge list as [s, e] pairs
        self.V = 0         # vertex number
        self.E = 0         # edge number
    def findVertex(self, deg):
        '''
        find vertices with degree deg
        '''
        verts = []
        for v, nlist in self.adj.items():
            if len(nlist) == deg:
                verts.append(v)
        return verts
    def degree(self, v):
        '''
        degree of a vertex
        '''
        return len(self.adj[v])
    def neighbor(self, v):
        '''
        neighbor of vertex
        '''
        return self.adj[v]
    def vertices(self):
        '''
        get vertices in the graph
        '''
        return self.adj.keys()
    def addEdge(self, s, e, weights=None):
        '''
        add a edge, with weight dict ew
        '''
        # 'in' instead of Python-2-only dict.has_key()
        if s not in self.adj:
            self.adj[s] = []
            self.V += 1
        if e not in self.adj:
            self.V += 1
            self.adj[e] = []
        # add new edges (undirected; skip if the edge already exists)
        if e not in self.adj[s]:
            self.adj[s].append(e)
            self.adj[e].append(s)
            self.E += 1
            self.edges.append([s, e])
            if weights:
                en = "-".join([str(v) for v in sorted([s, e])])
                self.weights[en] = weights
    def path_length(self, path, weight=None):
        # total length of a vertex path: hop count, or the sum of the
        # requested per-edge weight when `weight` is given
        length = 0
        for i in range(len(path) - 1):
            pi, pj = path[i], path[i+1]
            if weight:
                en = "-".join([str(v) for v in sorted([pi, pj])])
                length += self.weights[en][weight]
            else:
                length += 1
        return length
    def paths_to_ear(self, v):
        '''
        1 degree vertex V to all other 1 degree vertices' paths
        '''
        start = v
        curr = start  # current vertex, a cursor
        # stack for traverse, to lambda function: pushStack, topStack, popStack
        stack = []
        pushStack = lambda li, x: li.append(x)
        topStack = lambda li: li[-1]
        popStack = lambda li: li.pop()
        pushStack(stack, start)
        # visited flag, keyed by vertex (vertex ids may be sparse)
        visited = {}
        for k in self.adj.keys():
            visited[k] = 0
        visited[start] = 1
        paths = []
        curpath = [start]
        while len(stack) > 0:
            dead = 1  # if no neighbor to go, this vertex is dead, means I, III vertex
            for i in self.neighbor(curr):
                if visited[i] == 0:
                    curr = i
                    visited[i] = 1
                    curpath.append(i)
                    dead = 0
                    break
            if dead:
                # if it is a dead vertex, find a branch, and change current vertex
                topN = popStack(stack)
                if len(stack):
                    curr = topStack(stack)
                if self.degree(topN) == 1:
                    paths.append(curpath[:])
                    backi = curpath.index(curr)
                    curpath = curpath[0:backi+1]
            else:
                curr_deg = self.degree(curr)
                if curr_deg == 2:
                    # go further
                    continue
                else:
                    # I, III vertex, push to stack
                    pushStack(stack, curr)
        return paths
if __name__ == "__main__":
    # Smoke-test usage kept as an inert string: it reads a polygon from a
    # CSV and prints CDT statistics. NOTE(review): the print statements in
    # the string are Python-2 syntax — update before reactivating.
    '''
    import csv
    data = []
    with open("data/poly.csv") as ifile:
        reader = csv.DictReader(ifile)
        for row in reader:
            data.append([float(row["x"]), float(row["y"])])
    TAC = TriAxisCenter(data)
    print "poly vertices:", len(TAC.cdt["vertices"])
    print "cdt edges:", len(TAC.cdt["segments"])
    print "triangles:", len(TAC.cdt["triangles"])'''
    pass
| |
###
# Copyright (c) 2002-2005, Jeremiah Fincher
# Copyright (c) 2008, James Vega
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import re
from supybot.test import *
class MiscTestCase(ChannelPluginTestCase):
    """Functional tests for the Misc plugin's user-visible commands (help,
    list, tell, last, more, apropos), driven through the supybot test
    harness. Several tests flip global config values and restore them in
    ``finally`` blocks so state does not leak between tests."""
    # Plugins that must be loaded for these commands to be available.
    plugins = ('Misc', 'Utilities', 'Anonymous', 'Plugin',
               'Channel', 'Dict', 'User', 'String')
    def testReplyWhenNotCommand(self):
        # With reply.whenNotCommand enabled, unknown words get an
        # "not a command" style error reply.
        try:
            original = conf.supybot.reply.whenNotCommand()
            conf.supybot.reply.whenNotCommand.setValue(True)
            self.prefix = 'somethingElse!user@host.domain.tld'
            self.assertRegexp('foo', 'not.*command')
            self.assertRegexp('foo bar baz', 'not.*command')
        finally:
            conf.supybot.reply.whenNotCommand.setValue(original)
    def testReplyWhenNotCommandButFirstCommandIsPluginName(self):
        # A bare plugin name should suggest "list <Plugin>" instead.
        try:
            original = conf.supybot.reply.whenNotCommand()
            conf.supybot.reply.whenNotCommand.setValue(True)
            self.assertRegexp('misc foo', '"list Misc"')
        finally:
            conf.supybot.reply.whenNotCommand.setValue(original)
#    if network:
#        def testNotReplyWhenRegexpsMatch(self):
#            try:
#                orig = conf.supybot.reply.whenNotCommand()
#                gk = conf.supybot.plugins.Gameknot.gameSnarfer()
#                conf.supybot.reply.whenNotCommand.setValue(True)
#                conf.supybot.plugins.Gameknot.gameSnarfer.setValue(True)
#                self.prefix = 'somethingElse!user@host.domain.tld'
#                self.assertSnarfNotError(
#                        'http://gameknot.com/chess.pl?bd=1019508')
#            finally:
#                conf.supybot.reply.whenNotCommand.setValue(orig)
#                conf.supybot.plugins.Gameknot.gameSnarfer.setValue(gk)
    def testNotReplyWhenNotCanonicalName(self):
        # Mixed-case command names resolve but should not be echoed as
        # "not a command" errors.
        try:
            original = str(conf.supybot.reply.whenNotCommand)
            conf.supybot.reply.whenNotCommand.set('True')
            self.prefix = 'somethingElse!user@host.domain.tld'
            self.assertNotRegexp('LeN foobar', 'command')
            self.assertResponse('lEn foobar', '6')
        finally:
            conf.supybot.reply.whenNotCommand.set(original)
    def testHelp(self):
        self.assertHelp('help list')
        self.assertRegexp('help help', r'^\(\x02help')
        #self.assertRegexp('help misc help', r'^\(\x02misc help')
        self.assertError('help nonExistentCommand')
    def testHelpIncludeFullCommandName(self):
        # Help for a nested command must mention the full command path.
        self.assertHelp('help channel capability add')
        m = self.getMsg('help channel capability add')
        self.failUnless('channel capability add' in m.args[1])
    def testHelpDoesAmbiguityWithDefaultPlugins(self):
        m = self.getMsg('help list') # Misc.list and User.list.
        self.failIf(m.args[1].startswith('Error'))
    def testHelpIsCaseInsensitive(self):
        self.assertHelp('help LIST')
    def testList(self):
        self.assertNotError('list')
        self.assertNotError('list Misc')
    def testListIsCaseInsensitive(self):
        self.assertNotError('list misc')
    def testListPrivate(self):
        # If Anonymous changes to public, these tests will break.  So if
        # the next assert fails, change the plugin we test for public/private
        # to some other non-public plugin.
        name = 'Anonymous'
        conf.supybot.plugins.Anonymous.public.setValue(False)
        self.assertNotRegexp('list', name)
        self.assertRegexp('list --private', name)
        conf.supybot.plugins.Anonymous.public.setValue(True)
        self.assertRegexp('list', name)
        self.assertNotRegexp('list --private', name)
    def testListDoesNotIncludeNonCanonicalName(self):
        self.assertNotRegexp('list Owner', '_exec')
    def testListNoIncludeDispatcher(self):
        self.assertNotRegexp('list Misc', 'misc')
    def testListIncludesDispatcherIfThereIsAnOriginalCommand(self):
        self.assertRegexp('list Dict', r'\bdict\b')
    # Network-dependent tests are only defined when the test run has
    # network access enabled.
    if network:
        def testVersion(self):
            print '*** This test should start passing when we have our '\
                  'threaded issues resolved.'
            self.assertNotError('version')
    def testSource(self):
        self.assertNotError('source')
    def testTell(self):
        # This test fails because the test is seeing us as owner and Misc.tell
        # allows the owner to send messages to people the bot hasn't seen.
        m = self.getMsg('tell aljsdkfh [plugin tell]')
        self.failUnless('let you do' in m.args[1])
        m = self.getMsg('tell #foo [plugin tell]')
        self.failUnless('No need for' in m.args[1])
        m = self.getMsg('tell me you love me')
        self.failUnless(m.args[0] == self.nick)
    def testNoNestedTell(self):
        self.assertRegexp('echo [tell %s foo]' % self.nick, 'nested')
    def testTellDoesNotPropogateAction(self):
        m = self.getMsg('tell foo [action bar]')
        self.failIf(ircmsgs.isAction(m))
    def testLast(self):
        # Exercises the various --with/--without/--regexp/--from filters;
        # timestamps are disabled first so responses can be matched exactly.
        orig = conf.supybot.plugins.Misc.timestampFormat()
        try:
            conf.supybot.plugins.Misc.timestampFormat.setValue('')
            self.feedMsg('foo bar baz')
            self.assertResponse('last', '<%s> foo bar baz' % self.nick)
            self.assertRegexp('last', '<%s> @last' % self.nick)
            self.assertResponse('last --with foo', '<%s> foo bar baz' % \
                                self.nick)
            self.assertResponse('last --without foo', '<%s> @last' % self.nick)
            self.assertRegexp('last --regexp m/\s+/', 'last --without foo')
            self.assertResponse('last --regexp m/bar/',
                                '<%s> foo bar baz' % self.nick)
            self.assertResponse('last --from %s' % self.nick.upper(),
                                '<%s> @last --regexp m/bar/' % self.nick)
            self.assertResponse('last --from %s*' % self.nick[0],
                                '<%s> @last --from %s' %
                                (self.nick, self.nick.upper()))
            conf.supybot.plugins.Misc.timestampFormat.setValue('foo')
            self.assertSnarfNoResponse('foo bar baz', 1)
            self.assertResponse('last', '<%s> foo bar baz' % self.nick)
        finally:
            conf.supybot.plugins.Misc.timestampFormat.setValue(orig)
    def testNestedLastTimestampConfig(self):
        tsConfig = conf.supybot.plugins.Misc.last.nested.includeTimestamp
        orig = tsConfig()
        try:
            tsConfig.setValue(True)
            self.getMsg('foo bar baz')
            chars = conf.supybot.reply.whenAddressedBy.chars()
            chars = re.escape(chars)
            self.assertRegexp('echo [last]', r'[%s]foo bar baz' % chars)
        finally:
            tsConfig.setValue(orig)
    def testNestedLastNickConfig(self):
        nickConfig = conf.supybot.plugins.Misc.last.nested.includeNick
        orig = nickConfig()
        try:
            nickConfig.setValue(True)
            self.getMsg('foo bar baz')
            chars = conf.supybot.reply.whenAddressedBy.chars()
            chars = re.escape(chars)
            self.assertRegexp('echo [last]',
                              '<%s> [%s]foo bar baz' % (self.nick, chars))
        finally:
            nickConfig.setValue(orig)
    def testMore(self):
        # A long echo is split; each 'more' pops a chunk until exhausted.
        self.assertRegexp('echo %s' % ('abc'*300), 'more')
        self.assertRegexp('more', 'more')
        self.assertNotRegexp('more', 'more')
    def testInvalidCommand(self):
        self.assertError('echo []')
    def testMoreIsCaseInsensitive(self):
        self.assertNotError('echo %s' % ('abc'*2000))
        self.assertNotError('more')
        nick = ircutils.nickFromHostmask(self.prefix)
        self.assertNotError('more %s' % nick)
        self.assertNotError('more %s' % nick.upper())
        self.assertNotError('more %s' % nick.lower())
    def testApropos(self):
        self.assertNotError('apropos f')
        self.assertRegexp('apropos asldkfjasdlkfja', 'No appropriate commands')
    def testAproposIsNotCaseSensitive(self):
        self.assertNotRegexp('apropos LIST', 'No appropriate commands')
    def testAproposDoesntReturnNonCanonicalNames(self):
        self.assertNotRegexp('apropos exec', '_exec')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| |
from compliance_checker.ioos import (IOOS0_1Check, IOOS1_1Check, IOOS1_2Check,
NamingAuthorityValidator)
from compliance_checker.tests.resources import STATIC_FILES
from compliance_checker.tests import BaseTestCase
from compliance_checker.tests.helpers import MockTimeSeries, MockVariable
from compliance_checker.tests.test_cf import get_results
from netCDF4 import Dataset
import numpy as np
import os
class TestIOOS0_1(BaseTestCase):
    '''
    Tests for the IOOS Inventory Metadata v0.1
    '''
    def setUp(self):
        # The NCEI Gold Standard Point dataset exercises the IOOS checks.
        self.ds = self.load_dataset(STATIC_FILES['ncei_gold_point_1'])
        self.ioos = IOOS0_1Check()
    def test_cc_meta(self):
        """Checker identifies itself with the right spec name and version."""
        assert self.ioos._cc_spec == 'ioos'
        assert self.ioos._cc_spec_version == '0.1'
    def test_global_attributes(self):
        '''
        Tests that all global attributes checks are working
        '''
        # A diskless dataset writing to /dev/null acts as a temporary
        # in-memory netCDF file that never touches the disk.
        nc_obj = Dataset(os.devnull, 'w', diskless=True)
        self.addCleanup(nc_obj.close)
        # Every check fails on a file with no attributes at all ...
        for bad in self.ioos.check_global_attributes(nc_obj):
            self.assert_result_is_bad(bad)
        for attr in ('acknowledgement', 'publisher_email', 'institution',
                     'publisher_name', 'Conventions'):
            setattr(nc_obj, attr, 'test')
        # ... and passes once every required attribute is present.
        for good in self.ioos.check_global_attributes(nc_obj):
            self.assert_result_is_good(good)
    def test_variable_attributes(self):
        '''
        Tests that the platform variable attributes check is working
        '''
        nc_obj = Dataset(os.devnull, 'w', diskless=True)
        self.addCleanup(nc_obj.close)
        # At least one variable is needed so the checker can report the
        # missing attributes.
        nc_obj.createDimension('time', 1)
        platform = nc_obj.createVariable('platform', 'S1', ())
        for bad in self.ioos.check_variable_attributes(nc_obj):
            self.assert_result_is_bad(bad)
        platform.long_name = 'platform'
        platform.short_name = 'platform'
        platform.source = 'glider'
        platform.ioos_name = 'urn:ioos:station:glos:leorgn'
        platform.wmo_id = '1234'
        platform.comment = 'test'
        for good in self.ioos.check_variable_attributes(nc_obj):
            self.assert_result_is_good(good)
    def test_variable_units(self):
        '''
        Tests that the variable units test is working
        '''
        # This check requires a units attribute on EVERY variable.
        nc_obj = Dataset(os.devnull, 'w', diskless=True)
        self.addCleanup(nc_obj.close)
        nc_obj.createDimension('time', 1)
        sample_var = nc_obj.createVariable('sample_var', 'd', ('time',))
        self.assert_result_is_bad(self.ioos.check_variable_units(nc_obj))
        sample_var.units = 'm'
        sample_var.short_name = 'sample_var'
        self.assert_result_is_good(self.ioos.check_variable_units(nc_obj))
    def test_altitude_units(self):
        '''
        Tests that the altitude variable units test is working
        '''
        self.assert_result_is_good(self.ioos.check_altitude_units(self.ds))
        # A 'z' variable without units must fail the units check.
        nc_obj = Dataset(os.devnull, 'w', diskless=True)
        self.addCleanup(nc_obj.close)
        nc_obj.createDimension('time', 1)
        z = nc_obj.createVariable('z', 'd', ('time',))
        z.short_name = 'sample_var'
        self.assert_result_is_bad(self.ioos.check_variable_units(nc_obj))
class TestIOOS1_1(BaseTestCase):
'''
Tests for the compliance checker implementation of IOOS Metadata Profile
for NetCDF, Version 1.1
'''
def setUp(self):
    """Load the IOOS 1.1 gold-standard dataset and the checker under test."""
    self.ds = self.load_dataset(STATIC_FILES['ioos_gold_1_1'])
    self.ioos = IOOS1_1Check()
def test_cc_meta(self):
    """Checker identifies itself with the right spec name and version."""
    checker = self.ioos
    assert checker._cc_spec == 'ioos'
    assert checker._cc_spec_version == '1.1'
def test_required_attributes(self):
    '''
    Tests that required attributes test is working properly
    '''
    for result in self.ioos.check_high(self.ds):
        self.assert_result_is_good(result)
def test_recomended_attributes(self):
'''
Tests that recommended attributes test is working properly
'''
results = self.ioos.check_recommended(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_platform_variables(self):
'''
Tests that the platform variable attributes check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.platform = 'platform'
# global attribute 'platform' points to variable that does not exist in dataset
results = self.ioos.check_platform_variables(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_platform_variables(self):
'''
Tests that the platform variable attributes check is working
'''
results = self.ioos.check_platform_variables(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_geophysical_vars_fill_value(self):
'''
Tests that the geophysical variable _FillValue check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('sample_var', 'd', ('time',))
# Define some variable attributes but don't specify _FillValue
sample_var = nc_obj.variables['sample_var']
sample_var.units = 'm'
sample_var.short_name = 'temp'
# global attribute 'platform' points to variable that does not exist in dataset
results = self.ioos.check_geophysical_vars_fill_value(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_geophysical_vars_fill_value(self):
'''
Tests that the geophysical variable _FillValue check is working
'''
results = self.ioos.check_geophysical_vars_fill_value(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_geophysical_vars_standard_name(self):
'''
Tests that the platform variable attributes check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('sample_var', 'd', ('time',))
# Define some variable attributes but don't specify _FillValue
sample_var = nc_obj.variables['sample_var']
sample_var.units = 'm'
sample_var.short_name = 'temp'
# global attribute 'platform' points to variable that does not exist in dataset
results = self.ioos.check_geophysical_vars_standard_name(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_geophysical_vars_standard_name(self):
'''
Tests that the geophysical variable _FillValue check is working
'''
results = self.ioos.check_geophysical_vars_standard_name(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_units(self):
'''
Tests that the valid units check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('temperature', 'd', ('time',))
# Define some variable attributes but don't specify _FillValue
sample_var = nc_obj.variables['temperature']
sample_var.units = 'degC' # Not valid units
sample_var.short_name = 'temp'
# global attribute 'platform' points to variable that does not exist in dataset
results = self.ioos.check_geophysical_vars_standard_name(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_units(self):
'''
Tests that the valid units check is working
'''
results = self.ioos.check_units(self.ds)
for result in results:
self.assert_result_is_good(result)
class TestIOOS1_2(BaseTestCase):
    '''
    Tests for the compliance checker implementation of IOOS Metadata Profile
    for NetCDF, Version 1.2
    '''
    # NOTE: docstring previously said "Version 1.1" (copy-paste from the
    # 1.1 test class); this class tests the 1.2 checker.

    def setUp(self):
        self.ioos = IOOS1_2Check()

    def test_check_geophysical_vars_have_attrs(self):
        '''Geophysical variables must carry all required attributes.'''
        # create geophysical variable
        ds = MockTimeSeries()  # time, lat, lon, depth
        temp = ds.createVariable("temp", np.float64, dimensions=("time",))
        # should fail here
        results = self.ioos.check_geophysical_vars_have_attrs(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)
        # set the necessary attributes
        ds = MockTimeSeries(default_fill_value=9999999999.)  # time, lat, lon, depth
        temp = ds.createVariable("temp", np.float64, fill_value=9999999999.)  # _FillValue
        temp.setncattr("missing_value", 9999999999.)
        temp.setncattr("standard_name", "sea_surface_temperature")
        temp.setncattr("standard_name_uri", "http://cfconventions.org/Data/cf-standard-names/64/build/cf-standard-name-table.html")
        temp.setncattr("units", "degree_C")
        temp.setncattr("platform", "myPlatform")
        results = self.ioos.check_geophysical_vars_have_attrs(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)

    def test_check_geospatial_vars_have_attrs(self):
        '''Geospatial (coordinate) variables must carry required attributes.'''
        # create geophysical variable
        ds = MockTimeSeries()  # time, lat, lon, depth
        temp = ds.createVariable("temp", np.float64, dimensions=("time",))
        # should fail here
        results = self.ioos.check_geospatial_vars_have_attrs(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)
        # should pass - default_fill_value sets _FillValue attr
        ds = MockTimeSeries(default_fill_value=9999999999.)  # time, lat, lon, depth
        ds.variables["time"].setncattr("standard_name", "time")
        ds.variables["time"].setncattr("standard_name_uri", "time")
        ds.variables["time"].setncattr("units", "hours since 1970-01-01T00:00:00")
        ds.variables["time"].setncattr("missing_value", 9999999999.)
        results = self.ioos.check_geospatial_vars_have_attrs(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)

    def test_check_contributor_role_and_vocabulary(self):
        '''contributor_role/_vocabulary must both be present and valid.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no contributor_role or vocab, fail both
        results = self.ioos.check_contributor_role_and_vocabulary(ds)
        self.assertFalse(all(r.value for r in results))
        # bad contributor_role and vocab
        ds.setncattr("contributor_role", "bad")
        ds.setncattr("contributor_role_vocabulary", "bad")
        results = self.ioos.check_contributor_role_and_vocabulary(ds)
        self.assertFalse(all(r.value for r in results))
        # good role, bad vocab
        ds.setncattr("contributor_role", "contributor")
        results = self.ioos.check_contributor_role_and_vocabulary(ds)
        self.assertTrue(results[0].value)
        self.assertFalse(results[1].value)
        # bad role, good vocab
        ds.setncattr("contributor_role", "bad")
        ds.setncattr("contributor_role_vocabulary", "http://vocab.nerc.ac.uk/collection/G04/current/")
        results = self.ioos.check_contributor_role_and_vocabulary(ds)
        self.assertFalse(results[0].value)
        self.assertTrue(results[1].value)
        # good role, good vocab
        ds.setncattr("contributor_role", "contributor")
        ds.setncattr("contributor_role_vocabulary", "http://vocab.nerc.ac.uk/collection/G04/current/")
        results = self.ioos.check_contributor_role_and_vocabulary(ds)
        self.assertTrue(results[0].value)
        self.assertTrue(results[1].value)

    def test_check_creator_and_publisher_type(self):
        """
        Checks the creator_type and publisher_type global attributes with
        the following values:
        Empty: Valid, defaults to "person" when not specified, which is
               contained in the list of valid values.
        Bad values: Invalid, not contained in list of valid values.
        Good values: Valid, contained in list.
        """
        ds = MockTimeSeries()
        # values which are not set/specified default to person, which is valid
        result_list = self.ioos.check_creator_and_publisher_type(ds)
        self.assertTrue(all(res.value for res in result_list))
        # create invalid values for attribute
        ds.setncattr('creator_type', 'PI')
        ds.setncattr('publisher_type', 'Funder')
        result_list = self.ioos.check_creator_and_publisher_type(ds)
        err_regex = (r"^If specified, \w+_type must be in value list "
                     r"\(\['group', 'institution', 'person', 'position'\]\)$")
        for res in result_list:
            self.assertFalse(res.value)
            self.assertRegex(res.msgs[0], err_regex)
        # good values
        ds.setncattr('creator_type', 'person')
        ds.setncattr('publisher_type', 'institution')
        result_list = self.ioos.check_creator_and_publisher_type(ds)
        self.assertTrue(all(res.value for res in result_list))

    def test_check_gts_ingest(self):
        '''gts_ingest variables need a QC ancillary variable to pass.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no gts_ingest, should pass
        results = self.ioos.check_gts_ingest(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)
        # global
        ds.setncattr("gts_ingest", "true")
        results = self.ioos.check_gts_ingest(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)
        # give one variable the gts_ingest attribute
        # no ancillary vars, should fail
        ds.variables["time"].setncattr("gts_ingest", "true")
        results = self.ioos.check_gts_ingest(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)
        # set ancillary var with bad standard name
        tmp = ds.createVariable("tmp", np.byte, ("time",))
        tmp.setncattr("standard_name", "bad")
        ds.variables["time"].setncattr("ancillary_variables", "tmp")
        results = self.ioos.check_gts_ingest(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)
        # good ancillary var standard name
        tmp.setncattr("standard_name", "aggregate_quality_flag")
        results = self.ioos.check_gts_ingest(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)

    def test_check_instrument_variables(self):
        '''Instrument variables' component/discriminant must be strings.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no instrument variable, should pass
        results = self.ioos.check_instrument_variables(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)
        temp = ds.createVariable("temp", np.float64, dimensions=("time",))
        temp.setncattr("cf_role", "timeseries")
        temp.setncattr("standard_name", "sea_surface_temperature")
        temp.setncattr("units", "degree_C")
        temp.setncattr("axis", "Y")
        temp.setncattr("instrument", "myInstrument")
        temp[:] = 45.
        instr = ds.createVariable("myInstrument", np.float64, dimensions=("time",))
        # give instrument variable with component
        instr.setncattr("component", "someComponent")
        results = self.ioos.check_instrument_variables(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)
        # give discriminant
        instr.setncattr("discriminant", "someDiscriminant")
        results = self.ioos.check_instrument_variables(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)
        # bad component
        instr.setncattr("component", 45)
        results = self.ioos.check_instrument_variables(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)

    def test_check_wmo_platform_code(self):
        '''wmo_platform_code must be a 5- or 7-digit numeric string.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no wmo_platform_code, pass
        result = self.ioos.check_wmo_platform_code(ds)
        self.assertTrue(result.value)
        # valid code
        ds.setncattr("wmo_platform_code", "12345")
        result = self.ioos.check_wmo_platform_code(ds)
        self.assertTrue(result.value)
        # valid code
        ds.setncattr("wmo_platform_code", "7654321")
        result = self.ioos.check_wmo_platform_code(ds)
        self.assertTrue(result.value)
        # non-numeric, fail
        ds.setncattr("wmo_platform_code", "abcd1")
        result = self.ioos.check_wmo_platform_code(ds)
        self.assertFalse(result.value)
        # invalid length, fail
        ds.setncattr("wmo_platform_code", "123")
        result = self.ioos.check_wmo_platform_code(ds)
        self.assertFalse(result.value)

    def test_check_standard_name(self):
        '''All variables (incl. QARTOD flags) need valid standard_names.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no standard names
        results = self.ioos.check_standard_name(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)
        # give standard names to all variables
        ds.variables["time"].setncattr("standard_name", "time")
        ds.variables["lon"].setncattr("standard_name", "longitude")
        ds.variables["lat"].setncattr("standard_name", "latitude")
        ds.variables["depth"].setncattr("standard_name", "depth")
        results = self.ioos.check_standard_name(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)
        # add a QARTOD variable, no standard name - should fail
        qr = ds.createVariable("depth_qc", np.byte)
        qr.setncattr("flag_meanings", "blah")
        results = self.ioos.check_standard_name(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)
        # bad standard name
        qr.setncattr("standard_name", "blah")
        results = self.ioos.check_standard_name(ds)
        scored, out_of, messages = get_results(results)
        self.assertLess(scored, out_of)
        # good standard name
        qr.setncattr("standard_name", "spike_test_quality_flag")
        results = self.ioos.check_standard_name(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)

    def test_naming_authority_validation(self):
        '''naming_authority must be a URL or a reversed-DNS name.'''
        test_attr_name = "naming_authority"
        validator = NamingAuthorityValidator()
        # check URL - should pass
        self.assertTrue(validator.validate(test_attr_name,
                                           "https://ioos.us")[0])
        # check reverse DNS - should pass
        self.assertTrue(validator.validate(test_attr_name,
                                           "edu.ucar.unidata")[0])
        # email address is neither of the above, so should fail
        bad_result = validator.validate(test_attr_name,
                                        "webmaster.ioos.us@noaa.gov")
        self.assertFalse(bad_result[0])
        self.assertEqual(bad_result[1],
                         "naming_authority should either be a URL or a "
                         "reversed DNS name (e.g \"edu.ucar.unidata\")")

    def test_check_platform_cf_role(self):
        """
        Check that cf_role inside platform variables only allows certain
        values, namely "profile_id", "timeseries_id", or "trajectory_id"
        """
        ds = MockTimeSeries()
        plat_var = ds.createVariable("platform", np.int8, ())
        ds.variables['depth'].platform = "platform"
        self.ioos.setup(ds)
        results = self.ioos.check_platform_variable_cf_role(ds)
        # don't set attribute, should raise error about attribute not
        # existing
        self.assertEqual(len(results), 1)
        score, out_of = results[0].value
        self.assertLess(score, out_of)
        # set to invalid value
        plat_var.setncattr("cf_role", "bad_value")
        results = self.ioos.check_platform_variable_cf_role(ds)
        # BUGFIX: re-unpack the fresh result; the original asserted the
        # stale score/out_of left over from the first check.
        score, out_of = results[0].value
        self.assertLess(score, out_of)
        expected_vals = ["profile_id", "timeseries_id", "trajectory_id"]
        expect_msg = ("attribute cf_role in variable platform present, but not "
                      "in expected value list ({})".format(expected_vals))
        self.assertEqual(results[0].msgs, [expect_msg])
        # set to valid value
        plat_var.setncattr("cf_role", "timeseries_id")
        results = self.ioos.check_platform_variable_cf_role(ds)
        score, out_of = results[0].value
        self.assertEqual(score, out_of)

    def test_check_platform_global(self):
        '''The global platform attribute must be a single, space-free string.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no global attr, fail
        self.assertFalse(self.ioos.check_platform_global(ds).value)
        # bad global attr, fail
        ds.setncattr("platform", "bad value")
        self.assertFalse(self.ioos.check_platform_global(ds).value)
        # another bad value
        ds.setncattr("platform", " bad")
        self.assertFalse(self.ioos.check_platform_global(ds).value)
        # good value
        ds.setncattr("platform", "single_string")
        self.assertTrue(self.ioos.check_platform_global(ds).value)

    def test_check_single_platform(self):
        '''A dataset should describe exactly one platform.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no global attr but also no platform variables, should pass
        results = self.ioos.check_single_platform(ds)
        self.assertTrue(results[0].value)
        # give platform global, no variables, fail
        ds.setncattr("platform", "buoy")
        results = self.ioos.check_single_platform(ds)
        self.assertFalse(results[0].value)
        # global attribute, one platform variable, correct cf_role & featureType, pass
        ds.setncattr("featureType", "profile")
        ds.createDimension("profile", 1)
        temp = ds.createVariable("temp", "d", ("time"))
        temp.setncattr("platform", "platform_var")
        plat = ds.createVariable("platform_var", np.byte)
        cf_role_var = ds.createVariable("cf_role_var", np.byte, ("profile",))
        cf_role_var.setncattr("cf_role", "timeseries_id")
        results = self.ioos.check_single_platform(ds)
        self.assertTrue(all(r.value for r in results))
        # global attr, multiple platform variables, correct cf_role & featureType, fail
        plat2 = ds.createVariable("platform_var_2", np.byte)
        temp2 = ds.createVariable("temp2", "d", ("time"))
        # BUGFIX: reference the variable actually created above
        # ("platform_var_2", not "platform_var2") so two distinct platform
        # variables genuinely exist for this failure case.
        temp2.setncattr("platform", "platform_var_2")
        results = self.ioos.check_single_platform(ds)
        self.assertFalse(results[0].value)
        # no global attr with platform vars present, fail
        ds.delncattr("platform")
        # BUGFIX: re-run the check after removing the global attribute; the
        # original asserted the stale results from the previous call.
        results = self.ioos.check_single_platform(ds)
        self.assertFalse(results[0].value)
        # global attr, one platform var, correct featureType, incorrect cf_role var dimension
        ds = MockTimeSeries()  # time, lat, lon, depth
        ds.setncattr("featureType", "trajectoryprofile")
        ds.createDimension("trajectory", 2)  # should only be 1
        temp = ds.createVariable("temp", "d", ("time"))
        temp.setncattr("platform", "platform_var")
        plat = ds.createVariable("platform_var", np.byte)
        cf_role_var = ds.createVariable("cf_role_var", np.byte, ("trajectory",))
        cf_role_var.setncattr("cf_role", "trajectory_id")
        results = self.ioos.check_single_platform(ds)
        self.assertFalse(results[0].value)

    def test_check_platform_vocabulary(self):
        '''platform_vocabulary must be a valid URL.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        ds.setncattr("platform_vocabulary", "http://google.com")
        self.assertTrue(self.ioos.check_platform_vocabulary(ds).value)
        ds.setncattr("platform_vocabulary", "bad")
        self.assertFalse(self.ioos.check_platform_vocabulary(ds).value)

    def test_check_qartod_variables_references(self):
        '''QARTOD variables must have a valid "references" URL.'''
        ds = MockTimeSeries()  # time, lat, lon, depth
        # no QARTOD variables
        results = self.ioos.check_qartod_variables_references(ds)
        scored, out_of, messages = get_results(results)
        self.assertEqual(scored, out_of)
        # QARTOD variable without references (fail)
        qr = ds.createVariable("depth_qc", np.byte)
        qr.setncattr("flag_meanings", "blah")
        qr.setncattr("standard_name", "spike_test_quality_flag")
        results = self.ioos.check_qartod_variables_references(ds)
        self.assertFalse(all(r.value for r in results))
        # QARTOD variable with references (pass)
        qr.setncattr("references", "http://services.cormp.org/quality.php")
        results = self.ioos.check_qartod_variables_references(ds)
        self.assertTrue(all(r.value for r in results))
        # QARTOD variable with bad references (fail)
        qr.setncattr("references", r"p9q384ht09q38@@####???????////??//\/\/\/\//\/\74ht")
        results = self.ioos.check_qartod_variables_references(ds)
        self.assertFalse(all(r.value for r in results))

    def test_check_ioos_ingest(self):
        '''ioos_ingest, when present, must be the literal string "false".'''
        ds = MockTimeSeries()
        # no value, pass
        self.assertTrue(self.ioos.check_ioos_ingest(ds).value)
        # value false
        ds.setncattr("ioos_ingest", "false")
        self.assertTrue(self.ioos.check_ioos_ingest(ds).value)
        # value anything but false
        ds.setncattr("ioos_ingest", "true")
        self.assertFalse(self.ioos.check_ioos_ingest(ds).value)
        ds.setncattr("ioos_ingest", 0)
        self.assertFalse(self.ioos.check_ioos_ingest(ds).value)
        ds.setncattr("ioos_ingest", "False")
        self.assertFalse(self.ioos.check_ioos_ingest(ds).value)
| |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigip_ssl_certificate
short_description: Import/Delete certificates from BIG-IP
description:
- This module imports/deletes SSL certificates on BIG-IP LTM.
Certificates can be imported from certificate and key files on the local
disk, in PEM format.
version_added: "1.0.0"
options:
content:
description:
- Sets the contents of a certificate directly to the specified value.
    This is used with lookup plugins or for anything with formatting or
    templating.
  - C(content) must be provided when C(state) is C(present).
type: str
aliases: ['cert_content']
state:
description:
- Certificate state. This determines if the provided certificate
and key is to be made C(present) on the device or C(absent).
type: str
choices:
- present
- absent
default: present
name:
description:
- SSL Certificate Name. This is the cert name used when importing a certificate
into the BIG-IP. It also determines the filenames of the objects on the LTM.
type: str
required: True
issuer_cert:
description:
- Issuer certificate used for OCSP monitoring.
- This parameter is only valid on versions of BIG-IP 13.0.0 or above.
type: str
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
notes:
- This module does not behave like other modules that you might include in
roles, where referencing files or templates first looks in the role's
files or templates directory. To have it behave that way, use the Ansible
file or template lookup (see Examples). The lookups behave as expected in
a role context.
extends_documentation_fragment: f5networks.f5_modules.f5
requirements:
- BIG-IP >= v12
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Use a file lookup to import PEM Certificate
bigip_ssl_certificate:
name: certificate-name
state: present
content: "{{ lookup('file', '/path/to/cert.crt') }}"
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Use a file lookup to import CA certificate chain
bigip_ssl_certificate:
name: ca-chain-name
state: present
content: "{{ lookup('file', '/path/to/ca-chain.crt') }}"
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Delete Certificate
bigip_ssl_certificate:
name: certificate-name
state: absent
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
'''
RETURN = r'''
cert_name:
description: The name of the certificate.
returned: created
type: str
sample: cert1
filename:
description:
- The name of the SSL certificate.
returned: created
type: str
sample: cert1.crt
checksum:
description: SHA1 checksum of the cert.
returned: changed and created
type: str
sample: f7ff9e8b7bb2e09b70935a5d785e0cc5d9d0abf0
source_path:
description: Path on BIG-IP where the source of the certificate is stored.
returned: created
type: str
sample: /var/config/rest/downloads/cert1.crt
'''
import hashlib
import os
import re
from datetime import datetime
from ansible.module_utils.basic import (
AnsibleModule, env_fallback
)
from ..module_utils.bigip import F5RestClient
from ..module_utils.common import (
F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec, fq_name
)
from ..module_utils.icontrol import (
upload_file, tmos_version
)
from ..module_utils.teem import send_teem
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
class Parameters(AnsibleF5Parameters):
    """Shared parameter definitions for the bigip_ssl_certificate module."""

    # Directory on the BIG-IP where uploaded files land before import.
    download_path = '/var/config/rest/downloads'

    # Maps REST API attribute names to this module's parameter names.
    api_map = {
        'sourcePath': 'source_path',
        'issuerCert': 'issuer_cert',
    }

    # Parameters whose want/have differences trigger an update.
    updatables = [
        'content',
        'issuer_cert',
        'source_path',
    ]

    # Parameters reported back to the user in the module result.
    returnables = [
        'filename',
        'checksum',
        'source_path',
        'issuer_cert',
    ]

    # Parameters sent to the REST API, in API (camelCase) naming.
    api_attributes = [
        'issuerCert',
        'sourcePath',
    ]
class ApiParameters(Parameters):
    """Parameters as reported by the BIG-IP REST API (the "have" side)."""

    @property
    def checksum(self):
        """Extract the bare SHA1 digest from the API's 'SHA1:<len>:<hex>' form."""
        raw = self._values['checksum']
        if raw is None:
            return None
        found = re.match(r'SHA1:\d+:(?P<value>[\w+]{40})', raw)
        return found.group('value') if found else None

    @property
    def filename(self):
        """On-device filename is simply the resource name."""
        return self._values['name']
class ModuleParameters(Parameters):
    """Parameters supplied by the user in the Ansible task (the "want" side)."""

    def _get_hash(self, content):
        """Return the SHA1 hex digest of *content*, streamed in 1 KiB chunks."""
        digest = hashlib.sha1()
        stream = StringIO(content)
        for chunk in iter(lambda: stream.read(1024), ''):
            digest.update(chunk.encode('utf-8'))
        return digest.hexdigest()

    @property
    def issuer_cert(self):
        """Fully-qualified issuer cert name, guaranteed to end in '.crt'."""
        if self._values['issuer_cert'] is None:
            return None
        name = fq_name(self.partition, self._values['issuer_cert'])
        return name if name.endswith('.crt') else name + '.crt'

    @property
    def checksum(self):
        """SHA1 of the supplied certificate content, or None when absent."""
        content = self.content
        if content is None:
            return None
        return self._get_hash(content)

    @property
    def filename(self):
        """Target filename on the device, guaranteed to end in '.crt'."""
        return self.name if self.name.endswith('.crt') else self.name + '.crt'

    @property
    def source_path(self):
        """file:// URI of the uploaded file used as the import source."""
        return 'file://' + os.path.join(self.download_path, self.filename)
class Changes(Parameters):
    """Base class for the computed set of changed parameters."""

    def to_return(self):
        """Return the dict of returnable values, filtered of empty entries.

        The original wrapped this in ``try/except Exception: raise``, which
        re-raised without adding anything; that dead wrapper is removed.
        """
        result = {r: getattr(self, r) for r in self.returnables}
        return self._filter_params(result)
class ReportableChanges(Changes):
    # No extra massaging is needed before reporting changes to the user.
    pass
class UsableChanges(Changes):
    # No extra massaging is needed before sending changes to the API.
    pass
class Difference(object):
    """Computes per-parameter differences between desired and current state."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for *param*, or None when unchanged."""
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        # Generic comparison: report want's value only when it differs from
        # have's (or when have lacks the attribute entirely).
        want_value = getattr(self.want, param)
        try:
            if want_value != getattr(self.have, param):
                return want_value
        except AttributeError:
            return want_value

    @property
    def source_path(self):
        if self.want.source_path is None:
            return None
        if self.want.source_path == self.have.source_path:
            # Same path, but re-report it when the content itself changed.
            if self.content:
                return self.want.source_path
        if self.want.source_path != self.have.source_path:
            return self.want.source_path

    @property
    def content(self):
        # A checksum mismatch means new content must be pushed.
        if self.want.checksum != self.have.checksum:
            return dict(
                checksum=self.want.checksum,
                content=self.want.content
            )
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def exec_module(self):
start = datetime.now().isoformat()
version = tmos_version(self.client)
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
send_teem(start, self.module, version)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
self.remove_uploaded_file_from_device(self.want.filename)
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
self.remove_uploaded_file_from_device(self.want.filename)
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
return True
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def remove_uploaded_file_from_device(self, name):
filepath = '/var/config/rest/downloads/{0}'.format(name)
params = {
"command": "run",
"utilCmdArgs": filepath
}
uri = "https://{0}:{1}/mgmt/tm/util/unix-rm".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/sys/file/ssl-cert/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.filename)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
errors = [401, 403, 409, 500, 501, 502, 503, 504]
if resp.status in errors or 'code' in response and response['code'] in errors:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def upload_file_to_device(self, content, name):
url = 'https://{0}:{1}/mgmt/shared/file-transfer/uploads'.format(
self.client.provider['server'],
self.client.provider['server_port']
)
try:
upload_file(self.client, url, content, name)
except F5ModuleError:
raise F5ModuleError(
"Failed to upload the file."
)
def update_on_device(self):
content = StringIO(self.want.content)
self.upload_file_to_device(content, self.want.filename)
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/sys/file/ssl-cert/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.filename)
)
resp = self.client.api.put(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def create_on_device(self):
content = StringIO(self.want.content)
self.upload_file_to_device(content, self.want.filename)
uri = "https://{0}:{1}/mgmt/tm/sys/file/ssl-cert/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
params = dict(
sourcePath=self.want.source_path,
name=self.want.filename,
partition=self.want.partition
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status not in [200, 201] or 'code' in response and response['code'] not in [200, 201]:
raise F5ModuleError(resp.content)
# This needs to be done because of the way that BIG-IP creates certificates.
#
# The extra params (such as OCSP and issuer stuff) are not available in the
# payload. In a nutshell, the available resource attributes *change* after
# a create so that *more* are available.
params = self.want.api_params()
if params:
uri = "https://{0}:{1}/mgmt/tm/sys/file/ssl-cert/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.filename)
)
resp = self.client.api.put(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/file/ssl-cert/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.filename)
)
query = '?expandSubcollections=true'
resp = self.client.api.get(uri + query)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return ApiParameters(params=response)
raise F5ModuleError(resp.content)
def remove_from_device(self):
    """Issue a DELETE for the ssl-cert file resource on the device.

    :return: True when the device answers with HTTP 200.
    :raises F5ModuleError: for any other status.
    """
    endpoint = "https://{0}:{1}/mgmt/tm/sys/file/ssl-cert/{2}".format(
        self.client.provider['server'],
        self.client.provider['server_port'],
        transform_name(self.want.partition, self.want.filename)
    )
    response = self.client.api.delete(endpoint)
    if response.status != 200:
        raise F5ModuleError(response.content)
    return True
class ArgumentSpec(object):
    """Builds the Ansible argument spec accepted by this module."""
    def __init__(self):
        # This module supports Ansible check mode.
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(
                required=True
            ),
            # 'cert_content' is kept as an alias for the 'content' option.
            content=dict(aliases=['cert_content']),
            state=dict(
                default='present',
                choices=['absent', 'present']
            ),
            issuer_cert=dict(),
            partition=dict(
                default='Common',
                # Allow the partition to come from the F5_PARTITION
                # environment variable when not given explicitly.
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        # Start from the shared F5 argument spec and layer the
        # module-specific arguments on top of it.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: build the AnsibleModule and run the manager.

    Reports results via exit_json on success and fail_json on any
    F5ModuleError raised during execution.
    """
    arg_spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=arg_spec.argument_spec,
        supports_check_mode=arg_spec.supports_check_mode
    )
    try:
        manager = ModuleManager(module=module)
        module.exit_json(**manager.exec_module())
    except F5ModuleError as ex:
        module.fail_json(msg=str(ex))
# Run the module only when executed directly (Ansible invokes it this way).
if __name__ == '__main__':
    main()
| |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pre-commit script for Oppia.
This script lints Python and JavaScript code, and prints a
list of lint errors to the terminal. If the directory path is passed,
it will lint all Python and JavaScript files in that directory; otherwise,
it will only lint files that have been touched in this commit.
This script ignores all filepaths contained within the excludeFiles
argument in .jscsrc. Note that, as a side-effect, these filepaths will also
prevent Python files in those paths from being linted.
IMPORTANT NOTES:
1. Before running this script, you must install third-party dependencies by
running
bash scripts/start.sh
at least once.
=====================
CUSTOMIZATION OPTIONS
=====================
1. To lint only files that have been touched in this commit
python scripts/pre_commit_linter.py
2. To lint all files in the folder or to lint just a specific file
python scripts/pre_commit_linter.py --path filepath
3. To lint a specific list of files (*.js/*.py only). Separate files by spaces
python scripts/pre_commit_linter.py --files file_1 file_2 ... file_n
Note that the root folder MUST be named 'oppia'.
"""
# Pylint has issues with the import order of argparse.
# pylint: disable=wrong-import-order
import argparse
import fnmatch
import json
import multiprocessing
import os
import re
import subprocess
import sys
import time
# pylint: enable=wrong-import-order
# Command-line interface: '--path' and '--files' are mutually exclusive ways
# of selecting what to lint; when neither is given, only files changed in
# the current commit are linted (see _get_all_files).
_PARSER = argparse.ArgumentParser()
_EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group()
_EXCLUSIVE_GROUP.add_argument(
    '--path',
    help='path to the directory with files to be linted',
    action='store')
_EXCLUSIVE_GROUP.add_argument(
    '--files',
    nargs='+',
    help='specific files to be linted. Space separated list',
    action='store')
# Substrings that must not appear in any checked file. Each entry maps the
# forbidden pattern to the message shown to the author and a tuple of files
# exempt from the check.
BAD_PATTERNS = {
    '__author__': {
        'message': 'Please remove author tags from this file.',
        'excluded_files': ()},
    'datetime.datetime.now()': {
        # NOTE: a trailing space is required here; adjacent string literals
        # are concatenated without any separator.
        'message': 'Please use datetime.datetime.utcnow() instead of '
                   'datetime.datetime.now().',
        'excluded_files': ()},
    '\t': {
        'message': 'Please use spaces instead of tabs.',
        'excluded_files': ()},
    '\r': {
        'message': 'Please make sure all files only have LF endings (no CRLF).',
        'excluded_files': ()},
    'glyphicon': {
        'message': 'Please use equivalent material-icons '
                   'instead of glyphicons.',
        'excluded_files': ()}
}
# Substring checks applied only to JavaScript files.
BAD_PATTERNS_JS = {
    ' == ': {
        'message': 'Please replace == with === in this file.',
        'excluded_files': (
            'core/templates/dev/head/expressions/parserSpec.js',
            'core/templates/dev/head/expressions/evaluatorSpec.js',
            'core/templates/dev/head/expressions/typeParserSpec.js')},
    ' != ': {
        'message': 'Please replace != with !== in this file.',
        'excluded_files': (
            'core/templates/dev/head/expressions/parserSpec.js',
            'core/templates/dev/head/expressions/evaluatorSpec.js',
            'core/templates/dev/head/expressions/typeParserSpec.js')}
}
# Regular-expression checks applied only to JavaScript files.
BAD_PATTERNS_JS_REGEXP = [
    {
        'regexp': r"\b(ddescribe|fdescribe)\(",
        # Trailing space needed: adjacent literals concatenate directly.
        'message': "In tests, please use 'describe' instead of 'ddescribe' "
                   "or 'fdescribe'",
        'excluded_files': ()
    },
    {
        'regexp': r"\b(iit|fit)\(",
        'message': "In tests, please use 'it' instead of 'iit' or 'fit'",
        'excluded_files': ()
    }
]
# Checks applied only to app.yaml.
BAD_PATTERNS_APP_YAML = {
    'MINIFICATION: true': {
        # Trailing space needed: adjacent literals concatenate directly.
        'message': 'Please set the MINIFICATION env variable in app.yaml '
                   'to False before committing.',
        'excluded_files': ()}
}
# Glob patterns for paths that are never linted or pattern-checked.
EXCLUDED_PATHS = (
    'third_party/*', 'build/*', '.git/*', '*.pyc', 'CHANGELOG',
    'scripts/pre_commit_linter.py', 'integrations/*',
    'integrations_dev/*', '*.svg', '*.png', '*.zip', '*.ico', '*.jpg',
    '*.min.js', 'assets/scripts/*')
if not os.getcwd().endswith('oppia'):
print ''
print 'ERROR Please run this script from the oppia root directory.'
_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.5.2')
if not os.path.exists(_PYLINT_PATH):
print ''
print 'ERROR Please run start.sh first to install pylint '
print ' and its dependencies.'
sys.exit(1)
# Directories that must be on sys.path before importing pylint and the
# vendored third-party libraries (installed into ../oppia_tools and
# third_party/ by start.sh).
_PATHS_TO_INSERT = [
    _PYLINT_PATH,
    os.getcwd(),
    os.path.join(
        _PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
        'google_appengine', 'lib', 'webapp2-2.3'),
    os.path.join(
        _PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
        'google_appengine', 'lib', 'yaml-3.10'),
    os.path.join(
        _PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
        'google_appengine', 'lib', 'jinja2-2.6'),
    os.path.join(
        _PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
        'google_appengine'),
    os.path.join(_PARENT_DIR, 'oppia_tools', 'webtest-1.4.2'),
    os.path.join(_PARENT_DIR, 'oppia_tools', 'numpy-1.6.1'),
    os.path.join(_PARENT_DIR, 'oppia_tools', 'browsermob-proxy-0.7.1'),
    os.path.join(_PARENT_DIR, 'oppia_tools', 'selenium-2.53.2'),
    os.path.join('third_party', 'gae-pipeline-1.9.17.0'),
    os.path.join('third_party', 'bleach-1.2.2'),
    os.path.join('third_party', 'gae-mapreduce-1.9.17.0'),
]
for path in _PATHS_TO_INSERT:
    sys.path.insert(0, path)
# pylint can only be imported after the sys.path manipulation above.
from pylint import lint  # pylint: disable=wrong-import-position
# Prefixes used in summary messages to signal the overall result; main()
# exits non-zero when any summary message starts with _MESSAGE_TYPE_FAILED.
_MESSAGE_TYPE_SUCCESS = 'SUCCESS'
_MESSAGE_TYPE_FAILED = 'FAILED'
def _get_changed_filenames():
    """Returns a list of modified files (both staged and unstaged).

    Returns:
        a list of filenames of modified files
    """
    # Unstaged changes in the working tree.
    unstaged = subprocess.check_output(
        ['git', 'diff', '--name-only']).splitlines()
    # Staged changes: only Added/Copied/Modified entries.
    staged = subprocess.check_output(
        ['git', 'diff', '--cached', '--name-only', '--diff-filter=ACM']
    ).splitlines()
    return unstaged + staged
def _get_glob_patterns_excluded_from_jscsrc(config_jscsrc):
    """Collects excludeFiles from jscsrc file.

    Args:
    - config_jscsrc: str. Path to .jscsrc file.

    Returns:
        a list of files in excludeFiles.
    """
    with open(config_jscsrc) as jscsrc_file:
        # The first three lines of .jscsrc are comments (not valid JSON),
        # so skip them before parsing the remainder of the file.
        for _ in range(3):
            jscsrc_file.readline()
        parsed = json.loads(jscsrc_file.read())
    return parsed['excludeFiles']
def _get_all_files_in_directory(dir_path, excluded_glob_patterns):
    """Recursively collects all files in directory and
    subdirectories of specified path.

    Args:
    - dir_path: str. Path to the folder to be linted.
    - excluded_glob_patterns: set. Set of all files to be excluded.

    Returns:
        a list of files in directory and subdirectories without excluded files.
    """
    files_in_directory = []
    for _dir, _, files in os.walk(dir_path):
        for file_name in files:
            # Paths are made relative to the current working directory so
            # they can be matched against the repo-relative glob patterns.
            filename = os.path.relpath(
                os.path.join(_dir, file_name), os.getcwd())
            # Generator expression: avoids building a throwaway list just
            # to test whether any pattern matches.
            if not any(fnmatch.fnmatch(filename, gp)
                       for gp in excluded_glob_patterns):
                files_in_directory.append(filename)
    return files_in_directory
def _lint_js_files(node_path, jscs_path, config_jscsrc, files_to_lint, stdout,
                   result):
    """Prints a list of lint errors in the given list of JavaScript files.

    Intended to run in a child multiprocessing.Process: per-file JSCS output
    goes into the 'stdout' queue and a single summary line into 'result'.

    Args:
    - node_path: str. Path to the node binary.
    - jscs_path: str. Path to the JSCS binary.
    - config_jscsrc: str. Configuration args for the call to the JSCS binary.
    - files_to_lint: list of str. A list of filepaths to lint.
    - stdout: multiprocessing.Queue. A queue to store JSCS outputs
    - result: multiprocessing.Queue. A queue to put results of test

    Returns:
        None
    """
    start_time = time.time()
    num_files_with_errors = 0
    num_js_files = len(files_to_lint)
    if not files_to_lint:
        # An (empty) summary is still put on the queue so the parent's
        # result.get(block=False) does not raise.
        result.put('')
        print 'There are no JavaScript files to lint.'
        return
    print 'Total js files: ', num_js_files
    jscs_cmd_args = [node_path, jscs_path, config_jscsrc]
    for _, filename in enumerate(files_to_lint):
        print 'Linting: ', filename
        # One JSCS subprocess per file; its stdout carries lint findings,
        # its stderr indicates the linter itself failed.
        proc_args = jscs_cmd_args + [filename]
        proc = subprocess.Popen(
            proc_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        linter_stdout, linter_stderr = proc.communicate()
        if linter_stderr:
            print 'LINTER FAILED'
            print linter_stderr
            sys.exit(1)
        if linter_stdout:
            num_files_with_errors += 1
            stdout.put(linter_stdout)
    if num_files_with_errors:
        result.put('%s %s JavaScript files' % (
            _MESSAGE_TYPE_FAILED, num_files_with_errors))
    else:
        result.put('%s %s JavaScript files linted (%.1f secs)' % (
            _MESSAGE_TYPE_SUCCESS, num_js_files, time.time() - start_time))
    print 'Js linting finished.'
def _lint_py_files(config_pylint, files_to_lint, result):
    """Prints a list of lint errors in the given list of Python files.

    Intended to run in a child multiprocessing.Process; a single summary
    line is put on the 'result' queue.

    Args:
    - config_pylint: str. Path to the .pylintrc file.
    - files_to_lint: list of str. A list of filepaths to lint.
    - result: multiprocessing.Queue. A queue to put results of test

    Returns:
        None
    """
    start_time = time.time()
    are_there_errors = False
    num_py_files = len(files_to_lint)
    if not files_to_lint:
        # An (empty) summary is still put on the queue so the parent's
        # result.get(block=False) does not raise.
        result.put('')
        print 'There are no Python files to lint.'
        return
    print 'Linting %s Python files' % num_py_files
    # Files are linted in batches to keep each pylint invocation bounded.
    _BATCH_SIZE = 50
    current_batch_start_index = 0
    while current_batch_start_index < len(files_to_lint):
        # Note that this index is an exclusive upper bound -- i.e., the current
        # batch of files ranges from 'start_index' to 'end_index - 1'.
        current_batch_end_index = min(
            current_batch_start_index + _BATCH_SIZE, len(files_to_lint))
        current_files_to_lint = files_to_lint[
            current_batch_start_index : current_batch_end_index]
        print 'Linting Python files %s to %s...' % (
            current_batch_start_index + 1, current_batch_end_index)
        try:
            # This prints output to the console.
            # lint.Run always raises SystemExit; a non-zero exit status
            # means at least one lint error was found in the batch.
            lint.Run(current_files_to_lint + [config_pylint])
        except SystemExit as e:
            if str(e) != '0':
                are_there_errors = True
        current_batch_start_index = current_batch_end_index
    if are_there_errors:
        result.put('%s Python linting failed' % _MESSAGE_TYPE_FAILED)
    else:
        result.put('%s %s Python files linted (%.1f secs)' % (
            _MESSAGE_TYPE_SUCCESS, num_py_files, time.time() - start_time))
    print 'Python linting finished.'
def _get_all_files():
    """This function is used to check if this script is ran from
    root directory and to return a list of all the files for linting and
    pattern checks.

    Selection rules (from the parsed CLI arguments):
    - --path: every non-excluded file under that path (or the file itself);
    - --files: exactly the listed files, which must all exist;
    - neither: only files changed in the current commit.
    """
    jscsrc_path = os.path.join(os.getcwd(), '.jscsrc')
    parsed_args = _PARSER.parse_args()
    if parsed_args.path:
        input_path = os.path.join(os.getcwd(), parsed_args.path)
        if not os.path.exists(input_path):
            print 'Could not locate file or directory %s. Exiting.' % input_path
            print '----------------------------------------'
            sys.exit(1)
        if os.path.isfile(input_path):
            all_files = [input_path]
        else:
            # Directory walk honours the excludeFiles globs from .jscsrc.
            excluded_glob_patterns = _get_glob_patterns_excluded_from_jscsrc(
                jscsrc_path)
            all_files = _get_all_files_in_directory(
                input_path, excluded_glob_patterns)
    elif parsed_args.files:
        valid_filepaths = []
        invalid_filepaths = []
        for f in parsed_args.files:
            if os.path.isfile(f):
                valid_filepaths.append(f)
            else:
                invalid_filepaths.append(f)
        # Abort if any explicitly requested file is missing.
        if invalid_filepaths:
            print ('The following file(s) do not exist: %s\n'
                   'Exiting.' % invalid_filepaths)
            sys.exit(1)
        all_files = valid_filepaths
    else:
        all_files = _get_changed_filenames()
    return all_files
def _pre_commit_linter(all_files):
    """This function is used to check if node-jscs dependencies are installed
    and pass JSCS binary path.

    Runs the JavaScript and Python linters concurrently in two child
    processes and returns their summary messages as a list of str.
    """
    print 'Starting linter...'
    jscsrc_path = os.path.join(os.getcwd(), '.jscsrc')
    pylintrc_path = os.path.join(os.getcwd(), '.pylintrc')
    config_jscsrc = '--config=%s' % jscsrc_path
    config_pylint = '--rcfile=%s' % pylintrc_path
    parent_dir = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
    node_path = os.path.join(
        parent_dir, 'oppia_tools', 'node-4.2.1', 'bin', 'node')
    jscs_path = os.path.join(
        parent_dir, 'node_modules', 'jscs', 'bin', 'jscs')
    if not os.path.exists(jscs_path):
        print ''
        print 'ERROR Please run start.sh first to install node-jscs '
        print ' and its dependencies.'
        sys.exit(1)
    # Partition the input by extension; anything else is ignored here.
    js_files_to_lint = [
        filename for filename in all_files if filename.endswith('.js')]
    py_files_to_lint = [
        filename for filename in all_files if filename.endswith('.py')]
    js_result = multiprocessing.Queue()
    linting_processes = []
    js_stdout = multiprocessing.Queue()
    linting_processes.append(multiprocessing.Process(
        target=_lint_js_files, args=(node_path, jscs_path, config_jscsrc,
                                     js_files_to_lint, js_stdout, js_result)))
    py_result = multiprocessing.Queue()
    linting_processes.append(multiprocessing.Process(
        target=_lint_py_files,
        args=(config_pylint, py_files_to_lint, py_result)))
    print 'Starting Javascript and Python Linting'
    print '----------------------------------------'
    for process in linting_processes:
        process.start()
    for process in linting_processes:
        # Require timeout parameter to prevent against endless waiting for the
        # linting function to return.
        process.join(timeout=600)
    # Drain the per-file JSCS output collected by the JS child process.
    js_messages = []
    while not js_stdout.empty():
        js_messages.append(js_stdout.get())
    print ''
    print '\n'.join(js_messages)
    print '----------------------------------------'
    summary_messages = []
    # Require block = False to prevent unnecessary waiting for the process
    # output.
    # NOTE(review): if a child timed out above without putting its summary,
    # this get() raises Queue.Empty -- presumably acceptable for a dev
    # tool, but worth confirming.
    summary_messages.append(js_result.get(block=False))
    summary_messages.append(py_result.get(block=False))
    print '\n'.join(summary_messages)
    print ''
    return summary_messages
def _check_bad_patterns(all_files):
    """This function is used for detecting bad patterns.

    Scans every non-excluded file for the BAD_PATTERNS substrings, and
    additionally applies BAD_PATTERNS_JS / BAD_PATTERNS_JS_REGEXP to .js
    files and BAD_PATTERNS_APP_YAML to app.yaml. Returns a one-element
    list with a SUCCESS/FAILED summary message.
    """
    print 'Starting Pattern Checks'
    print '----------------------------------------'
    total_files_checked = 0
    total_error_count = 0
    summary_messages = []
    # Drop files matching any of the globally excluded path globs.
    all_files = [
        filename for filename in all_files if not
        any(fnmatch.fnmatch(filename, pattern) for pattern in EXCLUDED_PATHS)]
    failed = False
    for filename in all_files:
        with open(filename) as f:
            content = f.read()
            total_files_checked += 1
            # Generic substring checks applied to every file.
            for pattern in BAD_PATTERNS:
                if pattern in content and filename not in (
                        BAD_PATTERNS[pattern]['excluded_files']):
                    failed = True
                    print '%s --> %s' % (
                        filename, BAD_PATTERNS[pattern]['message'])
                    total_error_count += 1
            # JavaScript-only substring and regexp checks.
            if filename.endswith('.js'):
                for pattern in BAD_PATTERNS_JS:
                    if filename not in (
                            BAD_PATTERNS_JS[pattern]['excluded_files']):
                        if pattern in content:
                            failed = True
                            print '%s --> %s' % (
                                filename,
                                BAD_PATTERNS_JS[pattern]['message'])
                            total_error_count += 1
                for regexp in BAD_PATTERNS_JS_REGEXP:
                    regexp_pattern = regexp['regexp']
                    if filename not in regexp['excluded_files']:
                        if re.search(regexp_pattern, content):
                            failed = True
                            print '%s --> %s' % (
                                filename,
                                regexp['message'])
                            total_error_count += 1
            # app.yaml-only checks.
            if filename == 'app.yaml':
                for pattern in BAD_PATTERNS_APP_YAML:
                    if pattern in content:
                        failed = True
                        print '%s --> %s' % (
                            filename,
                            BAD_PATTERNS_APP_YAML[pattern]['message'])
                        total_error_count += 1
    if failed:
        summary_message = '%s Pattern checks failed' % _MESSAGE_TYPE_FAILED
        summary_messages.append(summary_message)
    else:
        summary_message = '%s Pattern checks passed' % _MESSAGE_TYPE_SUCCESS
        summary_messages.append(summary_message)
    print ''
    print '----------------------------------------'
    print ''
    if total_files_checked == 0:
        print "There are no files to be checked."
    else:
        print '(%s files checked, %s errors found)' % (
            total_files_checked, total_error_count)
        print summary_message
    return summary_messages
def main():
    """Collects target files, runs the linters and pattern checks, and
    exits with a non-zero status if any check reported a failure.
    """
    files_to_check = _get_all_files()
    messages = _pre_commit_linter(files_to_check)
    messages = messages + _check_bad_patterns(files_to_check)
    if any(msg.startswith(_MESSAGE_TYPE_FAILED) for msg in messages):
        sys.exit(1)
# Entry point when invoked as a script (e.g. as a git pre-commit hook).
if __name__ == '__main__':
    main()
| |
#!/usr/bin/env python
# Copyright 2016 Daniel Nunes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABCMeta, abstractmethod
from copy import deepcopy
from os.path import join, relpath
from PyQt5.QtWidgets import QStackedWidget, QFileDialog, QWidget
from PyQt5.QtGui import QIcon
from PyQt5.QtCore import pyqtSignal
from . import cur_folder
from .io import node_factory
from .ui_templates import (wizard_files_01, wizard_files_item, wizard_depend_01, wizard_depend_depend,
wizard_depend_depend_depend, wizard_depend_depend_file, wizard_depend_depend_flag,
wizard_depend_depend_version, wizard_depend_file, wizard_depend_flag)
class _WizardBase(QStackedWidget):
    """
    The base class for wizards. Shouldn't be instantiated directly.

    Subclasses must implement _setup_pages and _process_results.
    """
    # NOTE(review): '__metaclass__' is the Python 2 way of declaring a
    # metaclass and has no effect under Python 3 (this file uses
    # argument-less super(), i.e. Python 3), so the abstract methods are
    # not enforced at instantiation time -- which is why __init__ below
    # also guards against direct instantiation explicitly.
    __metaclass__ = ABCMeta

    # Emitted with an element whenever the generated code should refresh.
    code_changed = pyqtSignal([object])
    # Emitted when the user aborts the wizard.
    cancelled = pyqtSignal()
    # Emitted with the resulting element when the wizard completes.
    finished = pyqtSignal([object])

    def __init__(self, parent, element, code_signal, **kwargs):
        """
        :param parent: The parent widget.
        :param element: The element this wizard corresponds.
        :param code_signal: External signal that code_changed is relayed to.
        :param kwargs: Extra data for subclasses (e.g. "package_path").
        """
        super().__init__(parent)
        if type(self) is _WizardBase:
            raise AssertionError(str(type(self)) + " is not meant to be instanced. A subclass should be used instead.")
        self.element = element
        self.parent = parent
        self.kwargs = kwargs
        # Relay this wizard's code_changed emissions to the caller's signal.
        self.code_changed.connect(code_signal.emit)
        self._setup_pages()

    @abstractmethod
    def _process_results(self, result):
        """
        Method called to process the results into a new element.

        :param result: The temporary element with all the info.
        """
        pass

    @abstractmethod
    def _setup_pages(self):
        """
        Method called during initialization to create all the pages necessary for each wizard.
        """
        pass
class WizardFiles(_WizardBase):
    """
    Wizard for the "files" tag.

    Edits the element's "file" and "folder" children on a working copy and
    emits the copy via 'finished' when the user accepts.
    """
    def _process_results(self, result):
        """Publish the edited working copy to listeners."""
        self.finished.emit(result)

    def _setup_pages(self):
        """Build the single wizard page from a deep copy of the element."""
        def add_elem(element_, layout):
            """
            :param element_: The element to be copied
            :param layout: The layout into which to insert the newly copied element
            """
            child = node_factory(element_.tag, element_result)
            for key in element_.attrib:
                child.properties[key].set_value(element_.attrib[key])
            element_result.add_child(child)
            # Keep the stretch spacer last: pop it, add the row, re-add it.
            spacer = layout.takeAt(layout.count() - 1)
            item = self._create_field(child)
            layout.addWidget(item)
            layout.addSpacerItem(spacer)
            self.code_changed.emit(element_result)

        # All edits are made on a deep copy; the original element is only
        # replaced when the user clicks Finish (_process_results).
        element_result = deepcopy(self.element)
        page = QWidget()
        page_ui = wizard_files_01.Ui_Form()
        page_ui.setupUi(page)
        # Existing children are detached and re-added through add_elem so
        # that each one gets a matching UI row.
        file_list = [elem for elem in element_result if elem.tag == "file"]
        for element in file_list:
            element_result.remove_child(element)
            add_elem(element, page_ui.layout_file)
        folder_list = [elem for elem in element_result if elem.tag == "folder"]
        for element in folder_list:
            element_result.remove_child(element)
            add_elem(element, page_ui.layout_folder)
        # finish with connections
        page_ui.button_add_file.clicked.connect(
            lambda: add_elem(node_factory("file", element_result), page_ui.layout_file)
        )
        page_ui.button_add_folder.clicked.connect(
            lambda: add_elem(node_factory("folder", element_result), page_ui.layout_folder)
        )
        page_ui.finish_button.clicked.connect(lambda: self._process_results(element_result))
        page_ui.cancel_button.clicked.connect(self.cancelled.emit)
        self.addWidget(page)

    def _create_field(self, element):
        """
        :param element: the element newly copied
        :return: base QWidget, with the source and destination fields built
        """
        def button_clicked():
            # Browse for a source file/folder; stored path is made relative
            # to the package being edited.
            open_dialog = QFileDialog()
            if element.tag == "file":
                file_path = open_dialog.getOpenFileName(self, "Select File:", self.kwargs["package_path"])
                if file_path[0]:
                    item_ui.edit_source.setText(relpath(file_path[0], self.kwargs["package_path"]))
            elif element.tag == "folder":
                folder_path = open_dialog.getExistingDirectory(self, "Select folder:", self.kwargs["package_path"])
                if folder_path:
                    item_ui.edit_source.setText(relpath(folder_path, self.kwargs["package_path"]))

        parent_element = element.getparent()
        item = QWidget()
        item_ui = wizard_files_item.Ui_base()
        item_ui.setupUi(item)
        # set initial values
        item_ui.edit_source.setText(element.properties["source"].value)
        item_ui.edit_dest.setText(element.properties["destination"].value)
        item_ui.button_delete.setIcon(QIcon(join(cur_folder, "resources/logos/logo_cross.png")))
        # connect the signals
        item_ui.edit_source.textChanged.connect(element.properties["source"].set_value)
        item_ui.edit_source.textChanged.connect(element.write_attribs)
        item_ui.edit_source.textChanged.connect(lambda: self.code_changed.emit(parent_element))
        item_ui.edit_dest.textChanged.connect(element.properties["destination"].set_value)
        item_ui.edit_dest.textChanged.connect(element.write_attribs)
        item_ui.edit_dest.textChanged.connect(lambda: self.code_changed.emit(parent_element))
        item_ui.button_source.clicked.connect(button_clicked)
        item_ui.button_delete.clicked.connect(item.deleteLater)
        item_ui.button_delete.clicked.connect(lambda _: parent_element.remove_child(element))
        item_ui.button_delete.clicked.connect(lambda: self.code_changed.emit(parent_element))
        return item
class WizardDepend(_WizardBase):
"""
Wizard for the "dependencies" tag.
"""
def _process_results(self, result):
self.finished.emit(result)
def _setup_pages(self):
"""
NodeConfigVisible and NodeConfigRoot are used as simple placeholders for the factory. They serve no purpose
other than giving the factory a parent to help parsing.
"""
from .nodes import NodeConfigVisible, NodeConfigRoot
def copy_depend(element_):
if element_.getparent().tag == "dependencies" or \
element_.getparent().tag == "moduleDependencies" or \
element_.getparent().tag == "visible":
result = node_factory(element_.tag, NodeConfigVisible())
elif element_.tag == "moduleDependencies":
result = node_factory(element_.tag, NodeConfigVisible())
elif element_.tag == "visible":
result = node_factory(element_.tag, NodeConfigVisible())
else:
result = node_factory(element_.tag, NodeConfigRoot())
element_.write_attribs()
for key in element_.keys():
result.set(key, element_.get(key))
result.parse_attribs()
for child in element_:
if child.tag == "dependencies":
result.add_child(copy_depend(child))
continue
new_child = deepcopy(child)
for key in child.keys():
new_child.set(key, child.get(key))
new_child.parse_attribs()
result.add_child(new_child)
return result
element_result = copy_depend(self.element)
self.code_changed.emit(element_result)
page = QWidget()
page_ui = wizard_depend_01.Ui_Form()
page_ui.setupUi(page)
page_ui.typeComboBox.setCurrentText(element_result.get("operator"))
for element in [elem for elem in element_result if elem.tag == "fileDependency"]:
self.add_elem(element_result, page_ui.layout_file, element_=element)
for element in [elem for elem in element_result if elem.tag == "flagDependency"]:
self.add_elem(element_result, page_ui.layout_flag, element_=element)
for element in [elem for elem in element_result if elem.tag == "dependencies"]:
self.add_elem(element_result, page_ui.layout_depend, element_=element)
for elem in element_result:
if elem.tag == "gameDependency":
page_ui.gameVersionLineEdit.setText(elem.get("version"))
# finish with connections
page_ui.typeComboBox.currentTextChanged.connect(element_result.properties["operator"].set_value)
page_ui.typeComboBox.currentTextChanged.connect(element_result.write_attribs)
page_ui.typeComboBox.currentTextChanged.connect(lambda: self.code_changed.emit(element_result))
page_ui.gameVersionLineEdit.textChanged.connect(
lambda value, element_=element_result: self._update_version(value, element_))
page_ui.button_file.clicked.connect(
lambda: self.add_elem(element_result, page_ui.layout_file, tag="fileDependency"))
page_ui.button_flag.clicked.connect(
lambda: self.add_elem(element_result, page_ui.layout_flag, tag="flagDependency"))
page_ui.button_sub.clicked.connect(
lambda: self.add_elem(element_result, page_ui.layout_depend, tag="dependencies"))
page_ui.finish_button.clicked.connect(lambda: self._process_results(element_result))
page_ui.cancel_button.clicked.connect(self.cancelled.emit)
self.addWidget(page)
def add_elem(self, parent_elem, layout, tag="", element_=None):
"""
:param parent_elem: The parent element - the element the wizard is being applied on.
:param tag: The tag of the element to be created
:param element_: The element to be used
:param layout: The layout into which to insert the newly copied element
"""
from .nodes import NodeConfigVisible
if element_ is None and tag:
child = node_factory(tag, NodeConfigVisible())
parent_elem.add_child(child)
else:
if element_ is None:
return
child = element_
tag = child.tag
spacer = layout.takeAt(layout.count() - 1)
item = None
if tag == "fileDependency":
item = self._create_file(child)
elif tag == "flagDependency":
item = self._create_flag(child)
elif tag == "dependencies":
item = self._create_depend(child, layout)
layout.addWidget(item)
layout.addSpacerItem(spacer)
self.code_changed.emit(parent_elem)
def _update_version(self, value, element):
elem = None
for ele in element:
if ele.tag == "gameDependency":
elem = ele
if elem is not None:
if not value:
element.remove_child(elem)
else:
elem.properties["version"].set_value(value)
elem.write_attribs()
else:
if value:
elem = node_factory("gameDependency", element)
element.add_child(elem)
elem.properties["version"].set_value(value)
elem.write_attribs()
self.code_changed.emit(element)
def _create_file(self, element):
parent_element = element.getparent()
item = QWidget()
item_ui = wizard_depend_file.Ui_Form()
item_ui.setupUi(item)
# set initial values
item_ui.edit_file.setText(element.properties["file"].value)
item_ui.combo_type.setCurrentText(element.properties["state"].value)
item_ui.button_delete.setIcon(QIcon(join(cur_folder, "resources/logos/logo_cross.png")))
# connect the signals
item_ui.edit_file.textChanged.connect(element.properties["file"].set_value)
item_ui.edit_file.textChanged.connect(element.write_attribs)
item_ui.edit_file.textChanged.connect(lambda: self.code_changed.emit(parent_element))
item_ui.combo_type.currentTextChanged.connect(element.properties["state"].set_value)
item_ui.combo_type.currentTextChanged.connect(element.write_attribs)
item_ui.combo_type.currentTextChanged.connect(lambda: self.code_changed.emit(parent_element))
item_ui.button_delete.clicked.connect(item.deleteLater)
item_ui.button_delete.clicked.connect(lambda _: parent_element.remove_child(element))
item_ui.button_delete.clicked.connect(lambda: self.code_changed.emit(parent_element))
return item
def _create_flag(self, element):
parent_element = element.getparent()
item = QWidget()
item_ui = wizard_depend_flag.Ui_Form()
item_ui.setupUi(item)
# set initial values
item_ui.edit_flag.setText(element.properties["flag"].value)
item_ui.edit_value.setText(element.properties["value"].value)
item_ui.button_delete.setIcon(QIcon(join(cur_folder, "resources/logos/logo_cross.png")))
# connect the signals
item_ui.edit_flag.textChanged.connect(element.properties["flag"].set_value)
item_ui.edit_flag.textChanged.connect(element.write_attribs)
item_ui.edit_flag.textChanged.connect(lambda: self.code_changed.emit(parent_element))
item_ui.edit_value.textChanged.connect(element.properties["value"].set_value)
item_ui.edit_value.textChanged.connect(element.write_attribs)
item_ui.edit_value.textChanged.connect(lambda: self.code_changed.emit(parent_element))
item_ui.button_delete.clicked.connect(item.deleteLater)
item_ui.button_delete.clicked.connect(lambda _: parent_element.remove_child(element))
item_ui.button_delete.clicked.connect(lambda: self.code_changed.emit(parent_element))
return item
def _create_depend(self, element, depend_layout):
parent_element = element.getparent()
item = QWidget()
item_ui = wizard_depend_depend.Ui_Form()
item_ui.setupUi(item)
file = QWidget()
file_ui = wizard_depend_depend_file.Ui_Form()
file_ui.setupUi(file)
flag = QWidget()
flag_ui = wizard_depend_depend_flag.Ui_Form()
flag_ui.setupUi(flag)
version = QWidget()
version_ui = wizard_depend_depend_version.Ui_Form()
version_ui.setupUi(version)
depend = QWidget()
depend_ui = wizard_depend_depend_depend.Ui_Form()
depend_ui.setupUi(depend)
item_ui.label_type.setText(element.properties["operator"].value)
item_ui.button_less.hide()
item_ui.line.hide()
item_ui.scrollArea.hide()
item_ui.button_delete.setIcon(QIcon(join(cur_folder, "resources/logos/logo_cross.png")))
spacer = item_ui.layout_depend_depend.takeAt(item_ui.layout_depend_depend.count() - 1)
for element_ in [elem for elem in element if elem.tag == "fileDependency"]:
file_ui.label_file.setText(element_.properties["file"].value)
file_ui.label_type.setText(element_.properties["state"].value)
item_ui.layout_depend_depend.addWidget(file)
for element_ in [elem for elem in element if elem.tag == "flagDependency"]:
flag_ui.label_flag.setText(element_.properties["flag"].value)
flag_ui.label_value.setText(element_.properties["value"].value)
item_ui.layout_depend_depend.addWidget(flag)
sub_dependencies_sum = sum(1 for elem in element if elem.tag == "dependencies")
if sub_dependencies_sum:
depend_ui.label_number.setText(str(sub_dependencies_sum))
if sub_dependencies_sum > 1:
depend_ui.label_depend.setText("Sub-Dependencies")
item_ui.layout_depend_depend.addWidget(depend)
for element_ in [elem for elem in element if elem.tag == "gameDependency"]:
version_ui.label_version.setText(element_.get("version"))
item_ui.layout_depend_depend.addWidget(version)
item_ui.layout_depend_depend.addSpacerItem(spacer)
item_ui.button_more.clicked.connect(lambda: item_ui.button_more.hide())
item_ui.button_more.clicked.connect(lambda: item_ui.button_less.show())
item_ui.button_more.clicked.connect(lambda: item_ui.line.show())
item_ui.button_more.clicked.connect(lambda: item_ui.scrollArea.show())
item_ui.button_less.clicked.connect(lambda: item_ui.button_less.hide())
item_ui.button_less.clicked.connect(lambda: item_ui.button_more.show())
item_ui.button_less.clicked.connect(lambda: item_ui.line.hide())
item_ui.button_less.clicked.connect(lambda: item_ui.scrollArea.hide())
item_ui.button_edit.clicked.connect(lambda _, element__=element: self._nested_wizard(element__, depend_layout))
item_ui.button_delete.clicked.connect(item.deleteLater)
item_ui.button_delete.clicked.connect(lambda _: parent_element.remove_child(element))
item_ui.button_delete.clicked.connect(lambda: self.code_changed.emit(parent_element))
return item
def _nested_wizard(self, element, depend_layout):
nested_wiz = WizardDepend(self, element, self.code_changed, **self.kwargs)
self.addWidget(nested_wiz)
self.setCurrentWidget(nested_wiz)
nested_wiz.cancelled.connect(lambda: nested_wiz.deleteLater())
nested_wiz.cancelled.connect(lambda parent=element.getparent(): self.code_changed.emit(parent))
nested_wiz.finished.connect(lambda: nested_wiz.deleteLater())
nested_wiz.finished.connect(lambda parent=element.getparent(): self._update_depends(parent, depend_layout))
nested_wiz.finished.connect(lambda parent=element.getparent(): self.code_changed.emit(parent))
def _update_depends(self, main_elem, depend_layout):
    """Rebuild *depend_layout* from the <dependencies> children of *main_elem*.

    Removes (and schedules deletion of) every widget currently in the
    layout, then re-adds one item per <dependencies> child element.
    """
    # Iterate in reverse so removing items does not shift pending indexes.
    for index in reversed(range(depend_layout.count())):
        if depend_layout.itemAt(index).widget():
            widget = depend_layout.takeAt(index).widget()
            if widget is not None:
                widget.deleteLater()
    # Plain loop instead of the original side-effect-only list comprehension.
    for elem in main_elem:
        if elem.tag == "dependencies":
            self.add_elem(main_elem, depend_layout, element_=elem)
| |
# Copyright 2018 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functionality for displaying expressions.
SymPy provides a lot of functionality for displaying expressions, but it's
slightly too centered on being a symbolic maths engine to provides all our
needs. For example, it's impossible to display an unsimplified fraction like
3/6, or a decimal that isn't internally represented as a float and thus subject
to rounding.
Also provides some other convenience such as converting numbers to words, and
displaying percentages (properly formatted).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import decimal
# Dependency imports
import sympy
# For converting integers to words.
# Spelling fixes vs. the previous revision: 'fourteeen' -> 'fourteen',
# 'fourty' -> 'forty', 'twelth(s)' -> 'twelfth(s)'.
_INTEGER_LOW = [
    'zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight',
    'nine', 'ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen',
    'sixteen', 'seventeen', 'eighteen', 'nineteen'
]
_INTEGER_MID = [
    '', '', 'twenty', 'thirty', 'forty', 'fifty', 'sixty', 'seventy', 'eighty',
    'ninety'
]
_INTEGER_HIGH = [
    (int(1e12), 'trillion'), (int(1e9), 'billion'), (int(1e6), 'million'),
    (int(1e3), 'thousand'), (100, 'hundred')
]

# For converting rationals to words:
_SINGULAR_DENOMINATORS = [
    '', '', 'half', 'third', 'quarter', 'fifth', 'sixth', 'seventh', 'eighth',
    'ninth', 'tenth', 'eleventh', 'twelfth', 'thirteenth', 'fourteenth',
    'fifteenth', 'sixteenth', 'seventeenth', 'eighteenth', 'nineteenth',
    'twentieth'
]
_PLURAL_DENOMINATORS = [
    '', '', 'halves', 'thirds', 'quarters', 'fifths', 'sixths', 'sevenths',
    'eighths', 'ninths', 'tenths', 'elevenths', 'twelfths', 'thirteenths',
    'fourteenths', 'fifteenths', 'sixteenths', 'seventeenths', 'eighteenths',
    'nineteenths', 'twentieths'
]

# For converting ordinals to words:
_ORDINALS = [
    'zeroth', 'first', 'second', 'third', 'fourth', 'fifth', 'sixth', 'seventh',
    'eighth', 'ninth', 'tenth', 'eleventh', 'twelfth', 'thirteenth',
    'fourteenth', 'fifteenth', 'sixteenth', 'seventeenth', 'eighteenth',
    'nineteenth', 'twentieth'
]
class Decimal(object):
  """Display a value as a decimal."""

  def __init__(self, value):
    """Initializes a `Decimal`.

    Args:
      value: (Sympy) value to display as a decimal.

    Raises:
      ValueError: If `value` cannot be represented as a non-terminating decimal.
    """
    self._value = sympy.Rational(value)

    numer = int(sympy.numer(self._value))
    denom = int(sympy.denom(self._value))

    # A rational has a terminating decimal expansion iff its denominator has
    # no prime factors other than 2 and 5.
    denom_factors = list(sympy.factorint(denom).keys())
    for factor in denom_factors:
      if factor not in [2, 5]:
        raise ValueError('Cannot represent {} as a non-recurring decimal.'
                         .format(value))
    # NOTE(review): decimal.Decimal division honours the active context
    # precision (28 significant digits by default); assumed sufficient for
    # the expansions produced here — confirm for very long fractions.
    self._decimal = decimal.Decimal(numer) / decimal.Decimal(denom)

  @property
  def value(self):
    """Returns the value as a `sympy.Rational` object."""
    return self._value

  def _sympy_(self):
    # Hook used by sympy.sympify() to convert this object to a sympy value.
    return self._value

  def decimal_places(self):
    """Returns the number of decimal places, e.g., 32 has 0 and 1.43 has 2."""
    if isinstance(self._decimal, int):
      # Defensive branch: __init__ always stores a decimal.Decimal here.
      return 0
    elif isinstance(self._decimal, decimal.Decimal):
      # as_tuple().exponent is negative by the number of fractional digits.
      return -self._decimal.as_tuple().exponent

  def __str__(self):
    # Rebuild the string from the exact (sign, digits, exponent) tuple so the
    # representation never goes through float rounding.
    sign, digits, exponent = self._decimal.as_tuple()
    sign = '' if sign == 0 else '-'
    num_left_digits = len(digits) + exponent  # number digits "before" point

    if num_left_digits > 0:
      int_part = ''.join(str(digit) for digit in digits[:num_left_digits])
    else:
      int_part = '0'

    if exponent < 0:
      frac_part = '.'
      if num_left_digits < 0:
        # Zeros between the decimal point and the first significant digit.
        frac_part += '0' * -num_left_digits
      frac_part += ''.join(str(digit) for digit in digits[exponent:])
    else:
      frac_part = ''

    return sign + int_part + frac_part

  def __add__(self, other):
    if not isinstance(other, Decimal):
      raise ValueError('Arithmetic support limited to other `Decimal`s.')
    return Decimal(self.value + other.value)

  def __sub__(self, other):
    if not isinstance(other, Decimal):
      raise ValueError('Arithmetic support limited to other `Decimal`s.')
    return Decimal(self.value - other.value)

  def __mul__(self, other):
    if not isinstance(other, Decimal):
      raise ValueError('Arithmetic support limited to other `Decimal`s.')
    return Decimal(self.value * other.value)

  def __neg__(self):
    return Decimal(-self.value)

  def round(self, ndigits=0):
    """Returns a new `Decimal` rounded to this many decimal places."""
    scale = sympy.Integer(10 ** ndigits)
    numer = sympy.numer(self.value) * scale
    denom = sympy.denom(self.value)
    return Decimal(int(round(numer / denom)) / scale)

  def __round__(self, ndigits):
    return self.round(ndigits)

  def __int__(self):
    """Returns conversion to integer if possible; TypeError if non-integer."""
    if self.decimal_places() == 0:
      return int(self._decimal)
    else:
      raise TypeError('Cannot represent {} as an integer.'.format(str(self)))

  # NOTE: this is implemented in addition to `__cmp__` because SymPy does not
  # support inequality comparison between sympy objects and objects that are
  # not convertible to sympy objects (such as strings).
  # NOTE(review): __eq__ without __hash__ makes instances unhashable on
  # Python 3 — confirm instances are never used as dict keys or set members.
  def __eq__(self, other):
    return self.value == other

  # Python 2 comparison
  def __cmp__(self, other):
    if self.value == other:
      return 0
    if self.value < other:
      return -1
    return 1

  # Python 3 comparison:
  def __lt__(self, other):
    return self.value < other

  def __le__(self, other):
    return self.value <= other

  def __gt__(self, other):
    return self.value > other

  def __ge__(self, other):
    return self.value >= other
class Percentage(object):
  """Container for a percentage."""

  def __init__(self, value):
    """Initializes a `Percentage`.

    Args:
      value: Percentage as a fractional value. E.g., pass in
          `sympy.Rational(2, 5)` to create the percentage "40%".
    """
    self._value = value

  def _sympy_(self):
    # Conversion hook for sympy.sympify().
    return self._value

  def __str__(self):
    # Render via `Decimal` so percentages display as decimals, not fractions
    # (e.g., 2/5 becomes "40%").
    as_decimal = Decimal(self._value * 100)
    return '{}%'.format(as_decimal)
class NonSimpleRational(object):
  """Container for rational a / b where allow gcd(a, b) > 1."""

  def __init__(self, numer, denom):
    # Stored verbatim: no reduction to lowest terms is performed.
    self._numer, self._denom = numer, denom

  @property
  def numer(self):
    return self._numer

  @property
  def denom(self):
    return self._denom

  def __str__(self):
    return '{}/{}'.format(self._numer, self._denom)
class StringNumber(object):
  """A string representing a number, that can also be sympified."""

  def __init__(self, value, join_number_words_with_hyphens=True):
    """Initializes a `StringNumber`.

    Args:
      value: An integer or rational.
      join_number_words_with_hyphens: Whether to join the words in integers
          with hyphens when describing as a string.
    """
    self._join_number_words_with_hyphens = join_number_words_with_hyphens
    self._sympy_value = sympy.sympify(value)
    self._string = self._to_string(value)

  def _integer_to_words(self, integer):
    """Converts a non-negative integer to a list of English words."""
    if integer < 0:
      raise ValueError('Cannot handle negative numbers.')

    # 0..19 have dedicated words.
    if integer < 20:
      return [_INTEGER_LOW[integer]]

    # 20..99: tens word, plus a ones word when the ones digit is non-zero.
    if integer < 100:
      tens_digit, ones_digit = divmod(integer, 10)
      result = [_INTEGER_MID[tens_digit]]
      if ones_digit:
        result.append(_INTEGER_LOW[ones_digit])
      return result

    # >= 100: split on the largest matching power (hundred..trillion) and
    # recurse on both halves.
    for magnitude, magnitude_word in _INTEGER_HIGH:
      if integer < magnitude:
        continue
      quotient, remainder = divmod(integer, magnitude)
      result = self._integer_to_words(quotient) + [magnitude_word]
      if remainder:
        if remainder < 100:
          result.append('and')  # e.g., "one hundred and five"
        result += self._integer_to_words(remainder)
      return result

  def _rational_to_string(self, rational):
    """Converts a rational to words, e.g., "two thirds"."""
    numerator = sympy.numer(rational)
    denominator = sympy.denom(rational)
    numerator_words = self._to_string(numerator)
    if denominator == 1:
      # Integral value; no denominator word is needed.
      return numerator_words
    if denominator <= 0 or denominator >= len(_PLURAL_DENOMINATORS):
      raise ValueError('Unsupported denominator {}.'.format(denominator))
    denominators = (_SINGULAR_DENOMINATORS if numerator == 1
                    else _PLURAL_DENOMINATORS)
    return '{} {}'.format(numerator_words, denominators[denominator])

  def _to_string(self, number):
    """Converts an integer or rational to words."""
    if isinstance(number, (sympy.Integer, int)):
      joiner = '-' if self._join_number_words_with_hyphens else ' '
      return joiner.join(self._integer_to_words(number))
    if isinstance(number, sympy.Rational):
      return self._rational_to_string(number)
    raise ValueError('Unable to handle number {} with type {}.'
                     .format(number, type(number)))

  def _sympy_(self):
    return self._sympy_value

  def __str__(self):
    return self._string
class StringOrdinal(object):
  """A string representation of an ordinal, e.g., "first"."""

  def __init__(self, position):
    """Initializes a `StringOrdinal`.

    Args:
      position: An integer >= 0.

    Raises:
      ValueError: If `position` is negative or beyond the known ordinals.
    """
    if not 0 <= position < len(_ORDINALS):
      raise ValueError('Unsupported ordinal {}.'.format(position))
    self._string = _ORDINALS[position]

  def __str__(self):
    return self._string
class NumberList(object):
  """Contains a list of numbers, intended for display."""

  def __init__(self, numbers):
    self._numbers = numbers

  def __str__(self):
    """Converts the list to a comma-separated string.

    Returns:
      Human readable string.

    Raises:
      ValueError: if any of the strings contain a comma and thus would lead to
          an ambiguous representation.
    """
    strings = []
    for number in self._numbers:
      string = str(number)
      if ',' in string:
        # A comma inside a term would be indistinguishable from the list
        # separator. (Typo fix: error message previously read "ambigious".)
        raise ValueError('String representation of the list will be ambiguous, '
                         'since term "{}" contains a comma.'.format(string))
      strings.append(string)
    return ', '.join(strings)
class NumberInBase(object):
  """Contains value, represented in a given base."""

  def __init__(self, value, base):
    """Initializes a `NumberInBase`.

    Args:
      value: Positive or negative integer.
      base: Integer in the range [2, 36].

    Raises:
      ValueError: If base is not in the range [2, 36] (since this is the limit
          that can be represented by 10 numbers plus 26 letters).
    """
    if not 2 <= base <= 36:
      raise ValueError('base={} must be in the range [2, 36]'.format(base))
    self._value = value
    self._base = base

    chars = []
    remainder = abs(value)
    # Peel digits off least-significant first. The loop always runs at least
    # once so that a value of 0 renders as '0'.
    # Bug fix: use integer divmod — the previous `int(remainder / base)` goes
    # through float division and loses precision for |value| >= 2**53.
    while True:
      remainder, digit = divmod(remainder, base)
      char = str(digit) if digit <= 9 else chr(ord('a') + digit - 10)
      chars.append(char)
      if remainder == 0:
        break
    if value < 0:
      chars.append('-')

    self._str = ''.join(reversed(chars))

  def __str__(self):
    return self._str

  def _sympy_(self):
    return self._value
| |
import abc
import copy
import logging
import collections
import threading
import datetime
import string
class ParamTemplate(string.Template):
    """`string.Template` variant whose placeholder names may contain dots.

    Dotted identifiers let templates reference ExecutionContext values,
    e.g. ``${some.task.outcome_code}``.
    """

    # Same shape as the default idpattern, but '.' is allowed in the name.
    idpattern = r"[\._a-z][\._a-z0-9]*"
class TemplateModelAdapter:
    """Mapping used as the substitution model for `ParamTemplate`.

    Keys of the form ``<task fqn>.outcome_code`` / ``<task fqn>.outcome_result``
    are resolved against the ExecutionContext; every other key (or a miss)
    falls back to the plain dictionary.
    """

    def __init__(self, fallback_dic, execution_context=None):
        self.fallback_dic = fallback_dic
        self.execution_context = execution_context

    def __getitem__(self, key):
        """Returns the context outcome for outcome keys, else fallback_dic[key].

        Raises:
            KeyError: If the key resolves to nothing and is missing from the
                fallback dictionary.
        """
        retval = None
        # "<task name>.outcome_*" refers to a previously-run task's outcome.
        if ".outcome_" in key:
            if self.execution_context:
                task_name, _, prop_name = key.rpartition(".")
                task_ref = self.execution_context.lookup_task(task_name)
                if task_ref:
                    if prop_name == "outcome_code":
                        retval = task_ref.getOutcome()[0]
                    elif prop_name == "outcome_result":
                        retval = task_ref.getOutcome()[1]
        # Bug fix: identity comparison (`is None`) instead of `== None`, which
        # could misbehave for outcome values overriding __eq__.
        if retval is None:
            retval = self.fallback_dic[key]
        return retval
class Task:
    """
    Abstract base class for all Tasks.

    A task is a named node in a parent/children tree. It carries:
      * attribs: local attributes that cannot be overridden,
      * params:  template inputs, inherited from ancestor tasks,
      * outcome: a (code, result) tuple recorded by setOutcome().
    Concrete subclasses implement executeInternal().
    """
    __metaclass__ = abc.ABCMeta  # Python 2 style abstract base class

    ATTR_PARAMS = u'params'
    # Attribute keys accepted by setAttribute().
    __VALID_ATTRS = [u'aliases', u'description', u'dependsOn', u'version',
                     u'modelUri', ATTR_PARAMS, u'namespaces', u'load',
                     u'haltOnError', u'precond', u'decl', u'type'
                     ]
    SCOPE_SEPARATOR = "/"
    # Lifecycle states.
    STATE_NEW = 0
    STATE_RUNNING = 1
    STATE_WAITING = 2
    STATE_STOPPED = 3
    # Well-known outcome codes.
    CODE_OK = 0
    CODE_SKIPPED = -1

    def __init__(self, name, parent=None):
        self._init_internal(name, parent)
        # The outcome is a tuple where the first element is the code, and the
        # second element is the result.
        self.retainResult = True
        self.outcome_code = None
        self.outcome_result = None
        self.retainOutcome = True
        self.modelUri = None
        self.aliases = None
        self.namespaces = None
        self.description = None
        # attributes are local data that cannot be overridden
        self.attribs = {}
        # params are used as input for templates; they can be overridden
        self.params = {}
        self.haltOnError = False
        self.precond = None

    def __str__(self):
        return self.getFqn() + " [" + self.__class__.__name__ + "]"

    def _init_internal(self, name, parent=None):
        """
        Initialization that is also run after deep copying a task instance,
        i.e. all other variables are copied over.
        """
        self.state = Task.STATE_NEW
        self.lastExecTimeStart = None
        self.lastExecTimeStop = None
        self.name = name
        self.parent = parent
        if parent:
            parent.addChild(self)
        self.children = collections.OrderedDict()

    def copy(self, name, parent):
        """
        Returns a copy of this task with a new name.
        All fields are deep copied except for parent and children, which are
        temporarily detached so deepcopy does not traverse the whole tree.
        """
        temp_name = self.name
        temp_parent = self.parent
        temp_children = self.children
        self.name = None
        self.parent = None
        self.children = None
        copy_instance = copy.deepcopy(self)
        copy_instance._init_internal(name, parent)
        # Restore this instance's identity after the deep copy.
        self.name = temp_name
        self.parent = temp_parent
        self.children = temp_children
        parent.addChild(copy_instance)
        return copy_instance

    def init(self):
        """
        Task initialization hook, called after all attributes are set.
        Inheriting Task classes may implement any initialization here.
        """
        self.setParam('TASK_TYPE_NAME', self.__class__.__name__)

    def setAttribute(self, attrKey, attrVal):
        """Stores a whitelisted attribute; ATTR_PARAMS replaces self.params."""
        if attrKey == self.ATTR_PARAMS:
            self.params = attrVal
        elif attrKey in self.__VALID_ATTRS:
            self.attribs[attrKey] = attrVal
        else:
            raise Exception('Invalid Attribute "' + attrKey + '"')

    def getAttribute(self, attrKey, default=None, executionContext=None, expandTemplate=True):
        """Returns an attribute, optionally expanding ${...} placeholders."""
        if attrKey in self.attribs:
            retval = self.attribs[attrKey]
            # Expand template by replacing the placeholders with the params
            if expandTemplate and isinstance(retval, basestring):
                retval = self.applyTemplate(retval, self.getParams(), executionContext)
            return retval
        else:
            return default

    def getParams(self):
        """
        Returns a copy of the params, merged with every ancestor's params;
        the nearest definition of a key wins.

        Bug fix: previously an empty self.params returned {} without
        consulting ancestors, and the walk stopped at the first ancestor with
        empty params — both inconsistent with hasParam().
        """
        retval = self.params.copy() if self.params else {}
        node = self.parent
        while node:
            if node.params:
                for key, val in node.params.items():
                    if key not in retval:
                        retval[key] = val
            node = node.parent
        return retval

    def applyTemplate(self, templateStr, params, executionContext=None):
        """
        Applies the param template to the argument passed, resolving
        *.outcome_* keys through the executionContext.
        """
        tpl = ParamTemplate(templateStr)
        model = TemplateModelAdapter(params, executionContext)
        retval = tpl.safe_substitute(model)
        return retval

    def getParam(self, key, default=None, executionContext=None, expandTemplate=True):
        """Returns a (possibly inherited) param, optionally template-expanded."""
        params = self.getParams()
        if key in params:
            retval = params[key]
        else:
            retval = default
        # Expand template by replacing the placeholders with the params
        if expandTemplate and isinstance(retval, basestring):
            retval = self.applyTemplate(retval, params, executionContext)
        return retval

    def setParams(self, params):
        if not self.params:
            self.params = params
        else:
            self.params.update(params)

    def setParam(self, key, value):
        self.params[key] = value

    def hasParam(self, key):
        """True if the key is defined here or on any ancestor."""
        if key in self.params:
            return True
        node = self.parent
        while node:
            if key in node.params:
                return True
            node = node.parent
        return False

    def setParent(self, parent):
        self.parent = parent

    def getParent(self):
        return self.parent

    def getRootParent(self):
        """Returns the root of the task tree."""
        node = self
        while node.parent is not None:
            node = node.parent
        return node

    def addChild(self, child):
        self.children[child.name] = child

    def hasChild(self, name):
        return name in self.children

    def getChildren(self):
        return self.children.items()

    def getChild(self, name):
        return self.children[name]

    def getChildAt(self, idx):
        """
        Returns the child at position idx, in insertion order.

        Bug fix: previously referenced the nonexistent `self._children` and
        did not return the value.
        """
        return list(self.children.values())[idx]

    def get_subtask(self, name_path):
        """
        @param name_path array of names to resolve the descendant (sub) task
        """
        node = self
        for name in name_path:
            if node.hasChild(name):
                node = node.getChild(name)
            else:
                raise Exception('Nonexistent task with name [' + '.'.join(name_path) + ']')
        return node

    def traverse(self):
        self.__traverse(self)

    def __traverse(self, node):
        """
        Debug helper: prints the subtree rooted at node.
        Pending: pass function as parameter.
        """
        print (str(node) + "+" + str(len(node.getChildren())))
        if len(node.getChildren()) > 0:
            for childName, child in node.getChildren():
                self.__traverse(child)

    def getOutcome(self):
        """Returns the (code, result) tuple from the last execution."""
        return (self.outcome_code, self.outcome_result)

    def setOutcome(self, code, result):
        self.outcome_code = code
        # Only keep the result payload when configured to.
        if self.retainResult:
            self.outcome_result = result

    def getResult(self):
        return self.outcome_result

    def setResult(self, result):
        self.outcome_result = result

    def getNamespace(self):
        """
        Returns the dotted path of ancestors (the fqn minus this task's name).
        """
        nampescope = []
        currTask = self.parent
        while currTask:
            nampescope.append(currTask.name)
            currTask = currTask.parent
        return ".".join(reversed(nampescope))

    def getFqn(self):
        """
        Returns the fully qualified (dotted) name.
        """
        nampescope = [self.name]
        currTask = self.parent
        while currTask:
            nampescope.append(currTask.name)
            currTask = currTask.parent
        return ".".join(reversed(nampescope))

    def getTypename(self):
        return self.__class__.__name__

    def validate(self):
        """
        Concrete classes must implement this method.
        It should return True if valid, False otherwise.
        """
        return True

    def eval(self, expression):
        # SECURITY NOTE: eval of a template-expanded expression. Builtins are
        # stripped, but this is still not safe against untrusted input.
        return eval(expression, {"__builtins__": {}})

    @abc.abstractmethod
    def executeInternal(self, executionContext):
        """
        Concrete classes must implement this method.
        """
        return

    def execute(self, execution_context):
        """
        The main execution method.
        It internally calls executeInternal following the Template Method
        pattern.
        @type execution_context ExecutionContext
        """
        precondEval = True
        if self.precond:
            params = self.getParams()
            precond = self.applyTemplate(self.precond, params, execution_context)
            precondEval = self.eval(precond)
        if not precondEval:
            # Precondition evaluated to false: skip without running.
            return Task.CODE_SKIPPED, None
        self.lastExecTimeStart = datetime.datetime.now()
        self.state = Task.STATE_RUNNING
        exit_code, output = self.executeInternal(execution_context)
        self.state = Task.STATE_STOPPED
        self.lastExecTimeStop = datetime.datetime.now()
        # @todo - the task registry is using static module fqn instead of the
        # runtime call path. Shall we keep as is? What if it is already
        # registered?
        execution_context.register_trace(self, self.getFqn(),
                                         self.lastExecTimeStart, self.lastExecTimeStop,
                                         exit_code, output
                                         )
        return exit_code, output
class TaskThread(threading.Thread):
    """
    Wraps a single task in its own thread.

    CompositeTask uses this to fan tasks out in parallel; the task's exit
    code and output are captured on the instance for later inspection.
    """

    def __init__(self, thread_id, name, task, executionContext):
        threading.Thread.__init__(self)
        self.thread_id = thread_id
        self.name = name
        self.task = task
        self.executionContext = executionContext
        # Populated by run() once the wrapped task completes.
        self.code = None
        self.output = None

    def run(self):
        # Delegate to the wrapped task and capture its (code, output) pair.
        self.code, self.output = self.task.execute(self.executionContext)
class CompositeTask(Task):
    """
    Task that is a grouping of tasks.

    Executes the task(s) named by its @exec attribute, either sequentially
    or in parallel depending on @execMode.
    """
    logger = logging.getLogger(__name__)

    ATTR_INIT_MODE = u'initMode'
    ATTR_EXEC = u'exec'
    ATTR_EXEC_MODE = u'execMode'
    # Double underscore makes unique namespace for this class
    __VALID_ATTRS = [ATTR_INIT_MODE, ATTR_EXEC, ATTR_EXEC_MODE]

    def __init__(self, name, parent):
        super(CompositeTask, self).__init__(name, parent)
        self._exec = []  # Array of string of task names

    def setAttribute(self, attrKey, attrVal):
        """Accepts @exec (string or list of strings) and this class's other
        attributes; everything else is delegated to Task.setAttribute."""
        if (attrKey == self.ATTR_EXEC):
            # There is two possibilities: a string that contains the fqn of
            # the task to execute, or a string array of task names to execute
            # either in sequentially or in parallel depending of the execMode
            if (isinstance(attrVal, basestring)):
                self._exec.append(attrVal)
            elif (isinstance(attrVal, list)):
                for item in attrVal:
                    if (isinstance(item, basestring)):
                        self._exec.append(item)
                    else:
                        raise Exception('Element in @exec must be a string')
            else:
                raise Exception('The value of @exec has invalid type ' + str(type(attrVal)) + ', only string or array of string is allowed.')
        if (attrKey in self.__VALID_ATTRS):
            self.attribs[attrKey] = attrVal
        else:
            super(CompositeTask, self).setAttribute(attrKey, attrVal)

    def executeInternal(self, executionContext):
        """
        Executes task(s) specified in the @exec attribute
        """
        self.logger.info("Executing " + str(self))
        # Find which task(s) to execute; default to the 'default' child.
        task_names_to_exec = self._exec if len(self._exec) > 0 else [u'default']
        tasks_to_exec = []
        for task_name_to_exec in task_names_to_exec:
            # Names not anchored at 'root' are relative to this composite.
            task_fqn = task_name_to_exec if task_name_to_exec.startswith('root') \
                else self.getFqn() + '.' + task_name_to_exec
            tasks_to_exec.append(executionContext.get_task_container().get_task(task_fqn))
        exec_mode = self.getAttribute(u'execMode', u'sequential')
        self.logger.info("ExecMode: " + exec_mode)
        code, output = (Task.CODE_OK, None)
        if (exec_mode == u'parallel'):
            taskThreads = []
            for task_to_exec in tasks_to_exec:
                taskThread = TaskThread(1, "Thread-" + task_to_exec.name, task_to_exec, executionContext)
                taskThreads.append(taskThread)
                try:
                    taskThread.start()
                except Exception:  # narrowed from a bare except
                    self.logger.error("Error: unable to start thread")
            self.logger.info("Joining all threads")
            for thread in taskThreads:
                try:
                    # Bug fix: previously joined `taskThread` (the last thread
                    # created) on every iteration instead of `thread`, so the
                    # other threads were never waited on.
                    thread.join()
                except Exception:  # narrowed from a bare except
                    self.logger.error("Error: unable to join thread")
            # @todo Assign the result value
        else:
            # Executing in serial.
            # In serial mode, the last outcome is the compositeTask's outcome.
            for task_to_exec in tasks_to_exec:
                code, output = task_to_exec.execute(executionContext)
        # Composite returns None as output
        return (code, None)
class EchoTask(Task):
    """
    Task that simply echoes the message
    """
    logger = logging.getLogger(__name__)

    def executeInternal(self, executionContext):
        # NOTE(review): the default is False, so a missing 'message' param
        # makes the concatenation below raise TypeError — confirm intended.
        message = self.getParam('message', False)
        self.logger.info("Echo '" + message + "'")
        self.setOutcome(Task.CODE_OK, message)
        return (Task.CODE_OK, message)
class SwitchTask(Task):
    """
    Task that does a switch/case.
    Notice: first matching case is the one that is executed.
    """
    logger = logging.getLogger(__name__)

    def executeInternal(self, executionContext):
        self.logger.info("Executing " + str(self))
        # Each entry pairs a boolean expression with the body to run.
        cases = self.getAttribute(u'cases')
        code, out = (Task.CODE_OK, None)
        for condition, body in cases:
            if not self.eval(condition):
                continue
            # A body starting with '#' names another task to execute.
            if body[0] == u'#':
                task_fqname = body[1:]
                # NOTE(review): calls lookupTask, while TemplateModelAdapter
                # uses lookup_task on the context — confirm which is correct.
                task = executionContext.lookupTask(task_fqname)
                code, out = task.execute(executionContext)
            break
        return (code, out)
class IterationTask(Task):
    """
    Task that does Iteration.
    Currently a stub: executeInternal performs no iteration yet.
    """
    logger = logging.getLogger(__name__)

    # Double underscore makes unique namespace for this class
    __VALID_ATTRS = [u'for', u'exec']

    def setAttribute(self, attrKey, attrVal):
        # Store this class's own attributes directly on the instance;
        # delegate everything else to the base Task.
        if attrKey in self.__VALID_ATTRS:
            setattr(self, attrKey, attrVal)
        else:
            super(IterationTask, self).setAttribute(attrKey, attrVal)

    def executeInternal(self, executionContext):
        self.logger.info("Executing " + str(self))
        return (Task.CODE_OK, None)
| |
import functools
from functools import partial
from django import http
from django.conf import settings
from django.db import IntegrityError, transaction
from django.shortcuts import (get_list_or_404, get_object_or_404,
redirect)
from django.contrib import auth
from django.contrib.auth.forms import PasswordResetForm
from django.contrib.auth.tokens import default_token_generator
from django.template import Context, loader
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from django.utils.http import base36_to_int, is_safe_url
import commonware.log
import jingo
import waffle
from django_browserid import get_audience, verify
from django_statsd.clients import statsd
from mobility.decorators import mobile_template
from session_csrf import anonymous_csrf, anonymous_csrf_exempt
from tower import ugettext as _
import amo
import users.notifications as notifications
from abuse.models import send_abuse_report
from access.middleware import ACLMiddleware
from addons.decorators import addon_view_factory
from addons.models import Addon, Category
from amo import messages
from amo.decorators import (json_view, login_required, permission_required,
post_required, write)
from amo.forms import AbuseForm
from amo.helpers import loc
from amo.urlresolvers import get_url_prefix, reverse
from amo.utils import escape_all, log_cef, send_mail
from access import acl
from bandwagon.models import Collection
from browse.views import PersonasFilter
from translations.query import order_by_translation
from users.models import UserNotification
from lib.metrics import record_action
from .models import UserProfile
from .signals import logged_out
from . import forms
from .utils import autocreate_username, EmailResetCode, UnsubscribeCode
import tasks
# Module-level logger for the users app.
log = commonware.log.getLogger('z.users')

# View decorator resolving an add-on from the URL, restricted to the
# `valid` add-on queryset.
addon_view = addon_view_factory(qs=Addon.objects.valid)

# Presumably caps how many themes are listed per page — confirm at usages.
THEMES_LIMIT = 20
def user_view(f):
    """Decorator: resolve the `user_id` URL argument to a `UserProfile`.

    Accepts a numeric id or a username; the literal 'me' is an alias for
    the logged-in user's own profile. The resolved user object replaces
    `user_id` in the wrapped view's arguments.
    """
    @functools.wraps(f)
    def wrapper(request, user_id, *args, **kw):
        lookup_field = 'id' if user_id.isdigit() else 'username'
        # If the username is `me` then show the current user's profile.
        if (user_id == 'me' and request.amo_user and
            request.amo_user.username):
            user_id = request.amo_user.username
        user = get_object_or_404(UserProfile, **{lookup_field: user_id})
        return f(request, user, *args, **kw)
    return wrapper
@login_required(redirect=False)
@json_view
def ajax(request):
    """Query for a user matching a given email."""
    if 'q' not in request.GET:
        raise http.Http404()

    data = {'status': 0, 'message': ''}

    email = request.GET.get('q', '').strip()
    try:
        dev_only = int(request.GET.get('dev', '1'))
    except ValueError:
        dev_only = 1
    # The developer-only filter applies on the Marketplace site only.
    dev_only = dev_only and settings.MARKETPLACE

    if not email:
        data.update(message=_('An email address is required.'))
        return data

    users = UserProfile.objects.filter(email=email)
    if dev_only:
        users = users.exclude(read_dev_agreement=None)

    msg = _('A user with that email address does not exist.')
    msg_dev = _('A user with that email address does not exist, or the user '
                'has not yet accepted the developer agreement.')

    if users:
        data.update(status=1, id=users[0].id, name=users[0].name)
    else:
        data['message'] = msg_dev if dev_only else msg

    return escape_all(data)
@user_view
def confirm(request, user, token):
    """Confirm a user's account via the emailed confirmation code."""
    # No pending code: the account is already confirmed.
    if not user.confirmationcode:
        return redirect('users.login')

    if user.confirmationcode != token:
        log.info(u"Account confirmation failed for user (%s)", user)
        messages.error(request, _('Invalid confirmation code!'))
        return redirect('users.login')

    # Clear the code so the link works only once.
    user.confirmationcode = ''
    user.save()
    messages.success(request, _('Successfully verified!'))
    log.info(u"Account confirmed for user (%s)", user)
    return redirect('users.login')
@user_view
def confirm_resend(request, user):
    """Re-send the account confirmation email to an unconfirmed user."""
    if not user.confirmationcode:
        return redirect('users.login')

    # Potential for flood here if someone requests a confirmationcode and then
    # re-requests confirmations. We may need to track requests in the future.
    log.info(u"Account confirm re-requested for user (%s)", user)
    user.email_confirmation_code()

    msg = _(u'An email has been sent to your address {0} to confirm '
            'your account. Before you can log in, you have to activate '
            'your account by clicking on the link provided in this '
            'email.').format(user.email)
    messages.info(request, _('Confirmation Email Sent'), msg)

    return redirect('users.login')
@login_required
def delete(request):
    """Let the logged-in user delete (anonymize) their own profile."""
    amouser = request.amo_user

    # GET: show an unbound confirmation form.
    if request.method != 'POST':
        return jingo.render(request, 'users/delete.html',
                            {'form': forms.UserDeleteForm(),
                             'amouser': amouser})

    form = forms.UserDeleteForm(request.POST, request=request)
    if form.is_valid():
        messages.success(request, _('Profile Deleted'))
        # Anonymize rather than hard-delete, then end the session.
        amouser.anonymize()
        logout(request)
        return http.HttpResponseRedirect(reverse('users.login'))

    # Invalid POST: re-render with the bound form and its errors.
    return jingo.render(request, 'users/delete.html',
                        {'form': form, 'amouser': amouser})
@login_required
def delete_photo(request):
    """Delete the logged-in user's profile photo."""
    user = request.amo_user

    if request.method == 'POST':
        user.picture_type = ''
        user.save()
        log.debug(u"User (%s) deleted photo" % user)
        # The file itself is removed asynchronously by a task.
        tasks.delete_photo.delay(user.picture_path)
        messages.success(request, _('Photo Deleted'))
        return http.HttpResponseRedirect(reverse('users.edit') +
                                         '#user-profile')

    return jingo.render(request, 'users/delete_photo.html', dict(user=user))
@write
@login_required
def edit(request):
    """Let the logged-in user edit their own profile.

    Email changes are deferred: a confirmation email is sent and the old
    address is restored before saving, so the new address only takes
    effect once the user follows the emailed link (see emailchange()).
    """
    # Don't use request.amo_user since it has too much caching.
    amouser = UserProfile.objects.get(pk=request.user.id)
    if request.method == 'POST':
        # ModelForm alters the instance you pass in. We need to keep a copy
        # around in case we need to use it below (to email the user)
        original_email = amouser.email
        form = forms.UserEditForm(request.POST, request.FILES, request=request,
                                  instance=amouser)
        if form.is_valid():
            messages.success(request, _('Profile Updated'))
            if amouser.email != original_email:
                # Temporarily block email changes.
                if settings.APP_PREVIEW:
                    messages.error(request, 'Error',
                                   'You cannot change your email on the '
                                   'developer preview site.')
                    return jingo.render(request, 'users/edit.html',
                                        {'form': form, 'amouser': amouser})

                l = {'user': amouser,
                     'mail1': original_email,
                     'mail2': amouser.email}
                log.info(u"User (%(user)s) has requested email change from "
                         "(%(mail1)s) to (%(mail2)s)" % l)
                messages.info(request, _('Email Confirmation Sent'),
                              _(u'An email has been sent to {0} to confirm your new '
                                'email address. For the change to take effect, you '
                                'need to click on the link provided in this email. '
                                'Until then, you can keep logging in with your '
                                'current email address.').format(amouser.email))

                # Create a verifiable (user id, new email) code that
                # emailchange() parses later.
                token, hash_ = EmailResetCode.create(amouser.id, amouser.email)

                url = '%s%s' % (settings.SITE_URL,
                                reverse('users.emailchange',
                                        args=[amouser.id, token, hash_]))
                t = loader.get_template('users/email/emailchange.ltxt')
                c = {'domain': settings.DOMAIN, 'url': url}
                send_mail(_('Please confirm your email address '
                            'change at %s' % settings.DOMAIN),
                          t.render(Context(c)), None, [amouser.email],
                          use_blacklist=False, real_email=True)

                # Reset the original email back. We aren't changing their
                # address until they confirm the new one
                amouser.email = original_email
            form.save()
            return redirect('users.edit')
        else:
            messages.error(request, _('Errors Found'),
                           _('There were errors in the changes '
                             'you made. Please correct them and '
                             'resubmit.'))
    else:
        form = forms.UserEditForm(instance=amouser)
    return jingo.render(request, 'users/edit.html',
                        {'form': form, 'amouser': amouser, 'webapp': False})
@write
@login_required
@permission_required('Users', 'Edit')
@user_view
def admin_edit(request, user):
    """Admin-only edit of another user's profile."""
    # GET: show the form pre-filled with the target user's data.
    if request.method != 'POST':
        return jingo.render(request, 'users/edit.html',
                            {'form': forms.AdminUserEditForm(instance=user),
                             'amouser': user})

    form = forms.AdminUserEditForm(request.POST, request.FILES,
                                   request=request, instance=user)
    if form.is_valid():
        form.save()
        messages.success(request, _('Profile Updated'))
        return http.HttpResponseRedirect(reverse('zadmin.index'))

    # Invalid POST: re-render with the bound form and its errors.
    return jingo.render(request, 'users/edit.html',
                        {'form': form, 'amouser': user})
@user_view
def emailchange(request, user, token, hash):
    """Complete an email change: verify the emailed code, store the address.

    `hash` shadows the builtin, but it is bound by name from the URL
    pattern, so the parameter name must stay.
    """
    try:
        _uid, newemail = EmailResetCode.parse(token, hash)
    except ValueError:
        return http.HttpResponse(status=400)

    if _uid != user.id:
        # I'm calling this a warning because invalid hashes up to this point
        # could be any number of things, but this is a targeted attack from
        # one user account to another
        log.warning((u"[Tampering] Valid email reset code for UID (%s) "
                     "attempted to change email address for user (%s)")
                    % (_uid, user))
        return http.HttpResponse(status=400)

    user.email = newemail
    user.save()

    log.info(u"User (%(user)s) confirmed new email address (%(newemail)s)"
             % {'user': user, 'newemail': newemail})
    messages.success(
        request, _('Your email address was changed successfully'),
        _(u'From now on, please use {0} to log in.').format(newemail))
    return http.HttpResponseRedirect(reverse('users.edit'))
def _clean_next_url(request):
    """Sanitize the 'to' redirect target in request.GET, in place."""
    params = request.GET.copy()
    target = params.get('to', settings.LOGIN_REDIRECT_URL)

    # Refuse off-site redirects; fall back to the default landing page.
    if not is_safe_url(target, host=request.get_host()):
        log.info(u'Unsafe redirect to %s' % target)
        target = settings.LOGIN_REDIRECT_URL

    domain = params.get('domain', None)
    if domain in settings.VALID_LOGIN_REDIRECTS.keys():
        # Allow-listed domains get their configured prefix prepended.
        target = settings.VALID_LOGIN_REDIRECTS[domain] + target

    params['to'] = target
    request.GET = params
    return request
def browserid_authenticate(request, assertion, is_mobile=False,
                           browserid_audience=get_audience):
    """
    Verify a BrowserID login attempt. If the BrowserID assertion is
    good, but no account exists, create one.

    Returns a (profile, error_message) pair; exactly one of the two is
    None.

    NOTE(review): the default for `browserid_audience` is the
    `get_audience` *function* itself, not its result; callers in this
    file pass `get_audience(request)` explicitly -- confirm before
    relying on the default.
    """
    url = settings.BROWSERID_VERIFICATION_URL
    # We must always force the Firefox OS identity provider. This is because
    # we are sometimes allowing unverified assertions and you can't mix that
    # feature with bridged IdPs. See bug 910938.
    extra_params = {}
    if settings.UNVERIFIED_ISSUER:
        extra_params['experimental_forceIssuer'] = settings.UNVERIFIED_ISSUER
    if is_mobile:
        # When persona is running in a mobile OS then we can allow unverified
        # assertions.
        url = settings.NATIVE_BROWSERID_VERIFICATION_URL
        extra_params['experimental_allowUnverified'] = 'true'
    log.debug('Verifying Persona at %s, audience: %s, '
              'extra_params: %s' % (url, browserid_audience, extra_params))
    result = verify(assertion, browserid_audience,
                    url=url, extra_params=extra_params)
    if not result:
        return None, _('Persona authentication failure.')
    # Unverified assertions carry the address under a different key.
    if 'unverified-email' in result:
        email = result['unverified-email']
        verified = False
    else:
        email = result['email']
        verified = True
    # EAFP lookup: filter()[0] raises IndexError when no profile exists.
    try:
        profile = UserProfile.objects.filter(email=email)[0]
    except IndexError:
        profile = None
    if profile:
        # Added due to bug 905984. It's possible to have a UserProfile
        # that has no corresponding User object.
        if profile.user is None:
            profile.create_django_user(
                backend='django_browserid.auth.BrowserIDBackend')
        if profile.is_verified and not verified:
            # An attempt to log in to a verified address with an unverified
            # assertion is a very bad thing. Don't let that happen.
            log.debug('Verified user %s attempted to log in with an '
                      'unverified assertion!' % profile)
            return None, _('Please use the verified email for this account.')
        else:
            profile.is_verified = verified
            profile.save()
        # Make sure the auth backend recorded on the user matches Persona.
        backend = 'django_browserid.auth.BrowserIDBackend'
        if getattr(profile.user, 'backend', None) != backend:
            profile.user.backend = backend
            profile.user.save()
        return profile, None
    # No existing account: auto-create one from the verified address.
    username = autocreate_username(email.partition('@')[0])
    source = (amo.LOGIN_SOURCE_MMO_BROWSERID if settings.MARKETPLACE else
              amo.LOGIN_SOURCE_AMO_BROWSERID)
    profile = UserProfile.objects.create(username=username, email=email,
                                         source=source, display_name=username,
                                         is_verified=verified)
    profile.create_django_user(
        backend='django_browserid.auth.BrowserIDBackend')
    log_cef('New Account', 5, request, username=username,
            signature='AUTHNOTICE',
            msg='User created a new account (from Persona)')
    if settings.MARKETPLACE:
        record_action('new-user', request)
    return profile, None
@csrf_exempt
@post_required
@transaction.commit_on_success
#@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def browserid_login(request, browserid_audience=None):
    """Log a user in from a POSTed Persona (BrowserID) assertion."""
    if not waffle.switch_is_active('browserid-login'):
        return http.HttpResponse(
            'browserid-login waffle switch is not enabled', status=401)

    if request.user.is_authenticated():
        # If username is different, maybe sign in as new user?
        return http.HttpResponse(status=200)

    try:
        is_mobile = bool(int(request.POST.get('is_mobile', 0)))
    except ValueError:
        is_mobile = False

    with statsd.timer('auth.browserid.verify'):
        profile, msg = browserid_authenticate(
            request, request.POST['assertion'],
            is_mobile=is_mobile,
            browserid_audience=browserid_audience or get_audience(request))

    if profile is not None:
        auth.login(request, profile.user)
        profile.log_login_attempt(True)
        return http.HttpResponse(status=200)

    # Verification failed: surface the message from the authenticator.
    return http.HttpResponse(msg, status=401)
@anonymous_csrf
@mobile_template('users/{mobile/}login_modal.html')
#@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def login_modal(request, template=None):
    # Same flow as the full login page; @mobile_template picks the modal
    # template variant for the current device class.
    return _login(request, template=template)
@anonymous_csrf
@mobile_template('users/{mobile/}login.html')
#@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def login(request, template=None):
    # NOTE(review): on the Marketplace, 'users.login' resolves to a
    # different route than this view -- confirm this does not loop.
    if settings.MARKETPLACE:
        return redirect('users.login')
    return _login(request, template=template)
def _login(request, template=None, data=None, dont_redirect=False):
    """Shared worker behind the login page and the login modal.

    Wraps ``django.contrib.auth.views.login`` and layers on AMO-specific
    behaviour: per-user recaptcha rate limiting, rejection of deleted or
    still-unconfirmed accounts, "remember me" session expiry and CEF
    audit logging of the attempt's outcome.
    """
    data = data or {}
    data['webapp'] = settings.APP_PREVIEW
    # Keep a pristine copy of the query string; _clean_next_url mutates
    # request.GET and the 'domain' handling below needs the original.
    get_copy = request.GET.copy()

    if 'to' in request.GET:
        request = _clean_next_url(request)

    if request.user.is_authenticated():
        return http.HttpResponseRedirect(
            request.GET.get('to', settings.LOGIN_REDIRECT_URL))

    limited = getattr(request, 'limited', 'recaptcha_shown' in request.POST)
    user = None
    login_status = None
    if 'username' in request.POST:
        try:
            # We are doing all this before we try and validate the form.
            user = UserProfile.objects.get(email=request.POST['username'])
            limited = ((user.failed_login_attempts >=
                        settings.LOGIN_RATELIMIT_USER) or limited)
            login_status = False
        except UserProfile.DoesNotExist:
            log_cef('Authentication Failure', 5, request,
                    username=request.POST['username'],
                    signature='AUTHFAIL',
                    msg='The username was invalid')

    partial_form = partial(forms.AuthenticationForm, use_recaptcha=limited)
    r = auth.views.login(request, template_name=template,
                         redirect_field_name='to',
                         authentication_form=partial_form,
                         extra_context=data)

    if isinstance(r, http.HttpResponseRedirect):
        # Django's auth.views.login has security checks to prevent someone
        # from redirecting to another domain. Since we want to allow this in
        # certain cases, we have to make a new response object here to
        # replace the above.
        if 'domain' in request.GET:
            request.GET = get_copy
            request = _clean_next_url(request)
            r = http.HttpResponseRedirect(request.GET['to'])

        # Successful log in according to django. Now we do our checks. I do
        # the checks here instead of the form's clean() because I want to use
        # the messages framework and it's not available in the request there.
        # NOTE(review): a redirect implies a valid form with a 'username'
        # key, so 'user' should be non-None here -- confirm the invariant.
        if user.deleted:
            logout(request)
            log.warning(u'Attempt to log in with deleted account (%s)' % user)
            messages.error(request, _('Wrong email address or password!'))
            data.update({'form': partial_form()})
            user.log_login_attempt(False)
            log_cef('Authentication Failure', 5, request,
                    username=request.user,
                    signature='AUTHFAIL',
                    msg='Account is deactivated')
            return jingo.render(request, template, data)

        if user.confirmationcode:
            logout(request)
            log.info(u'Attempt to log in with unconfirmed account (%s)' % user)
            msg1 = _(u'A link to activate your user account was sent by email '
                     'to your address {0}. You have to click it before you '
                     'can log in.').format(user.email)
            url = "%s%s" % (settings.SITE_URL,
                            reverse('users.confirm.resend', args=[user.id]))
            msg2 = _('If you did not receive the confirmation email, make '
                     'sure your email service did not mark it as "junk '
                     'mail" or "spam". If you need to, you can have us '
                     '<a href="%s">resend the confirmation message</a> '
                     'to your email address mentioned above.') % url
            messages.error(request, _('Activation Email Sent'), msg1)
            messages.info(request, _('Having Trouble?'), msg2,
                          title_safe=True, message_safe=True)
            data.update({'form': partial_form()})
            user.log_login_attempt(False)
            return jingo.render(request, template, data)

        rememberme = request.POST.get('rememberme', None)
        if rememberme:
            request.session.set_expiry(settings.SESSION_COOKIE_AGE)
            log.debug((u'User (%s) logged in successfully with '
                       '"remember me" set') % user)
        login_status = True

    if dont_redirect:
        # We're recalling the middleware to re-initialize amo_user
        ACLMiddleware().process_request(request)
        r = jingo.render(request, template, data)

    if login_status is not None:
        user.log_login_attempt(login_status)
        # Bug fix: the AUTHFAIL event used to be emitted unconditionally,
        # i.e. even for *successful* logins. Only report actual failures.
        if not login_status:
            log_cef('Authentication Failure', 5, request,
                    username=request.POST['username'],
                    signature='AUTHFAIL',
                    msg='The password was incorrect')
    return r
def logout(request):
    """Log the current user out and redirect to the sanitized 'to' URL."""
    # request.user (not get_profile()) because the user may be anonymous.
    user = request.user
    if not user.is_anonymous():
        log.debug(u"User (%s) logged out" % user)

    auth.logout(request)

    if 'to' in request.GET:
        request = _clean_next_url(request)

    destination = request.GET.get('to')
    if not destination:
        # No explicit target: use the configured default, run through the
        # locale/app URL prefixer when one is active.
        destination = settings.LOGOUT_REDIRECT_URL
        prefixer = get_url_prefix()
        if prefixer:
            destination = prefixer.fix(destination)

    response = http.HttpResponseRedirect(destination)
    # Fire logged out signal.
    logged_out.send(None, request=request, response=response)
    return response
@user_view
def profile(request, user):
    """Public profile page: collections, add-ons, themes and reviews."""
    # Temporary until we decide we want user profile pages.
    if settings.MARKETPLACE:
        raise http.Http404
    # Hard-coded off; collections below are always gathered.
    webapp = False
    # Get user's own and favorite collections, if they allowed that.
    own_coll = fav_coll = []
    if not webapp:
        if user.display_collections:
            own_coll = (Collection.objects.listed().filter(author=user)
                        .order_by('-created'))[:10]
        if user.display_collections_fav:
            fav_coll = (Collection.objects.listed()
                        .filter(following__user=user)
                        .order_by('-following__created'))[:10]
    edit_any_user = acl.action_allowed(request, 'Users', 'Edit')
    own_profile = (request.user.is_authenticated() and
                   request.amo_user.id == user.id)
    addons = []
    personas = []
    limited_personas = False
    if user.is_developer:
        # Listed, reviewed add-ons only; webapps shown elsewhere.
        addons = user.addons.reviewed().exclude(type=amo.ADDON_WEBAPP).filter(
            addonuser__user=user, addonuser__listed=True)
        personas = addons.filter(type=amo.ADDON_PERSONA).order_by(
            '-persona__popularity')
        # Cap the themes list; the template links to the full listing.
        if personas.count() > THEMES_LIMIT:
            limited_personas = True
            personas = personas[:THEMES_LIMIT]
        addons = addons.exclude(type=amo.ADDON_PERSONA).order_by(
            '-weekly_downloads')
        addons = amo.utils.paginate(request, addons, 5)
    # Don't show marketplace reviews for AMO (since that would break).
    reviews = list(user.reviews.exclude(addon__type=amo.ADDON_WEBAPP))
    reviews = amo.utils.paginate(request, reviews)
    data = {'profile': user, 'own_coll': own_coll, 'reviews': reviews,
            'fav_coll': fav_coll, 'edit_any_user': edit_any_user,
            'addons': addons, 'own_profile': own_profile,
            'personas': personas, 'limited_personas': limited_personas,
            'THEMES_LIMIT': THEMES_LIMIT}
    # Visitors (not the owner) get the abuse-report form.
    if not own_profile:
        data['abuse_form'] = AbuseForm(request=request)
    return jingo.render(request, 'users/profile.html', data)
@user_view
def themes(request, user, category=None):
    """List a user's themes (personas), optionally limited to a category."""
    persona_cats = Category.objects.filter(type=amo.ADDON_PERSONA)

    context = {
        'profile': user,
        'categories': order_by_translation(persona_cats, 'name'),
        'search_cat': 'themes'
    }

    if not user.is_artist:
        # Non-artists have nothing to show; keep an empty queryset so the
        # filter/pagination below still work.
        base = Addon.objects.none()
    else:
        base = user.addons.reviewed().filter(
            type=amo.ADDON_PERSONA, addonuser__user=user,
            addonuser__listed=True)
        if category:
            cat = get_list_or_404(persona_cats.filter(slug=category))[0]
            context['category'] = cat
            base = base.filter(categories__id=cat.id)

    filter_ = PersonasFilter(request, base, key='sort', default='popular')
    paginated = amo.utils.paginate(request, filter_.qs, 30,
                                   count=base.count())

    context['addons'] = paginated
    context['filter'] = filter_
    context['sorting'] = filter_.field
    context['sort_opts'] = filter_.opts
    return jingo.render(request, 'browse/personas/grid.html', context)
@anonymous_csrf
def register(request):
    """Create a new user account, or hand off to the right flow.

    Branches: registration disabled (browserid-only), already logged in,
    a POSTed registration form (including the special case of an existing
    password-less BrowserID account), or a fresh GET.
    """
    if settings.APP_PREVIEW and waffle.switch_is_active('browserid-login'):
        messages.error(request,
                       loc('Registrations must be through browserid.'))
        form = None
        raise http.Http404()

    elif request.user.is_authenticated():
        messages.info(request, _('You are already logged in to an account.'))
        form = None

    elif request.method == 'POST':

        form = forms.UserRegisterForm(request.POST)
        # Existing password-less profile => account created via BrowserID.
        mkt_user = UserProfile.objects.filter(email=form.data['email'],
                                              password='')
        if form.is_valid():
            try:
                u = form.save(commit=False)
                u.set_password(form.cleaned_data['password'])
                u.generate_confirmationcode()
                u.save()
                u.create_django_user()
                log.info(u'Registered new account for user (%s)', u)
                log_cef('New Account', 5, request, username=u.username,
                        signature='AUTHNOTICE',
                        msg='User created a new account')

                u.email_confirmation_code()

                msg = _('Congratulations! Your user account was '
                        'successfully created.')
                messages.success(request, msg)

                msg = _(u'An email has been sent to your address {0} to '
                        'confirm your account. Before you can log in, you '
                        'have to activate your account by clicking on the '
                        'link provided in this email.').format(u.email)
                messages.info(request, _('Confirmation Email Sent'), msg)

            except IntegrityError, e:
                # I was unable to reproduce this, but I suspect it happens
                # when they POST twice quickly and the slaves don't have the
                # new info yet (total guess). Anyway, I'm assuming the
                # first one worked properly, so this is still a success
                # case to the end user so we just log it...
                log.error('Failed to register new user (%s): %s' % (u, e))

            return http.HttpResponseRedirect(reverse('users.login'))

        elif mkt_user.exists():
            # Handle BrowserID
            if (mkt_user.count() == 1 and
                mkt_user[0].source in amo.LOGIN_SOURCE_BROWSERIDS):
                messages.info(request, _('You already have an account.'))
                form = None
            else:
                # Ambiguous/legacy account: trigger a password reset so the
                # user can claim it with a password.
                f = PasswordResetForm()
                f.users_cache = [mkt_user[0]]
                f.save(use_https=request.is_secure(),
                       email_template_name='users/email/pwreset.ltxt',
                       request=request)
                return jingo.render(request, 'users/newpw_sent.html', {})

        else:
            messages.error(request, _('There are errors in this form'),
                           _('Please correct them and resubmit.'))
    else:
        form = forms.UserRegisterForm()

    reg_action = reverse('users.register')
    return jingo.render(request, 'users/register.html',
                        {'form': form, 'register_action': reg_action})
@anonymous_csrf_exempt
@user_view
def report_abuse(request, user):
    """Handle the "report this user" form; re-render it when not submitted
    or invalid."""
    form = AbuseForm(request.POST or None, request=request)
    if request.method != 'POST' or not form.is_valid():
        context = {'profile': user, 'abuse_form': form, }
        return jingo.render(request, 'users/report_abuse_full.html', context)
    send_abuse_report(request, user, form.cleaned_data['text'])
    messages.success(request, _('User reported.'))
    return redirect(user.get_url_path())
@never_cache
def password_reset_confirm(request, uidb36=None, token=None):
    """
    Pulled from django contrib so that we can add user into the form
    so then we can show relevant messages about the user.
    """
    assert uidb36 is not None and token is not None
    user = None
    try:
        user = UserProfile.objects.get(id=base36_to_int(uidb36))
    except (ValueError, UserProfile.DoesNotExist):
        pass

    validlink = (user is not None and
                 default_token_generator.check_token(user, token))
    if not validlink:
        form = None
    elif request.method == 'POST':
        form = forms.SetPasswordForm(user, request.POST)
        if form.is_valid():
            form.save()
            log_cef('Password Changed', 5, request,
                    username=user.username,
                    signature='PASSWORDCHANGED',
                    msg='User changed password')
            return redirect(reverse('django.contrib.auth.'
                                    'views.password_reset_complete'))
    else:
        form = forms.SetPasswordForm(user)

    return jingo.render(request, 'users/pwreset_confirm.html',
                        {'form': form, 'validlink': validlink})
@never_cache
def unsubscribe(request, hash=None, token=None, perm_setting=None):
    """Unsubscribe a user from email notifications via a signed token
    link; no login required."""
    assert hash is not None and token is not None
    user = None
    try:
        email = UnsubscribeCode.parse(token, hash)
        user = UserProfile.objects.get(email=email)
    except (ValueError, UserProfile.DoesNotExist):
        pass

    perm_settings = []
    if user is None:
        unsubscribed = False
        email = ''
    elif perm_setting:
        # Disable just the one requested notification type.
        notification = notifications.NOTIFICATIONS_BY_SHORT[perm_setting]
        UserNotification.update_or_create(
            update={'enabled': False}, user=user,
            notification_id=notification.id)
        perm_settings = [notification]
        unsubscribed = True
    else:
        # TODO: make this work. nothing currently links to it, though.
        unsubscribed = True
        perm_settings = [n for n in notifications.NOTIFICATIONS
                         if not n.mandatory]

    return jingo.render(request, 'users/unsubscribe.html',
                        {'unsubscribed': unsubscribed, 'email': email,
                         'perm_settings': perm_settings})
| |
# -*- encoding: utf-8 -*-
from __future__ import print_function
import abc
import os
import time
import traceback
import numpy as np
import autosklearn.pipeline.classification
import autosklearn.pipeline.regression
from sklearn.dummy import DummyClassifier, DummyRegressor
from autosklearn.constants import *
from autosklearn.evaluation.util import get_new_run_num
from autosklearn.util import Backend
from autosklearn.pipeline.implementations.util import convert_multioutput_multiclass_to_multilabel
from autosklearn.evaluation.util import calculate_score
__all__ = [
'AbstractEvaluator'
]
class MyDummyClassifier(DummyClassifier):
    """Baseline classifier that always predicts the most frequent class.

    Mirrors the pipeline interface (pre_transform / fit_estimator /
    predict_proba) so it can stand in when no configuration is supplied.
    Feature values are irrelevant to the strategy, so every sklearn call
    receives a constant one-column dummy design matrix.
    """

    def __init__(self, configuration, random_states):
        # Both arguments are accepted for interface compatibility only.
        super(MyDummyClassifier, self).__init__(strategy="most_frequent")

    def pre_transform(self, X, y, fit_params=None, init_params=None):
        # No preprocessing: pass the data through untouched.
        return X, fit_params if fit_params is not None else {}

    def fit(self, X, y, sample_weight=None):
        dummy = np.ones((X.shape[0], 1))
        return super(MyDummyClassifier, self).fit(
            dummy, y, sample_weight=sample_weight)

    def fit_estimator(self, X, y, fit_params=None):
        return self.fit(X, y)

    def predict_proba(self, X, batch_size=1000):
        dummy = np.ones((X.shape[0], 1))
        raw = super(MyDummyClassifier, self).predict_proba(dummy)
        return convert_multioutput_multiclass_to_multilabel(raw).astype(
            np.float32)

    def estimator_supports_iterative_fit(self):
        # Dummy models are fit in a single shot.
        return False
class MyDummyRegressor(DummyRegressor):
    """Baseline regressor that always predicts the training-target mean.

    Interface-compatible stand-in used when no configuration is given;
    feature values never matter, so a constant one-column dummy matrix is
    fed to sklearn.
    """

    def __init__(self, configuration, random_states):
        # Both arguments are accepted for interface compatibility only.
        super(MyDummyRegressor, self).__init__(strategy='mean')

    def pre_transform(self, X, y, fit_params=None, init_params=None):
        # No preprocessing: pass the data through untouched.
        return X, fit_params if fit_params is not None else {}

    def fit(self, X, y, sample_weight=None):
        dummy = np.ones((X.shape[0], 1))
        return super(MyDummyRegressor, self).fit(
            dummy, y, sample_weight=sample_weight)

    def fit_estimator(self, X, y, fit_params=None):
        return self.fit(X, y)

    def predict(self, X, batch_size=1000):
        dummy = np.ones((X.shape[0], 1))
        return super(MyDummyRegressor, self).predict(dummy).astype(np.float32)

    def estimator_supports_iterative_fit(self):
        # Dummy models are fit in a single shot.
        return False
class AbstractEvaluator(object):
    """Base class for target-algorithm evaluators.

    Bundles the logic shared by all resampling strategies: choosing the
    model class (real pipeline vs. dummy baseline), turning scores into
    losses, persisting models/predictions, and reporting results to
    SMAC.  Subclasses implement the concrete fit/predict strategy.
    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __init__(self, Datamanager, output_dir, configuration=None,
                 with_predictions=False,
                 all_scoring_functions=False,
                 seed=1,
                 output_y_test=False,
                 num_run=None):
        self.starttime = time.time()

        self.output_dir = output_dir
        self.configuration = configuration
        self.D = Datamanager

        # Optional hold-out splits; may be absent from the data manager.
        self.X_valid = Datamanager.data.get('X_valid')
        self.X_test = Datamanager.data.get('X_test')

        self.metric = Datamanager.info['metric']
        self.task_type = Datamanager.info['task']
        self.seed = seed

        self.output_y_test = output_y_test
        self.with_predictions = with_predictions
        self.all_scoring_functions = all_scoring_functions

        # No configuration => evaluate a trivial baseline model instead
        # of a full pipeline.
        if self.task_type in REGRESSION_TASKS:
            if self.configuration is None:
                self.model_class = MyDummyRegressor
            else:
                self.model_class = \
                    autosklearn.pipeline.regression.SimpleRegressionPipeline
            self.predict_function = self._predict_regression
        else:
            if self.configuration is None:
                self.model_class = MyDummyClassifier
            else:
                self.model_class = \
                    autosklearn.pipeline.classification.SimpleClassificationPipeline
            self.predict_function = self._predict_proba

        if num_run is None:
            num_run = get_new_run_num()
        self.num_run = num_run

        self.backend = Backend(None, self.output_dir)
        self.model = self.model_class(self.configuration, self.seed)

    def fit_predict_and_loss(self):
        """Fit model(s) according to resampling strategy, predict for the
        validation set and return the loss and predictions on the validation
        set.

        Provides a closed interface in which all steps of the target
        algorithm are performed without any communication with other
        processes. Useful for cross-validation because it allows to train a
        model, predict for the validation set and then forget the model in
        order to save main memory.
        """
        raise NotImplementedError()

    def iterative_fit(self):
        """Fit a model iteratively.

        Fitting can be interrupted in order to use a partially trained
        model."""
        raise NotImplementedError()

    def predict_and_loss(self):
        """Use current model to predict on the validation set and calculate
        loss.

        Should be used when using iterative fitting."""
        raise NotImplementedError()

    def predict(self):
        """Use the current model to predict on the validation set.

        Should only be used to create dummy predictions."""
        raise NotImplementedError()

    def _loss(self, y_true, y_hat):
        """Return ``1 - score`` so that lower is always better for SMAC."""
        # Dummy evaluations get the worst possible loss by definition.
        if self.configuration is None:
            if self.all_scoring_functions:
                return {self.metric: 1.0}
            else:
                return 1.0

        score = calculate_score(
            y_true, y_hat, self.task_type,
            self.metric, self.D.info['label_num'],
            all_scoring_functions=self.all_scoring_functions)

        # calculate_score returns a dict when all scoring functions are
        # requested, a scalar otherwise; mirror that shape for the loss.
        if hasattr(score, '__len__'):
            err = {key: 1 - score[key] for key in score}
        else:
            err = 1 - score
        return err

    def finish_up(self, loss=None, opt_pred=None, valid_pred=None,
                  test_pred=None):
        """This function does everything necessary after the fitting is done:

        * predicting
        * saving the files for the ensembles_statistics
        * generate output for SMAC

        We use it as the signal handler so we can recycle the code for the
        normal usecase and when the runsolver kills us here :)"""
        try:
            self.duration = time.time() - self.starttime
            if loss is None:
                loss, opt_pred, valid_pred, test_pred = self.predict_and_loss()
            self.file_output(loss, opt_pred, valid_pred, test_pred)
            self.duration = time.time() - self.starttime

            num_run = str(self.num_run).zfill(5)
            if isinstance(loss, dict):
                loss_ = loss
                # Report the primary metric's loss to SMAC.
                loss = loss_[self.D.info['metric']]
            else:
                loss_ = {}
            additional_run_info = ';'.join(
                ['%s: %s' %
                 (METRIC_TO_STRING[metric]
                  if metric in METRIC_TO_STRING else metric,
                  value)
                 for metric, value in loss_.items()])
            additional_run_info += ';' + 'duration: ' + str(self.duration)
            additional_run_info += ';' + 'num_run:' + num_run

            if self.configuration is not None:
                self._output_SMAC_string(self.duration, loss, self.seed,
                                         additional_run_info)
        except Exception as e:
            # Deliberately broad: this may run inside a signal handler and
            # must always report *something* back to SMAC.
            self.duration = time.time() - self.starttime
            print(traceback.format_exc())
            self._output_SMAC_string(
                self.duration, 2.0, self.seed,
                'No results were produced! Error is %s' % str(e))

    def _output_SMAC_string(self, duration, loss, seed, additional_run_info):
        """Print a result line in the format SMAC/ParamILS parses.

        Bug fix: the original ignored its ``duration`` and ``seed``
        parameters and read ``self.duration``/``self.seed`` instead.
        The values are identical at the current call sites, but the
        parameters were dead; use them so the method does what its
        signature says.
        """
        print('Result for ParamILS: %s, %f, 1, %f, %d, %s' %
              ('SAT', abs(duration), loss, seed,
               additional_run_info))

    def file_output(self, loss, Y_optimization_pred, Y_valid_pred, Y_test_pred):
        """Persist the model and all prediction arrays for the ensemble.

        Returns a (status, message) pair on inconsistent shapes, None on
        success.
        """
        seed = os.environ.get('AUTOSKLEARN_SEED')

        # NOTE(review): self.Y_optimization is set by subclasses before
        # fitting -- confirm all strategies do so before calling this.
        if self.Y_optimization.shape[0] != Y_optimization_pred.shape[0]:
            return 2, "Targets %s and prediction %s don't have the same " \
                      "length. Probably training didn't finish" % (
                          self.Y_optimization.shape, Y_optimization_pred.shape)

        num_run = str(self.num_run).zfill(5)
        if os.path.exists(self.backend.get_model_dir()):
            self.backend.save_model(self.model, self.num_run, seed)

        if self.output_y_test:
            try:
                os.makedirs(self.output_dir)
            except OSError:
                # Directory already exists; a real permission problem will
                # surface in the save call below.
                pass
            self.backend.save_targets_ensemble(self.Y_optimization)

        self.backend.save_predictions_as_npy(Y_optimization_pred, 'ensemble',
                                             seed, num_run)
        if Y_valid_pred is not None:
            self.backend.save_predictions_as_npy(Y_valid_pred, 'valid',
                                                 seed, num_run)
        if Y_test_pred is not None:
            self.backend.save_predictions_as_npy(Y_test_pred, 'test',
                                                 seed, num_run)

    def _predict_proba(self, X, model, task_type, Y_train):
        """Class-probability predictions, padded to the full class set."""
        Y_pred = model.predict_proba(X, batch_size=1000)
        Y_pred = self._ensure_prediction_array_sizes(Y_pred, Y_train)
        return Y_pred

    def _predict_regression(self, X, model, task_type, Y_train=None):
        """Regression predictions reshaped to a 2-d column array."""
        Y_pred = model.predict(X)
        if len(Y_pred.shape) == 1:
            Y_pred = Y_pred.reshape((-1, 1))
        return Y_pred

    def _ensure_prediction_array_sizes(self, prediction, Y_train):
        """Expand predictions to cover classes absent from the train fold.

        When a CV fold misses some classes the model emits fewer
        probability columns than ``label_num``; re-insert zero columns at
        the missing class indices so all folds align.
        """
        num_classes = self.D.info['label_num']

        if self.task_type == MULTICLASS_CLASSIFICATION and \
                prediction.shape[1] < num_classes:
            if Y_train is None:
                raise ValueError('Y_train must not be None!')
            classes = list(np.unique(Y_train))

            # Map each emitted column index to its true class index.
            mapping = dict()
            for class_number in range(num_classes):
                if class_number in classes:
                    index = classes.index(class_number)
                    mapping[index] = class_number
            new_predictions = np.zeros((prediction.shape[0], num_classes),
                                       dtype=np.float32)

            for index in mapping:
                class_index = mapping[index]
                new_predictions[:, class_index] = prediction[:, index]

            return new_predictions

        return prediction
| |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import time
import tensorflow as tf
# from tensorflow.python import debug as tf_debug
sys.path.append(os.path.abspath('../../'))
from models.ctc.ctc import CTC
from models.test.data import generate_data, idx2alpha
from utils.io.labels.phone import Idx2phone
from utils.io.labels.sparsetensor import list2sparsetensor, sparsetensor2list
from utils.parameter import count_total_parameters
from utils.training.learning_rate_controller import Controller
from utils.measure_time_func import measure_time
class TestCTCTraining(tf.test.TestCase):
    """Smoke test: train each CTC encoder variant on synthetic data until
    the label error rate drops below 0.1 (or max_steps is reached)."""

    def test(self):
        # Each self.check() builds, trains and decodes one model variant.
        print("CTC Working check.")

        # CNN-like-CTC
        # self.check(encoder_type='cnn_zhang', label_type='phone')
        # self.check(encoder_type='vgg_wang')
        self.check(encoder_type='cldnn_wang', lstm_impl='LSTMBlockCell')

        # BLSTM-CTC
        self.check(encoder_type='blstm', lstm_impl='BasicLSTMCell')
        self.check(encoder_type='blstm', lstm_impl='LSTMCell')
        self.check(encoder_type='blstm', lstm_impl='LSTMBlockCell')
        self.check(encoder_type='blstm', lstm_impl='LSTMBlockCell',
                   time_major=False)

        # LSTM-CTC
        self.check(encoder_type='lstm', lstm_impl='BasicLSTMCell')
        self.check(encoder_type='lstm', lstm_impl='LSTMCell')
        self.check(encoder_type='lstm', lstm_impl='LSTMBlockCell')

        # GRU-CTC
        self.check(encoder_type='bgru')
        self.check(encoder_type='gru')

        # VGG-BLSTM-CTC
        self.check(encoder_type='vgg_blstm', lstm_impl='BasicLSTMCell')
        self.check(encoder_type='vgg_blstm', lstm_impl='LSTMCell')
        self.check(encoder_type='vgg_blstm', lstm_impl='LSTMBlockCell')

        # VGG-LSTM-CTC
        self.check(encoder_type='vgg_lstm', lstm_impl='BasicLSTMCell')
        self.check(encoder_type='vgg_lstm', lstm_impl='LSTMCell')
        self.check(encoder_type='vgg_lstm', lstm_impl='LSTMBlockCell')

    @measure_time
    def check(self, encoder_type, label_type='character',
              lstm_impl=None, time_major=True, save_params=False):
        """Build one CTC model variant, train it on generated data and
        periodically decode/print predictions until convergence."""
        print('==================================================')
        print(' encoder_type: %s' % encoder_type)
        print(' label_type: %s' % label_type)
        print(' lstm_impl: %s' % lstm_impl)
        print(' time_major: %s' % str(time_major))
        print(' save_params: %s' % str(save_params))
        print('==================================================')

        tf.reset_default_graph()
        with tf.Graph().as_default():
            # Load batch data
            batch_size = 2
            # Convolutional front-ends consume spliced (context-stacked)
            # frames; recurrent-only encoders do not.
            splice = 11 if encoder_type in ['vgg_blstm', 'vgg_lstm', 'cnn_zhang',
                                            'vgg_wang', 'resnet_wang', 'cldnn_wang'] else 1
            num_stack = 2
            inputs, labels, inputs_seq_len = generate_data(
                label_type=label_type,
                model='ctc',
                batch_size=batch_size,
                num_stack=num_stack,
                splice=splice)
            # NOTE: input_size must be even number when using CudnnLSTM

            # Define model graph
            num_classes = 27 if label_type == 'character' else 61
            model = CTC(encoder_type=encoder_type,
                        input_size=inputs[0].shape[-1] // splice // num_stack,
                        splice=splice,
                        num_stack=num_stack,
                        num_units=256,
                        num_layers=2,
                        num_classes=num_classes,
                        lstm_impl=lstm_impl,
                        parameter_init=0.1,
                        clip_grad_norm=5.0,
                        clip_activation=50,
                        num_proj=256,
                        weight_decay=1e-10,
                        # bottleneck_dim=50,
                        bottleneck_dim=None,
                        time_major=time_major)

            # Define placeholders
            model.create_placeholders()
            learning_rate_pl = tf.placeholder(tf.float32, name='learning_rate')

            # Add to the graph each operation
            loss_op, logits = model.compute_loss(
                model.inputs_pl_list[0],
                model.labels_pl_list[0],
                model.inputs_seq_len_pl_list[0],
                model.keep_prob_pl_list[0])
            # NOTE(review): 'nestrov' looks like a typo for 'nesterov' --
            # confirm which optimizer key model.train() actually accepts.
            train_op = model.train(loss_op,
                                   optimizer='nestrov',
                                   learning_rate=learning_rate_pl)
            # NOTE: Adam does not run on CudnnLSTM
            decode_op = model.decoder(logits,
                                      model.inputs_seq_len_pl_list[0],
                                      beam_width=20)
            ler_op = model.compute_ler(decode_op, model.labels_pl_list[0])

            # Define learning rate controller
            learning_rate = 1e-4
            lr_controller = Controller(learning_rate_init=learning_rate,
                                       decay_start_epoch=50,
                                       decay_rate=0.9,
                                       decay_patient_epoch=10,
                                       lower_better=True)

            if save_params:
                # Create a saver for writing training checkpoints
                saver = tf.train.Saver(max_to_keep=None)

            # Add the variable initializer operation
            init_op = tf.global_variables_initializer()

            # Count total parameters
            if lstm_impl != 'CudnnLSTM':
                parameters_dict, total_parameters = count_total_parameters(
                    tf.trainable_variables())
                for parameter_name in sorted(parameters_dict.keys()):
                    print("%s %d" %
                          (parameter_name, parameters_dict[parameter_name]))
                print("Total %d variables, %s M parameters" %
                      (len(parameters_dict.keys()),
                       "{:,}".format(total_parameters / 1000000)))

            # Make feed dict
            feed_dict = {
                model.inputs_pl_list[0]: inputs,
                model.labels_pl_list[0]: list2sparsetensor(labels, padded_value=-1),
                model.inputs_seq_len_pl_list[0]: inputs_seq_len,
                model.keep_prob_pl_list[0]: 1.0,
                learning_rate_pl: learning_rate
            }

            idx2phone = Idx2phone(map_file_path='./phone61.txt')

            with tf.Session() as sess:
                # Initialize parameters
                sess.run(init_op)

                # Wrapper for tfdbg
                # sess = tf_debug.LocalCLIDebugWrapperSession(sess)

                # Train model
                max_steps = 1000
                start_time_step = time.time()
                for step in range(max_steps):
                    # for debug
                    # encoder_outputs = sess.run(
                    #     model.encoder_outputs, feed_dict)
                    # print(encoder_outputs.shape)

                    # Compute loss
                    _, loss_train = sess.run(
                        [train_op, loss_op], feed_dict=feed_dict)

                    # Gradient check
                    # grads = sess.run(model.clipped_grads,
                    #                  feed_dict=feed_dict)
                    # for grad in grads:
                    #     print(np.max(grad))

                    if (step + 1) % 10 == 0:
                        # Change to evaluation mode
                        # NOTE(review): keep_prob is already fed as 1.0
                        # above, so this assignment is a no-op -- confirm.
                        feed_dict[model.keep_prob_pl_list[0]] = 1.0

                        # Compute accuracy
                        ler_train = sess.run(ler_op, feed_dict=feed_dict)
                        duration_step = time.time() - start_time_step
                        print('Step %d: loss = %.3f / ler = %.3f (%.3f sec) / lr = %.5f' %
                              (step + 1, loss_train, ler_train, duration_step, learning_rate))
                        start_time_step = time.time()

                        # Decode
                        labels_pred_st = sess.run(
                            decode_op, feed_dict=feed_dict)

                        # Visualize
                        try:
                            labels_pred = sparsetensor2list(
                                labels_pred_st, batch_size=batch_size)
                            if label_type == 'character':
                                print('Ref: %s' % idx2alpha(labels[0]))
                                print('Hyp: %s' % idx2alpha(labels_pred[0]))
                            else:
                                print('Ref: %s' % idx2phone(labels[0]))
                                print('Hyp: %s' % idx2phone(labels_pred[0]))
                        except IndexError:
                            if label_type == 'character':
                                print('Ref: %s' % idx2alpha(labels[0]))
                                print('Hyp: %s' % '')
                            else:
                                print('Ref: %s' % idx2phone(labels[0]))
                                print('Hyp: %s' % '')
                            # NOTE: This is for no prediction

                        if ler_train < 0.1:
                            # NOTE(review): 'Modle' typo in the printed
                            # message; left as-is (runtime string).
                            print('Modle is Converged.')

                            if save_params:
                                # Save model (check point)
                                checkpoint_file = './model.ckpt'
                                save_path = saver.save(
                                    sess, checkpoint_file, global_step=2)
                                print("Model saved in file: %s" % save_path)
                            break

                        # Update learning rate
                        learning_rate = lr_controller.decay_lr(
                            learning_rate=learning_rate,
                            epoch=step,
                            value=ler_train)
                        feed_dict[learning_rate_pl] = learning_rate
if __name__ == "__main__":
    # Run the test case through TensorFlow's test runner.
    tf.test.main()
| |
# Copyright 2016-2019 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from botocore.exceptions import ClientError
from concurrent.futures import as_completed
from datetime import datetime
from c7n.actions import BaseAction, ModifyVpcSecurityGroupsAction
from c7n.filters.kms import KmsRelatedFilter
from c7n import query
from c7n.manager import resources
from c7n.tags import (
TagDelayedAction, RemoveTag, TagActionFilter, Tag, universal_augment)
from c7n.utils import (
local_session, chunks, type_schema, snapshot_identifier)
from c7n.filters.vpc import SecurityGroupFilter, SubnetFilter
@resources.register('dynamodb-table')
class Table(query.QueryResourceManager):
    """Custodian resource manager for DynamoDB tables."""

    class resource_type(query.TypeInfo):
        # Metadata telling the query framework how to enumerate and
        # describe DynamoDB tables.
        service = 'dynamodb'
        arn_type = 'table'
        enum_spec = ('list_tables', 'TableNames', None)
        detail_spec = ("describe_table", "TableName", None, "Table")
        id = 'TableName'
        name = 'TableName'
        date = 'CreationDateTime'
        dimension = 'TableName'
        config_type = 'AWS::DynamoDB::Table'

    # Tables participate in AWS universal (resource-groups) tagging.
    universal_taggable = object()
    permissions = ('dynamodb:ListTagsOfResource',)

    def get_source(self, source_type):
        # Choose between live API describes and AWS Config snapshots.
        if source_type == 'describe':
            return DescribeTable(self)
        elif source_type == 'config':
            return ConfigTable(self)
        raise ValueError('invalid source %s' % source_type)
class ConfigTable(query.ConfigSource):
    """Normalize AWS Config's serialization of a table back to the
    shapes produced by the describe_table API."""

    # Config snapshots lower-camel-case some keys; map them back.
    _SSE_KEY_MAP = (('KmsmasterKeyArn', 'KMSMasterKeyArn'),
                    ('Ssetype', 'SSEType'))

    def load_resource(self, item):
        table = super(ConfigTable, self).load_resource(item)
        # Config expresses timestamps as epoch milliseconds.
        table['CreationDateTime'] = datetime.fromtimestamp(
            table['CreationDateTime'] / 1000.0)
        # NOTE(review): assumes 'BillingModeSummary' is always present in
        # config items -- confirm against the config schema.
        summary = table['BillingModeSummary']
        if 'LastUpdateToPayPerRequestDateTime' in summary:
            summary['LastUpdateToPayPerRequestDateTime'] = datetime.fromtimestamp(
                summary['LastUpdateToPayPerRequestDateTime'] / 1000.0)
        sse = table.pop('Ssedescription', None)
        if sse is None:
            return table
        table['SSEDescription'] = sse
        for src, dst in self._SSE_KEY_MAP:
            if src in sse:
                sse[dst] = sse.pop(src)
        return table
class DescribeTable(query.DescribeSource):
    """Describe-API source that also attaches universal tags."""

    def augment(self, resources):
        # Let the base source perform its detail augmentation first,
        # then layer on tags from the universal tagging service.
        detailed = super(DescribeTable, self).augment(resources)
        return universal_augment(self.manager, detailed)
class StatusFilter(object):
    """Mixin for actions that must only operate on tables/backups in
    particular lifecycle states (e.g. only delete ACTIVE tables).

    Subclasses either set ``valid_states`` or pass ``states`` explicitly
    to the filter methods.
    """
    valid_states = ()

    def _filter_by_state(self, resources, key, states):
        """Shared implementation: keep resources whose *key* is in *states*.

        Previously duplicated verbatim for tables and backups.
        """
        states = states or self.valid_states
        orig_count = len(resources)
        result = [r for r in resources if r[key] in states]
        # Lazy %-args: the message is only formatted if actually emitted.
        self.log.info(
            "%s %d of %d tables",
            self.__class__.__name__, len(result), orig_count)
        return result

    def filter_table_state(self, tables, states=None):
        """Return only the tables whose TableStatus is in *states*."""
        return self._filter_by_state(tables, 'TableStatus', states)

    def filter_backup_state(self, tables, states=None):
        """Return only the backups whose BackupStatus is in *states*."""
        return self._filter_by_state(tables, 'BackupStatus', states)
@Table.filter_registry.register('kms-key')
class KmsFilter(KmsRelatedFilter):
    """
    Filter a resource by its associated kms key and optionally the aliasname
    of the kms key by using 'c7n:AliasName'

    :example:

    .. code-block:: yaml

            policies:
              - name: dynamodb-kms-key-filters
                resource: dynamodb-table
                filters:
                  - type: kms-key
                    key: c7n:AliasName
                    value: "^(alias/aws/dynamodb)"
                    op: regex
    """
    # JMESPath to the key ARN on the table's server-side encryption block.
    RelatedIdsExpression = 'SSEDescription.KMSMasterKeyArn'
@Table.action_registry.register('delete')
class DeleteTable(BaseAction, StatusFilter):
    """Action to delete dynamodb tables

    :example:

    .. code-block:: yaml

            policies:
              - name: delete-empty-tables
                resource: dynamodb-table
                filters:
                  - TableSizeBytes: 0
                actions:
                  - delete
    """
    valid_status = ('ACTIVE',)
    schema = type_schema('delete')
    permissions = ("dynamodb:DeleteTable",)

    def delete_table(self, client, table_set):
        # One API call per table; dynamodb has no batch delete-table.
        for table in table_set:
            client.delete_table(TableName=table['TableName'])

    def process(self, resources):
        # Only ACTIVE tables can be deleted.
        resources = self.filter_table_state(resources, self.valid_status)
        if not resources:
            return
        client = local_session(self.manager.session_factory).client('dynamodb')
        with self.executor_factory(max_workers=2) as w:
            futures = [
                w.submit(self.delete_table, client, table_set)
                for table_set in chunks(resources, 20)]
            for f in as_completed(futures):
                if f.exception():
                    self.log.error(
                        "Exception deleting dynamodb table set \n %s"
                        % (f.exception()))
@Table.action_registry.register('set-stream')
class SetStream(BaseAction, StatusFilter):
    """Action to enable/disable streams on table.

    :example:

    .. code-block:: yaml

            policies:
              - name: stream-update
                resource: dynamodb-table
                filters:
                  - TableName: 'test'
                  - TableStatus: 'ACTIVE'
                actions:
                  - type: set-stream
                    state: True
                    stream_view_type: 'NEW_IMAGE'
    """
    valid_status = ('ACTIVE',)
    schema = type_schema('set-stream',
                         state={'type': 'boolean'},
                         stream_view_type={'type': 'string'})
    permissions = ("dynamodb:UpdateTable",)

    def process(self, tables):
        """Update the stream specification on each ACTIVE table and record
        the resulting stream state/type under c7n: annotation keys."""
        tables = self.filter_table_state(tables, self.valid_status)
        if not tables:
            self.log.warning("Table not in ACTIVE state.")
            return
        # Fetch config values once (previously the local `type` shadowed
        # the builtin and the config was re-read on every iteration).
        state = self.data.get('state')
        view_type = self.data.get('stream_view_type')
        stream_spec = {"StreamEnabled": state}
        if view_type is not None:
            stream_spec["StreamViewType"] = view_type
        c = local_session(self.manager.session_factory).client('dynamodb')
        with self.executor_factory(max_workers=2) as w:
            futures = {w.submit(c.update_table,
                                TableName=t['TableName'],
                                StreamSpecification=stream_spec): t
                       for t in tables}
            for f in as_completed(futures):
                t = futures[f]
                if f.exception():
                    self.log.error(
                        "Exception updating dynamodb table set \n %s"
                        % (f.exception()))
                    continue
                if view_type is not None:
                    # Annotate the resource with the server-confirmed state.
                    spec = f.result()['TableDescription']['StreamSpecification']
                    t['c7n:StreamState'] = spec['StreamEnabled']
                    t['c7n:StreamType'] = spec['StreamViewType']
@Table.action_registry.register('backup')
class CreateBackup(BaseAction, StatusFilter):
    """Creates a manual backup of a DynamoDB table. Use of the optional
    prefix flag will attach a user specified prefix. Otherwise,
    the backup prefix will default to 'Backup'.

    :example:

    .. code-block:: yaml

            policies:
              - name: dynamodb-create-backup
                resource: dynamodb-table
                actions:
                  - type: backup
                    prefix: custom
    """
    valid_status = ('ACTIVE',)
    schema = type_schema('backup',
                         prefix={'type': 'string'})
    permissions = ('dynamodb:CreateBackup',)

    def process(self, resources):
        """Create a named backup for each ACTIVE table and record the new
        backup ARN on the resource under 'c7n:BackupArn'."""
        resources = self.filter_table_state(resources, self.valid_status)
        if not len(resources):
            return
        c = local_session(self.manager.session_factory).client('dynamodb')
        futures = {}
        prefix = self.data.get('prefix', 'Backup')
        with self.executor_factory(max_workers=2) as w:
            for t in resources:
                futures[w.submit(
                    c.create_backup,
                    BackupName=snapshot_identifier(prefix, t['TableName']),
                    TableName=t['TableName'])] = t
            for f in as_completed(futures):
                t = futures[f]
                if f.exception():
                    self.manager.log.warning(
                        "Could not complete DynamoDB backup table:%s", t)
                    # BUGFIX: without this continue, the f.result() call
                    # below re-raises the failed future's exception and
                    # aborts the whole action run.
                    continue
                arn = f.result()['BackupDetails']['BackupArn']
                t['c7n:BackupArn'] = arn
@resources.register('dynamodb-backup')
class Backup(query.QueryResourceManager):
    """Resource manager for DynamoDB table backups."""

    class resource_type(query.TypeInfo):
        service = 'dynamodb'
        # list_backups already returns a full ARN; no arn assembly needed.
        arn = 'BackupArn'
        enum_spec = ('list_backups', 'BackupSummaries', None)
        id = 'BackupArn'
        name = 'BackupName'
        date = 'BackupCreationDateTime'
@Backup.action_registry.register('delete')
class DeleteBackup(BaseAction, StatusFilter):
    """Deletes backups of a DynamoDB table

    :example:

    .. code-block:: yaml

            policies:
              - name: dynamodb-delete-backup
                resource: dynamodb-backup
                filters:
                  - type: value
                    key: BackupCreationDateTime
                    op: greater-than
                    value_type: age
                    value: 28
                actions:
                  - type: delete
    """
    valid_status = ('AVAILABLE',)
    schema = type_schema('delete')
    permissions = ('dynamodb:DeleteBackup',)

    def process(self, backups):
        # Only AVAILABLE backups may be deleted.
        backups = self.filter_backup_state(backups, self.valid_status)
        if not backups:
            return
        client = local_session(self.manager.session_factory).client('dynamodb')
        for batch in chunks(backups, 20):
            self.process_dynamodb_backups(batch, client)

    def process_dynamodb_backups(self, table_set, c):
        for backup in table_set:
            try:
                c.delete_backup(BackupArn=backup['BackupArn'])
            except ClientError as e:
                # A backup that is already gone is non-fatal; anything
                # else is unexpected and should surface.
                if e.response['Error']['Code'] != 'ResourceNotFoundException':
                    raise
                self.log.warning(
                    "Could not complete DynamoDB backup deletion for table:%s",
                    backup)
@resources.register('dynamodb-stream')
class Stream(query.QueryResourceManager):
    """Resource manager for DynamoDB streams."""
    # Note stream management takes place on the table resource

    class resource_type(query.TypeInfo):
        service = 'dynamodbstreams'
        # Note max rate of 5 calls per second
        enum_spec = ('list_streams', 'Streams', None)
        # Note max rate of 10 calls per second.
        detail_spec = (
            "describe_stream", "StreamArn", "StreamArn", "StreamDescription")
        arn = id = 'StreamArn'
        arn_type = 'stream'
        name = 'TableName'
        date = 'CreationDateTime'
        dimension = 'TableName'
@resources.register('dax')
class DynamoDbAccelerator(query.QueryResourceManager):
    """Resource manager for DynamoDB Accelerator (DAX) clusters."""

    class resource_type(query.TypeInfo):
        service = 'dax'
        arn_type = 'cluster'
        enum_spec = ('describe_clusters', 'Clusters', None)
        id = 'ClusterArn'
        name = 'ClusterName'
        config_type = 'AWS::DAX::Cluster'

    permissions = ('dax:ListTags',)

    def get_source(self, source_type):
        """Return the resource source implementation for *source_type*.

        :raises ValueError: for any source other than 'describe' or 'config'.
        """
        if source_type == 'describe':
            return DescribeDaxCluster(self)
        elif source_type == 'config':
            return query.ConfigSource(self)
        raise ValueError('invalid source %s' % source_type)

    def get_resources(self, ids, cache=True, augment=True):
        """Override in order to disable the augment for serverless policies.
        list_tags on dax resources always fail until the cluster is finished creating.
        """
        # `augment` is accepted for signature compatibility but is
        # deliberately forced off here (see docstring).
        return super(DynamoDbAccelerator, self).get_resources(ids, cache, augment=False)
class DescribeDaxCluster(query.DescribeSource):
    """Describe source for DAX clusters whose tag augmentation tolerates
    clusters that are still provisioning."""

    def get_resources(self, ids, cache=True):
        """Retrieve dax resources for serverless policies or related resources
        """
        dax = local_session(self.manager.session_factory).client('dax')
        return dax.describe_clusters(ClusterNames=ids).get('Clusters')

    def augment(self, clusters):
        augmented = super(DescribeDaxCluster, self).augment(clusters)
        tagged = _dax_cluster_tags(
            augmented,
            self.manager.session_factory,
            self.manager.retry,
            self.manager.log)
        # Drop clusters whose tags could not be fetched (still creating).
        return [c for c in tagged if c]
def _dax_cluster_tags(tables, session_factory, retry, log):
    """Annotate each DAX cluster dict with its ``Tags``.

    Returns a list of the successfully annotated clusters; clusters that
    are not yet queryable (not found / invalid state, typically because
    they are still being created) are dropped.

    :param tables: list of cluster dicts (each must carry 'ClusterArn')
    :param session_factory: boto session factory
    :param retry: retry-wrapper callable for throttled API calls
    :param log: logger (unused here; kept for interface stability)
    """
    client = local_session(session_factory).client('dax')

    def process_tags(r):
        try:
            r['Tags'] = retry(
                client.list_tags, ResourceName=r['ClusterArn'])['Tags']
            return r
        except (client.exceptions.ClusterNotFoundFault,
                client.exceptions.InvalidClusterStateFault):
            # Tags are unavailable until the cluster finishes provisioning.
            return None

    # Return a concrete list. Previously this returned
    # `filter(None, list(map(...)))` -- a lazy iterator on py3 that the
    # caller re-filtered redundantly, and a list on py2.
    return [r for r in map(process_tags, tables) if r is not None]
# DAX clusters reuse the generic marked-for-op filter for tag-based scheduling.
DynamoDbAccelerator.filter_registry.register('marked-for-op', TagActionFilter)
@DynamoDbAccelerator.filter_registry.register('security-group')
class DaxSecurityGroupFilter(SecurityGroupFilter):
    """Filter DAX clusters by attributes of their attached security groups."""
    RelatedIdsExpression = "SecurityGroups[].SecurityGroupIdentifier"
@DynamoDbAccelerator.action_registry.register('tag')
class DaxTagging(Tag):
    """Action to create tag(s) on a resource

    :example:

    .. code-block:: yaml

            policies:
              - name: dax-cluster-tag
                resource: dax
                filters:
                  - "tag:target-tag": absent
                actions:
                  - type: tag
                    key: target-tag
                    value: target-tag-value
    """
    permissions = ('dax:TagResource',)

    def process_resource_set(self, client, resources, tags):
        id_key = self.manager.resource_type.id
        # A cluster that disappeared or is in a transitional state is
        # logged and skipped rather than failing the whole run.
        recoverable = (client.exceptions.ClusterNotFoundFault,
                       client.exceptions.InvalidARNFault,
                       client.exceptions.InvalidClusterStateFault)
        for resource in resources:
            try:
                client.tag_resource(ResourceName=resource[id_key], Tags=tags)
            except recoverable as e:
                self.log.warning(
                    'Exception tagging %s: \n%s', resource['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('remove-tag')
class DaxRemoveTagging(RemoveTag):
    """Action to remove tag(s) on a resource

    :example:

    .. code-block:: yaml

            policies:
              - name: dax-remove-tag
                resource: dax
                filters:
                  - "tag:OutdatedTag": present
                actions:
                  - type: remove-tag
                    tags: ["OutdatedTag"]
    """
    permissions = ('dax:UntagResource',)

    def process_resource_set(self, client, resources, tag_keys):
        # Missing clusters/tags and transitional cluster states are
        # non-fatal; log and continue with the remaining resources.
        skippable = (client.exceptions.ClusterNotFoundFault,
                     client.exceptions.TagNotFoundFault,
                     client.exceptions.InvalidClusterStateFault)
        for resource in resources:
            try:
                client.untag_resource(
                    ResourceName=resource['ClusterArn'], TagKeys=tag_keys)
            except skippable as e:
                self.log.warning(
                    'Exception removing tags on %s: \n%s',
                    resource['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('mark-for-op')
class DaxMarkForOp(TagDelayedAction):
    """Action to specify an action to occur at a later date

    :example:

    .. code-block:: yaml

            policies:
              - name: dax-mark-tag-compliance
                resource: dax
                filters:
                  - "tag:custodian_cleanup": absent
                  - "tag:OwnerName": absent
                actions:
                  - type: mark-for-op
                    tag: custodian_cleanup
                    msg: "Missing tag 'OwnerName': {op}@{action_date}"
                    op: delete
                    days: 7
    """
    # All behavior is inherited from TagDelayedAction; this subclass exists
    # only to register the action on the dax resource.
@DynamoDbAccelerator.action_registry.register('delete')
class DaxDeleteCluster(BaseAction):
    """Action to delete a DAX cluster

    :example:

    .. code-block:: yaml

            policies:
              - name: dax-delete-cluster
                resource: dax
                filters:
                  - "tag:DeleteMe": present
                actions:
                  - type: delete
    """
    permissions = ('dax:DeleteCluster',)
    schema = type_schema('delete')

    def process(self, resources):
        """Delete each cluster; missing/transitional clusters are logged
        and skipped so the rest of the run proceeds."""
        client = local_session(self.manager.session_factory).client('dax')
        for r in resources:
            try:
                client.delete_cluster(ClusterName=r['ClusterName'])
            except (client.exceptions.ClusterNotFoundFault,
                    client.exceptions.InvalidARNFault,
                    client.exceptions.InvalidClusterStateFault) as e:
                # BUGFIX: the message previously read 'Exception marking' --
                # a copy/paste from the mark-for-op action; this is delete.
                self.log.warning(
                    'Exception deleting %s: \n%s', r['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('update-cluster')
class DaxUpdateCluster(BaseAction):
    """Updates a DAX cluster configuration

    :example:

    .. code-block:: yaml

            policies:
              - name: dax-update-cluster
                resource: dax
                filters:
                  - ParameterGroup.ParameterGroupName: 'default.dax1.0'
                actions:
                  - type: update-cluster
                    ParameterGroupName: 'testparamgroup'
    """
    schema = {
        'type': 'object',
        'additionalProperties': False,
        'properties': {
            'type': {'enum': ['update-cluster']},
            'Description': {'type': 'string'},
            'PreferredMaintenanceWindow': {'type': 'string'},
            'NotificationTopicArn': {'type': 'string'},
            'NotificationTopicStatus': {'type': 'string'},
            'ParameterGroupName': {'type': 'string'}
        }
    }
    permissions = ('dax:UpdateCluster',)

    def process(self, resources):
        client = local_session(self.manager.session_factory).client('dax')
        # The policy data doubles as the UpdateCluster kwargs, minus the
        # schema discriminator.
        params = dict(self.data)
        params.pop('type')
        skippable = (client.exceptions.ClusterNotFoundFault,
                     client.exceptions.InvalidClusterStateFault)
        for cluster in resources:
            params['ClusterName'] = cluster['ClusterName']
            try:
                client.update_cluster(**params)
            except skippable as e:
                self.log.warning(
                    'Exception updating dax cluster %s: \n%s',
                    cluster['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('modify-security-groups')
class DaxModifySecurityGroup(ModifyVpcSecurityGroupsAction):
    """Replace the security groups attached to DAX clusters."""
    permissions = ('dax:UpdateCluster',)

    def process(self, resources):
        client = local_session(self.manager.session_factory).client('dax')
        # get_groups returns one resolved group-id list per resource,
        # positionally aligned with `resources`.
        groups = super(DaxModifySecurityGroup, self).get_groups(resources)
        for idx, r in enumerate(resources):
            client.update_cluster(
                ClusterName=r['ClusterName'], SecurityGroupIds=groups[idx])
@DynamoDbAccelerator.filter_registry.register('subnet')
class DaxSubnetFilter(SubnetFilter):
    """Filters DAX clusters based on their associated subnet group

    :example:

    .. code-block:: yaml

            policies:
              - name: dax-no-auto-public
                resource: dax
                filters:
                  - type: subnet
                    key: MapPublicIpOnLaunch
                    value: False
    """
    # Subnets are resolved via the cluster's subnet group rather than a
    # JMESPath expression, so this is intentionally empty.
    RelatedIdsExpression = ""

    def get_related_ids(self, resources):
        """Return the set of subnet ids reachable through each cluster's
        subnet group (reads self.groups, populated by process)."""
        group_ids = set()
        for r in resources:
            group_ids.update(
                [s['SubnetIdentifier'] for s in
                 self.groups[r['SubnetGroup']]['Subnets']])
        return group_ids

    def process(self, resources, event=None):
        # Fetch subnet groups up front; get_related_ids (invoked by the
        # base class during process) reads self.groups.
        client = local_session(self.manager.session_factory).client('dax')
        subnet_groups = client.describe_subnet_groups()['SubnetGroups']
        self.groups = {s['SubnetGroupName']: s for s in subnet_groups}
        return super(DaxSubnetFilter, self).process(resources)
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
from .model_base import AccessControlledModel
from girder import events
from girder.constants import AccessType, CoreEventHandler
from girder.exceptions import ValidationException
class Group(AccessControlledModel):
    """
    Groups are simply groups of users. The primary use of grouping users is
    to simplify access control for resources in the system, but they can
    be used for other purposes that require groupings of users as well.

    Group membership is stored in the database on the user document only;
    there is no "users" field in this model. This is to optimize for the most
    common use case for querying membership, which involves checking access
    control policies, which is always done relative to a specific user. The
    task of querying all members within a group is much less common and
    typically only performed on a single group at a time, so doing a find on the
    indexed group list in the user collection is sufficiently fast.

    Users with READ access on the group can see the group and its members.
    Users with WRITE access on the group can add and remove members and
    change the name or description.
    Users with ADMIN access can promote group members to grant them WRITE or
    ADMIN access, and can also delete the entire group.

    This model uses a custom implementation of the access control methods,
    because it uses only a subset of its capabilities and provides a more
    optimized implementation for that subset. Specifically: read access is
    implied by membership in the group or having an invitation to join the
    group, so we don't store read access in the access document as normal.
    Another constraint is that write and admin access on the group can only be
    granted to members of the group. Also, group permissions are not allowed
    on groups for the sake of simplicity.
    """

    def initialize(self):
        # Collection name and indexes; lowerName backs case-insensitive
        # uniqueness checks, and the text index supports group search.
        self.name = 'group'
        self.ensureIndices(['lowerName'])
        self.ensureTextIndex({
            'name': 10,
            'description': 1
        })
        self.exposeFields(level=AccessType.READ, fields=(
            '_id', 'name', 'public', 'description', 'created', 'updated',
            'addAllowed', '_addToGroupPolicy'))
        # Grant the creator ADMIN access right after a group is first saved.
        events.bind('model.group.save.created',
                    CoreEventHandler.GROUP_CREATOR_ACCESS,
                    self._grantCreatorAccess)

    def validate(self, doc):
        """Normalize whitespace and enforce a non-empty, case-insensitively
        unique group name.

        :raises ValidationException: on empty or duplicate name.
        """
        doc['name'] = doc['name'].strip()
        doc['lowerName'] = doc['name'].lower()
        doc['description'] = doc['description'].strip()
        if not doc['name']:
            raise ValidationException('Group name must not be empty.', 'name')
        q = {
            'lowerName': doc['lowerName'],
        }
        if '_id' in doc:
            # Exclude the document itself when checking duplicates on update.
            q['_id'] = {'$ne': doc['_id']}
        duplicate = self.findOne(q, fields=['_id'])
        if duplicate is not None:
            raise ValidationException('A group with that name already exists.',
                                      field='name')
        return doc

    def listMembers(self, group, offset=0, limit=0, sort=None):
        """
        List members of the group.
        """
        # NOTE(review): functionally the same query as getMembers below.
        from .user import User
        return User().find({
            'groups': group['_id']
        }, limit=limit, offset=offset, sort=sort)

    def remove(self, group, **kwargs):
        """
        Delete a group, and all references to it in the database.

        :param group: The group document to delete.
        :type group: dict
        """
        # Remove references to this group from user group membership lists
        from .user import User
        User().update({
            'groups': group['_id']
        }, {
            '$pull': {'groups': group['_id']}
        })
        # Finally, delete the document itself
        AccessControlledModel.remove(self, group)

    def getMembers(self, group, offset=0, limit=0, sort=None):
        """
        Return the list of all users who belong to this group.

        :param group: The group to list members on.
        :param offset: Offset into the result set of users.
        :param limit: Result set size limit.
        :param sort: Sort parameter for the find query.
        :returns: List of user documents.
        """
        from .user import User
        return User().find(
            {'groups': group['_id']},
            offset=offset, limit=limit, sort=sort)

    def addUser(self, group, user, level=AccessType.READ):
        """
        Add the user to the group. Records membership in the group in the
        user document, and also grants the specified access level on the
        group itself to the user. Any group member has at least read access on
        the group. If the user already belongs to the group, this method can
        be used to change their access level within it.
        """
        from .user import User
        if 'groups' not in user:
            user['groups'] = []
        if not group['_id'] in user['groups']:
            user['groups'].append(group['_id'])
            # saved again in setUserAccess...
            user = User().save(user, validate=False)
        # Delete outstanding request if one exists
        self._deleteRequest(group, user)
        self.setUserAccess(group, user, level, save=True)
        return group

    def _deleteRequest(self, group, user):
        """
        Helper method to delete a request for the given user.
        """
        if user['_id'] in group.get('requests', []):
            group['requests'].remove(user['_id'])
            self.save(group, validate=False)

    def joinGroup(self, group, user):
        """
        This method either accepts an invitation to join a group, or if the
        given user has not been invited to the group, this will create an
        invitation request that moderators and admins may grant or deny later.
        """
        from .user import User
        if 'groupInvites' not in user:
            user['groupInvites'] = []
        # If an invitation exists, accept it at the invited access level;
        # otherwise record a join request on the group document.
        for invite in user['groupInvites']:
            if invite['groupId'] == group['_id']:
                self.addUser(group, user, level=invite['level'])
                user['groupInvites'].remove(invite)
                User().save(user, validate=False)
                break
        else:
            if 'requests' not in group:
                group['requests'] = []
            if not user['_id'] in group['requests']:
                group['requests'].append(user['_id'])
                group = self.save(group, validate=False)
        return group

    def inviteUser(self, group, user, level=AccessType.READ):
        """
        Invite a user to join the group. Inviting them automatically
        grants the user read access to the group so that they can see it.
        Once they accept the invitation, they will be given the specified level
        of access.

        If the user has requested an invitation to this group, calling this
        will accept their request and add them to the group at the access
        level specified.
        """
        from .user import User
        if group['_id'] in user.get('groups', []):
            raise ValidationException('User is already in this group.')
        # If there is an outstanding request to join from this user, we
        # just add them to the group instead of invite them.
        if user['_id'] in group.get('requests', []):
            return self.addUser(group, user, level)
        if 'groupInvites' not in user:
            user['groupInvites'] = []
        # Re-inviting updates the pending invitation's level in place.
        for invite in user['groupInvites']:
            if invite['groupId'] == group['_id']:
                invite['level'] = level
                break
        else:
            user['groupInvites'].append({
                'groupId': group['_id'],
                'level': level
            })
        return User().save(user, validate=False)

    def getInvites(self, group, limit=0, offset=0, sort=None):
        """
        Return a page of outstanding invitations to a group. This is simply
        a list of users invited to the group currently.

        :param group: The group to find invitations for.
        :param limit: Result set size limit.
        :param offset: Offset into the results.
        :param sort: The sort field.
        """
        from .user import User
        return User().find(
            {'groupInvites.groupId': group['_id']},
            limit=limit, offset=offset, sort=sort)

    def removeUser(self, group, user):
        """
        Remove the user from the group. If the user is not in the group but
        has an outstanding invitation to the group, the invitation will be
        revoked. If the user has requested an invitation, calling this will
        deny that request, thereby deleting it.
        """
        from .user import User
        # Remove group membership for this user.
        if 'groups' in user and group['_id'] in user['groups']:
            user['groups'].remove(group['_id'])
        # Remove outstanding requests from this user
        self._deleteRequest(group, user)
        # Remove any outstanding invitations for this group
        user['groupInvites'] = list(filter(
            lambda inv: not inv['groupId'] == group['_id'],
            user.get('groupInvites', [])))
        user = User().save(user, validate=False)
        # Remove all group access for this user on this group.
        self.setUserAccess(group, user, level=None, save=True)
        return group

    def createGroup(self, name, creator, description='', public=True):
        """
        Create a new group. The creator will be given admin access to it.

        :param name: The name of the folder.
        :type name: str
        :param description: Description for the folder.
        :type description: str
        :param public: Whether the group is publicly visible.
        :type public: bool
        :param creator: User document representing the creator of the group.
        :type creator: dict
        :returns: The group document that was created.
        """
        assert isinstance(public, bool)
        now = datetime.datetime.utcnow()
        group = {
            'name': name,
            'description': description,
            'creatorId': creator['_id'],
            'created': now,
            'updated': now,
            'requests': []
        }
        self.setPublic(group, public, save=False)
        # The save triggers the 'model.group.save.created' event, which
        # grants the creator ADMIN access (see _grantCreatorAccess).
        return self.save(group)

    def _grantCreatorAccess(self, event):
        """
        This callback makes the group creator an administrator member of the
        group.

        This generally should not be called or overridden directly, but it may
        be unregistered from the `model.group.save.created` event.
        """
        from .user import User
        group = event.info
        creator = User().load(group['creatorId'], force=True, exc=True)
        self.addUser(group, creator, level=AccessType.ADMIN)

    def updateGroup(self, group):
        """
        Updates a group.

        :param group: The group document to update
        :type group: dict
        :returns: The group document that was edited.
        """
        group['updated'] = datetime.datetime.utcnow()
        # Validate and save the group
        return self.save(group)

    def getFullRequestList(self, group):
        """
        Return the set of all outstanding requests, filled in with the login
        and full names of the corresponding users.

        :param group: The group to get requests for.
        :type group: dict
        """
        from .user import User
        userModel = User()
        for userId in group.get('requests', []):
            user = userModel.load(userId, force=True, fields=['firstName', 'lastName', 'login'])
            yield {
                'id': userId,
                'login': user['login'],
                'name': '%s %s' % (user['firstName'], user['lastName'])
            }

    def hasAccess(self, doc, user=None, level=AccessType.READ):
        """
        This overrides the default AccessControlledModel behavior for checking
        access to perform an optimized subset of the access control behavior.

        :param doc: The group to check permission on.
        :type doc: dict
        :param user: The user to check against.
        :type user: dict
        :param level: The access level.
        :type level: AccessType
        :returns: Whether the access is granted.
        """
        if user is None:
            # Short-circuit the case of anonymous users
            return level == AccessType.READ and doc.get('public', False) is True
        elif user['admin']:
            # Short-circuit the case of admins
            return True
        elif level == AccessType.READ:
            # For read access, just check user document for membership or public
            return doc.get('public', False) is True or\
                doc['_id'] in user.get('groups', []) or\
                doc['_id'] in [i['groupId'] for i in
                               user.get('groupInvites', [])]
        else:
            # Check the actual permissions document for >=WRITE access
            return self._hasUserAccess(doc.get('access', {}).get('users', []),
                                       user['_id'], level)

    def getAccessLevel(self, doc, user):
        """
        Return the maximum access level for a given user on the group.

        :param doc: The group to check access on.
        :param user: The user to get the access level for.
        :returns: The max AccessType available for the user on the object.
        """
        if user is None:
            # Anonymous users only get READ on public groups.
            if doc.get('public', False):
                return AccessType.READ
            else:
                return AccessType.NONE
        elif user['admin']:
            return AccessType.ADMIN
        else:
            access = doc.get('access', {})
            level = AccessType.NONE
            # Membership implies READ; a pending invite also grants READ,
            # but invitees cannot hold higher levels, so return immediately.
            if doc['_id'] in user.get('groups', []):
                level = AccessType.READ
            elif doc['_id'] in [i['groupId'] for i in
                                user.get('groupInvites', [])]:
                return AccessType.READ
            for userAccess in access.get('users', []):
                if userAccess['id'] == user['_id']:
                    level = max(level, userAccess['level'])
                    if level == AccessType.ADMIN:
                        return level
            return level

    def setGroupAccess(self, doc, group, level, save=False):
        # Group-on-group permissions are deliberately unsupported (see
        # the class docstring).
        raise NotImplementedError('Not implemented.')

    def setUserAccess(self, doc, user, level, save=False):
        """
        This override is used because we only need to augment the access
        field in the case of WRITE access and above since READ access is
        implied by membership or invitation.
        """
        # NOTE(review): the `save` parameter is ignored -- both branches
        # always save. Confirm whether honoring it is intended.
        if level is not None and level > AccessType.READ:
            doc = AccessControlledModel.setUserAccess(
                self, doc, user, level, save=True)
        else:
            # READ (or revocation) is not stored in the access document.
            doc = AccessControlledModel.setUserAccess(
                self, doc, user, level=None, save=True)
        return doc
| |
"""Plugin for plex media server (www.plexapp.com)."""
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import re
import logging
import os
from datetime import datetime
from os.path import basename
from socket import gethostbyname
from xml.dom.minidom import parseString
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils import requests
# Module-level logger for this plugin.
log = logging.getLogger('plex')
class InputPlex(object):
"""
Uses a plex media server (www.plexapp.com) tv section as an input.
'section' Required parameter, numerical (/library/sections/<num>) or section name.
'selection' Can be set to different keys:
- all : Default
- unwatched :
- recentlyAdded :
- recentlyViewed :
- recentlyViewedShows : Series only.
'all' and 'recentlyViewedShows' will only produce a list of show names while the other three will produce
filename and download url.
'token' Plex access token, used to connect to PMS
'username' Myplex (http://my.plexapp.com) username, used to connect to shared PMS'.
'password' Myplex (http://my.plexapp.com) password, used to connect to shared PMS'.
'server' Host/IP of PMS to connect to.
'lowercase_title' Convert filename (title) to lower case.
'strip_non_alpha' Sanitize filename (title), stripping all non-alphanumeric letters.
Better to turn off in case of non-english titles.
'strip_year' Remove year from title, ex: Show Name (2012) 01x01 => Show Name 01x01.
Movies will have year added to their filename unless this is set.
'strip_parens' Remove information in parens from title, ex: Show Name (UK)(2012) 01x01 => Show Name 01x01.
'original_filename' Use filename stored in PMS instead of transformed name. lowercase_title and strip_year
will be ignored.
'unwatched_only' Request only unwatched media from PMS.
'fetch' What to download, can be set to the following values:
- file The file itself, default.
- art Series or movie art as configured in PMS
- cover Series cover for series, movie cover for movies.
- thumb Episode thumbnail, series only.
- season_cover Season cover, series only. If used in movies, movie cover will be set.
Default parameters:
server : localhost
port : 32400
selection : all
lowercase_title : no
strip_non_alpha : yes
strip_year : yes
strip_parens : no
original_filename: no
unwatched_only : no
fetch : file
Example:
plex:
server: 192.168.1.23
section: 3
selection: recentlyAdded
fetch: series_art
"""
schema = {
'type': 'object',
'properties': {
'server': {'type': 'string', 'default': '127.0.0.1'},
'port': {'type': 'integer', 'default': 32400},
'username': {'type': 'string'},
'password': {'type': 'string'},
'token': {'type': 'string'},
'section': {'type': ['string', 'integer']},
'selection': {'type': 'string', 'default': 'all'},
'lowercase_title': {'type': 'boolean', 'default': False},
'strip_non_alpha': {'type': 'boolean', 'default': True},
'strip_year': {'type': 'boolean', 'default': True},
'strip_parens': {'type': 'boolean', 'default': False},
'original_filename': {'type': 'boolean', 'default': False},
'unwatched_only': {'type': 'boolean', 'default': False},
'fetch': {'type': 'string', 'default': 'file', 'enum': ['file', 'art', 'cover', 'thumb', 'season_cover']}
},
'required': ['section'],
'not': {
'anyOf': [
{'required': ['token', 'username']},
{'required': ['token', 'password']},
]},
'error_not': 'Cannot specify `username` and `password` with `token`',
'dependencies': {
'username': ['password'],
'password': ['username']
},
'additionalProperties': False
}
def prepare_config(self, config):
config['plexserver'] = config['server']
config = self.plex_format_server(config)
return config
def plex_get_globalaccesstoken(self, config):
header = {'X-Plex-Client-Identifier': 'flexget'}
try:
r = requests.post('https://my.plexapp.com/users/sign_in.xml',
auth=(config['username'], config['password']), headers=header)
except requests.RequestException as error:
raise plugin.PluginError('Could not log in to myplex! Error: %s' % error)
if 'Invalid email' in r.text:
raise plugin.PluginError('Myplex: invalid username and/or password!')
dom = parseString(r.text)
globalaccesstoken = dom.getElementsByTagName('authentication-token')[0].firstChild.nodeValue
if not globalaccesstoken:
raise plugin.PluginError('Myplex: could not find a server!')
else:
log.debug('Myplex: Got global accesstoken: %s', globalaccesstoken)
return globalaccesstoken
def plex_get_accesstoken(self, config, globalaccesstoken=""):
    """Return a server-specific access token for ``config['server']``.

    Obtains the global account token first if not supplied. Local
    servers accept the account-wide token directly. Raises PluginError
    if the server list cannot be fetched or the configured server is
    not registered on the account.
    """
    accesstoken = None
    if not globalaccesstoken:
        globalaccesstoken = self.plex_get_globalaccesstoken(config)
    if config['server'] in ('localhost', '127.0.0.1'):
        log.debug('Server using localhost. Global Token will be used')
        return globalaccesstoken
    try:
        r = requests.get("https://my.plexapp.com/pms/servers?X-Plex-Token=%s" % globalaccesstoken)
    except requests.RequestException as e:
        raise plugin.PluginError("Could not get servers from my.plexapp.com using "
                                 "authentication-token: %s. (%s)" % (globalaccesstoken, e))
    dom = parseString(r.text)
    # Match the configured server against each registered server's
    # public address or local addresses.
    for node in dom.getElementsByTagName('Server'):
        if config['server'] in (node.getAttribute('address'), node.getAttribute('localAddresses')):
            accesstoken = node.getAttribute('accessToken')
            log.debug("Got plextoken: %s", accesstoken)
    if not accesstoken:
        raise plugin.PluginError('Could not retrieve accesstoken for %s.' % config['server'])
    else:
        return accesstoken
def plex_format_server(self, config):
    """Resolve ``config['server']`` to an IP address in place and
    return the (mutated) config."""
    resolved = gethostbyname(config['server'])
    if resolved != config['server']:
        config['server'] = resolved
    return config
def plex_section_is_int(self, section):
    """Return True when *section* is already a numeric library-section key."""
    return isinstance(section, int)
def on_task_input(self, task, config):
    """Produce entries from a Plex Media Server library section.

    Resolves the section (by title if needed), fetches its listing and
    builds one entry per media part — or one lightweight entry per show
    for 'show' sections. The ``fetch`` option selects whether entry
    URLs point at the media file itself or at artwork
    (art/cover/thumb/season_cover).
    """
    config = self.prepare_config(config)
    urlconfig = {}
    urlappend = "?"
    entries = []
    if config['unwatched_only'] and config['section'] != 'recentlyViewedShows' and config['section'] != 'all':
        urlconfig['unwatched'] = '1'
    # Authentication: an explicit token wins; otherwise sign in with
    # username/password to obtain one.
    if config.get('token'):
        accesstoken = config['token']
        log.debug("Using accesstoken: %s", accesstoken)
        urlconfig['X-Plex-Token'] = accesstoken
    elif config.get('username'):
        accesstoken = self.plex_get_accesstoken(config)
        log.debug("Got accesstoken: %s", accesstoken)
        urlconfig['X-Plex-Token'] = accesstoken
    for key in urlconfig:
        urlappend += '%s=%s&' % (key, urlconfig[key])
    # Resolve a section title to its numeric key.
    if not self.plex_section_is_int(config['section']):
        try:
            path = "/library/sections/"
            r = requests.get("http://%s:%d%s%s" % (config['plexserver'], config['port'], path, urlappend))
        except requests.RequestException as e:
            raise plugin.PluginError('Error retrieving source: %s' % e)
        dom = parseString(r.text.encode("utf-8"))
        for node in dom.getElementsByTagName('Directory'):
            if node.getAttribute('title') == config['section']:
                config['section'] = int(node.getAttribute('key'))
    if not self.plex_section_is_int(config['section']):
        raise plugin.PluginError('Could not find section \'%s\'' % config['section'])
    log.debug("Fetching http://%s:%d/library/sections/%s/%s%s",
              config['server'], config['port'], config['section'], config['selection'], urlappend)
    try:
        path = "/library/sections/%s/%s" % (config['section'], config['selection'])
        r = requests.get("http://%s:%d%s%s" % (config['plexserver'], config['port'], path, urlappend))
    except requests.RequestException as e:
        raise plugin.PluginError('There is no section with number %d. (%s)' % (config['section'], e))
    dom = parseString(r.text.encode("utf-8"))
    plexsectionname = dom.getElementsByTagName('MediaContainer')[0].getAttribute('title1')
    viewgroup = dom.getElementsByTagName('MediaContainer')[0].getAttribute('viewGroup')
    log.debug("Plex section \"%s\" is a \"%s\" section", plexsectionname, viewgroup)
    if viewgroup != "movie" and viewgroup != "show" and viewgroup != "episode":
        raise plugin.PluginError("Section is neither a movie nor tv show section!")
    # Which DOM element and which attributes carry the title/artwork
    # depends on the section type.
    domroot = "Directory"
    titletag = "title"
    if viewgroup == "episode":
        domroot = "Video"
        titletag = "grandparentTitle"
        thumbtag = "thumb"
        arttag = "art"
        seasoncovertag = "parentThumb"
        covertag = "grandparentThumb"
    elif viewgroup == "movie":
        domroot = "Video"
        titletag = "title"
        arttag = "art"
        seasoncovertag = "thumb"
        covertag = "thumb"
        if config['fetch'] == "thumb":
            raise plugin.PluginError("Movie sections does not have any thumbnails to download!")
    for node in dom.getElementsByTagName(domroot):
        e = Entry()
        e['plex_server'] = config['plexserver']
        e['plex_port'] = config['port']
        e['plex_section'] = config['section']
        e['plex_section_name'] = plexsectionname
        e['plex_episode_thumb'] = ''

        # Optional title clean-up passes.
        title = node.getAttribute(titletag)
        if config['strip_year']:
            title = re.sub(r'^(.*)\(\d{4}\)(.*)', r'\1\2', title)
        if config['strip_parens']:
            title = re.sub(r'\(.*?\)', r'', title)
            title = title.strip()
        if config['strip_non_alpha']:
            title = re.sub(r'[\(\)]', r'', title)
            title = re.sub(r'&', r'And', title)
            title = re.sub(r'[^A-Za-z0-9- \']', r'', title)
        if config['lowercase_title']:
            title = title.lower()
        if viewgroup == "show":
            e['title'] = title
            e['url'] = 'NULL'
            entries.append(e)
            # show ends here.
            continue
        e['plex_art'] = "http://%s:%d%s%s" % (config['server'], config['port'],
                                              node.getAttribute(arttag), urlappend)
        e['plex_cover'] = "http://%s:%d%s%s" % (config['server'], config['port'],
                                                node.getAttribute(covertag), urlappend)
        e['plex_season_cover'] = "http://%s:%d%s%s" % (config['server'], config['port'],
                                                       node.getAttribute(seasoncovertag), urlappend)
        if viewgroup == "episode":
            e['plex_thumb'] = "http://%s:%d%s%s" % (
                config['server'], config['port'], node.getAttribute('thumb'), urlappend)
            e['series_name'] = title
            e['plex_ep_name'] = node.getAttribute('title')
            season = int(node.getAttribute('parentIndex'))
            if node.getAttribute('parentIndex') == node.getAttribute('year'):
                # Date-based show: identify the episode by air date.
                season = node.getAttribute('originallyAvailableAt')
                filenamemap = "%s_%s%s_%s_%s_%s.%s"
                episode = ""
                e['series_id_type'] = 'date'
                e['series_date'] = season
            elif node.getAttribute('index'):
                episode = int(node.getAttribute('index'))
                filenamemap = "%s_%02dx%02d_%s_%s_%s.%s"
                e['series_season'] = season
                e['series_episode'] = episode
                e['series_id_type'] = 'ep'
                e['series_id'] = 'S%02dE%02d' % (season, episode)
            else:
                log.debug("Could not get episode number for '%s' (Hint, ratingKey: %s)",
                          title, node.getAttribute('ratingKey'))
                # NOTE(review): this aborts the whole section listing rather
                # than skipping just this item — preserved as-is, but it
                # possibly should be 'continue'.
                break
        elif viewgroup == "movie":
            filenamemap = "%s_%s_%s_%s.%s"

        e['plex_year'] = node.getAttribute('year')
        e['plex_added'] = datetime.fromtimestamp(int(node.getAttribute('addedAt')))
        e['plex_duration'] = node.getAttribute('duration')
        e['plex_summary'] = node.getAttribute('summary')
        e['plex_userrating'] = node.getAttribute('userrating')
        e['plex_key'] = node.getAttribute('ratingKey')
        count = node.getAttribute('viewCount')
        offset = node.getAttribute('viewOffset')
        if count:
            e['plex_status'] = "seen"
        elif offset:
            e['plex_status'] = "inprogress"
        else:
            e['plex_status'] = "unwatched"
        for media in node.getElementsByTagName('Media'):
            entry = Entry(e)
            vcodec = media.getAttribute('videoCodec')
            acodec = media.getAttribute('audioCodec')
            if media.hasAttribute('title'):
                entry['plex_media_title'] = media.getAttribute('title')
            if media.hasAttribute('optimizedForStreaming'):
                entry['plex_stream_optimized'] = media.getAttribute('optimizedForStreaming')
            # Artwork downloads are always jpg; files keep their container.
            if config['fetch'] == "file" or not config['fetch']:
                container = media.getAttribute('container')
            else:
                container = "jpg"
            resolution = media.getAttribute('videoResolution') + "p"
            for part in media.getElementsByTagName('Part'):
                # Pick the download key according to the `fetch` option.
                if config['fetch'] == "file" or not config['fetch']:
                    key = part.getAttribute('key')
                elif config['fetch'] == "art":
                    key = node.getAttribute(arttag)
                elif config['fetch'] == "cover":
                    # Fix: use the cover attribute, not the art attribute.
                    key = node.getAttribute(covertag)
                elif config['fetch'] == "season_cover":
                    key = node.getAttribute(seasoncovertag)
                elif config['fetch'] == "thumb":
                    key = node.getAttribute(thumbtag)
                duration = part.getAttribute('duration')
                entry['plex_title'] = title
                entry['title'] = title
                if config['original_filename']:
                    filename, fileext = os.path.splitext(basename(part.getAttribute('file')))
                    if config['fetch'] != 'file':
                        filename += ".jpg"
                    else:
                        filename = "%s%s" % (filename, fileext)
                else:
                    if viewgroup == "episode":
                        filename = filenamemap % (title.replace(" ", "."), season, episode, resolution, vcodec,
                                                  acodec, container)
                        entry['title'] = filename
                    elif viewgroup == "movie":
                        filename = filenamemap % (title.replace(" ", "."), resolution, vcodec,
                                                  acodec, container)
                        entry['title'] = filename
                entry['plex_url'] = "http://%s:%d%s%s" % (config['server'], config['port'], key, urlappend)
                entry['plex_path'] = key
                entry['url'] = "http://%s:%d%s%s" % (config['server'], config['port'], key, urlappend)
                entry['plex_duration'] = duration
                entry['filename'] = filename
                if key == "":
                    log.debug("Could not find anything in PMS to download. Next!")
                else:
                    entries.append(entry)
    return entries
@event('plugin.register')
def register_plugin():
    """Register the 'plex' input plugin with FlexGet."""
    plugin.register(InputPlex, 'plex', api_ver=2)
| |
import pytest
from unittest.mock import MagicMock, call
from typing import List, Tuple, Dict, Any
from opentrons.calibration_storage import modify, helpers
from opentrons.types import Mount, Point
from opentrons.hardware_control import pipette
from opentrons.config.pipette_config import load
from robot_server.service.errors import RobotServerError
from robot_server.service.session.models.command import CalibrationCommand
from robot_server.robot.calibration.pipette_offset.user_flow import \
PipetteOffsetCalibrationUserFlow
# Canned payload for the jog command used by the parametrized hardware tests.
stub_jog_data = {'vector': Point(1, 1, 1)}

# Maps each supported pipette model to the tip rack its calibration flow
# is expected to load.
pipette_map = {
    "p10_single_v1.5": "opentrons_96_tiprack_10ul",
    "p50_single_v1.5": "opentrons_96_tiprack_300ul",
    "p300_single_v1.5": "opentrons_96_tiprack_300ul",
    "p1000_single_v1.5": "opentrons_96_tiprack_1000ul",
    "p10_multi_v1.5": "opentrons_96_tiprack_10ul",
    "p50_multi_v1.5": "opentrons_96_tiprack_300ul",
    "p300_multi_v1.5": "opentrons_96_tiprack_300ul",
    "p20_single_v2.1": "opentrons_96_tiprack_20ul",
    "p300_single_v2.1": "opentrons_96_tiprack_300ul",
    "p1000_single_v2.1": "opentrons_96_tiprack_1000ul",
    "p20_multi_v2.1": "opentrons_96_tiprack_20ul",
    "p300_multi_v2.1": "opentrons_96_tiprack_300ul",
}
@pytest.fixture(params=pipette_map.keys())
def mock_hw_pipette_all_combos(request):
    """Yield a real Pipette object for every model in pipette_map."""
    model = request.param
    return pipette.Pipette(load(model, 'testId'),
                           {
                               'single': [0, 0, 0],
                               'multi': [0, 0, 0]
                           },
                           'testId')
@pytest.fixture(params=[Mount.RIGHT, Mount.LEFT])
def mock_hw_all_combos(hardware, mock_hw_pipette_all_combos, request):
    """Hardware mock with every pipette model attached on either mount.

    Mirrors the ``mock_hw`` fixture: relative and absolute moves update
    the tracked ``_current_pos`` so tests can assert positions.
    """
    mount = request.param
    hardware._attached_instruments = {mount: mock_hw_pipette_all_combos}
    hardware._current_pos = Point(0, 0, 0)

    async def async_mock(*args, **kwargs):
        pass

    async def async_mock_move_rel(*args, **kwargs):
        delta = kwargs.get('delta', Point(0, 0, 0))
        hardware._current_pos += delta

    async def async_mock_move_to(*args, **kwargs):
        to_pt = kwargs.get('abs_position', Point(0, 0, 0))
        hardware._current_pos = to_pt

    async def gantry_pos_mock(*args, **kwargs):
        return hardware._current_pos

    # Fix: track relative moves like the mock_hw fixture does — the helper
    # was previously defined but move_rel was wired to the no-op mock.
    hardware.move_rel = MagicMock(side_effect=async_mock_move_rel)
    hardware.pick_up_tip = MagicMock(side_effect=async_mock)
    hardware.drop_tip = MagicMock(side_effect=async_mock)
    hardware.gantry_position = MagicMock(side_effect=gantry_pos_mock)
    hardware.move_to = MagicMock(side_effect=async_mock_move_to)
    hardware.get_instrument_max_height.return_value = 180
    return hardware
@pytest.fixture
def mock_hw(hardware):
    """Hardware mock with a single P300 single-channel on the right mount.

    Relative and absolute moves update the tracked ``_current_pos`` so
    tests can assert positions; tip pick-up/drop are no-ops.
    """
    pip = pipette.Pipette(load("p300_single_v2.1", 'testId'),
                          {
                              'single': [0, 0, 0],
                              'multi': [0, 0, 0]
                          },
                          'testId')
    hardware._attached_instruments = {Mount.RIGHT: pip}
    hardware._current_pos = Point(0, 0, 0)

    async def async_mock(*args, **kwargs):
        pass

    async def async_mock_move_rel(*args, **kwargs):
        delta = kwargs.get('delta', Point(0, 0, 0))
        hardware._current_pos += delta

    async def async_mock_move_to(*args, **kwargs):
        to_pt = kwargs.get('abs_position', Point(0, 0, 0))
        hardware._current_pos = to_pt

    async def gantry_pos_mock(*args, **kwargs):
        return hardware._current_pos

    hardware.move_rel = MagicMock(side_effect=async_mock_move_rel)
    hardware.pick_up_tip = MagicMock(side_effect=async_mock)
    hardware.drop_tip = MagicMock(side_effect=async_mock)
    hardware.gantry_position = MagicMock(side_effect=gantry_pos_mock)
    hardware.move_to = MagicMock(side_effect=async_mock_move_to)
    hardware.get_instrument_max_height.return_value = 180
    return hardware
@pytest.fixture
def mock_user_flow(mock_hw):
    """Yield a calibration flow bound to the mocked hardware's occupied mount."""
    occupied_mounts = (mount for mount, pip in
                       mock_hw._attached_instruments.items() if pip)
    flow = PipetteOffsetCalibrationUserFlow(hardware=mock_hw,
                                            mount=next(occupied_mounts))
    yield flow
# (command, required flow state, request data, expected hardware method)
# tuples driving test_hw_calls.
hw_commands: List[Tuple[str, str, Dict[Any, Any], str]] = [
    (CalibrationCommand.jog, 'preparingPipette', stub_jog_data, 'move_rel'),
    (CalibrationCommand.pick_up_tip, 'preparingPipette', {}, 'pick_up_tip'),
    (CalibrationCommand.move_to_deck, 'inspectingTip', {}, 'move_to'),
    (CalibrationCommand.move_to_point_one, 'joggingToDeck', {}, 'move_to'),
    (CalibrationCommand.move_to_tip_rack, 'labwareLoaded', {}, 'move_to'),
]
async def test_move_to_tip_rack(mock_user_flow):
    """Moving to the tip rack should land 10 above well A1 of slot 8."""
    uf = mock_user_flow
    await uf.move_to_tip_rack()
    cur_pt = await uf._get_current_point(None)
    assert cur_pt == uf._deck['8'].wells()[0].top().point + Point(0, 0, 10)
async def test_jog(mock_user_flow):
    """Jogging should accumulate each relative move into the position."""
    flow = mock_user_flow
    jog_steps = [
        ((0, 0, 0.1), Point(0, 0, 0.1)),
        ((1, 0, 0), Point(1, 0, 0.1)),
    ]
    for vector, expected in jog_steps:
        await flow.jog(vector=vector)
        assert await flow._get_current_point(None) == expected
async def test_pick_up_tip(mock_user_flow):
    """Picking up a tip should record the pick-up location on the flow."""
    uf = mock_user_flow
    assert uf._tip_origin_pt is None
    await uf.pick_up_tip()
    # check that it saves the tip pick up location locally
    assert uf._tip_origin_pt == Point(0, 0, 0)
async def test_return_tip(mock_user_flow):
    """Returning a tip should move to the saved origin (lowered by the
    pipette's return-tip height) and then drop the tip."""
    uf = mock_user_flow
    uf._tip_origin_pt = Point(1, 1, 1)
    uf._hw_pipette._has_tip = True
    # Drop height is a fraction of the tip length below the saved origin.
    z_offset = uf._hw_pipette.config.return_tip_height * \
        uf._get_tip_length()
    await uf._return_tip()
    # should move to return tip
    move_calls = [
        call(
            mount=Mount.RIGHT,
            abs_position=Point(1, 1, 1 - z_offset),
            critical_point=uf._get_critical_point_override()
        ),
    ]
    uf._hardware.move_to.assert_has_calls(move_calls)
    uf._hardware.drop_tip.assert_called()
@pytest.mark.parametrize('command,current_state,data,hw_meth', hw_commands)
async def test_hw_calls(command, current_state, data, hw_meth, mock_user_flow):
    """Each calibration command should invoke its mapped hardware method."""
    mock_user_flow._current_state = current_state
    # z height reference must be present for moving to point one
    if command == CalibrationCommand.move_to_point_one:
        mock_user_flow._z_height_reference = 0.1
    await mock_user_flow.handle_command(command, data)
    getattr(mock_user_flow._hardware, hw_meth).assert_called()
def test_load_trash(mock_user_flow):
    """The flow should load the fixed trash into slot 12."""
    assert mock_user_flow._deck['12'].load_name == \
        'opentrons_1_trash_1100ml_fixed'
def test_load_deck(mock_user_flow):
    """The tip rack loaded in slot 8 should match the pipette model's rack."""
    uf = mock_user_flow
    pip_model = uf._hw_pipette.model
    tip_rack = pipette_map[pip_model]
    assert uf._deck['8'].load_name == tip_rack
@pytest.mark.parametrize(argnames="mount",
                         argvalues=[Mount.RIGHT, Mount.LEFT])
def test_no_pipette(hardware, mount):
    """Constructing the flow with no pipette on the mount should raise."""
    hardware._attached_instruments = {mount: None}
    with pytest.raises(RobotServerError) as error:
        PipetteOffsetCalibrationUserFlow(hardware=hardware,
                                         mount=mount)
    assert error.value.error.detail == f"No pipette present on {mount} mount"
async def test_save_pipette_calibration(mock_user_flow):
    """Saving the offset should persist the difference between the
    calibration reference point and the current gantry position."""
    uf = mock_user_flow

    def mock_save_pipette_offset(*args, **kwargs):
        pass

    # NOTE(review): this replaces modify.save_pipette_calibration at module
    # scope and never restores it — consider pytest's monkeypatch fixture.
    modify.save_pipette_calibration = \
        MagicMock(side_effect=mock_save_pipette_offset)
    uf._current_state = 'savingPointOne'
    await uf._hardware.move_to(
        mount=uf._mount,
        abs_position=Point(x=10, y=10, z=40),
        critical_point=uf._get_critical_point_override()
    )
    await uf.save_offset()
    tiprack_hash = helpers.hash_labware_def(uf._tip_rack._definition)
    offset = uf._cal_ref_point - Point(x=10, y=10, z=40)
    modify.save_pipette_calibration.assert_called_with(
        offset=offset,
        mount=uf._mount,
        pip_id=uf._hw_pipette.pipette_id,
        tiprack_hash=tiprack_hash,
        tiprack_uri=uf._tip_rack.uri
    )
| |
""" CumulusCI Tasks for running Apex Tests """
import html
import io
import json
import re
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
from cumulusci.core.exceptions import (
TaskOptionsError,
ApexTestException,
CumulusCIException,
)
from cumulusci.core.utils import process_bool_arg, process_list_arg, decode_to_unicode
from cumulusci.utils.http.requests_utils import safe_json_from_response
# Governor limits reported per test result, keyed by the field name on the
# ApexTestResultLimits record; SYNC/ASYNC give the allowed maximum for each
# execution context. `Label` is the heading used in the stats output.
APEX_LIMITS = {
    "Soql": {
        "Label": "TESTING_LIMITS: Number of SOQL queries",
        "SYNC": 100,
        "ASYNC": 200,
    },
    "Email": {
        "Label": "TESTING_LIMITS: Number of Email Invocations",
        "SYNC": 10,
        "ASYNC": 10,
    },
    "AsyncCalls": {
        "Label": "TESTING_LIMITS: Number of future calls",
        "SYNC": 50,
        "ASYNC": 50,
    },
    "DmlRows": {
        "Label": "TESTING_LIMITS: Number of DML rows",
        "SYNC": 10000,
        "ASYNC": 10000,
    },
    "Cpu": {"Label": "TESTING_LIMITS: Maximum CPU time", "SYNC": 10000, "ASYNC": 60000},
    "QueryRows": {
        "Label": "TESTING_LIMITS: Number of query rows",
        "SYNC": 50000,
        "ASYNC": 50000,
    },
    "Dml": {
        "Label": "TESTING_LIMITS: Number of DML statements",
        "SYNC": 150,
        "ASYNC": 150,
    },
    "MobilePush": {
        "Label": "TESTING_LIMITS: Number of Mobile Apex push calls",
        "SYNC": 10,
        "ASYNC": 10,
    },
    "Sosl": {
        "Label": "TESTING_LIMITS: Number of SOSL queries",
        "SYNC": 20,
        "ASYNC": 20,
    },
    "Callouts": {
        "Label": "TESTING_LIMITS: Number of callouts",
        "SYNC": 100,
        "ASYNC": 100,
    },
}
# Tooling API query for all results of one async test run; the subquery
# pulls the governor-limit usage records for each result. `{}` is filled
# with the AsyncApexJob Id.
TEST_RESULT_QUERY = """
SELECT Id,ApexClassId,TestTimestamp,
Message,MethodName,Outcome,
RunTime,StackTrace,
(SELECT
Id,Callouts,AsyncCalls,DmlRows,Email,
LimitContext,LimitExceptions,MobilePush,
QueryRows,Sosl,Cpu,Dml,Soql
FROM ApexTestResults)
FROM ApexTestResult
WHERE AsyncApexJobId='{}'
"""
class RunApexTests(BaseSalesforceApiTask):
    """Task to run Apex tests with the Tooling API and report results.

    This task optionally supports retrying unit tests that fail due to
    transitory issues or concurrency-related row locks. To enable retries,
    add one or more regular expressions to the list option `retry_failures`.
    When a test run fails, if all of the failures' error messages or stack traces
    match one of these regular expressions, each failed test will be retried by
    itself. This is often useful when running Apex tests in parallel; row locks
    may automatically be retried. Note that retries are supported whether or not
    the org has parallel Apex testing enabled.

    The ``retry_always`` option modifies this behavior: if a test run fails and
    any (not all) of the failures match the specified regular expressions,
    all of the failed tests will be retried in serial. This is helpful when
    underlying row locking errors are masked by custom exceptions.

    A useful base configuration for projects wishing to use retries is:

    .. code-block:: yaml

        retry_failures:
            - "unable to obtain exclusive access to this record"
            - "UNABLE_TO_LOCK_ROW"
            - "connection was cancelled here"
        retry_always: True

    Some projects' unit tests produce so many concurrency errors that
    it's faster to execute the entire run in serial mode than to use retries.
    Serial and parallel mode are configured in the scratch org definition file."""

    # Tooling API version used for queries and the enqueue call.
    api_version = "38.0"
    name = "RunApexTests"
    task_options = {
        "test_name_match": {
            "description": (
                "Pattern to find Apex test classes to run "
                '("%" is wildcard). Defaults to '
                "project__test__name_match from project config. "
                "Comma-separated list for multiple patterns."
            ),
            "required": True,
        },
        "test_name_exclude": {
            "description": (
                "Query to find Apex test classes to exclude "
                '("%" is wildcard). Defaults to '
                "project__test__name_exclude from project config. "
                "Comma-separated list for multiple patterns."
            )
        },
        "namespace": {
            "description": (
                "Salesforce project namespace. Defaults to "
                + "project__package__namespace"
            )
        },
        "managed": {
            "description": (
                "If True, search for tests in the namespace "
                + "only. Defaults to False"
            )
        },
        "poll_interval": {
            "description": ("Seconds to wait between polling for Apex test results.")
        },
        "junit_output": {
            "description": "File name for JUnit output. Defaults to test_results.xml"
        },
        "json_output": {
            "description": "File name for json output. Defaults to test_results.json"
        },
        "retry_failures": {
            "description": "A list of regular expression patterns to match against "
            "test failures. If failures match, the failing tests are retried in "
            "serial mode."
        },
        "retry_always": {
            "description": "By default, all failures must match retry_failures to perform "
            "a retry. Set retry_always to True to retry all failed tests if any failure matches."
        },
        "required_org_code_coverage_percent": {
            "description": "Require at least X percent code coverage across the org following the test run.",
            "usage": "-o required_org_code_coverage_percent PERCENTAGE",
        },
        "verbose": {
            "description": "By default, only failures get detailed output. "
            "Set verbose to True to see all passed test methods."
        },
    }
def _init_options(self, kwargs):
    """Resolve task options, filling defaults from the project config,
    compiling the retry_failures regexes and parsing the required
    coverage percentage.

    Raises TaskOptionsError for an invalid regex or coverage value.
    """
    # Modern zero-argument super(), consistent with _init_task below.
    super()._init_options(kwargs)
    self.options["test_name_match"] = self.options.get(
        "test_name_match", self.project_config.project__test__name_match
    )
    self.options["test_name_exclude"] = self.options.get(
        "test_name_exclude", self.project_config.project__test__name_exclude
    )
    if self.options["test_name_exclude"] is None:
        self.options["test_name_exclude"] = ""
    self.options["namespace"] = self.options.get(
        "namespace", self.project_config.project__package__namespace
    )
    self.options["junit_output"] = self.options.get(
        "junit_output", "test_results.xml"
    )
    self.options["json_output"] = self.options.get(
        "json_output", "test_results.json"
    )
    self.options["retry_failures"] = process_list_arg(
        self.options.get("retry_failures", [])
    )
    # Compile retry patterns up front so a bad regex fails fast.
    compiled_res = []
    for regex in self.options["retry_failures"]:
        try:
            compiled_res.append(re.compile(regex))
        except re.error as e:
            raise TaskOptionsError(
                "An invalid regular expression ({}) was provided ({})".format(
                    regex, e
                )
            )
    self.options["retry_failures"] = compiled_res
    self.options["retry_always"] = process_bool_arg(
        self.options.get("retry_always") or False
    )
    self.verbose = process_bool_arg(self.options.get("verbose") or False)
    self.counts = {}
    if "required_org_code_coverage_percent" in self.options:
        try:
            # Accept either "75" or "75%".
            self.code_coverage_level = int(
                str(self.options["required_org_code_coverage_percent"]).rstrip("%")
            )
        except ValueError:
            raise TaskOptionsError(
                f"Invalid code coverage level {self.options['required_org_code_coverage_percent']}"
            )
    else:
        self.code_coverage_level = None
# pylint: disable=W0201
def _init_class(self):
    """Reset per-run bookkeeping state."""
    self.classes_by_id = {}  # ApexClass Id -> Name
    self.classes_by_name = {}  # ApexClass Name -> Id
    self.job_id = None  # AsyncApexJob Id of the current run
    self.results_by_class_name = {}  # class name -> {method name -> result}
    self.result = None
    self.retry_details = None  # class Id -> [method names] to retry
def _get_namespace_filter(self):
    """Return the namespace literal for the ApexClass SOQL WHERE clause.

    In managed mode this is the quoted project namespace; otherwise the
    SOQL keyword ``null``.
    """
    if not self.options.get("managed"):
        return "null"
    namespace = self.options.get("namespace")
    if not namespace:
        raise TaskOptionsError(
            "Running tests in managed mode but no namespace available."
        )
    return "'{}'".format(namespace)
def _get_test_class_query(self):
    """Build the SOQL query selecting the test classes to run, honoring
    the namespace filter plus test_name_match / test_name_exclude."""
    namespace = self._get_namespace_filter()
    # Split by commas to allow multiple class name matching options
    test_name_match = self.options["test_name_match"]
    included_tests = []
    for pattern in test_name_match.split(","):
        if pattern:
            included_tests.append("Name LIKE '{}'".format(pattern))
    # Add any excludes to the where clause
    test_name_exclude = self.options.get("test_name_exclude", "")
    excluded_tests = []
    for pattern in test_name_exclude.split(","):
        if pattern:
            excluded_tests.append("(NOT Name LIKE '{}')".format(pattern))
    # Get all test classes for namespace
    query = "SELECT Id, Name FROM ApexClass " + "WHERE NamespacePrefix = {}".format(
        namespace
    )
    if included_tests:
        query += " AND ({})".format(" OR ".join(included_tests))
    if excluded_tests:
        query += " AND {}".format(" AND ".join(excluded_tests))
    return query
def _get_test_classes(self):
    """Query the Tooling API for matching test classes; return the raw result."""
    query = self._get_test_class_query()
    # Run the query
    self.logger.info("Running query: {}".format(query))
    result = self.tooling.query_all(query)
    self.logger.info("Found {} test classes".format(result["totalSize"]))
    return result
def _get_test_methods_for_class(self, class_name):
    """Return the names of all test methods in *class_name*, read from
    the class's Tooling API symbol table.

    Raises CumulusCIException when the symbol table is unavailable.
    """
    result = self.tooling.query(
        f"SELECT SymbolTable FROM ApexClass WHERE Name='{class_name}'"
    )
    try:
        methods = result["records"][0]["SymbolTable"]["methods"]
    except (TypeError, IndexError, KeyError):
        raise CumulusCIException(
            f"Unable to acquire symbol table for failed Apex class {class_name}"
        )
    # A method is a test method when any of its annotations is
    # @IsTest or the legacy testMethod keyword.
    return [
        method["name"]
        for method in methods
        if any(
            annotation["name"].lower() in ("istest", "testmethod")
            for annotation in method.get("annotations", [])
        )
    ]
def _is_retriable_error_message(self, error_message):
    """Return True if *error_message* matches any retry_failures pattern."""
    # Generator expression (not a materialized list) lets any()
    # short-circuit on the first matching pattern.
    return any(
        regex.search(error_message) for regex in self.options["retry_failures"]
    )
def _is_retriable_failure(self, test_result):
    """Return True when the failure's message or stack trace is retriable."""
    message = test_result["Message"] or ""
    stack_trace = test_result["StackTrace"] or ""
    return self._is_retriable_error_message(
        message
    ) or self._is_retriable_error_message(stack_trace)
def _get_test_results(self, allow_retries=True):
    """Collect results for self.job_id into results_by_class_name.

    Also synthesizes failure records for class-level failures that left
    no per-method results, and — when *allow_retries* is True — records
    which failed methods qualify for a retry in self.retry_details.
    """
    # We need to query at both the test result and test queue item level.
    # Some concurrency problems manifest as all or part of the class failing,
    # without leaving behind any visible ApexTestResult records.
    # See https://salesforce.stackexchange.com/questions/262893/any-way-to-get-consistent-test-counts-when-parallel-testing-is-used

    # First, gather the Ids of failed test classes.
    test_classes = self.tooling.query_all(
        "SELECT Id, Status, ExtendedStatus, ApexClassId FROM ApexTestQueueItem "
        + "WHERE ParentJobId = '{}' AND Status = 'Failed'".format(self.job_id)
    )
    class_level_errors = {
        each_class["ApexClassId"]: each_class["ExtendedStatus"]
        for each_class in test_classes["records"]
    }
    result = self.tooling.query_all(TEST_RESULT_QUERY.format(self.job_id))
    if allow_retries:
        self.retry_details = {}
    for test_result in result["records"]:
        class_name = self.classes_by_id[test_result["ApexClassId"]]
        self.results_by_class_name[class_name][
            test_result["MethodName"]
        ] = test_result
        self.counts[test_result["Outcome"]] += 1
    # If we have class-level failures that did not come with line-level
    # failure details, report those as well.
    for class_id, error in class_level_errors.items():
        class_name = self.classes_by_id[class_id]
        self.logger.error(
            f"Class {class_name} failed to run some tests with the message {error}. Applying error to unit test results."
        )
        # In Spring '20, we cannot get symbol tables for managed classes.
        if self.options.get("managed"):
            self.logger.error(
                f"Cannot access symbol table for managed class {class_name}. Failure will not be retried."
            )
            continue
        # Get all the method names for this class
        test_methods = self._get_test_methods_for_class(class_name)
        for test_method in test_methods:
            # If this method was not run due to a class-level failure,
            # synthesize a failed result.
            # If we're retrying and fail again, do the same.
            if (
                test_method not in self.results_by_class_name[class_name]
                or self.results_by_class_name[class_name][test_method]["Outcome"]
                == "Fail"
            ):
                self.results_by_class_name[class_name][test_method] = {
                    "ApexClassId": class_id,
                    "MethodName": test_method,
                    "Outcome": "Fail",
                    "Message": f"Containing class {class_name} failed with message {error}",
                    "StackTrace": "",
                    "RunTime": 0,
                }
                self.counts["Fail"] += 1
    if allow_retries:
        for class_name, results in self.results_by_class_name.items():
            for test_result in results.values():
                # Determine whether this failure is retriable.
                if test_result["Outcome"] == "Fail" and allow_retries:
                    can_retry_this_failure = self._is_retriable_failure(test_result)
                    if can_retry_this_failure:
                        self.counts["Retriable"] += 1
                    # Even if this failure is not retriable per se,
                    # persist its details if we might end up retrying
                    # all failures.
                    if self.options["retry_always"] or can_retry_this_failure:
                        self.retry_details.setdefault(
                            test_result["ApexClassId"], []
                        ).append(test_result["MethodName"])
def _process_test_results(self):
    """Log a per-class summary plus all failures; return the flat list
    of result dicts used for JUnit/JSON output."""
    test_results = []
    class_names = list(self.results_by_class_name.keys())
    class_names.sort()
    for class_name in class_names:
        has_failures = any(
            result["Outcome"] in ["Fail", "CompileFail"]
            for result in self.results_by_class_name[class_name].values()
        )
        # Only name the class when something failed (or verbose is on).
        if has_failures or self.verbose:
            self.logger.info(f"Class: {class_name}")
        method_names = list(self.results_by_class_name[class_name].keys())
        method_names.sort()
        for method_name in method_names:
            result = self.results_by_class_name[class_name][method_name]
            message = f"\t{result['Outcome']}: {result['MethodName']}"
            duration = result["RunTime"]
            result["stats"] = self._get_stats_from_result(result)
            if duration:
                message += f" ({duration}ms)"
            test_results.append(
                {
                    "Children": result.get("children", None),
                    "ClassName": decode_to_unicode(class_name),
                    "Method": decode_to_unicode(result["MethodName"]),
                    "Message": decode_to_unicode(result["Message"]),
                    "Outcome": decode_to_unicode(result["Outcome"]),
                    "StackTrace": decode_to_unicode(result["StackTrace"]),
                    "Stats": result.get("stats", None),
                    "TestTimestamp": result.get("TestTimestamp", None),
                }
            )
            if result["Outcome"] in ["Fail", "CompileFail"]:
                self.logger.info(message)
                self.logger.info(f"\tMessage: {result['Message']}")
                self.logger.info(f"\tStackTrace: {result['StackTrace']}")
            elif self.verbose:
                self.logger.info(message)
    self.logger.info("-" * 80)
    self.logger.info(
        "Pass: {} Retried: {} Fail: {} CompileFail: {} Skip: {}".format(
            self.counts["Pass"],
            self.counts["Retriable"],
            self.counts["Fail"],
            self.counts["CompileFail"],
            self.counts["Skip"],
        )
    )
    self.logger.info("-" * 80)
    # Repeat all failures in one numbered list at the end of the log.
    if self.counts["Fail"] or self.counts["CompileFail"]:
        self.logger.error("-" * 80)
        self.logger.error("Failing Tests")
        self.logger.error("-" * 80)
        counter = 0
        for result in test_results:
            if result["Outcome"] in ["Fail", "CompileFail"]:
                counter += 1
                self.logger.error(
                    "{}: {}.{} - {}".format(
                        counter,
                        result["ClassName"],
                        result["Method"],
                        result["Outcome"],
                    )
                )
                self.logger.error(f"\tMessage: {result['Message']}")
                self.logger.error(f"\tStackTrace: {result['StackTrace']}")
    return test_results
def _get_stats_from_result(self, result):
    """Build the per-test stats dict: duration plus governor-limit usage
    (when the result carries ApexTestResultLimits records)."""
    stats = {"duration": result["RunTime"]}
    if result.get("ApexTestResults", None):
        limit_record = result["ApexTestResults"]["records"][0]
        context = limit_record["LimitContext"]
        for limit_name, details in APEX_LIMITS.items():
            stats[details["Label"]] = {
                "used": limit_record[limit_name],
                "allowed": details[context],
            }
    return stats
def _enqueue_test_run(self, class_ids):
    """Kick off an asynchronous Apex test run via the Tooling API.

    *class_ids* is either an iterable of class Ids (run whole classes)
    or a dict mapping class Id -> list of method names (run specific
    methods). Returns the parsed JSON response body, used as the job id.
    """
    if isinstance(class_ids, dict):
        body = {
            "tests": [
                {"classId": class_id, "testMethods": class_ids[class_id]}
                for class_id in class_ids
            ]
        }
    else:
        body = {"classids": ",".join(class_ids)}
    return safe_json_from_response(
        self.tooling._call_salesforce(
            method="POST",
            url=self.tooling.base_url + "runTestsAsynchronous",
            json=body,
        )
    )
def _init_task(self):
    """Derive the `managed` option when not given explicitly: treat the
    run as managed when the configured namespace is installed in the org."""
    super()._init_task()
    if "managed" in self.options:
        self.options["managed"] = process_bool_arg(self.options["managed"] or False)
    else:
        namespace = self.options.get("namespace")
        self.options["managed"] = (
            bool(namespace) and namespace in self.org_config.installed_packages
        )
def _run_task(self):
    """Find matching test classes, run them, apply the retry policy,
    write the reports and raise ApexTestException on any failures."""
    result = self._get_test_classes()
    if result["totalSize"] == 0:
        return
    for test_class in result["records"]:
        self.classes_by_id[test_class["Id"]] = test_class["Name"]
        self.classes_by_name[test_class["Name"]] = test_class["Id"]
        self.results_by_class_name[test_class["Name"]] = {}
    self.logger.info("Queuing tests for execution...")
    self.counts = {
        "Pass": 0,
        "Fail": 0,
        "CompileFail": 0,
        "Skip": 0,
        "Retriable": 0,
    }
    self.job_id = self._enqueue_test_run(
        (str(id) for id in self.classes_by_id.keys())
    )
    self._wait_for_tests()
    self._get_test_results()
    # Did we get back retriable test results? Check our retry policy,
    # then enqueue new runs individually, until either (a) all retriable
    # tests succeed or (b) a test fails.
    able_to_retry = (self.counts["Retriable"] and self.options["retry_always"]) or (
        self.counts["Retriable"] and self.counts["Retriable"] == self.counts["Fail"]
    )
    if not able_to_retry:
        self.counts["Retriable"] = 0
    else:
        self._attempt_retries()
    test_results = self._process_test_results()
    self._write_output(test_results)
    if self.counts.get("Fail") or self.counts.get("CompileFail"):
        raise ApexTestException(
            "{} tests failed and {} tests failed compilation".format(
                self.counts.get("Fail"), self.counts.get("CompileFail")
            )
        )
    # Coverage is only meaningful when the org has no managed install of
    # this project's namespace.
    if self.code_coverage_level:
        if self.options.get("namespace") not in self.org_config.installed_packages:
            self._check_code_coverage()
        else:
            self.logger.info(
                "This org contains a managed installation; not checking code coverage."
            )
    else:
        self.logger.info(
            "No code coverage level specified; not checking code coverage."
        )
def _check_code_coverage(self):
result = self.tooling.query("SELECT PercentCovered FROM ApexOrgWideCoverage")
coverage = result["records"][0]["PercentCovered"]
if coverage < self.code_coverage_level:
raise ApexTestException(
f"Organization-wide code coverage of {coverage}% is below required level of {self.code_coverage_level}"
)
self.logger.info(
f"Organization-wide code coverage of {coverage}% meets expectations."
)
    def _attempt_retries(self):
        """Re-run each retriable failed test method as its own test run.

        ``self.retry_details`` maps class id -> list of method names to
        retry. Failure counts are reset first; each retried method runs
        in its own enqueued job with retries disabled, so a second
        failure is final.
        """
        total_method_retries = sum(
            [len(test_list) for test_list in self.retry_details.values()]
        )
        self.logger.warning(
            "Retrying {} failed methods from {} test classes".format(
                total_method_retries, len(self.retry_details)
            )
        )
        # Reset the failure count; the per-method reruns below re-increment it.
        self.counts["Fail"] = 0
        for class_id, test_list in self.retry_details.items():
            for each_test in test_list:
                self.logger.warning(
                    "Retrying {}.{}".format(self.classes_by_id[class_id], each_test)
                )
                # One job per method; allow_retries=False prevents recursion.
                self.job_id = self._enqueue_test_run({class_id: [each_test]})
                self._wait_for_tests()
                self._get_test_results(allow_retries=False)
        # If the retry failed, report the remaining failures.
        if self.counts["Fail"]:
            self.logger.error("Test retry failed.")
def _wait_for_tests(self):
self.poll_complete = False
self.poll_interval_s = int(self.options.get("poll_interval", 1))
self.poll_count = 0
self._poll()
    def _poll_action(self):
        """One polling pass: tally queue-item statuses and log progress.

        Queries every ApexTestQueueItem for the current job, counts items
        per status, and sets ``self.poll_complete`` once all items have
        reached a terminal state (Completed/Failed/Aborted).
        """
        self.result = self.tooling.query_all(
            "SELECT Id, Status, ApexClassId FROM ApexTestQueueItem "
            + "WHERE ParentJobId = '{}'".format(self.job_id)
        )
        counts = {
            "Aborted": 0,
            "Completed": 0,
            "Failed": 0,
            "Holding": 0,
            "Preparing": 0,
            "Processing": 0,
            "Queued": 0,
        }
        processing_class_id = None
        total_test_count = self.result["totalSize"]
        for test_queue_item in self.result["records"]:
            counts[test_queue_item["Status"]] += 1
            if test_queue_item["Status"] == "Processing":
                processing_class_id = test_queue_item["ApexClassId"]
        processing_class = ""
        # Only name the in-flight class when exactly one is processing;
        # otherwise processing_class_id is just the last one seen.
        if counts["Processing"] == 1:
            processing_class = f" ({self.classes_by_id[processing_class_id]})"
        self.logger.info(
            "Completed: {}  Processing: {}{}  Queued: {}".format(
                counts["Completed"],
                counts["Processing"],
                processing_class,
                counts["Queued"],
            )
        )
        if (
            total_test_count
            == counts["Completed"] + counts["Failed"] + counts["Aborted"]
        ):
            self.logger.info("Apex tests completed")
            self.poll_complete = True
def _write_output(self, test_results):
junit_output = self.options["junit_output"]
with io.open(junit_output, mode="w", encoding="utf-8") as f:
f.write('<testsuite tests="{}">\n'.format(len(test_results)))
for result in test_results:
s = ' <testcase classname="{}" name="{}"'.format(
result["ClassName"], result["Method"]
)
if (
"Stats" in result
and result["Stats"]
and "duration" in result["Stats"]
):
s += ' time="{}"'.format(result["Stats"]["duration"])
if result["Outcome"] in ["Fail", "CompileFail"]:
s += ">\n"
s += ' <failure type="failed" '
if result["Message"]:
s += 'message="{}"'.format(html.escape(result["Message"]))
s += ">"
if result["StackTrace"]:
s += "<![CDATA[{}]]>".format(html.escape(result["StackTrace"]))
s += "</failure>\n"
s += " </testcase>\n"
else:
s += " />\n"
f.write(str(s))
f.write("</testsuite>")
json_output = self.options["json_output"]
with io.open(json_output, mode="w", encoding="utf-8") as f:
f.write(str(json.dumps(test_results, indent=4)))
| |
import json
import io
import mock
from nose.tools import (
assert_equal, assert_false, assert_is_instance, raises, assert_raises,
assert_regexp_matches)
import werkzeug, werkzeug.test
from .. import wsgi, metrics, py3comp
def env(path, **kwargs):
    """Build a WSGI environ for *path*, applying any overrides from kwargs."""
    return werkzeug.test.EnvironBuilder(path, environ_overrides=kwargs).get_environ()
def req(body):
    """Build a werkzeug Request whose body is *body* serialized as JSON."""
    builder = werkzeug.test.EnvironBuilder(
        data=json.dumps(body), content_type="application/json"
    )
    return builder.get_request(cls=werkzeug.wrappers.Request)
def check_dispatching(mw, url, method, expected):
    """Assert that *url*/*method* dispatches to *expected*.

    *expected* may be either an endpoint callable or an exception class
    that ``match()`` is expected to raise.
    """
    bound = mw.url_map.bind_to_environ(env(url, REQUEST_METHOD=method))
    try:
        endpoint, _ = bound.match()
    except expected:
        # Matching raised exactly the expected exception type: success.
        return
    assert_equal(endpoint, expected)
def test_dispatching():
    """Nose test generator: each URL/method pair must dispatch as expected.

    The yielded callable binds ``expected`` as a default argument: nose
    executes the lambdas after the loop has finished, so a plain closure
    would see only the *last* value of the loop variable (late binding)
    and every generated test would check the same expectation.
    """
    tests = [
        ("/_app-metrics", 'GET', werkzeug.exceptions.NotFound),
        ("/_app-metrics/metrics", 'GET', wsgi.handle_metrics_list),
        ("/_app-metrics/metrics", 'POST', werkzeug.exceptions.MethodNotAllowed),
        ("/_app-metrics/metrics/test", 'GET', wsgi.handle_metric_show),
        ("/_app-metrics/metrics/test", 'PUT', wsgi.handle_metric_new),
        ("/_app-metrics/metrics/test", 'POST', wsgi.handle_metric_update),
        ("/_app-metrics/metrics/test", 'DELETE', wsgi.handle_metric_delete),
        ("/_app-metrics/metrics/test", 'OPTIONS', werkzeug.exceptions.MethodNotAllowed),
        ("/_app-metrics/metrics/test/sub", 'GET', werkzeug.routing.NotFound),
    ]
    mw = wsgi.AppMetricsMiddleware(None)
    for url, method, expected in tests:
        yield lambda url, method, expected=expected: check_dispatching(mw, url, method, expected), url, method
def test_dispatching_root():
    """Same dispatch checks with the middleware mounted at the root prefix.

    ``expected`` is bound as a lambda default to avoid the late-binding
    closure bug: nose runs the lambdas only after the loop completes.
    """
    tests = [
        ("/metrics", 'GET', wsgi.handle_metrics_list),
        ("/metrics", 'POST', werkzeug.exceptions.MethodNotAllowed),
        ("/metrics/test", 'GET', wsgi.handle_metric_show),
        ("/metrics/test", 'PUT', wsgi.handle_metric_new),
        ("/metrics/test", 'POST', wsgi.handle_metric_update),
        ("/metrics/test", 'DELETE', wsgi.handle_metric_delete),
        ("/metrics/test", 'OPTIONS', werkzeug.exceptions.MethodNotAllowed),
        ("/metrics/test/sub", 'GET', werkzeug.routing.NotFound),
    ]
    mw = wsgi.AppMetricsMiddleware(None, "")
    for url, method, expected in tests:
        yield lambda url, method, expected=expected: check_dispatching(mw, url, method, expected), url, method
class TestAppMetricsMiddleware(object):
    """Tests for AppMetricsMiddleware: routing, error translation, responses."""
    def setUp(self):
        # Fake downstream WSGI app and start_response callable; patch the
        # list handler so each test controls its return value / side effect.
        self.app = mock.Mock()
        self.start_response = mock.Mock()
        self.patch = mock.patch('appmetrics.wsgi.handle_metrics_list')
        self.handler = self.patch.start()
        self.mw = wsgi.AppMetricsMiddleware(self.app)
    def tearDown(self):
        self.patch.stop()
    def test_call_not_matching(self):
        """Requests outside the metrics prefix pass through to the app."""
        res = self.mw(env("/"), self.start_response)
        assert_equal(res, self.app.return_value)
        assert_false(self.start_response.called)
        assert_equal(
            self.app.call_args_list,
            [mock.call(env("/"), self.start_response)])
    def test_call_not_matching_2(self):
        """A non-metrics path also passes straight through."""
        res = self.mw(env("/test"), self.start_response)
        assert_equal(res, self.app.return_value)
        assert_false(self.start_response.called)
        assert_equal(
            self.app.call_args_list,
            [mock.call(env("/test"), self.start_response)])
    def test_call_not_matching_3(self):
        """An unknown sub-path under the prefix is delegated to the app."""
        res = self.mw(env("/_app-metrics/test/sub"), self.start_response)
        assert_equal(res, self.app.return_value)
        assert_false(self.start_response.called)
        assert_equal(
            self.app.call_args_list,
            [mock.call(env("/_app-metrics/test/sub"), self.start_response)])
    def test_call_with_invalid_status(self):
        """An unexpected handler exception becomes a 500 response."""
        self.handler.side_effect = ValueError()
        self.mw(env("/_app-metrics/metrics", REQUEST_METHOD='GET'), self.start_response)
        assert_equal(
            self.start_response.call_args_list,
            [mock.call("500 INTERNAL SERVER ERROR", mock.ANY)]
        )
    def test_call_with_error_implicit(self):
        """An HTTPException without a description uses the default text."""
        self.handler.side_effect = werkzeug.exceptions.BadRequest()
        body = self.mw(env("/_app-metrics/metrics", REQUEST_METHOD='GET'), self.start_response)
        expected_body = json.dumps(werkzeug.exceptions.BadRequest.description)
        assert_equal(b"".join(body), expected_body.encode('utf8'))
        expected_headers = [
            ('Content-Type', 'application/json'),
            ('Content-Length', str(len(expected_body)))
        ]
        assert_equal(
            self.start_response.call_args_list,
            [mock.call("400 BAD REQUEST", expected_headers)]
        )
    def test_call_with_error_explicit(self):
        """A custom HTTPException description is serialized into the body."""
        self.handler.side_effect = werkzeug.exceptions.BadRequest(description="bad request received")
        body = self.mw(env("/_app-metrics/metrics", REQUEST_METHOD='GET'), self.start_response)
        expected_body = json.dumps("bad request received")
        assert_equal(b"".join(body), expected_body.encode('utf8'))
        expected_headers = [
            ('Content-Type', 'application/json'),
            ('Content-Length', str(len(expected_body)))
        ]
        assert_equal(
            self.start_response.call_args_list,
            [mock.call("400 BAD REQUEST", expected_headers)]
        )
    def test_call_with_invalid_method(self):
        """A disallowed method yields 405 with an Allow header."""
        self.handler.side_effect = werkzeug.exceptions.BadRequest()
        body = self.mw(env("/_app-metrics/metrics", REQUEST_METHOD='POST'), self.start_response)
        expected_body = json.dumps(werkzeug.exceptions.MethodNotAllowed.description)
        assert_equal(b"".join(body), expected_body.encode('utf8'))
        assert_equal(
            self.start_response.call_args_list,
            [mock.call("405 METHOD NOT ALLOWED", mock.ANY)]
        )
        headers = dict(self.start_response.call_args_list[0][0][1])
        assert_equal(headers['Content-Type'], "application/json")
        assert_equal(headers['Content-Length'], str(len(expected_body)))
        # Allow header order is unspecified; compare as a set.
        allow = {x.strip() for x in headers['Allow'].split(",")}
        assert_equal(allow, {"HEAD", "GET"})
    def test_call_ok(self):
        """A successful handler result is returned with 200 and JSON headers."""
        self.handler.return_value = json.dumps("results")
        body = self.mw(env("/_app-metrics/metrics", REQUEST_METHOD='GET'), self.start_response)
        expected_headers = [
            ('Content-Type', 'application/json'),
            ('Content-Length', str(len(self.handler.return_value)))
        ]
        assert_equal(
            self.start_response.call_args_list,
            [mock.call("200 OK", expected_headers)]
        )
        expected = json.dumps("results")
        assert_equal(b"".join(body), expected.encode('utf8'))
    def test_call_with_unicode(self):
        """Unicode handler results are encoded to UTF-8 on both Py2 and Py3."""
        if py3comp.PY3:
            self.handler.return_value = json.dumps("results")
        else:
            self.handler.return_value = json.dumps("results").decode('utf8')
        body = self.mw(env("/_app-metrics/metrics", REQUEST_METHOD='GET'), self.start_response)
        expected_body = json.dumps("results")
        assert_equal(b"".join(body), expected_body.encode('utf8'))
        expected_headers = [
            ('Content-Type', 'application/json'),
            ('Content-Length', str(len(expected_body)))
        ]
        assert_equal(
            self.start_response.call_args_list,
            [mock.call("200 OK", expected_headers)]
        )
class TestWSGIHandlers(object):
    """Tests for the individual wsgi handler functions.

    setUp/tearDown snapshot and restore the global metrics registry and
    tag mapping so each test starts from (and leaves behind) a clean slate.
    """
    def setUp(self):
        # Snapshot *copies* of the global state: the previous code kept a
        # reference and then cleared the dict, destroying the very data
        # tearDown was supposed to restore.
        self.original_registry = dict(metrics.REGISTRY)
        metrics.REGISTRY.clear()
        self.original_tags = dict(metrics.TAGS)
        metrics.TAGS.clear()
    def tearDown(self):
        metrics.REGISTRY.clear()
        metrics.REGISTRY.update(self.original_registry)
        metrics.TAGS.clear()
        metrics.TAGS.update(self.original_tags)
    @mock.patch('appmetrics.wsgi.metrics.metrics')
    def test_handle_metrics_list(self, metrics):
        """The list handler JSON-encodes the metric names."""
        metrics.return_value = ["test1", "test2"]
        assert_equal(wsgi.handle_metrics_list(mock.Mock()), '["test1", "test2"]')
    @mock.patch('appmetrics.wsgi.metrics.metric')
    def test_handle_metric_show(self, metric):
        """The show handler JSON-encodes the metric's current value."""
        metric().get.return_value = "this is a test"
        assert_equal(wsgi.handle_metric_show(mock.Mock(), "test"), '"this is a test"')
    @mock.patch('appmetrics.wsgi.metrics.metric')
    def test_handle_metric_show_not_found(self, metric):
        """An unknown metric name maps to 404 with a helpful description."""
        metric.side_effect = KeyError("key")
        with assert_raises(werkzeug.exceptions.NotFound) as exc:
            wsgi.handle_metric_show(mock.Mock(), "test")
        assert_equal(exc.exception.description, "No such metric: 'test'")
    def test_handle_metric_delete(self):
        """Deleting an existing metric removes it and reports 'deleted'."""
        with mock.patch.dict('appmetrics.metrics.REGISTRY', dict(test=mock.Mock())):
            res = wsgi.handle_metric_delete(mock.Mock(), "test")
            assert_equal(res, "deleted")
            assert_equal(metrics.REGISTRY, dict())
    def test_handle_metric_delete_not_found(self):
        """Deleting a missing metric is a no-op reporting 'not deleted'."""
        with mock.patch.dict('appmetrics.metrics.REGISTRY', dict(none="test")):
            res = wsgi.handle_metric_delete(mock.Mock(), "test")
            assert_equal(res, "not deleted")
            assert_equal(metrics.REGISTRY, dict(none="test"))
    @mock.patch('appmetrics.wsgi.metrics.tags')
    def test_handle_tags_list(self, tags):
        """The tags handler JSON-encodes the sorted tag names."""
        tags.return_value = dict(tag1=["test1", "test2"], tag2=["test3"])
        assert_equal(wsgi.handle_tags_list(mock.Mock()), '["tag1", "tag2"]')
    def test_handle_tag_add(self):
        """Tagging a registered metric records it in the TAGS mapping."""
        metrics.REGISTRY["test1"] = mock.Mock()
        res = wsgi.handle_tag_add(mock.Mock(), "tag1", "test1")
        assert_equal(res, "")
        assert_equal(metrics.TAGS, {"tag1": {"test1"}})
    @raises(werkzeug.exceptions.BadRequest)
    def test_handle_tag_add_invalid(self):
        """Tagging an unregistered metric raises BadRequest."""
        res = wsgi.handle_tag_add(mock.Mock(), "tag1", "test1")
        assert_equal(res, "")
        assert_equal(metrics.TAGS, {"tag1": {"test1"}})
    def test_handle_untag_not_existing(self):
        """Untagging a missing association reports 'not deleted'."""
        res = wsgi.handle_untag(mock.Mock(), "tag1", "test1")
        assert_equal(res, "not deleted")
    def test_handle_untag(self):
        """Untagging an existing association reports 'deleted'."""
        metrics.TAGS["tag1"] = {"test1"}
        res = wsgi.handle_untag(mock.Mock(), "tag1", "test1")
        assert_equal(res, "deleted")
    @raises(werkzeug.exceptions.NotFound)
    def test_handle_tag_show_not_found(self):
        """Showing an unknown tag raises NotFound."""
        wsgi.handle_tag_show(mock.Mock(), "tag1")
    def test_handle_tag_show(self):
        """Without query args, the tag's metric names are listed."""
        metrics.new_histogram("test1")
        metrics.tag("test1", "tag1")
        res = wsgi.handle_tag_show(mock.Mock(), "tag1")
        assert_equal(res, '["test1"]')
    def test_handle_tag_show_no_expand(self):
        """expand=false returns only the metric names."""
        metrics.new_histogram("test1")
        metrics.tag("test1", "tag1")
        res = wsgi.handle_tag_show(mock.Mock(args={"expand": 'false'}), "tag1")
        assert_equal(res, '["test1"]')
    @mock.patch('appmetrics.metrics.metrics_by_tag')
    def test_handle_tag_show_expand(self, mbt):
        """expand=true returns the expanded metrics payload.

        Renamed from a duplicate ``test_handle_tag_show_no_expand``: the
        second definition silently shadowed the first, so the no-expand
        case was never actually executed.
        """
        mbt.return_value = "this is a test"
        metrics.new_histogram("test1")
        metrics.tag("test1", "tag1")
        res = wsgi.handle_tag_show(mock.Mock(args={"expand": 'true'}), "tag1")
        assert_equal(res, '"this is a test"')
    @raises(werkzeug.exceptions.UnsupportedMediaType)
    def test_get_body_no_content_type(self):
        """A request without a content type is rejected."""
        request = werkzeug.wrappers.Request(dict(CONTENT_LENGTH=10))
        wsgi.get_body(request)
    @raises(werkzeug.exceptions.UnsupportedMediaType)
    def test_get_body_bad_content_type(self):
        """A non-JSON content type is rejected."""
        request = werkzeug.wrappers.Request(dict(CONTENT_LENGTH=10, CONTENT_TYPE='text/html'))
        wsgi.get_body(request)
    def test_get_body_bad_content(self):
        """A malformed JSON body raises BadRequest('invalid json')."""
        env = {'CONTENT_LENGTH': 4, 'CONTENT_TYPE': "application/json", 'wsgi.input': io.StringIO(u"test wrong")}
        request = werkzeug.wrappers.Request(env)
        with assert_raises(werkzeug.exceptions.BadRequest) as exc:
            wsgi.get_body(request)
        assert_equal(exc.exception.description, "invalid json")
    def test_get_body(self):
        """Only CONTENT_LENGTH bytes are read and JSON-decoded."""
        env = {'CONTENT_LENGTH': 6, 'CONTENT_TYPE': "application/json", 'wsgi.input': io.StringIO(u'"test" with garbage')}
        request = werkzeug.wrappers.Request(env)
        assert_equal(wsgi.get_body(request), 'test')
    def test_handle_metric_new_missing_type(self):
        """Creating a metric without a type is a BadRequest."""
        with assert_raises(werkzeug.exceptions.BadRequest) as exc:
            wsgi.handle_metric_new(req(dict()), "test")
        assert_equal(exc.exception.description, "metric type not provided")
    def test_handle_metric_new_invalid_type(self):
        """Creating a metric with an unknown type is a BadRequest."""
        with assert_raises(werkzeug.exceptions.BadRequest) as exc:
            wsgi.handle_metric_new(req(dict(type="xxx")), "test")
        assert_regexp_matches(exc.exception.description, "invalid metric type: .*'xxx'")
    def test_handle_metric_new_app_error(self):
        """Creating a duplicate metric surfaces the application error."""
        wsgi.handle_metric_new(req(dict(type="gauge")), "test")
        with assert_raises(werkzeug.exceptions.BadRequest) as exc:
            wsgi.handle_metric_new(req(dict(type="gauge")), "test")
        assert_equal(exc.exception.description, "can't create metric gauge('test'): Metric test already exists of type Gauge")
    def test_handle_metric_new_generic_error(self):
        """Non-application factory errors are wrapped in a generic message."""
        new_gauge = mock.Mock(side_effect=ValueError("an error"))
        with mock.patch.dict('appmetrics.wsgi.metrics.METRIC_TYPES', gauge=new_gauge):
            with assert_raises(werkzeug.exceptions.BadRequest) as exc:
                wsgi.handle_metric_new(req(dict(type="gauge")), "test")
            assert_equal(exc.exception.description, "can't create metric gauge('test')")
    def test_handle_metric_new_metric(self):
        """A valid request registers a new metric of the requested type."""
        res = wsgi.handle_metric_new(req(dict(type="gauge")), "test")
        assert_equal(res, "")
        metric = metrics.metric("test")
        assert_is_instance(metric, metrics.simple_metrics.Gauge)
    def test_handle_metric_update_missing_value(self):
        """Updating without a value is a BadRequest."""
        with assert_raises(werkzeug.exceptions.BadRequest) as exc:
            wsgi.handle_metric_update(req(dict()), "test")
        assert_equal(exc.exception.description, "metric value not provided")
    @raises(werkzeug.exceptions.NotFound)
    def test_handle_metric_update_missing_metric(self):
        """Updating an unknown metric raises NotFound."""
        wsgi.handle_metric_update(req(dict(value=1)), "test")
    def test_handle_metric_update(self):
        """A valid update notifies the metric with the new value."""
        metric = metrics.new_gauge("test")
        res = wsgi.handle_metric_update(req(dict(value=1.5)), "test")
        assert_equal(res, "")
        assert_equal(metric.get(), dict(kind="gauge", value=1.5))
| |
from freezegun import freeze_time
from app.dao import dao_create_record
from app.models import MAGAZINE, TICKET_STATUS_UNUSED, OrderError
from app.utils.time import get_local_time
from tests.db import (
create_article,
create_book,
create_event,
create_email,
create_fee,
create_member,
create_order,
create_speaker,
create_ticket
)
class WhenUsingEventModel(object):
    """Behavior checks for the Event model."""
    def it_shows_event_info_id_on_str(self, db, db_session):
        """str() of an event embeds the event id."""
        event = create_event()
        expected = '<Event: id {}>'.format(event.id)
        assert str(event) == expected
class WhenUsingFeeModel(object):
    """Behavior checks for the Fee model."""
    def it_shows_fee_json_on_serialize(self, db, db_session):
        """serialize() returns the expected JSON-ready dict."""
        fee = create_fee(fee=5, conc_fee=3)
        expected = {
            'id': str(fee.id),
            'event_type_id': str(fee.event_type_id),
            'fee': fee.fee,
            'conc_fee': fee.conc_fee,
            'multi_day_fee': fee.multi_day_fee,
            'multi_day_conc_fee': fee.multi_day_conc_fee,
            'valid_from': fee.valid_from.isoformat()
        }
        assert fee.serialize() == expected
class WhenUsingSpeakerModel(object):
    """Behavior checks for the Speaker model."""
    def it_shows_speaker_json_on_serialize(self, db, db_session):
        """serialize() exposes id, title, name and a null parent."""
        speaker = create_speaker()
        expected = {
            'id': str(speaker.id),
            'title': speaker.title,
            'name': speaker.name,
            'parent_id': None
        }
        assert speaker.serialize() == expected
    def it_gets_last_name_correctly(self, db, db_session):
        """last_name is the final word of the full name."""
        speaker = create_speaker(name='John Smith')
        assert speaker.last_name == 'Smith'
class WhenUsingArticleModel(object):
    """Behavior checks for Article.serialize_summary truncation and cleanup."""
    def it_shows_article_summary_json_on_serialize(self, db, db_session):
        """Short content is returned unmodified in both summary fields."""
        article = create_article()
        assert article.serialize_summary() == {
            'id': str(article.id),
            'author': article.author,
            'title': article.title,
            'short_content': article.content,
            'very_short_content': article.content,
            'image_filename': 'article.jpg'
        }
    def it_shows_shortened_content_article_summary_json_on_serialize_long_content(self, db_session):
        """Long content is truncated with an ellipsis at two lengths.

        The loop records the content length after 31 and 111 words
        (indexes 30 and 110), which are the expected cut-offs for the
        very-short and short summaries respectively; the trailing space
        is dropped via ``- 1``.
        """
        long_content = ''
        short_content_length = 0
        very_short_content_length = 0
        for i in range(120):
            long_content += '{}some-text '.format(i)
            if i == 30:
                very_short_content_length = len(long_content) - 1
            if i == 110:
                short_content_length = len(long_content) - 1
        article = create_article(content=long_content)
        assert article.serialize_summary() == {
            'id': str(article.id),
            'author': article.author,
            'title': article.title,
            'short_content': long_content[:short_content_length] + '...',
            'very_short_content': long_content[:very_short_content_length] + '...',
            'image_filename': 'article.jpg'
        }
    def it_removes_html_tags_on_article_summary(self, db_session):
        """HTML tags are stripped before truncation lengths are applied."""
        long_content_with_tags = '<h1>'
        clean_long_content = ''
        clean_very_short_content_length = 0
        clean_short_content_length = 0
        for i in range(120):
            long_content_with_tags += '{}<div>text</div> '.format(i)
            # Expected lengths are computed against the tag-free text.
            clean_long_content += '{}text '.format(i)
            if i == 30:
                clean_very_short_content_length = len(clean_long_content) - 1
            if i == 110:
                clean_short_content_length = len(clean_long_content) - 1
        article = create_article(content=long_content_with_tags)
        assert article.serialize_summary() == {
            'id': str(article.id),
            'author': article.author,
            'title': article.title,
            'short_content': clean_long_content[:clean_short_content_length] + '...',
            'very_short_content': clean_long_content[:clean_very_short_content_length] + '...',
            'image_filename': 'article.jpg'
        }
class WhenUsingBookModel(object):
    """Behavior checks for the Book model."""
    def it_shows_book_json_on_serialize(self, db_session):
        """serialize() returns the expected JSON-ready dict."""
        book = create_book()
        expected = {
            'id': str(book.id),
            'old_id': book.old_id,
            'price': str(book.price),
            'buy_code': book.buy_code,
            'author': book.author,
            'title': book.title,
            'description': book.description,
            'image_filename': book.image_filename,
            'created_at': get_local_time(book.created_at).strftime('%Y-%m-%d')
        }
        assert book.serialize() == expected
class WhenUsingEmailModel:
    """Behavior checks for Email.serialize across email types and members."""
    def it_shows_email_json_on_serialize(self, db, db_session):
        """An event email serializes with zero member counts."""
        email = create_email(
            created_at='2019-06-01T10:00:00', send_starts_at='2019-06-02T11:00:00', send_after='2019-06-02T12:00:00')
        assert email.serialize() == {
            'id': str(email.id),
            'subject': 'workshop: test title',
            'event_id': str(email.event_id),
            'magazine_id': None,
            'old_id': email.old_id,
            'old_event_id': email.old_event_id,
            'created_at': get_local_time(email.created_at).strftime('%Y-%m-%d %H:%M'),
            'extra_txt': u'test extra text',
            'details': u'test event details',
            'replace_all': False,
            'email_type': u'event',
            'email_state': u'draft',
            'send_starts_at': '2019-06-02',
            'expires': '2019-06-21',
            'send_after': get_local_time(email.send_after).strftime('%Y-%m-%d %H:%M'),
            'emails_sent_counts': {
                'success': 0,
                'failed': 0,
                'total_active_members': 0
            }
        }
    def it_shows_email_json_on_serialize_only_members_active(self, db, db_session, sample_email):
        """total_active_members counts only members active before expiry."""
        email = create_email(
            created_at='2019-06-01T10:00:00',
            send_starts_at='2019-06-02T11:00:00',
            send_after='2019-06-02T12:00:00'
        )
        create_member(name='Active 1', email='test1@example.com', created_at='2019-04-09T19:00:00')
        create_member(name='Active 2', email='test2@example.com', created_at='2019-06-09T19:00:00')
        # member created after email expired not counted
        create_member(name='Test 3', email='test3@example.com', created_at='2019-08-09T19:00:00')
        # member that is now inactive but was active before email expired is counted
        create_member(
            name='Active past',
            active=False, last_updated='2019-06-19T19:00:00',
            email='test4@example.com', created_at='2019-05-09T19:00:00'
        )
        # member that is now inactive and was created before email is ignored
        create_member(
            name='Active past before',
            active=False, last_updated='2019-03-19T19:00:00',
            email='test5@example.com', created_at='2019-02-09T19:00:00'
        )
        assert email.serialize() == {
            'id': str(email.id),
            'subject': 'workshop: test title',
            'event_id': str(email.event_id),
            'magazine_id': None,
            'old_id': email.old_id,
            'old_event_id': email.old_event_id,
            'created_at': get_local_time(email.created_at).strftime('%Y-%m-%d %H:%M'),
            'extra_txt': u'test extra text',
            'details': u'test event details',
            'replace_all': False,
            'email_type': u'event',
            'email_state': u'draft',
            'send_starts_at': '2019-06-02',
            'expires': '2019-06-21',
            'send_after': get_local_time(email.send_after).strftime('%Y-%m-%d %H:%M'),
            'emails_sent_counts': {
                'success': 0,
                'failed': 0,
                # Active 1, Active 2 and 'Active past' qualify (see above).
                'total_active_members': 3
            }
        }
    def it_shows_magazine_email_json_on_serialize(self, db, db_session, sample_magazine):
        """A magazine email serializes with magazine fields and no event."""
        email = create_email(
            email_type=MAGAZINE, magazine_id=sample_magazine.id,
            old_event_id=None,
            created_at='2019-06-30T10:00:00', send_starts_at='2019-07-01T11:00:00')
        assert email.serialize() == {
            'id': str(email.id),
            'subject': u'New Acropolis bi-monthly magazine: Test magazine',
            'event_id': None,
            'magazine_id': str(sample_magazine.id),
            'old_id': email.old_id,
            'old_event_id': None,
            'created_at': get_local_time(email.created_at).strftime('%Y-%m-%d %H:%M'),
            'extra_txt': u'test extra text',
            'details': u'test event details',
            'replace_all': False,
            'email_type': u'magazine',
            'email_state': u'draft',
            'send_starts_at': '2019-07-01',
            'expires': '2019-07-15',
            'send_after': None,
            'emails_sent_counts': {
                'success': 0,
                'failed': 0,
                'total_active_members': 0
            }
        }
class WhenUsingOrderModel:
    """Behavior checks for Order.serialize with books, tickets and errors."""
    @freeze_time("2021-06-07T23:00:00")
    def it_shows_order_serialized(self, db_session, sample_book, sample_event_with_dates):
        """serialize() nests books, tickets (with event data) and errors.

        Time is frozen so created_at/last_updated timestamps are stable.
        """
        book = create_book(
            old_id=None,
            price='7.00',
            buy_code='112233AABBCC',
            title='Nature',
            author='Mr White',
            description='Some info about Nature\r\n\"Something in quotes\"',
            image_filename='nature.jpg'
        )
        event_dates = sample_event_with_dates.get_sorted_event_dates()
        ticket = create_ticket(
            status=TICKET_STATUS_UNUSED,
            event_id=sample_event_with_dates.id,
            eventdate_id=event_dates[0]['id']
        )
        order = create_order(books=[sample_book, book], tickets=[ticket])
        error = OrderError(error='Test error', order_id=order.id)
        dao_create_record(error)
        assert order.serialize() == {
            'id': str(order.id),
            'txn_id': order.txn_id,
            'txn_type': order.txn_type,
            'buyer_name': order.buyer_name,
            'created_at': get_local_time(order.created_at).strftime('%Y-%m-%d %H:%M'),
            'payment_status': order.payment_status,
            'payment_total': f"{order.payment_total:.2f}",
            'is_donation': order.is_donation,
            'address_country_code': order.address_country_code,
            'address_street': order.address_street,
            'address_city': order.address_city,
            'address_postal_code': order.address_postal_code,
            'address_state': order.address_state,
            'address_country': order.address_country,
            'delivery_zone': order.delivery_zone,
            'delivery_status': order.delivery_status,
            'delivery_sent': order.delivery_sent,
            'refund_issued': order.refund_issued,
            'delivery_balance': f"{order.delivery_balance:.2f}",
            'notes': order.notes,
            # Books are expected sorted: 'Nature' before sample_book's title.
            'books': [
                {
                    'id': str(book.id),
                    'price': str(book.price),
                    'buy_code': book.buy_code,
                    'image_filename': book.image_filename,
                    'old_id': book.old_id,
                    'title': book.title,
                    'author': book.author,
                    'description': book.description,
                    'quantity': 1
                },
                {
                    'id': str(sample_book.id),
                    'price': str(sample_book.price),
                    'buy_code': sample_book.buy_code,
                    'image_filename': sample_book.image_filename,
                    'old_id': sample_book.old_id,
                    'title': sample_book.title,
                    'author': sample_book.author,
                    'description': sample_book.description,
                    'quantity': 1
                },
            ],
            'tickets': [
                {
                    'id': str(ticket.id),
                    'event_id': str(ticket.event_id),
                    'old_id': ticket.old_id,
                    'ticket_type': ticket.ticket_type,
                    'eventdate_id': str(ticket.eventdate_id),
                    'name': order.buyer_name,
                    'price': ticket.price,
                    'last_updated': get_local_time(ticket.last_updated).strftime('%Y-%m-%d %H:%M'),
                    'created_at': get_local_time(ticket.created_at).strftime('%Y-%m-%d %H:%M'),
                    'status': ticket.status,
                    'ticket_number': ticket.ticket_number,
                    'event': {
                        'booking_code': None,
                        'conc_fee': 3,
                        'description': 'test description',
                        'event_state': 'draft',
                        'event_type': 'workshop',
                        'event_type_id': str(sample_event_with_dates.event_type_id),
                        'fee': 5,
                        'has_expired': True,
                        'show_banner_text': True,
                        'id': str(sample_event_with_dates.id),
                        'image_filename': None,
                        'multi_day_conc_fee': 10,
                        'multi_day_fee': 12,
                        'old_id': 1,
                        'reject_reasons': [],
                        'sub_title': None,
                        'title': 'test_title',
                        'venue': {'address': '10 London Street, N1 1NN',
                                  'default': True,
                                  'directions': 'By bus: 100, 111, 123',
                                  'id': str(sample_event_with_dates.venue.id),
                                  'name': 'Head office',
                                  'old_id': 1}
                    },
                    'event_date': {
                        'end_time': None,
                        'event_datetime': '2018-01-01 19:00',
                        'event_id': str(ticket.event_id),
                        'id': str(ticket.event_date.id),
                        'speakers': []
                    },
                }
            ],
            'errors': [
                {
                    'error': 'Test error',
                    'id': str(error.id)
                }
            ]
        }
| |
#!/usr/bin/env python
import argparse
import datetime
import decimal
import itertools
import os
import socket
import struct
import sys
import tempfile

import gpudb
# Text codec shims: on Python 2 the bytes/unicode arguments to encode/decode
# must be spelled out explicitly; on Python 3 UTF-8 is the default.
# CHAR columns are stored as fixed-width, NUL-padded, byte-REVERSED fields,
# hence the [::-1] reversal in both directions.
if sys.version_info < (3,):
    def _decode_char(b):
        # Undo the byte reversal, strip NUL padding, then decode.
        return b[::-1].rstrip(b"\x00").decode("utf-8", errors="replace")
    def _decode_string(b):
        return b.decode("utf-8", errors="replace")
    def _encode_char(s, size):
        # Pad to the fixed width with NULs, then reverse the bytes.
        return s.encode("utf-8", errors="replace").ljust(size, b"\x00")[size - 1::-1]
    def _encode_string(s):
        return s.encode("utf-8", errors="replace")
else:
    def _decode_char(b):
        # Undo the byte reversal, strip NUL padding, then decode.
        return b[::-1].rstrip(b"\x00").decode(errors="replace")
    def _decode_string(b):
        return b.decode(errors="replace")
    def _encode_char(s, size):
        # Pad to the fixed width with NULs, then reverse the bytes.
        return s.encode(errors="replace").ljust(size, b"\x00")[size - 1::-1]
    def _encode_string(s):
        return s.encode(errors="replace")
def _decode_date(value):
return datetime.date(1900 + (value >> 21), (value >> 17) & 0b1111, (value >> 12) & 0b11111)
def _decode_datetime(value):
return datetime.datetime(1900 + (value >> 53), (value >> 49) & 0b1111, (value >> 44) & 0b11111,
(value >> 39) & 0b11111, (value >> 33) & 0b111111, (value >> 27) & 0b111111, ((value >> 17) & 0b1111111111) * 1000)
def _decode_time(value):
return datetime.time(value >> 26, (value >> 20) & 0b111111, (value >> 14) & 0b111111, ((value >> 4) & 0b1111111111) * 1000)
def _encode_date(value):
return ((value.year - 1900) << 21) | (value.month << 17) | (value.day << 12)
def _encode_datetime(value):
return ((value.year - 1900) << 53) | (value.month << 49) | (value.day << 44) \
| (value.hour << 39) | (value.minute << 33) | (value.second << 27) | ((value.microsecond // 1000) << 17)
def _encode_time(value):
return (value.hour << 26) | (value.minute << 20) | (value.second << 14) | ((value.microsecond // 1000) << 4)
# Pre-compiled struct codecs for the fixed-width binary column encodings.
# "=" means native byte order with standard sizes and no padding; the
# bare "Ns" formats are fixed-length byte strings (used for CHAR columns).
_char1_struct = struct.Struct("c")
_char2_struct = struct.Struct("2s")
_char4_struct = struct.Struct("4s")
_char8_struct = struct.Struct("8s")
_char16_struct = struct.Struct("16s")
_char32_struct = struct.Struct("32s")
_char64_struct = struct.Struct("64s")
_char128_struct = struct.Struct("128s")
_char256_struct = struct.Struct("256s")
_double_struct = struct.Struct("=d")
_float_struct = struct.Struct("=f")
_int8_struct = struct.Struct("=b")
_int16_struct = struct.Struct("=h")
_int32_struct = struct.Struct("=i")
_int64_struct = struct.Struct("=q")
_uint32_struct = struct.Struct("=I")
_uint64_struct = struct.Struct("=Q")
# File functions
def read_dict(f):
    """Read a count-prefixed mapping of string keys to string values."""
    entries = {}
    for _ in range(read_uint64(f)):
        key = read_string(f)
        entries[key] = read_string(f)
    return entries
def read_string(f):
    """Read a length-prefixed string from *f* and decode it.

    Raises RuntimeError if the stream ends before the full payload.
    """
    size = read_uint64(f)
    raw = f.read(size)
    if len(raw) < size:
        raise RuntimeError("EOF reached")
    return _decode_string(raw)
def read_uint64(f):
    """Read a native-endian unsigned 64-bit integer from *f*.

    Raises RuntimeError if fewer than 8 bytes remain.
    """
    raw = f.read(8)
    if len(raw) < 8:
        raise RuntimeError("EOF reached")
    (number,) = _uint64_struct.unpack(raw)
    return number
def write_dict(f, value):
    """Write *value* as a count-prefixed sequence of key/value strings."""
    write_uint64(f, len(value))
    for key, val in value.items():
        write_string(f, key)
        write_string(f, val)
def write_string(f, value):
    """Write *value* to *f* as length-prefixed encoded bytes."""
    encoded = _encode_string(value)
    write_uint64(f, len(encoded))
    f.write(encoded)
def write_uint64(f, value):
    """Write *value* to *f* as a native-endian unsigned 64-bit integer."""
    f.write(_uint64_struct.pack(value))
# Table functions
class ColumnType(object):
    """Single-bit flag codes identifying a column's on-disk encoding.

    Each constant occupies a distinct bit so a code can double as a
    bitmask. The values are part of the dump file format written/read by
    this script, so they must not be renumbered.
    """
    BYTES = 0x0000002
    CHAR1 = 0x0080000
    CHAR2 = 0x0100000
    CHAR4 = 0x0001000
    CHAR8 = 0x0002000
    CHAR16 = 0x0004000
    CHAR32 = 0x0200000
    CHAR64 = 0x0400000
    CHAR128 = 0x0800000
    CHAR256 = 0x1000000
    DATE = 0x2000000
    DATETIME = 0x0000200
    DECIMAL = 0x8000000
    DOUBLE = 0x0000010
    FLOAT = 0x0000020
    INT = 0x0000040
    INT8 = 0x0020000
    INT16 = 0x0040000
    IPV4 = 0x0008000
    LONG = 0x0000080
    STRING = 0x0000001
    TIME = 0x4000000
    TIMESTAMP = 0x0010000
def get_column_dt(column):
    """Map a gpudb record column to this dump format's ColumnType code.

    The base Avro type selects the family; column properties narrow it
    (e.g. an INT column flagged INT8, or a STRING column flagged DATE).
    """
    base_types = gpudb.GPUdbRecordColumn._ColumnType
    props = column.column_properties
    if column.column_type == base_types.BYTES:
        return ColumnType.BYTES
    if column.column_type == base_types.DOUBLE:
        return ColumnType.DOUBLE
    if column.column_type == base_types.FLOAT:
        return ColumnType.FLOAT
    if column.column_type == base_types.INT:
        # Narrower int flavors are indicated via column properties.
        if gpudb.GPUdbColumnProperty.INT8 in props:
            return ColumnType.INT8
        if gpudb.GPUdbColumnProperty.INT16 in props:
            return ColumnType.INT16
        return ColumnType.INT
    if column.column_type == base_types.LONG:
        if gpudb.GPUdbColumnProperty.TIMESTAMP in props:
            return ColumnType.TIMESTAMP
        return ColumnType.LONG
    # String-based columns: the first matching sub-type property wins;
    # a plain variable-length string carries none of them.
    for prop_name in (
        "CHAR1", "CHAR2", "CHAR4", "CHAR8", "CHAR16", "CHAR32",
        "CHAR64", "CHAR128", "CHAR256", "DATE", "DATETIME",
        "DECIMAL", "IPV4", "TIME",
    ):
        if getattr(gpudb.GPUdbColumnProperty, prop_name) in props:
            return getattr(ColumnType, prop_name)
    return ColumnType.STRING
def get_dt_size(dt):
    """Return the fixed on-disk size in bytes for ColumnType code *dt*.

    Variable-length types (BYTES, STRING) report 8: their fixed portion
    is a 64-bit offset/length into the companion variable-data file.
    Raises KeyError for unknown codes.
    """
    sizes = {
        ColumnType.BYTES: 8,
        ColumnType.CHAR1: 1,
        ColumnType.CHAR2: 2,
        ColumnType.CHAR4: 4,
        ColumnType.CHAR8: 8,
        ColumnType.CHAR16: 16,
        ColumnType.CHAR32: 32,
        ColumnType.CHAR64: 64,
        ColumnType.CHAR128: 128,
        ColumnType.CHAR256: 256,
        ColumnType.DATE: 4,
        ColumnType.DATETIME: 8,
        ColumnType.DECIMAL: 8,
        ColumnType.DOUBLE: 8,
        ColumnType.FLOAT: 4,
        ColumnType.INT: 4,
        ColumnType.INT8: 1,
        ColumnType.INT16: 2,
        ColumnType.IPV4: 4,
        ColumnType.LONG: 8,
        ColumnType.STRING: 8,
        ColumnType.TIME: 4,
        ColumnType.TIMESTAMP: 8,
    }
    return sizes[dt]
def read_column(f, column):
    """Read one column's entry from control file *f* and prepare decoding.

    Returns a dict holding the column's open data/null/var file handles,
    its type, fixed-value size, and (for fixed-width types) a decoder
    callable; returns None if the control-file entry does not match
    *column* (name, type, or null/var file presence).
    """
    result = {}
    result["name"] = column.name
    if read_string(f) != column.name:
        return None
    dt = get_column_dt(column)
    result["dt"] = dt
    if read_uint64(f) != dt:
        return None
    # Main data file: fixed-width values, or uint64 offsets for var types.
    result["data"] = open(read_string(f), "rb")
    filename = read_string(f)
    if gpudb.GPUdbColumnProperty.NULLABLE in column.column_properties:
        # Nullable columns must have a null-bitmap file; others must not.
        if not filename:
            return None
        else:
            result["null_data"] = open(filename, "rb")
    elif filename:
        return None
    else:
        result["null_data"] = None
    filename = read_string(f)
    if dt == ColumnType.BYTES or dt == ColumnType.STRING:
        # Variable-length columns must have a var-data file; others must not.
        if not filename:
            return None
        else:
            result["var_size"] = os.path.getsize(filename)
            result["var_data"] = open(filename, "rb")
            result["var_pos"] = -1  # sentinel: first offset not yet read
    elif filename:
        return None
    else:
        result["var_data"] = None
    result["size"] = get_dt_size(dt)
    if not result["var_data"]:
        # Decoder from the fixed-size binary representation to a Python value.
        result["decode_data"] = {
            ColumnType.CHAR1: lambda value: _decode_char(_char1_struct.unpack(value)[0]),
            ColumnType.CHAR2: lambda value: _decode_char(_char2_struct.unpack(value)[0]),
            ColumnType.CHAR4: lambda value: _decode_char(_char4_struct.unpack(value)[0]),
            ColumnType.CHAR8: lambda value: _decode_char(_char8_struct.unpack(value)[0]),
            ColumnType.CHAR16: lambda value: _decode_char(_char16_struct.unpack(value)[0]),
            ColumnType.CHAR32: lambda value: _decode_char(_char32_struct.unpack(value)[0]),
            ColumnType.CHAR64: lambda value: _decode_char(_char64_struct.unpack(value)[0]),
            ColumnType.CHAR128: lambda value: _decode_char(_char128_struct.unpack(value)[0]),
            ColumnType.CHAR256: lambda value: _decode_char(_char256_struct.unpack(value)[0]),
            ColumnType.DATE: lambda value: _decode_date(_int32_struct.unpack(value)[0]).strftime("%Y-%m-%d"),
            ColumnType.DATETIME: lambda value: _decode_datetime(_int64_struct.unpack(value)[0]).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
            ColumnType.DECIMAL: lambda value: decimal.Decimal(_int64_struct.unpack(value)[0]).scaleb(-4),
            ColumnType.DOUBLE: lambda value: _double_struct.unpack(value)[0],
            ColumnType.FLOAT: lambda value: _float_struct.unpack(value)[0],
            ColumnType.INT: lambda value: _int32_struct.unpack(value)[0],
            ColumnType.INT8: lambda value: _int8_struct.unpack(value)[0],
            ColumnType.INT16: lambda value: _int16_struct.unpack(value)[0],
            # BUGFIX: socket.inet_ntoa() takes the packed 4-byte address, not
            # an unpacked int -- passing the int raised TypeError.
            # NOTE(review): assumes the file stores the address as 4 bytes in
            # network byte order; confirm against the writer's format.
            ColumnType.IPV4: lambda value: socket.inet_ntoa(value),
            ColumnType.LONG: lambda value: _int64_struct.unpack(value)[0],
            ColumnType.TIME: lambda value: _decode_time(_int32_struct.unpack(value)[0]).strftime("%H:%M:%S.%f")[:-3],
            ColumnType.TIMESTAMP: lambda value: _int64_struct.unpack(value)[0]
        }[dt]
    return result
def read_table(f, db):
    """Read one output table's column files (listed in control file *f*),
    decode the records, and insert them into *db* in batches of 10000.

    Honors args.dryrun (decode and count only, no insert). Raises
    RuntimeError if the table no longer exists or its current type does
    not match the column layout recorded in the control file.
    """
    table = read_string(f)
    res = db.show_table(table_name=table, options={"no_error_if_not_exists": "true"})
    if res["status_info"]["status"] != "OK":
        raise RuntimeError(res["status_info"]["message"])
    # show_table reports existing tables under "table_names" (as checked in
    # write_table); the previous "table_name" key raised KeyError instead of
    # producing this clean error.
    if not res["table_names"]:
        raise RuntimeError("Table " + table + " does not exist")
    record_type = gpudb.GPUdbRecordType(schema_string=res["type_schemas"][0], column_properties=res["properties"][0])
    columns = []
    if read_uint64(f) != len(record_type.columns):
        raise RuntimeError("Table " + table + " type mismatch")
    for type_column in record_type.columns:
        column = read_column(f, type_column)
        if column is None:
            raise RuntimeError("Table " + table + " type mismatch")
        columns.append(column)
    records = []
    record_count = 0
    # Read one record per pass. When any column's data is exhausted the inner
    # loop breaks, leaving the record short, which terminates the outer loop.
    while True:
        record = []
        for column in columns:
            dt = column["dt"]
            data = column["data"]
            null_data = column["null_data"]
            var_data = column["var_data"]
            if var_data:
                # Var-length value: the data file holds uint64 offsets into
                # the var file; the value spans [var_pos, next_var_pos).
                var_pos = column["var_pos"]
                if var_pos == -1:
                    # First record: read the initial offset.
                    var_pos = data.read(8)
                    if len(var_pos) < 8:
                        break
                    var_pos = _uint64_struct.unpack(var_pos)[0]
                next_var_pos = data.read(8)
                if len(next_var_pos) < 8:
                    # No more offsets: last value runs to end of var file.
                    next_var_pos = column["var_size"]
                else:
                    next_var_pos = _uint64_struct.unpack(next_var_pos)[0]
                column["var_pos"] = next_var_pos
                if null_data:
                    null_value = null_data.read(1)
                    if len(null_value) < 1:
                        break
                    if null_value == b"\x01":
                        record.append(None)
                        continue
                if next_var_pos < var_pos:
                    break
                if var_pos == next_var_pos:
                    value = b""
                else:
                    value_len = next_var_pos - var_pos
                    value = var_data.read(value_len)
                    if len(value) < value_len:
                        break
                if dt == ColumnType.STRING:
                    # Strings are NUL-terminated in the var file.
                    value = _decode_string(value[:-1])
                record.append(value)
            else:
                size = column["size"]
                value = data.read(size)
                if len(value) < size:
                    break
                if null_data:
                    null_value = null_data.read(1)
                    if len(null_value) < 1:
                        break
                    if null_value == b"\x01":
                        record.append(None)
                        continue
                record.append(column["decode_data"](value))
        if len(record) < len(columns):
            break
        records.append(gpudb.GPUdbRecord(record_type, record).binary_data)
        if len(records) == 10000:
            if not args.dryrun:
                res = db.insert_records(table_name=table, data=records, list_encoding="binary", options={})
                if res["status_info"]["status"] != "OK":
                    raise RuntimeError(res["status_info"]["message"])
            record_count = record_count + len(records)
            records = []
    if len(records) > 0:
        if not args.dryrun:
            res = db.insert_records(table_name=table, data=records, list_encoding="binary", options={})
            if res["status_info"]["status"] != "OK":
                raise RuntimeError(res["status_info"]["message"])
        record_count = record_count + len(records)
    print(table + ": " + str(record_count) + " records")
def write_column(f, column):
    """Create temp data files for one input column, write its entry to
    control file *f*, and return the dict used later by write_column_data.

    The returned dict holds the open file handles, the column type, the
    fixed-value size, and (for fixed-width types) an encoder callable.
    """
    result = {}
    result["name"] = column.name
    dt = get_column_dt(column)
    result["dt"] = dt
    result["data"] = tempfile.NamedTemporaryFile(prefix="kinetica-udf-sim-", dir=args.path, delete=False)
    if gpudb.GPUdbColumnProperty.NULLABLE in column.column_properties:
        result["null_data"] = tempfile.NamedTemporaryFile(prefix="kinetica-udf-sim-", dir=args.path, delete=False)
    else:
        result["null_data"] = None
    if dt == ColumnType.BYTES or dt == ColumnType.STRING:
        result["var_data"] = tempfile.NamedTemporaryFile(prefix="kinetica-udf-sim-", dir=args.path, delete=False)
    else:
        result["var_data"] = None
    result["size"] = get_dt_size(dt)
    if not result["var_data"]:
        # Encoder from a Python value to the fixed-size binary representation.
        result["encode_data"] = {
            ColumnType.CHAR1: lambda value: _char1_struct.pack(_encode_char(value, 1)),
            ColumnType.CHAR2: lambda value: _char2_struct.pack(_encode_char(value, 2)),
            ColumnType.CHAR4: lambda value: _char4_struct.pack(_encode_char(value, 4)),
            ColumnType.CHAR8: lambda value: _char8_struct.pack(_encode_char(value, 8)),
            ColumnType.CHAR16: lambda value: _char16_struct.pack(_encode_char(value, 16)),
            ColumnType.CHAR32: lambda value: _char32_struct.pack(_encode_char(value, 32)),
            ColumnType.CHAR64: lambda value: _char64_struct.pack(_encode_char(value, 64)),
            ColumnType.CHAR128: lambda value: _char128_struct.pack(_encode_char(value, 128)),
            ColumnType.CHAR256: lambda value: _char256_struct.pack(_encode_char(value, 256)),
            ColumnType.DATE: lambda value: _int32_struct.pack(_encode_date(datetime.datetime.strptime(value, "%Y-%m-%d"))),
            ColumnType.DATETIME: lambda value: _int64_struct.pack(_encode_datetime(datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S.%f"))),
            # BUGFIX: struct.pack cannot take a Decimal (it raised
            # struct.error).  Convert to the int64 fixed-point form with
            # scale 4, mirroring the .scaleb(-4) used when decoding.
            ColumnType.DECIMAL: lambda value: _int64_struct.pack(int(decimal.Decimal(value).scaleb(4))),
            ColumnType.DOUBLE: lambda value: _double_struct.pack(value),
            ColumnType.FLOAT: lambda value: _float_struct.pack(value),
            ColumnType.INT: lambda value: _int32_struct.pack(value),
            ColumnType.INT8: lambda value: _int8_struct.pack(value),
            ColumnType.INT16: lambda value: _int16_struct.pack(value),
            # BUGFIX: inet_aton() already yields the packed 4-byte address;
            # wrapping it in _int32_struct.pack raised on the bytes argument.
            # NOTE(review): this stores the address in network byte order --
            # confirm against Kinetica's native IPv4 representation.
            ColumnType.IPV4: lambda value: socket.inet_aton(value),
            ColumnType.LONG: lambda value: _int64_struct.pack(value),
            ColumnType.TIME: lambda value: _int32_struct.pack(_encode_time(datetime.datetime.strptime(value, "%H:%M:%S.%f"))),
            ColumnType.TIMESTAMP: lambda value: _int64_struct.pack(value)
        }[dt]
    write_string(f, result["name"])
    write_uint64(f, result["dt"])
    write_string(f, result["data"].name)
    write_string(f, result["null_data"].name if result["null_data"] else "")
    write_string(f, result["var_data"].name if result["var_data"] else "")
    return result
def write_column_data(column, data):
    """Append the values of one column of *data* (a list of record dicts)
    to that column's data/null/var files, as prepared by write_column."""
    name = column["name"]
    col_type = column["dt"]
    out = column["data"]
    var_out = column["var_data"]
    null_out = column["null_data"]
    width = column["size"]
    # Fixed-width columns carry an encoder; var-length columns do not.
    encode = column["encode_data"] if not var_out else None
    for row in data:
        value = row[name]
        if value is None:
            # Null: flag it in the null file and emit a placeholder
            # (current var offset, or zero bytes of the fixed width).
            null_out.write(b"\x01")
            if var_out:
                write_uint64(out, var_out.tell())
            else:
                out.write(b"\x00" * width)
            continue
        if null_out:
            null_out.write(b"\x00")
        if not var_out:
            out.write(encode(value))
        else:
            # Record the value's starting offset, then append the payload.
            write_uint64(out, var_out.tell())
            if col_type == ColumnType.BYTES:
                var_out.write(value)
            else:
                # Strings are encoded and NUL-terminated in the var file.
                var_out.write(_encode_string(value))
                var_out.write(b"\x00")
def write_table(f, db, table, write_data):
    """Write one table's control-file entry to *f*.

    *table* is a list: the table name, optionally followed by column names
    to restrict to. If *write_data* is true, the table's records are also
    fetched from *db* (10000 at a time) and appended to the column files.
    """
    table_name = table[0]
    res = db.show_table(table_name=table_name, options={"no_error_if_not_exists": "true"})
    if res["status_info"]["status"] != "OK":
        raise RuntimeError(res["status_info"]["message"])
    if not res["table_names"]:
        raise RuntimeError("Table " + table_name + " does not exist")
    record_type = gpudb.GPUdbRecordType(schema_string=res["type_schemas"][0], column_properties=res["properties"][0])
    write_string(f, table_name)
    columns = []
    requested = table[1:]
    if requested:
        # Restrict to the explicitly listed columns, in the listed order.
        write_uint64(f, len(requested))
        for column_name in requested:
            matched = next((tc for tc in record_type.columns if tc.name == column_name), None)
            if matched is None:
                raise RuntimeError("Table " + table_name + " column " + column_name + " does not exist")
            columns.append(write_column(f, matched))
    else:
        write_uint64(f, len(record_type.columns))
        for type_column in record_type.columns:
            columns.append(write_column(f, type_column))
    if not write_data:
        return
    offset = 0
    while True:
        res = db.get_records(table_name=table_name, offset=offset, limit=10000)
        if res["status_info"]["status"] != "OK":
            raise RuntimeError(res["status_info"]["message"])
        batch = res["records_binary"]
        if len(batch) == 0:
            break
        decoded = gpudb.GPUdbRecord.decode_binary_data(res["type_schema"], batch)
        for column in columns:
            write_column_data(column, decoded)
        if not res["has_more_records"] or len(batch) < 10000:
            break
        offset = offset + len(batch)
# Main
def execute():
    """Simulate a proc execution: validate the flag combination, build the
    input control file (ICF), and print the KINETICA_PCF export line the
    UDF process should use."""
    if args.distributed and args.nondistributed:
        parser.error("-d/--distributed and -n/--nondistributed are mutually exclusive")
    if args.nondistributed and args.input:
        # BUGFIX: error message previously misspelled "--nondistribtued".
        parser.error("-n/--nondistributed and -i/--input are mutually exclusive")
    if args.nondistributed and args.output:
        parser.error("-n/--nondistributed and -o/--output are mutually exclusive")
    # Input/output tables imply distributed mode; otherwise default to
    # nondistributed when neither mode flag was given.
    if args.input or args.output:
        args.distributed = True
    elif not args.distributed:
        args.nondistributed = True
    icf = tempfile.NamedTemporaryFile(prefix="kinetica-udf-sim-icf-", dir=args.path, delete=False)
    write_uint64(icf, 1)  # control file format version
    icf_info = {}
    icf_info["run_id"] = "0"
    icf_info["proc_name"] = "proc"
    if args.distributed:
        icf_info["rank_number"] = "1"
        icf_info["tom_number"] = "0"
    else:
        icf_info["rank_number"] = "0"
    icf_info["data_segment_id"] = "0"
    icf_info["data_segment_number"] = "0"
    icf_info["data_segment_count"] = "1"
    icf_info["head_url"] = args.url
    icf_info["username"] = args.username
    icf_info["password"] = args.password
    write_dict(icf, icf_info)
    write_dict(icf, {})  # second header dict (unused in simulation)
    icf_params = {}
    if args.param:
        for param in args.param:
            icf_params[param[0]] = param[1]
    write_dict(icf, icf_params)
    write_dict(icf, {})  # fourth header dict (unused in simulation)
    if args.input or args.output:
        db = gpudb.GPUdb(encoding="BINARY", host=args.url, username=args.username, password=args.password)
    if args.input:
        write_uint64(icf, len(args.input))
        for table in args.input:
            write_table(icf, db, table, True)
    else:
        write_uint64(icf, 0)
    if args.output:
        write_uint64(icf, len(args.output))
        for table in args.output:
            # Output tables take no column list and no data.
            write_table(icf, db, [table], False)
    else:
        write_uint64(icf, 0)
    # Reserve an (empty) output control file for the proc to fill in.
    ocf = tempfile.NamedTemporaryFile(prefix="kinetica-udf-sim-", dir=args.path, delete=False)
    ocf.close()
    write_string(icf, ocf.name)
    icf_name = icf.name
    # Close explicitly so the ICF is fully flushed before its name is handed
    # to the user (previously left open until interpreter exit).
    icf.close()
    print("export KINETICA_PCF=" + icf_name)
def output():
    """Process a proc's output: read the control file named by
    $KINETICA_PCF, print any key/value results from the output control
    file, and insert output-table data into Kinetica via read_table."""
    if "KINETICA_PCF" not in os.environ:
        raise RuntimeError("No control file specified")
    icf = os.environ["KINETICA_PCF"]
    if not os.path.exists(icf):
        raise RuntimeError("Specified control file does not exist")
    icf = open(icf, "rb")
    if read_uint64(icf) != 1:
        raise RuntimeError("Unrecognized control file version")
    # Skip the four header dicts written by execute() (proc info, an empty
    # dict, user params, an empty dict).
    read_dict(icf)
    read_dict(icf)
    read_dict(icf)
    read_dict(icf)
    # Skip the input (io == 0) then output (io == 1) table sections,
    # remembering where the output section begins so it can be re-read.
    for io in range(0, 2):
        for i in range(0, read_uint64(icf)):
            read_string(icf)  # table name
            for j in range(0, read_uint64(icf)):
                read_string(icf)  # column name
                read_uint64(icf)  # column type
                read_string(icf)  # data file path
                read_string(icf)  # null file path ("" if none)
                read_string(icf)  # var file path ("" if none)
        if io == 0:
            output_pos = icf.tell()
    # Last field of the ICF: path of the output control file (OCF).
    ocf = read_string(icf)
    if os.path.getsize(ocf) == 0:
        raise RuntimeError("No output detected")
    ocf = open(ocf, "rb")
    if read_uint64(ocf) != 1:
        raise RuntimeError("Unrecognized output control file version")
    results = read_dict(ocf)
    if results:
        print("Results:")
        print("")
        for key, value in results.items():
            print(key + ": " + value)
        print("")
    else:
        print("No results")
    # Re-read the output table section, this time decoding and inserting.
    icf.seek(output_pos)
    table_count = read_uint64(icf)
    if table_count > 0:
        print("Output:")
        print("")
        db = gpudb.GPUdb(encoding="BINARY", host=args.url, username=args.username, password=args.password)
        for i in range(0, table_count):
            read_table(icf, db)
    else:
        print("No output")
def clean():
    """Remove every temp file referenced by the control file named in
    $KINETICA_PCF, then remove the control file itself."""
    if "KINETICA_PCF" not in os.environ:
        raise RuntimeError("No control file specified")
    icf_name = os.environ["KINETICA_PCF"]
    if not os.path.exists(icf_name):
        raise RuntimeError("Specified control file does not exist")
    icf = open(icf_name, "rb")
    if read_uint64(icf) != 1:
        raise RuntimeError("Unrecognized control file version")
    # Skip the four header dicts written by execute().
    for _ in range(4):
        read_dict(icf)

    def remove_referenced_file():
        # Each temp file is recorded as a path string; delete it if present.
        target = read_string(icf)
        if target and os.path.exists(target):
            os.remove(target)

    # Input and output table sections: per column, skip name and type, then
    # delete the data, null, and var files.
    for _io in range(2):
        for _table in range(read_uint64(icf)):
            read_string(icf)  # table name
            for _column in range(read_uint64(icf)):
                read_string(icf)  # column name
                read_uint64(icf)  # column type
                remove_referenced_file()  # data file
                remove_referenced_file()  # null file
                remove_referenced_file()  # var file
    remove_referenced_file()  # output control file
    icf.close()
    os.remove(icf_name)
# Command-line interface: three subcommands ("execute" builds a simulated
# control file, "output" processes a proc's results, "clean" removes the
# temp files), each dispatching through args.func.
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
execute_parser = subparsers.add_parser("execute", help="Simulate proc execution")
execute_parser.set_defaults(func=execute)
group = execute_parser.add_argument_group(title="Basic parameters")
group.add_argument("-f", "--path", default=".", metavar="PATH", help="Control file path")
group.add_argument("-p", "--param", action="append", metavar=("NAME","VALUE"), nargs=2, help="Proc parameter")
group = execute_parser.add_argument_group(title="Distributed")
group.add_argument("-d", "--distributed", action="store_true", help="Simulate distributed proc execution")
group.add_argument("-i", "--input", action="append", metavar=("TABLE","COLUMN"), nargs="+", help="Input table (optionally followed by column list)")
group.add_argument("-o", "--output", action="append", metavar="TABLE", help="Output table")
group = execute_parser.add_argument_group(title="Nondistributed")
# BUGFIX: help text previously read "proc exeuction"; also removed the stray
# space before ".add_argument".
group.add_argument("-n", "--nondistributed", action="store_true", help="Simulate nondistributed proc execution")
group = execute_parser.add_argument_group(title="Kinetica connection")
group.add_argument("-K", "--url", default="http://localhost:9191", help="Kinetica URL")
group.add_argument("-U", "--username", default="", help="Kinetica username")
group.add_argument("-P", "--password", default="", help="Kinetica password")
output_parser = subparsers.add_parser("output", help="Process proc output")
output_parser.set_defaults(func=output)
group = output_parser.add_argument_group(title="Basic parameters")
group.add_argument("-d", "--dry-run", dest="dryrun", action="store_true", help="Display output only, do not write to Kinetica")
group = output_parser.add_argument_group(title="Kinetica connection")
group.add_argument("-K", "--url", default="http://localhost:9191", help="Kinetica URL")
group.add_argument("-U", "--username", default="", help="Kinetica username")
group.add_argument("-P", "--password", default="", help="Kinetica password")
clean_parser = subparsers.add_parser("clean", help="Clean up files")
clean_parser.set_defaults(func=clean)
args = parser.parse_args()
args.func()
| |
from StringIO import StringIO
import pickle
import sys
import gc
from os import path
from numpy.testing import *
import numpy as np
rlevel = 1
def assert_valid_refcount(op):
    """Assert that repeatedly applying *op* to two ndarrays does not leak
    references to a cached small int (regression guard for refcounting
    bugs in array operations).

    op : callable taking two array arguments (e.g. a ufunc).
    """
    # (Removed an unused throwaway array the original also allocated.)
    b = np.arange(100 * 100).reshape(100, 100)
    c = b
    i = 1
    rc = sys.getrefcount(i)
    for _ in range(15):
        d = op(b, c)
        assert sys.getrefcount(i) >= rc, "reference count of cached int dropped"
class TestRegression(TestCase):
def test_invalid_round(self,level=rlevel):
"""Ticket #3"""
v = 4.7599999999999998
assert_array_equal(np.array([v]),np.array(v))
def test_mem_empty(self,level=rlevel):
"""Ticket #7"""
np.empty((1,),dtype=[('x',np.int64)])
def test_pickle_transposed(self,level=rlevel):
"""Ticket #16"""
a = np.transpose(np.array([[2,9],[7,0],[3,8]]))
f = StringIO()
pickle.dump(a,f)
f.seek(0)
b = pickle.load(f)
f.close()
assert_array_equal(a,b)
def test_masked_array_create(self,level=rlevel):
"""Ticket #17"""
x = np.ma.masked_array([0,1,2,3,0,4,5,6],mask=[0,0,0,1,1,1,0,0])
assert_array_equal(np.ma.nonzero(x),[[1,2,6,7]])
def test_poly1d(self,level=rlevel):
"""Ticket #28"""
assert_equal(np.poly1d([1]) - np.poly1d([1,0]),
np.poly1d([-1,1]))
def test_typeNA(self,level=rlevel):
"""Ticket #31"""
assert_equal(np.typeNA[np.int64],'Int64')
assert_equal(np.typeNA[np.uint64],'UInt64')
def test_dtype_names(self,level=rlevel):
"""Ticket #35"""
dt = np.dtype([(('name','label'),np.int32,3)])
def test_reduce(self,level=rlevel):
"""Ticket #40"""
assert_almost_equal(np.add.reduce([1.,.5],dtype=None), 1.5)
def test_zeros_order(self,level=rlevel):
"""Ticket #43"""
np.zeros([3], int, 'C')
np.zeros([3], order='C')
np.zeros([3], int, order='C')
def test_sort_bigendian(self,level=rlevel):
"""Ticket #47"""
a = np.linspace(0, 10, 11)
c = a.astype(np.dtype('<f8'))
c.sort()
assert_array_almost_equal(c, a)
def test_negative_nd_indexing(self,level=rlevel):
"""Ticket #49"""
c = np.arange(125).reshape((5,5,5))
origidx = np.array([-1, 0, 1])
idx = np.array(origidx)
c[idx]
assert_array_equal(idx, origidx)
def test_char_dump(self,level=rlevel):
"""Ticket #50"""
f = StringIO()
ca = np.char.array(np.arange(1000,1010),itemsize=4)
ca.dump(f)
f.seek(0)
ca = np.load(f)
f.close()
def test_noncontiguous_fill(self,level=rlevel):
"""Ticket #58."""
a = np.zeros((5,3))
b = a[:,:2,]
def rs():
b.shape = (10,)
self.failUnlessRaises(AttributeError,rs)
def test_bool(self,level=rlevel):
"""Ticket #60"""
x = np.bool_(1)
def test_masked_array(self,level=rlevel):
"""Ticket #61"""
x = np.ma.array(1,mask=[1])
def test_mem_masked_where(self,level=rlevel):
"""Ticket #62"""
from numpy.ma import masked_where, MaskType
a = np.zeros((1,1))
b = np.zeros(a.shape, MaskType)
c = masked_where(b,a)
a-c
def test_indexing1(self,level=rlevel):
"""Ticket #64"""
descr = [('x', [('y', [('z', 'c16', (2,)),]),]),]
buffer = ((([6j,4j],),),)
h = np.array(buffer, dtype=descr)
h['x']['y']['z']
def test_indexing2(self,level=rlevel):
"""Ticket #65"""
descr = [('x', 'i4', (2,))]
buffer = ([3,2],)
h = np.array(buffer, dtype=descr)
h['x']
def test_round(self,level=rlevel):
"""Ticket #67"""
x = np.array([1+2j])
assert_almost_equal(x**(-1), [1/(1+2j)])
def test_kron_matrix(self,level=rlevel):
"""Ticket #71"""
x = np.matrix('[1 0; 1 0]')
assert_equal(type(np.kron(x,x)),type(x))
def test_scalar_compare(self,level=rlevel):
"""Ticket #72"""
a = np.array(['test', 'auto'])
assert_array_equal(a == 'auto', np.array([False,True]))
self.assert_(a[1] == 'auto')
self.assert_(a[0] != 'auto')
b = np.linspace(0, 10, 11)
self.assert_(b != 'auto')
self.assert_(b[0] != 'auto')
def test_unicode_swapping(self,level=rlevel):
"""Ticket #79"""
ulen = 1
ucs_value = u'\U0010FFFF'
ua = np.array([[[ucs_value*ulen]*2]*3]*4, dtype='U%s' % ulen)
ua2 = ua.newbyteorder()
def test_matrix_std_argmax(self,level=rlevel):
"""Ticket #83"""
x = np.asmatrix(np.random.uniform(0,1,(3,3)))
self.assertEqual(x.std().shape, ())
self.assertEqual(x.argmax().shape, ())
def test_object_array_fill(self,level=rlevel):
"""Ticket #86"""
x = np.zeros(1, 'O')
x.fill([])
def test_cov_parameters(self,level=rlevel):
"""Ticket #91"""
x = np.random.random((3,3))
y = x.copy()
np.cov(x,rowvar=1)
np.cov(y,rowvar=0)
assert_array_equal(x,y)
def test_mem_dtype_align(self,level=rlevel):
"""Ticket #93"""
self.failUnlessRaises(TypeError,np.dtype,
{'names':['a'],'formats':['foo']},align=1)
def test_mem_digitize(self,level=rlevel):
"""Ticket #95"""
for i in range(100):
np.digitize([1,2,3,4],[1,3])
np.digitize([0,1,2,3,4],[1,3])
def test_intp(self,level=rlevel):
"""Ticket #99"""
i_width = np.int_(0).nbytes*2 - 1
long('0x' + 'f'*i_width,16)
#self.failUnlessRaises(OverflowError,np.intp,'0x' + 'f'*(i_width+1),16)
#self.failUnlessRaises(ValueError,np.intp,'0x1',32)
assert_equal(255,np.long('0xFF',16))
assert_equal(1024,np.long(1024))
def test_endian_bool_indexing(self,level=rlevel):
"""Ticket #105"""
a = np.arange(10.,dtype='>f8')
b = np.arange(10.,dtype='<f8')
xa = np.where((a>2) & (a<6))
xb = np.where((b>2) & (b<6))
ya = ((a>2) & (a<6))
yb = ((b>2) & (b<6))
assert_array_almost_equal(xa,ya.nonzero())
assert_array_almost_equal(xb,yb.nonzero())
assert(np.all(a[ya] > 0.5))
assert(np.all(b[yb] > 0.5))
def test_mem_dot(self,level=rlevel):
"""Ticket #106"""
x = np.random.randn(0,1)
y = np.random.randn(10,1)
z = np.dot(x, np.transpose(y))
def test_arange_endian(self,level=rlevel):
"""Ticket #111"""
ref = np.arange(10)
x = np.arange(10,dtype='<f8')
assert_array_equal(ref,x)
x = np.arange(10,dtype='>f8')
assert_array_equal(ref,x)
# Longfloat support is not consistent enough across
# platforms for this test to be meaningful.
# def test_longfloat_repr(self,level=rlevel):
# """Ticket #112"""
# if np.longfloat(0).itemsize > 8:
# a = np.exp(np.array([1000],dtype=np.longfloat))
# assert(str(a)[1:9] == str(a[0])[:8])
def test_argmax(self,level=rlevel):
"""Ticket #119"""
a = np.random.normal(0,1,(4,5,6,7,8))
for i in xrange(a.ndim):
aargmax = a.argmax(i)
def test_matrix_properties(self,level=rlevel):
"""Ticket #125"""
a = np.matrix([1.0],dtype=float)
assert(type(a.real) is np.matrix)
assert(type(a.imag) is np.matrix)
c,d = np.matrix([0.0]).nonzero()
assert(type(c) is np.matrix)
assert(type(d) is np.matrix)
def test_mem_divmod(self,level=rlevel):
"""Ticket #126"""
for i in range(10):
divmod(np.array([i])[0],10)
def test_hstack_invalid_dims(self,level=rlevel):
"""Ticket #128"""
x = np.arange(9).reshape((3,3))
y = np.array([0,0,0])
self.failUnlessRaises(ValueError,np.hstack,(x,y))
def test_squeeze_type(self,level=rlevel):
"""Ticket #133"""
a = np.array([3])
b = np.array(3)
assert(type(a.squeeze()) is np.ndarray)
assert(type(b.squeeze()) is np.ndarray)
def test_add_identity(self,level=rlevel):
"""Ticket #143"""
assert_equal(0,np.add.identity)
def test_binary_repr_0(self,level=rlevel):
"""Ticket #151"""
assert_equal('0',np.binary_repr(0))
def test_rec_iterate(self,level=rlevel):
"""Ticket #160"""
descr = np.dtype([('i',int),('f',float),('s','|S3')])
x = np.rec.array([(1,1.1,'1.0'),
(2,2.2,'2.0')],dtype=descr)
x[0].tolist()
[i for i in x[0]]
def test_unicode_string_comparison(self,level=rlevel):
"""Ticket #190"""
a = np.array('hello',np.unicode_)
b = np.array('world')
a == b
def test_tostring_FORTRANORDER_discontiguous(self,level=rlevel):
"""Fix in r2836"""
# Create discontiguous Fortran-ordered array
x = np.array(np.random.rand(3,3),order='F')[:,:2]
assert_array_almost_equal(x.ravel(),np.fromstring(x.tostring()))
def test_flat_assignment(self,level=rlevel):
"""Correct behaviour of ticket #194"""
x = np.empty((3,1))
x.flat = np.arange(3)
assert_array_almost_equal(x,[[0],[1],[2]])
x.flat = np.arange(3,dtype=float)
assert_array_almost_equal(x,[[0],[1],[2]])
def test_broadcast_flat_assignment(self,level=rlevel):
"""Ticket #194"""
x = np.empty((3,1))
def bfa(): x[:] = np.arange(3)
def bfb(): x[:] = np.arange(3,dtype=float)
self.failUnlessRaises(ValueError, bfa)
self.failUnlessRaises(ValueError, bfb)
def test_unpickle_dtype_with_object(self,level=rlevel):
"""Implemented in r2840"""
dt = np.dtype([('x',int),('y',np.object_),('z','O')])
f = StringIO()
pickle.dump(dt,f)
f.seek(0)
dt_ = pickle.load(f)
f.close()
assert_equal(dt,dt_)
def test_mem_array_creation_invalid_specification(self,level=rlevel):
"""Ticket #196"""
dt = np.dtype([('x',int),('y',np.object_)])
# Wrong way
self.failUnlessRaises(ValueError, np.array, [1,'object'], dt)
# Correct way
np.array([(1,'object')],dt)
def test_recarray_single_element(self,level=rlevel):
"""Ticket #202"""
a = np.array([1,2,3],dtype=np.int32)
b = a.copy()
r = np.rec.array(a,shape=1,formats=['3i4'],names=['d'])
assert_array_equal(a,b)
assert_equal(a,r[0][0])
def test_zero_sized_array_indexing(self,level=rlevel):
"""Ticket #205"""
tmp = np.array([])
def index_tmp(): tmp[np.array(10)]
self.failUnlessRaises(IndexError, index_tmp)
def test_unique_zero_sized(self,level=rlevel):
"""Ticket #205"""
assert_array_equal([], np.unique(np.array([])))
def test_chararray_rstrip(self,level=rlevel):
"""Ticket #222"""
x = np.chararray((1,),5)
x[0] = 'a '
x = x.rstrip()
assert_equal(x[0], 'a')
def test_object_array_shape(self,level=rlevel):
"""Ticket #239"""
assert_equal(np.array([[1,2],3,4],dtype=object).shape, (3,))
assert_equal(np.array([[1,2],[3,4]],dtype=object).shape, (2,2))
assert_equal(np.array([(1,2),(3,4)],dtype=object).shape, (2,2))
assert_equal(np.array([],dtype=object).shape, (0,))
assert_equal(np.array([[],[],[]],dtype=object).shape, (3,0))
assert_equal(np.array([[3,4],[5,6],None],dtype=object).shape, (3,))
def test_mem_around(self,level=rlevel):
"""Ticket #243"""
x = np.zeros((1,))
y = [0]
decimal = 6
np.around(abs(x-y),decimal) <= 10.0**(-decimal)
def test_character_array_strip(self,level=rlevel):
"""Ticket #246"""
x = np.char.array(("x","x ","x "))
for c in x: assert_equal(c,"x")
def test_lexsort(self,level=rlevel):
"""Lexsort memory error"""
v = np.array([1,2,3,4,5,6,7,8,9,10])
assert_equal(np.lexsort(v),0)
def test_pickle_dtype(self,level=rlevel):
"""Ticket #251"""
import pickle
pickle.dumps(np.float)
def test_masked_array_multiply(self,level=rlevel):
"""Ticket #254"""
a = np.ma.zeros((4,1))
a[2,0] = np.ma.masked
b = np.zeros((4,2))
a*b
b*a
def test_swap_real(self, level=rlevel):
"""Ticket #265"""
assert_equal(np.arange(4,dtype='>c8').imag.max(),0.0)
assert_equal(np.arange(4,dtype='<c8').imag.max(),0.0)
assert_equal(np.arange(4,dtype='>c8').real.max(),3.0)
assert_equal(np.arange(4,dtype='<c8').real.max(),3.0)
def test_object_array_from_list(self, level=rlevel):
"""Ticket #270"""
a = np.array([1,'A',None])
def test_masked_array_repeat(self, level=rlevel):
"""Ticket #271"""
np.ma.array([1],mask=False).repeat(10)
def test_multiple_assign(self, level=rlevel):
"""Ticket #273"""
a = np.zeros((3,1),int)
a[[1,2]] = 1
def test_empty_array_type(self, level=rlevel):
assert_equal(np.array([]).dtype, np.zeros(0).dtype)
def test_void_coercion(self, level=rlevel):
dt = np.dtype([('a','f4'),('b','i4')])
x = np.zeros((1,),dt)
assert(np.r_[x,x].dtype == dt)
def test_void_copyswap(self, level=rlevel):
dt = np.dtype([('one', '<i4'),('two', '<i4')])
x = np.array((1,2), dtype=dt)
x = x.byteswap()
assert(x['one'] > 1 and x['two'] > 2)
def test_method_args(self, level=rlevel):
# Make sure methods and functions have same default axis
# keyword and arguments
funcs1= ['argmax', 'argmin', 'sum', ('product', 'prod'),
('sometrue', 'any'),
('alltrue', 'all'), 'cumsum', ('cumproduct', 'cumprod'),
'ptp', 'cumprod', 'prod', 'std', 'var', 'mean',
'round', 'min', 'max', 'argsort', 'sort']
funcs2 = ['compress', 'take', 'repeat']
for func in funcs1:
arr = np.random.rand(8,7)
arr2 = arr.copy()
if isinstance(func, tuple):
func_meth = func[1]
func = func[0]
else:
func_meth = func
res1 = getattr(arr, func_meth)()
res2 = getattr(np, func)(arr2)
if res1 is None:
assert abs(arr-res2).max() < 1e-8, func
else:
assert abs(res1-res2).max() < 1e-8, func
for func in funcs2:
arr1 = np.random.rand(8,7)
arr2 = np.random.rand(8,7)
res1 = None
if func == 'compress':
arr1 = arr1.ravel()
res1 = getattr(arr2, func)(arr1)
else:
arr2 = (15*arr2).astype(int).ravel()
if res1 is None:
res1 = getattr(arr1, func)(arr2)
res2 = getattr(np, func)(arr1, arr2)
assert abs(res1-res2).max() < 1e-8, func
def test_mem_lexsort_strings(self, level=rlevel):
"""Ticket #298"""
lst = ['abc','cde','fgh']
np.lexsort((lst,))
def test_fancy_index(self, level=rlevel):
"""Ticket #302"""
x = np.array([1,2])[np.array([0])]
assert_equal(x.shape,(1,))
def test_recarray_copy(self, level=rlevel):
"""Ticket #312"""
dt = [('x',np.int16),('y',np.float64)]
ra = np.array([(1,2.3)], dtype=dt)
rb = np.rec.array(ra, dtype=dt)
rb['x'] = 2.
assert ra['x'] != rb['x']
def test_rec_fromarray(self, level=rlevel):
"""Ticket #322"""
x1 = np.array([[1,2],[3,4],[5,6]])
x2 = np.array(['a','dd','xyz'])
x3 = np.array([1.1,2,3])
np.rec.fromarrays([x1,x2,x3], formats="(2,)i4,a3,f8")
def test_object_array_assign(self, level=rlevel):
x = np.empty((2,2),object)
x.flat[2] = (1,2,3)
assert_equal(x.flat[2],(1,2,3))
def test_ndmin_float64(self, level=rlevel):
"""Ticket #324"""
x = np.array([1,2,3],dtype=np.float64)
assert_equal(np.array(x,dtype=np.float32,ndmin=2).ndim,2)
assert_equal(np.array(x,dtype=np.float64,ndmin=2).ndim,2)
def test_mem_vectorise(self, level=rlevel):
"""Ticket #325"""
vt = np.vectorize(lambda *args: args)
vt(np.zeros((1,2,1)), np.zeros((2,1,1)), np.zeros((1,1,2)))
vt(np.zeros((1,2,1)), np.zeros((2,1,1)), np.zeros((1,1,2)), np.zeros((2,2)))
def test_mem_axis_minimization(self, level=rlevel):
"""Ticket #327"""
data = np.arange(5)
data = np.add.outer(data,data)
def test_mem_float_imag(self, level=rlevel):
"""Ticket #330"""
np.float64(1.0).imag
def test_dtype_tuple(self, level=rlevel):
"""Ticket #334"""
assert np.dtype('i4') == np.dtype(('i4',()))
def test_dtype_posttuple(self, level=rlevel):
"""Ticket #335"""
np.dtype([('col1', '()i4')])
def test_mgrid_single_element(self, level=rlevel):
"""Ticket #339"""
assert_array_equal(np.mgrid[0:0:1j],[0])
assert_array_equal(np.mgrid[0:0],[])
def test_numeric_carray_compare(self, level=rlevel):
"""Ticket #341"""
assert_equal(np.array([ 'X' ], 'c'),'X')
def test_string_array_size(self, level=rlevel):
"""Ticket #342"""
self.failUnlessRaises(ValueError,
np.array,[['X'],['X','X','X']],'|S1')
def test_dtype_repr(self, level=rlevel):
"""Ticket #344"""
dt1=np.dtype(('uint32', 2))
dt2=np.dtype(('uint32', (2,)))
assert_equal(dt1.__repr__(), dt2.__repr__())
def test_reshape_order(self, level=rlevel):
"""Make sure reshape order works."""
a = np.arange(6).reshape(2,3,order='F')
assert_equal(a,[[0,2,4],[1,3,5]])
a = np.array([[1,2],[3,4],[5,6],[7,8]])
b = a[:,1]
assert_equal(b.reshape(2,2,order='F'), [[2,6],[4,8]])
def test_repeat_discont(self, level=rlevel):
"""Ticket #352"""
a = np.arange(12).reshape(4,3)[:,2]
assert_equal(a.repeat(3), [2,2,2,5,5,5,8,8,8,11,11,11])
def test_array_index(self, level=rlevel):
"""Make sure optimization is not called in this case."""
a = np.array([1,2,3])
a2 = np.array([[1,2,3]])
assert_equal(a[np.where(a==3)], a2[np.where(a2==3)])
def test_object_argmax(self, level=rlevel):
a = np.array([1,2,3],dtype=object)
assert a.argmax() == 2
def test_recarray_fields(self, level=rlevel):
"""Ticket #372"""
dt0 = np.dtype([('f0','i4'),('f1','i4')])
dt1 = np.dtype([('f0','i8'),('f1','i8')])
for a in [np.array([(1,2),(3,4)],"i4,i4"),
np.rec.array([(1,2),(3,4)],"i4,i4"),
np.rec.array([(1,2),(3,4)]),
np.rec.fromarrays([(1,2),(3,4)],"i4,i4"),
np.rec.fromarrays([(1,2),(3,4)])]:
assert(a.dtype in [dt0,dt1])
def test_random_shuffle(self, level=rlevel):
"""Ticket #374"""
a = np.arange(5).reshape((5,1))
b = a.copy()
np.random.shuffle(b)
assert_equal(np.sort(b, axis=0),a)
def test_refcount_vectorize(self, level=rlevel):
"""Ticket #378"""
def p(x,y): return 123
v = np.vectorize(p)
assert_valid_refcount(v)
def test_poly1d_nan_roots(self, level=rlevel):
"""Ticket #396"""
p = np.poly1d([np.nan,np.nan,1], r=0)
self.failUnlessRaises(np.linalg.LinAlgError,getattr,p,"r")
def test_refcount_vdot(self, level=rlevel):
"""Changeset #3443"""
assert_valid_refcount(np.vdot)
def test_startswith(self, level=rlevel):
ca = np.char.array(['Hi','There'])
assert_equal(ca.startswith('H'),[True,False])
def test_noncommutative_reduce_accumulate(self, level=rlevel):
"""Ticket #413"""
tosubtract = np.arange(5)
todivide = np.array([2.0, 0.5, 0.25])
assert_equal(np.subtract.reduce(tosubtract), -10)
assert_equal(np.divide.reduce(todivide), 16.0)
assert_array_equal(np.subtract.accumulate(tosubtract),
np.array([0, -1, -3, -6, -10]))
assert_array_equal(np.divide.accumulate(todivide),
np.array([2., 4., 16.]))
def test_mem_polymul(self, level=rlevel):
"""Ticket #448"""
np.polymul([],[1.])
def test_convolve_empty(self, level=rlevel):
"""Convolve should raise an error for empty input array."""
self.failUnlessRaises(ValueError,np.convolve,[],[1])
self.failUnlessRaises(ValueError,np.convolve,[1],[])
def test_multidim_byteswap(self, level=rlevel):
"""Ticket #449"""
r=np.array([(1,(0,1,2))], dtype="i2,3i2")
assert_array_equal(r.byteswap(),
np.array([(256,(0,256,512))],r.dtype))
def test_string_NULL(self, level=rlevel):
"""Changeset 3557"""
assert_equal(np.array("a\x00\x0b\x0c\x00").item(),
'a\x00\x0b\x0c')
def test_mem_string_concat(self, level=rlevel):
"""Ticket #469"""
x = np.array([])
np.append(x,'asdasd\tasdasd')
def test_matrix_multiply_by_1d_vector(self, level=rlevel) :
"""Ticket #473"""
def mul() :
np.mat(np.eye(2))*np.ones(2)
self.failUnlessRaises(ValueError,mul)
def test_junk_in_string_fields_of_recarray(self, level=rlevel):
"""Ticket #483"""
r = np.array([['abc']], dtype=[('var1', '|S20')])
assert str(r['var1'][0][0]) == 'abc'
def test_take_output(self, level=rlevel):
    """Ensure that 'take' writes its result into the array passed as `out`."""
    src = np.arange(12).reshape((3, 4))
    expected = np.take(src, [0, 2], axis=1)
    out_buf = np.zeros_like(expected)
    np.take(src, [0, 2], axis=1, out=out_buf)
    assert_array_equal(expected, out_buf)
def test_array_str_64bit(self, level=rlevel):
    """Ticket #501: np.array_str on an array containing NaN must not
    trigger a floating-point error, even with all FP errors set to raise.

    Uses np.errstate so the previous error state is restored on any exit
    path (equivalent to the seterr/try/finally idiom, but cannot leak).
    """
    s = np.array([1, np.nan], dtype=np.float64)
    with np.errstate(all='raise'):
        np.array_str(s)  # must not raise FloatingPointError
def test_frompyfunc_endian(self, level=rlevel):
"""Ticket #503"""
from math import radians
uradians = np.frompyfunc(radians, 1, 1)
big_endian = np.array([83.4, 83.5], dtype='>f8')
little_endian = np.array([83.4, 83.5], dtype='<f8')
assert_almost_equal(uradians(big_endian).astype(float),
uradians(little_endian).astype(float))
def test_mem_string_arr(self, level=rlevel):
"""Ticket #514"""
s = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
t = []
np.hstack((t, s ))
def test_arr_transpose(self, level=rlevel):
"""Ticket #516"""
x = np.random.rand(*(2,)*16)
y = x.transpose(range(16))
def test_string_mergesort(self, level=rlevel):
"""Ticket #540"""
x = np.array(['a']*32)
assert_array_equal(x.argsort(kind='m'), np.arange(32))
def test_argmax_byteorder(self, level=rlevel):
"""Ticket #546"""
a = np.arange(3, dtype='>f')
assert a[a.argmax()] == a.max()
def test_numeric_random(self, level=rlevel):
"""Ticket #552"""
from numpy.oldnumeric.random_array import randint
randint(0,50,[2,3])
def test_poly_div(self, level=rlevel):
"""Ticket #553"""
u = np.poly1d([1,2,3])
v = np.poly1d([1,2,3,4,5])
q,r = np.polydiv(u,v)
assert_equal(q*v + r, u)
def test_poly_eq(self, level=rlevel):
"""Ticket #554"""
x = np.poly1d([1,2,3])
y = np.poly1d([3,4])
assert x != y
assert x == x
def test_rand_seed(self, level=rlevel):
"""Ticket #555"""
for l in np.arange(4):
np.random.seed(l)
def test_mem_deallocation_leak(self, level=rlevel):
"""Ticket #562"""
a = np.zeros(5,dtype=float)
b = np.array(a,dtype=float)
del a, b
def test_mem_insert(self, level=rlevel):
"""Ticket #572"""
np.lib.place(1,1,1)
def test_mem_on_invalid_dtype(self):
"Ticket #583"
self.failUnlessRaises(ValueError, np.fromiter, [['12',''],['13','']], str)
def test_dot_negative_stride(self, level=rlevel):
"""Ticket #588"""
x = np.array([[1,5,25,125.,625]])
y = np.array([[20.],[160.],[640.],[1280.],[1024.]])
z = y[::-1].copy()
y2 = y[::-1]
assert_equal(np.dot(x,z),np.dot(x,y2))
def test_object_casting(self, level=rlevel):
def rs():
x = np.ones([484,286])
y = np.zeros([484,286])
x |= y
self.failUnlessRaises(TypeError,rs)
def test_unicode_scalar(self, level=rlevel):
"""Ticket #600"""
import cPickle
x = np.array(["DROND", "DROND1"], dtype="U6")
el = x[1]
new = cPickle.loads(cPickle.dumps(el))
assert_equal(new, el)
def test_arange_non_native_dtype(self, level=rlevel):
"""Ticket #616"""
for T in ('>f4','<f4'):
dt = np.dtype(T)
assert_equal(np.arange(0,dtype=dt).dtype,dt)
assert_equal(np.arange(0.5,dtype=dt).dtype,dt)
assert_equal(np.arange(5,dtype=dt).dtype,dt)
def test_bool_indexing_invalid_nr_elements(self, level=rlevel):
s = np.ones(10,dtype=float)
x = np.array((15,),dtype=float)
def ia(x,s): x[(s>0)]=1.0
self.failUnlessRaises(ValueError,ia,x,s)
def test_mem_scalar_indexing(self, level=rlevel):
"""Ticket #603"""
x = np.array([0],dtype=float)
index = np.array(0,dtype=np.int32)
x[index]
def test_binary_repr_0_width(self, level=rlevel):
assert_equal(np.binary_repr(0,width=3),'000')
def test_fromstring(self, level=rlevel):
assert_equal(np.fromstring("12:09:09", dtype=int, sep=":"),
[12,9,9])
def test_searchsorted_variable_length(self, level=rlevel):
x = np.array(['a','aa','b'])
y = np.array(['d','e'])
assert_equal(x.searchsorted(y), [3,3])
def test_string_argsort_with_zeros(self, level=rlevel):
"""Check argsort for strings containing zeros."""
x = np.fromstring("\x00\x02\x00\x01", dtype="|S2")
assert_array_equal(x.argsort(kind='m'), np.array([1,0]))
assert_array_equal(x.argsort(kind='q'), np.array([1,0]))
def test_string_sort_with_zeros(self, level=rlevel):
"""Check sort for strings containing zeros."""
x = np.fromstring("\x00\x02\x00\x01", dtype="|S2")
y = np.fromstring("\x00\x01\x00\x02", dtype="|S2")
assert_array_equal(np.sort(x, kind="q"), y)
def test_hist_bins_as_list(self, level=rlevel):
"""Ticket #632"""
import warnings
warnings.simplefilter('ignore', Warning)
try:
hist,edges = np.histogram([1,2,3,4],[1,2], new=False)
assert_array_equal(hist,[1,3])
assert_array_equal(edges,[1,2])
finally:
warnings.resetwarnings()
def test_copy_detection_zero_dim(self, level=rlevel):
"""Ticket #658"""
np.indices((0,3,4)).T.reshape(-1,3)
def test_flat_byteorder(self, level=rlevel):
"""Ticket #657"""
x = np.arange(10)
assert_array_equal(x.astype('>i4'),x.astype('<i4').flat[:])
assert_array_equal(x.astype('>i4').flat[:],x.astype('<i4'))
def test_uint64_from_negative(self, level=rlevel) :
assert_equal(np.uint64(-2), np.uint64(18446744073709551614))
def test_sign_bit(self, level=rlevel):
x = np.array([0,-0.0,0])
assert_equal(str(np.abs(x)),'[ 0. 0. 0.]')
def test_flat_index_byteswap(self, level=rlevel):
for dt in (np.dtype('<i4'),np.dtype('>i4')):
x = np.array([-1,0,1],dtype=dt)
assert_equal(x.flat[0].dtype, x[0].dtype)
def test_copy_detection_corner_case(self, level=rlevel):
"""Ticket #658"""
np.indices((0,3,4)).T.reshape(-1,3)
def test_copy_detection_corner_case2(self, level=rlevel):
"""Ticket #771: strides are not set correctly when reshaping 0-sized
arrays"""
b = np.indices((0,3,4)).T.reshape(-1,3)
assert_equal(b.strides, (3 * b.itemsize, b.itemsize))
def test_object_array_refcounting(self, level=rlevel):
    """Ticket #633: slice assignment, copying, flattening, concatenate,
    repeat, take and choose on object arrays must keep the refcounts of
    the stored objects exactly balanced (checked via sys.getrefcount)."""
    if not hasattr(sys, 'getrefcount'):
        return
    # NB. this is probably CPython-specific
    cnt = sys.getrefcount
    a = object()
    b = object()
    c = object()
    # Baseline refcounts before any array holds the objects.
    cnt0_a = cnt(a)
    cnt0_b = cnt(b)
    cnt0_c = cnt(c)
    # -- 0d -> 1d broadcasted slice assignment
    arr = np.zeros(5, dtype=np.object_)
    arr[:] = a
    assert cnt(a) == cnt0_a + 5
    arr[:] = b
    # overwriting with b must release all five refs to a
    assert cnt(a) == cnt0_a
    assert cnt(b) == cnt0_b + 5
    arr[:2] = c
    # two slots switch from b to c
    assert cnt(b) == cnt0_b + 3
    assert cnt(c) == cnt0_c + 2
    del arr
    # -- 1d -> 2d broadcasted slice assignment
    arr = np.zeros((5, 2), dtype=np.object_)
    arr0 = np.zeros(2, dtype=np.object_)
    arr0[0] = a
    assert cnt(a) == cnt0_a + 1
    arr0[1] = b
    assert cnt(b) == cnt0_b + 1
    arr[:,:] = arr0
    # broadcast over 5 rows adds 5 refs each; arr0 still holds one
    assert cnt(a) == cnt0_a + 6
    assert cnt(b) == cnt0_b + 6
    arr[:,0] = None
    assert cnt(a) == cnt0_a + 1
    del arr, arr0
    # -- 2d copying + flattening
    arr = np.zeros((5, 2), dtype=np.object_)
    arr[:,0] = a
    arr[:,1] = b
    assert cnt(a) == cnt0_a + 5
    assert cnt(b) == cnt0_b + 5
    arr2 = arr.copy()
    assert cnt(a) == cnt0_a + 10
    assert cnt(b) == cnt0_b + 10
    arr2 = arr[:,0].copy()
    # rebinding arr2 dropped the full copy; only column 0 (all a) is held now
    assert cnt(a) == cnt0_a + 10
    assert cnt(b) == cnt0_b + 5
    arr2 = arr.flatten()
    assert cnt(a) == cnt0_a + 10
    assert cnt(b) == cnt0_b + 10
    del arr, arr2
    # -- concatenate, repeat, take, choose
    arr1 = np.zeros((5, 1), dtype=np.object_)
    arr2 = np.zeros((5, 1), dtype=np.object_)
    arr1[...] = a
    arr2[...] = b
    assert cnt(a) == cnt0_a + 5
    assert cnt(b) == cnt0_b + 5
    arr3 = np.concatenate((arr1, arr2))
    assert cnt(a) == cnt0_a + 5 + 5
    assert cnt(b) == cnt0_b + 5 + 5
    # each rebinding of arr3 below first releases the previous result's refs
    arr3 = arr1.repeat(3, axis=0)
    assert cnt(a) == cnt0_a + 5 + 3*5
    arr3 = arr1.take([1,2,3], axis=0)
    assert cnt(a) == cnt0_a + 5 + 3
    x = np.array([[0],[1],[0],[1],[1]], int)
    arr3 = x.choose(arr1, arr2)
    assert cnt(a) == cnt0_a + 5 + 2
    assert cnt(b) == cnt0_b + 5 + 3
def test_mem_custom_float_to_array(self, level=rlevel):
"""Ticket 702"""
class MyFloat:
def __float__(self):
return 1
tmp = np.atleast_1d([MyFloat()])
tmp2 = tmp.astype(float)
def test_object_array_refcount_self_assign(self, level=rlevel):
"""Ticket #711"""
class VictimObject(object):
deleted = False
def __del__(self):
self.deleted = True
d = VictimObject()
arr = np.zeros(5, dtype=np.object_)
arr[:] = d
del d
arr[:] = arr # refcount of 'd' might hit zero here
assert not arr[0].deleted
arr[:] = arr # trying to induce a segfault by doing it again...
assert not arr[0].deleted
def test_mem_fromiter_invalid_dtype_string(self, level=rlevel):
x = [1,2,3]
self.failUnlessRaises(ValueError,
np.fromiter, [xi for xi in x], dtype='S')
def test_reduce_big_object_array(self, level=rlevel):
"""Ticket #713"""
oldsize = np.setbufsize(10*16)
a = np.array([None]*161, object)
assert not np.any(a)
np.setbufsize(oldsize)
def test_mem_0d_array_index(self, level=rlevel):
"""Ticket #714"""
np.zeros(10)[np.array(0)]
def test_floats_from_string(self, level=rlevel):
"""Ticket #640, floats from string"""
fsingle = np.single('1.234')
fdouble = np.double('1.234')
flongdouble = np.longdouble('1.234')
assert_almost_equal(fsingle, 1.234)
assert_almost_equal(fdouble, 1.234)
assert_almost_equal(flongdouble, 1.234)
def test_complex_dtype_printing(self, level=rlevel):
dt = np.dtype([('top', [('tiles', ('>f4', (64, 64)), (1,)),
('rtile', '>f4', (64, 36))], (3,)),
('bottom', [('bleft', ('>f4', (8, 64)), (1,)),
('bright', '>f4', (8, 36))])])
assert_equal(str(dt),
"[('top', [('tiles', ('>f4', (64, 64)), (1,)), "
"('rtile', '>f4', (64, 36))], (3,)), "
"('bottom', [('bleft', ('>f4', (8, 64)), (1,)), "
"('bright', '>f4', (8, 36))])]")
def test_nonnative_endian_fill(self, level=rlevel):
""" Non-native endian arrays were incorrectly filled with scalars before
r5034.
"""
if sys.byteorder == 'little':
dtype = np.dtype('>i4')
else:
dtype = np.dtype('<i4')
x = np.empty([1], dtype=dtype)
x.fill(1)
assert_equal(x, np.array([1], dtype=dtype))
def test_asfarray_none(self, level=rlevel):
"""Test for changeset r5065"""
assert_array_equal(np.array([np.nan]), np.asfarray([None]))
def test_dot_alignment_sse2(self, level=rlevel):
"""Test for ticket #551, changeset r5140"""
x = np.zeros((30,40))
y = pickle.loads(pickle.dumps(x))
# y is now typically not aligned on a 8-byte boundary
z = np.ones((1, y.shape[0]))
# This shouldn't cause a segmentation fault:
np.dot(z, y)
def test_astype_copy(self, level=rlevel):
"""Ticket #788, changeset r5155"""
# The test data file was generated by scipy.io.savemat.
# The dtype is float64, but the isbuiltin attribute is 0.
data_dir = path.join(path.dirname(__file__), 'data')
filename = path.join(data_dir, "astype_copy.pkl")
xp = pickle.load(open(filename))
xpd = xp.astype(np.float64)
assert (xp.__array_interface__['data'][0] !=
xpd.__array_interface__['data'][0])
def test_compress_small_type(self, level=rlevel):
"""Ticket #789, changeset 5217.
"""
# compress with out argument segfaulted if cannot cast safely
import numpy as np
a = np.array([[1, 2], [3, 4]])
b = np.zeros((2, 1), dtype = np.single)
try:
a.compress([True, False], axis = 1, out = b)
raise AssertionError("compress with an out which cannot be " \
"safely casted should not return "\
"successfully")
except TypeError:
pass
def test_attributes(self, level=rlevel):
"""Ticket #791
"""
import numpy as np
class TestArray(np.ndarray):
def __new__(cls, data, info):
result = np.array(data)
result = result.view(cls)
result.info = info
return result
def __array_finalize__(self, obj):
self.info = getattr(obj, 'info', '')
dat = TestArray([[1,2,3,4],[5,6,7,8]],'jubba')
assert dat.info == 'jubba'
dat.resize((4,2))
assert dat.info == 'jubba'
dat.sort()
assert dat.info == 'jubba'
dat.fill(2)
assert dat.info == 'jubba'
dat.put([2,3,4],[6,3,4])
assert dat.info == 'jubba'
dat.setfield(4, np.int32,0)
assert dat.info == 'jubba'
dat.setflags()
assert dat.info == 'jubba'
assert dat.all(1).info == 'jubba'
assert dat.any(1).info == 'jubba'
assert dat.argmax(1).info == 'jubba'
assert dat.argmin(1).info == 'jubba'
assert dat.argsort(1).info == 'jubba'
assert dat.astype(TestArray).info == 'jubba'
assert dat.byteswap().info == 'jubba'
assert dat.clip(2,7).info == 'jubba'
assert dat.compress([0,1,1]).info == 'jubba'
assert dat.conj().info == 'jubba'
assert dat.conjugate().info == 'jubba'
assert dat.copy().info == 'jubba'
dat2 = TestArray([2, 3, 1, 0],'jubba')
choices = [[0, 1, 2, 3], [10, 11, 12, 13],
[20, 21, 22, 23], [30, 31, 32, 33]]
assert dat2.choose(choices).info == 'jubba'
assert dat.cumprod(1).info == 'jubba'
assert dat.cumsum(1).info == 'jubba'
assert dat.diagonal().info == 'jubba'
assert dat.flatten().info == 'jubba'
assert dat.getfield(np.int32,0).info == 'jubba'
assert dat.imag.info == 'jubba'
assert dat.max(1).info == 'jubba'
assert dat.mean(1).info == 'jubba'
assert dat.min(1).info == 'jubba'
assert dat.newbyteorder().info == 'jubba'
assert dat.nonzero()[0].info == 'jubba'
assert dat.nonzero()[1].info == 'jubba'
assert dat.prod(1).info == 'jubba'
assert dat.ptp(1).info == 'jubba'
assert dat.ravel().info == 'jubba'
assert dat.real.info == 'jubba'
assert dat.repeat(2).info == 'jubba'
assert dat.reshape((2,4)).info == 'jubba'
assert dat.round().info == 'jubba'
assert dat.squeeze().info == 'jubba'
assert dat.std(1).info == 'jubba'
assert dat.sum(1).info == 'jubba'
assert dat.swapaxes(0,1).info == 'jubba'
assert dat.take([2,3,5]).info == 'jubba'
assert dat.transpose().info == 'jubba'
assert dat.T.info == 'jubba'
assert dat.var(1).info == 'jubba'
assert dat.view(TestArray).info == 'jubba'
def test_recarray_tolist(self, level=rlevel):
"""Ticket #793, changeset r5215
"""
# Comparisons fail for NaN, so we can't use random memory
# for the test.
buf = np.zeros(40, dtype=np.int8)
a = np.recarray(2, formats="i4,f8,f8", names="id,x,y", buf=buf)
b = a.tolist()
assert( a[0].tolist() == b[0])
assert( a[1].tolist() == b[1])
def test_large_fancy_indexing(self, level=rlevel):
# Large enough to fail on 64-bit.
nbits = np.dtype(np.intp).itemsize * 8
thesize = int((2**nbits)**(1.0/5.0)+1)
def dp():
n = 3
a = np.ones((n,)*5)
i = np.random.randint(0,n,size=thesize)
a[np.ix_(i,i,i,i,i)] = 0
def dp2():
n = 3
a = np.ones((n,)*5)
i = np.random.randint(0,n,size=thesize)
g = a[np.ix_(i,i,i,i,i)]
self.failUnlessRaises(ValueError, dp)
self.failUnlessRaises(ValueError, dp2)
def test_char_array_creation(self, level=rlevel):
a = np.array('123', dtype='c')
b = np.array(['1','2','3'])
assert_equal(a,b)
def test_unaligned_unicode_access(self, level=rlevel) :
"""Ticket #825"""
for i in range(1,9) :
msg = 'unicode offset: %d chars'%i
t = np.dtype([('a','S%d'%i),('b','U2')])
x = np.array([('a',u'b')], dtype=t)
assert_equal(str(x), "[('a', u'b')]", err_msg=msg)
def test_sign_for_complex_nan(self, level=rlevel):
"""Ticket 794."""
C = np.array([-np.inf, -2+1j, 0, 2-1j, np.inf, np.nan])
have = np.sign(C)
want = np.array([-1+0j, -1+0j, 0+0j, 1+0j, 1+0j, np.nan])
assert_equal(have, want)
def test_for_equal_names(self, level=rlevel):
"""Ticket #674"""
dt = np.dtype([('foo', float), ('bar', float)])
a = np.zeros(10, dt)
b = list(a.dtype.names)
b[0] = "notfoo"
a.dtype.names = b
assert a.dtype.names[0] == "notfoo"
assert a.dtype.names[1] == "bar"
def test_for_object_scalar_creation(self, level=rlevel):
    """Ticket #816: np.object_ construction unwraps to plain Python objects
    (None / int / float) or an object-dtype ndarray for sequences."""
    empty = np.object_()
    from_int = np.object_(3)
    from_float = np.object_(3.0)
    from_list = np.object_([4, 5])
    from_mixed = np.object_([None, {}, []])
    assert empty is None
    assert type(from_int) is int
    assert type(from_float) is float
    assert type(from_list) is np.ndarray
    assert from_list.dtype == object
    assert from_mixed.dtype == object
def test_for_zero_length_in_choose(self, level=rlevel):
"Ticket #882"
a = np.array(1)
self.failUnlessRaises(ValueError, lambda x: x.choose([]), a)
def test_array_ndmin_overflow(self):
"Ticket #947."
self.assertRaises(ValueError, lambda: np.array([1], ndmin=33))
def test_errobj_reference_leak(self, level=rlevel):
    """Ticket #955: ``0 ** np.int32(-1)`` must not leak a reference to the
    internal errobj; verified by comparing gc-tracked object counts
    before and after the operation."""
    z = int(0)
    p = np.int32(-1)
    gc.collect()
    n_before = len(gc.get_objects())
    z**p # this shouldn't leak a reference to errobj
    gc.collect()
    n_after = len(gc.get_objects())
    # Collection may shrink the population, but a leak would grow it.
    assert n_before >= n_after, (n_before, n_after)
def test_void_scalar_with_titles(self, level=rlevel):
"""No ticket"""
data = [('john', 4), ('mary', 5)]
dtype1 = [(('source:yy', 'name'), 'O'), (('source:xx', 'id'), int)]
arr = np.array(data, dtype=dtype1)
assert arr[0][0] == 'john'
assert arr[0][1] == 4
def test_blasdot_uninitialized_memory(self):
"""Ticket #950"""
for m in [0, 1, 2]:
for n in [0, 1, 2]:
for k in xrange(3):
# Try to ensure that x->data contains non-zero floats
x = np.array([123456789e199], dtype=np.float64)
x.resize((m, 0))
y = np.array([123456789e199], dtype=np.float64)
y.resize((0, n))
# `dot` should just return zero (m,n) matrix
z = np.dot(x, y)
assert np.all(z == 0)
assert z.shape == (m, n)
def test_fromiter_comparison(self, level=rlevel):
"""Ticket #1058"""
a = np.fromiter(range(10), dtype='b')
b = np.fromiter(range(10), dtype='B')
assert np.alltrue(a == np.array([0,1,2,3,4,5,6,7,8,9]))
assert np.alltrue(b == np.array([0,1,2,3,4,5,6,7,8,9]))
if __name__ == "__main__":
run_module_suite()
| |
from __future__ import print_function, division, absolute_import
import matplotlib
matplotlib.use('Agg')
import os
os.environ['ODIN'] = 'gpu,float32,seed=1234'
from collections import OrderedDict
import numpy as np
import tensorflow as tf
from sklearn.decomposition import PCA
from sklearn.metrics import confusion_matrix, accuracy_score
from odin import backend as K, nnet as N, fuel as F
from odin.stats import train_valid_test_split, freqcount
from odin import training
from odin import preprocessing as pp
from odin.ml import evaluate, fast_tsne
from odin.visual import (print_dist, print_confusion, print_hist,
plot_scatter, plot_figure, plot_spectrogram, plot_save,
plot_confusion_matrix,
generate_random_colors, generate_random_marker)
from odin.utils import (get_logpath, get_modelpath, get_datasetpath, get_figpath,
Progbar, unique_labels, chain,
as_tuple_of_shape, stdio, ctext, ArgController)
# ===========================================================================
# Const
# ===========================================================================
# Features read from the preprocessed dataset: mel-spectrogram + SAD labels.
FEAT = ['mspec', 'sad']
MODEL_PATH = get_modelpath(name='DIGITS', override=True)
LOG_PATH = get_logpath(name='digits.log', override=True)
FIG_PATH = get_figpath(name='DIGITS', override=True)
stdio(LOG_PATH)  # tee stdout into the log file
DEBUG = False
# ====== training ====== #
BATCH_SIZE = 32
NB_EPOCH = 20
NB_SAMPLES = 8
VALID_PERCENTAGE = 0.4
# ===========================================================================
# Load dataset
# ===========================================================================
# Load the preprocessed TIDIGITS feature dataset (read-only) and verify
# every required feature is present.
path = get_datasetpath(name='TIDIGITS_feats', override=False)
assert os.path.isdir(path), \
    "Cannot find preprocessed feature at: %s, try to run 'odin/examples/features.py'" % path
ds = F.Dataset(path, read_only=True)
assert all(f in ds for f in FEAT), "Cannot find features with name: %s" % FEAT
# ====== get all the indices of single digit ====== #
# Utterance names look like "<set>_<gender>_<age>_<dialect>_<speaker>_<digits>"
# (see the report_info/split lambdas below); keep only single-digit utterances.
indices = [(name, (s, e))
           for name, (s, e) in list(ds['indices'].items())
           if len(name.split('_')[-1]) == 1]
K.get_rng().shuffle(indices)
print("Found %s utterances of single digit" % ctext(len(indices), 'cyan'))
# ===========================================================================
# Load and visual the dataset
# ===========================================================================
# Partition the shuffled utterances into train/test by the name prefix,
# tracking the shortest and longest segment lengths along the way.
train, test = [], []
max_length, min_length = 0, np.inf
for name, (start, end) in indices:
    length = end - start
    assert length > 0
    bucket = train if name.split('_')[0] == 'train' else test
    bucket.append((name, (start, end)))
    max_length = max(length, max_length)
    min_length = min(length, min_length)
print(ctext("#Train:", 'yellow'), len(train), train[:2])
print(ctext("#Test:", 'yellow'), len(test), test[:2])
print("Min Length:", ctext(min_length, 'cyan'))
print("Max Length:", ctext(max_length, 'cyan'))
# ====== gender and single digit distribution ====== #
gender_digit = lambda x: x[0].split('_')[1] + '-' + x[0].split('_')[-1]
print(print_dist(d=freqcount(train, key=gender_digit),
show_number=True,
title="Training distribution"))
print(print_dist(d=freqcount(test, key=gender_digit),
show_number=True,
title="Testing distribution"))
# ====== digits ====== #
f_digits, digits = unique_labels(
[i[0] for i in train + test],
key_func=lambda x: x.split('_')[-1], return_labels=True)
print(ctext("All digits:", 'yellow'), ctext(digits, 'cyan'))
# ====== genders ====== #
f_genders, genders = unique_labels(
[i[0] for i in train + test],
key_func=lambda x: x.split('_')[1], return_labels=True)
print(ctext("All genders:", 'yellow'), ctext(genders, 'cyan'))
# ====== marker and color for visualization ====== #
digit_colors = generate_random_colors(n=len(digits))
digit_color_map = {i: j for i, j in zip(digits, digit_colors)}
gender_markers = generate_random_marker(n=len(genders))
gender_marker_map = {i: j for i, j in zip(genders, gender_markers)}
legends = OrderedDict()
for g, m in zip(genders, gender_markers):
for d, c in zip(digits, digit_colors):
legends[(c, m)] = str(g) + '-' + str(d)
# ===========================================================================
# SPlit dataset
# ===========================================================================
split_spkID = lambda x: x[0].split('_')[4]
split_dialID_spkID = lambda x: x[0].split('_')[3] + x[0].split('_')[4]
split_genID_spkID = lambda x: x[0].split('_')[1] + x[0].split('_')[4]
split_genID = lambda x: x[0].split('_')[1]
split_ageID = lambda x: x[0].split('_')[2]
# stratified sampling for each digit, splited based on speaker ID
train, valid = train_valid_test_split(x=train, train=0.6, inc_test=False,
idfunc=split_spkID,
seed=K.get_rng().randint(0, 10e8))
# make sure both train and valid set have all the numbers
assert set(i[0].split('_')[-1] for i in train) == set(digits)
assert set(i[0].split('_')[-1] for i in valid) == set(digits)
# ====== report ====== #
report_info = lambda idx, flist: sorted(list(set(i[0].split('_')[idx] for i in flist)))
print(ctext("#File train:", 'yellow'), len(train), train[:2])
print(' * Genders:', ctext(report_info(1, train), 'cyan'))
print(' * Age:', ctext(report_info(2, train), 'cyan'))
print(' * Dialects:', ctext(report_info(3, train), 'cyan'))
print(' * Speakers:', ctext(report_info(4, train), 'cyan'))
print(ctext("#File valid:", 'yellow'), len(valid), valid[:2])
print(' * Genders:', ctext(report_info(1, valid), 'cyan'))
print(' * Age:', ctext(report_info(2, valid), 'cyan'))
print(' * Dialects:', ctext(report_info(3, valid), 'cyan'))
print(' * Speakers:', ctext(report_info(4, valid), 'cyan'))
print(ctext("#File test:", 'yellow'), len(test), test[:2])
# ====== create recipe ====== #
recipes = [
F.recipes.Slice(slices=slice(40), axis=-1, data_idx=0),
F.recipes.Sequencing(frame_length=max_length, step_length=1,
end='pad', pad_mode='post', pad_value=0,
data_idx=None),
F.recipes.Name2Label(converter_func=f_digits),
F.recipes.LabelOneHot(nb_classes=len(digits), data_idx=-1),
]
data = [ds[f] for f in FEAT]
train = F.Feeder(F.IndexedData(data=data, indices=train),
dtype='float32', ncpu=6,
buffer_size=len(digits),
batch_mode='batch')
valid = F.Feeder(F.IndexedData(data=data, indices=valid),
dtype='float32', ncpu=2,
buffer_size=len(digits),
batch_mode='batch')
test = F.Feeder(F.IndexedData(data=data, indices=test),
dtype='float32', ncpu=1,
buffer_size=1,
batch_mode='file')
train.set_recipes(recipes)
valid.set_recipes(recipes)
test.set_recipes(recipes)
# ===========================================================================
# Create model
# ===========================================================================
inputs = [K.placeholder(shape=(None,) + shape[1:], dtype='float32', name='input%d' % i)
for i, shape in enumerate(train.shape)]
print("Inputs:", ctext(inputs, 'cyan'))
# ====== create the network ====== #
f_encoder = N.Sequence([
N.Dimshuffle(pattern=(0, 1, 2, 'x')),
N.Conv(num_filters=32, filter_size=(7, 7), b_init=None, activation=K.linear),
N.BatchNorm(),
N.Pool(pool_size=(3, 2), strides=2),
], debug=True, name='Encoder')
f_latent = N.Sequence([
N.Flatten(outdim=3),
N.CudnnRNN(num_units=128, num_layers=1, is_bidirectional=False,
rnn_mode='lstm'),
], debug=True, name='Latent')
f_decoder = N.Sequence([
N.Flatten(outdim=2),
N.Dense(num_units=1024, b_init=None, activation=K.linear),
N.BatchNorm(axes=0, activation=K.relu)
], debug=True, name='Decoder')
f_output = N.Sequence([
N.Dense(len(digits), activation=K.linear)
], debug=True, name='Output')
# ====== applying ====== #
E = f_encoder(inputs[0])
Z_train = f_latent(E, training=True)
Z_infer = f_latent(E, training=False)
D_train = f_decoder(Z_train)
D_infer = f_decoder(Z_infer)
y_logit = f_output(D_train)
y_prob = tf.nn.softmax(f_output(D_infer))
# ====== create loss ====== #
y = inputs[-1]
ce = tf.losses.softmax_cross_entropy(onehot_labels=y, logits=y_logit)
acc = K.metrics.categorical_accuracy(y_true=y, y_pred=y_prob)
cm = K.metrics.confusion_matrix(y_pred=y_prob, y_true=y, labels=len(digits))
# ====== params and optimizing ====== #
updates = K.optimizers.RMSProp(lr=0.0001).minimize(
loss=ce, roles=[K.role.Weight, K.role.Bias])
K.initialize_all_variables()
# ====== Functions ====== #
print('Building training functions ...')
f_train = K.function(inputs=inputs, outputs=[ce, acc, cm],
updates=updates, training=True)
print('Building testing functions ...')
f_test = K.function(inputs=inputs, outputs=[ce, acc, cm],
training=False)
print('Building predicting functions ...')
f_pred = K.function(inputs=inputs, outputs=y_prob, training=False)
print("Building other functions ...")
f_e = K.function(inputs=inputs, outputs=E, training=False)
f_z = K.function(inputs=inputs, outputs=Z_infer, training=False)
f_d = K.function(inputs=inputs, outputs=D_infer, training=False)
# ===========================================================================
# Training
# ===========================================================================
print('Start training ...')
task = training.MainLoop(batch_size=BATCH_SIZE,
seed=1234,
shuffle_level=2,
allow_rollback=True,
labels=digits)
task.set_checkpoint(MODEL_PATH, [f_encoder, f_decoder])
task.set_callbacks([
training.NaNDetector(),
training.EarlyStopGeneralizationLoss('valid', ce,
threshold=5, patience=5)
])
task.set_train_task(f_train, train, epoch=25, name='train')
task.set_valid_task(f_test, valid,
freq=training.Timer(percentage=0.8),
name='valid')
task.run()
# ===========================================================================
# Latent space
# ===========================================================================
def evaluate_latent(fn, feeder, title):
    """Run `fn` over every file in `feeder`, then visualize the collected
    latent representations (sample spectrograms plus a 3-D t-SNE scatter)
    and save the figures to FIG_PATH/<title>.pdf."""
    y_true = []
    Z = []
    for outputs in Progbar(feeder.set_batch(batch_mode='file'),
                           name=title,
                           print_report=True,
                           print_summary=False,
                           count_func=lambda x: x[-1].shape[0]):
        name = str(outputs[0])
        idx = int(outputs[1])
        data = outputs[2:]
        # batch_mode='file' is expected to deliver each utterance whole
        assert idx == 0
        y_true.append(name)
        Z.append(fn(*data))
    Z = np.concatenate(Z, axis=0)
    # ====== visualize spectrogram ====== #
    if Z.ndim >= 3:
        # pick 3 random utterances and plot their feature maps
        sample = np.random.choice(range(len(Z)), size=3, replace=False)
        spec = Z[sample.astype('int32')]
        y = [y_true[int(i)] for i in sample]
        plot_figure(nrow=6, ncol=6)
        for i, (s, tit) in enumerate(zip(spec, y)):
            s = s.reshape(len(s), -1)
            plot_spectrogram(s.T, ax=(1, 3, i + 1), title=tit)
    # ====== visualize each point ====== #
    # flatten to 2D
    Z = np.reshape(Z, newshape=(len(Z), -1))
    # tsne if necessary
    if Z.shape[-1] > 3:
        Z = fast_tsne(Z, n_components=3, n_jobs=8,
                      random_state=K.get_rng().randint(0, 10e8))
    # color and marker: color encodes the digit, marker the speaker gender
    Z_color = [digit_color_map[i.split('_')[-1]] for i in y_true]
    Z_marker = [gender_marker_map[i.split('_')[1]] for i in y_true]
    plot_figure(nrow=6, ncol=20)
    # same cloud from three azimuth angles; legend only on the middle view
    for i, azim in enumerate((15, 60, 120)):
        plot_scatter(x=Z[:, 0], y=Z[:, 1], z=Z[:, 2], ax=(1, 3, i + 1),
                     size=4, color=Z_color, marker=Z_marker, azim=azim,
                     legend=legends if i == 1 else None, legend_ncol=11, fontsize=10,
                     title=title)
    plot_save(os.path.join(FIG_PATH, '%s.pdf' % title))
# ====== differnt latent space ====== #
# encoder
evaluate_latent(f_e, valid, title="valid_encoder")
evaluate_latent(f_e, test, title="test_encoder")
# RNN latent
evaluate_latent(f_z, valid, title="valid_latent")
evaluate_latent(f_z, test, title="test_latent")
# Discriminator
evaluate_latent(f_d, valid, title="valid_decoder")
evaluate_latent(f_d, test, title="test_decoder")
# ===========================================================================
# Prediction
# ===========================================================================
def evaluate_feeder(feeder, title):
    """Predict digit probabilities for every file in `feeder` and plot one
    confusion matrix per gender, saved to FIG_PATH/<title>.pdf."""
    y_true_digit = []
    y_true_gender = []
    y_pred = []
    for outputs in Progbar(feeder.set_batch(batch_mode='file'),
                           name=title,
                           print_report=True,
                           print_summary=False,
                           count_func=lambda x: x[-1].shape[0]):
        name = str(outputs[0])
        idx = int(outputs[1])
        data = outputs[2:]
        # batch_mode='file' is expected to deliver each utterance whole
        assert idx == 0
        y_true_digit.append(f_digits(name))
        y_true_gender.append(f_genders(name))
        y_pred.append(f_pred(*data))
    # ====== post processing ====== #
    y_true_digit = np.array(y_true_digit, dtype='int32')
    y_true_gender = np.array(y_true_gender, dtype='int32')
    y_pred_proba = np.concatenate(y_pred, axis=0)
    # hard decision = argmax over the digit probabilities
    y_pred_all = np.argmax(y_pred_proba, axis=-1).astype('int32')
    # ====== plotting for each gender ====== #
    plot_figure(nrow=6, ncol=25)
    for gen in range(len(genders)):
        # select only the utterances spoken by this gender
        y_true, y_pred = [], []
        for i, g in enumerate(y_true_gender):
            if g == gen:
                y_true.append(y_true_digit[i])
                y_pred.append(y_pred_all[i])
        if len(y_true) == 0:
            continue
        cm = confusion_matrix(y_true, y_pred, labels=range(len(digits)))
        plot_confusion_matrix(cm, labels=digits, fontsize=8,
                              ax=(1, 4, gen + 1),
                              title='[%s]%s' % (genders[gen], title))
    plot_save(os.path.join(FIG_PATH, '%s.pdf' % title))
evaluate_feeder(valid, title="valid")
evaluate_feeder(test, title="test")
# ===========================================================================
# print some log
# ===========================================================================
print("Log path:", ctext(LOG_PATH, 'cyan'))
print("Fig path:", ctext(FIG_PATH, 'cyan'))
| |
# Copyright (C) 2014, Hitachi, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Self test for Hitachi Block Storage Driver
"""
import mock
from cinder import exception
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.hitachi import hbsd_basiclib
from cinder.volume.drivers.hitachi import hbsd_common
from cinder.volume.drivers.hitachi import hbsd_fc
from cinder.volume.drivers.hitachi import hbsd_snm2
def _exec_hsnm(*args, **kargs):
    """Stub for SNM2 CLI execution: return the canned result keyed by *args*."""
    return HBSDSNM2FCDriverTest.hsnm_vals.get(args)
def _exec_hsnm_get_lu_ret_err(*args, **kargs):
    """Stub variant: canned output simulating a non-zero LU-query return code."""
    return HBSDSNM2FCDriverTest.hsnm_get_lu_ret_err.get(args)
def _exec_hsnm_get_lu_vol_type_err(*args, **kargs):
    """Stub variant: canned output simulating a bad LU volume type."""
    return HBSDSNM2FCDriverTest.hsnm_get_lu_vol_type_err.get(args)
def _exec_hsnm_get_lu_dppool_err(*args, **kargs):
    """Stub variant: canned output simulating a DP-pool error for the LU."""
    return HBSDSNM2FCDriverTest.hsnm_get_lu_dppool_err.get(args)
def _exec_hsnm_get_lu_size_err(*args, **kargs):
    """Stub variant: canned output simulating an LU size mismatch."""
    return HBSDSNM2FCDriverTest.hsnm_get_lu_size_err.get(args)
def _exec_hsnm_get_lu_num_port_err(*args, **kargs):
    """Stub variant: canned output simulating a bad number of LU paths/ports."""
    return HBSDSNM2FCDriverTest.hsnm_get_lu_num_port_err.get(args)
class HBSDSNM2FCDriverTest(test.TestCase):
    """Tests for HBSDFCDriver backed by the SNM2 command interface.

    Every SNM2 CLI invocation (``exec_hsnm``) is mocked out with one of the
    ``_exec_hsnm*`` side-effect helpers, which look up the canned output
    strings defined below.
    """
    # ------------------------------------------------------------------
    # Canned SNM2 CLI outputs.  NOTE(review): these appear to be parsed
    # positionally by the driver, so the spacing inside the strings is
    # significant -- do not reformat them.
    # ------------------------------------------------------------------
    # ``audppool -refer -g`` output: a single DP pool with id 30.
    audppool_result = " DP RAID \
                         Current Utilization Current Over Replication\
 Available Current Replication Rotational \
 \
 Stripe \
 Needing Preparation\n\
    Pool Tier Mode Level Total Capacity Consumed Capacity \
 Percent Provisioning Percent Capacity \
 Utilization Percent Type Speed Encryption Status \
 \
 Reconstruction Progress Size Capacity\n\
 30 Disable 1( 1D+1D) 532.0 GB 2.0 GB \
 1% 24835% 532.0 GB \
 1% SAS 10000rpm N/A Normal \
 N/A \
 256KB 0.0 GB"
    # ``aureplicationlocal -refer`` output: one ShadowImage pair, 99% split.
    aureplicationlocal_result = "Pair Name LUN Pair \
 LUN Status Copy Type Group \
 Point-in-Time MU Number\n\
 0 10 0 Split( 99%) \
 ShadowImage ---:Ungrouped N/A\
 "
    # ``auluref`` output for a healthy LU (status "Normal").
    auluref_result = " Stripe RAID DP Tier \
 RAID Rotational Number\n\
 LU Capacity Size Group Pool Mode Level Type\
 Speed of Paths Status\n\
 0 2097152 blocks 256KB 0 0 Enable 0 Normal"
    # Same LU but with an unexpected status field ("DUMMY").
    auluref_result1 = " Stripe RAID DP Tier \
 RAID Rotational Number\n\
 LU Capacity Size Group Pool Mode Level Type\
 Speed of Paths Status\n\
 0 2097152 blocks 256KB 0 0 Enable 0 DUMMY"
    # ``auhgwwn -refer`` output: host group with a detected and assigned WWN.
    auhgwwn_result = "Port 00 Host Group Security ON\n Detected WWN\n \
 Name Port Name Host Group\n\
 HBSD-00 10000000C97BCE7A 001:HBSD-01\n\
 Assigned WWN\n Name Port Name \
 Host Group\n abcdefg 10000000C97BCE7A \
 001:HBSD-01"
    # ``aufibre1 -refer`` output: one FC port with its WWNs.
    aufibre1_result = "Port Information\n\
 Port Address\n CTL Port\
 Node Name Port Name Setting Current\n 0 0 \
 50060E801053C2E0 50060E801053C2E0 0000EF 272700"
    # ``auhgmap -refer`` output: a single H-LUN mapping.
    auhgmap_result = "Mapping Mode = ON\nPort Group \
 H-LUN LUN\n 00 001:HBSD-00 0 1000"
    # Maps (command, argument string) -> [exit_code, stdout, stderr];
    # the side_effect table for the mocked exec_hsnm() on success paths.
    hsnm_vals = {
        ('audppool', '-unit None -refer -g'): [0, "%s" % audppool_result, ""],
        ('aureplicationlocal',
         '-unit None -create -si -pvol 1 -svol 1 -compsplit -pace normal'):
        [0, "", ""],
        ('aureplicationlocal',
         '-unit None -create -si -pvol 3 -svol 1 -compsplit -pace normal'):
        [1, "", ""],
        ('aureplicationlocal', '-unit None -refer -pvol 1'):
        [0, "%s" % aureplicationlocal_result, ""],
        ('aureplicationlocal', '-unit None -refer -pvol 3'):
        [1, "", "DMEC002015"],
        ('aureplicationlocal', '-unit None -refer -svol 3'):
        [1, "", "DMEC002015"],
        ('aureplicationlocal', '-unit None -simplex -si -pvol 1 -svol 0'):
        [0, "", ""],
        ('auluchgsize', '-unit None -lu 1 -size 256g'):
        [0, "", ""],
        ('auludel', '-unit None -lu 1 -f'): [0, 0, ""],
        ('auludel', '-unit None -lu 3 -f'): [1, 0, ""],
        ('auluadd', '-unit None -lu 1 -dppoolno 30 -size 128g'): [0, 0, ""],
        ('auluadd', '-unit None -lu 1 -dppoolno 30 -size 256g'): [1, "", ""],
        ('auluref', '-unit None'): [0, "%s" % auluref_result, ""],
        ('auluref', '-unit None -lu 0'): [0, "%s" % auluref_result, ""],
        ('auhgmap', '-unit None -add 0 0 1 1 1'): [0, 0, ""],
        ('auhgwwn', '-unit None -refer'): [0, "%s" % auhgwwn_result, ""],
        ('aufibre1', '-unit None -refer'): [0, "%s" % aufibre1_result, ""],
        ('auhgmap', '-unit None -refer'): [0, "%s" % auhgmap_result, ""]}
    # Variants below drive the manage_existing_get_size() error paths.
    # Non-zero exit code from auluref.
    auluref_ret_err = "Stripe RAID DP Tier \
 RAID Rotational Number\n\
 LU Capacity Size Group Pool Mode Level Type\
 Speed of Paths Status\n\
 0 2097152 blocks 256KB 0 0 Enable 0 Normal"
    hsnm_get_lu_ret_err = {
        ('auluref', '-unit None -lu 0'): [1, "%s" % auluref_ret_err, ""],
    }
    # LU status is not "Normal".
    auluref_vol_type_err = "Stripe RAID DP Tier \
 RAID Rotational Number\n\
 LU Capacity Size Group Pool Mode Level Type\
 Speed of Paths Status\n\
 0 2097152 blocks 256KB 0 0 Enable 0 DUMMY"
    hsnm_get_lu_vol_type_err = {
        ('auluref', '-unit None -lu 0'):
        [0, "%s" % auluref_vol_type_err, ""],
    }
    # DP pool column is "N/A" (LU not in a DP pool).
    auluref_dppool_err = "Stripe RAID DP Tier \
 RAID Rotational Number\n\
 LU Capacity Size Group Pool Mode Level Type\
 Speed of Paths Status\n\
 0 2097152 blocks 256KB 0 N/A Enable 0 Normal"
    hsnm_get_lu_dppool_err = {
        ('auluref', '-unit None -lu 0'):
        [0, "%s" % auluref_dppool_err, ""],
    }
    # Capacity (2097151 blocks) is not a whole number of GiB.
    auluref_size_err = "Stripe RAID DP Tier \
 RAID Rotational Number\n\
 LU Capacity Size Group Pool Mode Level Type\
 Speed of Paths Status\n\
 0 2097151 blocks 256KB N/A 0 Enable 0 Normal"
    hsnm_get_lu_size_err = {
        ('auluref', '-unit None -lu 0'): [0, "%s" % auluref_size_err, ""],
    }
    # Number of paths is 1 instead of the expected 0.
    auluref_num_port_err = "Stripe RAID DP Tier \
 RAID Rotational Number\n\
 LU Capacity Size Group Pool Mode Level Type\
 Speed of Paths Status\n\
 0 2097152 blocks 256KB 0 0 Enable 1 Normal"
    hsnm_get_lu_num_port_err = {
        ('auluref', '-unit None -lu 0'): [0, "%s" % auluref_num_port_err, ""],
    }
    # The following information is passed on to tests, when creating a volume
    _VOLUME = {'size': 128, 'volume_type': None, 'source_volid': '0',
               'provider_location': '1', 'name': 'test',
               'id': 'abcdefg', 'snapshot_id': '0', 'status': 'available'}
    test_volume = {'name': 'test_volume', 'size': 128,
                   'id': 'test-volume-0',
                   'provider_location': '1', 'status': 'available'}
    # size 256 + provider_location '3' hits the canned failure entries above.
    test_volume_error = {'name': 'test_volume_error', 'size': 256,
                         'id': 'test-volume-error',
                         'provider_location': '3', 'status': 'available'}
    test_volume_error1 = {'name': 'test_volume_error', 'size': 128,
                          'id': 'test-volume-error',
                          'provider_location': None, 'status': 'available'}
    test_volume_error2 = {'name': 'test_volume_error', 'size': 256,
                          'id': 'test-volume-error',
                          'provider_location': '1', 'status': 'available'}
    # Marked as a V-VOL via metadata: extend must be rejected.
    test_volume_error3 = {'name': 'test_volume3', 'size': 128,
                          'id': 'test-volume3',
                          'volume_metadata': [{'key': 'type',
                                               'value': 'V-VOL'}],
                          'provider_location': '1', 'status': 'available'}
    test_volume_error4 = {'name': 'test_volume4', 'size': 128,
                          'id': 'test-volume2',
                          'provider_location': '3', 'status': 'available'}
    test_snapshot = {'volume_name': 'test', 'size': 128,
                     'volume_size': 128, 'name': 'test-snap',
                     'volume_id': 0, 'id': 'test-snap-0', 'volume': _VOLUME,
                     'provider_location': '1', 'status': 'available'}
    test_snapshot_error2 = {'volume_name': 'test', 'size': 128,
                            'volume_size': 128, 'name': 'test-snap',
                            'volume_id': 0, 'id': 'test-snap-0',
                            'volume': test_volume_error,
                            'provider_location': None, 'status': 'available'}
    # manage_existing() reference dicts: one valid, plus the invalid
    # combinations each manage_existing_get_size_* test exercises.
    UNIT_NAME = 'HUS110_91122819'
    test_existing_ref = {'ldev': '0', 'unit_name': UNIT_NAME}
    test_existing_none_ldev_ref = {'ldev': None, 'unit_name': UNIT_NAME}
    test_existing_invalid_ldev_ref = {'ldev': 'AAA', 'unit_name': UNIT_NAME}
    test_existing_no_ldev_ref = {'unit_name': UNIT_NAME}
    test_existing_none_unit_ref = {'ldev': '0', 'unit_name': None}
    test_existing_invalid_unit_ref = {'ldev': '0', 'unit_name': 'Dummy'}
    test_existing_no_unit_ref = {'ldev': '0'}
    # NOTE(review): this __init__ only delegates and could be removed.
    def __init__(self, *args, **kwargs):
        super(HBSDSNM2FCDriverTest, self).__init__(*args, **kwargs)
    def setUp(self):
        super(HBSDSNM2FCDriverTest, self).setUp()
        self._setup_config()
        self._setup_driver()
    def _setup_config(self):
        # Mocked oslo config with the minimum options the driver reads.
        self.configuration = mock.Mock(conf.Configuration)
        self.configuration.hitachi_pool_id = 30
        self.configuration.hitachi_target_ports = "00"
        self.configuration.hitachi_debug_level = 0
        self.configuration.hitachi_serial_number = "None"
        self.configuration.hitachi_unit_name = "None"
        self.configuration.hitachi_group_request = False
        self.configuration.hitachi_zoning_request = False
        self.configuration.config_group = "None"
        self.configuration.hitachi_ldev_range = [0, 100]
        self.configuration.hitachi_default_copy_method = 'SI'
        self.configuration.hitachi_copy_check_interval = 1
        self.configuration.hitachi_copy_speed = 3
    def _setup_driver(self):
        # Wire the FC driver to a real HBSDCommon/HBSDSNM2 pair; only the
        # exec_hsnm layer is mocked in the tests themselves.
        self.driver = hbsd_fc.HBSDFCDriver(
            configuration=self.configuration)
        context = None
        db = None
        self.driver.common = hbsd_common.HBSDCommon(
            self.configuration, self.driver, context, db)
        self.driver.common.command = hbsd_snm2.HBSDSNM2(self.configuration)
        self.driver.common.pair_flock = \
            self.driver.common.command.set_pair_flock()
        self.driver.common.horcmgr_flock = \
            self.driver.common.command.set_horcmgr_flock()
        # Mark setup as finished so driver entry points do not block.
        self.driver.do_setup_status.set()
    # API test cases
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_create_volume(self, arg1, arg2, arg3):
        """test create_volume."""
        ret = self.driver.create_volume(self._VOLUME)
        vol = self._VOLUME.copy()
        vol['provider_location'] = ret['provider_location']
        self.assertEqual(vol['provider_location'], '1')
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_create_volume_error(self, arg1, arg2, arg3):
        """test create_volume."""
        self.assertRaises(exception.HBSDCmdError,
                          self.driver.create_volume,
                          self.test_volume_error)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_get_volume_stats(self, arg1, arg2):
        """test get_volume_stats."""
        stats = self.driver.get_volume_stats(True)
        self.assertEqual(stats['vendor_name'], 'Hitachi')
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_get_volume_stats_error(self, arg1, arg2):
        """test get_volume_stats."""
        # Pool 29 is absent from the canned audppool output -> empty stats.
        self.configuration.hitachi_pool_id = 29
        stats = self.driver.get_volume_stats(True)
        self.assertEqual(stats, {})
        # Restore the valid pool id for subsequent tests.
        self.configuration.hitachi_pool_id = 30
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_extend_volume(self, arg1, arg2):
        """test extend_volume."""
        self.driver.extend_volume(self._VOLUME, 256)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_extend_volume_error(self, arg1, arg2):
        """test extend_volume."""
        # test_volume_error3 is a V-VOL, which must not be extendable.
        self.assertRaises(exception.HBSDError, self.driver.extend_volume,
                          self.test_volume_error3, 256)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_delete_volume(self, arg1, arg2):
        """test delete_volume."""
        self.driver.delete_volume(self._VOLUME)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_delete_volume_error(self, arg1, arg2):
        """test delete_volume."""
        self.assertRaises(exception.HBSDCmdError,
                          self.driver.delete_volume,
                          self.test_volume_error4)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_snapshot_metadata',
                       return_value={'dummy_snapshot_meta': 'snapshot_meta'})
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata',
                       return_value={'dummy_volume_meta': 'meta'})
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume',
                       return_value=_VOLUME)
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_create_snapshot(self, arg1, arg2, arg3, arg4, arg5):
        """test create_snapshot."""
        ret = self.driver.create_volume(self._VOLUME)
        ret = self.driver.create_snapshot(self.test_snapshot)
        self.assertEqual(ret['provider_location'], '1')
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_snapshot_metadata',
                       return_value={'dummy_snapshot_meta': 'snapshot_meta'})
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata',
                       return_value={'dummy_volume_meta': 'meta'})
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume',
                       return_value=test_volume_error)
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_create_snapshot_error(self, arg1, arg2, arg3, arg4, arg5):
        """test create_snapshot."""
        self.assertRaises(exception.HBSDCmdError,
                          self.driver.create_snapshot,
                          self.test_snapshot_error2)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_delete_snapshot(self, arg1, arg2):
        """test delete_snapshot."""
        self.driver.delete_snapshot(self.test_snapshot)
        return
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_delete_snapshot_error(self, arg1, arg2):
        """test delete_snapshot."""
        self.driver.delete_snapshot(self.test_snapshot_error2)
        return
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata',
                       return_value={'dummy_volume_meta': 'meta'})
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_create_volume_from_snapshot(self, arg1, arg2, arg3):
        """test create_volume_from_snapshot."""
        vol = self.driver.create_volume_from_snapshot(self._VOLUME,
                                                      self.test_snapshot)
        self.assertIsNotNone(vol)
        return
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata',
                       return_value={'dummy_volume_meta': 'meta'})
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_create_volume_from_snapshot_error(self, arg1, arg2, arg3):
        """test create_volume_from_snapshot."""
        self.assertRaises(exception.HBSDError,
                          self.driver.create_volume_from_snapshot,
                          self.test_volume_error2, self.test_snapshot)
        return
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata',
                       return_value={'dummy_volume_meta': 'meta'})
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume',
                       return_value=_VOLUME)
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    def test_create_cloned_volume(self, arg1, arg2, arg3, arg4):
        """test create_cloned_volume."""
        vol = self.driver.create_cloned_volume(self._VOLUME,
                                               self.test_volume)
        self.assertIsNotNone(vol)
        return
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume_metadata',
                       return_value={'dummy_volume_meta': 'meta'})
    @mock.patch.object(hbsd_common.HBSDCommon, 'get_volume',
                       return_value=test_volume_error1)
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    def test_create_cloned_volume_error(self, arg1, arg2, arg3, arg4):
        """test create_cloned_volume."""
        self.assertRaises(exception.HBSDError,
                          self.driver.create_cloned_volume,
                          self._VOLUME, self.test_volume_error1)
        return
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_initialize_connection(self, arg1, arg2):
        """test initialize connection."""
        connector = {'wwpns': '0x100000', 'ip': '0xc0a80100'}
        rc = self.driver.initialize_connection(self._VOLUME, connector)
        # Values come from the canned aufibre1/auhgmap outputs above.
        self.assertEqual(rc['driver_volume_type'], 'fibre_channel')
        self.assertEqual(rc['data']['target_wwn'], ['50060E801053C2E0'])
        self.assertEqual(rc['data']['target_lun'], 1)
        return
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_initialize_connection_error(self, arg1, arg2):
        """test initialize connection."""
        # 'x' is not a WWN known to the canned auhgwwn output.
        connector = {'wwpns': 'x', 'ip': '0xc0a80100'}
        self.assertRaises(exception.HBSDError,
                          self.driver.initialize_connection,
                          self._VOLUME, connector)
        return
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_terminate_connection(self, arg1):
        """test terminate connection."""
        connector = {'wwpns': '0x100000', 'ip': '0xc0a80100'}
        rc = self.driver.terminate_connection(self._VOLUME, connector)
        self.assertEqual(rc['driver_volume_type'], 'fibre_channel')
        self.assertEqual(rc['data']['target_wwn'], ['50060E801053C2E0'])
        return
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_terminate_connection_error(self, arg1):
        """test terminate connection."""
        # Missing 'wwpns' key must be rejected.
        connector = {'ip': '0xc0a80100'}
        self.assertRaises(exception.HBSDError,
                          self.driver.terminate_connection,
                          self._VOLUME, connector)
        return
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_manage_existing(self, arg1, arg2):
        rc = self.driver.manage_existing(self._VOLUME, self.test_existing_ref)
        self.assertEqual(0, rc['provider_location'])
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        # 2097152 blocks in the canned auluref output == 1 GiB.
        size = self.driver.manage_existing_get_size(self._VOLUME,
                                                    self.test_existing_ref)
        self.assertEqual(1, size)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size_none_ldev(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_none_ldev_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size_invalid_ldev_ref(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_invalid_ldev_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size_no_ldev_ref(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_no_ldev_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size_none_unit_ref(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_none_unit_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size_invalid_unit_ref(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_invalid_unit_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size_no_unit_ref(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_no_unit_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm',
                       side_effect=_exec_hsnm_get_lu_ret_err)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_size_ret_err(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm',
                       side_effect=_exec_hsnm_get_lu_vol_type_err)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_lu_vol_type_err(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm',
                       side_effect=_exec_hsnm_get_lu_dppool_err)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_lu_dppool_err(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm',
                       side_effect=_exec_hsnm_get_lu_size_err)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_lu_size_err(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm',
                       side_effect=_exec_hsnm_get_lu_num_port_err)
    @mock.patch.object(hbsd_common.HBSDCommon, '_update_volume_metadata')
    def test_manage_existing_get_lu_num_port_err(self, arg1, arg2, arg3):
        self.configuration.hitachi_unit_name = self.UNIT_NAME
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.driver.manage_existing_get_size, self._VOLUME,
                          self.test_existing_ref)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_unmanage(self, arg1, arg2):
        self.driver.unmanage(self._VOLUME)
    @mock.patch.object(hbsd_basiclib, 'get_process_lock')
    @mock.patch.object(hbsd_snm2.HBSDSNM2, 'exec_hsnm', side_effect=_exec_hsnm)
    def test_unmanage_busy(self, arg1, arg2):
        # test_volume_error3 is a V-VOL, so unmanage must report it busy.
        self.assertRaises(exception.HBSDVolumeIsBusy,
                          self.driver.unmanage, self.test_volume_error3)
| |
"""All pytest-django fixtures"""
from __future__ import with_statement
import os
import pytest
from . import live_server_helper
from .db_reuse import (monkey_patch_creation_for_db_reuse,
monkey_patch_creation_for_db_suffix)
from .django_compat import is_django_unittest
from .lazy_django import skip_if_no_django
__all__ = ['_django_db_setup', 'db', 'transactional_db',
'client', 'admin_client', 'rf', 'settings', 'live_server',
'_live_server_helper']
################ Internal Fixtures ################
@pytest.fixture(scope='session')
def _django_db_setup(request,
                     _django_test_environment,
                     _django_cursor_wrapper):
    """Session-wide database setup, internal to pytest-django.

    Creates the test database(s) once per session (optionally re-using an
    existing one when --reuse-db is given) and registers teardown unless
    re-use is requested.
    """
    skip_if_no_django()
    from .compat import setup_databases, teardown_databases
    from django.core import management
    # xdist: give each slave its own database by suffixing its id.
    if hasattr(request.config, 'slaveinput'):
        db_suffix = request.config.slaveinput['slaveid']
    else:
        db_suffix = None
    monkey_patch_creation_for_db_suffix(db_suffix)
    # Disable south's syncdb command (fall back to Django's builtin one).
    commands = management.get_commands()
    if commands['syncdb'] == 'south':
        management._commands['syncdb'] = 'django.core'
    with _django_cursor_wrapper:
        # Monkey patch Django's setup code to support database re-use
        if request.config.getvalue('reuse_db'):
            if not request.config.getvalue('create_db'):
                monkey_patch_creation_for_db_reuse()
        # Create the database
        db_cfg = setup_databases(verbosity=0, interactive=False)
    def teardown_database():
        # Cursor access must be enabled while dropping the databases.
        with _django_cursor_wrapper:
            teardown_databases(db_cfg)
    # With --reuse-db the database is deliberately left in place.
    if not request.config.getvalue('reuse_db'):
        request.addfinalizer(teardown_database)
################ User visible fixtures ################
@pytest.fixture(scope='function')
def db(request, _django_db_setup, _django_cursor_wrapper):
    """Require a django test database

    This database will be setup with the default fixtures and will
    have the transaction management disabled. At the end of the test
    the transaction will be rolled back to undo any changes to the
    database. This is more limited then the ``transaction_db``
    resource but faster.

    If both this and ``transaction_db`` are requested then the
    database setup will behave as only ``transaction_db`` was
    requested.
    """
    requested = request.funcargnames
    # Defer entirely to transactional/live-server setup or Django's own
    # TestCase machinery when they are in play.
    wants_transactions = ('transactional_db' in requested or
                          'live_server' in requested)
    if wants_transactions or is_django_unittest(request.node):
        return
    from django.test import TestCase
    _django_cursor_wrapper.enable()
    case = TestCase(methodName='__init__')
    case._pre_setup()
    # Finalizers run LIFO: teardown (rollback) happens before disabling
    # cursor access.
    request.addfinalizer(_django_cursor_wrapper.disable)
    request.addfinalizer(case._post_teardown)
@pytest.fixture(scope='function')
def transactional_db(request, _django_db_setup, _django_cursor_wrapper):
    """Require a django test database with transaction support

    This will re-initialise the django database for each test and is
    thus slower then the normal ``db`` fixture.

    If you want to use the database with transactions you must request
    this resource. If both this and ``db`` are requested then the
    database setup will behave as only ``transaction_db`` was
    requested.
    """
    if is_django_unittest(request.node):
        return
    _django_cursor_wrapper.enable()

    def _flush_databases():
        """Flush every configured database and close its connections."""
        # Django flushes *before* each test by default; we do it after.
        from django.db import connections
        from django.core.management import call_command
        for alias in connections:
            call_command('flush', verbosity=0,
                         interactive=False, database=alias)
        for connection in connections.all():
            connection.close()

    # LIFO finalizers: flush first, then drop cursor access.
    request.addfinalizer(_django_cursor_wrapper.disable)
    request.addfinalizer(_flush_databases)
@pytest.fixture()
def client():
    """A fresh Django test client for each test."""
    skip_if_no_django()
    from django.test.client import Client
    test_client = Client()
    return test_client
@pytest.fixture()
def admin_client(db):
    """A Django test client already logged in as a superuser ('admin')."""
    # Django >= 1.5 exposes a swappable user model; fall back otherwise.
    try:
        from django.contrib.auth import get_user_model
        User = get_user_model()
    except ImportError:
        from django.contrib.auth.models import User
    from django.test.client import Client
    try:
        User.objects.get(username='admin')
    except User.DoesNotExist:
        # First use in this database: create the superuser account.
        admin_user = User.objects.create_user('admin', 'admin@example.com',
                                              'password')
        admin_user.is_staff = True
        admin_user.is_superuser = True
        admin_user.save()
    admin = Client()
    admin.login(username='admin', password='password')
    return admin
@pytest.fixture()
def rf():
    """A Django ``RequestFactory`` for building requests without a server."""
    skip_if_no_django()
    from django.test.client import RequestFactory
    factory = RequestFactory()
    return factory
class MonkeyPatchWrapper(object):
    """Attribute proxy that routes writes/deletes through a monkeypatch.

    Reads are forwarded to ``wrapped_object`` unchanged; sets and deletes
    go via the ``monkeypatch`` object so they are undone automatically at
    the end of the test.
    """

    def __init__(self, monkeypatch, wrapped_object):
        # Bypass our own __setattr__ so these two attributes land on the
        # proxy itself rather than being forwarded to the monkeypatch.
        object.__setattr__(self, 'monkeypatch', monkeypatch)
        object.__setattr__(self, 'wrapped_object', wrapped_object)

    def __getattr__(self, attr):
        return getattr(self.wrapped_object, attr)

    def __setattr__(self, attr, value):
        # raising=False: allow setting attributes that don't exist yet.
        self.monkeypatch.setattr(self.wrapped_object, attr, value,
                                 raising=False)

    def __delattr__(self, attr):
        self.monkeypatch.delattr(self.wrapped_object, attr)
@pytest.fixture()
def settings(request, monkeypatch):
    """Django settings proxy whose modifications are undone after the test."""
    skip_if_no_django()
    from django.conf import settings as django_settings
    wrapped = MonkeyPatchWrapper(monkeypatch, django_settings)
    return wrapped
@pytest.fixture(scope='session')
def live_server(request):
    """Run a live Django server in the background during tests

    The address the server is started from is taken from the
    --liveserver command line option or if this is not provided from
    the DJANGO_LIVE_TEST_SERVER_ADDRESS environment variable.  If
    neither is provided ``localhost:8081,8100-8200`` is used.  See the
    Django documentation for it's full syntax.

    NOTE: If the live server needs database access to handle a request
          your test will have to request database access.  Furthermore
          when the tests want to see data added by the live-server (or
          the other way around) transactional database access will be
          needed as data inside a transaction is not shared between
          the live server and test code.
    """
    skip_if_no_django()
    addr = request.config.getvalue('liveserver')
    if not addr:
        # BUG FIX: the variable documented above (and used by Django's own
        # LiveServerTestCase) is DJANGO_LIVE_TEST_SERVER_ADDRESS; the code
        # previously read the misspelled DJANGO_TEST_LIVE_SERVER_ADDRESS
        # and therefore ignored the documented setting.
        addr = os.getenv('DJANGO_LIVE_TEST_SERVER_ADDRESS')
    if not addr:
        addr = 'localhost:8081,8100-8200'
    server = live_server_helper.LiveServer(addr)
    # Stop the server (and free its port) at the end of the session.
    request.addfinalizer(server.stop)
    return server
@pytest.fixture(autouse=True, scope='function')
def _live_server_helper(request):
    """Helper to make live_server work, internal to pytest-django

    This helper will dynamically request the transactional_db fixture
    for a tests which uses the live_server fixture.  This allows the
    server and test to access the database without having to mark
    this explicitly which is handy since it is usually required and
    matches the Django behaviour.

    The separate helper is required since live_server can not request
    transactional_db directly since it is session scoped instead of
    function-scoped.
    """
    if 'live_server' not in request.funcargnames:
        return
    request.getfuncargvalue('transactional_db')
| |
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for nova.compute.rpcapi
"""
import contextlib
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
from nova.compute import rpcapi as compute_rpcapi
from nova import context
from nova.objects import block_device as objects_block_dev
from nova.objects import network_request as objects_network_request
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
CONF = cfg.CONF
class ComputeRpcAPITestCase(test.TestCase):
    def setUp(self):
        """Build a fake instance (object and primitive forms) and a fake
        volume block-device mapping shared by all the RPC API tests."""
        super(ComputeRpcAPITestCase, self).setUp()
        self.context = context.get_admin_context()
        instance_attr = {'host': 'fake_host',
                         'instance_type_id': 1}
        self.fake_instance_obj = fake_instance.fake_instance_obj(self.context,
                                                                 **instance_attr)
        # Primitive (dict) form, as it would travel over RPC.
        self.fake_instance = jsonutils.to_primitive(self.fake_instance_obj)
        self.fake_volume_bdm = jsonutils.to_primitive(
            fake_block_device.FakeDbBlockDeviceDict(
                {'source_type': 'volume', 'destination_type': 'volume',
                 'instance_uuid': self.fake_instance['uuid'],
                 'volume_id': 'fake-volume-id'}))
    def test_serialized_instance_has_name(self):
        # Serializing the instance object must preserve the 'name' field.
        self.assertIn('name', self.fake_instance)
    def _test_compute_api(self, method, rpc_method, **kwargs):
        """Drive one compute RPC API method and verify the emitted RPC.

        :param method: name of the ComputeAPI method to invoke
        :param rpc_method: expected transport primitive ('call' or 'cast')
        :param kwargs: arguments passed to the method; special keys:
            ``version`` -- expected RPC version (default: client target),
            ``rpcapi_class`` -- alternate API class,
            ``host_param``/``host``/``destination`` -- control which host
            the client is expected to ``prepare()`` for,
            ``return_bdm_object`` -- make the mocked call return a
            BlockDeviceMapping object.
        """
        ctxt = context.RequestContext('fake_user', 'fake_project')
        rpcapi = kwargs.pop('rpcapi_class', compute_rpcapi.ComputeAPI)()
        self.assertIsNotNone(rpcapi.client)
        self.assertEqual(rpcapi.client.target.topic, CONF.compute_topic)
        # Keep a handle on the real prepare() before it gets mocked below.
        orig_prepare = rpcapi.client.prepare
        expected_version = kwargs.pop('version', rpcapi.client.target.version)
        expected_kwargs = kwargs.copy()
        # At version 3.23 requested networks are downgraded to plain tuples.
        if ('requested_networks' in expected_kwargs and
               expected_version == '3.23'):
            expected_kwargs['requested_networks'] = []
            for requested_network in kwargs['requested_networks']:
                expected_kwargs['requested_networks'].append(
                    (requested_network.network_id,
                     str(requested_network.address),
                     requested_network.port_id))
        if 'host_param' in expected_kwargs:
            expected_kwargs['host'] = expected_kwargs.pop('host_param')
        else:
            expected_kwargs.pop('host', None)
        # 'destination' selects the server but is not an RPC argument.
        expected_kwargs.pop('destination', None)
        # These two methods are casts that the caller can force into calls.
        cast_and_call = ['confirm_resize', 'stop_instance']
        if rpc_method == 'call' and method in cast_and_call:
            if method == 'confirm_resize':
                kwargs['cast'] = False
            else:
                kwargs['do_cast'] = False
        # Work out which host the client should be prepared against.
        if 'host' in kwargs:
            host = kwargs['host']
        elif 'destination' in kwargs:
            host = kwargs['destination']
        elif 'instances' in kwargs:
            host = kwargs['instances'][0]['host']
        else:
            host = kwargs['instance']['host']
        # NOTE(review): contextlib.nested is Python 2 only.
        with contextlib.nested(
            mock.patch.object(rpcapi.client, rpc_method),
            mock.patch.object(rpcapi.client, 'prepare'),
            mock.patch.object(rpcapi.client, 'can_send_version'),
        ) as (
            rpc_mock, prepare_mock, csv_mock
        ):
            prepare_mock.return_value = rpcapi.client
            if 'return_bdm_object' in kwargs:
                del kwargs['return_bdm_object']
                rpc_mock.return_value = objects_block_dev.BlockDeviceMapping()
            elif rpc_method == 'call':
                rpc_mock.return_value = 'foo'
            else:
                rpc_mock.return_value = None
            # Delegate version checks to the real (unmocked) prepare().
            csv_mock.side_effect = (
                lambda v: orig_prepare(version=v).can_send_version())
            retval = getattr(rpcapi, method)(ctxt, **kwargs)
            self.assertEqual(retval, rpc_mock.return_value)
            prepare_mock.assert_called_once_with(version=expected_version,
                                                 server=host)
            rpc_mock.assert_called_once_with(ctxt, method, **expected_kwargs)
    def test_add_aggregate_host(self):
        """Verify add_aggregate_host is cast with aggregate/host/slave args."""
        self._test_compute_api('add_aggregate_host', 'cast',
            aggregate={'id': 'fake_id'}, host_param='host', host='host',
            slave_info={})
    def test_add_fixed_ip_to_instance(self):
        """Verify add_fixed_ip_to_instance is cast at RPC version 3.12."""
        self._test_compute_api('add_fixed_ip_to_instance', 'cast',
            instance=self.fake_instance_obj, network_id='id',
            version='3.12')
    def test_attach_interface(self):
        """Verify attach_interface is a call at RPC version 3.17."""
        self._test_compute_api('attach_interface', 'call',
            instance=self.fake_instance_obj, network_id='id',
            port_id='id2', version='3.17', requested_ip='192.168.1.50')
    def test_attach_volume(self):
        """Verify attach_volume is cast with a bdm at RPC version 3.16."""
        self._test_compute_api('attach_volume', 'cast',
            instance=self.fake_instance_obj, volume_id='id',
            mountpoint='mp', bdm=self.fake_volume_bdm, version='3.16')
    def test_change_instance_metadata(self):
        """Verify change_instance_metadata is cast at RPC version 3.7."""
        self._test_compute_api('change_instance_metadata', 'cast',
            instance=self.fake_instance_obj, diff={}, version='3.7')
    @mock.patch('nova.compute.rpcapi.ComputeAPI._warn_buggy_live_migrations')
    def test_check_can_live_migrate_destination(self, mock_warn):
        """Current-version destination check must not emit the old warning."""
        self._test_compute_api('check_can_live_migrate_destination', 'call',
            instance=self.fake_instance_obj,
            destination='dest', block_migration=True,
            disk_over_commit=True, version='3.32')
        self.assertFalse(mock_warn.called)
    @mock.patch('nova.compute.rpcapi.ComputeAPI._warn_buggy_live_migrations')
    def test_check_can_live_migrate_destination_old_warning(self, mock_warn):
        """Pinning to RPC 3.0 triggers the buggy-live-migrations warning."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('check_can_live_migrate_destination', 'call',
            instance=self.fake_instance_obj,
            destination='dest', block_migration=True,
            disk_over_commit=True, version='3.0')
        mock_warn.assert_called_once_with()
    @mock.patch('nova.compute.rpcapi.ComputeAPI._warn_buggy_live_migrations')
    def test_check_can_live_migrate_source(self, mock_warn):
        """Current-version source check must not emit the old warning."""
        self._test_compute_api('check_can_live_migrate_source', 'call',
            instance=self.fake_instance_obj,
            dest_check_data={"test": "data"}, version='3.32')
        self.assertFalse(mock_warn.called)
    @mock.patch('nova.compute.rpcapi.ComputeAPI._warn_buggy_live_migrations')
    def test_check_can_live_migrate_source_old_warning(self, mock_warn):
        """Pinning to RPC 3.0 triggers the buggy-live-migrations warning."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('check_can_live_migrate_source', 'call',
            instance=self.fake_instance_obj,
            dest_check_data={"test": "data"}, version='3.0')
        mock_warn.assert_called_once_with()
    def test_check_instance_shared_storage(self):
        """Verify check_instance_shared_storage is a call at version 3.29."""
        self._test_compute_api('check_instance_shared_storage', 'call',
            instance=self.fake_instance_obj, data='foo',
            version='3.29')
    def test_confirm_resize_cast(self):
        """Verify confirm_resize can be issued as a cast."""
        self._test_compute_api('confirm_resize', 'cast',
            instance=self.fake_instance_obj, migration={'id': 'foo'},
            host='host', reservations=list('fake_res'))
    def test_confirm_resize_call(self):
        """Verify confirm_resize can be issued as a call."""
        self._test_compute_api('confirm_resize', 'call',
            instance=self.fake_instance_obj, migration={'id': 'foo'},
            host='host', reservations=list('fake_res'))
    def test_detach_interface(self):
        """Verify detach_interface is cast at RPC version 3.17."""
        self._test_compute_api('detach_interface', 'cast',
            version='3.17', instance=self.fake_instance_obj,
            port_id='fake_id')
    def test_detach_volume(self):
        """Verify detach_volume is cast at RPC version 3.25."""
        self._test_compute_api('detach_volume', 'cast',
            instance=self.fake_instance_obj, volume_id='id',
            version='3.25')
    def test_finish_resize(self):
        """Verify finish_resize is cast with migration/image/disk args."""
        self._test_compute_api('finish_resize', 'cast',
            instance=self.fake_instance_obj, migration={'id': 'foo'},
            image='image', disk_info='disk_info', host='host',
            reservations=list('fake_res'))
    def test_finish_revert_resize(self):
        """Verify finish_revert_resize is cast with the expected args."""
        self._test_compute_api('finish_revert_resize', 'cast',
            instance=self.fake_instance_obj, migration={'id': 'fake_id'},
            host='host', reservations=list('fake_res'))
    def test_get_console_output(self):
        """Verify get_console_output is a call at RPC version 3.28."""
        self._test_compute_api('get_console_output', 'call',
            instance=self.fake_instance_obj, tail_length='tl',
            version='3.28')
    def test_get_console_pool_info(self):
        """Verify get_console_pool_info is a call routed to the host."""
        self._test_compute_api('get_console_pool_info', 'call',
            console_type='type', host='host')
    def test_get_console_topic(self):
        """Verify get_console_topic is a call routed to the host."""
        self._test_compute_api('get_console_topic', 'call', host='host')
    def test_get_diagnostics(self):
        """Verify get_diagnostics is a call at RPC version 3.18."""
        self._test_compute_api('get_diagnostics', 'call',
            instance=self.fake_instance_obj, version='3.18')
    def test_get_instance_diagnostics(self):
        """Verify get_instance_diagnostics is a call at RPC version 3.31.

        NOTE(review): passes ``fake_instance`` (dict) rather than
        ``fake_instance_obj`` as most sibling tests do — confirm intentional.
        """
        self._test_compute_api('get_instance_diagnostics', 'call',
            instance=self.fake_instance, version='3.31')
    def test_get_vnc_console(self):
        """Verify get_vnc_console is a call at RPC version 3.2."""
        self._test_compute_api('get_vnc_console', 'call',
            instance=self.fake_instance_obj, console_type='type',
            version='3.2')
    def test_get_spice_console(self):
        """Verify get_spice_console is a call at RPC version 3.1."""
        self._test_compute_api('get_spice_console', 'call',
            instance=self.fake_instance_obj, console_type='type',
            version='3.1')
    def test_get_rdp_console(self):
        """Verify get_rdp_console is a call at RPC version 3.10."""
        self._test_compute_api('get_rdp_console', 'call',
            instance=self.fake_instance_obj, console_type='type',
            version='3.10')
    def test_get_serial_console(self):
        """Verify get_serial_console is a call at RPC version 3.34.

        NOTE(review): passes ``fake_instance`` (dict), not the object form.
        """
        self._test_compute_api('get_serial_console', 'call',
            instance=self.fake_instance, console_type='serial',
            version='3.34')
    def test_validate_console_port(self):
        """Verify validate_console_port is a call at RPC version 3.3."""
        self._test_compute_api('validate_console_port', 'call',
            instance=self.fake_instance_obj, port="5900",
            console_type="novnc", version='3.3')
    def test_host_maintenance_mode(self):
        """Verify host_maintenance_mode is a call routed to the host."""
        self._test_compute_api('host_maintenance_mode', 'call',
            host_param='param', mode='mode', host='host')
    def test_host_power_action(self):
        """Verify host_power_action is a call routed to the host."""
        self._test_compute_api('host_power_action', 'call', action='action',
            host='host')
    def test_inject_network_info(self):
        """Verify inject_network_info is issued as a cast."""
        self._test_compute_api('inject_network_info', 'cast',
            instance=self.fake_instance_obj)
    def test_live_migration(self):
        """Verify live_migration is cast at RPC version 3.26."""
        self._test_compute_api('live_migration', 'cast',
            instance=self.fake_instance_obj, dest='dest',
            block_migration='blockity_block', host='tsoh',
            migrate_data={}, version='3.26')
    def test_post_live_migration_at_destination(self):
        """Verify post_live_migration_at_destination is cast at 3.14."""
        self._test_compute_api('post_live_migration_at_destination', 'cast',
            instance=self.fake_instance_obj,
            block_migration='block_migration', host='host', version='3.14')
    def test_pause_instance(self):
        """Verify pause_instance is issued as a cast."""
        self._test_compute_api('pause_instance', 'cast',
            instance=self.fake_instance_obj)
    def test_soft_delete_instance(self):
        """Verify soft_delete_instance is cast with quota reservations."""
        self._test_compute_api('soft_delete_instance', 'cast',
            instance=self.fake_instance_obj,
            reservations=['uuid1', 'uuid2'])
    def test_swap_volume(self):
        """Verify swap_volume is cast with old/new volume ids."""
        self._test_compute_api('swap_volume', 'cast',
            instance=self.fake_instance_obj, old_volume_id='oldid',
            new_volume_id='newid')
    def test_restore_instance(self):
        """Verify restore_instance is cast at RPC version 3.20."""
        self._test_compute_api('restore_instance', 'cast',
            instance=self.fake_instance_obj, version='3.20')
    def test_pre_live_migration(self):
        """Verify pre_live_migration is a call at RPC version 3.19."""
        self._test_compute_api('pre_live_migration', 'call',
            instance=self.fake_instance_obj,
            block_migration='block_migration', disk='disk', host='host',
            migrate_data=None, version='3.19')
    def test_prep_resize(self):
        """Verify prep_resize at both the 3.0 pin and 3.38.

        3.38 adds the clean_shutdown argument.
        """
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('prep_resize', 'cast',
            instance=self.fake_instance_obj, instance_type='fake_type',
            image='fake_image', host='host',
            reservations=list('fake_res'),
            request_spec='fake_spec',
            filter_properties={'fakeprop': 'fakeval'},
            node='node', version='3.0')
        self.flags(compute='3.38', group='upgrade_levels')
        self._test_compute_api('prep_resize', 'cast',
            instance=self.fake_instance_obj, instance_type='fake_type',
            image='fake_image', host='host',
            reservations=list('fake_res'),
            request_spec='fake_spec',
            filter_properties={'fakeprop': 'fakeval'},
            node='node', clean_shutdown=True, version='3.38')
    def test_reboot_instance(self):
        """Verify reboot_instance is cast with block_device_info/type."""
        # Show full diffs on assertion failure for this larger kwargs set.
        self.maxDiff = None
        self._test_compute_api('reboot_instance', 'cast',
            instance=self.fake_instance_obj,
            block_device_info={},
            reboot_type='type')
    def test_rebuild_instance(self):
        """Verify rebuild_instance is cast at RPC version 3.21."""
        self._test_compute_api('rebuild_instance', 'cast', new_pass='None',
            injected_files='None', image_ref='None', orig_image_ref='None',
            bdms=[], instance=self.fake_instance_obj, host='new_host',
            orig_sys_metadata=None, recreate=True, on_shared_storage=True,
            preserve_ephemeral=True, version='3.21')
    def test_reserve_block_device_name(self):
        """Verify reserve_block_device_name returns a BDM object at 3.35."""
        self._test_compute_api('reserve_block_device_name', 'call',
            instance=self.fake_instance_obj, device='device',
            volume_id='id', disk_bus='ide', device_type='cdrom',
            version='3.35', return_bdm_object=True)
def refresh_provider_fw_rules(self):
self._test_compute_api('refresh_provider_fw_rules', 'cast',
host='host')
    def test_refresh_security_group_rules(self):
        """Verify refresh_security_group_rules casts via SecurityGroupAPI."""
        self._test_compute_api('refresh_security_group_rules', 'cast',
            rpcapi_class=compute_rpcapi.SecurityGroupAPI,
            security_group_id='id', host='host')
    def test_refresh_security_group_members(self):
        """Verify refresh_security_group_members casts via SecurityGroupAPI."""
        self._test_compute_api('refresh_security_group_members', 'cast',
            rpcapi_class=compute_rpcapi.SecurityGroupAPI,
            security_group_id='id', host='host')
    def test_remove_aggregate_host(self):
        """Verify remove_aggregate_host is cast with the expected args."""
        self._test_compute_api('remove_aggregate_host', 'cast',
            aggregate={'id': 'fake_id'}, host_param='host', host='host',
            slave_info={})
    def test_remove_fixed_ip_from_instance(self):
        """Verify remove_fixed_ip_from_instance is cast at version 3.13."""
        self._test_compute_api('remove_fixed_ip_from_instance', 'cast',
            instance=self.fake_instance_obj, address='addr',
            version='3.13')
    def test_remove_volume_connection(self):
        """Verify remove_volume_connection is a call at RPC version 3.30.

        NOTE(review): passes ``fake_instance`` (dict), not the object form.
        """
        self._test_compute_api('remove_volume_connection', 'call',
            instance=self.fake_instance, volume_id='id', host='host',
            version='3.30')
    def test_rescue_instance(self):
        """Verify rescue_instance across version pins 3.9, 3.24 and 3.37.

        3.24 adds rescue_image_ref; 3.37 adds clean_shutdown.
        """
        self.flags(compute='3.9', group='upgrade_levels')
        self._test_compute_api('rescue_instance', 'cast',
            instance=self.fake_instance_obj, rescue_password='pw',
            version='3.9')
        self.flags(compute='3.24', group='upgrade_levels')
        self._test_compute_api('rescue_instance', 'cast',
            instance=self.fake_instance_obj, rescue_password='pw',
            rescue_image_ref='fake_image_ref', version='3.24')
        self.flags(compute='3.37', group='upgrade_levels')
        self._test_compute_api('rescue_instance', 'cast',
            instance=self.fake_instance_obj, rescue_password='pw',
            rescue_image_ref='fake_image_ref',
            clean_shutdown=True, version='3.37')
    def test_reset_network(self):
        """Verify reset_network is issued as a cast."""
        self._test_compute_api('reset_network', 'cast',
            instance=self.fake_instance_obj)
    def test_resize_instance(self):
        """Verify resize_instance at pins 3.0 and 3.37 (clean_shutdown)."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('resize_instance', 'cast',
            instance=self.fake_instance_obj, migration={'id': 'fake_id'},
            image='image', instance_type={'id': 1},
            reservations=list('fake_res'), version='3.0')
        self.flags(compute='3.37', group='upgrade_levels')
        self._test_compute_api('resize_instance', 'cast',
            instance=self.fake_instance_obj, migration={'id': 'fake_id'},
            image='image', instance_type={'id': 1},
            reservations=list('fake_res'),
            clean_shutdown=True, version='3.37')
    def test_resume_instance(self):
        """Verify resume_instance is issued as a cast."""
        self._test_compute_api('resume_instance', 'cast',
            instance=self.fake_instance_obj)
    def test_revert_resize(self):
        """Verify revert_resize is cast with migration and reservations."""
        self._test_compute_api('revert_resize', 'cast',
            instance=self.fake_instance_obj, migration={'id': 'fake_id'},
            host='host', reservations=list('fake_res'))
    @mock.patch('nova.compute.rpcapi.ComputeAPI._warn_buggy_live_migrations')
    def test_rollback_live_migration_at_destination(self, mock_warn):
        """Current-version rollback must not emit the old warning."""
        self._test_compute_api('rollback_live_migration_at_destination',
            'cast', instance=self.fake_instance_obj, host='host',
            destroy_disks=True, migrate_data=None, version='3.32')
        self.assertFalse(mock_warn.called)
    @mock.patch('nova.compute.rpcapi.ComputeAPI._warn_buggy_live_migrations')
    def test_rollback_live_migration_at_destination_old_warning(self,
            mock_warn):
        """Pinning to RPC 3.0 triggers the warning (called with None)."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('rollback_live_migration_at_destination',
            'cast', instance=self.fake_instance_obj, host='host',
            version='3.0')
        mock_warn.assert_called_once_with(None)
    def test_run_instance(self):
        """Verify run_instance is cast at RPC version 3.27."""
        self._test_compute_api('run_instance', 'cast',
            instance=self.fake_instance_obj, host='fake_host',
            request_spec='fake_spec', filter_properties={},
            requested_networks='networks', injected_files='files',
            admin_password='pw', is_first_time=True, node='node',
            legacy_bdm_in_spec=False, version='3.27')
    def test_set_admin_password(self):
        """Verify set_admin_password is a call at RPC version 3.8."""
        self._test_compute_api('set_admin_password', 'call',
            instance=self.fake_instance_obj, new_pass='pw',
            version='3.8')
    def test_set_host_enabled(self):
        """Verify set_host_enabled is a call routed to the host."""
        self._test_compute_api('set_host_enabled', 'call',
            enabled='enabled', host='host')
    def test_get_host_uptime(self):
        """Verify get_host_uptime is a call routed to the host."""
        self._test_compute_api('get_host_uptime', 'call', host='host')
    def test_backup_instance(self):
        """Verify backup_instance is cast with image/backup args."""
        self._test_compute_api('backup_instance', 'cast',
            instance=self.fake_instance_obj, image_id='id',
            backup_type='type', rotation='rotation')
    def test_snapshot_instance(self):
        """Verify snapshot_instance is cast with an image id."""
        self._test_compute_api('snapshot_instance', 'cast',
            instance=self.fake_instance_obj, image_id='id')
    def test_start_instance(self):
        """Verify start_instance is issued as a cast."""
        self._test_compute_api('start_instance', 'cast',
            instance=self.fake_instance_obj)
    def test_stop_instance_cast(self):
        """Verify stop_instance cast at pins 3.0 and 3.37 (clean_shutdown)."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('stop_instance', 'cast',
            instance=self.fake_instance_obj, version='3.0')
        self.flags(compute='3.37', group='upgrade_levels')
        self._test_compute_api('stop_instance', 'cast',
            instance=self.fake_instance_obj,
            clean_shutdown=True, version='3.37')
    def test_stop_instance_call(self):
        """Verify stop_instance call at pins 3.0 and 3.37 (clean_shutdown)."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('stop_instance', 'call',
            instance=self.fake_instance_obj, version='3.0')
        self.flags(compute='3.37', group='upgrade_levels')
        self._test_compute_api('stop_instance', 'call',
            instance=self.fake_instance_obj,
            clean_shutdown=True, version='3.37')
    def test_suspend_instance(self):
        """Verify suspend_instance is issued as a cast."""
        self._test_compute_api('suspend_instance', 'cast',
            instance=self.fake_instance_obj)
    def test_terminate_instance(self):
        """Verify terminate_instance is cast at RPC version 3.22."""
        self._test_compute_api('terminate_instance', 'cast',
            instance=self.fake_instance_obj, bdms=[],
            reservations=['uuid1', 'uuid2'], version='3.22')
    def test_unpause_instance(self):
        """Verify unpause_instance is issued as a cast."""
        self._test_compute_api('unpause_instance', 'cast',
            instance=self.fake_instance_obj)
    def test_unrescue_instance(self):
        """Verify unrescue_instance is cast at RPC version 3.11."""
        self._test_compute_api('unrescue_instance', 'cast',
            instance=self.fake_instance_obj, version='3.11')
    def test_shelve_instance(self):
        """Verify shelve_instance at pins 3.0 and 3.37 (clean_shutdown)."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('shelve_instance', 'cast',
            instance=self.fake_instance_obj, image_id='image_id',
            version='3.0')
        self.flags(compute='3.37', group='upgrade_levels')
        self._test_compute_api('shelve_instance', 'cast',
            instance=self.fake_instance_obj, image_id='image_id',
            clean_shutdown=True, version='3.37')
    def test_shelve_offload_instance(self):
        """Verify shelve_offload_instance at pins 3.0 and 3.37."""
        self.flags(compute='3.0', group='upgrade_levels')
        self._test_compute_api('shelve_offload_instance', 'cast',
            instance=self.fake_instance_obj,
            version='3.0')
        self.flags(compute='3.37', group='upgrade_levels')
        self._test_compute_api('shelve_offload_instance', 'cast',
            instance=self.fake_instance_obj,
            clean_shutdown=True, version='3.37')
    def test_unshelve_instance(self):
        """Verify unshelve_instance is cast at RPC version 3.15."""
        self._test_compute_api('unshelve_instance', 'cast',
            instance=self.fake_instance_obj, host='host', image='image',
            filter_properties={'fakeprop': 'fakeval'}, node='node',
            version='3.15')
    def test_volume_snapshot_create(self):
        """Verify volume_snapshot_create is cast at RPC version 3.6.

        NOTE(review): passes ``fake_instance`` (dict), not the object form.
        """
        self._test_compute_api('volume_snapshot_create', 'cast',
            instance=self.fake_instance, volume_id='fake_id',
            create_info={}, version='3.6')
    def test_volume_snapshot_delete(self):
        """Verify volume_snapshot_delete is cast at RPC version 3.6."""
        self._test_compute_api('volume_snapshot_delete', 'cast',
            instance=self.fake_instance_obj, volume_id='fake_id',
            snapshot_id='fake_id2', delete_info={}, version='3.6')
    def test_external_instance_event(self):
        """Verify external_instance_event is cast at RPC version 3.23."""
        self._test_compute_api('external_instance_event', 'cast',
            instances=[self.fake_instance_obj],
            events=['event'],
            version='3.23')
    def test_build_and_run_instance(self):
        """Verify build_and_run_instance is cast at RPC version 3.36."""
        self._test_compute_api('build_and_run_instance', 'cast',
            instance=self.fake_instance_obj, host='host', image='image',
            request_spec={'request': 'spec'}, filter_properties=[],
            admin_password='passwd', injected_files=None,
            requested_networks=['network1'], security_groups=None,
            block_device_mapping=None, node='node', limits=[],
            version='3.36')
@mock.patch('nova.utils.is_neutron', return_value=True)
def test_build_and_run_instance_icehouse_compat(self, is_neutron):
self.flags(compute='icehouse', group='upgrade_levels')
self._test_compute_api('build_and_run_instance', 'cast',
instance=self.fake_instance_obj, host='host', image='image',
request_spec={'request': 'spec'}, filter_properties=[],
admin_password='passwd', injected_files=None,
requested_networks= objects_network_request.NetworkRequestList(
objects=[objects_network_request.NetworkRequest(
network_id="fake_network_id", address="10.0.0.1",
port_id="fake_port_id")]),
security_groups=None,
block_device_mapping=None, node='node', limits=[],
version='3.23')
    def test_quiesce_instance(self):
        """Verify quiesce_instance is a call at RPC version 3.39."""
        self._test_compute_api('quiesce_instance', 'call',
            instance=self.fake_instance_obj, version='3.39')
    def test_unquiesce_instance(self):
        """Verify unquiesce_instance is cast at RPC version 3.39."""
        self._test_compute_api('unquiesce_instance', 'cast',
            instance=self.fake_instance_obj, mapping=None, version='3.39')
| |
# -*- coding: UTF-8 -*-
"""Definitions for `Nester` class."""
import gc
import pickle
import sys
import numpy as np
from astrocats.catalog.model import MODEL
from astrocats.catalog.quantity import QUANTITY
from mosfit.samplers.sampler import Sampler
from mosfit.utils import pretty_num
class Nester(Sampler):
    """Fit transient events with the provided model."""
    # Class constants; not referenced within this class body — presumably
    # consumed by the base Sampler or callers. TODO confirm.
    _MAX_ACORC = 5
    _REPLACE_AGE = 20
    def __init__(
        self, fitter, model=None, iterations=2000, burn=None, post_burn=None,
        num_walkers=None, convergence_criteria=None,
        convergence_type='psrf', gibbs=False, fracking=True,
        frack_step=20, **kwargs):
        """Initialize `Nester` class.

        `convergence_criteria` is reused both as dynesty's initial dlogz
        threshold and as the posterior stopping threshold in `run`.
        """
        super(Nester, self).__init__(fitter, num_walkers=num_walkers, **kwargs)
        self._model = model
        self._iterations = iterations
        self._burn = burn
        self._post_burn = post_burn
        self._cc = convergence_criteria
        self._ct = convergence_type
        self._gibbs = gibbs
        self._fracking = fracking
        self._frack_step = frack_step
        self._upload_model = None
        # Nested sampling runs a single temperature.
        self._ntemps = 1
    def _get_best_kmat(self):
        """Get the kernel matrix associated with best current scoring model."""
        # Re-run the model stack at the sample with the maximum
        # log-likelihood found so far.
        sout = self._model.run_stack(
            self._results.samples[np.unravel_index(
                np.argmax(self._results.logl),
                self._results.logl.shape)],
            root='objective')
        kmat = sout.get('kmat')
        kdiag = sout.get('kdiagonal')
        variance = sout.get('obandvs', sout.get('variance'))
        # Fold the diagonal term into the kernel matrix when both exist;
        # otherwise build a diagonal kernel from kdiag + variance.
        if kdiag is not None and kmat is not None:
            kmat[np.diag_indices_from(kmat)] += kdiag
        elif kdiag is not None and kmat is None:
            kmat = np.diag(kdiag + variance)
        return kmat
    def append_output(self, modeldict):
        """Append output from the nester to the model description."""
        # Score is the log-evidence (log Z) with its error estimate.
        modeldict[MODEL.SCORE] = {
            QUANTITY.VALUE: pretty_num(self._logz, sig=6),
            QUANTITY.E_VALUE: pretty_num(self._e_logz, sig=6),
            QUANTITY.KIND: 'Log(z)'
        }
        modeldict[MODEL.STEPS] = str(self._niter)
    def prepare_output(self, check_upload_quality, upload):
        """Prepare output for writing to disk and uploading."""
        self._pout = [self._results.samples]
        self._lnprobout = [self._results.logl]
        # Normalized importance weights from dynesty's log-weights
        # (shifted by the max for numerical stability).
        self._weights = [np.exp(self._results.logwt - max(
            self._results.logwt))]
        tweight = np.sum(self._weights)
        self._weights = [x / tweight for x in self._weights]
        if check_upload_quality:
            pass
    def run(self, walker_data):
        """Use nested sampling to determine posteriors."""
        from dynesty import DynamicNestedSampler
        from dynesty.dynamicsampler import stopping_function, weight_function
        from mosfit.fitter import ln_likelihood, draw_from_icdf
        prt = self._printer
        # Nested sampling does not resume from walker positions.
        if len(walker_data):
            prt.message('nester_not_use_walkers', warning=True)
        ndim = self._model._num_free_parameters
        if self._num_walkers:
            self._nwalkers = self._num_walkers
        else:
            self._nwalkers = 2 * ndim
        self._nlive = 20 * ndim
        self._lnprob = None
        self._lnlike = None
        prt.message('nmeas_nfree', [self._model._num_measurements, ndim])
        nested_dlogz_init = self._cc
        post_thresh = self._cc
        # With a convergence type set, iterate until convergence instead of
        # a fixed iteration budget.
        max_iter = self._iterations if self._ct is None else np.inf
        if max_iter <= 0:
            return
        s_exception = None
        iter_denom = None if self._ct is not None else self._iterations
        # Save a few things from the dynesty run for diagnostic purposes.
        scales = []
        try:
            sampler = DynamicNestedSampler(
                ln_likelihood, draw_from_icdf, ndim,
                pool=self._pool, sample='rwalk',
                queue_size=max(self._pool.size, 1))
            # Perform initial sample.
            ncall = sampler.ncall
            self._niter = sampler.it - 1
            for li, res in enumerate(sampler.sample_initial(
                dlogz=nested_dlogz_init, nlive=self._nlive
            )):
                ncall0 = ncall
                (worst, ustar, vstar, loglstar, logvol,
                 logwt, self._logz, logzvar, h, nc, worst_it,
                 propidx, propiter, eff, delta_logz) = res
                ncall += nc
                self._niter += 1
                max_iter -= 1
                if max_iter < 0:
                    break
                if (self._fitter._maximum_walltime is not False and
                        self.time_running() >
                        self._fitter._maximum_walltime):
                    prt.message('exceeded_walltime', warning=True)
                    break
                self._results = sampler.results
                scales.append(sampler.results.scale)
                kmat = self._get_best_kmat()
                # The above added 1 call.
                ncall += 1
                self._e_logz = np.sqrt(logzvar)
                prt.status(
                    self, 'baseline', kmat=kmat,
                    iterations=[self._niter, iter_denom],
                    nc=ncall - ncall0, ncall=ncall, eff=eff,
                    logz=[self._logz, self._e_logz,
                          delta_logz, nested_dlogz_init],
                    loglstar=[loglstar],
                    time_running=self.time_running(),
                    maximum_walltime=self._fitter._maximum_walltime)
            if max_iter >= 0:
                prt.status(
                    self, 'starting_batches', kmat=kmat,
                    iterations=[self._niter, iter_denom],
                    nc=ncall - ncall0, ncall=ncall, eff=eff,
                    logz=[self._logz, self._e_logz,
                          delta_logz, nested_dlogz_init],
                    loglstar=[loglstar],
                    time_running=self.time_running(),
                    maximum_walltime=self._fitter._maximum_walltime)
            # Add batches of live points until the stopping function is
            # satisfied or the iteration/walltime budget is exhausted.
            n = 0
            while max_iter >= 0:
                n += 1
                if (self._fitter._maximum_walltime is not False and
                        self.time_running() >
                        self._fitter._maximum_walltime):
                    prt.message('exceeded_walltime', warning=True)
                    break
                self._results = sampler.results
                scales.append(sampler.results.scale)
                stop, stop_vals = stopping_function(
                    self._results, return_vals=True, args={
                        'post_thresh': post_thresh})
                stop_post, stop_evid, stop_val = stop_vals
                if not stop:
                    logl_bounds = weight_function(self._results)
                    self._logz, self._e_logz = self._results.logz[
                        -1], self._results.logzerr[-1]
                    for res in sampler.sample_batch(
                        logl_bounds=logl_bounds,
                        nlive_new=int(np.ceil(self._nlive / 2))):
                        (worst, ustar, vstar, loglstar, nc,
                         worst_it, propidx, propiter, eff) = res
                        ncall0 = ncall
                        ncall += nc
                        self._niter += 1
                        max_iter -= 1
                        self._results = sampler.results
                        kmat = self._get_best_kmat()
                        # The above added 1 call.
                        ncall += 1
                        prt.status(
                            self, 'batching', kmat=kmat,
                            iterations=[self._niter, iter_denom],
                            batch=n, nc=ncall - ncall0, ncall=ncall, eff=eff,
                            logz=[self._logz, self._e_logz], loglstar=[
                                logl_bounds[0], loglstar,
                                logl_bounds[1]], stop=stop_val,
                            time_running=self.time_running(),
                            maximum_walltime=self._fitter._maximum_walltime)
                        if max_iter < 0:
                            break
                    sampler.combine_runs()
                else:
                    break
                # self._results.summary()
                # prt.nester_status(self, desc='sampling')
        except (KeyboardInterrupt, SystemExit):
            prt.message('ctrl_c', error=True, prefix=False, color='!r')
            s_exception = sys.exc_info()
        except Exception:
            # Dump diagnostics before re-raising unexpected errors.
            print('Scale history:')
            print(scales)
            pickle.dump(sampler.results, open(
                self._fitter._event_name + '-dynesty.pickle', 'wb'))
            self._pool.close()
            raise
        if max_iter < 0:
            prt.message('max_iter')
        # On Ctrl-C, let the user choose whether to keep partial results.
        if s_exception is not None:
            self._pool.close()
            if (not prt.prompt('mc_interrupted')):
                sys.exit()
        sampler.reset()
        gc.collect()
| |
"""
Functions for creating and restoring url-safe signed JSON objects.
The format used looks like this:
>>> signing.dumps("hello")
'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk'
There are two components here, separated by a ':'. The first component is a
URLsafe base64 encoded JSON of the object passed to dumps(). The second
component is a base64 encoded hmac/SHA1 hash of "$first_component:$secret"
signing.loads(s) checks the signature and returns the deserialised object.
If the signature fails, a BadSignature exception is raised.
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk")
u'hello'
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified")
...
BadSignature: Signature failed: ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified
You can optionally compress the JSON prior to base64 encoding it to save
space, using the compress=True argument. This checks if compression actually
helps and only applies compression if the result is a shorter string:
>>> signing.dumps(range(1, 20), compress=True)
'.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ'
The fact that the string is compressed is signalled by the prefixed '.' at the
start of the base64 JSON.
There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'.
These functions make use of all of them.
"""
from __future__ import unicode_literals
import base64
import json
import time
import zlib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils import baseconv
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.importlib import import_module
class BadSignature(Exception):
    """Raised when a signature does not match its value."""
class SignatureExpired(BadSignature):
    """Raised when a signature's timestamp is older than the required max_age."""
def b64_encode(s):
    """URL-safe base64 encode *s*, with the trailing '=' padding removed."""
    encoded = base64.urlsafe_b64encode(s)
    return encoded.strip(b'=')
def b64_decode(s):
    """Decode url-safe base64 *s*, restoring any stripped '=' padding first."""
    padding = b'=' * (-len(s) % 4)
    return base64.urlsafe_b64decode(s + padding)
def base64_hmac(salt, value, key):
    """Return the url-safe base64 encoded HMAC digest of *value*."""
    digest = salted_hmac(salt, value, key).digest()
    return b64_encode(digest)
def get_cookie_signer(salt='django.core.signing.get_cookie_signer'):
    """Instantiate the signer class named by settings.SIGNING_BACKEND.

    The signer is keyed on SECRET_KEY (namespaced for cookies) and the
    given salt. Raises ImproperlyConfigured if the backend path is bad.
    """
    modpath = settings.SIGNING_BACKEND
    module, attr = modpath.rsplit('.', 1)
    try:
        mod = import_module(module)
    except ImportError as err:
        raise ImproperlyConfigured(
            'Error importing cookie signer %s: "%s"' % (modpath, err))
    try:
        Signer = getattr(mod, attr)
    except AttributeError as err:
        raise ImproperlyConfigured(
            'Error importing cookie signer %s: "%s"' % (modpath, err))
    return Signer('django.http.cookies' + settings.SECRET_KEY, salt=salt)
class JSONSerializer(object):
    """
    Simple wrapper around json to be used in signing.dumps and
    signing.loads.
    """
    def dumps(self, obj):
        """Serialize *obj* to compact JSON, returned as latin-1 bytes."""
        compact = json.dumps(obj, separators=(',', ':'))
        return compact.encode('latin-1')

    def loads(self, data):
        """Deserialize latin-1 JSON bytes back into a Python object."""
        return json.loads(data.decode('latin-1'))
def dumps(obj, key=None, salt='django.core.signing', serializer=JSONSerializer, compress=False):
    """
    Return a URL-safe, signed base64 JSON string for *obj*.

    Uses settings.SECRET_KEY when *key* is None. With compress=True the
    payload is zlib-compressed only if that actually shortens it, and a
    '.' prefix (covered by the signature, guarding against zip bombs)
    marks the compressed form. Always use a distinct *salt* per use case.

    The serializer is expected to return a bytestring.
    """
    data = serializer().dumps(obj)
    is_compressed = False
    if compress:
        # Avoid the zlib dependency unless compression is requested; keep
        # the compressed form only when it pays for the 1-byte '.' prefix.
        candidate = zlib.compress(data)
        if len(candidate) < (len(data) - 1):
            data = candidate
            is_compressed = True
    base64d = b64_encode(data)
    if is_compressed:
        base64d = b'.' + base64d
    return TimestampSigner(key, salt=salt).sign(base64d)
def loads(s, key=None, salt='django.core.signing', serializer=JSONSerializer, max_age=None):
    """
    Reverse of dumps(); raises BadSignature if the signature fails.

    The serializer is expected to accept a bytestring.
    """
    # TimestampSigner.unsign always returns unicode, but base64 and zlib
    # operate on bytes, so coerce before decoding.
    base64d = force_bytes(
        TimestampSigner(key, salt=salt).unsign(s, max_age=max_age))
    is_compressed = base64d[:1] == b'.'
    if is_compressed:
        # Strip the '.' marker before base64-decoding.
        base64d = base64d[1:]
    data = b64_decode(base64d)
    if is_compressed:
        data = zlib.decompress(data)
    return serializer().loads(data)
class Signer(object):
    """Sign and verify strings with an HMAC derived from key and salt.

    The signed format is ``value<sep>signature``. The default salt
    namespaces the HMAC per signer class.
    """
    def __init__(self, key=None, sep=':', salt=None):
        # Use of native strings in all versions of Python
        self.sep = str(sep)
        # Fall back to the project-wide SECRET_KEY when no key is given.
        self.key = str(key or settings.SECRET_KEY)
        self.salt = str(salt or
            '%s.%s' % (self.__class__.__module__, self.__class__.__name__))
    def signature(self, value):
        """Return the base64 HMAC signature of *value*."""
        signature = base64_hmac(self.salt + 'signer', value, self.key)
        # Convert the signature from bytes to str only on Python 3
        return force_str(signature)
    def sign(self, value):
        """Return *value* with its signature appended after ``sep``."""
        value = force_str(value)
        return str('%s%s%s') % (value, self.sep, self.signature(value))
    def unsign(self, signed_value):
        """Return the original value if the signature verifies.

        Raises BadSignature when ``sep`` is missing or the signature does
        not match (compared in constant time).
        """
        signed_value = force_str(signed_value)
        # PEP 8 idiom: 'not in' rather than 'not x in y'.
        if self.sep not in signed_value:
            raise BadSignature('No "%s" found in value' % self.sep)
        value, sig = signed_value.rsplit(self.sep, 1)
        if constant_time_compare(sig, self.signature(value)):
            return force_text(value)
        raise BadSignature('Signature "%s" does not match' % sig)
class TimestampSigner(Signer):
    """A Signer that embeds a base62 timestamp alongside the value.

    Lets unsign() reject signatures older than ``max_age`` seconds.
    """
    def timestamp(self):
        """Return the current UNIX time, base62-encoded."""
        return baseconv.base62.encode(int(time.time()))
    def sign(self, value):
        """Append the current timestamp to *value*, then sign the pair."""
        stamped = str('%s%s%s') % (
            force_str(value), self.sep, self.timestamp())
        return super(TimestampSigner, self).sign(stamped)
    def unsign(self, value, max_age=None):
        """
        Retrieve original value and check it wasn't signed more
        than max_age seconds ago.
        """
        unsigned = super(TimestampSigner, self).unsign(value)
        original, stamp = unsigned.rsplit(self.sep, 1)
        stamp = baseconv.base62.decode(stamp)
        if max_age is not None:
            # Check timestamp is not older than max_age
            age = time.time() - stamp
            if age > max_age:
                raise SignatureExpired(
                    'Signature age %s > %s seconds' % (age, max_age))
        return original
| |
'''
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
'''
import fcntl
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTShadowClient
import termios
import os
import sys
import logging
import time
import getopt
import json
class FireSprinkler:
    ''' Fire sprinkler device with a smoke sensor reading. '''
    def __init__(self, name):
        self._name = name
        self._reading = 0
        self._state = 'Deactivated'
    def readingMessage(self):
        ''' retrieve a message describing internal sensor reading '''
        return "{0}: smoke level at {1}".format(self._name, self._reading)
    def readingPayload(self):
        ''' retrieve a JSON payload describing internal sensor reading '''
        return '{{"smoke": {0}}}'.format(self._reading)
    def setSmoke(self, smoke):
        ''' record a new smoke sensor reading '''
        self._reading = smoke
# Shadow JSON schema:
#
# Name: Bot
# {
#     "state": {
#         "desired": {
#             "sprinkler": <INT VALUE>
#         }
#     }
# }
# Custom Shadow callback
def customShadowCallback_Update(payload, responseStatus, token):
    """Report the outcome of a shadow update request."""
    # payload is a JSON string ready to be parsed using json.loads(...)
    # in both Py2.x and Py3.x
    if responseStatus == "timeout":
        print("Update request " + token + " time out!")
    elif responseStatus == "accepted":
        payloadDict = json.loads(payload)
        print("~~~~~~~~~~~~~~~~~~~~~~~")
        print("Update request with token: " + token + " accepted!")
        print("sprinkler: " +
              str(payloadDict["state"]["desired"]["sprinkler"]))
        print("~~~~~~~~~~~~~~~~~~~~~~~\n\n")
    elif responseStatus == "rejected":
        print("Update request " + token + " rejected!")
def customShadowCallback_Delete(payload, responseStatus, token):
    """Report the outcome of a shadow delete request."""
    if responseStatus == "timeout":
        print("Delete request " + token + " time out!")
    elif responseStatus == "accepted":
        print("~~~~~~~~~~~~~~~~~~~~~~~")
        print("Delete request with token: " + token + " accepted!")
        print("~~~~~~~~~~~~~~~~~~~~~~~\n\n")
    elif responseStatus == "rejected":
        print("Delete request " + token + " rejected!")
def customShadowCallback_Delta(payload, responseStatus, token):
    """Print the delta document pushed by the device shadow."""
    # payload is a JSON string ready to be parsed using json.loads(...)
    # in both Py2.x and Py3.x
    print(responseStatus)
    delta = json.loads(payload)
    print("++++++++DELTA++++++++++")
    print("sprinkler: " + str(delta["state"]["sprinkler"]))
    print("version: " + str(delta["version"]))
    print("+++++++++++++++++++++++\n\n")
# Custom MQTT message callback
def customCallback(client, userdata, message):
print("Received a new message: ")
print(message.payload)
print("from topic: ")
print(message.topic)
print("--------------\n\n")
# Usage text printed when command-line parsing fails below.
usageInfo = """Usage:
Use certificate based mutual authentication:
python fireSprinkler.py -e <endpoint> -r <rootCAFilePath> -c <certFilePath> -k <privateKeyFilePath>
Use MQTT over WebSocket:
python fireSprinkler.py -e <endpoint> -r <rootCAFilePath> -w
Type "python fireSprinkler.py -h" for available options.
"""
# Help text printed when -h/--help is given.
helpInfo = """-e, --endpoint
Your AWS IoT custom endpoint
-r, --rootCA
Root CA file path
-c, --cert
Certificate file path
-k, --key
Private key file path
-w, --websocket
Use MQTT over WebSocket
-h, --help
Help information
"""
# Read in command-line parameters
useWebsocket = False
host = ""
rootCAPath = ""
certificatePath = ""
privateKeyPath = ""
try:
    opts, args = getopt.getopt(sys.argv[1:], "hwe:k:c:r:", [
        "help", "endpoint=", "key=", "cert=", "rootCA=", "websocket"])
    if len(opts) == 0:
        # Treat a bare invocation the same as a parse error.
        raise getopt.GetoptError("No input parameters!")
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            print(helpInfo)
            exit(0)
        if opt in ("-e", "--endpoint"):
            host = arg
        if opt in ("-r", "--rootCA"):
            rootCAPath = arg
        if opt in ("-c", "--cert"):
            certificatePath = arg
        if opt in ("-k", "--key"):
            privateKeyPath = arg
        if opt in ("-w", "--websocket"):
            useWebsocket = True
except getopt.GetoptError:
    print(usageInfo)
    exit(1)
# Missing configuration notification
missingConfiguration = False
if not host:
    print("Missing '-e' or '--endpoint'")
    missingConfiguration = True
if not rootCAPath:
    print("Missing '-r' or '--rootCA'")
    missingConfiguration = True
if not useWebsocket:
    # Certificate-based auth additionally requires a cert and private key.
    if not certificatePath:
        print("Missing '-c' or '--cert'")
        missingConfiguration = True
    if not privateKeyPath:
        print("Missing '-k' or '--key'")
        missingConfiguration = True
if missingConfiguration:
    exit(2)
# Configure logging for the AWS IoT SDK core (errors only, to stderr).
logger = logging.getLogger("AWSIoTPythonSDK.core")
logger.setLevel(logging.ERROR)
streamHandler = logging.StreamHandler()
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
streamHandler.setFormatter(formatter)
logger.addHandler(streamHandler)
# Init AWSIoTMQTTShadowClient
myAWSIoTMQTTShadowClient = None
if useWebsocket:
    # MQTT over WebSocket on port 443 needs only the root CA.
    myAWSIoTMQTTShadowClient = AWSIoTMQTTShadowClient(
        "kit-fs-001", useWebsocket=True)
    myAWSIoTMQTTShadowClient.configureEndpoint(host, 443)
    myAWSIoTMQTTShadowClient.configureCredentials(rootCAPath)
else:
    # Certificate-based mutual TLS on port 8883.
    myAWSIoTMQTTShadowClient = AWSIoTMQTTShadowClient("kit-fs-001")
    myAWSIoTMQTTShadowClient.configureEndpoint(host, 8883)
    myAWSIoTMQTTShadowClient.configureCredentials(
        rootCAPath, privateKeyPath, certificatePath)
myAWSIoTMQTTClient = myAWSIoTMQTTShadowClient.getMQTTConnection()
# AWSIoTMQTTClient connection configuration
myAWSIoTMQTTShadowClient.configureAutoReconnectBackoffTime(1, 32, 20)
# Infinite offline Publish queueing
myAWSIoTMQTTClient.configureOfflinePublishQueueing(-1)
myAWSIoTMQTTClient.configureDrainingFrequency(2)  # Draining: 2 Hz
myAWSIoTMQTTShadowClient.configureConnectDisconnectTimeout(10)  # 10 sec
myAWSIoTMQTTShadowClient.configureMQTTOperationTimeout(5)  # 5 sec
# AWSIoTMQTTShadowClient configuration
# NOTE(review): the three calls below repeat the shadow-client settings
# made just above; harmless but redundant.
myAWSIoTMQTTShadowClient.configureAutoReconnectBackoffTime(1, 32, 20)
myAWSIoTMQTTShadowClient.configureConnectDisconnectTimeout(10)  # 10 sec
myAWSIoTMQTTShadowClient.configureMQTTOperationTimeout(5)  # 5 sec
# Create a device
device = FireSprinkler("kit-fs-001")
# Connect and subscribe to AWS IoT
myAWSIoTMQTTShadowClient.connect()
myAWSIoTMQTTClient.subscribe("office/kitchen/kit-fs-001", 1, customCallback)
time.sleep(2)
# Create a deviceShadow with persistent subscription
Bot = myAWSIoTMQTTShadowClient.createShadowHandlerWithName(
    "kit-fs-001", True)
# Delete shadow JSON doc
#Bot.shadowDelete(customShadowCallback_Delete, 5)
# Reset shadow doc
JSONPayload = '{"state":{"desired":{"sprinkler":"deactivated"}}}'
#Bot.shadowUpdate(JSONPayload, customShadowCallback_Update, 5)
# Listen on deltas
Bot.shadowRegisterDeltaCallback(customShadowCallback_Delta)
def get_char_keyboard_nonblock():
    """Read a single character from stdin without blocking.

    Temporarily switches the controlling terminal to non-canonical,
    no-echo mode and makes stdin non-blocking, restoring both settings
    before returning.  Returns the character read, or None if no key
    was pressed.
    """
    fd = sys.stdin.fileno()
    # Save the terminal attributes so they can be restored on exit.
    saved_term = termios.tcgetattr(fd)
    raw_term = termios.tcgetattr(fd)
    raw_term[3] = raw_term[3] & ~termios.ICANON & ~termios.ECHO
    termios.tcsetattr(fd, termios.TCSANOW, raw_term)
    # Save the fd flags and switch stdin to non-blocking mode.
    saved_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, saved_flags | os.O_NONBLOCK)
    ch = None
    try:
        ch = sys.stdin.read(1)
    except IOError:
        # No input available in non-blocking mode.
        pass
    # Restore the original terminal attributes and fd flags.
    termios.tcsetattr(fd, termios.TCSAFLUSH, saved_term)
    fcntl.fcntl(fd, fcntl.F_SETFL, saved_flags)
    return ch
def testSmoke(device):
    """Raise the device smoke level if the space bar has been pressed."""
    key = get_char_keyboard_nonblock()
    if key == " ":
        print("somethings burning...")
        device.setSmoke(1)
# Publish messages in a loop
loopCount = 0
while True:
    # Bump the smoke level if the space bar was pressed since last poll.
    testSmoke(device)
    print("Publishing message to office/kitchen: " + device.readingMessage())
    myAWSIoTMQTTClient.publish(
        "office/kitchen", device.readingPayload(), 1)
    loopCount += 1
    # Reset the smoke level so a detection is reported only once.
    device.setSmoke(0)
    time.sleep(1)
| |
__author__ = 'oonarfiandwi'
from flask import Flask
from flask import request
from google.appengine.api import taskqueue, memcache
from google.appengine.datastore import entity_pb
from google.appengine.ext import ndb
from model_profile import Profile
from datetime import datetime, timedelta
from os import environ
from logging import error
from People import People
import simplejson as json
"""
this should be data from another API (contact circlecount? http://www.circlecount.com/id/profileslist/)
"""
indonesia_ids = \
['107432398539341365940', '107625144935146230047', '106601693943516044496', '104448874055406535418',
'112354545929966456048', '114384982500350295512', '108286657063314289856', '100376954084559205891',
'116869234231593219709', '100558479203992663037', '118252948887947024512', '113394938579974212693',
'112638457196220461637', '101810166866201144931', '106939868938222633265', '102822538152349669747',
'107507070338583266314', '102024051895024187351', '117108236116237500441', '118214294405590069582',
'105628667513453263078', '113811258740009429740', '104954947323368041426', '118343307903587596704',
'101778343594604092334', '105924429773397501596', '111156704479775949627', '104879387838839120510',
'102250841784466654092', '113014617490520808583', '101928252051944612621', '101682734905774807138',
'104357255131643322237', '102802823311284245007', '105687286181087087938', '106481269342600411633',
'104689423159256002886', '112498833559117226731', '117295569606047532013', '103537465357175776468',
'115597363814522022890', '107320220329059497679', '105178080302702831809', '100774395895038903027',
'112705926429019733037', '100402345470048010212', '114805887372864153486', '116709755762272494241',
'108616701717565220964', '110136850451233606756', '111583929679906329750', '104898639851403166441',
'111127624692119872997', '108373180619624143682', '102998236025601701615', '105965978920361943349',
'105195268799430749865', '115973691986694822274', '107081533430574750743', '113256479008912400953',
'104801716294445103557', '105088313441807388577', '101384003234517231964', '100064238664911237662',
'109462018029692971187', '110315331954590884708', '100401573219575028576', '100644820619132604408',
'105712719816201451857', '110999762221836400675', '110812640907879540139', '105228323457033543279',
'101659112905585086771', '115238169369932529641', '108459444669704402860', '112376808056265675751',
'113062200675898364899', '113790517052179637060', '117913114635691687230', '113609819507715309506',
'103880880568089001619', '115230166193627948492', '109292455464540990594', '102748241426689710933',
'103694044190443947860', '109499153411718537835', '117428153251899145718', '116564404830129709349',
'114673087345260135832', '110164135613724623208', '118003926024310980798', '106722816607940762364',
'109128804280051726481', '117823130965889718161', '103822648474011295451', '102875388305901271896',
'116430720068309201654', '111774305345889696894', '106323274296699264295', '116418544068626962508',
'101249952574241217402', '116729759094961531076', '102979881173826058281', '105371406280194453518',
'101898587084201015993', '116610801738829783940', '105887489575344254572', '108831587043640764362',
'105810954071112361976', '103105202350431983844', '101981660178179618739', '101701920696045165687',
'112669538420258358861', '100100066894130591020', '113353870864715471985', '100353356311983330157',
'117762593332079535969', '107507539268974734333', '104707033302283722890', '111115958534146764360',
'107903295674078716857', '105154411599585458501', '111034269652878471427', '100816763676617074031',
'113906325992504847601', '116282402565362776252', '114210320721253434841', '116653828204029811729',
'115041340633616548475', '112878166140838268180', '116540026763084000945', '113576769754615264600',
'106751479584722900909', '108484897076394591768', '108813917621464690074', '117791775710666768054',
'106180130032686806318', # google.com/+intananggita
'116865015561080085465', # google.com/+FirstaYunida
'103164032603442685281', # tikabanget
'102354805749063623353', # google.com/+oonarfiandwi
'108287824657082742169', # google.com/+eunikekartini
'111412165181181229281', '113622927385865973313', '114226493354271341433', '105529429805260442349',
'100635421636201432228', '104532937678377251261', '116643055326494693168',
# verified users
'112630572986759127431', '107473738764732280406', '113731426897634311366', '105820066174392707722',
'104744626545751417132', '101350140907529651441', '104457932600175607039', '114818895899066885015',
'116723877857974180090', '112282592694412599421', '105751407373154489199', '117704134005457696729',
'100835152593402953418', '116413840890200727723', '109710122744992700365', '115123109876426523868',
'103982177023364480741',
# not-active-user
'101874797485064592775', '103112988688147257701', '100575364581092867206', '111000770457789664008',
'108904522549191242618', '108288059417021620647', '100652824519298040016', '102185792362815488008',
]
# Refresh interval for profile data: scales with the number of tracked
# ids (one per minute over a full cycle).
profile_expired_time_in_seconds = 60 * len(indonesia_ids)
# Activities are considered stale after 30 minutes.
activity_expired_time_in_seconds = 1800
"""
TaskQueue only support relative URL
"""
TQ_URL_PROFILE = '/p'
TQ_URL_ACTIVITY = '/a'
app = Flask(__name__)
app.config['DEBUG'] = True
@app.route('/indonesia', methods=['POST'])
def get_indonesia():
    """Return one page of Indonesian profiles ordered by last activity.

    Form fields: 'nitems' (page size) and 'page' (1-based page number).
    Responds with a JSON document containing 'total_data' and a
    'paging_data' list of user dicts.  Only the DB_INSTANCE host serves
    this endpoint, to preserve quota usage.
    """
    if not environ['DB_INSTANCE'] in request.url_root:
        return '[]'
    nitems = int(request.form['nitems'])
    page = int(request.form['page'])
    offset = 0
    if page > 0:
        offset = (page-1) * nitems
    # process phase 1 (update database) is now handled by cron per 10 minutes
    # process phase 2 (response with data from memcache or Datastore):
    # total data from Datastore is bigger than max allowed on memcache (1MB)
    # so we split the query per page and cache per-profile data separately
    profiles = deserialize_entities(memcache.get('profile_by_activity_updated:%d:%d' % (nitems, offset)))
    if profiles is None:
        profiles = Profile.query().order(-Profile.activity_updated). \
            fetch(nitems, offset=offset, projection=[Profile.activity_updated])
        if not memcache.add('profile_by_activity_updated:%d:%d' % (nitems, offset), serialize_entities(profiles), 60):
            error('Memcache set failed: profile_by_activity_updated:%d:%d' % (nitems, offset))
    indonesia_users = []
    for profile in profiles:
        # get data from memcache or datastore for specific user
        p = get_profile(profile.key.id())
        # 'updated' is field from Google+ API activities related to specified user
        last_activity = '(unknown)'
        activity_updated = profile.activity_updated
        if activity_updated is not None:
            last_activity = get_delta(activity_updated)
        if p.activity_data is not None:
            activity_data = json.loads(p.activity_data)
            # Bugfix: the stored activities response may omit the 'items'
            # key entirely (e.g. a user with no public activity); default
            # to an empty list instead of raising KeyError.
            items = activity_data.get('items', [])
            if len(items) > 0:
                item_object = items[0]['object']
                t_reshare = str(item_object['resharers']['totalItems'])
                t_plusone = str(item_object['plusoners']['totalItems'])
                t_comment = str(item_object['replies']['totalItems'])
                last_activity += '<br/>('+t_reshare+' reshares, '+t_plusone+' <b>+1</b>, '+t_comment+' comments)'
        # user profile is a return of Google+ API people
        person = People(p.user_data)
        if person is not None:
            user_dict = {'displayName': person.displayName, 'id': person.id,
                         'name': person.name, 'image': person.get_image(), 'verified': person.verified,
                         'last_activity': last_activity}
            indonesia_users.append(user_dict)
    data_indonesia_users = {'total_data': count_indonesia(), 'paging_data': indonesia_users}
    return json.dumps(data_indonesia_users)
@app.route('/verified_indonesia', methods=['POST'])
def get_verified():
    """Return one page of verified profiles ordered by last activity.

    Same contract as get_indonesia(), restricted to verified users.
    Only the DB_INSTANCE host serves this endpoint, to preserve quota
    usage.
    """
    if not environ['DB_INSTANCE'] in request.url_root:
        return '[]'
    nitems = int(request.form['nitems'])
    page = int(request.form['page'])
    offset = 0
    if page > 0:
        offset = (page-1) * nitems
    # process phase 2 (response with data from memcache or Datastore):
    # total data from Datastore is bigger than max allowed on memcache (1MB)
    # so we split the query per page and cache per-profile data separately
    profiles = deserialize_entities(memcache.get('profile_verified:%d:%d' % (nitems, offset)))
    if profiles is None:
        profiles = Profile.query(Profile.user_is_verified == True).order(-Profile.activity_updated). \
            fetch(nitems, offset=offset, projection=[Profile.activity_updated])
        if not memcache.add('profile_verified:%d:%d' % (nitems, offset), serialize_entities(profiles), 60):
            error('Memcache set failed: profile_verified:%d:%d' % (nitems, offset))
    verified_users = []
    for profile in profiles:
        # get data from memcache or datastore for specific user
        p = get_profile(profile.key.id())
        # 'updated' is field from Google+ API activities related to specified user
        last_activity = '(unknown)'
        activity_updated = profile.activity_updated
        if activity_updated is not None:
            last_activity = get_delta(activity_updated)
        if p.activity_data is not None:
            activity_data = json.loads(p.activity_data)
            # Bugfix: the stored activities response may omit the 'items'
            # key entirely; default to an empty list instead of raising
            # KeyError.
            items = activity_data.get('items', [])
            if len(items) > 0:
                item_object = items[0]['object']
                t_reshare = str(item_object['resharers']['totalItems'])
                t_plusone = str(item_object['plusoners']['totalItems'])
                t_comment = str(item_object['replies']['totalItems'])
                last_activity += '<br/>('+t_reshare+' reshares, '+t_plusone+' <b>+1</b>, '+t_comment+' comments)'
        # user profile is a return of Google+ API people
        person = People(p.user_data)
        if person is not None:
            user_dict = {'displayName': person.displayName, 'id': person.id,
                         'name': person.name, 'image': person.get_image(), 'verified': person.verified,
                         'last_activity': last_activity}
            verified_users.append(user_dict)
    data_verified_users = {'total_data': count_verified(), 'paging_data': verified_users}
    return json.dumps(data_verified_users)
def count_verified():
    """Return the number of verified profiles, cached for 10 minutes."""
    cached = memcache.get('count_verified')
    if cached is not None:
        return cached
    # Cache miss: count in Datastore, then repopulate the cache.
    count = Profile.query(Profile.user_is_verified == True).count(limit=None, keys_only=True)
    if not memcache.add('count_verified', count, 600):  # 600 seconds
        error('Memcache set failed: count_verified')
    return count
def count_indonesia():
    """Return the total number of profiles, cached for 10 minutes."""
    cached = memcache.get('count_indonesia')
    if cached is not None:
        return cached
    # Cache miss: count in Datastore, then repopulate the cache.
    count = Profile.query().count(limit=None, keys_only=True)
    if not memcache.add('count_indonesia', count, 600):  # 600 seconds
        error('Memcache set failed: count_indonesia')
    return count
def get_profile(profile_id):
    """Fetch a Profile entity by id, preferring memcache over Datastore."""
    profile = deserialize_entities(memcache.get('profile:%s' % profile_id))
    if profile is None:
        # Not cached: hit the Datastore directly.
        profile = ndb.Key(Profile, profile_id).get()
        if profile is not None:
            # Found in Datastore but missing in memcache: cache for 600 seconds.
            if not memcache.add('profile:%s' % profile_id, serialize_entities(profile), 600):
                error('Memcache set failed: profile:%s' % profile_id)
    return profile
@app.route('/update_db_indonesia', methods=['POST'])
def update_db_indonesia():
    """ only DB_INSTANCE serve /update_db_indonesia to preserve quota usage """
    if environ['DB_INSTANCE'] in request.url_root:
        # NOTE(review): nitems/page are parsed but never used below --
        # presumably kept so callers must POST the same form fields as the
        # other endpoints; confirm before removing.
        nitems = int(request.form['nitems'])
        page = int(request.form['page'])
        # process phase 1 (update database)
        for profile_id in indonesia_ids:
            # request to memcache and ndb
            profile = deserialize_entities(memcache.get('profile:%s' % profile_id))
            if profile is None:
                profile = ndb.Key(Profile, profile_id).get()
                if profile is not None:  # None in memcache, but not None on datastore
                    if not memcache.add('profile:%s' % profile_id, serialize_entities(profile), 600):  # 600 seconds
                        error('Memcache set failed: profile:%s' % profile_id)
            if profile is None:  # None in Datastore
                # Add the task to the default queue.
                taskqueue.add(url=TQ_URL_PROFILE, params={'id': profile_id})
                taskqueue.add(url=TQ_URL_ACTIVITY, params={'id': profile_id})
            else:
                # check if the database is expired? update via taskqueue if database expired
                user_lastupdate = profile.user_lastupdate
                if user_lastupdate is None:
                    # Never fetched: back-date so the expiry check fires.
                    user_lastupdate = datetime.now() - timedelta(seconds=(profile_expired_time_in_seconds+1))
                user_delta = datetime.now() - user_lastupdate
                if user_delta.total_seconds() > profile_expired_time_in_seconds:
                    # Add the task to the default queue after expired
                    taskqueue.add(url=TQ_URL_PROFILE, params={'id': profile_id})
                # check if the database is expired? update via taskqueue if database expired
                activity_lastupdate = profile.activity_lastupdate
                if activity_lastupdate is None:
                    # Never fetched: back-date so the expiry check fires.
                    activity_lastupdate = datetime.now() - timedelta(seconds=(activity_expired_time_in_seconds+1))
                activity_delta = datetime.now() - activity_lastupdate
                if activity_delta.total_seconds() > activity_expired_time_in_seconds:
                    # Add the task to the default queue after expired
                    taskqueue.add(url=TQ_URL_ACTIVITY, params={'id': profile_id})
    return '[]'
def get_delta(updated_datetime):
    """Return a human-readable '... ago.' string for a past datetime.

    Seconds are intentionally not reported; for deltas under one minute
    the result is just 'ago.'.
    """
    delta = datetime.now() - updated_datetime
    total_sec = delta.days * 24 * 60 * 60 + delta.seconds
    total_min, secs = divmod(total_sec, 60)
    total_hrs, mins = divmod(total_min, 60)
    total_days, hours = divmod(total_hrs, 24)
    # Build the phrase from the largest unit down, pluralising as needed.
    parts = []
    for amount, singular, plural in (
            (total_days, '1 day ', ' days '),
            (hours, '1 hour ', ' hours '),
            (mins, '1 minute ', ' minutes ')):
        if amount > 1:
            parts.append(str(amount) + plural)
        elif amount == 1:
            parts.append(singular)
    return ''.join(parts) + 'ago.'
"""
serialization method
from blog http://blog.notdot.net/2009/9/Efficient-model-memcaching
updated from db to ndb using method
from blog http://www.dylanv.org/2012/08/22/a-hitchhikers-guide-to-upgrading-app-engine-models-to-ndb/
"""
def serialize_entities(models):
    """Serialize an ndb entity (or list of entities) to protobuf bytes.

    Returns None for None input, one encoded blob for a single entity,
    and a list of blobs for a list of entities.
    """
    if models is None:
        return None
    # Reuse a single adapter: constructing a new ndb.ModelAdapter() for
    # every list element (as the original did) is wasted work.
    adapter = ndb.ModelAdapter()
    if isinstance(models, ndb.Model):
        # Just one instance
        return adapter.entity_to_pb(models).Encode()
    else:
        # A list
        return [adapter.entity_to_pb(x).Encode() for x in models]
def deserialize_entities(data):
    """Inverse of serialize_entities.

    Precondition: the model class must be imported. Returns None for
    None input, a single entity for a single serialized blob, and a
    list of entities for a list of blobs.
    """
    if data is None:
        return None
    # Reuse a single adapter instead of constructing one per element.
    adapter = ndb.ModelAdapter()
    if isinstance(data, str):
        # Just one instance
        return adapter.pb_to_entity(entity_pb.EntityProto(data))
    else:
        return [adapter.pb_to_entity(entity_pb.EntityProto(x)) for x in data]
| |
##
# \namespace cross3d.studiomax
#
# \remarks The cross3d.studiomax.Clip module contains an
# abstraction of the MAXScript MXClip class for interacting
# with the Motion Mixer.
#
# \author willc
# \author Blur Studio
# \date 09/28/15
#
import Py3dsMax
from Py3dsMax import mxs
from cross3d import ClipPortion, TrackPortion
from cross3d.abstract.mixer.clip import AbstractClip
################################################################################
#####------------------------------ Classes -------------------------------#####
################################################################################
class StudiomaxClip(AbstractClip):
    """An abstraction of the MAXScript MxClip class.
    Attributes:
        clip: The ValueWrapper for the MxClip this Clip is wrapping.
        track: The Track instance for the MxClip's parent MxTrack.
        numWeights: The number of weights in the clip's weight curve
            (relevant only when clip is in a layer track)
        globStart: The global frame value for the start point of the MxClip
        globEnd: The global frame value for the end point of the MxClip
        filename: The filename of the bip file used by the MxClip.
        scale: The MxClip's scale. Modifying the scale will cause the Clip to
            scale on the right edge. The left edge will not move.
    """

    @property
    def filename(self):
        """The filename of the bip file used by the MxClip."""
        return self.clip.filename

    @property
    def globStart(self):
        """The global frame value for the start point of the MxClip"""
        return float(self.clip.globStart)

    @property
    def globEnd(self):
        """The global frame value for the end point of the MxClip"""
        return float(self.clip.globEnd)

    @property
    def numWeights(self):
        """The number of weights in the clip's weight curve
        (relevant only when clip is in a layer track)"""
        return int(self.clip.numWeights)

    @property
    def sourceEnd(self):
        # End frame of the clip's source ("org") range.
        return float(self.clip.orgEnd)

    @property
    def sourceStart(self):
        # Start frame of the clip's source ("org") range.
        return float(self.clip.orgStart)

    @property
    def scale(self):
        # Scale factor; per the class docstring, changing it moves only
        # the clip's right edge.
        return float(self.clip.scale)

    @property
    def trimEnd(self):
        # Trim point at the end of the clip.
        return float(self.clip.trimEnd)

    @property
    def trimStart(self):
        # Trim point at the start of the clip.
        return float(self.clip.trimStart)

    def analyzeWeights(self, occludedPortions):
        """Determines which portions of the Clip are used, and which portions of
        the Clip will occlude Tracks below.
        Args:
            occludedPortions(list): A list of `TrackPortion` instances
                for every portion of the Clip that will be occluded
                by Tracks above it.
        Returns:
            tuple: A tuple containing a list of `ClipPortion`
                instances for every used portion of the Clip, and a
                list of `TrackPortion` instances for every portion of
                the Clip that will occlude tracks below it.
        """
        if self.track.isTransitionTrack:
            # this won't work...
            return
        clipOcclPortions = []
        ClipPortions = []
        clipStart, clipEnd = self.globStart, self.globEnd
        if self.numWeights:
            usedPortions = []
            # Initialize the first rangeStart with the global start for the
            # clip. We'll modify this if the weights make the clip have no
            # effect for part of its duration.
            rangeStart, rangeEnd = clipStart, None
            # Keep a seperate occluding clip range. We'll keep track of
            # occluding clips so we can test against them to update clip ranges
            # later on.
            occlStart, occlEnd = None, None
            prevWVal = 0.0
            for wi, (wTime, wVal) in enumerate(self.iterWeights()):
                # Always move the end to the current position
                rangeEnd = wTime
                if wVal == 0.0:
                    # If the usedPortion has a non-zero length and isn't
                    # non-effecting for its entire duration, add it to the used
                    # portions.
                    if rangeEnd > rangeStart and prevWVal:
                        usedPortions.append(
                            TrackPortion(self.track, rangeStart, rangeEnd)
                        )
                    # Reset start to current position
                    rangeStart = wTime
                if wVal == 1.0:
                    # If this is the first weight, start at the beggining of the
                    # clip, since the curve will extend back past this weight.
                    if wi == 0:
                        occlStart = clipStart
                    # If we already have a start stored for an occluding
                    # portion, store this weight as the (new) end. Otherwise,
                    # store it as the start.
                    # NOTE(review): truthiness is used here and below, so an
                    # occlStart stored at frame 0.0 reads as "unset" -- confirm
                    # whether global frame 0 is possible; `is not None` would
                    # be safer.
                    if occlStart:
                        occlEnd = wTime
                    else:
                        occlStart = wTime
                else:
                    # If a start and end are stored for the occluding
                    # TrackPortion, add that TrackPortion to the list of
                    # occluding portions for this clip.
                    if occlStart and occlEnd:
                        clipOcclPortions.append(
                            TrackPortion(self.track, occlStart, occlEnd)
                        )
                    # Clear the occluding start/end, since the track weighting
                    # is no longer fully occluding.
                    occlStart, occlEnd = None, None
                prevWVal = wVal
            # If occlStart is set, add the remainder of the clip to occluding
            # clips.
            if occlStart:
                clipOcclPortions.append(
                    TrackPortion(self.track, occlStart, clipEnd)
                )
            # If the clip ended with a non-zero weight, add the remainder as a
            # usedPortion.
            if wVal:
                usedPortions.append(
                    TrackPortion(self.track, rangeStart, clipEnd)
                )
            # Finally, we'll clean up the list of ClipPortions by eliminating
            # occluded sections of clips, and condensing continuous clips that
            # were split where their weight dips tangential to zero.
            usedSC = self._occludeClipPortions(usedPortions, occludedPortions)
            ClipPortions = self._coalesceClipPortions(usedSC)
        else:
            # No weight curve: the whole clip is used and fully occluding.
            clipRange = self.globStart, self.globEnd
            clipOcclPortions = [TrackPortion(self.track, *clipRange)]
            ClipPortions = self._occludeClipPortions(
                [ClipPortion(self, *clipRange)],
                occludedPortions
            )
        occludedPortions.extend(clipOcclPortions)
        return ClipPortions, occludedPortions

    def getWeightTime(self, index):
        """Retrieves the global frame number the weight at the specified index
        is placed at.
        Args:
            index(int): Index of desired weight to retrieve a time
                for.
        Returns:
            float: Global frame number for the position of the
                weight.
        Raises:
            IndexError
        """
        if index < 0 or index >= self.numWeights:
            raise IndexError('Index out of range')
        # Adjust the weight time to be global, not local to the clip.
        # (MAXScript collections are 1-based, hence index+1.)
        return float(mxs.getWeightTime(self.clip, index+1)) + self.globStart

    def getWeightValue(self, index):
        """Retrieves the value of the weight at the specified index.
        Args:
            index(int): Index of desired weight to retrieve a value
                for.
        Returns:
            float: Value of the weight at the index specified.
        Raises:
            IndexError
        """
        if index < 0 or index >= self.numWeights:
            raise IndexError('Index out of range')
        # MAXScript collections are 1-based, hence index+1.
        return float(mxs.getWeight(self.clip, index+1))

    def iterWeights(self):
        """Wraps the MAXScript getWeight and getWeightTime global functions into
        a generator that returns tuples of the time and value for all
        weights in the Track.
        Returns:
            generator: Generator that produces tuples of
                ((float)time, (float)value) for weights on the
                track.
        """
        count = self.numWeights
        for i in range(count):
            t = self.getWeightTime(i)
            v = self.getWeightValue(i)
            yield (t, v)

    def weights(self):
        """Wraps the MAXScript getWeight and getWeightTime global functions into
        a generator that returns tuples of the time and value for all
        weights on the Clip.
        Returns:
            list: List of tuples for every weight on the Clip in
                the form ((float)time, (float)value).
        """
        return [w for w in self.iterWeights()]

    def _coalesceClipPortions(self, inputPortions):
        # Merge portions that abut exactly (previous end == next start)
        # into single continuous ClipPortions.
        ClipPortions = []
        clip = inputPortions.pop(0)
        scStart = clip.start
        scEnd = clip.end
        while len(inputPortions):
            clip = inputPortions.pop(0)
            if scEnd == clip.start:
                # Contiguous with the running portion: extend it.
                scEnd = clip.end
            else:
                # Gap found: emit the running portion and start a new one.
                ClipPortions.append(ClipPortion(self, scStart, scEnd))
                scStart, scEnd = clip.start, clip.end
        ClipPortions.append(ClipPortion(self, scStart, scEnd))
        return ClipPortions

    def _occludeClipPortions(self, ClipPortions, occludedPortions):
        # Trim or discard each ClipPortion against every occluding range.
        outputClips = []
        while len(ClipPortions):
            sc = ClipPortions.pop(0)
            for ocR in occludedPortions:
                # if ClipPortion is completely occluded
                if (ocR.start < sc.start) and (sc.end < ocR.end):
                    sc = None
                    break
                containsOcclStart = (
                    (sc.start < ocR.start) and (ocR.start < sc.end)
                )
                containsOcclEnd = ((sc.start < ocR.end) and (ocR.end < sc.end))
                if containsOcclStart and containsOcclEnd:
                    # Occlusion splits the portion: requeue the left half
                    # for re-checking and continue with the right half.
                    ClipPortions.append(ClipPortion(self, sc.start, ocR.start))
                    sc = ClipPortion(self, ocR.end, sc.end)
                elif containsOcclStart:
                    sc = ClipPortion(self, sc.start, ocR.start)
                elif containsOcclEnd:
                    sc = ClipPortion(self, ocR.end, sc.end)
            else:
                # for/else: reached only when the loop did not break, i.e.
                # the portion survived (possibly trimmed) all occlusions.
                outputClips.append(sc)
        return outputClips

    def __str__(self):
        return 'Clip [{}]'.format(self.filename)
################################################################################
# register the symbol
import cross3d
cross3d.registerSymbol('Clip', StudiomaxClip)
| |
"""Testing utilities."""
# Copyright (c) 2011, 2012
# Authors: Pietro Berkes,
# Andreas Muller
# Mathieu Blondel
# Olivier Grisel
# Arnaud Joly
# Denis Engemann
# License: BSD 3 clause
import inspect
import pkgutil
import warnings
import sys
import re
import platform
import scipy as sp
import scipy.io
from functools import wraps
try:
# Python 2
from urllib2 import urlopen
from urllib2 import HTTPError
except ImportError:
# Python 3+
from urllib.request import urlopen
from urllib.error import HTTPError
import sklearn
from sklearn.base import BaseEstimator
# Conveniently import all assertions in one place.
from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import assert_true
from nose.tools import assert_false
from nose.tools import assert_raises
from nose.tools import raises
from nose import SkipTest
from nose import with_setup
from numpy.testing import assert_almost_equal
from numpy.testing import assert_array_equal
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_less
import numpy as np
from sklearn.base import (ClassifierMixin, RegressorMixin, TransformerMixin,
ClusterMixin)
__all__ = ["assert_equal", "assert_not_equal", "assert_raises",
"assert_raises_regexp", "raises", "with_setup", "assert_true",
"assert_false", "assert_almost_equal", "assert_array_equal",
"assert_array_almost_equal", "assert_array_less"]
try:
    from nose.tools import assert_in, assert_not_in
except ImportError:
    # Nose < 1.0.0: these helpers are missing, so provide minimal
    # equivalents built on assert_true/assert_false.
    def assert_in(x, container):
        # Assert that x is a member of container.
        assert_true(x in container, msg="%r in %r" % (x, container))

    def assert_not_in(x, container):
        # Assert that x is not a member of container.
        assert_false(x in container, msg="%r in %r" % (x, container))
try:
    from nose.tools import assert_raises_regexp
except ImportError:
    # for Py 2.6
    def assert_raises_regexp(expected_exception, expected_regexp,
                             callable_obj=None, *args, **kwargs):
        """Helper function to check for message patterns in exceptions"""
        # NOTE(review): any exception type is caught here, and re.match
        # anchors at the START of the message (unlike re.search).
        not_raised = False
        try:
            callable_obj(*args, **kwargs)
            not_raised = True
        except Exception as e:
            error_message = str(e)
            if not re.compile(expected_regexp).match(error_message):
                raise AssertionError("Error message should match pattern "
                                     "'%s'. '%s' does not." %
                                     (expected_regexp, error_message))
        if not_raised:
            raise AssertionError("Should have raised %r" %
                                 expected_exception(expected_regexp))
def _assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def _assert_greater(a, b, msg=None):
message = "%r is not greater than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
# To remove when we support numpy 1.7
def assert_warns(warning_class, func, *args, **kw):
    """Test that a certain warning occurs.
    Parameters
    ----------
    warning_class : the warning class
        The class to test for, e.g. UserWarning.
    func : callable
        Callable object to trigger warnings.
    *args : the positional arguments to `func`.
    **kw : the keyword arguments to `func`
    Returns
    -------
    result : the return value of `func`
    """
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as caught:
        # Cause all warnings to always be triggered, then run the callable.
        warnings.simplefilter("always")
        result = func(*args, **kw)
    # At least one warning must have been emitted...
    if len(caught) == 0:
        raise AssertionError("No warning raised when calling %s"
                             % func.__name__)
    # ...and the first one must be of the expected class.
    first = caught[0]
    if first.category is not warning_class:
        raise AssertionError("First warning for %s is not a "
                             "%s( is %s)"
                             % (func.__name__, warning_class, first))
    return result
def assert_warns_message(warning_class, message, func, *args, **kw):
    """Test that a certain warning occurs and with a certain message.
    Parameters
    ----------
    warning_class : the warning class
        The class to test for, e.g. UserWarning.
    message : str | callable
        The entire message or a substring to test for. If callable,
        it takes a string as argument and will trigger an assertion error
        if it returns `False`.
    func : callable
        Callable object to trigger warnings.
    *args : the positional arguments to `func`.
    **kw : the keyword arguments to `func`.
    Returns
    -------
    result : the return value of `func`
    """
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as caught:
        # Cause all warnings to always be triggered, then run the callable.
        warnings.simplefilter("always")
        result = func(*args, **kw)
    # At least one warning must have been emitted...
    if len(caught) == 0:
        raise AssertionError("No warning raised when calling %s"
                             % func.__name__)
    # ...and the first one must be of the expected class.
    first = caught[0]
    if first.category is not warning_class:
        raise AssertionError("First warning for %s is not a "
                             "%s( is %s)"
                             % (func.__name__, warning_class, first))
    # substring will match, the entire message with typo won't
    msg = first.message  # For Python 3 compatibility
    msg = str(msg.args[0] if hasattr(msg, 'args') else msg)
    if callable(message):  # add support for certain tests
        matches = message
    else:
        def matches(text):
            return message in text
    if not matches(msg):
        raise AssertionError("The message received ('%s') for <%s> is "
                             "not the one you expected ('%s')"
                             % (msg, func.__name__, message
                                ))
    return result
# To remove when we support numpy 1.7
def assert_no_warnings(func, *args, **kw):
    """Call ``func`` and fail if any warning at all is emitted; return its result."""
    # XXX: once we may depend on python >= 2.6, this can be replaced by the
    # warnings module context manager.
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        result = func(*args, **kw)
        if caught:
            raise AssertionError("Got warnings when calling %s: %s"
                                 % (func.__name__, caught))
    return result
def ignore_warnings(obj=None):
    """ Context manager and decorator to ignore warnings
    Note. Using this (in both variants) will clear all warnings
    from all python modules loaded. In case you need to test
    cross-module-warning-logging this is not your tool of choice.
    Examples
    --------
    >>> with ignore_warnings():
    ...     warnings.warn('buhuhuhu')
    >>> def nasty_warn():
    ...     warnings.warn('buhuhuhu')
    ...     print(42)
    >>> ignore_warnings(nasty_warn)()
    42
    """
    # Used as ``@ignore_warnings`` on a callable -> behave as a decorator;
    # used as ``ignore_warnings()`` -> hand back a context manager.
    return _ignore_warnings(obj) if callable(obj) else _IgnoreWarnings()
def _ignore_warnings(fn):
    """Decorator to catch and hide warnings without visual nesting."""
    @wraps(fn)
    def wrapper(*args, **kwargs):
        # very important to avoid uncontrolled state propagation
        clean_warning_registry()
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            result = fn(*args, **kwargs)
        # Discard the captured warnings before handing back the result.
        # (The original cleared ``w`` on a line placed *after* ``return``,
        # which could never execute.)
        w[:] = []
        return result
    return wrapper
class _IgnoreWarnings(object):
    """Improved and simplified Python warnings context manager
    Copied from Python 2.7.5 and modified as required.
    """
    def __init__(self):
        """
        Parameters
        ==========
        category : warning class
            The category to filter. Defaults to Warning. If None,
            all categories will be muted.
        """
        # NOTE(review): despite the docstring above, no ``category``
        # argument is actually accepted -- all warnings are recorded.
        self._record = True
        # Handle on the live ``warnings`` module; its ``filters`` and
        # ``showwarning`` are saved in __enter__ and restored in __exit__.
        self._module = sys.modules['warnings']
        self._entered = False
        self.log = []
    def __repr__(self):
        # Show only non-default settings, mirroring catch_warnings' repr.
        args = []
        if self._record:
            args.append("record=True")
        if self._module is not sys.modules['warnings']:
            args.append("module=%r" % self._module)
        name = type(self).__name__
        return "%s(%s)" % (name, ", ".join(args))
    def __enter__(self):
        clean_warning_registry()  # be safe and not propagate state + chaos
        warnings.simplefilter('always')
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the global filter list and work on a copy of it while inside
        # the context, so __exit__ can restore the original object.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._showwarning = self._module.showwarning
        if self._record:
            self.log = []
            def showwarning(*args, **kwargs):
                # Capture each warning into ``self.log`` instead of printing.
                self.log.append(warnings.WarningMessage(*args, **kwargs))
            self._module.showwarning = showwarning
            # Callers get the (live) list of captured warnings.
            return self.log
        else:
            return None
    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        # Restore the saved global warning state and drop captured entries.
        self._module.filters = self._filters
        self._module.showwarning = self._showwarning
        self.log[:] = []
        clean_warning_registry()  # be safe and not propagate state + chaos
# Prefer the comparison helpers shipped with nose; fall back to the local
# pure-Python implementations when nose is not installed.
try:
    from nose.tools import assert_less
except ImportError:
    assert_less = _assert_less
try:
    from nose.tools import assert_greater
except ImportError:
    assert_greater = _assert_greater
def _assert_allclose(actual, desired, rtol=1e-7, atol=0,
err_msg='', verbose=True):
actual, desired = np.asanyarray(actual), np.asanyarray(desired)
if np.allclose(actual, desired, rtol=rtol, atol=atol):
return
msg = ('Array not equal to tolerance rtol=%g, atol=%g: '
'actual %s, desired %s') % (rtol, atol, actual, desired)
raise AssertionError(msg)
# Use numpy's own assert_allclose when this numpy provides it; otherwise
# fall back to the simplified local implementation above.
if hasattr(np.testing, 'assert_allclose'):
    assert_allclose = np.testing.assert_allclose
else:
    assert_allclose = _assert_allclose
def assert_raise_message(exception, message, function, *args, **kwargs):
    """Helper function to test error messages in exceptions"""
    try:
        function(*args, **kwargs)
        # Reaching this line means nothing was raised.
        raise AssertionError("Should have raised %r" % exception(message))
    except exception as err:
        assert_in(message, str(err))
def fake_mldata(columns_dict, dataname, matfile, ordering=None):
    """Create a fake mldata data set.

    Parameters
    ----------
    columns_dict: contains data as
                  columns_dict[column_name] = array of data
    dataname: name of data set
    matfile: file-like object or file name
    ordering: list of column_names, determines the ordering in the data set

    Note: this function transposes all arrays, while fetch_mldata only
    transposes 'data', keep that into account in the tests.
    """
    datasets = dict(columns_dict)
    # transpose all variables
    for name in datasets:
        datasets[name] = datasets[name].T
    if ordering is None:
        ordering = sorted(list(datasets.keys()))
    # NOTE: setting up this array is tricky, because of the way Matlab
    # re-packages 1D arrays
    # Use np.empty directly: ``sp.empty`` was only a deprecated re-export
    # of the numpy function from the scipy namespace.
    datasets['mldata_descr_ordering'] = np.empty((1, len(ordering)),
                                                 dtype='object')
    for i, name in enumerate(ordering):
        datasets['mldata_descr_ordering'][0, i] = name
    scipy.io.savemat(matfile, datasets, oned_as='column')
class mock_mldata_urlopen(object):
    def __init__(self, mock_datasets):
        """Object that mocks the urlopen function to fake requests to mldata.

        `mock_datasets` is a dictionary of {dataset_name: data_dict}, or
        {dataset_name: (data_dict, ordering).
        `data_dict` itself is a dictionary of {column_name: data_array},
        and `ordering` is a list of column_names to determine the ordering
        in the data set (see `fake_mldata` for details).

        When requesting a dataset with a name that is in mock_datasets,
        this object creates a fake dataset in a StringIO object and
        returns it. Otherwise, it raises an HTTPError.
        """
        self.mock_datasets = mock_datasets

    def __call__(self, urlname):
        # The dataset name is the final path component of the URL.
        dataset_name = urlname.split('/')[-1]
        # Unknown dataset: behave like the real server and 404.
        if dataset_name not in self.mock_datasets:
            raise HTTPError(urlname, 404, dataset_name + " is not available",
                            [], None)
        from io import BytesIO
        buffer = BytesIO()
        dataset = self.mock_datasets[dataset_name]
        ordering = None
        if isinstance(dataset, tuple):
            dataset, ordering = dataset
        # Serialize a fake matlab file into the in-memory buffer and
        # rewind it so the caller can read from the start.
        fake_mldata(dataset, '_' + dataset_name, buffer, ordering)
        buffer.seek(0)
        return buffer
def install_mldata_mock(mock_datasets):
    """Monkey-patch the mldata downloader to serve ``mock_datasets`` locally."""
    # Lazy import to avoid mutually recursive imports
    from sklearn import datasets
    datasets.mldata.urlopen = mock_mldata_urlopen(mock_datasets)
def uninstall_mldata_mock():
    """Restore the real ``urlopen`` on the mldata module after mocking."""
    # Lazy import to avoid mutually recursive imports
    from sklearn import datasets
    datasets.mldata.urlopen = urlopen
# Class-name exclusion lists consulted by all_estimators() below.
# Meta estimators need another estimator to be instantiated.
meta_estimators = ["OneVsOneClassifier",
                   "OutputCodeClassifier", "OneVsRestClassifier", "RFE",
                   "RFECV", "BaseEnsemble"]
# estimators that there is no way to default-construct sensibly
other = ["Pipeline", "FeatureUnion", "GridSearchCV", "RandomizedSearchCV"]
def all_estimators(include_meta_estimators=False, include_other=False,
                   type_filter=None):
    """Get a list of all estimators from sklearn.

    This function crawls the module and gets all classes that inherit
    from BaseEstimator. Classes that are defined in test-modules are not
    included.
    By default meta_estimators such as GridSearchCV are also not included.

    Parameters
    ----------
    include_meta_estimators : boolean, default=False
        Whether to include meta-estimators that can be constructed using
        an estimator as their first argument. These are currently
        BaseEnsemble, OneVsOneClassifier, OutputCodeClassifier,
        OneVsRestClassifier, RFE, RFECV.
    include_other : boolean, default=False
        Whether to include meta-estimators that are somehow special and can
        not be default-constructed sensibly. These are currently
        Pipeline, FeatureUnion and GridSearchCV
    type_filter : string or None, default=None
        Which kind of estimators should be returned. If None, no filter is
        applied and all estimators are returned. Possible values are
        'classifier', 'regressor', 'cluster' and 'transformer' to get
        estimators only of these specific types.

    Returns
    -------
    estimators : list of tuples
        List of (name, class), where ``name`` is the class name as string
        and ``class`` is the actual type of the class.
    """
    def is_abstract(c):
        # A class is abstract iff it declares a non-empty set of
        # abstract methods.
        return bool(getattr(c, '__abstractmethods__', False))

    all_classes = []
    # get parent folder
    path = sklearn.__path__
    for importer, modname, ispkg in pkgutil.walk_packages(
            path=path, prefix='sklearn.', onerror=lambda x: None):
        if ".tests." in modname:
            continue
        module = __import__(modname, fromlist="dummy")
        all_classes.extend(inspect.getmembers(module, inspect.isclass))
    all_classes = set(all_classes)

    estimators = [c for c in all_classes
                  if (issubclass(c[1], BaseEstimator)
                      and c[0] != 'BaseEstimator')]
    # get rid of abstract base classes
    estimators = [c for c in estimators if not is_abstract(c[1])]

    if not include_other:
        estimators = [c for c in estimators if c[0] not in other]
    # possibly get rid of meta estimators
    if not include_meta_estimators:
        estimators = [c for c in estimators if c[0] not in meta_estimators]

    # Filter by estimator kind via the corresponding mixin class.
    mixin_by_filter = {'classifier': ClassifierMixin,
                       'regressor': RegressorMixin,
                       'transformer': TransformerMixin,
                       'cluster': ClusterMixin}
    if type_filter is not None:
        try:
            mixin = mixin_by_filter[type_filter]
        except KeyError:
            raise ValueError("Parameter type_filter must be 'classifier', "
                             "'regressor', 'transformer', 'cluster' or None, got"
                             " %s." % repr(type_filter))
        estimators = [est for est in estimators if issubclass(est[1], mixin)]

    # We sort in order to have reproducible test failures
    return sorted(estimators)
def set_random_state(estimator, random_state=0):
    """Seed ``estimator`` when it exposes a ``random_state`` parameter."""
    params = estimator.get_params()
    if "random_state" in params:
        estimator.set_params(random_state=random_state)
def if_matplotlib(func):
    """Test decorator that skips test if matplotlib not installed. """
    @wraps(func)
    def run_test(*args, **kwargs):
        try:
            import matplotlib
            matplotlib.use('Agg', warn=False)
            # this fails if no $DISPLAY specified
            matplotlib.pylab.figure()
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are not silently converted into a skip.
            raise SkipTest('Matplotlib not available.')
        else:
            return func(*args, **kwargs)
    return run_test
def if_not_mac_os(versions=('10.7', '10.8', '10.9'),
                  message='Multi-process bug in Mac OS X >= 10.7 '
                          '(see issue #636)'):
    """Test decorator that skips test if OS is Mac OS X and its
    major version is one of ``versions``.
    """
    # mac_ver() yields an empty string on non-Mac platforms, so ``skip``
    # can only be True on OS X.
    major_minor = '.'.join(platform.mac_ver()[0].split('.')[:2])
    skip = major_minor in versions

    def decorator(func):
        if not skip:
            return func

        @wraps(func)
        def func(*args, **kwargs):
            raise SkipTest(message)
        return func
    return decorator
def clean_warning_registry():
    """Safe way to reset warnings.

    Resets the global warning filters and clears every loaded module's
    ``__warningregistry__`` so that previously-raised warnings do not
    suppress identical ones in later tests.
    """
    warnings.resetwarnings()
    reg = "__warningregistry__"
    # Iterate over a snapshot: touching modules may trigger imports that
    # mutate sys.modules and would break direct iteration.
    for mod in list(sys.modules.values()):
        if hasattr(mod, reg):
            getattr(mod, reg).clear()
| |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import os
import unittest
from pymatgen.io.qchem.sets import *
from pymatgen.util.testing import PymatgenTest
__author__ = "Samuel Blau, Brandon Wood, Shyam Dwaraknath"
__copyright__ = "Copyright 2018, The Materials Project"
__version__ = "0.1"
test_dir = os.path.join(PymatgenTest.TEST_FILES_DIR, "molecules")
class QChemDictSetTest(PymatgenTest):
    """Unit tests for QChemDictSet: generated rem/pcm/solvent/smx sections."""
    def test_init(self):
        # Minimal construction: only job type, basis and SCF algorithm given;
        # everything else in the rem block comes from defaults.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_DictSet = QChemDictSet(
            molecule=test_molecule,
            job_type="opt",
            basis_set="6-31G*",
            scf_algorithm="diis",
        )
        self.assertEqual(
            test_DictSet.rem,
            {
                "job_type": "opt",
                "gen_scfman": "true",
                "basis": "6-31g*",
                "max_scf_cycles": 200,
                "method": "wb97xv",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "geom_opt_max_cycles": 200,
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_DictSet.pcm, {})
        self.assertEqual(test_DictSet.solvent, {})
        self.assertEqual(test_DictSet.smx, {})
        self.assertEqual(test_DictSet.molecule, test_molecule)
    def test_full_init(self):
        # PCM solvation: dft_rung=1 selects B3LYP and pcm_dielectric fills
        # the pcm/solvent sections.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_DictSet = QChemDictSet(
            molecule=test_molecule,
            job_type="opt",
            basis_set="6-31g*",
            scf_algorithm="diis",
            dft_rung=1,
            pcm_dielectric=10.0,
            max_scf_cycles=35,
        )
        self.assertEqual(
            test_DictSet.rem,
            {
                "job_type": "opt",
                "gen_scfman": "true",
                "basis": "6-31g*",
                "max_scf_cycles": 35,
                "method": "b3lyp",
                "geom_opt_max_cycles": 200,
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "pcm",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(
            test_DictSet.pcm,
            {
                "heavypoints": "194",
                "hpoints": "194",
                "radii": "uff",
                "theory": "cpcm",
                "vdwscale": "1.1",
            },
        )
        self.assertEqual(test_DictSet.solvent, {"dielectric": 10.0})
        self.assertEqual(test_DictSet.molecule, test_molecule)
        # SMD solvation variant of the same construction.
        test_DictSet = QChemDictSet(
            molecule=test_molecule,
            job_type="opt",
            basis_set="6-31g*",
            scf_algorithm="diis",
            dft_rung=1,
            smd_solvent="water",
            max_scf_cycles=35,
        )
        self.assertEqual(
            test_DictSet.rem,
            {
                "job_type": "opt",
                "gen_scfman": "true",
                "basis": "6-31g*",
                "max_scf_cycles": 35,
                "method": "b3lyp",
                "geom_opt_max_cycles": 200,
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "smd",
                "ideriv": "1",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_DictSet.smx, {"solvent": "water"})
    def test_overwrite_input(self):
        # User-supplied overwrite_inputs must take precedence over the
        # OptSet defaults in the final rem block.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        overwrite_inputs = {
            "rem": {
                "method": "b3lyp",
                "basis": "6-31g*",
                "thresh": 10,
                "xc_grid": "000150000302",
            }
        }
        test_OptSet = OptSet(molecule=test_molecule, overwrite_inputs=overwrite_inputs)
        act_rem = {
            "job_type": "opt",
            "gen_scfman": "true",
            "basis": "6-31g*",
            "max_scf_cycles": 200,
            "method": "b3lyp",
            "scf_algorithm": "diis",
            "xc_grid": "000150000302",
            "geom_opt_max_cycles": 200,
            "thresh": 10,
            "symmetry": "false",
            "sym_ignore": "true",
            "resp_charges": "true",
        }
        self.assertDictEqual(act_rem, test_OptSet.rem)
    def test_double_solvation(self):
        # Requesting both PCM and SMD at once must raise ValueError and
        # leave no set constructed.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        raised_error = False
        dict_set = None
        try:
            dict_set = QChemDictSet(
                molecule=test_molecule,
                job_type="opt",
                basis_set="6-31g*",
                scf_algorithm="diis",
                dft_rung=1,
                pcm_dielectric=10.0,
                smd_solvent="water",
                max_scf_cycles=35,
            )
        except ValueError:
            raised_error = True
        self.assertTrue(raised_error)
        self.assertEqual(dict_set, None)
    def test_pcm_write(self):
        # Round-trip: write the PCM set to disk and compare against a
        # hand-built QCInput section by section.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        dict_set = QChemDictSet(
            molecule=test_molecule,
            job_type="opt",
            basis_set="6-31g*",
            scf_algorithm="diis",
            dft_rung=5,
            pcm_dielectric=10.0,
            max_scf_cycles=35,
        )
        dict_set.write("mol.qin")
        test_dict = QCInput.from_file("mol.qin").as_dict()
        rem = {
            "job_type": "opt",
            "basis": "6-31G*",
            "max_scf_cycles": "35",
            "method": "wb97mv",
            "geom_opt_max_cycles": "200",
            "gen_scfman": "true",
            "scf_algorithm": "diis",
            "xc_grid": "3",
            "solvent_method": "pcm",
            "symmetry": "false",
            "sym_ignore": "true",
            "resp_charges": "true",
        }
        pcm = {
            "heavypoints": "194",
            "hpoints": "194",
            "radii": "uff",
            "theory": "cpcm",
            "vdwscale": "1.1",
        }
        qc_input = QCInput(molecule=test_molecule, rem=rem, pcm=pcm, solvent={"dielectric": "10.0"})
        for k, v in qc_input.as_dict().items():
            self.assertEqual(v, test_dict[k])
        os.remove("mol.qin")
    def test_smd_write(self):
        # Same round-trip as test_pcm_write but with SMD solvation.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        dict_set = QChemDictSet(
            molecule=test_molecule,
            job_type="opt",
            basis_set="6-31g*",
            scf_algorithm="diis",
            dft_rung=5,
            smd_solvent="water",
            max_scf_cycles=35,
        )
        dict_set.write("mol.qin")
        test_dict = QCInput.from_file("mol.qin").as_dict()
        rem = {
            "job_type": "opt",
            "basis": "6-31G*",
            "max_scf_cycles": "35",
            "method": "wb97mv",
            "geom_opt_max_cycles": "200",
            "gen_scfman": "true",
            "scf_algorithm": "diis",
            "xc_grid": "3",
            "solvent_method": "smd",
            "ideriv": "1",
            "symmetry": "false",
            "sym_ignore": "true",
            "resp_charges": "true",
        }
        qc_input = QCInput(molecule=test_molecule, rem=rem, smx={"solvent": "water"})
        for k, v in qc_input.as_dict().items():
            self.assertEqual(v, test_dict[k])
        os.remove("mol.qin")
    def test_custom_smd_write(self):
        # A custom SMD solvent should map to smx solvent "other" and dump
        # the raw parameter string into a "solvent_data" side file.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        dict_set = QChemDictSet(
            molecule=test_molecule,
            job_type="opt",
            basis_set="6-31g*",
            scf_algorithm="diis",
            dft_rung=5,
            smd_solvent="custom",
            custom_smd="90.00,1.415,0.00,0.735,20.2,0.00,0.00",
            max_scf_cycles=35,
        )
        dict_set.write("mol.qin")
        test_dict = QCInput.from_file("mol.qin").as_dict()
        rem = {
            "job_type": "opt",
            "basis": "6-31G*",
            "max_scf_cycles": "35",
            "method": "wb97mv",
            "geom_opt_max_cycles": "200",
            "gen_scfman": "true",
            "scf_algorithm": "diis",
            "xc_grid": "3",
            "solvent_method": "smd",
            "ideriv": "1",
            "symmetry": "false",
            "sym_ignore": "true",
            "resp_charges": "true",
        }
        qc_input = QCInput(molecule=test_molecule, rem=rem, smx={"solvent": "other"})
        for k, v in qc_input.as_dict().items():
            self.assertEqual(v, test_dict[k])
        os.remove("mol.qin")
        with open("solvent_data") as sd:
            lines = sd.readlines()
        self.assertEqual(lines[0], "90.00,1.415,0.00,0.735,20.2,0.00,0.00")
        os.remove("solvent_data")
class OptSetTest(PymatgenTest):
    """Unit tests for the OptSet defaults (geometry optimization inputs)."""
    def test_init(self):
        # Plain OptSet: wB97X-D / def2-TZVPPD defaults, no solvation.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_OptSet = OptSet(molecule=test_molecule)
        self.assertEqual(
            test_OptSet.rem,
            {
                "job_type": "opt",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "geom_opt_max_cycles": 200,
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_OptSet.pcm, {})
        self.assertEqual(test_OptSet.solvent, {})
        self.assertEqual(test_OptSet.smx, {})
        self.assertEqual(test_OptSet.molecule, test_molecule)
    def test_pcm_init(self):
        # pcm_dielectric should enable solvent_method=pcm and fill the
        # pcm/solvent sections.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_OptSet = OptSet(molecule=test_molecule, pcm_dielectric=10.0)
        self.assertEqual(
            test_OptSet.rem,
            {
                "job_type": "opt",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "geom_opt_max_cycles": 200,
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "pcm",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(
            test_OptSet.pcm,
            {
                "heavypoints": "194",
                "hpoints": "194",
                "radii": "uff",
                "theory": "cpcm",
                "vdwscale": "1.1",
            },
        )
        self.assertEqual(test_OptSet.solvent, {"dielectric": 10.0})
        self.assertEqual(test_OptSet.molecule, test_molecule)
    def test_smd_init(self):
        # smd_solvent should enable solvent_method=smd and set the smx block.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_OptSet = OptSet(molecule=test_molecule, smd_solvent="water")
        self.assertEqual(
            test_OptSet.rem,
            {
                "job_type": "opt",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "geom_opt_max_cycles": 200,
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "smd",
                "ideriv": "1",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_OptSet.smx, {"solvent": "water"})
        self.assertEqual(test_OptSet.molecule, test_molecule)
class SinglePointSetTest(PymatgenTest):
    """Unit tests for the SinglePointSet defaults (job_type=sp inputs)."""
    def test_init(self):
        # Plain single-point set: no solvation, no geometry-opt keys.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_SPSet = SinglePointSet(molecule=test_molecule)
        self.assertEqual(
            test_SPSet.rem,
            {
                "job_type": "sp",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_SPSet.pcm, {})
        self.assertEqual(test_SPSet.solvent, {})
        self.assertEqual(test_SPSet.molecule, test_molecule)
    def test_pcm_init(self):
        # pcm_dielectric should enable solvent_method=pcm plus pcm/solvent
        # sections.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_SPSet = SinglePointSet(molecule=test_molecule, pcm_dielectric=10.0)
        self.assertEqual(
            test_SPSet.rem,
            {
                "job_type": "sp",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "pcm",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(
            test_SPSet.pcm,
            {
                "heavypoints": "194",
                "hpoints": "194",
                "radii": "uff",
                "theory": "cpcm",
                "vdwscale": "1.1",
            },
        )
        self.assertEqual(test_SPSet.solvent, {"dielectric": 10.0})
        self.assertEqual(test_SPSet.molecule, test_molecule)
    def test_smd_init(self):
        # smd_solvent should enable solvent_method=smd and set the smx block.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_SPSet = SinglePointSet(molecule=test_molecule, smd_solvent="water")
        self.assertEqual(
            test_SPSet.rem,
            {
                "job_type": "sp",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "smd",
                "ideriv": "1",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_SPSet.smx, {"solvent": "water"})
        self.assertEqual(test_SPSet.molecule, test_molecule)
    def test_plots_init(self):
        # plot_cubes=True must add plots/make_cube_files rem keys and a
        # default plots section.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_SPSet = SinglePointSet(molecule=test_molecule, smd_solvent="water", plot_cubes=True)
        self.assertEqual(
            test_SPSet.rem,
            {
                "job_type": "sp",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "smd",
                "ideriv": "1",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
                "plots": "true",
                "make_cube_files": "true",
            },
        )
        self.assertEqual(test_SPSet.plots, {"grid_spacing": "0.05", "total_density": "0"})
        self.assertEqual(test_SPSet.smx, {"solvent": "water"})
        self.assertEqual(test_SPSet.molecule, test_molecule)
class FreqSetTest(PymatgenTest):
    """Unit tests for the FreqSet defaults (frequency-calculation inputs)."""
    def test_init(self):
        # Plain frequency set: no solvation.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_FreqSet = FreqSet(molecule=test_molecule)
        self.assertEqual(
            test_FreqSet.rem,
            {
                "job_type": "freq",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_FreqSet.pcm, {})
        self.assertEqual(test_FreqSet.solvent, {})
        self.assertEqual(test_FreqSet.molecule, test_molecule)
    def test_pcm_init(self):
        # pcm_dielectric should enable solvent_method=pcm plus pcm/solvent
        # sections.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_FreqSet = FreqSet(molecule=test_molecule, pcm_dielectric=10.0)
        self.assertEqual(
            test_FreqSet.rem,
            {
                "job_type": "freq",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "pcm",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(
            test_FreqSet.pcm,
            {
                "heavypoints": "194",
                "hpoints": "194",
                "radii": "uff",
                "theory": "cpcm",
                "vdwscale": "1.1",
            },
        )
        self.assertEqual(test_FreqSet.solvent, {"dielectric": 10.0})
        self.assertEqual(test_FreqSet.molecule, test_molecule)
    def test_smd_init(self):
        # smd_solvent should enable solvent_method=smd and set the smx block.
        test_molecule = QCInput.from_file(os.path.join(test_dir, "new_qchem_files/pcm.qin")).molecule
        test_FreqSet = FreqSet(molecule=test_molecule, smd_solvent="water")
        self.assertEqual(
            test_FreqSet.rem,
            {
                "job_type": "freq",
                "gen_scfman": "true",
                "basis": "def2-tzvppd",
                "max_scf_cycles": 200,
                "method": "wb97xd",
                "scf_algorithm": "diis",
                "xc_grid": "3",
                "solvent_method": "smd",
                "ideriv": "1",
                "symmetry": "false",
                "sym_ignore": "true",
                "resp_charges": "true",
            },
        )
        self.assertEqual(test_FreqSet.smx, {"solvent": "water"})
        self.assertEqual(test_FreqSet.molecule, test_molecule)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from typing import Any, Callable, Optional
from django.http import HttpRequest, HttpResponse
from django.test import TestCase
from zerver.models import (
get_client, get_realm, get_stream, get_user_profile_by_email,
Message, Recipient, UserProfile
)
from zerver.lib.actions import (
apply_events,
create_stream_if_needed,
do_add_alert_words,
check_add_realm_emoji,
do_add_realm_filter,
do_change_avatar_source,
do_change_default_all_public_streams,
do_change_default_events_register_stream,
do_change_default_sending_stream,
do_change_full_name,
do_change_is_admin,
do_change_stream_description,
do_change_subscription_property,
do_create_user,
do_deactivate_stream,
do_deactivate_user,
do_regenerate_api_key,
do_remove_alert_words,
do_remove_realm_emoji,
do_remove_realm_filter,
do_remove_subscription,
do_rename_stream,
do_add_default_stream,
do_set_muted_topics,
do_set_realm_create_stream_by_admins_only,
do_set_realm_name,
do_set_realm_restricted_to_domain,
do_set_realm_invite_required,
do_set_realm_invite_by_admins_only,
do_set_realm_message_editing,
do_set_realm_default_language,
do_update_message,
do_update_pointer,
do_change_twenty_four_hour_time,
do_change_left_side_userlist,
fetch_initial_state_data,
get_subscription
)
from zerver.lib.event_queue import allocate_client_descriptor
from zerver.lib.test_helpers import ZulipTestCase, POSTRequestMock
from zerver.lib.validator import (
check_bool, check_dict, check_int, check_list, check_string,
equals, check_none_or, Validator
)
from zerver.views import _default_all_public_streams, _default_narrow
from zerver.tornadoviews import get_events_backend
from collections import OrderedDict
import time
import ujson
from six.moves import range
class GetEventsTest(ZulipTestCase):
    """Tests for the Tornado ``get_events`` endpoint: queue creation,
    event delivery, local-id echo and narrow filtering."""
    def tornado_call(self, view_func, user_profile, post_data):
        # type: (Callable[[HttpRequest, UserProfile], HttpResponse], UserProfile, Dict[str, Any]) -> HttpResponse
        # Invoke a Tornado view function directly with a mocked POST request.
        request = POSTRequestMock(post_data, user_profile)
        return view_func(request, user_profile)
    def test_get_events(self):
        # type: () -> None
        email = "hamlet@zulip.com"
        recipient_email = "othello@zulip.com"
        user_profile = get_user_profile_by_email(email)
        recipient_user_profile = get_user_profile_by_email(recipient_email)
        self.login(email)
        # Register an event queue for the sender ...
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]
        # ... and one for the recipient.
        recipient_result = self.tornado_call(get_events_backend, recipient_user_profile,
                                             {"apply_markdown": ujson.dumps(True),
                                              "event_types": ujson.dumps(["message"]),
                                              "user_client": "website",
                                              "dont_block": ujson.dumps(True),
                                              })
        self.assert_json_success(recipient_result)
        recipient_queue_id = ujson.loads(recipient_result.content)["queue_id"]
        # Before any messages are sent, the sender's queue is empty.
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0, True)
        # Send a PM tagged with the sender's queue: the event that comes
        # back on that queue must echo the local_message_id.
        local_id = 10.01
        self.send_message(email, recipient_email, Recipient.PERSONAL, "hello", local_id=local_id, sender_queue_id=queue_id)
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1, True)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)
        # Fetching from the last seen event id should only return the new
        # message sent below.
        last_event_id = events[0]["id"]
        local_id += 0.01
        self.send_message(email, recipient_email, Recipient.PERSONAL, "hello", local_id=local_id, sender_queue_id=queue_id)
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": last_event_id,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1, True)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        # Test that the received message in the receiver's event queue
        # exists and does not contain a local id
        recipient_result = self.tornado_call(get_events_backend, recipient_user_profile,
                                             {"queue_id": recipient_queue_id,
                                              "user_client": "website",
                                              "last_event_id": -1,
                                              "dont_block": ujson.dumps(True),
                                              })
        recipient_events = ujson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assertEqual(len(recipient_events), 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])
    def test_get_events_narrow(self):
        # type: () -> None
        email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email(email)
        self.login(email)
        # Register a queue narrowed to stream "denmark".
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "narrow": ujson.dumps([["stream", "denmark"]]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0, True)
        # Only the stream message should pass the narrow; the PM must be
        # filtered out of this queue.
        self.send_message(email, "othello@zulip.com", Recipient.PERSONAL, "hello")
        self.send_message(email, "Denmark", Recipient.STREAM, "hello")
        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1, True)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["display_recipient"], "Denmark")
class EventsRegisterTest(ZulipTestCase):
user_profile = get_user_profile_by_email("hamlet@zulip.com")
bot = get_user_profile_by_email("welcome-bot@zulip.com")
maxDiff = None # type: Optional[int]
def create_bot(self, email):
# type: (str) -> UserProfile
return do_create_user(email, '123',
get_realm('zulip.com'), 'Test Bot', 'test',
bot_type=UserProfile.DEFAULT_BOT, bot_owner=self.user_profile)
def realm_bot_schema(self, field_name, check):
# type: (str, Validator) -> Validator
return check_dict([
('type', equals('realm_bot')),
('op', equals('update')),
('bot', check_dict([
('email', check_string),
(field_name, check),
])),
])
def do_test(self, action, event_types=None):
# type: (Callable[[], Any], Optional[List[str]]) -> List[Dict[str, Any]]
client = allocate_client_descriptor(
dict(user_profile_id = self.user_profile.id,
user_profile_email = self.user_profile.email,
realm_id = self.user_profile.realm.id,
event_types = event_types,
client_type_name = "website",
apply_markdown = True,
all_public_streams = False,
queue_timeout = 600,
last_connection_time = time.time(),
narrow = [])
)
# hybrid_state = initial fetch state + re-applying events triggered by our action
# normal_state = do action then fetch at the end (the "normal" code path)
hybrid_state = fetch_initial_state_data(self.user_profile, event_types, "")
action()
events = client.event_queue.contents()
self.assertTrue(len(events) > 0)
apply_events(hybrid_state, events, self.user_profile)
normal_state = fetch_initial_state_data(self.user_profile, event_types, "")
self.match_states(hybrid_state, normal_state)
return events
def assert_on_error(self, error):
# type: (str) -> None
if error:
raise AssertionError(error)
def match_states(self, state1, state2):
# type: (Dict[str, Any], Dict[str, Any]) -> None
def normalize(state):
# type: (Dict[str, Any]) -> None
state['realm_users'] = {u['email']: u for u in state['realm_users']}
for u in state['subscriptions']:
u['subscribers'].sort()
state['subscriptions'] = {u['name']: u for u in state['subscriptions']}
state['unsubscribed'] = {u['name']: u for u in state['unsubscribed']}
if 'realm_bots' in state:
state['realm_bots'] = {u['email']: u for u in state['realm_bots']}
normalize(state1)
normalize(state2)
self.assertEqual(state1, state2)
def test_send_message_events(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('message')),
('flags', check_list(None)),
('message', check_dict([
('avatar_url', check_string),
('client', check_string),
('content', check_string),
('content_type', equals('text/html')),
('display_recipient', check_string),
('gravatar_hash', check_string),
('id', check_int),
('recipient_id', check_int),
('sender_domain', check_string),
('sender_email', check_string),
('sender_full_name', check_string),
('sender_id', check_int),
('sender_short_name', check_string),
('subject', check_string),
('subject_links', check_list(None)),
('timestamp', check_int),
('type', check_string),
])),
])
events = self.do_test(lambda: self.send_message("hamlet@zulip.com", "Verona", Recipient.STREAM, "hello"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = check_dict([
('type', equals('update_message')),
('flags', check_list(None)),
('content', check_string),
('edit_timestamp', check_int),
('flags', check_list(None)),
('message_id', check_int),
('message_ids', check_list(check_int)),
('orig_content', check_string),
('orig_rendered_content', check_string),
('orig_subject', check_string),
('propagate_mode', check_string),
('rendered_content', check_string),
('sender', check_string),
('stream_id', check_int),
('subject', check_string),
('subject_links', check_list(None)),
# There is also a timestamp field in the event, but we ignore it, as
# it's kind of an unwanted but harmless side effect of calling log_event.
])
message = Message.objects.order_by('-id')[0]
topic = 'new_topic'
propagate_mode = 'change_all'
content = 'new content'
rendered_content = message.render_markdown(content)
events = self.do_test(lambda: do_update_message(self.user_profile, message, topic, propagate_mode, content, rendered_content))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_pointer_events(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('pointer')),
('pointer', check_int)
])
events = self.do_test(lambda: do_update_pointer(self.user_profile, 1500))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_register_events(self):
# type: () -> None
realm_user_add_checker = check_dict([
('type', equals('realm_user')),
('op', equals('add')),
('person', check_dict([
('email', check_string),
('full_name', check_string),
('is_admin', check_bool),
('is_bot', check_bool),
])),
])
stream_create_checker = check_dict([
('type', equals('stream')),
('op', equals('create')),
('streams', check_list(check_dict([
('description', check_string),
('invite_only', check_bool),
('name', check_string),
('stream_id', check_int),
])))
])
events = self.do_test(lambda: self.register("test1", "test1"))
error = realm_user_add_checker('events[0]', events[0])
self.assert_on_error(error)
error = stream_create_checker('events[1]', events[1])
self.assert_on_error(error)
def test_alert_words_events(self):
# type: () -> None
alert_words_checker = check_dict([
('type', equals('alert_words')),
('alert_words', check_list(check_string)),
])
events = self.do_test(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
error = alert_words_checker('events[0]', events[0])
self.assert_on_error(error)
events = self.do_test(lambda: do_remove_alert_words(self.user_profile, ["alert_word"]))
error = alert_words_checker('events[0]', events[0])
self.assert_on_error(error)
def test_default_streams_events(self):
# type: () -> None
default_streams_checker = check_dict([
('type', equals('default_streams')),
('default_streams', check_list(check_dict([
('description', check_string),
('invite_only', check_bool),
('name', check_string),
('stream_id', check_int),
]))),
])
events = self.do_test(lambda: do_add_default_stream(self.user_profile.realm, "Scotland"))
error = default_streams_checker('events[0]', events[0])
self.assert_on_error(error)
def test_muted_topics_events(self):
# type: () -> None
muted_topics_checker = check_dict([
('type', equals('muted_topics')),
('muted_topics', check_list(check_list(check_string, 2))),
])
events = self.do_test(lambda: do_set_muted_topics(self.user_profile, [[u"Denmark", u"topic"]]))
error = muted_topics_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_full_name(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict([
('email', check_string),
('full_name', check_string),
])),
])
events = self.do_test(lambda: do_change_full_name(self.user_profile, 'Sir Hamlet'))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_name(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('name')),
('value', check_string),
])
events = self.do_test(lambda: do_set_realm_name(self.user_profile.realm, 'New Realm Name'))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_restricted_to_domain(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('restricted_to_domain')),
('value', check_bool),
])
# The first True is probably a noop, then we get transitions in both directions.
for restricted_to_domain in (True, False, True):
events = self.do_test(lambda: do_set_realm_restricted_to_domain(self.user_profile.realm, restricted_to_domain))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_invite_required(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('invite_required')),
('value', check_bool),
])
# The first False is probably a noop, then we get transitions in both directions.
for invite_required in (False, True, False):
events = self.do_test(lambda: do_set_realm_invite_required(self.user_profile.realm, invite_required))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_invite_by_admins_only(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('invite_by_admins_only')),
('value', check_bool),
])
# The first False is probably a noop, then we get transitions in both directions.
for invite_by_admins_only in (False, True, False):
events = self.do_test(lambda: do_set_realm_invite_by_admins_only(self.user_profile.realm, invite_by_admins_only))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_default_language(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('default_language')),
('value', check_string),
])
events = self.do_test(lambda: do_set_realm_default_language(self.user_profile.realm, 'de'))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_create_stream_by_admins_only(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('create_stream_by_admins_only')),
('value', check_bool),
])
# The first False is probably a noop, then we get transitions in both directions.
for create_stream_by_admins_only in (False, True, False):
events = self.do_test(lambda: do_set_realm_create_stream_by_admins_only(self.user_profile.realm,
create_stream_by_admins_only))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_pin_stream(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('subscription')),
('op', equals('update')),
('property', equals('pin_to_top')),
('value', check_bool),
])
stream = "Denmark"
sub = get_subscription(stream, self.user_profile)
# The first False is probably a noop, then we get transitions in both directions.
for pinned in (False, True, False):
events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", pinned))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_message_edit_settings(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('default')),
('data', check_dict([('allow_message_editing', check_bool),
('message_content_edit_limit_seconds', check_int)])),
])
# Test every transition among the four possibilities {T,F} x {0, non-0}
for (allow_message_editing, message_content_edit_limit_seconds) in \
((True, 0), (False, 0), (True, 0), (False, 1234), (True, 0), (True, 1234), (True, 0),
(False, 0), (False, 1234), (False, 0), (True, 1234), (False, 0),
(True, 1234), (True, 600), (False, 600), (False, 1234), (True, 600)):
events = self.do_test(lambda: do_set_realm_message_editing(self.user_profile.realm,
allow_message_editing, message_content_edit_limit_seconds))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_is_admin(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict([
('email', check_string),
('is_admin', check_bool),
])),
])
# The first False is probably a noop, then we get transitions in both directions.
for is_admin in [False, True, False]:
events = self.do_test(lambda: do_change_is_admin(self.user_profile, is_admin))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_twenty_four_hour_time(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('update_display_settings')),
('setting_name', equals('twenty_four_hour_time')),
('user', check_string),
('setting', check_bool),
])
# The first False is probably a noop, then we get transitions in both directions.
for setting_value in [False, True, False]:
events = self.do_test(lambda: do_change_twenty_four_hour_time(self.user_profile, setting_value))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_left_side_userlist(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('update_display_settings')),
('setting_name', equals('left_side_userlist')),
('user', check_string),
('setting', check_bool),
])
# The first False is probably a noop, then we get transitions in both directions.
for setting_value in [False, True, False]:
events = self.do_test(lambda: do_change_left_side_userlist(self.user_profile, setting_value))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_realm_emoji_events(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm_emoji')),
('op', equals('update')),
('realm_emoji', check_dict([])),
])
events = self.do_test(lambda: check_add_realm_emoji(get_realm("zulip.com"), "my_emoji",
"https://realm.com/my_emoji"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
events = self.do_test(lambda: do_remove_realm_emoji(get_realm("zulip.com"), "my_emoji"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_realm_filter_events(self):
# type: () -> None
schema_checker = check_dict([
('type', equals('realm_filters')),
('realm_filters', check_list(None)), # TODO: validate tuples in the list
])
events = self.do_test(lambda: do_add_realm_filter(get_realm("zulip.com"), "#[123]",
"https://realm.com/my_realm_filter/%(id)s"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
self.do_test(lambda: do_remove_realm_filter(get_realm("zulip.com"), "#[123]"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_create_bot(self):
# type: () -> None
bot_created_checker = check_dict([
('type', equals('realm_bot')),
('op', equals('add')),
('bot', check_dict([
('email', check_string),
('full_name', check_string),
('api_key', check_string),
('default_sending_stream', check_none_or(check_string)),
('default_events_register_stream', check_none_or(check_string)),
('default_all_public_streams', check_bool),
('avatar_url', check_string),
])),
])
action = lambda: self.create_bot('test-bot@zulip.com')
events = self.do_test(action)
error = bot_created_checker('events[1]', events[1])
self.assert_on_error(error)
def test_change_bot_full_name(self):
# type: () -> None
action = lambda: do_change_full_name(self.bot, 'New Bot Name')
events = self.do_test(action)
error = self.realm_bot_schema('full_name', check_string)('events[1]', events[1])
self.assert_on_error(error)
def test_regenerate_bot_api_key(self):
# type: () -> None
action = lambda: do_regenerate_api_key(self.bot)
events = self.do_test(action)
error = self.realm_bot_schema('api_key', check_string)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_avatar_source(self):
# type: () -> None
action = lambda: do_change_avatar_source(self.bot, self.bot.AVATAR_FROM_USER)
events = self.do_test(action)
error = self.realm_bot_schema('avatar_url', check_string)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_all_public_streams(self):
# type: () -> None
action = lambda: do_change_default_all_public_streams(self.bot, True)
events = self.do_test(action)
error = self.realm_bot_schema('default_all_public_streams', check_bool)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_sending_stream(self):
# type: () -> None
stream = get_stream("Rome", self.bot.realm)
action = lambda: do_change_default_sending_stream(self.bot, stream)
events = self.do_test(action)
error = self.realm_bot_schema('default_sending_stream', check_string)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_events_register_stream(self):
# type: () -> None
stream = get_stream("Rome", self.bot.realm)
action = lambda: do_change_default_events_register_stream(self.bot, stream)
events = self.do_test(action)
error = self.realm_bot_schema('default_events_register_stream', check_string)('events[0]', events[0])
self.assert_on_error(error)
def test_do_deactivate_user(self):
# type: () -> None
bot_deactivate_checker = check_dict([
('type', equals('realm_bot')),
('op', equals('remove')),
('bot', check_dict([
('email', check_string),
('full_name', check_string),
])),
])
bot = self.create_bot('foo-bot@zulip.com')
action = lambda: do_deactivate_user(bot)
events = self.do_test(action)
error = bot_deactivate_checker('events[1]', events[1])
self.assert_on_error(error)
def test_rename_stream(self):
# type: () -> None
realm = get_realm('zulip.com')
stream, _ = create_stream_if_needed(realm, 'old_name')
new_name = u'stream with a brand new name'
self.subscribe_to_stream(self.user_profile.email, stream.name)
action = lambda: do_rename_stream(realm, stream.name, new_name)
events = self.do_test(action)
schema_checker = check_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('email_address')),
('value', check_string),
('name', equals('old_name')),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = check_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('name')),
('value', equals(new_name)),
('name', equals('old_name')),
])
error = schema_checker('events[1]', events[1])
self.assert_on_error(error)
def test_deactivate_stream_neversubscribed(self):
# type: () -> None
realm = get_realm('zulip.com')
stream, _ = create_stream_if_needed(realm, 'old_name')
action = lambda: do_deactivate_stream(stream)
events = self.do_test(action)
schema_checker = check_dict([
('type', equals('stream')),
('op', equals('delete')),
('streams', check_list(check_dict([]))),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_subscribe_other_user_never_subscribed(self):
# type: () -> None
action = lambda: self.subscribe_to_stream("othello@zulip.com", u"test_stream")
events = self.do_test(action)
schema_checker = check_dict([
('type', equals('subscription')),
('op', equals('peer_add')),
('user_email', check_string),
('subscriptions', check_list(check_string)),
])
error = schema_checker('events[2]', events[2])
self.assert_on_error(error)
def test_subscribe_events(self):
# type: () -> None
subscription_schema_checker = check_list(
check_dict([
('color', check_string),
('description', check_string),
('email_address', check_string),
('invite_only', check_bool),
('in_home_view', check_bool),
('name', check_string),
('desktop_notifications', check_bool),
('audible_notifications', check_bool),
('stream_id', check_int),
('subscribers', check_list(check_int)),
])
)
add_schema_checker = check_dict([
('type', equals('subscription')),
('op', equals('add')),
('subscriptions', subscription_schema_checker),
])
remove_schema_checker = check_dict([
('type', equals('subscription')),
('op', equals('remove')),
('subscriptions', check_list(
check_dict([
('name', equals('test_stream')),
('stream_id', check_int),
]),
)),
])
peer_add_schema_checker = check_dict([
('type', equals('subscription')),
('op', equals('peer_add')),
('user_email', check_string),
('subscriptions', check_list(check_string)),
])
peer_remove_schema_checker = check_dict([
('type', equals('subscription')),
('op', equals('peer_remove')),
('user_email', check_string),
('subscriptions', check_list(check_string)),
])
stream_update_schema_checker = check_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('description')),
('value', check_string),
('name', check_string),
])
action = lambda: self.subscribe_to_stream("hamlet@zulip.com", "test_stream") # type: Callable
events = self.do_test(action, event_types=["subscription", "realm_user"])
error = add_schema_checker('events[0]', events[0])
self.assert_on_error(error)
action = lambda: self.subscribe_to_stream("othello@zulip.com", "test_stream")
events = self.do_test(action)
error = peer_add_schema_checker('events[0]', events[0])
self.assert_on_error(error)
stream = get_stream("test_stream", self.user_profile.realm)
action = lambda: do_remove_subscription(get_user_profile_by_email("othello@zulip.com"), stream)
events = self.do_test(action)
error = peer_remove_schema_checker('events[0]', events[0])
self.assert_on_error(error)
action = lambda: do_remove_subscription(get_user_profile_by_email("hamlet@zulip.com"), stream)
events = self.do_test(action)
error = remove_schema_checker('events[1]', events[1])
self.assert_on_error(error)
action = lambda: self.subscribe_to_stream("hamlet@zulip.com", "test_stream")
events = self.do_test(action)
error = add_schema_checker('events[1]', events[1])
self.assert_on_error(error)
action = lambda: do_change_stream_description(get_realm('zulip.com'), 'test_stream', u'new description')
events = self.do_test(action)
error = stream_update_schema_checker('events[0]', events[0])
self.assert_on_error(error)
class FetchInitialStateDataTest(ZulipTestCase):
    """Checks the `realm_bots` section of the initial state payload."""

    # Non-admin users don't have access to all bots
    def test_realm_bots_non_admin(self):
        # type: () -> None
        email = 'cordelia@zulip.com'
        user_profile = get_user_profile_by_email(email)
        self.assertFalse(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "")
        self.assert_length(result['realm_bots'], 0)
        # additionally the API key for a random bot is not present in the data
        api_key = get_user_profile_by_email('notification-bot@zulip.com').api_key
        self.assertNotIn(api_key, str(result))

    # Admin users have access to all bots in the realm_bots field
    def test_realm_bots_admin(self):
        # type: () -> None
        email = 'hamlet@zulip.com'
        user_profile = get_user_profile_by_email(email)
        do_change_is_admin(user_profile, True)
        self.assertTrue(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "")
        # The test realm ships with more than 5 bots; the exact count is not
        # pinned so the test stays robust against fixture changes.
        self.assertTrue(len(result['realm_bots']) > 5)
from zerver.lib.event_queue import EventQueue
class EventQueueTest(TestCase):
def test_one_event(self):
# type: () -> None
queue = EventQueue("1")
queue.push({"type": "pointer",
"pointer": 1,
"timestamp": "1"})
self.assertFalse(queue.empty())
self.assertEqual(queue.contents(),
[{'id': 0,
'type': 'pointer',
"pointer": 1,
"timestamp": "1"}])
def test_event_collapsing(self):
# type: () -> None
queue = EventQueue("1")
for pointer_val in range(1, 10):
queue.push({"type": "pointer",
"pointer": pointer_val,
"timestamp": str(pointer_val)})
self.assertEqual(queue.contents(),
[{'id': 8,
'type': 'pointer',
"pointer": 9,
"timestamp": "9"}])
queue = EventQueue("2")
for pointer_val in range(1, 10):
queue.push({"type": "pointer",
"pointer": pointer_val,
"timestamp": str(pointer_val)})
queue.push({"type": "unknown"})
queue.push({"type": "restart", "server_generation": "1"})
for pointer_val in range(11, 20):
queue.push({"type": "pointer",
"pointer": pointer_val,
"timestamp": str(pointer_val)})
queue.push({"type": "restart", "server_generation": "2"})
self.assertEqual(queue.contents(),
[{"type": "unknown",
"id": 9,},
{'id': 19,
'type': 'pointer',
"pointer": 19,
"timestamp": "19"},
{"id": 20,
"type": "restart",
"server_generation": "2"}])
for pointer_val in range(21, 23):
queue.push({"type": "pointer",
"pointer": pointer_val,
"timestamp": str(pointer_val)})
self.assertEqual(queue.contents(),
[{"type": "unknown",
"id": 9,},
{'id': 19,
'type': 'pointer',
"pointer": 19,
"timestamp": "19"},
{"id": 20,
"type": "restart",
"server_generation": "2"},
{'id': 22,
'type': 'pointer',
"pointer": 22,
"timestamp": "22"},
])
def test_flag_add_collapsing(self):
# type: () -> None
queue = EventQueue("1")
queue.push({"type": "update_message_flags",
"flag": "read",
"operation": "add",
"all": False,
"messages": [1, 2, 3, 4],
"timestamp": "1"})
queue.push({"type": "update_message_flags",
"flag": "read",
"all": False,
"operation": "add",
"messages": [5, 6],
"timestamp": "1"})
self.assertEqual(queue.contents(),
[{'id': 1,
'type': 'update_message_flags',
"all": False,
"flag": "read",
"operation": "add",
"messages": [1, 2, 3, 4, 5, 6],
"timestamp": "1"}])
def test_flag_remove_collapsing(self):
# type: () -> None
queue = EventQueue("1")
queue.push({"type": "update_message_flags",
"flag": "collapsed",
"operation": "remove",
"all": False,
"messages": [1, 2, 3, 4],
"timestamp": "1"})
queue.push({"type": "update_message_flags",
"flag": "collapsed",
"all": False,
"operation": "remove",
"messages": [5, 6],
"timestamp": "1"})
self.assertEqual(queue.contents(),
[{'id': 1,
'type': 'update_message_flags',
"all": False,
"flag": "collapsed",
"operation": "remove",
"messages": [1, 2, 3, 4, 5, 6],
"timestamp": "1"}])
def test_collapse_event(self):
# type: () -> None
queue = EventQueue("1")
queue.push({"type": "pointer",
"pointer": 1,
"timestamp": "1"})
queue.push({"type": "unknown",
"timestamp": "1"})
self.assertEqual(queue.contents(),
[{'id': 0,
'type': 'pointer',
"pointer": 1,
"timestamp": "1"},
{'id': 1,
'type': 'unknown',
"timestamp": "1"}])
class TestEventsRegisterAllPublicStreamsDefaults(TestCase):
    """Tests for _default_all_public_streams: an explicitly passed True/False
    wins; the user's stored default is used only when the caller passes None."""

    def setUp(self):
        # type: () -> None
        self.email = 'hamlet@zulip.com'
        self.user_profile = get_user_profile_by_email(self.email)

    def test_use_passed_all_public_true_default_false(self):
        # type: () -> None
        # explicit True overrides a stored False default
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_true_default(self):
        # type: () -> None
        # explicit True agrees with a stored True default
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_false_default_false(self):
        # type: () -> None
        # explicit False agrees with a stored False default
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_passed_all_public_false_default_true(self):
        # type: () -> None
        # explicit False overrides a stored True default
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_true_default_for_none(self):
        # type: () -> None
        # None falls back to the stored True default
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertTrue(result)

    def test_use_false_default_for_none(self):
        # type: () -> None
        # None falls back to the stored False default
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertFalse(result)
class TestEventsRegisterNarrowDefaults(TestCase):
    """Tests for _default_narrow: a non-empty narrow passed by the caller wins;
    the user's default_events_register_stream is used only when the passed
    narrow is empty."""

    def setUp(self):
        # type: () -> None
        self.email = 'hamlet@zulip.com'
        self.user_profile = get_user_profile_by_email(self.email)
        self.stream = get_stream('Verona', self.user_profile.realm)

    def test_use_passed_narrow_no_default(self):
        # type: () -> None
        # explicit narrow is kept when no default stream is configured
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_passed_narrow_with_default(self):
        # type: () -> None
        # explicit narrow still wins over a configured default stream
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_default_if_narrow_is_empty(self):
        # type: () -> None
        # empty narrow falls back to the configured default stream
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [[u'stream', u'Verona']])

    def test_use_narrow_if_default_is_none(self):
        # type: () -> None
        # empty narrow stays empty when no default stream is configured
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [])
| |
import struct
import numpy as np
import scipy as sp
from ctypes import *
"""
iwlnl_struct.py
This class has methods that extracts CSI information from NETLINK data. Can also parse
data from file, stored using log_to_file.c tool.
It has the following methods that compress and quantize the V matrix according to the
802.11n-draft standard.
@compress - given a unitary matrix V (3x3 for now) it compresses the matrix using
the procedure described in section 20.3.12.2.3 of 802.11n draft.
@quantize - given an angle, produced by the @compress method above, the number
of bits used to quantize and which angle it is (phi or psi) it quantizes the
angles as described on section 7.3.1.29 of 802.11n draft.
@concatinate_bits - the resulted, k values, from @quantize method are concatinated
into a bitstring, result is a single value that is applied on the card.
@break_bits - is the oposite of @concatinate_bits, given a bitpattern, and number
of bits used to quantize each angle, it reproduced the k-values.
Astrit Zhushi (c) 2011 a.zhushi@cs.ucl.ac.uk
"""
class iwlnl_struct:
def __init__(self, raw_data=False, from_file=False):
if raw_data:
self.parse(raw_data, from_file)
# """ Given raw bytes parse it to meaningful CSI information :D
#
# @raw_data - raw bytes (either from NETLINK socket or FILE)
# @from_file - speself.y if data was read from NETLINK or FILE, NETLINK default
# """
    def parse(self, raw_data, from_file=False):
        """Decode one beamforming-feedback record into the instance fields.

        @raw_data  - raw bytes from the NETLINK socket or from a log file
        @from_file - True when the bytes came from a log_to_file.c capture
                     (different framing prefix), False for NETLINK (default)
        """
        if not from_file:
            self.unpacked = raw_data[38:] # skip NETLINK header stuff
        else:
            self.unpacked = raw_data[1:] # skip the first byte
        # exract noise a b c, bfee_count each held on an unsigned char
        # bytes 0 - 4
        tmp = struct.unpack("BBBBB", self.unpacked[:5])
        self.noise_a = tmp[0] # 0
        self.noise_b = tmp[1] # 1
        self.noise_c = tmp[2] # 2
        self.bfee_count = tmp[3] + (tmp[4] << 8) # 3-4, little-endian u16
        # extract Nrx, Ntx, rssi_a up to antenna_sel
        # bytes 7 - 18 (bytes 5-6 are skipped; presumably reserved/padding in
        # the bfee record -- TODO confirm against the capture tool's struct)
        tmp = struct.unpack("BBBBBbBBBBBB", self.unpacked[7:19])
        self.Nrx = tmp[0] # 7
        self.Ntx = tmp[1] # 8
        self.rssi_a = tmp[2] # 9
        self.rssi_b = tmp[3] # 10
        self.rssi_c = tmp[4] # 11
        self.noise = tmp[5] # 12 (signed char)
        self.agc = tmp[6] # 13
        self.antenna_sel = tmp[7] # 14
        self.length = tmp[8] + (tmp[9] << 8) # 15-16, little-endian u16
        self.rate = tmp[10] + (tmp[11] << 8) # 17-18, little-endian u16
        # number of subcarrier groups in the CSI payload
        self.nrs = 30
        # antenna permutation: two bits per RX chain, converted to 1-based indices
        self.perm = []
        self.perm.append(((self.antenna_sel) & 0x3) + 1)
        self.perm.append(((self.antenna_sel >> 2) & 0x3) + 1)
        self.perm.append(((self.antenna_sel >> 4) & 0x3) + 1)
        # print self.perm
        self.csi = self.parse_csi(self.unpacked[19:])
# """ Set CSI """
def set_csi(self, csi):
self.csi = csi
# """ Get CSI """
def get_csi(self):
return self.csi
# """ Set number of TX elements """
def set_tx(self, tx):
self.Ntx = tx
# """ Set number of RX elements """
def set_rx(self, rx):
self.Nrx = rx
# """ Given raw_data (bytes) parse CSI complex values """
def parse_csi(self, raw_data):
index = 0
remainder = 0
# make a list of 30 elements
csi = [None] * 30
for i in range(0, self.nrs):
index += 3
remainder = (index % 8)
Hx = np.matrix(np.zeros((self.Nrx, self.Ntx), complex))
for r in range(0, self.Nrx):
for t in range(0, self.Ntx):
# first = struct.unpack("B",raw_data[index/8])[0] >> remainder
first = struct.unpack('B', bytes([raw_data[index // 8]]))[0] >> remainder
second = (struct.unpack('B', bytes([raw_data[index // 8 + 1]]))[0] << (8 - remainder))
tmp = (c_byte(first | second).value)
real = (c_double(tmp).value)
first = (struct.unpack('B', bytes([raw_data[index // 8 + 1]]))[0] >> remainder)
second = (struct.unpack('B', bytes([raw_data[index // 8 + 2]]))[0] << (8 - remainder))
tmp = (c_byte(first | second).value)
imag = (c_double(tmp).value)
index += 16
Hx.itemset((r, t), complex(real, imag))
csi[i] = Hx
return csi
def __str__(self):
return "NOISE(A,B,C)=[%d %d %d] Nrx=%d Ntx=%d RSSI(A,B,C)=[%d %d %d] Noise=%d AGC=%d" % (
self.noise_a, self.noise_b, self.noise_c, self.Nrx, self.Ntx, self.rssi_a, self.rssi_b, self.rssi_c,
self.noise,
self.agc)
# return 'Empty'
def print_csi(self):
i = 1
for c in self.csi:
print
"%3d." % i,
print(c),
if (i % 9) == 0:
print
""
i += 1
# """ Quantize the angle according to 802.11n standard
#
# @orig - angles being quantized (generated using compression see @compress)
# @which - which angle is being quantized (psi or phi), psi by default
# @psi_bits - number of bits used to represents PSI, possible values 1 2 3 or 4
#
# @return - a tuble containing the quantized angle and the k values
# """
def quantize(self, orig, which="psi", bits=3):
if bits < 1 or bits > 4:
raise Exception('psi_bits can be 1,2,3 or 4')
a = float(2 ** (bits + 1));
b = float(2 ** (bits + 2));
psi = 0;
if 'psi' is which:
psi = 1
elif 'phi' is which:
psi = 0;
else:
raise Exception('Please speself.y either psi or phi\n');
min = np.pi / bits ** b;
if psi == 1:
k_max = 2 ** (bits - 1)
else:
k_max = 2 ** (bits + 2) - 1;
max = k_max * np.pi / a + np.pi / b;
if orig <= min:
quant_angle = min;
k = 0;
return (quant_angle, 0);
if orig >= max:
quant_angle = max;
k = k_max;
return (quant_angle, k)
t = orig / np.pi * a - a / b
k = np.ceil(t)
quant_angle = k * np.pi / a + np.pi / b;
return (quant_angle, k)
# """ Helper method to quantize a list of set of angles (one set per subcarrier) """
def quantize_angles(self, angles):
l = len(angles)
result = [None] * l
for sc in range(l):
sca = angles[sc]
r = []
for which, a in sca:
r.append((which, self.quantize(a, which, 3)))
result[sc] = r
return result
#
# """ Given k values generated by quantization, it concatinates the bits do generate the bitpattern to be supplied to the card.
# Note: no need to reverse the order of angles as the function will do that.
# @k - array of data as returned by quantize_angles function
# @psi_bits - number of bits used to represent psi
#
# @return - a tuple ([bits], bitpattern), where bits are the number of bits used to represent each angle
# """
def concatinate_bits(self, k, psi_bits=3):
l = len(k)
result = [None] * l
# for every sub-carrier
for sc in range(l):
sck = k[sc]
# need to reverse the order of angles as the last angles should end in the high order bits
# so the order should be: psi_32, phi_22, psi_31, psi_21, phi_21, phi_11
sck.reverse()
r = []
# get each k value (which,(quantized_angle, k_value))
# which can either be 'psi' or 'phi'
alength = len(sck)
(w, (a, result)) = sck[0]
result = int(result)
bits = []
if 'phi' is w:
bits.append(psi_bits + 2)
else:
bits.append(psi_bits)
for i in range(1, alength):
(which, (a, kvalue)) = sck[i]
bit = psi_bits
if 'phi' is which:
bit = psi_bits + 2
bits.append(bit)
kvalue = int(kvalue)
result = (result << bit)
result += kvalue
r.append((bits, result))
return r
# """
# Given a list of bit-string pattern as a tuple (([angle1_bits, angle2_bits, ..., anglen_bits]), bitpattern) it breaks the pattern into individual k values
#
# @return - a tuple ([bits], [k-values])
# """
def break_bits(self, bitpatterns):
l = len(bitpatterns)
result = [None] * l
# for every sub-carrier
for sc in range(l):
(bits, bitstring) = bitpatterns[sc];
ks = []
## reverse the number of bits, since the last bitstring concatinated is the first one now
bits.reverse()
for b in bits:
bitmask = 2 ** b - 1
n = bitstring & bitmask
ks.append(n)
bitstring = bitstring >> b
result[sc] = (bits, ks)
return result
#
# """
# Compress and quantize CSI
# @psi_bits - the number of bits used to quantize psi
# """
    def compress(self, psi_bits=3):
        """Decompose each subcarrier's 3x3 CSI matrix into compressed
        beamforming angles (802.11n style) via phase removal and Givens
        rotations.

        @psi_bits - not used inside this method; quantization is performed
                    separately (see quantize / quantize_angles)

        @return - per-subcarrier list of (kind, angle) tuples in the order
                  [phi_11, phi_21, psi_21, psi_31, phi_22, psi_32]
        """
        if self.Ntx != 3:
            raise Exception('Only 3x3 configuration currently valid!')
        angles = np.angle(self.csi)
        length = len(angles)
        return_angles = [None] * length
        for i in range(length):
            # DTilde carries the phases of the last receive row; multiplying
            # by its conjugate transpose strips those phases column-wise
            DTilde = np.matrix(sp.eye(self.Ntx, dtype=complex))
            ## only 3x3 matrix currently so 6 angles
            # NOTE(review): quant_angles is never used in this method
            quant_angles = []
            # print len(angles)
            for tx in range(0, self.Ntx):
                ang = angles[i]
                DTilde[tx, tx] = np.exp(1j * ang[self.Nrx - 1, tx]);
            csi = self.csi[i]
            tmp = csi * DTilde.getH()
            # phi angles: first-column phases, shifted into [0, 2*pi)
            phi_11 = np.angle(tmp[0, 0])
            if phi_11 < 0:
                phi_11 += 2 * np.pi
            phi_21 = np.angle(tmp[1, 0]);
            if phi_21 < 0:
                phi_21 += 2 * np.pi
            d1 = np.matrix(np.diag([np.exp(1j * phi_11), np.exp(1j * phi_21), 1]))
            tmp = d1.getH() * csi * DTilde.getH()
            x1 = tmp[0, 0]
            x2 = tmp[1, 0]
            # psi_21 zeroes element (1,0) via a Givens rotation
            psi_21 = self.calc_psi(x1, x2)
            G21 = np.matrix(sp.eye(self.Ntx, dtype=complex))
            G21[0, 0] = np.cos(psi_21)
            G21[0, 1] = np.sin(psi_21)
            G21[1, 0] = -np.sin(psi_21)
            G21[1, 1] = np.cos(psi_21)
            tmp = G21 * d1.getH() * csi * DTilde.getH()
            x1 = tmp[0, 0]
            x2 = tmp[2, 0]
            # psi_31 zeroes element (2,0)
            psi_31 = self.calc_psi(x1, x2)
            G31 = np.matrix(sp.eye(self.Ntx, dtype=complex))
            G31[0, 0] = np.cos(psi_31);
            G31[0, 2] = np.sin(psi_31);
            G31[2, 0] = -np.sin(psi_31);
            G31[2, 2] = np.cos(psi_31);
            V2 = G31 * G21 * d1.getH() * csi * DTilde.getH();
            # second column: repeat phase removal + rotation one level down
            phi_22 = np.angle(V2[1, 1])
            if phi_22 < 0:
                phi_22 += 2 * np.pi
            d2 = np.matrix(np.diag([1, np.exp(1j * phi_22), 1]))
            tmp = d2.getH() * V2;
            x1 = tmp[1, 1];
            x2 = tmp[2, 1];
            psi_32 = self.calc_psi(x1, x2)
            # ['phi11 ', 'phi21 ', 'psi21 ', 'psi31 ', 'phi22 ', 'psi32']
            return_angles[i] = [('phi', phi_11), ('phi', phi_21), ('psi', psi_21), ('psi', psi_31), ('phi', phi_22),
                                ('psi', psi_32)]
        return return_angles
# """ Performs a Givens rotation """
def calc_psi(self, x1, x2):
y = np.sqrt([x1 ** 2 + x2 ** 2])
return np.real(np.arccos(x1 / y))[0]
    def get_scaled_csi(self):
        """Scale the raw CSI using reported RSSI and per-chain noise figures.

        Returns a list of Ntx x Nrx matrices (note: transposed relative to
        the stored Nrx x Ntx entries), one per subcarrier.
        """
        rssi = []
        noise = []
        csi_sum = 0
        # NOTE(review): chain A RSSI is excluded here -- confirm whether this
        # is intentional or a leftover.
        # if self.rssi_a > 0:
        # rssi.append(self.rssi_a)
        if self.rssi_b > 0:
            rssi.append(self.rssi_b)
        if self.rssi_c > 0:
            rssi.append(self.rssi_c)
        # shift reported RSSI by -44 and the AGC gain -- presumably the
        # Intel CSI-tool dBm conversion; confirm against the tool docs
        rssi = [r - 44 - self.agc for r in rssi]
        noise.append(self.noise_a)
        noise.append(self.noise_b)
        noise.append(self.noise_c)
        # keep one noise figure per active receive chain
        noise = [noise[i] for i in range(0, self.Nrx)]
        ref_noise = max(noise)
        noise_diff = [ref_noise - n for n in noise]
        # dB -> linear power ratios relative to the noisiest chain
        # NOTE(review): n / 10 truncates for ints under Python 2 -- confirm
        noise_diff_abs = [np.power(10, n / 10) for n in noise_diff]
        ref_rssi = [r - ref_noise for r in rssi]
        rssi_sum = np.sum([pow(10, (r / 10)) for r in ref_rssi])
        # rssi_sum = numpy.sum(rssi_sum)
        for i in range(0, self.nrs):
            # NOTE(review): indexes self.csi[0] on EVERY iteration -- likely
            # intended self.csi[i]; also sqrt(|H|) rather than the usual
            # |H|^2 power sum. Confirm before relying on this scaling.
            tmpAbs = np.abs(self.csi[0])
            csi_sum = csi_sum + np.sqrt(tmpAbs)
        common_scale = np.sqrt(rssi_sum / csi_sum * self.nrs)
        scale_per_rx = [common_scale * np.sqrt(n) for n in noise_diff_abs]
        # NOTE(review): hard-coded 30 entries vs. self.nrs elsewhere --
        # confirm nrs is always 30 or this leaves trailing None entries.
        ret = [None] * 30
        for j in range(0, self.nrs):
            tmpHx = self.csi[j]
            tmpRet = np.matrix(np.zeros((self.Ntx, self.Nrx), complex))
            # transpose while applying the per-receive-chain scale factor
            for r in range(0, len(scale_per_rx)):
                for t in range(0, self.Ntx):
                    tmpVal = tmpHx.item((r, t)) * scale_per_rx[r]
                    tmpRet.itemset((t, r), tmpVal.item(0))
            ret[j] = tmpRet
        return ret
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from io import BytesIO
from datetime import datetime
from struct import Struct
from ftplib import FTP
from netrc import netrc
class Helper(object):
    """Small string utilities shared by the mailbox record classes."""

    @classmethod
    def nonull(cls, line):
        """Return everything in *line* before the first NUL byte, as str."""
        head, _sep, _tail = line.partition('\x00')
        return str(head)
class FormatException(Exception):
    """Raised when a mailbox record does not match the expected layout."""

    def __init__(self, message, *args, **kwargs):
        super(FormatException, self).__init__(message, *args, **kwargs)
        # keep the message accessible as an attribute as well
        self.message = message
class MailboxRecordingFile(object):
    """Metadata of a single voice-recording file referenced by a record."""

    __slots__ = ['filename', 'full_path', 'file_size']

    def __init__(self, filename=None, file_size=None, file_path=None):
        self.filename = filename
        self.file_size = file_size
        self.full_path = file_path

    def __str__(self):
        return self.filename
class MailboxItem(object):
    """One fixed-size record of the answering-machine index file.

    NOTE(review): written for Python 2 -- the string properties call
    .decode() on str values returned by Helper.nonull; under Python 3
    str has no decode().
    """
    __slots__ = ['data', 'encoding', 'raw_data']
    # big-endian record layout; 'x' pad bytes skip fields of unknown meaning
    formatstring = '>Ib3x6I 20x 16s 56x 15s 17x 80s 48x 5B 31x 24sI'

    def __init__(self, data):
        self.data = data
        self.encoding = 'latin-1'
        self.raw_data = self.unpack(data)
        # first unpacked field serves as a format/version marker
        if self.ident != 348:
            raise FormatException('Unknown format identifier: %d' % self.ident)

    def __str__(self):
        return u'{time}\t{caller}\t{number}\t{duration}s\t{file}'.format(time=self.call_time, caller=self.caller_number,
                                                                         number=self.number, duration=self.duration, file=self.recording.filename)

    @classmethod
    def struct(cls):
        # NOTE(review): builds a fresh Struct on every call; could be cached
        return Struct(format=cls.formatstring)

    @classmethod
    def unpack(cls, data):
        return cls.struct().unpack_from(data)

    @classmethod
    def size(cls):
        # record size in bytes, as implied by formatstring
        return cls.struct().size

    @property
    def ident(self):
        # format identifier (expected to be 348)
        return self.raw_data[0]

    @property
    def seq(self):
        # sequence number of the message
        return self.raw_data[1]

    @property
    def duration(self):
        # recording duration -- presumably seconds; confirm against device
        return self.raw_data[5]

    @property
    def is_new(self):
        # 1 presumably marks a not-yet-listened message
        if self.raw_data[6] == 1:
            return True
        else:
            return False

    @property
    def sampling(self):
        # NOTE(review): meaning inferred from the name only (sampling rate?)
        return self.raw_data[3]

    @property
    def caller_number(self):
        return Helper.nonull(self.raw_data[8]).decode(self.encoding)

    @property
    def number(self):
        # the called (own) number
        return Helper.nonull(self.raw_data[16]).decode(self.encoding)

    @property
    def call_time(self):
        (day, month, year, hour, minute) = self.raw_data[11:16]
        year += 2000  # stored as a two-digit year
        return datetime(year, month, day, hour, minute)

    @property
    def recording(self):
        # wrap the recording-file fields in a MailboxRecordingFile
        mrf = MailboxRecordingFile()
        mrf.filename = Helper.nonull(self.raw_data[9]).decode(self.encoding)
        mrf.full_path = Helper.nonull(self.raw_data[10]).decode(self.encoding)
        mrf.file_size = self.raw_data[4]
        return mrf

    def dump(self):
        """Return the raw unpacked tuple (useful for debugging)."""
        return self.raw_data
class MailboxReader(object):
    """Iterate over the records of a mailbox index file.

    Rough background information (German forum thread):
    http://www.ip-symcon.de/forum/threads/11555-Fritzbox-Anrufbeantworter?highlight=meta0
    """
    __slots__ = ['mailbox_file']

    def __init__(self, mailbox_file=None):
        self.mailbox_file = mailbox_file

    def __enter__(self):
        return self.__iter__()

    def __exit__(self, exc_type, exc_value, traceback):
        # propagate any exception raised inside the with-block
        if exc_type:
            return False

    def __iter__(self):
        with self.mailbox_file as f:
            while True:
                chunk = f.read(MailboxItem.size())
                if not chunk:
                    break
                yield MailboxItem(chunk)
class FtpReaderException(Exception):
    """Raised for FTP connection and usage errors in FtpReader."""

    def __init__(self, message, *args, **kwargs):
        super(FtpReaderException, self).__init__(message, *args, **kwargs)
        # keep the message accessible as an attribute as well
        self.message = message
class FtpReader(BytesIO):
    """In-memory buffer filled from a file on an FTP server.

    Almost no error handling.

        buffer = FtpReader(host='fritz.home.kerpe.net')
        buffer.connect()
        with buffer as f:
            buffer.read(123)
    """
    # NOTE: __slots__ is ineffective here (BytesIO instances already carry a
    # __dict__); kept for interface compatibility.
    __slots__ = ['host', 'user', 'password', 'basepath', 'filename', 'data']

    def __init__(self, host='fritz.box', user='', password='', use_netrc=True, basepath='voicebox'):
        self.host = host
        self.use_netrc = use_netrc
        self.user = user
        self.password = password
        self.basepath = basepath
        self.filename = None
        self.data = None
        # BUG FIX: initialize the connection handle so read_file()/close()
        # before connect() fail cleanly instead of raising AttributeError.
        self._conn = None
        self._netrc_credentials()
        super(FtpReader, self).__init__()

    def _netrc_credentials(self):
        """Fill user/password from ~/.netrc when requested and available."""
        if self.use_netrc:
            nrc = netrc()
            auth = nrc.authenticators(self.host)
            # BUG FIX: authenticators() returns None for unknown hosts; the
            # original unpacked it unconditionally and raised TypeError.
            if auth:
                (user, account, password) = auth
                if user:
                    self.user = user
                if password:
                    self.password = password

    def connect(self):
        """Open the FTP connection, log in and change to the base path.

        Raises FtpReaderException when the host cannot be reached.
        """
        try:
            self._conn = FTP(self.host)
        except Exception as e:
            # BUG FIX: the original 'except (Exception, e):' was invalid --
            # it referenced an undefined name and never caught anything.
            raise FtpReaderException('could not connect to host: ' + str(e))
        self._conn.login(self.user, self.password)
        if self.basepath:
            self._conn.cwd(self.basepath)

    def read_file(self, filename='meta0', path=None):
        """Download *filename* into this buffer and rewind it for reading."""
        if not self._conn:
            raise FtpReaderException('Not connected')
        self.filename = filename
        if path:
            self._conn.cwd(path)
        self._conn.retrbinary('RETR ' + self.filename, self.write)
        self.flush()
        self.seek(0)

    def close(self):
        # NOTE(review): quits the FTP session but deliberately (?) leaves the
        # BytesIO buffer open -- confirm BytesIO.close() should not be chained.
        self._conn.quit()
if __name__ == '__main__':
    # Fetch the mailbox index file from the Fritz!Box over FTP and list
    # every recorded message; new (unheard) messages are flagged with '*'.
    fbftp = FtpReader(host='fritz.home.kerpe.net', use_netrc=True)
    fbftp.connect()
    fbftp.read_file('meta0')
    mbf = fbftp
    #mbf = io.open('meta0', 'rb')
    # BUG FIX: the original built a throwaway MailboxReader right before the
    # `with` statement created a second one; only one is needed.
    with MailboxReader(mbf) as mb:
        for record in mb:
            flag = '* ' if record.is_new else '- '
            print(flag + str(record))
            # BUG FIX: `print record.dump()` was Python-2-only syntax in a
            # file that otherwise uses print() calls.
            print(record.dump())
| |
#!/usr/local/bin/python2.7
"""
The MIT License (MIT)
Copyright (c) 2015 Maker Musings
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# For a complete discussion, see http://www.makermusings.com
import email.utils
import select
import socket
import struct
import sys
import time
import urllib
import uuid
import ISY
# First TCP port handed to the emulated WeMo switches (one port per device).
base_port = 54900
# NOTE(review): ISY credentials are hard-coded in source -- consider moving
# them to a config file or ~/.netrc.
isypass = "admin"
isyuser = "admin"
isyaddr = "10.1.1.36"
# DOF DON DIM BRT
# /rest/nodes/<node-id>/cmd/<cmd>>/<cmd value>
# /rest/nodes/16 3F E5 1/cmd/DOF
# Fallback map of spoken device name -> ISY node/scene address, used when no
# fauxmo.jsn configuration can be fetched from the ISY controller.
isydevs = {
    "office light": "16 3F E5 1",
    "garage": "20326",
    "floor lamp": "32468",
    "bathroom light": "17 54 69 1",
    "bathroom Fan": "17 50 F4 1",
    "desk light": "16 D3 73 1",
    "backyard lights": "FF 03 0F 2",
}
# /rest/programs/<pgm-id>/<pgm-cmd>
# Valid Commands : 'run', 'runThen', 'runElse', 'stop', 'enable', 'disable', 'enableRunAtStartup', 'disableRunAtStartup'
# This XML is the minimum needed to define one of our virtual switches
# to the Amazon Echo
# Minimal device description served to the Amazon Echo for each virtual
# switch. BUG FIX: %(device_state) was missing the trailing 's' conversion
# character, so every '%' formatting of this template raised ValueError.
SETUP_XML = """<?xml version="1.0"?>
<root>
<device>
<deviceType>urn:MakerMusings:device:controllee:1</deviceType>
<friendlyName>%(device_name)s</friendlyName>
<manufacturer>Belkin International Inc.</manufacturer>
<modelName>Emulated Socket</modelName>
<modelNumber>3.1415</modelNumber>
<binaryState>%(device_state)s</binaryState>
<UDN>uuid:Socket-1_0-%(device_serial)s</UDN>
</device>
</root>
"""
# Global debug flag; main() can also enable it via the '-d' CLI switch.
DEBUG = True


def dbg(msg):
    """Print *msg* immediately when debugging is enabled (output flushed).

    BUG FIX: uses the print() call form (valid in both Python 2 and 3 for a
    single argument) instead of the Python-2-only print statement.
    """
    global DEBUG
    if DEBUG:
        print(msg)
        sys.stdout.flush()
# A simple utility class to wait for incoming data to be
# ready on a socket.
class poller:
    """Dispatches do_read() callbacks to registered targets when their file
    descriptors become readable, using select.poll() when available and
    falling back to select.select() otherwise."""

    def __init__(self):
        if 'poll' in dir(select):
            self.use_poll = True
            self.poller = select.poll()
        else:
            self.use_poll = False
        # fileno -> target object exposing do_read(fileno)
        self.targets = {}

    def add(self, target, fileno=None):
        """Register *target* under *fileno* (defaults to target.fileno()).

        BUG FIX: the original tested 'if not fileno:', which treated file
        descriptor 0 (stdin) as missing and re-derived it from the target.
        """
        if fileno is None:
            fileno = target.fileno()
        if self.use_poll:
            self.poller.register(fileno, select.POLLIN)
        self.targets[fileno] = target

    def remove(self, target, fileno=None):
        """Unregister the target bound to *fileno* (or target.fileno())."""
        if fileno is None:
            fileno = target.fileno()
        if self.use_poll:
            self.poller.unregister(fileno)
        del self.targets[fileno]

    def poll(self, timeout=0):
        """Wait up to *timeout* for readable fds and invoke their targets."""
        if self.use_poll:
            ready = self.poller.poll(timeout)
        else:
            ready = []
            if len(self.targets) > 0:
                (rlist, wlist, xlist) = select.select(self.targets.keys(), [], [], timeout)
                ready = [(x, None) for x in rlist]
        for one_ready in ready:
            target = self.targets.get(one_ready[0], None)
            if target:
                target.do_read(one_ready[0])
# Base class for a generic UPnP device. This is far from complete
# but it supports either specified or automatic IP address and port
# selection.
class upnp_device(object):
    """Base class for a generic UPnP device.

    Supports either a specified or an automatically selected IP address and
    port, accepts TCP client connections via the shared poller, and answers
    SSDP searches. Subclasses override handle_request() and get_name().
    """

    # cached local IP address, shared by all device instances
    this_host_ip = None

    @staticmethod
    def local_ip_address():
        """Return (and cache) the local IP used for the default route."""
        if not upnp_device.this_host_ip:
            temp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            try:
                # connect() on a UDP socket sends no packets; it just binds
                # the socket to the outgoing interface/address
                temp_socket.connect(('8.8.8.8', 53))
                upnp_device.this_host_ip = temp_socket.getsockname()[0]
            except Exception:
                # BUG FIX: was a bare 'except:', which also swallowed
                # SystemExit and KeyboardInterrupt.
                upnp_device.this_host_ip = '127.0.0.1'
            del temp_socket
            dbg("got local address of %s" % upnp_device.this_host_ip)
        return upnp_device.this_host_ip

    def __init__(self, listener, poller, port, root_url, server_version, persistent_uuid, other_headers=None, ip_address=None):
        """Bind a listening TCP socket (port 0 picks a free port) and
        register this device with the poller and the SSDP listener."""
        self.listener = listener
        self.poller = poller
        self.port = port
        self.root_url = root_url
        self.server_version = server_version
        self.persistent_uuid = persistent_uuid
        self.uuid = uuid.uuid4()
        self.other_headers = other_headers
        if ip_address:
            self.ip_address = ip_address
        else:
            self.ip_address = upnp_device.local_ip_address()
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.bind((self.ip_address, self.port))
        self.socket.listen(5)
        if self.port == 0:
            # a dynamic port was requested; record the one the OS chose
            self.port = self.socket.getsockname()[1]
        self.poller.add(self)
        self.client_sockets = {}
        self.listener.add_device(self)

    def fileno(self):
        return self.socket.fileno()

    def do_read(self, fileno):
        """Accept new connections on the listen socket; read and dispatch
        data on client sockets (an empty read means the peer closed)."""
        if fileno == self.socket.fileno():
            (client_socket, client_address) = self.socket.accept()
            self.poller.add(self, client_socket.fileno())
            self.client_sockets[client_socket.fileno()] = client_socket
        else:
            data, sender = self.client_sockets[fileno].recvfrom(4096)
            if not data:
                self.poller.remove(self, fileno)
                del self.client_sockets[fileno]
            else:
                self.handle_request(data, sender, self.client_sockets[fileno])

    def handle_request(self, data, sender, socket):
        """Subclasses override this to service an incoming request."""
        pass

    def get_name(self):
        return "unknown"

    def respond_to_search(self, destination, search_target):
        """Send the unicast SSDP search response for this device."""
        dbg("Responding to search for {!s:} @ {!s:}".format(self.get_name(), destination))
        date_str = email.utils.formatdate(timeval=None, localtime=False, usegmt=True)
        location_url = self.root_url % {'ip_address': self.ip_address, 'port': self.port}
        message = ("HTTP/1.1 200 OK\r\n"
                   "CACHE-CONTROL: max-age=86400\r\n"
                   "DATE: %s\r\n"
                   "EXT:\r\n"
                   "LOCATION: %s\r\n"
                   "OPT: \"http://schemas.upnp.org/upnp/1/0/\"; ns=01\r\n"
                   "01-NLS: %s\r\n"
                   "SERVER: %s\r\n"
                   "ST: %s\r\n"
                   "USN: uuid:%s::%s\r\n" % (date_str, location_url, self.uuid, self.server_version, search_target, self.persistent_uuid, search_target))
        if self.other_headers:
            for header in self.other_headers:
                message += "%s\r\n" % header
        message += "\r\n"
        # NOTE(review): this UDP socket is created per call and never
        # explicitly closed -- relies on garbage collection; confirm.
        temp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        temp_socket.sendto(message, destination)
# This subclass does the bulk of the work to mimic a WeMo switch on the network.
class fauxmo(upnp_device):
    """Emulates a single Belkin WeMo switch so the Amazon Echo can control
    it; forwards on/off requests to an action handler."""

    @staticmethod
    def make_uuid(name):
        """Derive a deterministic 14-character serial from the device name."""
        return ''.join(["%x" % sum([ord(c) for c in name])] + ["%x" % ord(c) for c in "%sfauxmo!" % name])[:14]

    def __init__(self, name, listener, poller, ip_address, port, action_handler=None):
        self.serial = self.make_uuid(name)
        # NOTE(review): the "test " prefix looks like a debugging leftover --
        # it changes the name the Echo discovers; confirm before removing.
        self.name = "test " + name
        self.ip_address = ip_address
        persistent_uuid = "Socket-1_0-" + self.serial
        other_headers = ['X-User-Agent: redsonic']
        upnp_device.__init__(self, listener, poller, port, "http://%(ip_address)s:%(port)s/setup.xml", "Unspecified, UPnP/1.0, Unspecified", persistent_uuid, other_headers=other_headers, ip_address=ip_address)
        if action_handler:
            self.action_handler = action_handler
        else:
            self.action_handler = self
        # NOTE(review): .status is only defined on external handlers; when
        # action_handler is self this line raises AttributeError -- confirm.
        dbg("FauxMo device '%s' ready on %s:%s state=%s" % (self.name, self.ip_address, self.port, self.action_handler.status))

    def get_name(self):
        return self.name

    def handle_request(self, data, sender, socket):
        """Serve setup.xml and SetBinaryState SOAP actions from the Echo."""
        if data.find('GET /setup.xml HTTP/1.1') == 0:
            dbg("Responding to setup.xml for %s" % self.name)
            device_state = self.action_handler.status
            if device_state == "" or device_state == "0":
                device_state = "0"
            else:
                device_state = "1"
            # BUG FIX: the original passed the literal string "device_state"
            # instead of the computed value, so the reported binaryState was
            # always the text 'device_state'.
            xml = SETUP_XML % {'device_name': self.name,
                               'device_serial': self.serial,
                               'device_state': device_state}
            date_str = email.utils.formatdate(timeval=None, localtime=False, usegmt=True)
            message = ("HTTP/1.1 200 OK\r\n"
                       "CONTENT-LENGTH: %d\r\n"
                       "CONTENT-TYPE: text/xml\r\n"
                       "DATE: %s\r\n"
                       "LAST-MODIFIED: Sat, 01 Jan 2000 00:01:15 GMT\r\n"
                       "SERVER: Unspecified, UPnP/1.0, Unspecified\r\n"
                       "X-User-Agent: redsonic\r\n"
                       "CONNECTION: close\r\n"
                       "\r\n"
                       "%s" % (len(xml), date_str, xml))
            socket.send(message)
        elif data.find('SOAPACTION: "urn:Belkin:service:basicevent:1#SetBinaryState"') != -1:
            success = False
            if data.find('<BinaryState>1</BinaryState>') != -1:
                # on
                dbg("Responding to ON for %s" % self.name)
                success = self.action_handler.on()
            elif data.find('<BinaryState>0</BinaryState>') != -1:
                # off
                dbg("Responding to OFF for %s" % self.name)
                success = self.action_handler.off()
            else:
                print("Unknown Binary State request:")
                print(data)
            if success:
                # The echo is happy with the 200 status code and doesn't
                # appear to care about the SOAP response body
                soap = ""
                date_str = email.utils.formatdate(timeval=None, localtime=False, usegmt=True)
                message = ("HTTP/1.1 200 OK\r\n"
                           "CONTENT-LENGTH: %d\r\n"
                           "CONTENT-TYPE: text/xml charset=\"utf-8\"\r\n"
                           "DATE: %s\r\n"
                           "EXT:\r\n"
                           "SERVER: Unspecified, UPnP/1.0, Unspecified\r\n"
                           "X-User-Agent: redsonic\r\n"
                           "CONNECTION: close\r\n"
                           "\r\n"
                           "%s" % (len(soap), date_str, soap))
                socket.send(message)
            else:
                dbg("success={!s:}".format(success))
        else:
            dbg(data)

    def on(self):
        # default stub used when this instance is its own action handler
        return False

    def off(self):
        return True
# Since we have a single process managing several virtual UPnP devices,
# we only need a single listener for UPnP broadcasts. When a matching
# search is received, it causes each device instance to respond.
#
# Note that this is currently hard-coded to recognize only the search
# from the Amazon Echo for WeMo devices. In particular, it does not
# support the more common root device general search. The Echo
# doesn't search for root devices.
class upnp_broadcast_responder(object):
    """Listens for SSDP M-SEARCH broadcasts and lets every registered device
    answer the Amazon Echo's WeMo search (urn:Belkin:device:**). Root-device
    general searches are deliberately not supported."""

    TIMEOUT = 0

    def __init__(self):
        self.devices = []

    def init_socket(self):
        """Join the SSDP multicast group on 239.255.255.250:1900.

        Returns False on a fatal setup failure, otherwise whether the socket
        is fully set up (bind + multicast join succeeded).
        """
        ok = True
        self.ip = '239.255.255.250'
        self.port = 1900
        try:
            # This is needed to join a multicast group
            self.mreq = struct.pack("4sl", socket.inet_aton(self.ip), socket.INADDR_ANY)
            # Set up server socket
            self.ssock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            self.ssock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                self.ssock.bind(('', self.port))
            except Exception as e:
                # BUG FIX: dbg() takes a single argument; the original passed
                # a format string plus a tuple and raised TypeError here.
                dbg("WARNING: Failed to bind %s:%d: %s" % (self.ip, self.port, e))
                ok = False
            try:
                self.ssock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, self.mreq)
            except Exception as e:
                dbg('WARNING: Failed to join multicast group: %s' % e)
                ok = False
        except Exception as e:
            dbg("Failed to initialize UPnP sockets: %s" % e)
            return False
        if ok:
            dbg("Listening for UPnP broadcasts")
        return ok

    def fileno(self):
        return self.ssock.fileno()

    def do_read(self, fileno):
        data, sender = self.recvfrom(1024)
        if data:
            # only answer the Echo's WeMo search, not general root searches
            if data.find('M-SEARCH') == 0 and data.find('urn:Belkin:device:**') != -1:
                for device in self.devices:
                    time.sleep(0.1)
                    device.respond_to_search(sender, 'urn:Belkin:device:**')
            else:
                pass

    # Receive network data
    def recvfrom(self, size):
        """recvfrom() with an optional select() timeout; returns
        (False, False) on timeout or socket error."""
        if self.TIMEOUT:
            self.ssock.setblocking(0)
            ready = select.select([self.ssock], [], [], self.TIMEOUT)[0]
        else:
            self.ssock.setblocking(1)
            ready = True
        try:
            if ready:
                return self.ssock.recvfrom(size)
            else:
                return False, False
        except Exception as e:
            dbg("recvfrom")
            dbg(e)
            return False, False

    def add_device(self, device):
        self.devices.append(device)
        # dbg("UPnP broadcast listener: new device registered")
# Each entry is a list with the following elements:
#
# name of the virtual switch
# object with 'on' and 'off' methods
# port # (optional; may be omitted)
# NOTE: As of 2015-08-17, the Echo appears to have a hard-coded limit of
# 16 switches it can control. Only the first 16 elements of the FAUXMOS
# list will be used.
def load_fauxmos(myisy=None, fport=None):
    """Build the FauxMo device list from the ISY's fauxmo.jsn config file.

    Returns a list of [spoken_name, node_or_program(, port)] entries, or
    None when the config file cannot be fetched. Consecutive ports starting
    at *fport* are appended when *fport* is given.

    BUG FIXES vs. the original:
      * 'except Exception, e' (Python-2-only syntax) -> 'except Exception';
      * 'if isyprog in conf:' referenced an undefined name -- the string
        key "isyprog" was intended;
      * removed the unused 'baseurl' local (it referenced module globals
        and served no purpose).
    """
    try:
        js = myisy.soapcomm("GetSysConf", name="/WEB/CONF/fauxmo.jsn")
        # print "r=",r
    except Exception:
        return None
    import json
    conf = json.loads(js)
    if conf is None:
        return None
    ret_list = list()
    #
    # isydevs = {
    #     "office light" : "16 3F E5 1" ,
    #     "garage" : "20326",
    # }
    if "isydevs" in conf:
        isydevs = conf['isydevs']
        for k in sorted(isydevs.keys()):
            try:
                nod = myisy[k]
            except Exception:
                # unknown node names are skipped silently (best effort)
                pass
            else:
                l = [k, nod]
                if fport is not None:
                    l.append(fport)
                    fport = fport + 1
                ret_list.append(l)
    # isyprog {
    #     "bath fan" : {
    #         "on" : ("006E", "runThen"),
    #         "off" : ("0070", "runThen"),
    #     },
    # }
    #
    # the following is incomplete
    #
    if "isyprog" in conf:
        isyprog = conf['isyprog']
        for k in sorted(isyprog.keys()):
            try:
                prg = myisy.get_prog(k)
            except Exception:
                pass
            else:
                l = [k, prg]
                if fport is not None:
                    l.append(fport)
                    fport = fport + 1
                ret_list.append(l)
    return ret_list
def build_fauxmos(myisy=None, fport=None):
    """Collect one [spoken_name, node(, port)] entry per enabled ISY node
    that has a spoken name, assigning consecutive ports from *fport*."""
    if myisy is None:
        print("myisy is None")
        exit(0)
    devices = []
    for node in myisy:
        # only enabled real nodes with a spoken name become switches
        if node.objtype != "node" or node.enabled is not True or not node.spoken:
            continue
        # print("Node :", node.name, node.address, node.enabled, node.spoken)
        entry = [node.spoken, node]
        if fport is not None:
            entry.append(fport)
            fport += 1
        devices.append(entry)
    return devices
def main(myisy):
    """Build the FauxMo device list from the ISY and serve UPnP requests.

    BUG FIX: the original had an exit(0) right before the event loop, which
    made the 'while True' loop unreachable -- the process announced its
    devices and immediately terminated.
    """
    global DEBUG
    if len(sys.argv) > 1 and sys.argv[1] == '-d':
        DEBUG = True
    bfm = load_fauxmos(myisy)
    if bfm is None:
        print("building device list from ISY")
        bfm = build_fauxmos(myisy, base_port)
    # Set up our singleton for polling the sockets for data ready
    p = poller()
    # Set up our singleton listener for UPnP broadcasts
    u = upnp_broadcast_responder()
    u.init_socket()
    # Add the UPnP broadcast listener to the poller so we can respond
    # when a broadcast is received.
    p.add(u)
    if DEBUG > 2:
        import pprint
        print("\nbfm :")
        pprint.pprint(bfm)
    # Create our FauxMo virtual switch devices
    for one_faux in bfm:
        if len(one_faux) == 2:
            # a fixed port wasn't specified, use a dynamic one
            one_faux.append(0)
        switch = fauxmo(one_faux[0], u, p, None, one_faux[2], action_handler=one_faux[1])
    dbg("Entering main loop\n")
    while True:
        try:
            # Allow time for a ctrl-c to stop the process
            p.poll(100)
            time.sleep(0.1)
        except Exception as e:
            dbg(e)
            break
if __name__ == '__main__':
    # Connect to the ISY controller (the ISY library parses CLI arguments
    # itself when parsearg=1) and run the FauxMo server.
    myisy = ISY.Isy(parsearg=1)  # debug=0x80
    main(myisy)
    exit(0)
| |
from __future__ import unicode_literals
import unittest
from rdflib import URIRef, RDF
from r2dto_rdf import RdfSerializer, RdfIriField, RdfStringField, RdfObjectField, RdfSetField
from tests.utils import RdflibTestCaseMixin, get_triples
class SerializerTests(RdflibTestCaseMixin, unittest.TestCase):
    """End-to-end tests for RdfSerializer.build_graph(): plain literals,
    datatypes, language tags, nested objects (collapsed and as blank nodes),
    set fields, IRI fields and optional/None fields."""

    def test_basic_building_of_graph(self):
        # literal field plus the rdf:type triple coming from Meta.rdf_type
        class Model(object):
            def __init__(self):
                self.id = "http://api.nickswebsite.net/data#1"
                self.field = "xyz"

        class ModelSerializer(RdfSerializer):
            id = RdfIriField()
            field = RdfStringField(predicate="nws:field")

            class Meta:
                model = Model
                rdf_prefixes = {
                    "nws": "http://api.nickswebsite.net/ns/",
                    "nws-data": "http://api.nickswebsite.net/data#"
                }
                rdf_subject = "id"
                rdf_type = "nws:Type"

        m = Model()
        s = ModelSerializer(object=m)
        g = s.build_graph()
        self.assert_triple(g,
                           "http://api.nickswebsite.net/data#1",
                           "http://api.nickswebsite.net/ns/field",
                           "xyz")
        self.assert_triple(g,
                           "http://api.nickswebsite.net/data#1",
                           RDF.type,
                           "http://api.nickswebsite.net/ns/Type")

    def test_data_types(self):
        # a prefixed datatype on a string literal must be expanded
        class Model(object):
            def __init__(self):
                self.field = "ABC"
                self.id = "http://api.nickswebsite.net/data#1"

        class ModelSerializer(RdfSerializer):
            field = RdfStringField(predicate="nws:field", datatype="nws:Stringish")

            class Meta:
                rdf_subject = "id"
                rdf_prefixes = {
                    "nws": "http://api.nickswebsite.net/ns/",
                    "nws-data": "http://api.nickswebsite.net/data#"
                }

        m = Model()
        s = ModelSerializer(object=m)
        g = s.build_graph()
        self.assert_triple(g,
                           "http://api.nickswebsite.net/data#1",
                           "http://api.nickswebsite.net/ns/field",
                           "ABC",
                           datatype="http://api.nickswebsite.net/ns/Stringish")

    def test_language(self):
        # language-tagged literal
        class Model(object):
            def __init__(self):
                self.field = "aef"
                self.id = "http://api.nickswebsite.net/data#1"

        class ModelSerializer(RdfSerializer):
            field = RdfStringField(predicate="nws:field",
                                   language="en")

            class Meta:
                rdf_subject = "id"
                rdf_prefixes = {
                    "nws": "http://api.nickswebsite.net/ns/",
                    "nws-data": "http://api.nickswebsite.net/data#"
                }

        m = Model()
        s = ModelSerializer(object=m)
        g = s.build_graph()
        self.assert_triple(g,
                           "http://api.nickswebsite.net/data#1",
                           "http://api.nickswebsite.net/ns/field",
                           "aef",
                           language="en")

    def test_sub_objects_collapsed(self):
        # collapse=True attaches the sub-object's triples to the parent
        class SubModel(object):
            def __init__(self):
                self.field_one = "field one"

        class Model(object):
            def __init__(self):
                self.id = "http://api.nickswebsite.net/data#1"
                self.field = "Field"
                self.sub_field = SubModel()

        class SubModelSerializer(RdfSerializer):
            field_one = RdfStringField(predicate="nws:sub-field")

            class Meta:
                rdf_prefixes = {"nws": "http://www.nickswebsite.net/ns/"}

        class ModelSerializer(RdfSerializer):
            sub_field = RdfObjectField(SubModelSerializer, collapse=True)
            field = RdfStringField(predicate="nws:field")

            class Meta:
                rdf_prefixes = {"nws": "http://www.nickswebsite.net/ns/"}
                rdf_subject = "id"

        m = Model()
        s = ModelSerializer(object=m)
        g = s.build_graph()
        self.assert_triple(g, m.id, "http://www.nickswebsite.net/ns/sub-field", "field one")

    def test_sub_objects_with_blank_nodes(self):
        # collapse=False links the sub-object through a blank node
        class SubModel(object):
            def __init__(self):
                self.one = "Field One"

        class Model(object):
            def __init__(self):
                self.sub_field = SubModel()
                self.id = "http://api.nickswebsite.net/data#1"

        class SubModelSerializer(RdfSerializer):
            one = RdfStringField(predicate="nws:one")

            class Meta:
                rdf_prefixes = {"nws": "http://api.nickswebsite.net/ns/"}

        class ModelSerializer(RdfSerializer):
            sub_field = RdfObjectField(SubModelSerializer, "nws:sub-field", collapse=False)

            class Meta:
                rdf_prefixes = {"nws": "http://api.nickswebsite.net/ns/"}
                rdf_subject = "id"

        m = Model()
        s = ModelSerializer(object=m)
        s.validate()
        g = s.build_graph()
        # follow the blank node to the sub-object's own triples
        result = get_triples(g, "http://api.nickswebsite.net/data#1", "http://api.nickswebsite.net/ns/sub-field", None)
        result = list(result)
        result_submodel_triples = get_triples(g, result[0][-1], None, None)
        result_submodel_triples = list(result_submodel_triples)
        self.assert_uri_equal("http://api.nickswebsite.net/ns/one", result_submodel_triples[0][1])
        self.assert_literal_equal("Field One", result_submodel_triples[0][2])

    def test_set_objects_collapsed(self):
        # a collapsed set field yields one triple per element
        class Model(object):
            def __init__(self):
                self.id = "http://api.nickswebsite.net/data#2"
                self.fields = ["String One", "String Two"]

        class ModelSerializer(RdfSerializer):
            fields = RdfSetField(RdfStringField(), predicate="nws:field", collapse=True)

            class Meta:
                rdf_prefixes = {
                    "nws": "http://api.nickswebsite.net/ns/",
                    "nws-data": "http://api.nickswebsite.net/data#"
                }
                rdf_subject = "id"

        m = Model()
        s = ModelSerializer(object=m)
        s.validate()
        g = s.build_graph()
        self.assert_triple(g, m.id, "http://api.nickswebsite.net/ns/field", "String One")
        self.assert_triple(g, m.id, "http://api.nickswebsite.net/ns/field", "String Two")

    def test_objects_as_ids(self):
        # RdfIriField values must become URIRefs, not literals
        class Model(object):
            def __init__(self):
                self.id = "http://api.nickswebsite.net/data#3"
                self.link = "http://api.nickswebsite.net/data#3"

        class ModelSerializer(RdfSerializer):
            class Meta:
                rdf_subject = "id"

            link = RdfIriField(predicate="http://api.nickswebsite.net/ns/link")

        model = Model()
        s = ModelSerializer(object=model)
        s.validate()
        g = s.build_graph()
        triples = list(get_triples(g, model.id, "http://api.nickswebsite.net/ns/link", None))
        self.assertEqual(URIRef(model.link), triples[0][-1])

    def test_objects_in_lists(self):
        class SubModel(object):
            def __init__(self, val):
                self.val = val

        class Model(object):
            def __init__(self):
                self.values = [SubModel("One"), SubModel("Two")]
                self.id = "http://api.nickswebsite.net/data#8"

        class SubModelSerializer(RdfSerializer):
            val = RdfStringField(predicate="http://api.nickswebsite.net/ns/value")

        class ModelSerializer(RdfSerializer):
            values = RdfSetField(
                RdfObjectField(SubModelSerializer),
                predicate="http://api.nickswebsite.net/ns/sub-object"
            )

            class Meta:
                rdf_subject = "id"

        # @prefix nws: <http://api.nickswebsite.net/>
        #
        # nws:data#8 nws:ns/sub-object [ nws:ns/value "One" ],
        #     [ nws:ns/value "Two" ] .
        #
        m = Model()
        s = ModelSerializer(object=m)
        s.validate()
        g = s.build_graph()
        # NOTE(review): unlike the earlier tests, get_triples() results are
        # used with len()/indexing WITHOUT list() here -- confirm that
        # get_triples returns a sequence rather than a generator.
        model_triples = get_triples(g, m.id, "http://api.nickswebsite.net/ns/sub-object", None)
        self.assertEqual(2, len(model_triples))
        submodel_triples_a = get_triples(g, model_triples[0][2], "http://api.nickswebsite.net/ns/value", None)
        submodel_triples_b = get_triples(g, model_triples[1][2], "http://api.nickswebsite.net/ns/value", None)
        self.assertEqual({"One", "Two"}, {submodel_triples_a[0][2].value, submodel_triples_b[0][2].value})

    def test_non_required_none_fields(self):
        # optional fields whose value is None must emit no triples
        class Model(object):
            def __init__(self):
                self.prop = "value"
                self.none = None
                self.id = "http://api.nickswebsite.net/data#9"

        class ModelSerializer(RdfSerializer):
            prop = RdfStringField(predicate="http://api.nickswebsite.net/ns/prop")
            none = RdfIriField(predicate="http://api.nickswebsite.net/ns/none", required=False)

            class Meta:
                rdf_subject = "id"

        m = Model()
        s = ModelSerializer(object=m)
        s.validate()
        g = s.build_graph()
        # NOTE(review): len() on the raw get_triples() result here too --
        # see the note in test_objects_in_lists.
        prop_triples = get_triples(g, m.id, "http://api.nickswebsite.net/ns/prop", None)
        self.assertEqual(1, len(prop_triples))
        self.assertEqual(m.prop, prop_triples[0][-1].toPython())
        none_triples = get_triples(g, m.id, "http://api.nickswebsite.net/ns/none", None)
        self.assertEqual(0, len(none_triples))
| |
#!/usr/bin/env python
from __future__ import with_statement
from __future__ import print_function
import os
import os.path
import glob
import subprocess
import traceback
import platform
from distutils.command.build import build as _build
from setuptools import setup, Extension, Command, find_packages
from setuptools.command.develop import develop as _develop
from setuptools.command.test import test as TestCommand
from setuptools.command.easy_install import is_64bit
from distutils.sysconfig import get_config_vars
DEBUG = False
V8_SNAPSHOT_ENABLED = not DEBUG # build using snapshots for faster start-up
V8_NATIVE_REGEXP = True # Whether to use native or interpreted regexp implementation
V8_OBJECT_PRINT = DEBUG # enable object printing
V8_EXTRA_CHECKS = DEBUG # enable extra checks
V8_VERIFY_HEAP = DEBUG # enable verify heap
V8_GDB_JIT = False # enable GDB jit
V8_VTUNE_JIT = False
V8_DISASSEMBLEER = DEBUG # enable the disassembler to inspect generated code
V8_DEBUGGER_SUPPORT = True # enable debugging of JavaScript code
V8_LIVE_OBJECT_LIST = DEBUG # enable live object list features in the debugger
V8_WERROR = False # ignore compile warnings
V8_STRICTALIASING = True # enable strict aliasing
V8_BACKTRACE = True
V8_I18N = False
IS_64BIT = is_64bit()
IS_ARM = 'arm' in platform.processor()
ARCH = 'x64' if IS_64BIT else 'arm' if IS_ARM else 'ia32'
MODE = 'debug' if DEBUG else 'release'
LIBV8_PATH = "libv8"
LIBV8_SVN_URL = "http://v8.googlecode.com/svn/trunk/"
LIBV8_SVN_REV = 19632
# fixes distutils bug that causes warning when compiling
# c++ with gcc and the -Wstrict-prototypes flag
(opt,) = get_config_vars('OPT')
os.environ['OPT'] = " ".join(
flag for flag in opt.split() if flag != '-Wstrict-prototypes'
)
## macros
macros = [("BOOST_PYTHON_STATIC_LIB", None)]
if DEBUG:
macros += [("V8_ENABLE_CHECKS", None)]
if V8_NATIVE_REGEXP:
macros += [("V8_NATIVE_REGEXP", None)]
else:
macros += [("V8_INTERPRETED_REGEXP", None)]
if V8_DISASSEMBLEER:
macros += [("ENABLE_DISASSEMBLER", None)]
if V8_LIVE_OBJECT_LIST:
V8_OBJECT_PRINT = True
V8_DEBUGGER_SUPPORT = True
macros += [("LIVE_OBJECT_LIST", None)]
if V8_OBJECT_PRINT:
macros += [("OBJECT_PRINT", None)]
if V8_DEBUGGER_SUPPORT:
macros += [("ENABLE_DEBUGGER_SUPPORT", None)]
if IS_64BIT:
macros += [("V8_TARGET_ARCH_X64", None)]
elif IS_ARM:
macros += [("V8_TARGET_ARCH_ARM", None)]
else:
macros += [("V8_TARGET_ARCH_IA32", None)]
## libs
libraries = [
'v8_base.' + ARCH,
'v8_snapshot' if V8_SNAPSHOT_ENABLED else ('v8_nosnapshot.' + ARCH),
'rt'
]
boost_libs = ['boost_python', 'boost_thread', 'boost_system']
if DEBUG:
boost_libs = [lib + '-d' for lib in boost_libs]
libraries += boost_libs
library_dirs = [
"/usr/local/lib",
"%s/out/%s.%s/obj.target/tools/gyp/" % (LIBV8_PATH, ARCH, MODE)
]
native_path = "%s/out/native/obj.target/tools/gyp/" % LIBV8_PATH
if os.path.isdir(native_path):
library_dirs.append(native_path)
## include
include_dirs = [
os.path.join(LIBV8_PATH, 'include'),
LIBV8_PATH,
os.path.join(LIBV8_PATH, 'src'),
]
include_dirs += ['/usr/local/include']
## extras
extra_compile_args = []
extra_link_args = []
extra_objects = []
extra_compile_args += ["-Wno-write-strings"]
if IS_64BIT:
extra_link_args += ["-fPIC"]
extra_link_args += ["-lrt"] # make ubuntu happy
if DEBUG:
extra_compile_args += ['-g', '-O0', '-fno-inline']
else:
extra_compile_args += ['-g', '-O3']
if V8_I18N:
icu_path = "%s/out/%s.%s/obj.target/third_party/icu/" % (LIBV8_PATH, ARCH, MODE)
extra_objects += ["%slib%s.a" % (icu_path, name) for name in ['icui18n', 'icuuc', 'icudata']]
def ensure_libv8():
    """Fetch the v8 source tree and its build dependencies, unless a
    checkout already exists at LIBV8_PATH."""
    if os.path.isdir(LIBV8_PATH):
        return
    exec_cmd("svn export {}@{} {}".format(LIBV8_SVN_URL, LIBV8_SVN_REV, LIBV8_PATH),
             "fetching libv8")
    exec_cmd("make dependencies", "fetching libv8 dependencies", cwd=LIBV8_PATH)
def exec_cmd(cmdline_or_args, msg, shell=True, cwd=None, env=None, output=False):
    """Run a command, echoing progress to stdout.

    :param cmdline_or_args: shell command line (or argument list when shell=False)
    :param msg: human-readable description printed before/after the run
    :param shell: passed through to subprocess.Popen
    :param cwd: working directory for the child process (None = inherit)
    :param env: environment for the child process (None = inherit)
    :param output: if True return ``(succeeded, stdout, stderr)``,
        otherwise return the boolean ``succeeded`` alone
    """
    print("-" * 20)
    print("INFO: %s ..." % msg)
    print("DEBUG: > %s" % cmdline_or_args)
    # Popen treats cwd=None as "inherit", so a single call covers both the
    # cwd and no-cwd cases that were previously duplicated.
    proc = subprocess.Popen(cmdline_or_args, shell=shell, cwd=cwd, env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    succeeded = proc.returncode == 0
    if not succeeded:
        print("ERROR: %s failed: code=%d" % (msg or "Execute command", proc.returncode))
        print("DEBUG: %s" % err)
    # BUGFIX: the original `return succeeded, out, err if output else succeeded`
    # parsed as a 3-tuple `(succeeded, out, <cond>)`, so the function ALWAYS
    # returned a truthy tuple and callers using `exec_cmd(...) and ...` could
    # never detect failure. Parenthesize the conditional over the whole tuple.
    return (succeeded, out, err) if output else succeeded
def build_libv8():
    """Patch v8's GYP build configuration and compile v8 for ARCH/MODE.

    NOTE(review): upstream v8 builds with RTTI and C++ exceptions disabled;
    they are re-enabled here, presumably so the extension can link against
    boost.python -- confirm against the boost requirements.
    A ``.bak`` copy of the pristine standalone.gypi is kept.
    """
    print("=" * 20)
    print("INFO: Patching the GYP scripts")
    # Next up, we have to patch the SConstruct file from the v8 source to remove -no-rtti and -no-exceptions
    gypi = os.path.join(LIBV8_PATH, "build/standalone.gypi")
    # Check if we need to patch by searching for rtti flag in the data
    with open(gypi, 'r') as f:
        build_script = f.read()
    # Rewrites cover the gcc flags (-fno-*), the MSVC generator keys
    # (RuntimeTypeInfo / ExceptionHandling) and the Xcode keys (GCC_ENABLE_*).
    fixed_build_script = build_script.replace('-fno-rtti', '') \
        .replace('-fno-exceptions', '') \
        .replace('-Werror', '') \
        .replace("'RuntimeTypeInfo': 'false',", "'RuntimeTypeInfo': 'true',") \
        .replace("'ExceptionHandling': '0',", "'ExceptionHandling': '1',") \
        .replace("'GCC_ENABLE_CPP_EXCEPTIONS': 'NO'", "'GCC_ENABLE_CPP_EXCEPTIONS': 'YES'") \
        .replace("'GCC_ENABLE_CPP_RTTI': 'NO'", "'GCC_ENABLE_CPP_RTTI': 'YES'")
    if build_script == fixed_build_script:
        # No change means the file was already patched by a previous run.
        print("INFO: skip to patch the Google v8 build/standalone.gypi file ")
    else:
        print("INFO: patch the Google v8 build/standalone.gypi file to enable RTTI and C++ Exceptions")
        # Keep a one-deep backup of the original file before overwriting.
        if os.path.exists(gypi + '.bak'):
            os.remove(gypi + '.bak')
        os.rename(gypi, gypi + '.bak')
        with open(gypi, 'w') as f:
            f.write(fixed_build_script)
    # Build knobs forwarded to v8's Makefile as key=value pairs.
    options = {
        'disassembler': 'on' if V8_DISASSEMBLEER else 'off',
        'objectprint': 'on' if V8_OBJECT_PRINT else 'off',
        'verifyheap': 'on' if V8_VERIFY_HEAP else 'off',
        'snapshot': 'on' if V8_SNAPSHOT_ENABLED else 'off',
        'extrachecks': 'on' if V8_EXTRA_CHECKS else 'off',
        'gdbjit': 'on' if V8_GDB_JIT else 'off',
        'vtunejit': 'on' if V8_VTUNE_JIT else 'off',
        'liveobjectlist': 'on' if V8_LIVE_OBJECT_LIST else 'off',
        'debuggersupport': 'on' if V8_DEBUGGER_SUPPORT else 'off',
        'regexp': 'native' if V8_NATIVE_REGEXP else 'interpreted',
        'strictaliasing': 'on' if V8_STRICTALIASING else 'off',
        'werror': 'yes' if V8_WERROR else 'no',
        'backtrace': 'on' if V8_BACKTRACE else 'off',
        'i18nsupport': 'on' if V8_I18N else 'off',
        'visibility': 'on',
        'library': 'shared',
    }
    print("=" * 20)
    print("INFO: building Google v8 with GYP for %s platform with %s mode" % (ARCH, MODE))
    # NOTE: `options` is rebound from the dict to the joined "k=v ..." string.
    options = ' '.join(["%s=%s" % (k, v) for k, v in options.items()])
    cmdline = "make -j 8 %s %s.%s" % (options, ARCH, MODE)
    exec_cmd(cmdline, "build v8 from SVN", cwd=LIBV8_PATH)
def generate_probes():
    """Generate DTrace/SystemTap probe objects, or disable probe support.

    On success ``build/probes.o`` is appended to ``extra_objects``; on
    failure ``src/Config.h`` is patched to comment out ``SUPPORT_PROBES``
    (a ``.bak`` copy of the original header is kept).
    """
    build_path = "build"
    if not os.path.exists(build_path):
        print("INFO: automatic make the build folder: %s" % build_path)
        try:
            # BUGFIX: `0755` is a Python-2-only octal literal and a syntax
            # error under Python 3; `0o755` is valid on Python 2.6+ and 3.x.
            os.makedirs(build_path, 0o755)
        except os.error as ex:
            print("WARN: fail to create the build folder, %s" % ex)
    probes_d = "src/probes.d"
    probes_h = "src/probes.h"
    probes_o = os.path.join(build_path, "probes.o")
    if (exec_cmd("dtrace -h -C -s %s -o %s" % (probes_d, probes_h), "generate DTrace probes.h") and
            exec_cmd("dtrace -G -C -s %s -o %s" % (probes_d, probes_o), "generate DTrace probes.o")):
        extra_objects.append(probes_o)
    else:
        print("INFO: dtrace or systemtap doesn't works, force to disable probes")
        config_file = "src/Config.h"
        with open(config_file, "r") as f:
            config_settings = f.read()
        modified_config_settings = config_settings.replace("\n#define SUPPORT_PROBES 1", "\n//#define SUPPORT_PROBES 1")
        if modified_config_settings != config_settings:
            # Back up the original header before rewriting it.
            if os.path.exists(config_file + '.bak'):
                os.remove(config_file + '.bak')
            os.rename(config_file, config_file + '.bak')
            with open(config_file, 'w') as f:
                f.write(modified_config_settings)
def prepare_v8():
    """Run the full v8 pipeline: fetch, build, then generate probes.

    Any exception is printed and swallowed, so a failing v8 build does not
    abort setup() itself.
    """
    try:
        ensure_libv8()
        build_libv8()
        generate_probes()
    except Exception as e:
        print("ERROR: fail to checkout and build v8, %s" % e)
        traceback.print_exc()
class build(_build):
    # Standard distutils build command, extended to fetch/compile v8 first.
    def run(self):
        prepare_v8()
        _build.run(self)
class develop(_develop):
    # setuptools `develop` command, extended to fetch/compile v8 first.
    def run(self):
        prepare_v8()
        _develop.run(self)
# The C++ extension module wrapping v8; all macros, include/library paths
# and extra flags were collected by the module-level configuration above.
python_v8 = Extension(name="_v8",
                      sources=glob.glob("src/*.cpp"),
                      define_macros=macros,
                      include_dirs=include_dirs,
                      library_dirs=library_dirs,
                      libraries=libraries,
                      extra_compile_args=extra_compile_args,
                      extra_link_args=extra_link_args,
                      extra_objects=extra_objects,
                      )
class PyTest(TestCommand):
    """`setup.py test` command that delegates test discovery to pytest."""
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported here so pytest is only required when tests actually run.
        import pytest
        errno = pytest.main(self.test_args)
        raise SystemExit(errno)
# Package metadata; the custom cmdclass hooks v8 preparation into the
# build/develop commands ("v8build" exposes the unpatched distutils build).
setup(
    name="v8",
    version="0.1.5",
    description="Python Wrapper for Google V8 Engine",
    author="Lex Berezhny",
    author_email="lex@damoti.com",
    url="http://github.com/damoti/python-v8",
    license="Apache Software License",
    platforms=["linux", "osx", "cygwin", "win32"],
    packages=find_packages(),
    include_package_data=True,
    tests_require=['pytest'],
    ext_modules=[python_v8],
    cmdclass = {
        "build": build,
        "v8build": _build,
        "develop": develop
    },
    classifiers = [
        "Programming Language :: C++",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Development Status :: 4 - Beta",
        "Environment :: Plugins",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Utilities",
    ],
    keywords = "js javascript v8"
)
| |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# import sys
import os
from loguru import logger
import sys
import os.path
import numpy as np
from io import open
from .image import DataPlus
path_to_script = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(path_to_script, "./extern/sPickle"))
from .dili_subset import ndarray_to_list_in_structure
# from imma.image import resize_to_mm, resize_to_shape
def old_str_format_to_new(string):
    """Convert old %-style format specifiers to new {}-style.

    Works for (optionally zero-padded) integer specifiers only:
    "%05d" becomes "{:05d}".
    """
    import re
    pattern = re.compile(r"%(\d*d)")
    return pattern.sub(r"{:\1}", string)
def suggest_filename(file_path, exists=None):
    """Return a non-clashing variant of ``file_path``.

    When the path exists, a numeric suffix is appended (or incremented)
    before the extension, recursing until a free name is found. For testing,
    ``exists`` may be passed explicitly to override the filesystem check.
    """
    import os.path
    import re

    if not isinstance(exists, bool):
        exists = os.path.exists(file_path)
    if not exists:
        return file_path

    base, extension = os.path.splitext(file_path)
    match = re.search(r"_\d+$", base)
    if match is None:
        suffix = "_2"
    else:
        old_suffix = match.group()
        base = base[: -len(old_suffix)]
        suffix = "_" + str(int(old_suffix[1:]) + 1)
    # Recurse: the freshly suggested name may itself exist on disk.
    return suggest_filename(base + suffix + extension)
def obj_from_file(filename="annotation.yaml", filetype="auto", yaml_typ="unsafe"):
    """Read object from file.

    :param filename: input path
    :param filetype: "auto" (derive from extension), yaml/yml,
        or pickle/pkl/pklz/picklezip
    :param yaml_typ: ruamel.yaml loader type
    :return: the deserialized object, or None for an unknown filetype
    """
    # BUGFIX: initialize the result so an unknown filetype no longer raises
    # NameError at the final `return obj` (the error is logged instead).
    obj = None
    if filetype == "auto":
        _, ext = os.path.splitext(filename)
        filetype = ext[1:]
    if filetype in ("yaml", "yml"):
        from ruamel.yaml import YAML
        yaml = YAML(typ=yaml_typ)
        with open(filename, encoding="utf-8") as f:
            obj = yaml.load(f)
        # An empty YAML document loads as None; normalize to an empty dict.
        if obj is None:
            obj = {}
    elif filetype in ("pickle", "pkl", "pklz", "picklezip"):
        fcontent = read_pkl_and_pklz(filename)
        if sys.version_info[0] < 3:
            import cPickle as pickle
        else:
            import _pickle as pickle
        if sys.version_info.major == 2:
            obj = pickle.loads(fcontent)
        else:
            # latin1 keeps Python-2-era pickles loadable on Python 3.
            obj = pickle.loads(fcontent, encoding="latin1")
    else:
        logger.error("Unknown filetype " + filetype)
    return obj
def read_pkl_and_pklz(filename):
    """Return raw file content, transparently gunzipping when possible.

    Falls back to a plain binary read when the file is not gzip-compressed.
    """
    fcontent = None
    try:
        import gzip
        with gzip.open(filename, "rb") as f:
            fcontent = f.read()
    except IOError as e:
        # Not a gzip container -- read the file as-is.
        logger.info("Input gzip exception: " + str(e))
        with open(filename, "rb") as f:
            fcontent = f.read()
    except Exception as e:
        # Anything else is unexpected; log the full traceback.
        import traceback
        logger.error("Input gzip exception: " + str(e))
        logger.error(traceback.format_exc())
    return fcontent
def obj_to_file(
    obj,
    filename,
    filetype="auto",
    ndarray_to_list=False,
    squeeze=True,
    yaml_typ="unsafe",
):
    """Writes annotation in file.

    :param obj: object to serialize; a DataPlus is first converted to dict
    :param filename: output path (missing parent directories are created)
    :param filetype:
        auto
        yaml
        pkl, pickle
        pklz, picklezip
    :param ndarray_to_list: convert ndarrays in obj to lists
    :param squeeze: squeeze ndarray
    :param yaml_typ: ruamel.yaml dumper type
    """
    if type(obj) == DataPlus:
        obj = dict(obj)
    if ndarray_to_list:
        obj = ndarray_to_list_in_structure(obj, squeeze=squeeze)
    # Ensure the target directory exists before writing.
    d = os.path.dirname(os.path.abspath(filename))
    if not os.path.exists(d):
        os.makedirs(d)
    if filetype == "auto":
        _, ext = os.path.splitext(filename)
        filetype = ext[1:]
    if filetype in ("yaml", "yml"):
        from ruamel.yaml import YAML
        yaml = YAML(typ=yaml_typ)
        with open(filename, "wt", encoding="utf-8") as f:
            yaml.dump(obj, f)
    elif filetype in ("pickle", "pkl"):
        logger.info("filename " + filename)
        import pickle
        # BUGFIX: the original did `f.close` (no call) and never closed the
        # file; use a context manager instead.
        with open(filename, "wb") as f:
            pickle.dump(obj, f, -1)
    elif filetype in ("streamingpicklezip", "spklz"):
        # this is not working :-(
        import gzip
        import sPickle as pickle
        with gzip.open(filename, "wb", compresslevel=1) as f:
            pickle.s_dump(obj, f)
    elif filetype in ("picklezip", "pklz"):
        import gzip
        if sys.version_info[0] < 3:
            import cPickle as pickle
        else:
            import _pickle as pickle
        with gzip.open(filename, "wb", compresslevel=1) as f:
            pickle.dump(obj, f)
    elif filetype in ("mat",):
        # BUGFIX: `("mat")` was a plain string, so membership tested for
        # substrings (e.g. "at" matched); a one-tuple tests equality.
        import scipy.io as sio
        sio.savemat(filename, obj)
    else:
        logger.error("Unknown filetype " + filetype)
from imma.image import resize_to_shape, resize_to_shape
# def resize_to_mm(data3d, voxelsize_mm, new_voxelsize_mm, mode="nearest", order=1):
# """
# Function can resize data3d or segmentation to specifed voxelsize_mm
# :new_voxelsize_mm: requested voxelsize. List of 3 numbers, also
# can be a string 'orig', 'orgi*2' and 'orgi*4'.
#
# :voxelsize_mm: size of voxel
# :mode: default is 'nearest'
# """
# import scipy
# import scipy.ndimage
#
# if np.all(list(new_voxelsize_mm) == "orig"):
# new_voxelsize_mm = np.array(voxelsize_mm)
# elif np.all(list(new_voxelsize_mm) == "orig*2"):
# new_voxelsize_mm = np.array(voxelsize_mm) * 2
# elif np.all(list(new_voxelsize_mm) == "orig*4"):
# new_voxelsize_mm = np.array(voxelsize_mm) * 4
# # vx_size = np.array(metadata['voxelsize_mm']) * 4
#
# zoom = voxelsize_mm / (1.0 * np.array(new_voxelsize_mm))
# data3d_res = scipy.ndimage.zoom(data3d, zoom, mode=mode, order=order).astype(
# data3d.dtype
# )
# return data3d_res
def suits_with_dtype(mn, mx, dtype):
    """Check whether the closed range [mn, mx] fits into integer ``dtype``.

    :param mn: range minimum
    :param mx: range maximum
    :param dtype: numpy integer dtype
    :return: True when both bounds are representable
    """
    info = np.iinfo(dtype)
    return info.min <= mn and mx <= info.max
def use_economic_dtype(data3d, slope=1, inter=0, dtype=None):
    """Use a more economic integer-like dtype when the value range allows it.

    :param data3d: input array
    :param slope: linear rescale factor applied before the range check
    :param inter: linear rescale offset applied before the range check
    :param dtype: target dtype; if None it is chosen automatically
    :return: rescaled array cast to the chosen dtype
    """
    if dtype is None:
        dtype = data3d.dtype

    if issubclass(dtype.type, np.integer):
        lo = data3d.min() * slope + inter
        hi = data3d.max() * slope + inter
        # First candidate (smallest first) that covers the value range wins.
        for candidate in (np.uint8, np.int8, np.uint16, np.int16, np.uint32, np.int32):
            if suits_with_dtype(lo, hi, dtype=candidate):
                dtype = candidate
                break

    if slope == 1 and inter == 0:
        # Skip the rescale arithmetic: this can prevent running out of memory.
        return data3d.astype(dtype)
    return ((slope * data3d) + inter).astype(dtype)
| |
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Sarielsaz Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functionality to build scripts, as well as SignatureHash().
This file is modified from python-sarielsazlib.
"""
from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
from binascii import hexlify
import hashlib
import sys
# Python 2/3 compatibility shims for byte-wise chr/ord and the `long` type.
bchr = chr
bord = ord
# BUGFIX: the original tested `sys.version > '3'`, a string comparison that
# is fragile (it mis-orders hypothetical two-digit majors such as "10.x");
# compare the numeric version tuple instead.
if sys.version_info[0] >= 3:
    long = int
    bchr = lambda x: bytes([x])  # int -> single byte
    bord = lambda x: x           # indexing bytes already yields an int on py3
import struct
from .bignum import bn2vch
MAX_SCRIPT_ELEMENT_SIZE = 520
OPCODE_NAMES = {}
def hash160(s):
    """Return RIPEMD160(SHA256(s))."""
    return hashlib.new('ripemd160', sha256(s)).digest()
_opcode_instances = []
class CScriptOp(int):
    """A single script opcode"""
    # Opcodes are interned via _opcode_instances, so no per-instance dict.
    __slots__ = []

    @staticmethod
    def encode_op_pushdata(d):
        """Encode a PUSHDATA op, returning bytes"""
        # The shortest length encoding that fits len(d) is chosen.
        if len(d) < 0x4c:
            return b'' + bchr(len(d)) + d # OP_PUSHDATA
        elif len(d) <= 0xff:
            return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
        elif len(d) <= 0xffff:
            return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
        elif len(d) <= 0xffffffff:
            return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
        else:
            raise ValueError("Data too long to encode in a PUSHDATA op")

    @staticmethod
    def encode_op_n(n):
        """Encode a small integer op, returning an opcode"""
        if not (0 <= n <= 16):
            raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
        if n == 0:
            return OP_0
        else:
            return CScriptOp(OP_1 + n-1)

    def decode_op_n(self):
        """Decode a small integer opcode, returning an integer"""
        if self == OP_0:
            return 0
        if not (self == OP_0 or OP_1 <= self <= OP_16):
            raise ValueError('op %r is not an OP_N' % self)
        return int(self - OP_1+1)

    def is_small_int(self):
        """Return true if the op pushes a small integer to the stack"""
        # 0x51..0x60 are OP_1..OP_16; opcode 0 is OP_0.
        if 0x51 <= self <= 0x60 or self == 0:
            return True
        else:
            return False

    def __str__(self):
        return repr(self)

    def __repr__(self):
        if self in OPCODE_NAMES:
            return OPCODE_NAMES[self]
        else:
            return 'CScriptOp(0x%x)' % self

    def __new__(cls, n):
        # Intern: each opcode value n maps to exactly one shared instance;
        # the table is filled strictly in order (see the assert).
        try:
            return _opcode_instances[n]
        except IndexError:
            assert len(_opcode_instances) == n
            _opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
            return _opcode_instances[n]
# Populate opcode instance table
for n in range(0xff+1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
class CScriptInvalidError(Exception):
    """Base class for CScript exceptions"""
    pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
    """Invalid pushdata due to truncation"""
    def __init__(self, msg, data):
        # Keep the partially-decoded payload for callers (e.g. __repr__).
        self.data = data
        super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum():
    """Script-style numeric value (used e.g. for BIP34 coinbase heights)."""
    def __init__(self, d=0):
        self.value = d

    @staticmethod
    def encode(obj):
        # Little-endian sign-magnitude serialization, length-prefixed.
        # Zero encodes to an empty payload.
        r = bytearray(0)
        if obj.value == 0:
            return bytes(r)
        neg = obj.value < 0
        absvalue = -obj.value if neg else obj.value
        while (absvalue):
            r.append(absvalue & 0xff)
            absvalue >>= 8
        if r[-1] & 0x80:
            # Top bit already used by magnitude: append a sign byte.
            r.append(0x80 if neg else 0)
        elif neg:
            # Otherwise store the sign in the top bit of the last byte.
            r[-1] |= 0x80
        return bytes(bchr(len(r)) + r)
class CScript(bytes):
    """Serialized script

    A bytes subclass, so you can use this directly whenever bytes are accepted.
    Note that this means that indexing does *not* work - you'll get an index by
    byte rather than opcode. This format was chosen for efficiency so that the
    general case would not require creating a lot of little CScriptOP objects.

    iter(script) however does iterate by opcode.
    """
    @classmethod
    def __coerce_instance(cls, other):
        # Coerce other into bytes
        # Opcodes, script numbers, plain ints and byte strings each get the
        # appropriate serialization; anything else passes through unchanged.
        if isinstance(other, CScriptOp):
            other = bchr(other)
        elif isinstance(other, CScriptNum):
            if (other.value == 0):
                other = bchr(CScriptOp(OP_0))
            else:
                other = CScriptNum.encode(other)
        elif isinstance(other, int):
            if 0 <= other <= 16:
                other = bytes(bchr(CScriptOp.encode_op_n(other)))
            elif other == -1:
                other = bytes(bchr(OP_1NEGATE))
            else:
                other = CScriptOp.encode_op_pushdata(bn2vch(other))
        elif isinstance(other, (bytes, bytearray)):
            other = CScriptOp.encode_op_pushdata(other)
        return other

    def __add__(self, other):
        # Do the coercion outside of the try block so that errors in it are
        # noticed.
        other = self.__coerce_instance(other)
        try:
            # bytes.__add__ always returns bytes instances unfortunately
            return CScript(super(CScript, self).__add__(other))
        except TypeError:
            raise TypeError('Can not add a %r instance to a CScript' % other.__class__)

    def join(self, iterable):
        # join makes no sense for a CScript()
        raise NotImplementedError

    def __new__(cls, value=b''):
        if isinstance(value, bytes) or isinstance(value, bytearray):
            return super(CScript, cls).__new__(cls, value)
        else:
            # Any other iterable: coerce each element and concatenate.
            def coerce_iterable(iterable):
                for instance in iterable:
                    yield cls.__coerce_instance(instance)
            # Annoyingly on both python2 and python3 bytes.join() always
            # returns a bytes instance even when subclassed.
            return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))

    def raw_iter(self):
        """Raw iteration

        Yields tuples of (opcode, data, sop_idx) so that the different possible
        PUSHDATA encodings can be accurately distinguished, as well as
        determining the exact opcode byte indexes. (sop_idx)
        """
        i = 0
        while i < len(self):
            sop_idx = i
            opcode = bord(self[i])
            i += 1
            if opcode > OP_PUSHDATA4:
                # Non-push opcode: no payload.
                yield (opcode, None, sop_idx)
            else:
                datasize = None
                pushdata_type = None
                if opcode < OP_PUSHDATA1:
                    # Direct push: the opcode byte itself is the length.
                    pushdata_type = 'PUSHDATA(%d)' % opcode
                    datasize = opcode
                elif opcode == OP_PUSHDATA1:
                    pushdata_type = 'PUSHDATA1'
                    if i >= len(self):
                        raise CScriptInvalidError('PUSHDATA1: missing data length')
                    datasize = bord(self[i])
                    i += 1
                elif opcode == OP_PUSHDATA2:
                    pushdata_type = 'PUSHDATA2'
                    if i + 1 >= len(self):
                        raise CScriptInvalidError('PUSHDATA2: missing data length')
                    # 2-byte little-endian length.
                    datasize = bord(self[i]) + (bord(self[i+1]) << 8)
                    i += 2
                elif opcode == OP_PUSHDATA4:
                    pushdata_type = 'PUSHDATA4'
                    if i + 3 >= len(self):
                        raise CScriptInvalidError('PUSHDATA4: missing data length')
                    # 4-byte little-endian length.
                    datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
                    i += 4
                else:
                    assert False # shouldn't happen
                data = bytes(self[i:i+datasize])
                # Check for truncation
                if len(data) < datasize:
                    raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
                i += datasize
                yield (opcode, data, sop_idx)

    def __iter__(self):
        """'Cooked' iteration

        Returns either a CScriptOP instance, an integer, or bytes, as
        appropriate.

        See raw_iter() if you need to distinguish the different possible
        PUSHDATA encodings.
        """
        for (opcode, data, sop_idx) in self.raw_iter():
            if data is not None:
                yield data
            else:
                opcode = CScriptOp(opcode)
                if opcode.is_small_int():
                    # OP_0..OP_16 are yielded as plain ints.
                    yield opcode.decode_op_n()
                else:
                    yield CScriptOp(opcode)

    def __repr__(self):
        # For Python3 compatibility add b before strings so testcases don't
        # need to change
        def _repr(o):
            if isinstance(o, bytes):
                return b"x('%s')" % hexlify(o).decode('ascii')
            else:
                return repr(o)
        ops = []
        i = iter(self)
        while True:
            op = None
            try:
                op = _repr(next(i))
            except CScriptTruncatedPushDataError as err:
                # Show the partial payload, then stop decoding.
                op = '%s...<ERROR: %s>' % (_repr(err.data), err)
                break
            except CScriptInvalidError as err:
                op = '<ERROR: %s>' % err
                break
            except StopIteration:
                break
            finally:
                if op is not None:
                    ops.append(op)
        return "CScript([%s])" % ', '.join(ops)

    def GetSigOpCount(self, fAccurate):
        """Get the SigOp count.

        fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.

        Note that this is consensus-critical.
        """
        n = 0
        lastOpcode = OP_INVALIDOPCODE
        for (opcode, data, sop_idx) in self.raw_iter():
            if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
                n += 1
            elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
                # Accurate mode uses the preceding OP_N key count;
                # otherwise the consensus worst case of 20 is assumed.
                if fAccurate and (OP_1 <= lastOpcode <= OP_16):
                    n += opcode.decode_op_n()
                else:
                    n += 20
            lastOpcode = opcode
        return n
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
    """Consensus critical, see FindAndDelete() in Satoshi codebase

    Returns `script` with occurrences of `sig` (matched only at opcode
    boundaries) removed.
    """
    r = b''
    last_sop_idx = sop_idx = 0
    # skip starts True so the match decision below is taken before any byte
    # of the first segment is copied.
    skip = True
    for (opcode, data, sop_idx) in script.raw_iter():
        if not skip:
            r += script[last_sop_idx:sop_idx]
        last_sop_idx = sop_idx
        if script[sop_idx:sop_idx + len(sig)] == sig:
            skip = True
        else:
            skip = False
    if not skip:
        # Flush the trailing segment after the last opcode boundary.
        r += script[last_sop_idx:]
    return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
    """Consensus-correct SignatureHash

    Returns (hash, err) to precisely match the consensus-critical behavior of
    the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
    """
    # The out-of-range cases return hash "1" instead of raising, mirroring
    # the original client.
    HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    if inIdx >= len(txTo.vin):
        return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
    # Work on a copy; the caller's transaction is left untouched.
    txtmp = CTransaction(txTo)
    # All scriptSigs are blanked; the signed input gets the scriptPubKey
    # with any OP_CODESEPARATOR removed.
    for txin in txtmp.vin:
        txin.scriptSig = b''
    txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
    if (hashtype & 0x1f) == SIGHASH_NONE:
        # No outputs are signed; other inputs' sequences are zeroed.
        txtmp.vout = []
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    elif (hashtype & 0x1f) == SIGHASH_SINGLE:
        outIdx = inIdx
        if outIdx >= len(txtmp.vout):
            return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
        # Keep only the matching output, padding earlier slots with
        # empty "-1" outputs.
        tmp = txtmp.vout[outIdx]
        txtmp.vout = []
        for i in range(outIdx):
            txtmp.vout.append(CTxOut(-1))
        txtmp.vout.append(tmp)
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0
    if hashtype & SIGHASH_ANYONECANPAY:
        # Sign only the one input being spent.
        tmp = txtmp.vin[inIdx]
        txtmp.vin = []
        txtmp.vin.append(tmp)
    s = txtmp.serialize()
    s += struct.pack(b"<I", hashtype)
    hash = hash256(s)
    return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses.
def SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, amount):
    """Compute the BIP143 (segwit v0) signature digest for txTo.vin[inIdx].

    script   -- scriptCode being signed
    txTo     -- spending transaction
    inIdx    -- index of the input being signed (assumed valid)
    hashtype -- SIGHASH_* flags byte
    amount   -- value (in satoshis) of the output being spent

    Returns the 32-byte double-SHA256 digest.
    """
    hashPrevouts = 0
    hashSequence = 0
    hashOutputs = 0
    if not (hashtype & SIGHASH_ANYONECANPAY):
        # Commit to all outpoints being spent.
        serialize_prevouts = bytes()
        for i in txTo.vin:
            serialize_prevouts += i.prevout.serialize()
        hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
    if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        # Commit to every input's nSequence (plain SIGHASH_ALL only).
        serialize_sequence = bytes()
        for i in txTo.vin:
            serialize_sequence += struct.pack("<I", i.nSequence)
        hashSequence = uint256_from_str(hash256(serialize_sequence))
    if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
        # Commit to all outputs.
        serialize_outputs = bytes()
        for o in txTo.vout:
            serialize_outputs += o.serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
        # SIGHASH_SINGLE commits only to the matching output; out-of-range
        # indices simply leave hashOutputs = 0 (no legacy bug under BIP143).
        serialize_outputs = txTo.vout[inIdx].serialize()
        hashOutputs = uint256_from_str(hash256(serialize_outputs))
    # Assemble the BIP143 preimage in its fixed field order.
    ss = bytes()
    ss += struct.pack("<i", txTo.nVersion)
    ss += ser_uint256(hashPrevouts)
    ss += ser_uint256(hashSequence)
    ss += txTo.vin[inIdx].prevout.serialize()
    ss += ser_string(script)
    ss += struct.pack("<q", amount)  # the spent amount is new vs. legacy
    ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
    ss += ser_uint256(hashOutputs)
    ss += struct.pack("<i", txTo.nLockTime)
    ss += struct.pack("<I", hashtype)
    return hash256(ss)
| |
"""
A Printer for generating readable representation of most sympy classes.
"""
from __future__ import print_function, division
from sympy.core import S, Rational, Pow, Basic, Mul
from sympy.core.mul import _keep_coeff
from sympy.core.numbers import Integer
from .printer import Printer
from sympy.printing.precedence import precedence, PRECEDENCE
import sympy.mpmath.libmp as mlib
from sympy.mpmath.libmp import prec_to_dps
from sympy.utilities import default_sort_key
class StrPrinter(Printer):
    """Printer producing a readable str() form of most sympy objects.

    Each expression type is dispatched to the matching ``_print_<Class>``
    method; anything without a handler falls back to ``emptyPrinter``.
    Settings: ``order`` controls term ordering in Add/Mul ('none' is
    fastest), ``full_prec`` controls Float precision display.
    """
    printmethod = "_sympystr"
    _default_settings = {
        "order": None,
        "full_prec": "auto",
    }

    def parenthesize(self, item, level):
        # Wrap in parentheses when item's precedence would bind wrongly
        # in a context of the given precedence level.
        if precedence(item) <= level:
            return "(%s)" % self._print(item)
        else:
            return self._print(item)

    def stringify(self, args, sep, level=0):
        # Print each arg (parenthesized as needed) and join with sep.
        return sep.join([self.parenthesize(item, level) for item in args])

    def emptyPrinter(self, expr):
        # Fallback for objects with no dedicated _print_* method.
        if isinstance(expr, str):
            return expr
        elif isinstance(expr, Basic):
            if hasattr(expr, "args"):
                return repr(expr)
            else:
                # NOTE(review): bare `raise` with no active exception raises
                # RuntimeError -- reaching here means a malformed Basic.
                raise
        else:
            return str(expr)

    def _print_Add(self, expr, order=None):
        if self.order == 'none':
            terms = list(expr.args)
        else:
            terms = self._as_ordered_terms(expr, order=order)
        PREC = precedence(expr)
        l = []
        # Build an alternating [sign, term, sign, term, ...] list so the
        # leading '+' can be dropped afterwards.
        for term in terms:
            t = self._print(term)
            if t.startswith('-'):
                sign = "-"
                t = t[1:]
            else:
                sign = "+"
            if precedence(term) < PREC:
                l.extend([sign, "(%s)" % t])
            else:
                l.extend([sign, t])
        sign = l.pop(0)
        if sign == '+':
            sign = ""
        return sign + ' '.join(l)

    def _print_And(self, expr):
        return '%s(%s)' % (expr.func, ', '.join(sorted(self._print(a) for a in
            expr.args)))

    def _print_Or(self, expr):
        return '%s(%s)' % (expr.func, ', '.join(sorted(self._print(a) for a in
            expr.args)))

    def _print_AppliedPredicate(self, expr):
        return '%s(%s)' % (expr.func, expr.arg)

    def _print_Basic(self, expr):
        l = [self._print(o) for o in expr.args]
        return expr.__class__.__name__ + "(%s)" % ", ".join(l)

    def _print_BlockMatrix(self, B):
        # NOTE(review): the result of this _print call is discarded; the
        # (1, 1) special case currently has no effect.
        if B.blocks.shape == (1, 1):
            self._print(B.blocks[0, 0])
        return self._print(B.blocks)

    def _print_Catalan(self, expr):
        return 'Catalan'

    def _print_ComplexInfinity(self, expr):
        return 'zoo'

    def _print_Derivative(self, expr):
        return 'Derivative(%s)' % ", ".join(map(self._print, expr.args))

    def _print_dict(self, d):
        # Sort keys canonically so output is deterministic.
        keys = sorted(d.keys(), key=default_sort_key)
        items = []
        for key in keys:
            item = "%s: %s" % (self._print(key), self._print(d[key]))
            items.append(item)
        return "{%s}" % ", ".join(items)

    def _print_Dict(self, expr):
        return self._print_dict(expr)

    def _print_RandomDomain(self, d):
        # Try progressively simpler representations of the domain.
        try:
            return 'Domain: ' + self._print(d.as_boolean())
        except Exception:
            try:
                return ('Domain: ' + self._print(d.symbols) + ' in ' +
                        self._print(d.set))
            except Exception:
                return 'Domain on ' + self._print(d.symbols)

    def _print_Dummy(self, expr):
        return '_' + expr.name

    def _print_EulerGamma(self, expr):
        return 'EulerGamma'

    def _print_Exp1(self, expr):
        return 'E'

    def _print_ExprCondPair(self, expr):
        return '(%s, %s)' % (expr.expr, expr.cond)

    def _print_FiniteSet(self, s):
        s = sorted(s, key=default_sort_key)
        # Abbreviate large sets as "{a, b, c, ..., x, y, z}".
        if len(s) > 10:
            printset = s[:3] + ['...'] + s[-3:]
        else:
            printset = s
        return '{' + ', '.join(self._print(el) for el in printset) + '}'

    def _print_Function(self, expr):
        return expr.func.__name__ + "(%s)" % self.stringify(expr.args, ", ")

    def _print_GeometryEntity(self, expr):
        # GeometryEntity is special -- it's base is tuple
        return str(expr)

    def _print_GoldenRatio(self, expr):
        return 'GoldenRatio'

    def _print_ImaginaryUnit(self, expr):
        return 'I'

    def _print_Infinity(self, expr):
        return 'oo'

    def _print_Integral(self, expr):
        def _xab_tostr(xab):
            # A limit is either just the variable or (var, lo, hi).
            if len(xab) == 1:
                return self._print(xab[0])
            else:
                return self._print((xab[0],) + tuple(xab[1:]))
        L = ', '.join([_xab_tostr(l) for l in expr.limits])
        return 'Integral(%s, %s)' % (self._print(expr.function), L)

    def _print_Interval(self, i):
        if i.left_open:
            left = '('
        else:
            left = '['
        if i.right_open:
            right = ')'
        else:
            right = ']'
        return "%s%s, %s%s" % \
               (left, self._print(i.start), self._print(i.end), right)

    def _print_Inverse(self, I):
        return "%s^-1" % self.parenthesize(I.arg, PRECEDENCE["Pow"])

    def _print_Lambda(self, obj):
        args, expr = obj.args
        if len(args) == 1:
            return "Lambda(%s, %s)" % (args.args[0], expr)
        else:
            arg_string = ", ".join(self._print(arg) for arg in args)
            # BUGFIX: the format string was missing its closing parenthesis,
            # producing e.g. "Lambda((x, y), x + y" for multi-arg lambdas.
            return "Lambda((%s), %s)" % (arg_string, expr)

    def _print_LatticeOp(self, expr):
        args = sorted(expr.args, key=default_sort_key)
        return expr.func.__name__ + "(%s)" % ", ".join(self._print(arg) for arg in args)

    def _print_Limit(self, expr):
        e, z, z0, dir = expr.args
        # dir='+' is the default, so omit it for brevity.
        if str(dir) == "+":
            return "Limit(%s, %s, %s)" % (e, z, z0)
        else:
            return "Limit(%s, %s, %s, dir='%s')" % (e, z, z0, dir)

    def _print_list(self, expr):
        return "[%s]" % self.stringify(expr, ", ")

    def _print_MatrixBase(self, expr):
        return expr._format_str(self)
    # All dense/sparse matrix flavours share the MatrixBase formatting.
    # (NOTE: _print_SparseMatrix is re-defined further below and that later
    # definition wins for SparseMatrix instances.)
    _print_SparseMatrix = \
        _print_MutableSparseMatrix = \
        _print_ImmutableSparseMatrix = \
        _print_Matrix = \
        _print_DenseMatrix = \
        _print_MutableDenseMatrix = \
        _print_ImmutableMatrix = \
        _print_ImmutableDenseMatrix = \
        _print_MatrixBase

    def _print_MatrixElement(self, expr):
        return self._print(expr.parent) + '[%s, %s]'%(expr.i, expr.j)

    def _print_MatrixSlice(self, expr):
        def strslice(x):
            # Render (start, stop, step) Python-slice style, dropping the
            # redundant pieces (step 1, single-row stop, leading 0).
            x = list(x)
            if x[2] == 1:
                del x[2]
            if x[1] == x[0] + 1:
                del x[1]
            if x[0] == 0:
                x[0] = ''
            return ':'.join(map(self._print, x))
        return (self._print(expr.parent) + '[' +
                strslice(expr.rowslice) + ', ' +
                strslice(expr.colslice) + ']')

    def _print_DeferredVector(self, expr):
        return expr.name

    def _print_Mul(self, expr):
        prec = precedence(expr)
        c, e = expr.as_coeff_Mul()
        if c < 0:
            expr = _keep_coeff(-c, e)
            sign = "-"
        else:
            sign = ""
        a = []  # items in the numerator
        b = []  # items that are in the denominator (if any)
        if self.order not in ('old', 'none'):
            args = expr.as_ordered_factors()
        else:
            # use make_args in case expr was something like -x -> x
            args = Mul.make_args(expr)
        # Gather args for numerator/denominator
        for item in args:
            if item.is_commutative and item.is_Pow and item.exp.is_Rational and item.exp.is_negative:
                if item.exp != -1:
                    b.append(Pow(item.base, -item.exp, evaluate=False))
                else:
                    b.append(Pow(item.base, -item.exp))
            elif item.is_Rational and item is not S.Infinity:
                if item.p != 1:
                    a.append(Rational(item.p))
                if item.q != 1:
                    b.append(Rational(item.q))
            else:
                a.append(item)
        a = a or [S.One]
        a_str = list(map(lambda x: self.parenthesize(x, prec), a))
        b_str = list(map(lambda x: self.parenthesize(x, prec), b))
        if len(b) == 0:
            return sign + '*'.join(a_str)
        elif len(b) == 1:
            if len(a) == 1 and not (a[0].is_Atom or a[0].is_Add):
                return sign + "%s/" % a_str[0] + '*'.join(b_str)
            else:
                return sign + '*'.join(a_str) + "/%s" % b_str[0]
        else:
            return sign + '*'.join(a_str) + "/(%s)" % '*'.join(b_str)

    def _print_MatMul(self, expr):
        return '*'.join([self.parenthesize(arg, precedence(expr))
            for arg in expr.args])

    def _print_HadamardProduct(self, expr):
        return '.*'.join([self.parenthesize(arg, precedence(expr))
            for arg in expr.args])

    def _print_MatAdd(self, expr):
        return ' + '.join([self.parenthesize(arg, precedence(expr))
            for arg in expr.args])

    def _print_NaN(self, expr):
        return 'nan'

    def _print_NegativeInfinity(self, expr):
        return '-oo'

    def _print_Normal(self, expr):
        return "Normal(%s, %s)" % (expr.mu, expr.sigma)

    def _print_Order(self, expr):
        if expr.point == S.Zero or not len(expr.variables):
            if len(expr.variables) <= 1:
                return 'O(%s)' % self._print(expr.expr)
            else:
                return 'O(%s)' % self.stringify(expr.args[:-1], ', ', 0)
        else:
            return 'O(%s)' % self.stringify(expr.args, ', ', 0)

    def _print_Cycle(self, expr):
        """We want it to print as Cycle in doctests for which a repr is required.
        With __repr__ defined in Cycle, interactive output gives Cycle form but
        during doctests, the dict's __repr__ form is used. Defining this _print
        function solves that problem.
        >>> from sympy.combinatorics import Cycle
        >>> Cycle(1, 2) # will print as a dict without this method
        Cycle(1, 2)
        """
        return expr.__repr__()

    def _print_Permutation(self, expr):
        from sympy.combinatorics.permutations import Permutation, Cycle
        if Permutation.print_cyclic:
            if not expr.size:
                return 'Permutation()'
            # before taking Cycle notation, see if the last element is
            # a singleton and move it to the head of the string
            s = Cycle(expr)(expr.size - 1).__repr__()[len('Cycle'):]
            last = s.rfind('(')
            if not last == 0 and ',' not in s[last:]:
                s = s[last:] + s[:last]
            return 'Permutation%s' % s
        else:
            s = expr.support()
            if not s:
                if expr.size < 5:
                    return 'Permutation(%s)' % str(expr.array_form)
                return 'Permutation([], size=%s)' % expr.size
            # Prefer the trimmed array form when it is shorter.
            trim = str(expr.array_form[:s[-1] + 1]) + ', size=%s' % expr.size
            use = full = str(expr.array_form)
            if len(trim) < len(full):
                use = trim
            return 'Permutation(%s)' % use

    def _print_TensorIndex(self, expr):
        return expr._pretty()

    def _print_TensorHead(self, expr):
        return expr._pretty()

    def _print_TensMul(self, expr):
        return expr._pretty()

    def _print_TensAdd(self, expr):
        return expr._pretty()

    def _print_PermutationGroup(self, expr):
        p = ['    %s' % str(a) for a in expr.args]
        return 'PermutationGroup([\n%s])' % ',\n'.join(p)

    def _print_PDF(self, expr):
        return 'PDF(%s, (%s, %s, %s))' % \
               (self._print(expr.pdf.args[1]), self._print(expr.pdf.args[0]),
               self._print(expr.domain[0]), self._print(expr.domain[1]))

    def _print_Pi(self, expr):
        return 'pi'

    def _print_PolyRing(self, ring):
        return "Polynomial ring in %s over %s with %s order" % \
            (", ".join(map(self._print, ring.symbols)), ring.domain, ring.order)

    def _print_FracField(self, field):
        return "Rational function field in %s over %s with %s order" % \
            (", ".join(map(self._print, field.symbols)), field.domain, field.order)

    def _print_PolyElement(self, poly):
        return poly.str(self, PRECEDENCE, "%s**%d", "*")

    def _print_FracElement(self, frac):
        if frac.denom == 1:
            return self._print(frac.numer)
        else:
            numer = self.parenthesize(frac.numer, PRECEDENCE["Add"])
            denom = self.parenthesize(frac.denom, PRECEDENCE["Atom"]-1)
            return numer + "/" + denom

    def _print_Poly(self, expr):
        # Build "coeff*gen0**e0*gen1**e1 ..." terms, then append the modulus
        # or domain so the result round-trips through sympify.
        terms, gens = [], [ self._print(s) for s in expr.gens ]
        for monom, coeff in expr.terms():
            s_monom = []
            for i, exp in enumerate(monom):
                if exp > 0:
                    if exp == 1:
                        s_monom.append(gens[i])
                    else:
                        s_monom.append(gens[i] + "**%d" % exp)
            s_monom = "*".join(s_monom)
            if coeff.is_Add:
                if s_monom:
                    s_coeff = "(" + self._print(coeff) + ")"
                else:
                    s_coeff = self._print(coeff)
            else:
                if s_monom:
                    if coeff is S.One:
                        terms.extend(['+', s_monom])
                        continue
                    if coeff is S.NegativeOne:
                        terms.extend(['-', s_monom])
                        continue
                s_coeff = self._print(coeff)
            if not s_monom:
                s_term = s_coeff
            else:
                s_term = s_coeff + "*" + s_monom
            if s_term.startswith('-'):
                terms.extend(['-', s_term[1:]])
            else:
                terms.extend(['+', s_term])
        if terms[0] in ['-', '+']:
            modifier = terms.pop(0)
            if modifier == '-':
                terms[0] = '-' + terms[0]
        format = expr.__class__.__name__ + "(%s, %s"
        from sympy.polys.polyerrors import PolynomialError
        try:
            format += ", modulus=%s" % expr.get_modulus()
        except PolynomialError:
            format += ", domain='%s'" % expr.get_domain()
        format += ")"
        return format % (' '.join(terms), ', '.join(gens))

    def _print_ProductSet(self, p):
        return ' x '.join(self._print(set) for set in p.sets)

    def _print_AlgebraicNumber(self, expr):
        if expr.is_aliased:
            return self._print(expr.as_poly().as_expr())
        else:
            return self._print(expr.as_expr())

    def _print_Pow(self, expr, rational=False):
        PREC = precedence(expr)
        if expr.exp is S.Half and not rational:
            return "sqrt(%s)" % self._print(expr.base)
        if expr.is_commutative:
            if -expr.exp is S.Half and not rational:
                # Note: Don't test "expr.exp == -S.Half" here, because that will
                # match -0.5, which we don't want.
                return "1/sqrt(%s)" % self._print(expr.base)
            if expr.exp == -1:
                return '1/%s' % self.parenthesize(expr.base, PREC)
        e = self.parenthesize(expr.exp, PREC)
        if self.printmethod == '_sympyrepr' and expr.exp.is_Rational and expr.exp.q != 1:
            # the parenthesized exp should be '(Rational(a, b))' so strip parens,
            # but just check to be sure.
            if e.startswith('(Rational'):
                return '%s**%s' % (self.parenthesize(expr.base, PREC), e[1:-1])
        return '%s**%s' % (self.parenthesize(expr.base, PREC), e)

    def _print_MatPow(self, expr):
        PREC = precedence(expr)
        return '%s**%s' % (self.parenthesize(expr.base, PREC),
                           self.parenthesize(expr.exp, PREC))

    def _print_Integer(self, expr):
        return str(expr.p)

    def _print_int(self, expr):
        return str(expr)

    def _print_mpz(self, expr):
        return str(expr)

    def _print_Rational(self, expr):
        if expr.q == 1:
            return str(expr.p)
        else:
            return "%s/%s" % (expr.p, expr.q)

    def _print_PythonRational(self, expr):
        if expr.q == 1:
            return str(expr.p)
        else:
            return "%d/%d" % (expr.p, expr.q)

    def _print_Fraction(self, expr):
        if expr.denominator == 1:
            return str(expr.numerator)
        else:
            return "%s/%s" % (expr.numerator, expr.denominator)

    def _print_mpq(self, expr):
        if expr.denominator == 1:
            return str(expr.numerator)
        else:
            return "%s/%s" % (expr.numerator, expr.denominator)

    def _print_Float(self, expr):
        prec = expr._prec
        if prec < 5:
            dps = 0
        else:
            dps = prec_to_dps(expr._prec)
        # "auto" strips trailing zeros only when printing nested inside a
        # larger expression (print level > 1).
        if self._settings["full_prec"] is True:
            strip = False
        elif self._settings["full_prec"] is False:
            strip = True
        elif self._settings["full_prec"] == "auto":
            strip = self._print_level > 1
        rv = mlib.to_str(expr._mpf_, dps, strip_zeros=strip)
        # Normalize ".0..." forms to "0.0..." forms.
        if rv.startswith('-.0'):
            rv = '-0.' + rv[3:]
        elif rv.startswith('.0'):
            rv = '0.' + rv[2:]
        return rv

    def _print_Relational(self, expr):
        return '%s %s %s' % (self.parenthesize(expr.lhs, precedence(expr)),
                             expr.rel_op,
                             self.parenthesize(expr.rhs, precedence(expr)))

    def _print_RootOf(self, expr):
        return "RootOf(%s, %d)" % (self._print_Add(expr.expr, order='lex'), expr.index)

    def _print_RootSum(self, expr):
        args = [self._print_Add(expr.expr, order='lex')]
        if expr.fun is not S.IdentityFunction:
            args.append(self._print(expr.fun))
        return "RootSum(%s)" % ", ".join(args)

    def _print_GroebnerBasis(self, basis):
        cls = basis.__class__.__name__
        exprs = [ self._print_Add(arg, order=basis.order)
                  for arg in basis.exprs ]
        exprs = "[%s]" % ", ".join(exprs)
        gens = [ self._print(gen) for gen in basis.gens ]
        domain = "domain='%s'" % self._print(basis.domain)
        order = "order='%s'" % self._print(basis.order)
        args = [exprs] + gens + [domain, order]
        return "%s(%s)" % (cls, ", ".join(args))

    def _print_Sample(self, expr):
        return "Sample([%s])" % self.stringify(expr, ", ", 0)

    def _print_set(self, s):
        items = sorted(s, key=default_sort_key)
        args = ', '.join(self._print(item) for item in items)
        if args:
            args = '[%s]' % args
        return '%s(%s)' % (type(s).__name__, args)
    _print_frozenset = _print_set

    def _print_SparseMatrix(self, expr):
        # Overrides the MatrixBase alias defined above: print via a dense copy.
        from sympy.matrices import Matrix
        return self._print(Matrix(expr))

    def _print_Sum(self, expr):
        def _xab_tostr(xab):
            if len(xab) == 1:
                return self._print(xab[0])
            else:
                return self._print((xab[0],) + tuple(xab[1:]))
        L = ', '.join([_xab_tostr(l) for l in expr.limits])
        return 'Sum(%s, %s)' % (self._print(expr.function), L)

    def _print_Symbol(self, expr):
        return expr.name
    _print_MatrixSymbol = _print_Symbol
    _print_RandomSymbol = _print_Symbol

    def _print_Identity(self, expr):
        return "I"

    def _print_ZeroMatrix(self, expr):
        return "0"

    def _print_Predicate(self, expr):
        return "Q.%s" % expr.name

    def _print_str(self, expr):
        return expr

    def _print_tuple(self, expr):
        if len(expr) == 1:
            return "(%s,)" % self._print(expr[0])
        else:
            return "(%s)" % self.stringify(expr, ", ")

    def _print_Tuple(self, expr):
        return self._print_tuple(expr)

    def _print_Transpose(self, T):
        return "%s'" % self.parenthesize(T.arg, PRECEDENCE["Pow"])

    def _print_Uniform(self, expr):
        return "Uniform(%s, %s)" % (expr.a, expr.b)

    def _print_Union(self, expr):
        return ' U '.join(self._print(set) for set in expr.args)

    def _print_Unit(self, expr):
        return expr.abbrev

    def _print_Wild(self, expr):
        return expr.name + '_'

    def _print_WildFunction(self, expr):
        return expr.name + '_'

    def _print_Zero(self, expr):
        return "0"

    def _print_DMP(self, p):
        from sympy.core.sympify import SympifyError
        try:
            if p.ring is not None:
                # TODO incorporate order
                return self._print(p.ring.to_sympy(p))
        except SympifyError:
            pass
        cls = p.__class__.__name__
        rep = self._print(p.rep)
        dom = self._print(p.dom)
        ring = self._print(p.ring)
        return "%s(%s, %s, %s)" % (cls, rep, dom, ring)

    def _print_DMF(self, expr):
        return self._print_DMP(expr)

    def _print_Object(self, object):
        return 'Object("%s")' % object.name

    def _print_IdentityMorphism(self, morphism):
        return 'IdentityMorphism(%s)' % morphism.domain

    def _print_NamedMorphism(self, morphism):
        return 'NamedMorphism(%s, %s, "%s")' % \
               (morphism.domain, morphism.codomain, morphism.name)

    def _print_Category(self, category):
        return 'Category("%s")' % category.name

    def _print_BaseScalarField(self, field):
        return field._coord_sys._names[field._index]

    def _print_BaseVectorField(self, field):
        return 'e_%s' % field._coord_sys._names[field._index]

    def _print_Differential(self, diff):
        field = diff._form_field
        if hasattr(field, '_coord_sys'):
            return 'd%s' % field._coord_sys._names[field._index]
        else:
            return 'd(%s)' % self._print(field)

    def _print_Tr(self, expr):
        #TODO : Handle indices
        return "%s(%s)" % ("Tr", self._print(expr.args[0]))
def sstr(expr, **settings):
    """Returns the expression as a string.
    For large expressions where speed is a concern, use the setting
    order='none'.
    Examples
    ========
    >>> from sympy import symbols, Eq, sstr
    >>> a, b = symbols('a b')
    >>> sstr(Eq(a + b, 0))
    'a + b == 0'
    """
    # Delegate to a StrPrinter configured with the caller's settings.
    return StrPrinter(settings).doprint(expr)
class StrReprPrinter(StrPrinter):
    """(internal) -- see sstrrepr"""
    def _print_str(self, s):
        # Unlike StrPrinter, quote plain strings so they round-trip.
        return "%r" % (s,)
def sstrrepr(expr, **settings):
    """return expr in mixed str/repr form
    i.e. strings are returned in repr form with quotes, and everything else
    is returned in str form.
    This function could be useful for hooking into sys.displayhook
    """
    # Same as sstr(), but via the repr-quoting printer subclass.
    return StrReprPrinter(settings).doprint(expr)
| |
# Copyright 2018 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for PerfKitBenchmarker' StaticVirtualMachine."""
import io
import unittest
from absl import flags
import mock
from perfkitbenchmarker import disk
from perfkitbenchmarker import static_virtual_machine as svm
from perfkitbenchmarker import vm_util
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
# Component name passed to spec constructors (appears in decode errors).
_COMPONENT = 'test_static_vm_spec'
# One spec with only a device path, one with only a mount point.
_DISK_SPEC_DICTS = [{
    'device_path': '/test_device_path'
}, {
    'mount_point': '/test_mount_point'
}]
class TestStaticVirtualMachine(pkb_common_test_case.TestOsMixin,
                               svm.StaticVirtualMachine):
  """Concrete StaticVirtualMachine with the stub OS mixin for testing."""
  pass
def CreateTestStaticVm():
  """Builds a TestStaticVirtualMachine from a default StaticVmSpec."""
  spec = svm.StaticVmSpec(_COMPONENT)
  return TestStaticVirtualMachine(vm_spec=spec)
class StaticVmSpecTest(pkb_common_test_case.PkbCommonTestCase):
  """Tests for decoding svm.StaticVmSpec."""

  def testDefaults(self):
    """A bare spec leaves all optional fields unset except ssh_port=22."""
    spec = svm.StaticVmSpec(_COMPONENT)
    self.assertIsNone(spec.ip_address)
    self.assertIsNone(spec.user_name)
    self.assertIsNone(spec.ssh_private_key)
    self.assertIsNone(spec.internal_ip)
    self.assertEqual(spec.ssh_port, 22)
    self.assertIsNone(spec.password)
    self.assertIsNone(spec.os_type)
    self.assertEqual(spec.disk_specs, [])

  def testDiskSpecs(self):
    """disk_specs dicts are decoded into BaseDiskSpec objects in order."""
    spec = svm.StaticVmSpec(_COMPONENT, disk_specs=_DISK_SPEC_DICTS)
    self.assertEqual(len(spec.disk_specs), 2)
    for disk_spec in spec.disk_specs:
      self.assertIsInstance(disk_spec, disk.BaseDiskSpec)
    self.assertEqual(spec.disk_specs[0].device_path, '/test_device_path')
    self.assertIsNone(spec.disk_specs[0].mount_point)
    self.assertIsNone(spec.disk_specs[1].device_path)
    self.assertEqual(spec.disk_specs[1].mount_point, '/test_mount_point')
class StaticVirtualMachineTest(pkb_common_test_case.PkbCommonTestCase):
  """Tests for svm.StaticVirtualMachine file parsing and pooling."""

  def setUp(self):
    super(StaticVirtualMachineTest, self).setUp()
    # The pool is class-level state; save and clear it so tests are isolated.
    self._initial_pool = svm.StaticVirtualMachine.vm_pool
    svm.StaticVirtualMachine.vm_pool.clear()
    p = mock.patch(vm_util.__name__ + '.GetTempDir', return_value='/tmp/dir')
    p.start()
    self.addCleanup(p.stop)
    FLAGS.image = 'test_image'

  def tearDown(self):
    super(StaticVirtualMachineTest, self).tearDown()
    # Restore the pool saved in setUp.
    svm.StaticVirtualMachine.vm_pool = self._initial_pool

  def _AssertStaticVMsEqual(self, vm1, vm2):
    """Compares the identifying fields of two static VMs."""
    self.assertEqual(vm1.ip_address, vm2.ip_address)
    self.assertEqual(vm1.internal_ip, vm2.internal_ip)
    self.assertEqual(vm1.user_name, vm2.user_name)
    self.assertEqual(vm1.zone, vm2.zone)
    self.assertEqual(vm1.ssh_private_key, vm2.ssh_private_key)

  def testReadFromFile_WrongFormat(self):
    """The file must contain a JSON list, not an object."""
    fp = io.StringIO('{}')
    self.assertRaises(ValueError,
                      svm.StaticVirtualMachine.ReadStaticVirtualMachineFile, fp)

  def testReadFromFile_MissingKey(self):
    """Each VM entry must include required keys beyond ip_address."""
    fp = io.StringIO('[{"ip_address": "10.10.10.3"}]')
    self.assertRaises(ValueError,
                      svm.StaticVirtualMachine.ReadStaticVirtualMachineFile, fp)

  def testReadFromFile_Empty(self):
    """An empty list yields an empty VM pool."""
    fp = io.StringIO('[]')
    svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
    self.assertEqual([], list(svm.StaticVirtualMachine.vm_pool))

  def testReadFromFile_NoErr(self):
    """Valid entries populate the pool in file order."""
    s = ('[{'
         '  "ip_address": "174.12.14.1", '
         '  "user_name": "perfkitbenchmarker", '
         '  "keyfile_path": "perfkitbenchmarker.pem" '
         '}, '
         '{ '
         '  "ip_address": "174.12.14.121", '
         '  "user_name": "ubuntu", '
         '  "keyfile_path": "rackspace.pem", '
         '  "internal_ip": "10.10.10.2", '
         '  "zone": "rackspace_dallas" '
         '}] ')
    fp = io.StringIO(s)
    svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
    vm_pool = svm.StaticVirtualMachine.vm_pool
    self.assertEqual(2, len(vm_pool))
    self._AssertStaticVMsEqual(
        TestStaticVirtualMachine(
            svm.StaticVmSpec(
                _COMPONENT,
                ip_address='174.12.14.1',
                user_name='perfkitbenchmarker',
                ssh_private_key='perfkitbenchmarker.pem')), vm_pool[0])
    self._AssertStaticVMsEqual(
        TestStaticVirtualMachine(
            svm.StaticVmSpec(
                _COMPONENT,
                ip_address='174.12.14.121',
                user_name='ubuntu',
                ssh_private_key='rackspace.pem',
                internal_ip='10.10.10.2',
                zone='rackspace_dallas')), vm_pool[1])

  def testReadFromFile_InvalidScratchDisksType(self):
    """scratch_disk_mountpoints must be a list, not a string."""
    s = ('[{'
         '  "ip_address": "174.12.14.1", '
         '  "user_name": "perfkitbenchmarker", '
         '  "keyfile_path": "perfkitbenchmarker.pem", '
         '  "scratch_disk_mountpoints": "/tmp/google-pkb" '
         '}]')
    fp = io.StringIO(s)
    self.assertRaises(ValueError,
                      svm.StaticVirtualMachine.ReadStaticVirtualMachineFile, fp)

  def testReadFromFile_UnknownOsTypeDefaultsToLinuxRequiredKeys(self):
    """An unrecognized --os_type is validated with the Linux required keys."""
    FLAGS.os_type = 'unknown_os_type'
    s = ('[{'
         '  "ip_address": "174.12.14.1", '
         '  "user_name": "perfkitbenchmarker", '
         '  "keyfile_path": "perfkitbenchmarker.pem"'
         '}]')
    fp = io.StringIO(s)
    svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
    vm_pool = svm.StaticVirtualMachine.vm_pool
    self.assertEqual(1, len(vm_pool))
    self._AssertStaticVMsEqual(
        TestStaticVirtualMachine(
            svm.StaticVmSpec(
                _COMPONENT,
                ip_address='174.12.14.1',
                user_name='perfkitbenchmarker',
                ssh_private_key='perfkitbenchmarker.pem')), vm_pool[0])

  def testCreateReturn(self):
    """GetStaticVirtualMachine pops from the pool; Delete returns the VM."""
    s = ('[{'
         '  "ip_address": "174.12.14.1", '
         '  "user_name": "perfkitbenchmarker", '
         '  "keyfile_path": "perfkitbenchmarker.pem" '
         '}, '
         '{ '
         '  "ip_address": "174.12.14.121", '
         '  "user_name": "ubuntu", '
         '  "keyfile_path": "rackspace.pem", '
         '  "internal_ip": "10.10.10.2", '
         '  "zone": "rackspace_dallas" '
         '}] ')
    fp = io.StringIO(s)
    svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
    self.assertEqual(2, len(svm.StaticVirtualMachine.vm_pool))
    vm0 = svm.StaticVirtualMachine.GetStaticVirtualMachine()
    self.assertTrue(vm0.from_pool)
    self.assertEqual(1, len(svm.StaticVirtualMachine.vm_pool))
    vm0.Delete()
    # Delete puts the VM back, so the next Get returns the same object.
    self.assertEqual(2, len(svm.StaticVirtualMachine.vm_pool))
    vm1 = svm.StaticVirtualMachine.GetStaticVirtualMachine()
    self.assertIs(vm0, vm1)

  def testDiskSpecs(self):
    """Scratch disks come first (mount points), then local disks (paths)."""
    s = """
    [{
        "ip_address": "174.12.14.1",
        "user_name": "ubuntu",
        "keyfile_path": "test_keyfile_path",
        "local_disks": ["/test_local_disk_0", "/test_local_disk_1"],
        "scratch_disk_mountpoints": ["/test_scratch_disk_0",
                                     "/test_scratch_disk_1"]
    }]
    """
    expected_paths_and_mount_points = ((None, '/test_scratch_disk_0'),
                                       (None, '/test_scratch_disk_1'),
                                       ('/test_local_disk_0',
                                        None), ('/test_local_disk_1', None))
    fp = io.StringIO(s)
    svm.StaticVirtualMachine.ReadStaticVirtualMachineFile(fp)
    self.assertEqual(1, len(svm.StaticVirtualMachine.vm_pool))
    vm = svm.StaticVirtualMachine.GetStaticVirtualMachine()
    self.assertTrue(vm.from_pool)
    self.assertEqual(len(vm.disk_specs), 4)
    for disk_spec, expected_paths in zip(vm.disk_specs,
                                         expected_paths_and_mount_points):
      expected_device_path, expected_mount_point = expected_paths
      self.assertEqual(disk_spec.device_path, expected_device_path)
      self.assertEqual(disk_spec.mount_point, expected_mount_point)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| |
from test.lib.testing import eq_, is_, is_not_
from test.lib import testing
from test.lib.schema import Table, Column
from sqlalchemy import Integer, String, ForeignKey, bindparam
from sqlalchemy.orm import backref, subqueryload, subqueryload_all, \
mapper, relationship, clear_mappers, create_session, lazyload, \
aliased, joinedload, deferred, undefer, eagerload_all,\
Session
from test.lib.testing import eq_, assert_raises, \
assert_raises_message
from test.lib.assertsql import CompiledSQL
from test.lib import fixtures
from test.orm import _fixtures
import sqlalchemy as sa
class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
run_inserts = 'once'
run_deletes = None
    def test_basic(self):
        """Subqueryload of a collection loads parent + children in 2 queries."""
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(
                            mapper(Address, addresses),
                            order_by=Address.id)
        })
        sess = create_session()
        q = sess.query(User).options(subqueryload(User.addresses))
        def go():
            eq_(
                    [User(id=7, addresses=[
                        Address(id=1, email_address='jack@bean.com')])],
                    q.filter(User.id==7).all()
            )
        # One query for users, one for the subquery-loaded addresses.
        self.assert_sql_count(testing.db, go, 2)
        def go():
            eq_(
                self.static.user_address_result,
                q.order_by(User.id).all()
            )
        self.assert_sql_count(testing.db, go, 2)
    def test_from_aliased(self):
        """subqueryload works against an aliased() entity, one and two levels deep."""
        users, Dingaling, User, dingalings, Address, addresses = (self.tables.users,
                                self.classes.Dingaling,
                                self.classes.User,
                                self.tables.dingalings,
                                self.classes.Address,
                                self.tables.addresses)
        mapper(Dingaling, dingalings)
        mapper(Address, addresses, properties={
            'dingalings':relationship(Dingaling, order_by=Dingaling.id)
        })
        mapper(User, users, properties={
            'addresses':relationship(
                            Address,
                            order_by=Address.id)
        })
        sess = create_session()
        u = aliased(User)
        q = sess.query(u).options(subqueryload(u.addresses))
        def go():
            eq_(
                    [User(id=7, addresses=[
                        Address(id=1, email_address='jack@bean.com')])],
                    q.filter(u.id==7).all()
            )
        self.assert_sql_count(testing.db, go, 2)
        def go():
            eq_(
                self.static.user_address_result,
                q.order_by(u.id).all()
            )
        self.assert_sql_count(testing.db, go, 2)
        # Two-level eager chain: addresses plus each address's dingalings.
        q = sess.query(u).\
                options(subqueryload_all(u.addresses, Address.dingalings))
        def go():
            eq_(
                [
                    User(id=8, addresses=[
                        Address(id=2, email_address='ed@wood.com', dingalings=[Dingaling()]),
                        Address(id=3, email_address='ed@bettyboop.com'),
                        Address(id=4, email_address='ed@lala.com'),
                    ]),
                    User(id=9, addresses=[
                        Address(id=5, dingalings=[Dingaling()])
                    ]),
                ],
                q.filter(u.id.in_([8, 9])).all()
            )
        # Three queries: users, addresses, dingalings.
        self.assert_sql_count(testing.db, go, 3)
    def test_from_get(self):
        """Query.get() still triggers the subquery load (2 statements)."""
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(
                            mapper(Address, addresses),
                            order_by=Address.id)
        })
        sess = create_session()
        q = sess.query(User).options(subqueryload(User.addresses))
        def go():
            eq_(
                    User(id=7, addresses=[
                        Address(id=1, email_address='jack@bean.com')]),
                    q.get(7)
            )
        self.assert_sql_count(testing.db, go, 2)
    def test_from_params(self):
        """Bound parameters in the base query carry through to the subquery."""
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(
                            mapper(Address, addresses),
                            order_by=Address.id)
        })
        sess = create_session()
        q = sess.query(User).options(subqueryload(User.addresses))
        def go():
            eq_(
                    User(id=7, addresses=[
                        Address(id=1, email_address='jack@bean.com')]),
                    q.filter(User.id==bindparam('foo')).params(foo=7).one()
            )
        self.assert_sql_count(testing.db, go, 2)
    def test_disable_dynamic(self):
        """test no subquery option on a dynamic."""
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)
        mapper(User, users, properties={
            'addresses':relationship(Address, lazy="dynamic")
        })
        mapper(Address, addresses)
        sess = create_session()
        # previously this would not raise, but would emit
        # the query needlessly and put the result nowhere.
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "User.addresses' does not support object population - eager loading cannot be applied.",
            sess.query(User).options(subqueryload(User.addresses)).first,
        )
    def test_many_to_many_plain(self):
        """lazy='subquery' on a many-to-many loads everything in 2 queries."""
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                self.tables.items,
                                self.tables.item_keywords,
                                self.classes.Keyword,
                                self.classes.Item)
        mapper(Keyword, keywords)
        mapper(Item, items, properties = dict(
                keywords = relationship(Keyword, secondary=item_keywords,
                                    lazy='subquery', order_by=keywords.c.id)))
        q = create_session().query(Item).order_by(Item.id)
        def go():
            eq_(self.static.item_keyword_result, q.all())
        self.assert_sql_count(testing.db, go, 2)
    def test_many_to_many_with_join(self):
        """An explicit join() on the relation doesn't break the subquery load."""
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                self.tables.items,
                                self.tables.item_keywords,
                                self.classes.Keyword,
                                self.classes.Item)
        mapper(Keyword, keywords)
        mapper(Item, items, properties = dict(
                keywords = relationship(Keyword, secondary=item_keywords,
                                    lazy='subquery', order_by=keywords.c.id)))
        q = create_session().query(Item).order_by(Item.id)
        def go():
            eq_(self.static.item_keyword_result[0:2],
                q.join('keywords').filter(Keyword.name == 'red').all())
        self.assert_sql_count(testing.db, go, 2)
    def test_many_to_many_with_join_alias(self):
        """Same as test_many_to_many_with_join but through an aliased join."""
        keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
                                self.tables.items,
                                self.tables.item_keywords,
                                self.classes.Keyword,
                                self.classes.Item)
        mapper(Keyword, keywords)
        mapper(Item, items, properties = dict(
                keywords = relationship(Keyword, secondary=item_keywords,
                                    lazy='subquery', order_by=keywords.c.id)))
        q = create_session().query(Item).order_by(Item.id)
        def go():
            eq_(self.static.item_keyword_result[0:2],
                (q.join('keywords', aliased=True).
                 filter(Keyword.name == 'red')).all())
        self.assert_sql_count(testing.db, go, 2)
def test_orderby(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses':relationship(mapper(Address, addresses),
lazy='subquery', order_by=addresses.c.email_address),
})
q = create_session().query(User)
eq_([
User(id=7, addresses=[
Address(id=1)
]),
User(id=8, addresses=[
Address(id=3, email_address='ed@bettyboop.com'),
Address(id=4, email_address='ed@lala.com'),
Address(id=2, email_address='ed@wood.com')
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=10, addresses=[])
], q.order_by(User.id).all())
def test_orderby_multi(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses':relationship(mapper(Address, addresses),
lazy='subquery',
order_by=[
addresses.c.email_address,
addresses.c.id]),
})
q = create_session().query(User)
eq_([
User(id=7, addresses=[
Address(id=1)
]),
User(id=8, addresses=[
Address(id=3, email_address='ed@bettyboop.com'),
Address(id=4, email_address='ed@lala.com'),
Address(id=2, email_address='ed@wood.com')
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=10, addresses=[])
], q.order_by(User.id).all())
def test_orderby_related(self):
"""A regular mapper select on a single table can
order by a relationship to a second table"""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relationship(Address,
lazy='subquery',
order_by=addresses.c.id),
))
q = create_session().query(User)
l = q.filter(User.id==Address.user_id).\
order_by(Address.email_address).all()
eq_([
User(id=8, addresses=[
Address(id=2, email_address='ed@wood.com'),
Address(id=3, email_address='ed@bettyboop.com'),
Address(id=4, email_address='ed@lala.com'),
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=7, addresses=[
Address(id=1)
]),
], l)
def test_orderby_desc(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='subquery',
order_by=[
sa.desc(addresses.c.email_address)
]),
))
sess = create_session()
eq_([
User(id=7, addresses=[
Address(id=1)
]),
User(id=8, addresses=[
Address(id=2, email_address='ed@wood.com'),
Address(id=4, email_address='ed@lala.com'),
Address(id=3, email_address='ed@bettyboop.com'),
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=10, addresses=[])
], sess.query(User).order_by(User.id).all())
_pathing_runs = [
( "lazyload", "lazyload", "lazyload", 15 ),
("subqueryload", "lazyload", "lazyload", 12),
("subqueryload", "subqueryload", "lazyload", 8),
("joinedload", "subqueryload", "lazyload", 7),
("lazyload", "lazyload", "subqueryload", 12),
("subqueryload", "subqueryload", "subqueryload", 4),
("subqueryload", "subqueryload", "joinedload", 3),
]
# _pathing_runs = [("subqueryload", "subqueryload", "joinedload", 3)]
# _pathing_runs = [("subqueryload", "subqueryload", "subqueryload", 4)]
def test_options_pathing(self):
self._do_options_test(self._pathing_runs)
def test_mapper_pathing(self):
self._do_mapper_test(self._pathing_runs)
    def _do_options_test(self, configs):
        """Map the User->orders->items->keywords chain with all-lazy
        defaults, then for each config row apply the named loader
        strategies as per-query options and verify the SQL count.

        ``configs`` rows are the 4-tuples described on ``_pathing_runs``.
        """
        users, Keyword, orders, items, order_items, Order, Item, User, keywords, item_keywords = (self.tables.users,
                                self.classes.Keyword,
                                self.tables.orders,
                                self.tables.items,
                                self.tables.order_items,
                                self.classes.Order,
                                self.classes.Item,
                                self.classes.User,
                                self.tables.keywords,
                                self.tables.item_keywords)

        mapper(User, users, properties={
            'orders':relationship(Order, order_by=orders.c.id), # o2m, m2o
        })
        mapper(Order, orders, properties={
            'items':relationship(Item,
                        secondary=order_items, order_by=items.c.id), #m2m
        })
        mapper(Item, items, properties={
            'keywords':relationship(Keyword,
                                        secondary=item_keywords,
                                        order_by=keywords.c.id) #m2m
        })
        mapper(Keyword, keywords)

        # strategy names that translate into query options; "lazyload"
        # entries simply emit no option (lazy is the mapped default here)
        callables = {
            'joinedload':joinedload,
            'subqueryload':subqueryload
        }

        for o, i, k, count in configs:
            options = []
            if o in callables:
                options.append(callables[o](User.orders))
            if i in callables:
                options.append(callables[i](User.orders, Order.items))
            if k in callables:
                options.append(callables[k](User.orders, Order.items, Item.keywords))
            self._do_query_tests(options, count)
    def _do_mapper_test(self, configs):
        """Like ``_do_options_test``, but bake each config row's
        strategies into the mappers via ``relationship(lazy=...)``,
        rebuilding (and tearing down) the mappers per configuration.
        """
        users, Keyword, orders, items, order_items, Order, Item, User, keywords, item_keywords = (self.tables.users,
                                self.classes.Keyword,
                                self.tables.orders,
                                self.tables.items,
                                self.tables.order_items,
                                self.classes.Order,
                                self.classes.Item,
                                self.classes.User,
                                self.tables.keywords,
                                self.tables.item_keywords)

        # translate the test's strategy names into relationship() lazy args
        opts = {
            'lazyload':'select',
            'joinedload':'joined',
            'subqueryload':'subquery',
        }

        for o, i, k, count in configs:
            mapper(User, users, properties={
                'orders':relationship(Order, lazy=opts[o], order_by=orders.c.id),
            })
            mapper(Order, orders, properties={
                'items':relationship(Item,
                            secondary=order_items, lazy=opts[i], order_by=items.c.id),
            })
            mapper(Item, items, properties={
                'keywords':relationship(Keyword,
                                            lazy=opts[k],
                                            secondary=item_keywords,
                                            order_by=keywords.c.id)
            })
            mapper(Keyword, keywords)

            try:
                # no query options: the mapped strategies do the work
                self._do_query_tests([], count)
            finally:
                # mappers must be dropped before the next configuration
                clear_mappers()
    def _do_query_tests(self, opts, count):
        """Run three query shapes under loader options ``opts``.

        Only the first (plain, ordered) query is checked against the
        expected SQL statement ``count``; the filtered and joined
        variants are checked for result correctness only.
        """
        Order, User = self.classes.Order, self.classes.User

        sess = create_session()
        def go():
            eq_(
                sess.query(User).options(*opts).order_by(User.id).all(),
                self.static.user_item_keyword_result
            )
        self.assert_sql_count(testing.db, go, count)

        # filtered variant, same session
        eq_(
            sess.query(User).options(*opts).filter(User.name=='fred').
                order_by(User.id).all(),
            self.static.user_item_keyword_result[2:3]
        )

        # joined variant against a fresh session
        sess = create_session()
        eq_(
            sess.query(User).options(*opts).join(User.orders).
                filter(Order.id==3).\
                order_by(User.id).all(),
            self.static.user_item_keyword_result[0:1]
        )
def test_cyclical(self):
"""A circular eager relationship breaks the cycle with a lazy loader"""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='subquery',
backref=sa.orm.backref('user', lazy='subquery'),
order_by=Address.id)
))
is_(sa.orm.class_mapper(User).get_property('addresses').lazy, 'subquery')
is_(sa.orm.class_mapper(Address).get_property('user').lazy, 'subquery')
sess = create_session()
eq_(self.static.user_address_result, sess.query(User).order_by(User.id).all())
    def test_double(self):
        """Eager loading with two relationships simultaneously,
        from the same table, using aliases."""
        users, orders, User, Address, Order, addresses = (self.tables.users,
                                self.tables.orders,
                                self.classes.User,
                                self.classes.Address,
                                self.classes.Order,
                                self.tables.addresses)

        # two aliases of the same orders table, distinguished by isopen
        openorders = sa.alias(orders, 'openorders')
        closedorders = sa.alias(orders, 'closedorders')

        mapper(Address, addresses)
        mapper(Order, orders)

        # non-primary mappers let one class be selected from each alias
        open_mapper = mapper(Order, openorders, non_primary=True)
        closed_mapper = mapper(Order, closedorders, non_primary=True)

        mapper(User, users, properties = dict(
            addresses = relationship(Address, lazy='subquery',
                                        order_by=addresses.c.id),
            open_orders = relationship(
                                open_mapper,
                                primaryjoin=sa.and_(openorders.c.isopen == 1,
                                users.c.id==openorders.c.user_id),
                                lazy='subquery', order_by=openorders.c.id),
            closed_orders = relationship(
                                closed_mapper,
                                primaryjoin=sa.and_(closedorders.c.isopen == 0,
                                users.c.id==closedorders.c.user_id),
                                lazy='subquery', order_by=closedorders.c.id)))

        q = create_session().query(User).order_by(User.id)

        def go():
            eq_([
                User(
                    id=7,
                    addresses=[Address(id=1)],
                    open_orders = [Order(id=3)],
                    closed_orders = [Order(id=1), Order(id=5)]
                ),
                User(
                    id=8,
                    addresses=[Address(id=2), Address(id=3), Address(id=4)],
                    open_orders = [],
                    closed_orders = []
                ),
                User(
                    id=9,
                    addresses=[Address(id=5)],
                    open_orders = [Order(id=4)],
                    closed_orders = [Order(id=2)]
                ),
                User(id=10)

            ], q.all())
        # base query + one subquery per eager relationship = 4 statements
        self.assert_sql_count(testing.db, go, 4)
    def test_double_same_mappers(self):
        """Eager loading with two relationships simultaneously,
        from the same table, sharing a single primary mapper (no
        aliases; the two collections are split by primaryjoin)."""
        addresses, items, order_items, orders, Item, User, Address, Order, users = (self.tables.addresses,
                                self.tables.items,
                                self.tables.order_items,
                                self.tables.orders,
                                self.classes.Item,
                                self.classes.User,
                                self.classes.Address,
                                self.classes.Order,
                                self.tables.users)

        mapper(Address, addresses)
        mapper(Order, orders, properties={
            'items': relationship(Item, secondary=order_items, lazy='subquery',
                                  order_by=items.c.id)})
        mapper(Item, items)
        mapper(User, users, properties=dict(
            addresses=relationship(Address, lazy='subquery', order_by=addresses.c.id),
            # same Order mapper for both; isopen in the primaryjoin
            # decides which rows land in which collection
            open_orders=relationship(
                Order,
                primaryjoin=sa.and_(orders.c.isopen == 1,
                                    users.c.id == orders.c.user_id),
                lazy='subquery', order_by=orders.c.id),
            closed_orders=relationship(
                Order,
                primaryjoin=sa.and_(orders.c.isopen == 0,
                                    users.c.id == orders.c.user_id),
                lazy='subquery', order_by=orders.c.id)))
        q = create_session().query(User).order_by(User.id)

        def go():
            eq_([
                User(id=7,
                     addresses=[
                       Address(id=1)],
                     open_orders=[Order(id=3,
                                        items=[
                                          Item(id=3),
                                          Item(id=4),
                                          Item(id=5)])],
                     closed_orders=[Order(id=1,
                                          items=[
                                            Item(id=1),
                                            Item(id=2),
                                            Item(id=3)]),
                                    Order(id=5,
                                          items=[
                                            Item(id=5)])]),
                User(id=8,
                     addresses=[
                       Address(id=2),
                       Address(id=3),
                       Address(id=4)],
                     open_orders = [],
                     closed_orders = []),
                User(id=9,
                     addresses=[
                       Address(id=5)],
                     open_orders=[
                       Order(id=4,
                             items=[
                               Item(id=1),
                               Item(id=5)])],
                     closed_orders=[
                       Order(id=2,
                             items=[
                               Item(id=1),
                               Item(id=2),
                               Item(id=3)])]),
                User(id=10)
            ], q.all())
        # base query + addresses + 2 order collections + items for each
        # order collection = 6 statements
        self.assert_sql_count(testing.db, go, 6)
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit(self):
"""Limit operations combined with lazy-load relationships."""
users, items, order_items, orders, Item, User, Address, Order, addresses = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.addresses)
mapper(Item, items)
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, lazy='subquery',
order_by=items.c.id)
})
mapper(User, users, properties={
'addresses':relationship(mapper(Address, addresses),
lazy='subquery',
order_by=addresses.c.id),
'orders':relationship(Order, lazy='select', order_by=orders.c.id)
})
sess = create_session()
q = sess.query(User)
l = q.order_by(User.id).limit(2).offset(1).all()
eq_(self.static.user_all_result[1:3], l)
sess = create_session()
l = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
eq_(list(reversed(self.static.user_all_result[0:2])), l)
def test_mapper_order_by(self):
users, User, Address, addresses = (self.tables.users,
self.classes.User,
self.classes.Address,
self.tables.addresses)
mapper(Address, addresses)
mapper(User, users, properties={
'addresses':relationship(Address,
lazy='subquery',
order_by=addresses.c.id),
},order_by=users.c.id.desc())
sess = create_session()
q = sess.query(User)
l = q.limit(2).all()
eq_(l, list(reversed(self.static.user_address_result[2:4])))
def test_one_to_many_scalar(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(User, users, properties = dict(
address = relationship(mapper(Address, addresses),
lazy='subquery', uselist=False)
))
q = create_session().query(User)
def go():
l = q.filter(users.c.id == 7).all()
eq_([User(id=7, address=Address(id=1))], l)
self.assert_sql_count(testing.db, go, 2)
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_many_to_one(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(Address, addresses, properties = dict(
user = relationship(mapper(User, users), lazy='subquery')
))
sess = create_session()
q = sess.query(Address)
def go():
a = q.filter(addresses.c.id==1).one()
is_not_(a.user, None)
u1 = sess.query(User).get(7)
is_(a.user, u1)
self.assert_sql_count(testing.db, go, 2)
    def test_double_with_aggregate(self):
        """Subquery-load both a full collection and a scalar derived
        from an aggregate subselect over the same table."""
        User, users, orders, Order = (self.classes.User,
                                self.tables.users,
                                self.tables.orders,
                                self.classes.Order)

        # per-user maximum order id, as a selectable
        max_orders_by_user = sa.select([sa.func.max(orders.c.id).label('order_id')],
                    group_by=[orders.c.user_id]
                    ).alias('max_orders_by_user')

        # orders rows restricted to each user's max order
        max_orders = orders.select(orders.c.id==max_orders_by_user.c.order_id).\
                alias('max_orders')

        mapper(Order, orders)
        mapper(User, users, properties={
               'orders':relationship(Order, backref='user', lazy='subquery',
                                            order_by=orders.c.id),
               # non-primary mapper selects Order rows from the
               # aggregate-derived selectable; uselist=False -> scalar
               'max_order':relationship(
                                mapper(Order, max_orders, non_primary=True),
                                lazy='subquery', uselist=False)
              })

        q = create_session().query(User)

        def go():
            eq_([
                User(id=7, orders=[
                        Order(id=1),
                        Order(id=3),
                        Order(id=5),
                    ],
                    max_order=Order(id=5)
                ),
                User(id=8, orders=[]),
                User(id=9, orders=[Order(id=2),Order(id=4)],
                    max_order=Order(id=4)
                ),
                User(id=10),
            ], q.order_by(User.id).all())
        # base query + one subquery per eager relationship = 3 statements
        self.assert_sql_count(testing.db, go, 3)
def test_uselist_false_warning(self):
"""test that multiple rows received by a
uselist=False raises a warning."""
User, users, orders, Order = (self.classes.User,
self.tables.users,
self.tables.orders,
self.classes.Order)
mapper(User, users, properties={
'order':relationship(Order, uselist=False)
})
mapper(Order, orders)
s = create_session()
assert_raises(sa.exc.SAWarning,
s.query(User).options(subqueryload(User.order)).all)
class LoadOnExistingTest(_fixtures.FixtureTest):
    """test that loaders from a base Query fully populate."""

    run_inserts = 'once'
    run_deletes = None

    def _collection_to_scalar_fixture(self):
        # User.addresses (collection) -> Address.dingaling (scalar),
        # all relationships at the default lazy strategy
        User, Address, Dingaling = self.classes.User, \
            self.classes.Address, self.classes.Dingaling
        mapper(User, self.tables.users, properties={
            'addresses':relationship(Address),
        })
        mapper(Address, self.tables.addresses, properties={
            'dingaling':relationship(Dingaling)
        })
        mapper(Dingaling, self.tables.dingalings)

        sess = Session(autoflush=False)
        return User, Address, Dingaling, sess

    def _collection_to_collection_fixture(self):
        # User.orders (collection) -> Order.items (m2m collection),
        # all relationships at the default lazy strategy
        User, Order, Item = self.classes.User, \
            self.classes.Order, self.classes.Item
        mapper(User, self.tables.users, properties={
            'orders':relationship(Order),
        })
        mapper(Order, self.tables.orders, properties={
            'items':relationship(Item, secondary=self.tables.order_items),
        })
        mapper(Item, self.tables.items)

        sess = Session(autoflush=False)
        return User, Order, Item, sess

    def _eager_config_fixture(self):
        # User.addresses is subquery-eager at the mapper level
        User, Address = self.classes.User, self.classes.Address
        mapper(User, self.tables.users, properties={
            'addresses':relationship(Address, lazy="subquery"),
        })
        mapper(Address, self.tables.addresses)
        sess = Session(autoflush=False)
        return User, Address, sess

    def _deferred_config_fixture(self):
        # like _eager_config_fixture, plus User.name is deferred
        User, Address = self.classes.User, self.classes.Address
        mapper(User, self.tables.users, properties={
            'name':deferred(self.tables.users.c.name),
            'addresses':relationship(Address, lazy="subquery"),
        })
        mapper(Address, self.tables.addresses)
        sess = Session(autoflush=False)
        return User, Address, sess

    def test_no_query_on_refresh(self):
        # refreshing an expired instance emits one statement and does
        # not also run the collection's subquery load
        User, Address, sess = self._eager_config_fixture()

        u1 = sess.query(User).get(8)
        assert 'addresses' in u1.__dict__
        sess.expire(u1)
        def go():
            eq_(u1.id, 8)
        self.assert_sql_count(testing.db, go, 1)
        assert 'addresses' not in u1.__dict__

    def test_no_query_on_deferred(self):
        # loading a deferred column emits one statement and does not
        # also reload the expired collection
        User, Address, sess = self._deferred_config_fixture()
        u1 = sess.query(User).get(8)
        assert 'addresses' in u1.__dict__
        sess.expire(u1, ['addresses'])
        def go():
            eq_(u1.name, 'ed')
        self.assert_sql_count(testing.db, go, 1)
        assert 'addresses' not in u1.__dict__

    def test_populate_existing_propagate(self):
        # populate_existing() overwrites local, unflushed changes on
        # the collection and on related instances
        User, Address, sess = self._eager_config_fixture()
        u1 = sess.query(User).get(8)
        u1.addresses[2].email_address = "foofoo"
        del u1.addresses[1]
        u1 = sess.query(User).populate_existing().filter_by(id=8).one()
        # collection is reverted
        eq_(len(u1.addresses), 3)

        # attributes on related items reverted
        eq_(u1.addresses[2].email_address, "ed@lala.com")

    def test_loads_second_level_collection_to_scalar(self):
        User, Address, Dingaling, sess = self._collection_to_scalar_fixture()

        u1 = sess.query(User).get(8)
        a1 = Address()
        u1.addresses.append(a1)
        a2 = u1.addresses[0]
        a2.email_address = 'foo'
        sess.query(User).options(subqueryload_all("addresses.dingaling")).\
                    filter_by(id=8).all()
        # the pending (unflushed) Address stays in place and untouched
        assert u1.addresses[-1] is a1
        for a in u1.addresses:
            if a is not a1:
                assert 'dingaling' in a.__dict__
            else:
                assert 'dingaling' not in a.__dict__
            if a is a2:
                # local unflushed attribute change is preserved
                eq_(a2.email_address, 'foo')

    def test_loads_second_level_collection_to_collection(self):
        User, Order, Item, sess = self._collection_to_collection_fixture()

        u1 = sess.query(User).get(7)
        u1.orders
        o1 = Order()
        u1.orders.append(o1)
        sess.query(User).options(subqueryload_all("orders.items")).\
                    filter_by(id=7).all()
        for o in u1.orders:
            if o is not o1:
                assert 'items' in o.__dict__
            else:
                assert 'items' not in o.__dict__

    def test_load_two_levels_collection_to_scalar(self):
        User, Address, Dingaling, sess = self._collection_to_scalar_fixture()

        u1 = sess.query(User).filter_by(id=8).options(subqueryload("addresses")).one()
        sess.query(User).filter_by(id=8).options(subqueryload_all("addresses.dingaling")).first()
        assert 'dingaling' in u1.addresses[0].__dict__

    def test_load_two_levels_collection_to_collection(self):
        User, Order, Item, sess = self._collection_to_collection_fixture()

        u1 = sess.query(User).filter_by(id=7).options(subqueryload("orders")).one()
        sess.query(User).filter_by(id=7).options(subqueryload_all("orders.items")).first()
        assert 'items' in u1.orders[0].__dict__
class OrderBySecondaryTest(fixtures.MappedTest):
    """Order a subquery-loaded many-to-many collection by a column on
    the association ("secondary") table."""

    @classmethod
    def define_tables(cls, metadata):
        Table('m2m', metadata,
              Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
              Column('aid', Integer, ForeignKey('a.id')),
              Column('bid', Integer, ForeignKey('b.id')))

        Table('a', metadata,
              Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
              Column('data', String(50)))
        Table('b', metadata,
              Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
              Column('data', String(50)))

    @classmethod
    def fixtures(cls):
        # m2m rows are inserted with ids deliberately out of order so
        # that order_by=m2m.c.id is observable in the results below
        return dict(
            a=(('id', 'data'),
               (1, 'a1'),
               (2, 'a2')),

            b=(('id', 'data'),
               (1, 'b1'),
               (2, 'b2'),
               (3, 'b3'),
               (4, 'b4')),

            m2m=(('id', 'aid', 'bid'),
                 (2, 1, 1),
                 (4, 2, 4),
                 (1, 1, 3),
                 (6, 2, 2),
                 (3, 1, 2),
                 (5, 2, 3)))

    def test_ordering(self):
        a, m2m, b = (self.tables.a,
                     self.tables.m2m,
                     self.tables.b)

        class A(fixtures.ComparableEntity):pass
        class B(fixtures.ComparableEntity):pass

        mapper(A, a, properties={
            # collection order follows the association row id, not B's
            'bs':relationship(B, secondary=m2m, lazy='subquery', order_by=m2m.c.id)
        })
        mapper(B, b)

        sess = create_session()
        def go():
            eq_(sess.query(A).all(), [
                A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
                A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
            ])
        self.assert_sql_count(testing.db, go, 2)
class SelfReferentialTest(fixtures.MappedTest):
    """Subquery eager loading of a self-referential relationship
    (nodes.parent_id references nodes.id), bounded by join_depth."""

    @classmethod
    def define_tables(cls, metadata):
        Table('nodes', metadata,
            Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
            Column('parent_id', Integer, ForeignKey('nodes.id')),
            Column('data', String(30)))

    @testing.fails_on('maxdb', 'FIXME: unknown')
    def test_basic(self):
        nodes = self.tables.nodes

        class Node(fixtures.ComparableEntity):
            def append(self, node):
                self.children.append(node)

        mapper(Node, nodes, properties={
            # eager-load children three levels deep
            'children':relationship(Node,
                                lazy='subquery',
                                join_depth=3, order_by=nodes.c.id)
        })
        sess = create_session()
        n1 = Node(data='n1')
        n1.append(Node(data='n11'))
        n1.append(Node(data='n12'))
        n1.append(Node(data='n13'))
        n1.children[1].append(Node(data='n121'))
        n1.children[1].append(Node(data='n122'))
        n1.children[1].append(Node(data='n123'))
        n2 = Node(data='n2')
        n2.append(Node(data='n21'))
        n2.children[0].append(Node(data='n211'))
        n2.children[0].append(Node(data='n212'))

        sess.add(n1)
        sess.add(n2)
        sess.flush()
        sess.expunge_all()

        def go():
            d = sess.query(Node).filter(Node.data.in_(['n1', 'n2'])).\
                        order_by(Node.data).all()
            eq_([Node(data='n1', children=[
                Node(data='n11'),
                Node(data='n12', children=[
                    Node(data='n121'),
                    Node(data='n122'),
                    Node(data='n123')
                ]),
                Node(data='n13')
            ]),
            Node(data='n2', children=[
                Node(data='n21', children=[
                    Node(data='n211'),
                    Node(data='n212'),
                ])
            ])
            ], d)
        self.assert_sql_count(testing.db, go, 4)

    def test_lazy_fallback_doesnt_affect_eager(self):
        # past join_depth=1, loading falls back to lazy loads; the
        # grandchildren are still retrievable
        nodes = self.tables.nodes

        class Node(fixtures.ComparableEntity):
            def append(self, node):
                self.children.append(node)

        mapper(Node, nodes, properties={
            'children':relationship(Node, lazy='subquery', join_depth=1,
                                order_by=nodes.c.id)
        })
        sess = create_session()
        n1 = Node(data='n1')
        n1.append(Node(data='n11'))
        n1.append(Node(data='n12'))
        n1.append(Node(data='n13'))
        n1.children[1].append(Node(data='n121'))
        n1.children[1].append(Node(data='n122'))
        n1.children[1].append(Node(data='n123'))
        sess.add(n1)
        sess.flush()
        sess.expunge_all()

        def go():
            allnodes = sess.query(Node).order_by(Node.data).all()

            n12 = allnodes[2]
            eq_(n12.data, 'n12')
            eq_([
                Node(data='n121'),
                Node(data='n122'),
                Node(data='n123')
            ], list(n12.children))
        self.assert_sql_count(testing.db, go, 4)

    def test_with_deferred(self):
        nodes = self.tables.nodes

        class Node(fixtures.ComparableEntity):
            def append(self, node):
                self.children.append(node)

        mapper(Node, nodes, properties={
            'children':relationship(Node, lazy='subquery', join_depth=3,
                                order_by=nodes.c.id),
            'data':deferred(nodes.c.data)
        })
        sess = create_session()
        n1 = Node(data='n1')
        n1.append(Node(data='n11'))
        n1.append(Node(data='n12'))
        sess.add(n1)
        sess.flush()
        sess.expunge_all()

        # deferred 'data' loads per-instance on comparison
        def go():
            eq_(
                Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
                sess.query(Node).order_by(Node.id).first(),
            )
        self.assert_sql_count(testing.db, go, 6)

        sess.expunge_all()

        # undeferring the top level saves one statement
        def go():
            eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
                sess.query(Node).options(undefer('data')).order_by(Node.id).first())
        self.assert_sql_count(testing.db, go, 5)

        sess.expunge_all()

        # undeferring both levels saves two more
        def go():
            eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
                sess.query(Node).options(undefer('data'),
                                        undefer('children.data')).first())
        self.assert_sql_count(testing.db, go, 3)

    def test_options(self):
        # eager depth supplied via subqueryload_all option rather
        # than join_depth on the relationship
        nodes = self.tables.nodes

        class Node(fixtures.ComparableEntity):
            def append(self, node):
                self.children.append(node)

        mapper(Node, nodes, properties={
            'children':relationship(Node, order_by=nodes.c.id)
        }, order_by=nodes.c.id)
        sess = create_session()
        n1 = Node(data='n1')
        n1.append(Node(data='n11'))
        n1.append(Node(data='n12'))
        n1.append(Node(data='n13'))
        n1.children[1].append(Node(data='n121'))
        n1.children[1].append(Node(data='n122'))
        n1.children[1].append(Node(data='n123'))
        sess.add(n1)
        sess.flush()
        sess.expunge_all()

        def go():
            d = sess.query(Node).filter_by(data='n1').\
                        options(subqueryload_all('children.children')).first()
            eq_(Node(data='n1', children=[
                Node(data='n11'),
                Node(data='n12', children=[
                    Node(data='n121'),
                    Node(data='n122'),
                    Node(data='n123')
                ]),
                Node(data='n13')
            ]), d)
        self.assert_sql_count(testing.db, go, 3)

    @testing.fails_on('maxdb', 'FIXME: unknown')
    def test_no_depth(self):
        """no join depth is set, so no eager loading occurs."""
        nodes = self.tables.nodes

        class Node(fixtures.ComparableEntity):
            def append(self, node):
                self.children.append(node)

        mapper(Node, nodes, properties={
            'children':relationship(Node, lazy='subquery')
        })
        sess = create_session()
        n1 = Node(data='n1')
        n1.append(Node(data='n11'))
        n1.append(Node(data='n12'))
        n1.append(Node(data='n13'))
        n1.children[1].append(Node(data='n121'))
        n1.children[1].append(Node(data='n122'))
        n1.children[1].append(Node(data='n123'))
        n2 = Node(data='n2')
        n2.append(Node(data='n21'))
        sess.add(n1)
        sess.add(n2)
        sess.flush()
        sess.expunge_all()

        def go():
            d = sess.query(Node).filter(Node.data.in_(['n1', 'n2'])).order_by(Node.data).all()
            eq_([
                Node(data='n1', children=[
                    Node(data='n11'),
                    Node(data='n12', children=[
                        Node(data='n121'),
                        Node(data='n122'),
                        Node(data='n123')
                    ]),
                    Node(data='n13')
                ]),
                Node(data='n2', children=[
                    Node(data='n21')
                ])
            ], d)
        self.assert_sql_count(testing.db, go, 4)
class InheritanceToRelatedTest(fixtures.MappedTest):
    """subqueryload of a relationship defined on the base class of a
    joined-inheritance hierarchy, queried via with_polymorphic."""

    @classmethod
    def define_tables(cls, metadata):
        Table('foo', metadata,
            Column("id", Integer, primary_key=True),
            Column("type", String(50)),
            Column("related_id", Integer, ForeignKey("related.id"))
        )
        Table("bar", metadata,
            Column("id", Integer, ForeignKey('foo.id'), primary_key=True),
        )
        Table("baz", metadata,
            Column("id", Integer, ForeignKey('foo.id'), primary_key=True),
        )
        Table("related", metadata,
            Column("id", Integer, primary_key=True),
        )

    @classmethod
    def setup_classes(cls):
        class Foo(cls.Comparable):
            pass
        class Bar(Foo):
            pass
        class Baz(Foo):
            pass
        class Related(cls.Comparable):
            pass

    @classmethod
    def fixtures(cls):
        return dict(
            foo = [
                ('id', 'type', 'related_id'),
                (1, 'bar', 1),
                (2, 'bar', 2),
                (3, 'baz', 1),
                (4, 'baz', 2),
            ],
            bar = [
                ('id', ),
                (1,),
                (2,)
            ],
            baz = [
                ('id', ),
                (3,),
                (4,)
            ],
            related = [
                ('id', ),
                (1,),
                (2,)
            ]
        )

    @classmethod
    def setup_mappers(cls):
        # Foo.related lives on the polymorphic base; Bar/Baz inherit it
        mapper(cls.classes.Foo, cls.tables.foo, properties={
            'related':relationship(cls.classes.Related)
        }, polymorphic_on=cls.tables.foo.c.type)
        mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar',
                    inherits=cls.classes.Foo)
        mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz',
                    inherits=cls.classes.Foo)
        mapper(cls.classes.Related, cls.tables.related)

    def test_caches_query_per_base(self):
        Foo, Bar, Baz, Related = self.classes.Foo, self.classes.Bar, \
                        self.classes.Baz, self.classes.Related
        s = Session(testing.db)
        def go():
            # one base query + one subquery for Foo.related across
            # both subtypes = 2 statements
            eq_(
                s.query(Foo).with_polymorphic([Bar, Baz]).order_by(Foo.id).options(subqueryload(Foo.related)).all(),
                [
                    Bar(id=1,related=Related(id=1)),
                    Bar(id=2,related=Related(id=2)),
                    Baz(id=3,related=Related(id=1)),
                    Baz(id=4,related=Related(id=2))
                ]
            )
        self.assert_sql_count(testing.db, go, 2)
| |
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# paddle.version / paddle.cuda_env are importable only from an installed
# wheel (presumably generated at build time — TODO confirm); from a raw
# source checkout the import fails and we warn instead of raising.
try:
    from paddle.version import full_version as __version__
    from paddle.version import commit as __git_commit__
    from paddle.cuda_env import *
except ImportError:
    import sys
    sys.stderr.write('''Warning with import paddle: you should not
    import paddle from the source directory; please install paddlepaddle*.whl firstly.'''
                     )
from .batch import batch # noqa: F401
from .fluid import monkey_patch_variable
from .fluid.dygraph import monkey_patch_math_varbase
monkey_patch_variable()
monkey_patch_math_varbase()
from .framework.dtype import dtype as dtype # noqa: F401
from paddle.framework.dtype import uint8 # noqa: F401
from paddle.framework.dtype import int8 # noqa: F401
from paddle.framework.dtype import int16 # noqa: F401
from paddle.framework.dtype import int32 # noqa: F401
from paddle.framework.dtype import int64 # noqa: F401
from paddle.framework.dtype import float16 # noqa: F401
from paddle.framework.dtype import float32 # noqa: F401
from paddle.framework.dtype import float64 # noqa: F401
from paddle.framework.dtype import bfloat16 # noqa: F401
from paddle.framework.dtype import bool # noqa: F401
from paddle.framework.dtype import complex64 # noqa: F401
from paddle.framework.dtype import complex128 # noqa: F401
# Re-export VarBase under the public name "Tensor"; __qualname__ is
# overridden so the class presents itself as 'Tensor' rather than
# 'VarBase' (NOTE(review): presumably for repr/doc purposes — confirm).
from .framework import VarBase as Tensor  # noqa: F401
Tensor.__qualname__ = 'Tensor'  # noqa: F401
import paddle.compat # noqa: F401
import paddle.distributed # noqa: F401
import paddle.sysconfig # noqa: F401
import paddle.distribution # noqa: F401
import paddle.nn # noqa: F401
import paddle.distributed.fleet # noqa: F401
import paddle.optimizer # noqa: F401
import paddle.metric # noqa: F401
import paddle.regularizer # noqa: F401
import paddle.incubate # noqa: F401
import paddle.autograd # noqa: F401
import paddle.device # noqa: F401
import paddle.jit # noqa: F401
import paddle.amp # noqa: F401
import paddle.dataset # noqa: F401
import paddle.inference # noqa: F401
import paddle.io # noqa: F401
import paddle.onnx # noqa: F401
import paddle.reader # noqa: F401
import paddle.static # noqa: F401
import paddle.vision # noqa: F401
from .tensor.attribute import is_complex # noqa: F401
from .tensor.attribute import is_integer # noqa: F401
from .tensor.attribute import rank # noqa: F401
from .tensor.attribute import shape # noqa: F401
from .tensor.attribute import real # noqa: F401
from .tensor.attribute import imag # noqa: F401
from .tensor.attribute import is_floating_point # noqa: F401
from .tensor.creation import to_tensor # noqa: F401
from .tensor.creation import diag # noqa: F401
from .tensor.creation import diagflat # noqa: F401
from .tensor.creation import eye # noqa: F401
from .tensor.creation import linspace # noqa: F401
from .tensor.creation import ones # noqa: F401
from .tensor.creation import ones_like # noqa: F401
from .tensor.creation import zeros # noqa: F401
from .tensor.creation import zeros_like # noqa: F401
from .tensor.creation import arange # noqa: F401
from .tensor.creation import full # noqa: F401
from .tensor.creation import full_like # noqa: F401
from .tensor.creation import triu # noqa: F401
from .tensor.creation import tril # noqa: F401
from .tensor.creation import meshgrid # noqa: F401
from .tensor.creation import empty # noqa: F401
from .tensor.creation import empty_like # noqa: F401
from .tensor.creation import assign # noqa: F401
from .tensor.creation import complex # noqa: F401
from .tensor.creation import clone # noqa: F401
from .tensor.linalg import matmul # noqa: F401
from .tensor.linalg import dot # noqa: F401
from .tensor.linalg import norm # noqa: F401
from .tensor.linalg import transpose # noqa: F401
from .tensor.linalg import dist # noqa: F401
from .tensor.linalg import t # noqa: F401
from .tensor.linalg import cross # noqa: F401
from .tensor.linalg import cholesky # noqa: F401
from .tensor.linalg import bmm # noqa: F401
from .tensor.linalg import histogram # noqa: F401
from .tensor.linalg import bincount # noqa: F401
from .tensor.linalg import mv # noqa: F401
from .tensor.logic import equal # noqa: F401
from .tensor.linalg import eigvalsh # noqa: F401
from .tensor.logic import greater_equal # noqa: F401
from .tensor.logic import greater_than # noqa: F401
from .tensor.logic import is_empty # noqa: F401
from .tensor.logic import less_equal # noqa: F401
from .tensor.logic import less_than # noqa: F401
from .tensor.logic import logical_and # noqa: F401
from .tensor.logic import logical_not # noqa: F401
from .tensor.logic import logical_or # noqa: F401
from .tensor.logic import logical_xor # noqa: F401
from .tensor.logic import bitwise_and # noqa: F401
from .tensor.logic import bitwise_not # noqa: F401
from .tensor.logic import bitwise_or # noqa: F401
from .tensor.logic import bitwise_xor # noqa: F401
from .tensor.logic import not_equal # noqa: F401
from .tensor.logic import allclose # noqa: F401
from .tensor.logic import isclose # noqa: F401
from .tensor.logic import equal_all # noqa: F401
from .tensor.logic import is_tensor # noqa: F401
from .tensor.manipulation import cast # noqa: F401
from .tensor.manipulation import concat # noqa: F401
from .tensor.manipulation import broadcast_tensors # noqa: F401
from .tensor.manipulation import expand # noqa: F401
from .tensor.manipulation import broadcast_to # noqa: F401
from .tensor.manipulation import expand_as # noqa: F401
from .tensor.manipulation import tile # noqa: F401
from .tensor.manipulation import flatten # noqa: F401
from .tensor.manipulation import gather # noqa: F401
from .tensor.manipulation import gather_nd # noqa: F401
from .tensor.manipulation import reshape # noqa: F401
from .tensor.manipulation import reshape_ # noqa: F401
from .tensor.manipulation import flip as reverse # noqa: F401
from .tensor.manipulation import scatter # noqa: F401
from .tensor.manipulation import scatter_ # noqa: F401
from .tensor.manipulation import scatter_nd_add # noqa: F401
from .tensor.manipulation import scatter_nd # noqa: F401
from .tensor.manipulation import shard_index # noqa: F401
from .tensor.manipulation import slice # noqa: F401
from .tensor.manipulation import split # noqa: F401
from .tensor.manipulation import squeeze # noqa: F401
from .tensor.manipulation import squeeze_ # noqa: F401
from .tensor.manipulation import stack # noqa: F401
from .tensor.manipulation import strided_slice # noqa: F401
from .tensor.manipulation import unique # noqa: F401
from .tensor.manipulation import unique_consecutive # noqa: F401
from .tensor.manipulation import unsqueeze # noqa: F401
from .tensor.manipulation import unsqueeze_ # noqa: F401
from .tensor.manipulation import unstack # noqa: F401
from .tensor.manipulation import flip # noqa: F401
from .tensor.manipulation import rot90 # noqa: F401
from .tensor.manipulation import unbind # noqa: F401
from .tensor.manipulation import roll # noqa: F401
from .tensor.manipulation import chunk # noqa: F401
from .tensor.manipulation import tolist # noqa: F401
from .tensor.manipulation import take_along_axis # noqa: F401
from .tensor.manipulation import put_along_axis # noqa: F401
from .tensor.manipulation import tensordot # noqa: F401
from .tensor.manipulation import as_complex # noqa: F401
from .tensor.manipulation import as_real # noqa: F401
from .tensor.manipulation import moveaxis # noqa: F401
from .tensor.manipulation import repeat_interleave # noqa: F401
from .tensor.math import abs # noqa: F401
from .tensor.math import acos # noqa: F401
from .tensor.math import asin # noqa: F401
from .tensor.math import atan # noqa: F401
from .tensor.math import atan2 # noqa: F401
from .tensor.math import ceil # noqa: F401
from .tensor.math import cos # noqa: F401
from .tensor.math import tan # noqa: F401
from .tensor.math import cosh # noqa: F401
from .tensor.math import cumsum # noqa: F401
from .tensor.math import cumprod # noqa: F401
from .tensor.math import logit # noqa: F401
from .tensor.math import exp # noqa: F401
from .tensor.math import expm1 # noqa: F401
from .tensor.math import floor # noqa: F401
from .tensor.math import increment # noqa: F401
from .tensor.math import log # noqa: F401
from .tensor.math import log2 # noqa: F401
from .tensor.math import log10 # noqa: F401
from .tensor.math import multiplex # noqa: F401
from .tensor.math import pow # noqa: F401
from .tensor.math import reciprocal # noqa: F401
from .tensor.math import all # noqa: F401
from .tensor.math import any # noqa: F401
from .tensor.math import round # noqa: F401
from .tensor.math import rsqrt # noqa: F401
from .tensor.math import scale # noqa: F401
from .tensor.math import sign # noqa: F401
from .tensor.math import sin # noqa: F401
from .tensor.math import sinh # noqa: F401
from .tensor.math import sqrt # noqa: F401
from .tensor.math import square # noqa: F401
from .tensor.math import stanh # noqa: F401
from .tensor.math import sum # noqa: F401
from .tensor.math import nansum # noqa: F401
from .tensor.math import tanh # noqa: F401
from .tensor.math import tanh_ # noqa: F401
from .tensor.math import add_n # noqa: F401
from .tensor.math import max # noqa: F401
from .tensor.math import maximum # noqa: F401
from .tensor.math import amax # noqa: F401
from .tensor.math import min # noqa: F401
from .tensor.math import minimum # noqa: F401
from .tensor.math import amin # noqa: F401
from .tensor.math import mm # noqa: F401
from .tensor.math import divide # noqa: F401
from .tensor.math import floor_divide # noqa: F401
from .tensor.math import remainder # noqa: F401
from .tensor.math import mod # noqa: F401
from .tensor.math import floor_mod # noqa: F401
from .tensor.math import multiply # noqa: F401
from .tensor.math import renorm # noqa: F401
from .tensor.math import add # noqa: F401
from .tensor.math import subtract # noqa: F401
from .tensor.math import logsumexp # noqa: F401
from .tensor.math import inverse # noqa: F401
from .tensor.math import log1p # noqa: F401
from .tensor.math import erf # noqa: F401
from .tensor.math import addmm # noqa: F401
from .tensor.math import clip # noqa: F401
from .tensor.math import trace # noqa: F401
from .tensor.math import diagonal # noqa: F401
from .tensor.math import kron # noqa: F401
from .tensor.math import isfinite # noqa: F401
from .tensor.math import isinf # noqa: F401
from .tensor.math import isnan # noqa: F401
from .tensor.math import prod # noqa: F401
from .tensor.math import broadcast_shape # noqa: F401
from .tensor.math import conj # noqa: F401
from .tensor.math import trunc # noqa: F401
from .tensor.math import digamma # noqa: F401
from .tensor.math import neg # noqa: F401
from .tensor.math import lgamma # noqa: F401
from .tensor.math import acosh # noqa: F401
from .tensor.math import asinh # noqa: F401
from .tensor.math import atanh # noqa: F401
from .tensor.math import lerp # noqa: F401
from .tensor.math import erfinv # noqa: F401
from .tensor.math import rad2deg # noqa: F401
from .tensor.math import deg2rad # noqa: F401
from .tensor.math import gcd # noqa: F401
from .tensor.math import lcm # noqa: F401
from .tensor.math import diff # noqa: F401
from .tensor.math import angle # noqa: F401
from .tensor.math import fmax # noqa: F401
from .tensor.math import fmin # noqa: F401
from .tensor.math import inner # noqa: F401
from .tensor.math import outer # noqa: F401
from .tensor.random import bernoulli # noqa: F401
from .tensor.random import poisson # noqa: F401
from .tensor.random import multinomial # noqa: F401
from .tensor.random import standard_normal # noqa: F401
from .tensor.random import normal # noqa: F401
from .tensor.random import uniform # noqa: F401
from .tensor.random import randn # noqa: F401
from .tensor.random import rand # noqa: F401
from .tensor.random import randint # noqa: F401
from .tensor.random import randint_like # noqa: F401
from .tensor.random import randperm # noqa: F401
from .tensor.search import argmax # noqa: F401
from .tensor.search import argmin # noqa: F401
from .tensor.search import argsort # noqa: F401
from .tensor.search import searchsorted # noqa: F401
from .tensor.search import masked_select # noqa: F401
from .tensor.search import topk # noqa: F401
from .tensor.search import where # noqa: F401
from .tensor.search import index_select # noqa: F401
from .tensor.search import nonzero # noqa: F401
from .tensor.search import sort # noqa: F401
from .tensor.search import kthvalue # noqa: F401
from .tensor.search import mode # noqa: F401
from .tensor.to_string import set_printoptions # noqa: F401
from .tensor.einsum import einsum # noqa: F401
from .framework.random import seed # noqa: F401
from .framework.random import get_cuda_rng_state # noqa: F401
from .framework.random import set_cuda_rng_state # noqa: F401
from .framework import ParamAttr # noqa: F401
from .framework import create_parameter # noqa: F401
from .framework import CPUPlace # noqa: F401
from .framework import IPUPlace # noqa: F401
from .framework import CUDAPlace # noqa: F401
from .framework import NPUPlace # noqa: F401
from .framework import CUDAPinnedPlace # noqa: F401
from .framework import MLUPlace # noqa: F401
from .autograd import grad # noqa: F401
from .autograd import no_grad # noqa: F401
from .autograd import set_grad_enabled # noqa: F401
from .autograd import is_grad_enabled # noqa: F401
from .framework import save # noqa: F401
from .framework import load # noqa: F401
from .framework import DataParallel # noqa: F401
from .framework import set_default_dtype # noqa: F401
from .framework import get_default_dtype # noqa: F401
from .tensor.search import index_sample # noqa: F401
from .tensor.stat import mean # noqa: F401
from .tensor.stat import std # noqa: F401
from .tensor.stat import var # noqa: F401
from .tensor.stat import numel # noqa: F401
from .tensor.stat import median # noqa: F401
from .tensor.stat import quantile # noqa: F401
from .device import get_cudnn_version # noqa: F401
from .device import set_device # noqa: F401
from .device import get_device # noqa: F401
from .fluid.framework import is_compiled_with_cinn # noqa: F401
from .fluid.framework import is_compiled_with_cuda # noqa: F401
from .fluid.framework import is_compiled_with_rocm # noqa: F401
from .fluid.framework import disable_signal_handler # noqa: F401
from .fluid.framework import get_flags # noqa: F401
from .fluid.framework import set_flags # noqa: F401
from .device import is_compiled_with_xpu # noqa: F401
from .device import is_compiled_with_npu # noqa: F401
from .device import is_compiled_with_ipu # noqa: F401
from .device import is_compiled_with_mlu # noqa: F401
from .device import XPUPlace # noqa: F401
from .fluid.dygraph.base import enable_dygraph as disable_static # noqa: F401
from .fluid.dygraph.base import disable_dygraph as enable_static # noqa: F401
from .fluid.framework import in_dygraph_mode as in_dynamic_mode # noqa: F401
from .fluid.layers import crop_tensor as crop # noqa: F401
# high-level api
from .hapi import Model # noqa: F401
from . import callbacks # noqa: F401
from .hapi import summary # noqa: F401
from .hapi import flops # noqa: F401
from . import hub # noqa: F401
from . import linalg # noqa: F401
from . import fft # noqa: F401
from . import signal # noqa: F401
import paddle.text # noqa: F401
import paddle.vision # noqa: F401
from .tensor.random import check_shape # noqa: F401
# CINN has to set a flag to include a lib
if is_compiled_with_cinn():
    import os
    # Locate the CUDA runtime header bundled in the wheel's ``libs``
    # directory and, only if it actually shipped, export its directory so
    # CINN's JIT compiler can find it at run time.
    package_dir = os.path.dirname(os.path.abspath(__file__))
    runtime_include_dir = os.path.join(package_dir, "libs")
    cuh_file = os.path.join(runtime_include_dir, "cinn_cuda_runtime_source.cuh")
    if os.path.exists(cuh_file):
        os.environ['runtime_include_dir'] = runtime_include_dir
# Paddle starts in dynamic-graph (imperative) mode by default.
disable_static()
__all__ = [ # noqa
'dtype',
'uint8',
'int8',
'int16',
'int32',
'int64',
'float16',
'float32',
'float64',
'bfloat16',
'bool',
'complex64',
'complex128',
'addmm',
'allclose',
'isclose',
't',
'add',
'subtract',
'diag',
'diagflat',
'isnan',
'scatter_nd_add',
'unstack',
'get_default_dtype',
'save',
'multinomial',
'get_cuda_rng_state',
'rank',
'empty_like',
'eye',
'cumsum',
'cumprod',
'logit',
'sign',
'is_empty',
'equal',
'equal_all',
'is_tensor',
'is_complex',
'is_integer',
'cross',
'where',
'log1p',
'cos',
'tan',
'mean',
'mode',
'mv',
'in_dynamic_mode',
'min',
'amin',
'any',
'slice',
'normal',
'logsumexp',
'full',
'unsqueeze',
'unsqueeze_',
'argmax',
'Model',
'summary',
'flops',
'sort',
'searchsorted',
'split',
'logical_and',
'full_like',
'less_than',
'kron',
'clip',
'Tensor',
'crop',
'ParamAttr',
'stanh',
'randint',
'randint_like',
'assign',
'gather',
'scale',
'zeros',
'rsqrt',
'squeeze',
'squeeze_',
'to_tensor',
'gather_nd',
'isinf',
'uniform',
'floor_divide',
'remainder',
'floor_mod',
'roll',
'batch',
'max',
'amax',
'logical_or',
'bitwise_and',
'bitwise_or',
'bitwise_xor',
'bitwise_not',
'mm',
'flip',
'rot90',
'bincount',
'histogram',
'multiplex',
'CUDAPlace',
'NPUPlace',
'empty',
'shape',
'real',
'imag',
'is_floating_point',
'complex',
'reciprocal',
'rand',
'less_equal',
'triu',
'sin',
'dist',
'unbind',
'meshgrid',
'arange',
'load',
'numel',
'median',
'quantile',
'no_grad',
'set_grad_enabled',
'is_grad_enabled',
'mod',
'abs',
'tril',
'pow',
'zeros_like',
'maximum',
'topk',
'index_select',
'CPUPlace',
'matmul',
'seed',
'acos',
'logical_xor',
'exp',
'expm1',
'bernoulli',
'poisson',
'sinh',
'round',
'DataParallel',
'argmin',
'prod',
'broadcast_shape',
'conj',
'neg',
'lgamma',
'lerp',
'erfinv',
'inner',
'outer',
'square',
'divide',
'ceil',
'atan',
'atan2',
'rad2deg',
'deg2rad',
'gcd',
'lcm',
'expand',
'broadcast_to',
'ones_like',
'index_sample',
'cast',
'grad',
'all',
'ones',
'not_equal',
'sum',
'nansum',
'tile',
'greater_equal',
'isfinite',
'create_parameter',
'dot',
'increment',
'erf',
'bmm',
'chunk',
'tolist',
'tensordot',
'greater_than',
'shard_index',
'argsort',
'tanh',
'tanh_',
'transpose',
'randn',
'strided_slice',
'unique',
'unique_consecutive',
'set_cuda_rng_state',
'set_printoptions',
'std',
'flatten',
'asin',
'multiply',
'disable_static',
'masked_select',
'var',
'trace',
'enable_static',
'scatter_nd',
'set_default_dtype',
'disable_signal_handler',
'expand_as',
'stack',
'sqrt',
'randperm',
'linspace',
'reshape',
'reshape_',
'reverse',
'nonzero',
'CUDAPinnedPlace',
'logical_not',
'add_n',
'minimum',
'scatter',
'scatter_',
'floor',
'cosh',
'log',
'log2',
'log10',
'concat',
'check_shape',
'trunc',
'digamma',
'standard_normal',
'diagonal',
'broadcast_tensors',
'einsum',
'set_flags',
'get_flags',
'asinh',
'acosh',
'atanh',
'as_complex',
'as_real',
'diff',
'angle',
'fmax',
'fmin',
'moveaxis',
'repeat_interleave',
'clone',
'kthvalue',
'renorm',
'take_along_axis',
'put_along_axis',
]
| |
import logging
import collections
import bintrees
import networkx
from ...errors import SimEngineError
from ..plugin import KnowledgeBasePlugin
from .function import Function
l = logging.getLogger("angr.knowledge.function_manager")
class FunctionDict(dict):
    """
    FunctionDict is a dict where the keys are function starting addresses and
    map to the associated :class:`Function`.

    An AVL tree mirrors the key set so that floor/ceiling address queries run
    in O(log n).
    """
    def __init__(self, backref, *args, **kwargs):
        # backref: the owning FunctionManager; passed to lazily-created
        # Function instances.
        self._backref = backref
        self._avltree = bintrees.AVLTree()
        super(FunctionDict, self).__init__(*args, **kwargs)

    def __missing__(self, key):
        """Lazily create (and cache) a Function for an unknown address."""
        if isinstance(key, (int, long)):
            addr = key
        else:
            raise ValueError("FunctionDict.__missing__ only supports int as key type")
        t = Function(self._backref, addr)
        self[addr] = t
        return t

    def __setitem__(self, addr, func):
        # Keep the AVL tree in sync with the dict contents.
        self._avltree[addr] = func
        super(FunctionDict, self).__setitem__(addr, func)

    def __delitem__(self, addr):
        del self._avltree[addr]
        super(FunctionDict, self).__delitem__(addr)

    def copy(self):
        """
        Return a shallow copy that is still a FunctionDict.

        dict.copy() would return a plain dict and silently drop ``_backref``
        and ``_avltree`` (breaking floor_addr/ceiling_addr on the copy), so we
        rebuild explicitly.  Items go through __setitem__ to repopulate the
        AVL tree.
        """
        fd = FunctionDict(self._backref)
        for addr, func in self.items():
            fd[addr] = func
        return fd

    def floor_addr(self, addr):
        """Greatest known function address <= addr (KeyError if none)."""
        return self._avltree.floor_key(addr)

    def ceiling_addr(self, addr):
        """Least known function address >= addr (KeyError if none)."""
        return self._avltree.ceiling_key(addr)
class FunctionManager(KnowledgeBasePlugin, collections.Mapping):
    """
    This is a function boundaries management tool. It takes in intermediate
    results during CFG generation, and manages a function map of the binary.

    It behaves like a read-mostly mapping from function start address (or
    function name, for lookups) to :class:`Function`, and additionally
    maintains a callgraph (a :class:`networkx.MultiDiGraph` whose nodes are
    function addresses).
    """
    def __init__(self, kb):
        super(FunctionManager, self).__init__()
        self._kb = kb
        self._function_map = FunctionDict(self)
        self.callgraph = networkx.MultiDiGraph()
        self.block_map = {}
        # Registers used for passing arguments around
        self._arg_registers = kb._project.arch.argument_registers

    def copy(self):
        """Return a shallow copy sharing Function objects with this one."""
        fm = FunctionManager(self._kb)
        fm._function_map = self._function_map.copy()
        fm.callgraph = networkx.MultiDiGraph(self.callgraph)
        fm._arg_registers = self._arg_registers.copy()
        return fm

    def clear(self):
        """Drop all known functions, the callgraph, and the block map."""
        self._function_map.clear()
        self.callgraph = networkx.MultiDiGraph()
        self.block_map.clear()

    # NOTE(review): method name misspells "generate"; kept as-is so any
    # existing callers do not break.
    def _genenare_callmap_sif(self, filepath):
        """
        Generate a sif file from the call map.
        :param filepath: Path of the sif file
        :return: None
        """
        with open(filepath, "wb") as f:
            for src, dst in self.callgraph.edges():
                f.write("%#x\tDirectEdge\t%#x\n" % (src, dst))

    def _add_node(self, function_addr, node, syscall=None, size=None):
        """Register a block node with the function at function_addr."""
        if type(node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            node = self._kb._project.factory.snippet(node, size=size)
        dst_func = self._function_map[function_addr]
        # Only override the syscall flag when the caller gave an explicit
        # True/False (None means "leave unchanged").
        if syscall in (True, False):
            dst_func.is_syscall = syscall
        dst_func._register_nodes(True, node)
        self.block_map[node.addr] = node

    def _add_call_to(self, function_addr, from_node, to_addr, retn_node, syscall=None, stmt_idx=None, ins_addr=None,
                     return_to_outside=False):
        """Record a call edge from function_addr to to_addr on the callgraph."""
        if type(from_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            from_node = self._kb._project.factory.snippet(from_node)
        if type(retn_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            retn_node = self._kb._project.factory.snippet(retn_node)
        dest_func = self._function_map[to_addr]
        if syscall in (True, False):
            dest_func.is_syscall = syscall
        func = self._function_map[function_addr]
        func._call_to(from_node, dest_func, retn_node, stmt_idx=stmt_idx, ins_addr=ins_addr,
                      return_to_outside=return_to_outside
                      )
        func._add_call_site(from_node.addr, to_addr, retn_node.addr if retn_node else None)
        if return_to_outside:
            func.add_retout_site(from_node)
        # is there any existing edge on the callgraph?
        edge_data = {'type': 'call'}
        if function_addr not in self.callgraph or \
                to_addr not in self.callgraph[function_addr] or \
                edge_data not in self.callgraph[function_addr][to_addr].values():
            self.callgraph.add_edge(function_addr, to_addr, **edge_data)

    def _add_fakeret_to(self, function_addr, from_node, to_node, confirmed=None, syscall=None, to_outside=False,
                        to_function_addr=None):
        """Record a fake-return (fall-through after call) edge."""
        if type(from_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            from_node = self._kb._project.factory.snippet(from_node)
        if type(to_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            to_node = self._kb._project.factory.snippet(to_node)
        src_func = self._function_map[function_addr]
        if syscall in (True, False):
            src_func.is_syscall = syscall
        src_func._fakeret_to(from_node, to_node, confirmed=confirmed, to_outside=to_outside)
        if to_outside and to_function_addr is not None:
            # mark it on the callgraph
            edge_data = {'type': 'fakeret'}
            if function_addr not in self.callgraph or \
                    to_function_addr not in self.callgraph[function_addr] or \
                    edge_data not in self.callgraph[function_addr][to_function_addr].values():
                self.callgraph.add_edge(function_addr, to_function_addr, **edge_data)

    def _remove_fakeret(self, function_addr, from_node, to_node):
        """Remove a previously-recorded fake-return edge inside a function."""
        if type(from_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            from_node = self._kb._project.factory.snippet(from_node)
        if type(to_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            to_node = self._kb._project.factory.snippet(to_node)
        self._function_map[function_addr]._remove_fakeret(from_node, to_node)

    def _add_return_from(self, function_addr, from_node, to_node=None):  # pylint:disable=unused-argument
        """Mark from_node as a return site of the function at function_addr."""
        if type(from_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            from_node = self._kb._project.factory.snippet(from_node)
        self._function_map[function_addr]._add_return_site(from_node)

    def _add_transition_to(self, function_addr, from_node, to_node, ins_addr=None, stmt_idx=None):
        """Record an intra-function transition (jump) edge."""
        if type(from_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            from_node = self._kb._project.factory.snippet(from_node)
        if type(to_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            to_node = self._kb._project.factory.snippet(to_node)
        self._function_map[function_addr]._transit_to(from_node, to_node, ins_addr=ins_addr, stmt_idx=stmt_idx)

    def _add_outside_transition_to(self, function_addr, from_node, to_node, to_function_addr=None, ins_addr=None,
                                   stmt_idx=None):
        """Record a jump that leaves the function (e.g. a tail jump)."""
        if type(from_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            from_node = self._kb._project.factory.snippet(from_node)
        if type(to_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            try:
                to_node = self._kb._project.factory.snippet(to_node)
            except SimEngineError:
                # we cannot get the snippet, but we should at least tell the function that it's going to jump out here
                self._function_map[function_addr].add_jumpout_site(from_node)
                return
        self._function_map[function_addr]._transit_to(from_node, to_node, outside=True, ins_addr=ins_addr,
                                                      stmt_idx=stmt_idx
                                                      )
        if to_function_addr is not None:
            # mark it on the callgraph
            edge_data = {'type': 'transition'}
            if function_addr not in self.callgraph or \
                    to_function_addr not in self.callgraph[function_addr] or \
                    edge_data not in self.callgraph[function_addr][to_function_addr].values():
                self.callgraph.add_edge(function_addr, to_function_addr, **edge_data)

    def _add_return_from_call(self, function_addr, src_function_addr, to_node, to_outside=False):
        """Record control flow returning into function_addr after a call."""
        # Note that you will never return to a syscall
        if type(to_node) in (int, long):  # pylint: disable=unidiomatic-typecheck
            to_node = self._kb._project.factory.snippet(to_node)
        func = self._function_map[function_addr]
        src_func = self._function_map[src_function_addr]
        func._return_from_call(src_func, to_node, to_outside=to_outside)

    #
    # Dict methods
    #

    def __getitem__(self, k):
        if isinstance(k, (int, long)):
            f = self.function(addr=k)
        elif isinstance(k, str):
            f = self.function(name=k)
        else:
            raise ValueError("FunctionManager.__getitem__ does not support keys of type %s" % type(k))
        if f is None:
            raise KeyError(k)
        return f

    def __setitem__(self, k, v):
        if isinstance(k, (int, long)):
            self._function_map[k] = v
        else:
            raise ValueError("FunctionManager.__setitem__ keys must be an int")

    def __delitem__(self, k):
        if isinstance(k, (int, long)):
            del self._function_map[k]
            if k in self.callgraph:
                self.callgraph.remove_node(k)
        else:
            raise ValueError("FunctionManager.__delitem__ only accepts int as key")

    def __len__(self):
        return len(self._function_map)

    def __iter__(self):
        # Iterate function addresses in ascending order.
        for i in sorted(self._function_map.iterkeys()):
            yield i

    def contains_addr(self, addr):
        """
        Decide if an address is handled by the function manager.
        Note: this function is non-conformant with python programming idioms, but its needed for performance reasons.
        :param int addr: Address of the function.
        """
        return addr in self._function_map

    def ceiling_func(self, addr):
        """
        Return the function who has the least address that is greater than or equal to `addr`.
        :param int addr: The address to query.
        :return: A Function instance, or None if there is no other function after `addr`.
        :rtype: Function or None
        """
        try:
            next_addr = self._function_map.ceiling_addr(addr)
            return self._function_map[next_addr]
        except KeyError:
            return None

    def floor_func(self, addr):
        """
        Return the function who has the greatest address that is less than or equal to `addr`.
        :param int addr: The address to query.
        :return: A Function instance, or None if there is no other function before `addr`.
        :rtype: Function or None
        """
        try:
            prev_addr = self._function_map.floor_addr(addr)
            return self._function_map[prev_addr]
        except KeyError:
            return None

    def function(self, addr=None, name=None, create=False, syscall=False, plt=None):
        """
        Get a function object from the function manager.
        Pass either `addr` or `name` with the appropriate values.
        :param int addr: Address of the function.
        :param str name: Name of the function.
        :param bool create: Whether to create the function or not if the function does not exist.
        :param bool syscall: True to create the function as a syscall, False otherwise.
        :param bool or None plt: True to find the PLT stub, False to find a non-PLT stub, None to disable this
                                 restriction.
        :return: The Function instance, or None if the function is not found and create is False.
        :rtype: Function or None
        """
        if addr is not None:
            if addr in self._function_map:
                f = self._function_map[addr]
                if plt is None or f.is_plt == plt:
                    return f
            elif create:
                # the function is not found; indexing the FunctionDict
                # creates it on demand via __missing__
                f = self._function_map[addr]
                if syscall:
                    f.is_syscall = True
                return f
        elif name is not None:
            for func in self._function_map.itervalues():
                if func.name == name:
                    if plt is None or func.is_plt == plt:
                        return func
        return None

    def dbg_draw(self, prefix='dbg_function_'):
        """Render every known function's graph to a PNG file."""
        for func_addr, func in self._function_map.iteritems():
            filename = "%s%#08x.png" % (prefix, func_addr)
            func.dbg_draw(filename)


KnowledgeBasePlugin.register_default('functions', FunctionManager)
| |
import math, sys
import numpy as np
import pandas as pd
import brewer2mpl
from pylab import figure, plot
from scipy.stats.kde import gaussian_kde
import matplotlib.pyplot as plt
from django.http import HttpResponse
from django.core.exceptions import ImproperlyConfigured, FieldError
from django.db.models.loading import get_model
from django.shortcuts import get_object_or_404
from django.views.generic.list import ListView
from cuff.models import Experiment
from plot.ggstyle import rstyle
class PlotMixin(object):
    '''
    Mixin that adds matplotlib plotting to a view. Intended to be combined
    with ListView or DetailView subclasses, which supply the data to plot
    from the database.
    '''
    # Image format of the rendered plot (e.g. 'png'); subclasses must set it.
    format = None

    def make_plot(self):
        '''
        Build and return the matplotlib figure. Subclasses must implement.
        '''
        pass

    def style_plot(self, axes):
        '''
        Hook for styling the plot (xkcd, ggplot2, etc.). No-op by default.
        '''
        pass

    def get_response_content_type(self):
        '''
        Return the MIME type matching ``self.format``.
        '''
        if self.format is None:
            raise ImproperlyConfigured('No format is specified for the plot')
        return 'image/{0}'.format(self.format)

    def render_to_plot(self, context, **response_kwargs):
        '''
        Render the figure straight into an HttpResponse body.
        '''
        response = HttpResponse(
            content_type=self.get_response_content_type()
        )
        figure = self.make_plot()
        figure.savefig(response, format=self.format)
        return response
class QuerysetPlotView(ListView, PlotMixin):
    '''
    Base view that renders a plot built from a model queryset.
    '''
    # Model fields pulled into the dataframe; subclasses must define this.
    data_fields = None

    def _get_model_from_track(self):
        '''Resolve the cuff model named by the ``track`` URL kwarg.'''
        return get_model('cuff', self.kwargs['track'])

    def get_queryset(self):
        '''Restrict the queryset to the experiment given by ``exp_pk``.'''
        self.model = self._get_model_from_track()
        exp_pk = int(self.kwargs.get('exp_pk', ''))
        self.exp = get_object_or_404(Experiment, pk=exp_pk)
        return self.model._default_manager.for_exp(self.exp)

    def get_dataframe(self, sample=None):
        '''
        Build a pandas dataframe from the ``data_fields`` of the current
        object list, optionally restricted to a single ``sample``.
        '''
        wanted = [name for name in self.model._meta.get_all_field_names()
                  if name in self.data_fields]
        qs = self.object_list if sample is None \
            else self.object_list.filter(sample=sample)
        return pd.DataFrame.from_records(qs.values(*wanted))

    def render_to_response(self, context, **response_kwargs):
        '''Serve the rendered figure instead of an HTML template.'''
        return self.render_to_plot(context, **response_kwargs)
class BarPlotView(QuerysetPlotView):
    '''
    Grouped bar plot of FPKM values (with confidence-interval error bars)
    for a user-selected set of genes, one bar group per sample.
    '''
    data_fields = ['fpkm', 'sample', 'conf_hi', 'conf_lo',]
    format = 'png'
    # Primary keys of the records to plot, parsed from the ``genes`` GET
    # parameter; empty/None means "plot everything".
    pk_list = None

    def _get_model_from_track(self):
        track = self.kwargs['track']
        return get_model('cuff', '{0}Data'.format(track))

    def _get_gene_short_names(self):
        '''Return gene short names; the relation depth depends on the track.'''
        gene_name_field = 'gene__gene_short_name'
        track = self.kwargs['track'].lower()
        if track == 'tss':
            gene_name_field = 'tss_group__{0}'.format(gene_name_field)
        elif track != 'gene':
            gene_name_field = '{0}__{1}'.format(track, gene_name_field)
        return self.object_list.values_list(gene_name_field, flat=True)

    def get_queryset(self):
        qs = super(BarPlotView, self).get_queryset()
        if self.pk_list:
            return qs.filter(pk__in=self.pk_list)
        else:
            return qs

    def get(self, request, *args, **kwargs):
        # A missing or empty ``genes`` parameter used to crash with
        # AttributeError (None.split) or ValueError (int('')); treat it as
        # "no filter" instead so the unfiltered queryset is plotted.
        gene_pks = request.GET.get('genes', '')
        self.pk_list = [int(x) for x in gene_pks.split(',') if x.strip()]
        return super(BarPlotView, self).get(request, *args, **kwargs)

    def make_plot(self):
        '''Build the grouped bar chart, one bar cluster per gene.'''
        fig = plt.figure()
        fig.patch.set_alpha(0)
        ax = fig.add_subplot(111)
        ax.set_xlabel('Genes')
        ax.set_ylabel('FPKM')
        ax.title.set_fontsize(18)
        cmap = brewer2mpl.get_map('Set2', 'qualitative', 8).mpl_colors
        gene_names = self._get_gene_short_names()
        num_exp = self.exp.sample_set.count()
        bar_width = 1. / num_exp
        # We plot separately for each sample in the experiment
        for i, sample in enumerate(self.exp.sample_set.all()):
            df = self.get_dataframe(sample=sample)
            index = np.arange(df.shape[0])
            # Asymmetric error bars from the confidence interval bounds.
            ax.bar(index + i*bar_width, df['fpkm'],
                   width=bar_width,
                   yerr=[df['fpkm']-df['conf_lo'], df['conf_hi']-df['fpkm']],
                   error_kw={'ecolor': cmap[i],},
                   label=sample.sample_name,
                   color=cmap[i],
                   edgecolor=cmap[i],
                   alpha=0.45)
        # One tick per gene; names repeat once per sample, so take every
        # num_exp-th entry.
        plt.xticks(np.arange(self.object_list.count() // num_exp),
                   [name for i, name in enumerate(gene_names) if i % num_exp == 0])
        plt.legend()
        plt.tight_layout()
        rstyle(ax)
        return fig
class VolcanoPlotView(QuerysetPlotView):
    '''
    Volcano plot of differential results: effect size on the x axis against
    -log10(p value) on the y axis, with points split into significant and
    not-significant groups at the ``alpha`` q-value cutoff.
    '''
    data_fields = ['log2_fold_change', 'js_dist', 'p_value', 'q_value']
    format = 'png'
    alpha = 0.05
    # Tracks whose diff tables report a JS distance instead of a fold change.
    _DIST_TRACKS = ('splicing', 'promoter', 'relcds')

    def _get_model_from_track(self):
        '''Resolve the diff-data model for the requested track.'''
        track = self.kwargs['track']
        suffix = 'DiffData' if track in self._DIST_TRACKS else 'ExpDiffData'
        return get_model('cuff', '{0}{1}'.format(track, suffix))

    def make_plot(self):
        '''Build and return the matplotlib figure for the volcano plot.'''
        if self.kwargs['track'] in self._DIST_TRACKS:
            x_field = 'js_dist'
        else:
            x_field = 'log2_fold_change'
        frame = self.get_dataframe()
        frame = frame[frame['p_value'] > 0]
        frame['p_value'] = -frame['p_value'].map(math.log10)
        # This is somewhat arbitrary: drop pathologically large magnitudes.
        bound = sys.float_info.max * 0.1
        frame = frame[frame[x_field] < bound]
        frame = frame[frame[x_field] > -bound]
        fig = plt.figure()
        fig.patch.set_alpha(0)
        axes = fig.add_subplot(111)
        axes.set_xlabel('log$_{2}$(fold change)')
        axes.set_ylabel('-log$_{10}$(p value)')
        axes.title.set_fontsize(18)
        significant = frame[frame['q_value'] <= self.alpha]
        rest = frame[frame['q_value'] > self.alpha]
        axes.plot(significant[x_field], significant['p_value'], 'o',
                  color='#cb4b16',
                  label='significant',
                  alpha=0.45)
        axes.plot(rest[x_field], rest['p_value'], 'o',
                  color='#268bd2',
                  label='not significant',
                  alpha=0.2)
        axes.legend()
        rstyle(axes)
        return fig
class DensityPlotView(QuerysetPlotView):
    '''
    Kernel-density plot of log10(FPKM), one curve per sample in the
    experiment.
    '''
    data_fields = ['fpkm',]
    format = 'png'

    def _get_model_from_track(self):
        return get_model('cuff', '{0}Data'.format(self.kwargs['track']))

    def make_plot(self):
        '''Build the KDE figure over log10(FPKM) for each sample.'''
        c_map = ['#268bd2', '#cb4b16',]
        fig = plt.figure()
        fig.patch.set_alpha(0)
        ax = fig.add_subplot(111)
        ax.set_xlabel('log$_{10}$(FPKM)')
        ax.set_ylabel('Density')
        ax.title.set_fontsize(18)
        for i, sample in enumerate(self.exp.sample_set.all()):
            # Cycle through the palette so experiments with more than
            # len(c_map) samples no longer raise IndexError.
            color = c_map[i % len(c_map)]
            df = self.get_dataframe(sample)[self.data_fields[0]]
            # Drop zero/negative FPKMs before taking logs.
            df = df[df > 0]
            df = df.map(math.log10)
            base = np.linspace(min(df), max(df), 200)
            kde = gaussian_kde(df)
            kde_pdf = kde.evaluate(base)
            ax.plot(base, kde_pdf,
                    color=color,
                    label=sample.sample_name,
                    alpha=0.8)
            ax.fill_between(base, kde_pdf, color=color, alpha=0.4)
        ax.legend()
        rstyle(ax)
        return fig
class DispersionPlotView(QuerysetPlotView):
    '''
    Log-log scatter of count vs. dispersion, one subplot per sample.
    '''
    data_fields = ['count', 'dispersion',]
    format = 'png'

    def _get_model_from_track(self):
        return get_model('cuff', '{0}Count'.format(self.kwargs['track']))

    def get_queryset(self):
        qs = super(DispersionPlotView, self).get_queryset()
        # Cache sample metadata for make_plot.
        self.num_samples = self.exp.sample_set.count()
        self.sample_names = self.exp.sample_set.values_list('sample_name', flat=True)
        return qs

    def make_plot(self):
        '''Build one row of equal-aspect log-log scatter subplots.'''
        fig = plt.figure(figsize=(10, 5))
        fig.patch.set_alpha(0)
        for i, sample in enumerate(self.exp.sample_set.all()):
            df = self.get_dataframe(sample)
            # matplotlib subplot numbering is 1-based; passing the raw
            # enumerate index (starting at 0) raised ValueError on the first
            # sample, so shift by one.
            ax = fig.add_subplot(1, self.num_samples, i + 1,
                                 adjustable='datalim', aspect='equal')
            # +1 keeps zero counts/dispersions plottable on log axes.
            ax.plot(df['count']+1, df['dispersion']+1, 'o', color='#268bd2', alpha=0.2)
            ax.set_title(sample.sample_name)
            ax.set_xlabel('Count')
            ax.set_ylabel('Dispersion')
            ax.set_xscale('log')
            ax.set_yscale('log')
            rstyle(ax)
        return fig
| |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Create the UniqueEntityView and EntityViewsCount tables."""
    # Adding model 'UniqueEntityView'
    db.create_table('website_uniqueentityview', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('entity_name', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=32, null=True, blank=True)),
        ('entity_id', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
        ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
        ('session_key', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=40, null=True, blank=True)),
        ('latest_datetime', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
    ))
    db.send_create_signal('website', ['UniqueEntityView'])
    # Adding model 'EntityViewsCount'
    db.create_table('website_entityviewscount', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('entity_name', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=32, null=True, blank=True)),
        ('entity_id', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
        ('count', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
    ))
    db.send_create_signal('website', ['EntityViewsCount'])
def backwards(self, orm):
    """Reverse the migration: drop both view-tracking tables.

    Deletes 'UniqueEntityView' then 'EntityViewsCount' — the same
    order the original migration used.
    """
    for table_name in ('website_uniqueentityview',
                       'website_entityviewscount'):
        db.delete_table(table_name)
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'website.action': {
'Meta': {'object_name': 'Action'},
'action_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.ActionCategory']", 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'question_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.actioncategory': {
'Meta': {'object_name': 'ActionCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'})
},
'website.actiontutorial': {
'Meta': {'object_name': 'ActionTutorial'},
'action_identifier': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Tutorial']", 'null': 'True', 'blank': 'True'})
},
'website.address': {
'Meta': {'object_name': 'Address'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'website.answerchoice': {
'Meta': {'object_name': 'AnswerChoice'},
'answer_choice_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.AnswerChoiceGroup']"}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'website.answerchoicegroup': {
'Meta': {'object_name': 'AnswerChoiceGroup'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'website.answerreference': {
'Meta': {'object_name': 'AnswerReference'},
'approval_status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'file_upload': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_callout': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'website.applicability': {
'Meta': {'object_name': 'Applicability'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'website.application': {
'Meta': {'object_name': 'Application'},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Address']", 'null': 'True', 'blank': 'True'}),
'applicant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'current_status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'status_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Template']", 'null': 'True', 'blank': 'True'})
},
'website.applicationanswer': {
'Meta': {'object_name': 'ApplicationAnswer'},
'application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Application']"}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'file_upload': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Template']"}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'website.applicationhistory': {
'Meta': {'object_name': 'ApplicationHistory'},
'application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Application']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'null': 'True', 'blank': 'True'}),
'status_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'status_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'})
},
'website.comment': {
'Meta': {'object_name': 'Comment'},
'approval_status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'comment_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.document': {
'Meta': {'object_name': 'Document'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'file_path': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Region']", 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'website.documentcategory': {
'Meta': {'object_name': 'DocumentCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.entityviewscount': {
'Meta': {'object_name': 'EntityViewsCount'},
'count': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'website.jurisdiction': {
'Meta': {'object_name': 'Jurisdiction'},
'city': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'county': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Region']", 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'null': 'True', 'blank': 'True'})
},
'website.jurisdictioncontributor': {
'Meta': {'object_name': 'JurisdictionContributor'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'question_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.organization': {
'Meta': {'object_name': 'Organization'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.OrganizationCategory']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'logo_scaled': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'parent_org': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'phone': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'A'", 'max_length': '8', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'status_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'website.organizationaddress': {
'Meta': {'object_name': 'OrganizationAddress'},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Address']", 'null': 'True', 'blank': 'True'}),
'address_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'})
},
'website.organizationcategory': {
'Meta': {'object_name': 'OrganizationCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.organizationmember': {
'Meta': {'object_name': 'OrganizationMember'},
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Person']", 'null': 'True', 'blank': 'True'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RoleType']", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '8', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.organizationrating': {
'Meta': {'object_name': 'OrganizationRating'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'updated_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'website.person': {
'Meta': {'object_name': 'Person'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'phone_primary': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'phone_secondary': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.personaddress': {
'Meta': {'object_name': 'PersonAddress'},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Address']", 'null': 'True', 'blank': 'True'}),
'address_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Person']", 'null': 'True', 'blank': 'True'})
},
'website.question': {
'Meta': {'object_name': 'Question'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'answer_choice_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.AnswerChoiceGroup']", 'null': 'True', 'blank': 'True'}),
'applicability': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Applicability']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'default_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'form_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instruction': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'website.questioncategory': {
'Meta': {'object_name': 'QuestionCategory'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'website.ratingcategory': {
'Meta': {'object_name': 'RatingCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'rating_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'})
},
'website.ratinglevel': {
'Meta': {'object_name': 'RatingLevel'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'rank': ('django.db.models.fields.PositiveSmallIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'website.reaction': {
'Meta': {'object_name': 'Reaction'},
'action': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Action']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.ReactionCategory']", 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'question_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'reaction_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.reactioncategory': {
'Meta': {'object_name': 'ReactionCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'})
},
'website.region': {
'Meta': {'object_name': 'Region'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'null': 'True', 'blank': 'True'})
},
'website.rewardcategory': {
'Meta': {'object_name': 'RewardCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.roletype': {
'Meta': {'object_name': 'RoleType'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.template': {
'Meta': {'object_name': 'Template'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '8', 'db_index': 'True', 'blank': 'True'})
},
'website.templatequestion': {
'Meta': {'object_name': 'TemplateQuestion'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Template']"})
},
'website.tutorial': {
'Meta': {'object_name': 'Tutorial'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'start_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'website.tutorialpage': {
'Meta': {'object_name': 'TutorialPage'},
'display_order': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'selector': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'tip': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Tutorial']", 'null': 'True', 'blank': 'True'})
},
'website.uniqueentityview': {
'Meta': {'object_name': 'UniqueEntityView'},
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'session_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.userdetail': {
'Meta': {'object_name': 'UserDetail'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.userrating': {
'Meta': {'object_name': 'UserRating'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'updated_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.userreward': {
'Meta': {'object_name': 'UserReward'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reward': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RewardCategory']", 'null': 'True', 'blank': 'True'}),
'reward_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.usertutorialhistory': {
'Meta': {'object_name': 'UserTutorialHistory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Tutorial']", 'null': 'True', 'blank': 'True'}),
'user_email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'view_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'website.usertutorialpagehistory': {
'Meta': {'object_name': 'UserTutorialPageHistory'},
'checked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.TutorialPage']", 'null': 'True', 'blank': 'True'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Tutorial']", 'null': 'True', 'blank': 'True'}),
'user_email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'})
},
'website.zipcode': {
'Meta': {'object_name': 'Zipcode'},
'city': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'county': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '2', 'null': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '10', 'db_index': 'True'})
}
}
complete_apps = ['website']
| |
"""Test the Tradfri config flow."""
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.tradfri import config_flow
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.fixture
def mock_auth():
    """Mock authenticate."""
    # Patch the config flow's authenticate helper so each test can control
    # its return value or raised exception without any network traffic.
    with patch(
        "homeassistant.components.tradfri.config_flow.authenticate"
    ) as mock_auth:
        yield mock_auth
async def test_user_connection_successful(hass, mock_auth, mock_entry_setup):
    """Test a successful connection."""
    # Stub authenticate() to return the minimal payload the flow needs to
    # create a config entry.
    mock_auth.side_effect = lambda hass, host, code: {"host": host, "gateway_id": "bla"}
    flow = await hass.config_entries.flow.async_init(
        "tradfri", context={"source": "user"}
    )
    result = await hass.config_entries.flow.async_configure(
        flow["flow_id"], {"host": "123.123.123.123", "security_code": "abcd"}
    )
    # Successful auth must set up exactly one entry with the auth data.
    assert len(mock_entry_setup.mock_calls) == 1
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["result"].data == {
        "host": "123.123.123.123",
        "gateway_id": "bla",
        "import_groups": False,
    }
async def test_user_connection_timeout(hass, mock_auth, mock_entry_setup):
    """Test a connection timeout."""
    mock_auth.side_effect = config_flow.AuthError("timeout")
    flow = await hass.config_entries.flow.async_init(
        "tradfri", context={"source": "user"}
    )
    result = await hass.config_entries.flow.async_configure(
        flow["flow_id"], {"host": "127.0.0.1", "security_code": "abcd"}
    )
    # Timeout: no entry is created and the form is shown again with a
    # form-wide ("base") error.
    assert len(mock_entry_setup.mock_calls) == 0
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "timeout"}
async def test_user_connection_bad_key(hass, mock_auth, mock_entry_setup):
    """Test a connection with bad key."""
    mock_auth.side_effect = config_flow.AuthError("invalid_security_code")
    flow = await hass.config_entries.flow.async_init(
        "tradfri", context={"source": "user"}
    )
    result = await hass.config_entries.flow.async_configure(
        flow["flow_id"], {"host": "127.0.0.1", "security_code": "abcd"}
    )
    # Bad key: the error is attached to the security_code field specifically.
    assert len(mock_entry_setup.mock_calls) == 0
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"security_code": "invalid_security_code"}
async def test_discovery_connection(hass, mock_auth, mock_entry_setup):
    """Test a connection via discovery."""
    mock_auth.side_effect = lambda hass, host, code: {"host": host, "gateway_id": "bla"}
    # HomeKit discovery supplies host + homekit id; the user only enters
    # the security code.
    flow = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "homekit"},
        data={"host": "123.123.123.123", "properties": {"id": "homekit-id"}},
    )
    result = await hass.config_entries.flow.async_configure(
        flow["flow_id"], {"security_code": "abcd"}
    )
    assert len(mock_entry_setup.mock_calls) == 1
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # The homekit id becomes the config entry's unique_id.
    assert result["result"].unique_id == "homekit-id"
    assert result["result"].data == {
        "host": "123.123.123.123",
        "gateway_id": "bla",
        "import_groups": False,
    }
async def test_import_connection(hass, mock_auth, mock_entry_setup):
    """Test a connection via import."""
    mock_auth.side_effect = lambda hass, host, code: {
        "host": host,
        "gateway_id": "bla",
        "identity": "mock-iden",
        "key": "mock-key",
    }
    flow = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "import"},
        data={"host": "123.123.123.123", "import_groups": True},
    )
    result = await hass.config_entries.flow.async_configure(
        flow["flow_id"], {"security_code": "abcd"}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # import_groups from the YAML import must be preserved in the entry.
    assert result["result"].data == {
        "host": "123.123.123.123",
        "gateway_id": "bla",
        "identity": "mock-iden",
        "key": "mock-key",
        "import_groups": True,
    }
    assert len(mock_entry_setup.mock_calls) == 1
async def test_import_connection_no_groups(hass, mock_auth, mock_entry_setup):
    """Test a connection via import and no groups allowed."""
    mock_auth.side_effect = lambda hass, host, code: {
        "host": host,
        "gateway_id": "bla",
        "identity": "mock-iden",
        "key": "mock-key",
    }
    flow = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "import"},
        data={"host": "123.123.123.123", "import_groups": False},
    )
    result = await hass.config_entries.flow.async_configure(
        flow["flow_id"], {"security_code": "abcd"}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["result"].data == {
        "host": "123.123.123.123",
        "gateway_id": "bla",
        "identity": "mock-iden",
        "key": "mock-key",
        "import_groups": False,
    }
    assert len(mock_entry_setup.mock_calls) == 1
async def test_import_connection_legacy(hass, mock_gateway_info, mock_entry_setup):
    """Test a connection via import."""
    # Legacy configs already carry a pre-shared key, so the flow fetches
    # gateway info directly instead of running the authenticate step.
    mock_gateway_info.side_effect = lambda hass, host, identity, key: {
        "host": host,
        "identity": identity,
        "key": key,
        "gateway_id": "mock-gateway",
    }
    result = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "import"},
        data={"host": "123.123.123.123", "key": "mock-key", "import_groups": True},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # The legacy identity defaults to "homeassistant".
    assert result["result"].data == {
        "host": "123.123.123.123",
        "gateway_id": "mock-gateway",
        "identity": "homeassistant",
        "key": "mock-key",
        "import_groups": True,
    }
    assert len(mock_gateway_info.mock_calls) == 1
    assert len(mock_entry_setup.mock_calls) == 1
async def test_import_connection_legacy_no_groups(
    hass, mock_gateway_info, mock_entry_setup
):
    """Test a connection via legacy import and no groups allowed."""
    mock_gateway_info.side_effect = lambda hass, host, identity, key: {
        "host": host,
        "identity": identity,
        "key": key,
        "gateway_id": "mock-gateway",
    }
    result = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "import"},
        data={"host": "123.123.123.123", "key": "mock-key", "import_groups": False},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["result"].data == {
        "host": "123.123.123.123",
        "gateway_id": "mock-gateway",
        "identity": "homeassistant",
        "key": "mock-key",
        "import_groups": False,
    }
    assert len(mock_gateway_info.mock_calls) == 1
    assert len(mock_entry_setup.mock_calls) == 1
async def test_discovery_duplicate_aborted(hass):
    """Test a duplicate discovery host aborts and updates existing entry."""
    entry = MockConfigEntry(
        domain="tradfri", data={"host": "some-host"}, unique_id="homekit-id"
    )
    entry.add_to_hass(hass)
    flow = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "homekit"},
        data={"host": "new-host", "properties": {"id": "homekit-id"}},
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert flow["reason"] == "already_configured"
    # The existing entry is updated in place with the newly discovered host.
    assert entry.data["host"] == "new-host"
async def test_import_duplicate_aborted(hass):
    """Test a duplicate import host is ignored."""
    MockConfigEntry(domain="tradfri", data={"host": "some-host"}).add_to_hass(hass)
    flow = await hass.config_entries.flow.async_init(
        "tradfri", context={"source": "import"}, data={"host": "some-host"}
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert flow["reason"] == "already_configured"
async def test_duplicate_discovery(hass, mock_auth, mock_entry_setup):
    """Test a duplicate discovery in progress is ignored."""
    result = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "homekit"},
        data={"host": "123.123.123.123", "properties": {"id": "homekit-id"}},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    # A second discovery for the same gateway while the first flow is still
    # open must abort instead of opening a parallel flow.
    result2 = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "homekit"},
        data={"host": "123.123.123.123", "properties": {"id": "homekit-id"}},
    )
    assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_discovery_updates_unique_id(hass):
    """Test a duplicate discovery host aborts and updates existing entry."""
    entry = MockConfigEntry(domain="tradfri", data={"host": "some-host"},)
    entry.add_to_hass(hass)
    flow = await hass.config_entries.flow.async_init(
        "tradfri",
        context={"source": "homekit"},
        data={"host": "some-host", "properties": {"id": "homekit-id"}},
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert flow["reason"] == "already_configured"
    # Discovery backfills the unique_id on an entry created without one.
    assert entry.unique_id == "homekit-id"
| |
"""Advanced timeout handling.
Set of helper classes to handle timeouts of tasks with advanced options
like zones and freezing of timeouts.
"""
from __future__ import annotations
import asyncio
import enum
from types import TracebackType
from typing import Any
from .async_ import run_callback_threadsafe
ZONE_GLOBAL = "global"
class _State(str, enum.Enum):
    """States of a task.

    Lifecycle: INIT -> ACTIVE -> (TIMEOUT ->) EXIT. Inherits from ``str``
    so members compare equal to their plain-string values.
    """
    INIT = "INIT"
    ACTIVE = "ACTIVE"
    TIMEOUT = "TIMEOUT"
    EXIT = "EXIT"
class _GlobalFreezeContext:
"""Context manager that freezes the global timeout."""
def __init__(self, manager: TimeoutManager) -> None:
"""Initialize internal timeout context manager."""
self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
self._manager: TimeoutManager = manager
async def __aenter__(self) -> _GlobalFreezeContext:
self._enter()
return self
async def __aexit__(
self,
exc_type: type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> bool | None:
self._exit()
return None
def __enter__(self) -> _GlobalFreezeContext:
self._loop.call_soon_threadsafe(self._enter)
return self
def __exit__( # pylint: disable=useless-return
self,
exc_type: type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> bool | None:
self._loop.call_soon_threadsafe(self._exit)
return None
def _enter(self) -> None:
"""Run freeze."""
if not self._manager.freezes_done:
return
# Global reset
for task in self._manager.global_tasks:
task.pause()
# Zones reset
for zone in self._manager.zones.values():
if not zone.freezes_done:
continue
zone.pause()
self._manager.global_freezes.append(self)
def _exit(self) -> None:
"""Finish freeze."""
self._manager.global_freezes.remove(self)
if not self._manager.freezes_done:
return
# Global reset
for task in self._manager.global_tasks:
task.reset()
# Zones reset
for zone in self._manager.zones.values():
if not zone.freezes_done:
continue
zone.reset()
class _ZoneFreezeContext:
    """Context manager that freezes a zone timeout."""
    def __init__(self, zone: _ZoneTimeoutManager) -> None:
        """Initialize internal timeout context manager."""
        self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
        self._zone: _ZoneTimeoutManager = zone
    async def __aenter__(self) -> _ZoneFreezeContext:
        self._enter()
        return self
    async def __aexit__(
        self,
        exc_type: type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> bool | None:
        self._exit()
        return None
    def __enter__(self) -> _ZoneFreezeContext:
        # Sync entry happens from a worker thread: hop onto the event loop.
        self._loop.call_soon_threadsafe(self._enter)
        return self
    def __exit__(  # pylint: disable=useless-return
        self,
        exc_type: type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> bool | None:
        self._loop.call_soon_threadsafe(self._exit)
        return None
    def _enter(self) -> None:
        """Run freeze."""
        # Only the first freeze of this zone pauses its timers; nested
        # freezes just register themselves.
        if self._zone.freezes_done:
            self._zone.pause()
        self._zone.enter_freeze(self)
    def _exit(self) -> None:
        """Finish freeze."""
        self._zone.exit_freeze(self)
        if not self._zone.freezes_done:
            return
        # Last freeze for this zone: restart its timers.
        self._zone.reset()
class _GlobalTaskContext:
    """Context manager that tracks a global task."""
    def __init__(
        self,
        manager: TimeoutManager,
        task: asyncio.Task[Any],
        timeout: float,
        cool_down: float,
    ) -> None:
        """Initialize internal timeout context manager."""
        self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
        self._manager: TimeoutManager = manager
        self._task: asyncio.Task[Any] = task
        # Remaining time; recomputed in _stop_timer() so time spent frozen
        # does not count against the timeout.
        self._time_left: float = timeout
        self._expiration_time: float | None = None
        self._timeout_handler: asyncio.Handle | None = None
        # Set once all zones have finished (see zones_done_signal()).
        self._wait_zone: asyncio.Event = asyncio.Event()
        self._state: _State = _State.INIT
        # Extra grace period after zones finish before cancelling the task.
        self._cool_down: float = cool_down
    async def __aenter__(self) -> _GlobalTaskContext:
        self._manager.global_tasks.append(self)
        self._start_timer()
        self._state = _State.ACTIVE
        return self
    async def __aexit__(
        self,
        exc_type: type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> bool | None:
        self._stop_timer()
        self._manager.global_tasks.remove(self)
        # Timeout on exit: translate the cancellation we caused ourselves
        # into the TimeoutError callers expect.
        if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT:
            raise asyncio.TimeoutError
        self._state = _State.EXIT
        self._wait_zone.set()
        return None
    @property
    def state(self) -> _State:
        """Return state of the Global task."""
        return self._state
    def zones_done_signal(self) -> None:
        """Signal that all zones are done."""
        self._wait_zone.set()
    def _start_timer(self) -> None:
        """Start timeout handler."""
        if self._timeout_handler:
            return
        self._expiration_time = self._loop.time() + self._time_left
        self._timeout_handler = self._loop.call_at(
            self._expiration_time, self._on_timeout
        )
    def _stop_timer(self) -> None:
        """Stop zone timer."""
        if self._timeout_handler is None:
            return
        self._timeout_handler.cancel()
        self._timeout_handler = None
        # Calculate new timeout
        assert self._expiration_time
        self._time_left = self._expiration_time - self._loop.time()
    def _on_timeout(self) -> None:
        """Process timeout."""
        self._state = _State.TIMEOUT
        self._timeout_handler = None
        # Reset timer if zones are running
        if not self._manager.zones_done:
            asyncio.create_task(self._on_wait())
        else:
            self._cancel_task()
    def _cancel_task(self) -> None:
        """Cancel own task."""
        if self._task.done():
            return
        self._task.cancel()
    def pause(self) -> None:
        """Pause timers while it freeze."""
        self._stop_timer()
    def reset(self) -> None:
        """Reset timer after freeze."""
        self._start_timer()
    async def _on_wait(self) -> None:
        """Wait until zones are done."""
        await self._wait_zone.wait()
        await asyncio.sleep(self._cool_down)  # Allow context switch
        # Only cancel if we are still in the TIMEOUT state; the task may
        # have exited cleanly while we were waiting.
        if self.state != _State.TIMEOUT:
            return
        self._cancel_task()
class _ZoneTaskContext:
    """Context manager that tracks an active task for a zone."""
    def __init__(
        self,
        zone: _ZoneTimeoutManager,
        task: asyncio.Task[Any],
        timeout: float,
    ) -> None:
        """Initialize internal timeout context manager."""
        self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
        self._zone: _ZoneTimeoutManager = zone
        self._task: asyncio.Task[Any] = task
        self._state: _State = _State.INIT
        # Remaining time; recomputed in _stop_timer() so time spent frozen
        # does not count against the timeout.
        self._time_left: float = timeout
        self._expiration_time: float | None = None
        self._timeout_handler: asyncio.Handle | None = None
    @property
    def state(self) -> _State:
        """Return state of the Zone task."""
        return self._state
    async def __aenter__(self) -> _ZoneTaskContext:
        self._zone.enter_task(self)
        self._state = _State.ACTIVE
        # Zone is on freeze
        if self._zone.freezes_done:
            self._start_timer()
        return self
    async def __aexit__(
        self,
        exc_type: type[BaseException],
        exc_val: BaseException,
        exc_tb: TracebackType,
    ) -> bool | None:
        self._zone.exit_task(self)
        self._stop_timer()
        # Timeout on exit: translate the cancellation we caused ourselves
        # into the TimeoutError callers expect.
        if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT:
            raise asyncio.TimeoutError
        self._state = _State.EXIT
        return None
    def _start_timer(self) -> None:
        """Start timeout handler."""
        if self._timeout_handler:
            return
        self._expiration_time = self._loop.time() + self._time_left
        self._timeout_handler = self._loop.call_at(
            self._expiration_time, self._on_timeout
        )
    def _stop_timer(self) -> None:
        """Stop zone timer."""
        if self._timeout_handler is None:
            return
        self._timeout_handler.cancel()
        self._timeout_handler = None
        # Calculate new timeout
        assert self._expiration_time
        self._time_left = self._expiration_time - self._loop.time()
    def _on_timeout(self) -> None:
        """Process timeout."""
        self._state = _State.TIMEOUT
        self._timeout_handler = None
        # Timeout
        if self._task.done():
            return
        self._task.cancel()
    def pause(self) -> None:
        """Pause timers while it freeze."""
        self._stop_timer()
    def reset(self) -> None:
        """Reset timer after freeze."""
        self._start_timer()
class _ZoneTimeoutManager:
    """Manage the timeouts for a zone."""
    def __init__(self, manager: TimeoutManager, zone: str) -> None:
        """Initialize internal timeout context manager."""
        self._manager: TimeoutManager = manager
        self._zone: str = zone
        self._tasks: list[_ZoneTaskContext] = []
        self._freezes: list[_ZoneFreezeContext] = []
    def __repr__(self) -> str:
        """Representation of a zone."""
        return f"<{self.name}: {len(self._tasks)} / {len(self._freezes)}>"
    @property
    def name(self) -> str:
        """Return Zone name."""
        return self._zone
    @property
    def active(self) -> bool:
        """Return True if zone is active."""
        return len(self._tasks) > 0 or len(self._freezes) > 0
    @property
    def freezes_done(self) -> bool:
        """Return True if all freeze are done."""
        # Both this zone's own freezes and all global freezes must be done.
        return len(self._freezes) == 0 and self._manager.freezes_done
    def enter_task(self, task: _ZoneTaskContext) -> None:
        """Start into new Task."""
        self._tasks.append(task)
    def exit_task(self, task: _ZoneTaskContext) -> None:
        """Exit a running Task."""
        self._tasks.remove(task)
        # On latest listener
        if not self.active:
            self._manager.drop_zone(self.name)
    def enter_freeze(self, freeze: _ZoneFreezeContext) -> None:
        """Start into new freeze."""
        self._freezes.append(freeze)
    def exit_freeze(self, freeze: _ZoneFreezeContext) -> None:
        """Exit a running Freeze."""
        self._freezes.remove(freeze)
        # On latest listener
        if not self.active:
            self._manager.drop_zone(self.name)
    def pause(self) -> None:
        """Stop timers while it freeze."""
        if not self.active:
            return
        # Forward pause
        for task in self._tasks:
            task.pause()
    def reset(self) -> None:
        """Reset timer after freeze."""
        if not self.active:
            return
        # Forward reset
        for task in self._tasks:
            task.reset()
class TimeoutManager:
    """Class to manage timeouts over different zones.
    Manages both global and zone based timeouts.
    """
    def __init__(self) -> None:
        """Initialize TimeoutManager."""
        self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
        self._zones: dict[str, _ZoneTimeoutManager] = {}
        self._globals: list[_GlobalTaskContext] = []
        self._freezes: list[_GlobalFreezeContext] = []
    @property
    def zones_done(self) -> bool:
        """Return True if all zones are finished."""
        return not bool(self._zones)
    @property
    def freezes_done(self) -> bool:
        """Return True if all freezes are finished."""
        return not self._freezes
    @property
    def zones(self) -> dict[str, _ZoneTimeoutManager]:
        """Return all Zones."""
        return self._zones
    @property
    def global_tasks(self) -> list[_GlobalTaskContext]:
        """Return all global Tasks."""
        return self._globals
    @property
    def global_freezes(self) -> list[_GlobalFreezeContext]:
        """Return all global Freezes."""
        return self._freezes
    def drop_zone(self, zone_name: str) -> None:
        """Drop a zone out of scope."""
        self._zones.pop(zone_name, None)
        if self._zones:
            return
        # Signal Global task, all zones are done
        for task in self._globals:
            task.zones_done_signal()
    def async_timeout(
        self, timeout: float, zone_name: str = ZONE_GLOBAL, cool_down: float = 0
    ) -> _ZoneTaskContext | _GlobalTaskContext:
        """Timeout based on a zone.
        For using as Async Context Manager.
        """
        current_task: asyncio.Task[Any] | None = asyncio.current_task()
        assert current_task
        # Global Zone
        if zone_name == ZONE_GLOBAL:
            task = _GlobalTaskContext(self, current_task, timeout, cool_down)
            return task
        # Zone Handling
        if zone_name in self.zones:
            zone: _ZoneTimeoutManager = self.zones[zone_name]
        else:
            # Zones are created lazily on first use and drop themselves
            # again once their last task/freeze exits (see drop_zone).
            self.zones[zone_name] = zone = _ZoneTimeoutManager(self, zone_name)
        # Create Task
        return _ZoneTaskContext(zone, current_task, timeout)
    def async_freeze(
        self, zone_name: str = ZONE_GLOBAL
    ) -> _ZoneFreezeContext | _GlobalFreezeContext:
        """Freeze all timer until job is done.
        For using as Async Context Manager.
        """
        # Global Freeze
        if zone_name == ZONE_GLOBAL:
            return _GlobalFreezeContext(self)
        # Zone Freeze
        if zone_name in self.zones:
            zone: _ZoneTimeoutManager = self.zones[zone_name]
        else:
            self.zones[zone_name] = zone = _ZoneTimeoutManager(self, zone_name)
        return _ZoneFreezeContext(zone)
    def freeze(
        self, zone_name: str = ZONE_GLOBAL
    ) -> _ZoneFreezeContext | _GlobalFreezeContext:
        """Freeze all timer until job is done.
        For using as Context Manager.
        """
        # Sync variant for worker threads: schedule on the loop and wait.
        return run_callback_threadsafe(
            self._loop, self.async_freeze, zone_name
        ).result()
| |
import logging
import os
import socket
import sqlite3
from datetime import date, datetime, timedelta
from os.path import normpath, split
from uuid import uuid4
import icalendar
from atomicwrites import AtomicWriter
from dateutil.rrule import rrulestr
from dateutil.tz import tzlocal
from todoman import exceptions
logger = logging.getLogger(name=__name__)
# Initialize this only once
# We were doing this all over the place (even if unused!), so at least only do
# it once.
LOCAL_TIMEZONE = tzlocal()
class cached_property:  # noqa
    """Descriptor implementing a read-only, evaluate-once property.

    The wrapped method runs on first attribute access; its result is then
    stored in the instance's ``__dict__`` under the same name, so every
    later access bypasses this descriptor entirely. Only usable on class
    instances' methods.
    """
    def __init__(self, fget, doc=None):
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__
        self.__doc__ = doc or fget.__doc__
        self.fget = fget
    def __get__(self, obj, cls):
        # Accessed on the class itself: expose the descriptor object.
        if obj is None:
            return self
        value = self.fget(obj)
        # Cache the result; the instance attribute now shadows us.
        obj.__dict__[self.__name__] = value
        return value
class Todo:
"""
Represents a task/todo, and wrapps around icalendar.Todo.
All text attributes are always treated as text, and "" will be returned if
they are not defined.
Date attributes are treated as datetime objects, and None will be returned
if they are not defined.
All datetime objects have tzinfo, either the one defined in the file, or
the local system's one.
"""
def __init__(self, filename=None, mtime=None, new=False, list=None):
"""
Creates a new todo using `todo` as a source.
:param str filename: The name of the file for this todo. Defaults to
the <uid>.ics
:param mtime int: The last modified time for the file backing this
Todo.
:param bool new: Indicate that a new Todo is being created and should
be populated with default values.
:param List list: The list to which this Todo belongs.
"""
self.list = list
now = datetime.now(LOCAL_TIMEZONE)
self.uid = '{}@{}'.format(uuid4().hex, socket.gethostname())
self.list = list
if new:
self.created_at = now
else:
self.created_at = None
# Default values for supported fields
self.categories = []
self.completed_at = None
self.description = ''
self.dtstamp = now
self.due = None
self.id = None
self.last_modified = None
self.location = ''
self.percent_complete = 0
self.priority = 0
self.rrule = ''
self.sequence = 0
self.start = None
self.status = 'NEEDS-ACTION'
self.summary = ''
self.filename = filename or "{}.ics".format(self.uid)
self.related = []
if os.path.basename(self.filename) != self.filename:
raise ValueError(
'Must not be an absolute path: {}' .format(self.filename)
)
self.mtime = mtime or datetime.now()
def clone(self):
"""
Returns a clone of this todo
Returns a copy of this todo, which is almost identical, except that is
has a different UUID and filename.
"""
todo = Todo(new=True, list=self.list)
fields = (
Todo.STRING_FIELDS +
Todo.INT_FIELDS +
Todo.LIST_FIELDS +
Todo.DATETIME_FIELDS
)
fields.remove('uid')
for field in fields:
setattr(todo, field, getattr(self, field))
return todo
STRING_FIELDS = [
'description',
'location',
'status',
'summary',
'uid',
'rrule',
]
INT_FIELDS = [
'percent_complete',
'priority',
'sequence',
]
LIST_FIELDS = [
'categories',
]
DATETIME_FIELDS = [
'completed_at',
'created_at',
'dtstamp',
'start',
'due',
'last_modified',
]
RRULE_FIELDS = [
'rrule',
]
ALL_SUPPORTED_FIELDS = (
DATETIME_FIELDS +
INT_FIELDS +
LIST_FIELDS +
RRULE_FIELDS +
STRING_FIELDS
)
VALID_STATUSES = (
"CANCELLED",
"COMPLETED",
"IN-PROCESS",
"NEEDS-ACTION",
)
def __setattr__(self, name, value):
# Avoids accidentally setting a field to None when that's not a valid
# attribute.
if not value:
if name in Todo.RRULE_FIELDS:
return object.__setattr__(self, name, '')
if name in Todo.STRING_FIELDS:
return object.__setattr__(self, name, '')
if name in Todo.INT_FIELDS:
return object.__setattr__(self, name, 0)
if name in Todo.LIST_FIELDS:
return object.__setattr__(self, name, [])
return object.__setattr__(self, name, value)
@property
def is_completed(self):
return (
bool(self.completed_at) or
self.status in ('CANCELLED', 'COMPLETED')
)
@property
def is_recurring(self):
return bool(self.rrule)
def _apply_recurrence_to_dt(self, dt):
if not dt:
return None
recurrence = rrulestr(self.rrule, dtstart=dt)
# Nasty hack around: https://github.com/dateutil/dateutil/issues/341
try:
return recurrence.after(dt)
except TypeError:
tz = dt.tzinfo
dt = dt.replace(tzinfo=LOCAL_TIMEZONE)
recurrence = rrulestr(self.rrule, dtstart=dt)
return recurrence.after(dt).replace(tzinfo=tz)
def _create_next_instance(self):
copy = self.clone()
copy.due = self._apply_recurrence_to_dt(self.due)
copy.start = self._apply_recurrence_to_dt(self.start)
assert copy.uid != self.uid
# TODO: Push copy's alarms.
return copy
def complete(self):
"""
Immediately completes this todo
Immediately marks this todo as completed, sets the percentage to 100%
and the completed_at datetime to now.
If this todo belongs to a series, newly created todo are added to the
``related`` list.
"""
if self.is_recurring:
related = self._create_next_instance()
if related:
self.rrule = None
self.related.append(related)
self.completed_at = datetime.now(tz=LOCAL_TIMEZONE)
self.percent_complete = 100
self.status = 'COMPLETED'
    @cached_property
    def path(self):
        # Absolute path of the .ics file backing this todo. Cached: neither
        # the list nor the filename changes during this object's lifetime.
        return os.path.join(self.list.path, self.filename)
    def cancel(self):
        """Mark this todo as cancelled (a terminal, non-completed state)."""
        self.status = 'CANCELLED'
class VtodoWritter:
    """Writes a Todo as a VTODO (icalendar) component into an .ics file."""

    # Maps Todo attribute names to their VTODO property names.
    # (The original literal listed 'priority' twice; dict literals silently
    # collapse duplicate keys, so the entry appears here exactly once.)
    FIELD_MAP = {
        'summary': 'summary',
        'priority': 'priority',
        'sequence': 'sequence',
        'uid': 'uid',
        'categories': 'categories',
        'completed_at': 'completed',
        'description': 'description',
        'dtstamp': 'dtstamp',
        'start': 'dtstart',
        'due': 'due',
        'location': 'location',
        'percent_complete': 'percent-complete',
        'status': 'status',
        'created_at': 'created',
        'last_modified': 'last-modified',
        'rrule': 'rrule',
    }

    def __init__(self, todo):
        self.todo = todo

    def normalize_datetime(self, dt):
        '''
        Eliminate several differences between dates, times and datetimes which
        are hindering comparison:

        - Convert everything to datetime
        - Add missing timezones
        '''
        if isinstance(dt, date) and not isinstance(dt, datetime):
            dt = datetime(dt.year, dt.month, dt.day)
        if not dt.tzinfo:
            dt = dt.replace(tzinfo=LOCAL_TIMEZONE)
        return dt

    def serialize_field(self, name, value):
        """Convert one Todo attribute into its VTODO representation.

        The RRULE check must come first: 'rrule' is also listed in
        Todo.STRING_FIELDS, but needs vRecur parsing rather than plain text.
        """
        if name in Todo.RRULE_FIELDS:
            return icalendar.vRecur.from_ical(value)
        if name in Todo.DATETIME_FIELDS:
            return self.normalize_datetime(value)
        if name in Todo.LIST_FIELDS:
            return ','.join(value)
        if name in Todo.INT_FIELDS:
            return int(value)
        if name in Todo.STRING_FIELDS:
            return value

        raise Exception('Unknown field {} serialized.'.format(name))

    def set_field(self, name, value):
        # Drop any stale value first, then re-add only when non-empty, so
        # cleared fields disappear from the component entirely.
        self.vtodo.pop(name)
        if value:
            logger.debug("Setting field %s to %s.", name, value)
            self.vtodo.add(name, value)

    def serialize(self, original=None):
        """Serialize a Todo into a VTODO component (a new one, or in-place
        into ``original``)."""
        if not original:
            original = icalendar.Todo()
        self.vtodo = original

        for source, target in self.FIELD_MAP.items():
            self.vtodo.pop(target)
            if getattr(self.todo, source):
                self.set_field(
                    target,
                    self.serialize_field(source, getattr(self.todo, source)),
                )

        return self.vtodo

    def _read(self, path):
        """Return the first VTODO component found in the file at ``path``."""
        with open(path, 'rb') as f:
            cal = f.read()
            cal = icalendar.Calendar.from_ical(cal)
            for component in cal.walk('VTODO'):
                return component

    def write(self):
        """Persist the todo, rewriting its file if one already exists."""
        if os.path.exists(self.todo.path):
            self._write_existing(self.todo.path)
        else:
            self._write_new(self.todo.path)

        return self.vtodo

    def _write_existing(self, path):
        # Replace only the VTODO matching our uid; any sibling components in
        # the calendar file are preserved untouched.
        original = self._read(path)
        vtodo = self.serialize(original)

        with open(path, 'rb') as f:
            cal = icalendar.Calendar.from_ical(f.read())
            for index, component in enumerate(cal.subcomponents):
                if component.get('uid', None) == self.todo.uid:
                    cal.subcomponents[index] = vtodo

        with AtomicWriter(path, overwrite=True).open() as f:
            f.write(cal.to_ical().decode("UTF-8"))

    def _write_new(self, path):
        # Wrap the serialized VTODO in a fresh VCALENDAR and write atomically.
        vtodo = self.serialize()

        c = icalendar.Calendar()
        c.add_component(vtodo)

        with AtomicWriter(path).open() as f:
            c.add('prodid', 'io.barrera.todoman')
            c.add('version', '2.0')
            f.write(c.to_ical().decode("UTF-8"))

        return vtodo
class Cache:
    """
    Caches Todos for faster read and simpler querying interface

    The Cache class persists relevant[1] fields into an SQL database, which is
    only updated if the actual file has been modified. This greatly increases
    load times, but, more importantly, provides a simpler interface for
    filtering/querying/sorting.

    [1]: Relevant fields are those we show when listing todos, or those which
    may be used for filtering/sorting.
    """

    # Bump whenever the schema below changes; an outdated cache is dropped
    # and rebuilt from scratch (everything can be re-read from disk).
    SCHEMA_VERSION = 5

    def __init__(self, path):
        """Open (or create) the cache database at ``path``."""
        self.cache_path = str(path)
        os.makedirs(os.path.dirname(self.cache_path), exist_ok=True)

        self._conn = sqlite3.connect(self.cache_path)
        self._conn.row_factory = sqlite3.Row
        # Required for the ON DELETE CASCADE clauses below; sqlite ships
        # with foreign key enforcement disabled by default.
        self._conn.execute("PRAGMA foreign_keys = ON")

        self.create_tables()

    def save_to_disk(self):
        """Commit any pending changes to the database file."""
        self._conn.commit()

    def is_latest_version(self):
        """Checks if the cache DB schema is the latest version."""
        try:
            return self._conn.execute(
                'SELECT version FROM meta WHERE version = ?',
                (Cache.SCHEMA_VERSION,),
            ).fetchone()
        except sqlite3.OperationalError:
            # No meta table at all: brand new or pre-versioning cache.
            return False

    def create_tables(self):
        """(Re)create the schema, dropping outdated tables first."""
        if self.is_latest_version():
            return

        self._conn.executescript('''
            DROP TABLE IF EXISTS lists;
            DROP TABLE IF EXISTS files;
            DROP TABLE IF EXISTS todos;
        ''')

        self._conn.execute(
            'CREATE TABLE IF NOT EXISTS meta ("version" INT)'
        )

        self._conn.execute(
            'INSERT INTO meta (version) VALUES (?)',
            (Cache.SCHEMA_VERSION,),
        )

        self._conn.execute('''
            CREATE TABLE IF NOT EXISTS lists (
                "name" TEXT PRIMARY KEY,
                "path" TEXT,
                "colour" TEXT,
                CONSTRAINT path_unique UNIQUE (path)
            );
        ''')

        self._conn.execute('''
            CREATE TABLE IF NOT EXISTS files (
                "path" TEXT PRIMARY KEY,
                "list_name" TEXT,
                "mtime" INTEGER,
                CONSTRAINT path_unique UNIQUE (path),
                FOREIGN KEY(list_name) REFERENCES lists(name) ON DELETE CASCADE
            );
        ''')

        self._conn.execute('''
            CREATE TABLE IF NOT EXISTS todos (
                "file_path" TEXT,
                "id" INTEGER PRIMARY KEY,
                "uid" TEXT,
                "summary" TEXT,
                "due" INTEGER,
                "start" INTEGER,
                "priority" INTEGER,
                "created_at" INTEGER,
                "completed_at" INTEGER,
                "percent_complete" INTEGER,
                "dtstamp" INTEGER,
                "status" TEXT,
                "description" TEXT,
                "location" TEXT,
                "categories" TEXT,
                "sequence" INTEGER,
                "last_modified" INTEGER,
                "rrule" TEXT,
                FOREIGN KEY(file_path) REFERENCES files(path) ON DELETE CASCADE
            );
        ''')

    def clear(self):
        """Close the connection and delete the cache file entirely."""
        self._conn.close()
        os.remove(self.cache_path)
        self._conn = None

    def add_list(self, name, path, colour):
        """
        Inserts a new list into the cache.

        Returns the canonical name of the list for ``path`` (which may
        differ from ``name`` if a list for that path already exists).

        :raises exceptions.AlreadyExists: another path already uses ``name``.
        """
        result = self._conn.execute(
            'SELECT name FROM lists WHERE path = ?',
            (path,),
        ).fetchone()

        if result:
            return result['name']

        try:
            self._conn.execute(
                "INSERT INTO lists (name, path, colour) VALUES (?, ?, ?)",
                (name, path, colour,),
            )
        except sqlite3.IntegrityError as e:
            raise exceptions.AlreadyExists('list', name) from e

        # Recurse once so the caller always receives the stored value.
        return self.add_list(name, path, colour)

    def add_file(self, list_name, path, mtime):
        """Register a source file; raises AlreadyExists on a known path."""
        try:
            self._conn.execute('''
            INSERT INTO files (
                list_name,
                path,
                mtime
            ) VALUES (?, ?, ?);
            ''', (
                list_name,
                path,
                mtime,
            ))
        except sqlite3.IntegrityError as e:
            raise exceptions.AlreadyExists('file', list_name) from e

    def _serialize_datetime(self, todo, field):
        """Return ``field`` of a VTODO as a POSIX timestamp, or None."""
        dt = todo.decoded(field, None)
        if not dt:
            return None

        # All-day DATE values become midnight datetimes; naive values are
        # interpreted in the local timezone.
        if isinstance(dt, date) and not isinstance(dt, datetime):
            dt = datetime(dt.year, dt.month, dt.day)
        if not dt.tzinfo:
            dt = dt.replace(tzinfo=LOCAL_TIMEZONE)
        return dt.timestamp()

    def _serialize_rrule(self, todo, field):
        """Return the VTODO's recurrence rule in its text form, or None."""
        rrule = todo.get(field)
        if not rrule:
            return None
        return rrule.to_ical().decode()

    def add_vtodo(self, todo, file_path, id=None):
        """
        Adds a todo into the cache.

        :param icalendar.Todo todo: The icalendar component from which the
            cached fields are read.
        :param str file_path: Path of the file the component came from.
        :param int id: A row id to reuse (when re-caching an edited todo);
            auto-assigned when None.
        :returns: The row id of the inserted todo.
        """
        sql = '''
            INSERT INTO todos (
                {}
                file_path,
                uid,
                summary,
                due,
                start,
                priority,
                created_at,
                completed_at,
                percent_complete,
                dtstamp,
                status,
                description,
                location,
                categories,
                sequence,
                last_modified,
                rrule
            ) VALUES ({}?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            '''

        due = self._serialize_datetime(todo, 'due')
        start = self._serialize_datetime(todo, 'dtstart')

        if start and due:
            # A start at/after the due date carries no information; drop it.
            start = None if start >= due else start

        params = (
            file_path,
            todo.get('uid'),
            todo.get('summary'),
            due,
            start,
            todo.get('priority', 0) or None,
            self._serialize_datetime(todo, 'created'),
            self._serialize_datetime(todo, 'completed'),
            todo.get('percent-complete', None),
            self._serialize_datetime(todo, 'dtstamp'),
            todo.get('status', 'NEEDS-ACTION'),
            todo.get('description', None),
            todo.get('location', None),
            todo.get('categories', None),
            todo.get('sequence', 1),
            self._serialize_datetime(todo, 'last-modified'),
            self._serialize_rrule(todo, 'rrule'),
        )

        if id:
            params = (id,) + params
            sql = sql.format('id,\n', '?, ')
        else:
            sql = sql.format('', '')

        cursor = self._conn.cursor()
        try:
            cursor.execute(sql, params)
            rv = cursor.lastrowid
        finally:
            cursor.close()

        return rv

    def todos(self, lists=(), priority=None, location='', category='', grep='',
              sort=(), reverse=True, due=None, start=None, startable=False,
              status=('NEEDS-ACTION', 'IN-PROCESS',)):
        """
        Returns filtered cached todos, in a specified order.

        If no order is specified, todos are sorted by the following fields::

            completed_at
            -priority
            due
            -created_at

        :param list lists: Only return todos for these lists.
        :param str location: Only return todos with a location containing this
            string.
        :param str category: Only return todos with a category containing this
            string.
        :param str grep: Filter common fields with this substring.
        :param list sort: Order returned todos by these fields. Field names
            with a ``-`` prepended will be used to sort in reverse order.
        :param bool reverse: Reverse the order of the todos after sorting.
        :param int due: Return only todos due within ``due`` hours.
        :param str priority: Only return todos with priority at least as
            high as specified.
        :param tuple(bool, datetime) start: Return only todos before/after
            ``start`` date
        :param list(str) status: Return only todos with any of the given
            statuses.
        :return: A sorted, filtered list of todos.
        :rtype: generator
        """
        extra_where = []
        params = []

        if 'ANY' not in status:
            extra_where.append(
                'AND status IN ({})'.format(', '.join(['?'] * len(status)))
            )
            params.extend(s.upper() for s in status)
        if lists:
            # Accept both List instances and plain list names.
            lists = [x.name if isinstance(x, List) else x for x in lists]
            q = ', '.join(['?'] * len(lists))
            extra_where.append('AND files.list_name IN ({})'.format(q))
            params.extend(lists)
        if priority:
            # Lower numbers are higher priority; 0 means "unset".
            extra_where.append('AND PRIORITY > 0 AND PRIORITY <= ?')
            params.append('{}'.format(priority))
        if location:
            extra_where.append('AND location LIKE ?')
            params.append('%{}%'.format(location))
        if category:
            extra_where.append('AND categories LIKE ?')
            params.append('%{}%'.format(category))
        if grep:
            # # requires sqlite with pcre, which won't be available everywhere:
            # extra_where.append('AND summary REGEXP ?')
            # params.append(grep)
            extra_where.append('AND summary LIKE ?')
            params.append('%{}%'.format(grep))
        if due:
            max_due = (datetime.now() + timedelta(hours=due)).timestamp()
            extra_where.append('AND due IS NOT NULL AND due < ?')
            params.append(max_due)
        if start:
            is_before, dt = start
            dt = dt.timestamp()
            if is_before:
                extra_where.append('AND start <= ?')
                params.append(dt)
            else:
                extra_where.append('AND start >= ?')
                params.append(dt)
        if startable:
            extra_where.append('AND (start IS NULL OR start <= ?)')
            params.append(datetime.now().timestamp())
        if sort:
            order = []
            for s in sort:
                # '-' requests reverse order, but the whole ORDER BY is
                # flipped below when ``reverse`` (the default) is set, so
                # it maps to ASC here.
                if s.startswith('-'):
                    order.append(' {} ASC'.format(s[1:]))
                else:
                    order.append(' {} DESC'.format(s))
            order = ','.join(order)
        else:
            order = '''
                completed_at DESC,
                priority IS NOT NULL, priority DESC,
                due IS NOT NULL, due DESC,
                created_at ASC
            '''

        if not reverse:
            # Note the change in case to avoid swapping all of them. sqlite
            # doesn't care about casing anyway.
            order = order.replace(' DESC', ' asc').replace(' ASC', ' desc')

        query = '''
            SELECT todos.*, files.list_name, files.path
            FROM todos, files
            WHERE todos.file_path = files.path {}
            ORDER BY {}
        '''.format(' '.join(extra_where), order,)

        logger.debug(query)
        logger.debug(params)

        result = self._conn.execute(query, params)

        seen_paths = set()
        warned_paths = set()

        for row in result:
            todo = self._todo_from_db(row)
            path = row['path']

            if path in seen_paths and path not in warned_paths:
                logger.warning('Todo is in read-only mode because there are '
                               'multiple todos in %s', path)
                warned_paths.add(path)
            seen_paths.add(path)
            yield todo

    def _dt_from_db(self, dt):
        """Convert a stored POSIX timestamp back to a local datetime."""
        if dt:
            return datetime.fromtimestamp(dt, LOCAL_TIMEZONE)
        return None

    def _todo_from_db(self, row):
        """Rebuild a Todo instance from one cached row."""
        todo = Todo()
        todo.id = row['id']
        todo.uid = row['uid']
        todo.summary = row['summary']
        todo.due = self._dt_from_db(row['due'])
        todo.start = self._dt_from_db(row['start'])
        todo.priority = row['priority']
        todo.created_at = self._dt_from_db(row['created_at'])
        todo.completed_at = self._dt_from_db(row['completed_at'])
        todo.dtstamp = self._dt_from_db(row['dtstamp'])
        todo.percent_complete = row['percent_complete']
        todo.status = row['status']
        todo.description = row['description']
        todo.location = row['location']
        todo.sequence = row['sequence']
        # last_modified is stored as a timestamp like the other datetime
        # columns; previously the raw integer leaked through here.
        todo.last_modified = self._dt_from_db(row['last_modified'])
        # NOTE(review): row['categories'] is never restored onto the todo
        # here — verify whether that is intentional before relying on it.
        todo.list = self.lists_map[row['list_name']]
        todo.filename = os.path.basename(row['path'])
        todo.rrule = row['rrule']
        return todo

    def lists(self):
        """Yield a List object for every cached list."""
        result = self._conn.execute("SELECT * FROM lists")
        for row in result:
            yield List(
                name=row['name'],
                path=row['path'],
                colour=row['colour'],
            )

    @cached_property
    def lists_map(self):
        # Computed once per Cache instance; list rows don't change while
        # todos are being materialized.
        return {lst.name: lst for lst in self.lists()}

    def expire_lists(self, paths):
        """Drop cached lists whose path is no longer configured."""
        results = self._conn.execute("SELECT path, name from lists")
        for result in results:
            if result['path'] not in paths:
                self.delete_list(result['name'])

    def delete_list(self, name):
        # Cascades to the list's files and their todos (FKs are ON).
        self._conn.execute("DELETE FROM lists WHERE lists.name = ?", (name,))

    def todo(self, id, read_only=False):
        """Return the single todo with row id ``id``.

        :raises exceptions.NoSuchTodo: no such row.
        :raises exceptions.ReadOnlyTodo: ``read_only`` is False but the
            backing file holds several todos, so writing it back is unsafe.
        """
        # XXX: DON'T USE READ_ONLY
        result = self._conn.execute('''
            SELECT todos.*, files.list_name, files.path
            FROM todos, files
            WHERE files.path = todos.file_path
              AND todos.id = ?
        ''', (id,)
        ).fetchone()

        if not result:
            raise exceptions.NoSuchTodo(id)

        if not read_only:
            count = self._conn.execute('''
                SELECT count(id) AS c
                FROM files, todos
                WHERE todos.file_path = files.path
                  AND path=?
            ''', (result['path'],)
            ).fetchone()
            if count['c'] > 1:
                raise exceptions.ReadOnlyTodo(result['path'])

        return self._todo_from_db(result)

    def expire_files(self, paths_to_mtime):
        """Remove stale cache entries based on the given fresh data."""
        result = self._conn.execute("SELECT path, mtime FROM files")
        for row in result:
            path, mtime = row['path'], row['mtime']
            if paths_to_mtime.get(path, None) != mtime:
                self.expire_file(path)

    def expire_file(self, path):
        # Cascades to the file's todos.
        self._conn.execute("DELETE FROM files WHERE path = ?", (path,))
class List:
    """A single todo list, backed by one directory on disk."""

    def __init__(self, name, path, colour=None):
        self.name = name
        self.path = path
        self.colour = colour

    @staticmethod
    def colour_for_path(path):
        """Read the list colour from the directory's ``color`` file."""
        colour_file = os.path.join(path, 'color')
        try:
            with open(colour_file) as f:
                return f.read().strip()
        except (OSError, IOError):
            # Missing/unreadable file: the list simply has no colour.
            logger.debug('No colour for list %s', path)

    @staticmethod
    def name_for_path(path):
        """Read the display name, falling back to the directory name."""
        try:
            with open(os.path.join(path, 'displayname')) as f:
                return f.read().strip()
        except (OSError, IOError):
            return split(normpath(path))[1]

    def __eq__(self, other):
        # Lists are identified by name; anything else falls back to
        # default identity comparison.
        if not isinstance(other, List):
            return object.__eq__(self, other)
        return self.name == other.name

    def __str__(self):
        return self.name
class Database:
    """
    This class is essentially a wrapper around all the lists (which in turn,
    contain all the todos).

    Caching is abstracted inside this class, and is transparent to outside
    classes.
    """
    def __init__(self, paths, cache_path):
        self.cache = Cache(cache_path)
        self.paths = [str(path) for path in paths]
        self.update_cache()
    def update_cache(self):
        """Synchronise the cache with the .ics files found under ``paths``."""
        # Drop lists whose directories are no longer configured.
        self.cache.expire_lists(self.paths)
        paths_to_mtime = {}
        paths_to_list_name = {}
        # First pass: register each list and collect every .ics file's mtime.
        for path in self.paths:
            list_name = self.cache.add_list(
                List.name_for_path(path),
                path,
                List.colour_for_path(path),
            )
            for entry in os.listdir(path):
                if not entry.endswith('.ics'):
                    continue
                entry_path = os.path.join(path, entry)
                mtime = _getmtime(entry_path)
                paths_to_mtime[entry_path] = mtime
                paths_to_list_name[entry_path] = list_name
        # Evict entries for files that changed or disappeared since the
        # last run; unchanged files keep their cached todos.
        self.cache.expire_files(paths_to_mtime)
        for entry_path, mtime in paths_to_mtime.items():
            list_name = paths_to_list_name[entry_path]
            try:
                self.cache.add_file(list_name, entry_path, mtime)
            except exceptions.AlreadyExists:
                # Same path and mtime as before: cached todos still valid.
                logger.debug('File already in cache: %s', entry_path)
                continue
            try:
                with open(entry_path, 'rb') as f:
                    cal = f.read()
                cal = icalendar.Calendar.from_ical(cal)
                for component in cal.walk('VTODO'):
                    self.cache.add_vtodo(component, entry_path)
            except Exception as e:
                # Best-effort scan: one unreadable/corrupt file must not
                # abort caching of the others.
                logger.exception("Failed to read entry %s.", entry_path)
        self.cache.save_to_disk()
    def todos(self, **kwargs):
        """Delegates to ``Cache.todos``; see there for the filter options."""
        return self.cache.todos(**kwargs)
    def todo(self, id, **kwargs):
        return self.cache.todo(id, **kwargs)
    def lists(self):
        return self.cache.lists()
    def move(self, todo, new_list, from_list=None):
        """Move a todo's backing file from one list directory to another."""
        from_list = from_list or todo.list
        orig_path = os.path.join(from_list.path, todo.filename)
        dest_path = os.path.join(new_list.path, todo.filename)
        os.rename(orig_path, dest_path)
    def delete(self, todo):
        """Delete the todo's backing file from disk."""
        path = os.path.join(todo.list.path, todo.filename)
        os.remove(path)
    def flush(self):
        """Yield each completed todo, deleting its file right after; the
        (now stale) cache is discarded at the end."""
        for todo in self.todos(status=['ANY']):
            if todo.is_completed:
                yield todo
                self.delete(todo)
        self.cache.clear()
        self.cache = None
    def save(self, todo):
        """Write a todo (and, recursively, its related todos) to disk and
        refresh its cache entries."""
        for related in todo.related:
            self.save(related)
        todo.sequence += 1
        todo.last_modified = datetime.now(LOCAL_TIMEZONE)
        vtodo = VtodoWritter(todo).write()
        # Re-cache the freshly written file, keeping the todo's row id.
        self.cache.expire_file(todo.path)
        mtime = _getmtime(todo.path)
        self.cache.add_file(todo.list.name, todo.path, mtime)
        todo.id = self.cache.add_vtodo(vtodo, todo.path, todo.id)
        self.cache.save_to_disk()
def _getmtime(path):
stat = os.stat(path)
return getattr(stat, 'st_mtime_ns', stat.st_mtime)
| |
"""Weak reference support for Python.
This module is an implementation of PEP 205:
http://python.sourceforge.net/peps/pep-0205.html
"""
# Naming convention: Variables named "wr" are weak reference objects;
# they are called this instead of "ref" to avoid name collisions with
# the module-global ref() function imported from _weakref.
import UserDict
from _weakref import (
getweakrefcount,
getweakrefs,
ref,
proxy,
CallableProxyType,
ProxyType,
ReferenceType)
from exceptions import ReferenceError
ProxyTypes = (ProxyType, CallableProxyType)
__all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
"WeakKeyDictionary", "ReferenceType", "ProxyType",
"CallableProxyType", "ProxyTypes", "WeakValueDictionary"]
class WeakValueDictionary(UserDict.UserDict):
    """Mapping class that references values weakly.

    Entries in the dictionary will be discarded when no strong
    reference to the value exists anymore.
    """
    # We inherit the constructor without worrying about the input
    # dictionary; since it uses our .update() method, we get the right
    # checks (if the other dictionary is a WeakValueDictionary,
    # objects are unwrapped on the way out, and we always wrap on the
    # way in).
    def __init__(self, *args, **kw):
        UserDict.UserDict.__init__(self, *args, **kw)
        # Shared death callback: drops the entry whose value just died.
        # Holding only a weakref to self (via the default argument) avoids
        # a reference cycle that would keep this dict alive through its
        # own callback.
        def remove(wr, selfref=ref(self)):
            self = selfref()
            if self is not None:
                del self.data[wr.key]
        self._remove = remove
    def __getitem__(self, key):
        # Calling the stored weakref returns the referent, or None if dead.
        o = self.data[key]()
        if o is None:
            raise KeyError, key
        else:
            return o
    def __contains__(self, key):
        try:
            o = self.data[key]()
        except KeyError:
            return False
        # A dead reference counts as an absent key.
        return o is not None
    def has_key(self, key):
        # Python 2 alias of __contains__.
        try:
            o = self.data[key]()
        except KeyError:
            return False
        return o is not None
    def __repr__(self):
        return "<WeakValueDictionary at %s>" % id(self)
    def __setitem__(self, key, value):
        # KeyedRef carries the key so the shared callback knows what to
        # delete when the value dies.
        self.data[key] = KeyedRef(value, self._remove, key)
    def copy(self):
        # Only live entries are carried over into the copy.
        new = WeakValueDictionary()
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                new[key] = o
        return new
    def get(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            return default
        else:
            o = wr()
            if o is None:
                # The referent died but the callback hasn't pruned the
                # entry yet; report the default.
                return default
            else:
                return o
    def items(self):
        L = []
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                L.append((key, o))
        return L
    def iteritems(self):
        for wr in self.data.itervalues():
            value = wr()
            if value is not None:
                yield wr.key, value
    def iterkeys(self):
        return self.data.iterkeys()
    def __iter__(self):
        return self.data.iterkeys()
    def itervalues(self):
        for wr in self.data.itervalues():
            obj = wr()
            if obj is not None:
                yield obj
    def popitem(self):
        # Loop until a live entry turns up; dead ones are discarded.
        while 1:
            key, wr = self.data.popitem()
            o = wr()
            if o is not None:
                return key, o
    def pop(self, key, *args):
        try:
            o = self.data.pop(key)()
        except KeyError:
            if args:
                return args[0]
            raise
        if o is None:
            # Entry existed but its referent is dead: same as missing.
            raise KeyError, key
        else:
            return o
    def setdefault(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            self.data[key] = KeyedRef(default, self._remove, key)
            return default
        else:
            return wr()
    def update(self, dict=None, **kwargs):
        d = self.data
        if dict is not None:
            # Coerce non-mapping iterables of pairs into a plain dict.
            if not hasattr(dict, "items"):
                dict = type({})(dict)
            for key, o in dict.items():
                d[key] = KeyedRef(o, self._remove, key)
        if len(kwargs):
            self.update(kwargs)
    def values(self):
        L = []
        for wr in self.data.values():
            o = wr()
            if o is not None:
                L.append(o)
        return L
class KeyedRef(ref):
    """Specialized reference that includes a key corresponding to the value.

    This is used in the WeakValueDictionary to avoid having to create
    a function object for each key stored in the mapping. A shared
    callback object can use the 'key' attribute of a KeyedRef instead
    of getting a reference to the key from an enclosing scope.
    """
    __slots__ = "key",
    def __new__(type, ob, callback, key):
        # The key must be attached in __new__: the weak reference itself
        # is fully constructed here, before __init__ runs.
        self = ref.__new__(type, ob, callback)
        self.key = key
        return self
    def __init__(self, ob, callback, key):
        # 'key' was already stored by __new__; only forward the rest.
        super(KeyedRef,  self).__init__(ob, callback)
class WeakKeyDictionary(UserDict.UserDict):
    """ Mapping class that references keys weakly.

    Entries in the dictionary will be discarded when there is no
    longer a strong reference to the key. This can be used to
    associate additional data with an object owned by other parts of
    an application without adding attributes to those objects. This
    can be especially useful with objects that override attribute
    accesses.
    """
    def __init__(self, dict=None):
        self.data = {}
        # Shared death callback: removes the entry keyed by the dead ref.
        # The weakref to self (default argument) avoids a reference cycle
        # that would keep this dict alive through its own callback.
        def remove(k, selfref=ref(self)):
            self = selfref()
            if self is not None:
                del self.data[k]
        self._remove = remove
        if dict is not None: self.update(dict)
    def __delitem__(self, key):
        # A plain (callback-less) weakref hashes and compares equal to the
        # stored one, so it works for lookup and deletion.
        del self.data[ref(key)]
    def __getitem__(self, key):
        return self.data[ref(key)]
    def __repr__(self):
        return "<WeakKeyDictionary at %s>" % id(self)
    def __setitem__(self, key, value):
        self.data[ref(key, self._remove)] = value
    def copy(self):
        # Only entries whose key is still alive make it into the copy.
        new = WeakKeyDictionary()
        for key, value in self.data.items():
            o = key()
            if o is not None:
                new[o] = value
        return new
    def get(self, key, default=None):
        return self.data.get(ref(key),default)
    def has_key(self, key):
        # Keys that cannot be weakly referenced are simply not present.
        try:
            wr = ref(key)
        except TypeError:
            return 0
        return wr in self.data
    def __contains__(self, key):
        try:
            wr = ref(key)
        except TypeError:
            return 0
        return wr in self.data
    def items(self):
        L = []
        for key, value in self.data.items():
            o = key()
            if o is not None:
                L.append((o, value))
        return L
    def iteritems(self):
        for wr, value in self.data.iteritems():
            key = wr()
            if key is not None:
                yield key, value
    def iterkeys(self):
        for wr in self.data.iterkeys():
            obj = wr()
            if obj is not None:
                yield obj
    def __iter__(self):
        return self.iterkeys()
    def itervalues(self):
        return self.data.itervalues()
    def keys(self):
        L = []
        for wr in self.data.keys():
            o = wr()
            if o is not None:
                L.append(o)
        return L
    def popitem(self):
        # Loop until an entry with a live key turns up.
        while 1:
            key, value = self.data.popitem()
            o = key()
            if o is not None:
                return o, value
    def pop(self, key, *args):
        return self.data.pop(ref(key), *args)
    def setdefault(self, key, default=None):
        return self.data.setdefault(ref(key, self._remove),default)
    def update(self, dict=None, **kwargs):
        d = self.data
        if dict is not None:
            # Coerce non-mapping iterables of pairs into a plain dict.
            if not hasattr(dict, "items"):
                dict = type({})(dict)
            for key, value in dict.items():
                d[ref(key, self._remove)] = value
        if len(kwargs):
            self.update(kwargs)
| |
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is configman
#
# The Initial Developer of the Original Code is
# Mozilla Foundation
# Portions created by the Initial Developer are Copyright (C) 2011
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# K Lars Lohn, lars@mozilla.com
# Peter Bengtsson, peterbe@mozilla.com
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
"""This module implements a configuration value source from the commandline.
It uses getopt in its implementation. It is thought that this implementation
will be supplanted by the argparse implementation when using Python 2.7 or
greater.
This module declares that its ValueSource constructor implementation can
handle the getopt module or a list. If specified as the getopt module, the
constructor will fetch the source of argv from the configmanager that was
passed in. If specified as a list, the constructor will assume the list
represents the argv source."""
import getopt
import collections
from .. import dotdict
from .. import option
from .. import namespace
from ..config_exceptions import NotAnOptionError
from .. import converters as conv
from source_exceptions import ValueException, CantHandleTypeException
class GetOptFailureException(ValueException):
    """Raised when processing commandline options with getopt fails."""
# Source types this module's ValueSource constructor accepts: the getopt
# module itself (argv is then taken from the config manager), or a list
# serving directly as the argv source.
can_handle = (getopt,
              list, # a list of options to serve as the argv source
              )
class ValueSource(object):
    """The ValueSource implementation for the getopt module. This class will
    interpret an argv list of commandline arguments using getopt."""
    def __init__(self, source, the_config_manager=None):
        # 'source' is either the getopt module itself (argv then comes from
        # the config manager) or a concrete sequence to use as argv.
        if source is getopt:
            self.argv_source = the_config_manager.argv_source
        elif isinstance(source, collections.Sequence):
            self.argv_source = source
        else:
            raise CantHandleTypeException("don't know how to handle"
                                          " %s." % str(source))
    def get_values(self, config_manager, ignore_mismatches):
        """This is the black sheep of the crowd of ValueSource implementations.
        It needs to know ahead of time all of the parameters that it will need,
        but we cannot give it. We may not know all the parameters because
        not all classes may have been expanded yet. The two parameters allow
        this ValueSource implementation to know what the parameters have
        already been defined. The 'ignore_mismatches' parameter tells the
        implementation if it can or cannot ignore extraneous commandline
        options. The last time this function is called, it will be required
        to test for illegal commandline options and respond accordingly."""
        short_options_str, \
            long_options_list = self.getopt_create_opts(
                config_manager.option_definitions)
        try:
            if ignore_mismatches:
                fn = ValueSource.getopt_with_ignore
            else:
                fn = getopt.gnu_getopt
            # here getopt looks through the command line arguments and
            # consumes the defined switches. The things that are not
            # consumed are then offered as the 'args' variable of the
            # parent configuration_manager
            getopt_options, config_manager.args = fn(self.argv_source,
                                                     short_options_str,
                                                     long_options_list)
        except getopt.GetoptError, x:
            raise NotAnOptionError(str(x))
        command_line_values = dotdict.DotDict()
        for opt_name, opt_val in getopt_options:
            if opt_name.startswith('--'):
                # long form: strip the leading dashes to get the name
                name = opt_name[2:]
            else:
                # short form: map the single letter back to its full name
                name = self.find_name_with_short_form(opt_name[1:],
                                                      config_manager.option_definitions,
                                                      '')
                if not name:
                    raise NotAnOptionError('%s is not a valid short'
                                           ' form option' % opt_name[1:])
            option_ = config_manager._get_option(name)
            if option_.from_string_converter == conv.boolean_converter:
                # boolean switches take no argument; mere presence on the
                # commandline toggles the default
                command_line_values[name] = not option_.default
            else:
                command_line_values[name] = opt_val
        return command_line_values
    def getopt_create_opts(self, option_definitions):
        # Build the short-option string and long-option list in the forms
        # that the getopt functions expect.
        short_options_list = []
        long_options_list = []
        self.getopt_create_opts_recursive(option_definitions,
                                          "",
                                          short_options_list,
                                          long_options_list)
        short_options_str = ''.join(short_options_list)
        return short_options_str, long_options_list
    def getopt_create_opts_recursive(self, source,
                                     prefix,
                                     short_options_list,
                                     long_options_list):
        # Accumulates getopt specs into the two list arguments in place.
        # Namespaces recurse with a dotted prefix; Aggregations are skipped.
        for key, val in source.items():
            if isinstance(val, option.Option):
                boolean_option = type(val.default) == bool
                if val.short_form:
                    try:
                        if boolean_option:
                            if val.short_form not in short_options_list:
                                short_options_list.append(val.short_form)
                        else:
                            # trailing ':' tells getopt the switch takes an
                            # argument
                            short_with_parameter = "%s:" % val.short_form
                            if short_with_parameter not in short_options_list:
                                short_options_list.append(short_with_parameter)
                    except AttributeError:
                        pass
                if boolean_option:
                    long_options_list.append('%s%s' % (prefix, val.name))
                else:
                    # trailing '=' tells getopt the switch takes an argument
                    long_options_list.append('%s%s=' % (prefix, val.name))
            elif isinstance(val, option.Aggregation):
                pass # skip Aggregations they have nothing to do with getopt
            else: # Namespace case
                new_prefix = '%s%s.' % (prefix, key)
                self.getopt_create_opts_recursive(val,
                                                  new_prefix,
                                                  short_options_list,
                                                  long_options_list)
    #--------------------------------------------------------------------------
    @staticmethod
    def getopt_with_ignore(args, shortopts, longopts=[]):
        """my_getopt(args, options[, long_options]) -> opts, args

        This function works like gnu_getopt(), except that unknown parameters
        are ignored rather than raising an error.
        """
        opts = []
        prog_args = []
        if isinstance(longopts, str):
            longopts = [longopts]
        else:
            longopts = list(longopts)
        while args:
            if args[0] == '--':
                # a bare '--' ends option processing; the rest is left for
                # the program
                prog_args += args[1:]
                break
            if args[0][:2] == '--':
                try:
                    opts, args = getopt.do_longs(opts, args[0][2:],
                                                 longopts, args[1:])
                except getopt.GetoptError:
                    # unknown long option: keep it as a program argument
                    prog_args.append(args[0])
                    args = args[1:]
            elif args[0][:1] == '-':
                try:
                    opts, args = getopt.do_shorts(opts, args[0][1:], shortopts,
                                                  args[1:])
                except getopt.GetoptError:
                    # unknown short option: keep it as a program argument
                    prog_args.append(args[0])
                    args = args[1:]
            else:
                prog_args.append(args[0])
                args = args[1:]
        return opts, prog_args
    #--------------------------------------------------------------------------
    def find_name_with_short_form(self, short_name, source, prefix):
        # Depth-first search of the option tree for an Option whose
        # short_form matches; returns the dotted name, or None.
        # NOTE(review): unlike getopt_create_opts_recursive, the incoming
        # 'prefix' is discarded when recursing ('%s.' % key), so options in
        # nested namespaces may resolve to a truncated name — verify.
        for key, val in source.items():
            if isinstance(val, namespace.Namespace):
                new_prefix = '%s.' % key
                name = self.find_name_with_short_form(short_name, val,
                                                      new_prefix)
                if name:
                    return name
            elif isinstance(val, option.Option):
                try:
                    if short_name == val.short_form:
                        return '%s%s' % (prefix, val.name)
                except KeyError:
                    continue
        return None
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.