| code (string, 2–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (string, 1 class) | license (string, 15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
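Each row pairs a source file (`code`) with its repository metadata. A minimal sketch of iterating rows with these columns via the Hugging Face `datasets` library (the dataset identifier below is hypothetical; substitute the real one):

```python
from datasets import load_dataset

# Hypothetical dataset identifier; replace with the corpus's actual name.
ds = load_dataset("org/python-code-corpus", split="train", streaming=True)

# Stream rows instead of materialising the whole split locally.
for row in ds:
    print(row["repo_name"], row["path"], row["license"], row["size"])
    break
```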
# -*- coding: utf-8 -*-
#
# summarize_tests.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
# Invoke this script as
#
#     python3 summarize_tests.py <test output directory>
#
# For each JUnit XML result file found in that directory, the script prints
# one summary row per test phase with the number of tests run, skipped,
# failed and errored, followed by an overall total and a list of all
# unsuccessful tests.
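# For example (hypothetical directory and counts),
#
#     python3 summarize_tests.py build/test_reports
#
# might report 500 tests, 12 skipped, 0 failures and 0 errors per phase; the
# script exits with status 1 only if any failures or errors were found.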
import junitparser as jp
import glob
import os
import sys
import xml
assert int(jp.version.split('.')[0]) >= 2, 'junitparser version must be >= 2'
def parse_result_file(fname):
    results = jp.JUnitXml.fromfile(fname)

    if isinstance(results, jp.junitparser.JUnitXml):
        # special case for pytest, which wraps all once more
        suites = list(results)
        assert len(suites) == 1, "JUnit XML files may only contain results from a single testsuite."
        results = suites[0]

    assert all(len(case.result) == 1 for case in results if case.result), 'Case result has unexpected length > 1'
    failed_tests = ['.'.join((case.classname, case.name)) for case in results
                    if case.result and not isinstance(case.result[0], jp.junitparser.Skipped)]

    return {'Tests': results.tests,
            'Skipped': results.skipped,
            'Failures': results.failures,
            'Errors': results.errors,
            'Time': results.time,
            'Failed tests': failed_tests}


if __name__ == '__main__':
    assert len(sys.argv) == 2, 'summarize_tests must be called with TEST_OUTDIR.'
    test_outdir = sys.argv[1]

    results = {}
    totals = {'Tests': 0, 'Skipped': 0,
              'Failures': 0, 'Errors': 0,
              'Time': 0, 'Failed tests': []}

    for pfile in sorted(glob.glob(os.path.join(test_outdir, '*.xml'))):
        ph_name = os.path.splitext(os.path.split(pfile)[1])[0].replace('_', ' ')
        ph_res = parse_result_file(pfile)
        results[ph_name] = ph_res

        for k, v in ph_res.items():
            totals[k] += v

    cols = ['Tests', 'Skipped', 'Failures', 'Errors', 'Time']
    tline = '-' * (len(cols) * 10 + 20)

    print()
    print()
    print(tline)
    print('NEST Testsuite Results')
    print(tline)
    print('{:<20s}'.format('Phase'), end='')
    for c in cols:
        print('{:>10s}'.format(c), end='')
    print()
    print(tline)

    for pn, pr in results.items():
        print('{:<20s}'.format(pn), end='')
        for c in cols:
            fstr = '{:10.1f}' if c == 'Time' else '{:10d}'
            print(fstr.format(pr[c]), end='')
        print()

    print(tline)
    print('{:<20s}'.format('Total'), end='')
    for c in cols:
        fstr = '{:10.1f}' if c == 'Time' else '{:10d}'
        print(fstr.format(totals[c]), end='')
    print()
    print(tline)
    print()

    if totals['Failures'] + totals['Errors'] > 0:
        print('THE NEST TESTSUITE DISCOVERED PROBLEMS')
        print(' The following tests failed')
        for t in totals['Failed tests']:
            print(f' | {t}')  # | marks line for parsing
        print()
        print(' Please report test failures by creating an issue at')
        print(' https://github.com/nest/nest_simulator/issues')
        print()
        print(tline)
        print()
        sys.exit(1)
    else:
        print('The NEST Testsuite passed successfully.')
        print()
        print(tline)
        print()
| sdiazpier/nest-simulator | testsuite/summarize_tests.py | Python | gpl-2.0 | 3,966 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Graph actions tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import tempfile
from tensorflow.contrib import testing
from tensorflow.contrib.framework.python.framework import checkpoint_utils
from tensorflow.contrib.framework.python.ops import variables as variables_lib
from tensorflow.contrib.learn.python import learn
from tensorflow.contrib.learn.python.learn.monitors import BaseMonitor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.summary import summary
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver as saver_lib
class _Feeder(object):
"""Simple generator for `feed_fn`, returning 10 * step."""
def __init__(self, tensor, max_step):
self._step = 0
self._tensor = tensor
self._max_step = max_step
@property
def step(self):
return self._step
def feed_fn(self):
if self._step >= self._max_step:
raise StopIteration
value = self._step * 10.0
self._step += 1
return {self._tensor: value}
class _BaseMonitorWrapper(BaseMonitor):
"""Base monitor wrapper to facilitate testing.
This monitor can act as either chief-exclusive or non-exclusive.
"""
def __init__(self, run_on_all_workers):
super(_BaseMonitorWrapper, self).__init__()
self._run_on_all_workers = run_on_all_workers
self._is_active = False
self._has_step = False
@property
def run_on_all_workers(self):
return self._run_on_all_workers
@property
def is_active(self):
return self._is_active
@property
def has_step(self):
return self._has_step
def begin(self, max_steps=None):
self._is_active = True
return super(_BaseMonitorWrapper, self).begin(max_steps)
def step_begin(self, step):
self._has_step = True
return super(_BaseMonitorWrapper, self).step_begin(step)
class GraphActionsTest(test.TestCase):
"""Graph actions tests."""
def setUp(self):
learn.graph_actions.clear_summary_writers()
self._output_dir = tempfile.mkdtemp()
testing.FakeSummaryWriter.install()
def tearDown(self):
testing.FakeSummaryWriter.uninstall()
if self._output_dir:
shutil.rmtree(self._output_dir)
learn.graph_actions.clear_summary_writers()
def _assert_summaries(self,
output_dir,
writer,
expected_summaries=None,
expected_graphs=None,
expected_meta_graphs=None,
expected_session_logs=None):
self.assertTrue(isinstance(writer, testing.FakeSummaryWriter))
writer.assert_summaries(
self,
expected_logdir=output_dir,
expected_graph=ops.get_default_graph(),
expected_summaries=expected_summaries,
expected_added_graphs=expected_graphs,
expected_added_meta_graphs=expected_meta_graphs,
expected_session_logs=expected_session_logs)
# TODO(ptucker): Test number and contents of checkpoint files.
def _assert_ckpt(self, output_dir, expected=True):
ckpt_state = saver_lib.get_checkpoint_state(output_dir)
if expected:
pattern = '%s/model.ckpt-.*' % output_dir
primary_ckpt_path = ckpt_state.model_checkpoint_path
self.assertRegexpMatches(primary_ckpt_path, pattern)
all_ckpt_paths = ckpt_state.all_model_checkpoint_paths
self.assertTrue(primary_ckpt_path in all_ckpt_paths)
for ckpt_path in all_ckpt_paths:
self.assertRegexpMatches(ckpt_path, pattern)
else:
self.assertTrue(ckpt_state is None)
# TODO(ptucker): Test lock, multi-threaded access?
def test_summary_writer(self):
writer = learn.graph_actions.get_summary_writer('log/dir/0')
self._assert_summaries('log/dir/0', writer)
self.assertTrue(
learn.graph_actions.get_summary_writer('log/dir/0') is
learn.graph_actions.get_summary_writer('log/dir/0'))
self.assertTrue(
learn.graph_actions.get_summary_writer('log/dir/0') is
not learn.graph_actions.get_summary_writer('log/dir/1'))
# TODO(ptucker): Test restore_checkpoint_path for eval; this should obsolete
# test_evaluate_with_saver().
# TODO(ptucker): Test start_queue_runners for both eval & train.
# TODO(ptucker): Test coord.request_stop & coord.join for eval.
def _build_inference_graph(self):
"""Build simple inference graph.
This includes a regular variable, local variable, and fake table.
Returns:
Tuple of 3 `Tensor` objects, 2 input and 1 output.
"""
variables_lib.create_global_step()
in0 = variables.Variable(1.0)
in1 = variables_lib.local_variable(2.0)
fake_table = variables.Variable(
3.0,
trainable=False,
collections=['fake_tables'],
name='fake_table_var')
in0.graph.add_to_collections([ops.GraphKeys.TABLE_INITIALIZERS],
fake_table.initializer)
out = in0 + in1 + fake_table
return in0, in1, out
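# Note: with the initial values above (1.0, 2.0 and 3.0), `out` evaluates to
# 6.0 once the fake table initializer has run, which is the value asserted by
# the inference tests below.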
def test_infer(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
in0, in1, out = self._build_inference_graph()
self.assertEqual({
'a': 1.0,
'b': 2.0,
'c': 6.0
}, learn.graph_actions.infer(None, {'a': in0,
'b': in1,
'c': out}))
self._assert_ckpt(self._output_dir, False)
@test.mock.patch.object(
learn.graph_actions.coordinator.Coordinator,
'request_stop',
side_effect=learn.graph_actions.coordinator.Coordinator.request_stop,
autospec=True)
def test_coordinator_request_stop_called(self, request_stop):
with ops.Graph().as_default() as g, self.test_session(g):
in0, in1, out = self._build_inference_graph()
learn.graph_actions.infer(None, {'a': in0, 'b': in1, 'c': out})
self.assertTrue(request_stop.called)
@test.mock.patch.object(
learn.graph_actions.coordinator.Coordinator,
'request_stop',
side_effect=learn.graph_actions.coordinator.Coordinator.request_stop,
autospec=True)
def test_run_feeds_iter_cleanup_with_exceptions(self, request_stop):
with ops.Graph().as_default() as g, self.test_session(g):
in0, in1, out = self._build_inference_graph()
try:
for _ in learn.graph_actions.run_feeds_iter({
'a': in0,
'b': in1,
'c': out
}, [None] * 3):
self.assertFalse(request_stop.called)
raise ValueError('Fake exception')
except ValueError:
pass
self.assertTrue(request_stop.called)
def test_run_feeds_iter_calls_resources_init(self):
with ops.Graph().as_default() as g:
in0, _, _ = self._build_inference_graph()
handle = test_ops.stub_resource_handle_op(container='a', shared_name='b')
resources.register_resource(
handle=handle,
create_op=test_ops.resource_create_op(handle),
is_initialized_op=test_ops.resource_initialized_op(handle))
for _ in learn.graph_actions.run_feeds_iter(
{
'in0': in0
}, feed_dicts=[{}]):
self.assertTrue(test_ops.resource_initialized_op(handle).eval())
def test_infer_different_default_graph(self):
with self.test_session():
self._assert_ckpt(self._output_dir, False)
with ops.Graph().as_default():
in0, in1, out = self._build_inference_graph()
with ops.Graph().as_default():
self.assertEqual({
'a': 1.0,
'b': 2.0,
'c': 6.0
}, learn.graph_actions.infer(None, {'a': in0,
'b': in1,
'c': out}))
self._assert_ckpt(self._output_dir, False)
def test_infer_invalid_feed(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
in0, _, _ = self._build_inference_graph()
with self.assertRaisesRegexp(TypeError, 'Can not convert a NoneType'):
learn.graph_actions.infer(None, {'a': in0}, feed_dict={None: 4.0})
self._assert_ckpt(self._output_dir, False)
def test_infer_feed(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
in0, _, out = self._build_inference_graph()
self.assertEqual(
{
'c': 9.0
},
learn.graph_actions.infer(
None, {'c': out}, feed_dict={in0: 4.0}))
self._assert_ckpt(self._output_dir, False)
# TODO(ptucker): Test eval for 1 epoch.
def test_evaluate_invalid_args(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.evaluate(
g,
output_dir=None,
checkpoint_path=None,
eval_dict={'a': constant_op.constant(1.0)})
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.evaluate(
g,
output_dir='',
checkpoint_path=None,
eval_dict={'a': constant_op.constant(1.0)})
self._assert_ckpt(self._output_dir, False)
def test_evaluate(self):
with ops.Graph().as_default() as g, self.test_session(g):
_, _, out = self._build_inference_graph()
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)
results = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
max_steps=1)
self.assertEqual(({'a': 6.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 6.0
}},
expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)
def test_evaluate_ready_for_local_init(self):
with ops.Graph().as_default() as g, self.test_session(g):
variables_lib.create_global_step()
v = variables.Variable(1.0)
w = variables.Variable(
v + 1, collections=[ops.GraphKeys.LOCAL_VARIABLES], trainable=False)
ready_for_local_init_op = variables.report_uninitialized_variables(
variables.global_variables())
ops.add_to_collection(ops.GraphKeys.READY_FOR_LOCAL_INIT_OP,
ready_for_local_init_op)
_ = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': v},
max_steps=1)
def test_evaluate_feed_fn(self):
with ops.Graph().as_default() as g, self.test_session(g):
in0, _, out = self._build_inference_graph()
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)
feeder = _Feeder(in0, 3)
results = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
feed_fn=feeder.feed_fn,
max_steps=3)
self.assertEqual(3, feeder.step)
self.assertEqual(({'a': 25.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 25.0
}},
expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)
def test_evaluate_feed_fn_with_exhaustion(self):
with ops.Graph().as_default() as g, self.test_session(g):
in0, _, out = self._build_inference_graph()
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
feeder = _Feeder(in0, 2)
results = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
feed_fn=feeder.feed_fn,
max_steps=3)
self.assertEqual(2, feeder.step)
self.assertEqual(({'a': 15.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 15.0
}},
expected_session_logs=[])
def test_evaluate_with_saver(self):
with ops.Graph().as_default() as g, self.test_session(g):
_, _, out = self._build_inference_graph()
ops.add_to_collection(ops.GraphKeys.SAVERS, saver_lib.Saver())
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
results = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
max_steps=1)
self.assertEqual(({'a': 6.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 6.0
}},
expected_session_logs=[])
def test_train_invalid_args(self):
with ops.Graph().as_default() as g, self.test_session(g):
train_op = constant_op.constant(1.0)
loss_op = constant_op.constant(2.0)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions._monitored_train(
g, # pylint: disable=protected-access
output_dir=None,
train_op=train_op,
loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir='',
train_op=constant_op.constant(1.0),
loss_op=constant_op.constant(2.0))
with self.assertRaisesRegexp(ValueError, 'train_op'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=None,
loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'loss_op'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=None)
with self.assertRaisesRegexp(ValueError, 'global_step'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=loss_op)
# TODO(ptucker): Resume training from previous ckpt.
# TODO(ptucker): !supervisor_is_chief
# TODO(ptucker): Custom init op for training.
# TODO(ptucker): Mock supervisor, and assert all interactions.
def test_train(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=1)
meta_graph_def = meta_graph.create_meta_graph_def(
graph_def=g.as_graph_def(add_shapes=True),
saver_def=monitored_session.Scaffold().finalize().saver.saver_def)
self.assertEqual(2.0, loss)
self._assert_summaries(
self._output_dir,
writer,
expected_graphs=[g],
expected_meta_graphs=[meta_graph_def])
self._assert_ckpt(self._output_dir, True)
def test_train_steps_is_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(25, step)
def test_train_max_steps_is_not_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(15, step)
def test_train_skip_train_if_max_step_already_saved(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
def test_train_loss(self):
with ops.Graph().as_default() as g, self.test_session(g):
variables_lib.create_global_step()
loss_var = variables_lib.local_variable(10.0)
train_op = control_flow_ops.group(
state_ops.assign_add(variables_lib.get_global_step(), 1),
state_ops.assign_add(loss_var, -1.0))
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_var.value(),
steps=6)
self.assertEqual(4.0, loss)
self._assert_summaries(
self._output_dir,
writer,
expected_graphs=[g],
expected_meta_graphs=None)
self._assert_ckpt(self._output_dir, True)
def test_train_summaries(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_op,
steps=1)
meta_graph_def = meta_graph.create_meta_graph_def(
graph_def=g.as_graph_def(add_shapes=True),
saver_def=monitored_session.Scaffold().finalize().saver.saver_def)
self.assertEqual(2.0, loss)
self._assert_summaries(
self._output_dir,
writer,
expected_graphs=[g],
expected_meta_graphs=[meta_graph_def],
expected_summaries={1: {
'loss': 2.0
}})
self._assert_ckpt(self._output_dir, True)
def test_train_override_saver(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
self._assert_ckpt(self._output_dir, False)
real_saver = saver_lib.Saver()
saver = test.mock.Mock(wraps=real_saver, saver_def=real_saver.saver_def)
ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=1)
self.assertEqual(2.0, loss)
self._assert_ckpt(self._output_dir, True)
self.assertTrue(saver.build.called)
self.assertEqual(1, saver.save.call_count)
# TODO(ispir): remove following tests after deprecated train.
class GraphActionsTrainTest(test.TestCase):
"""Tests for train."""
def setUp(self):
learn.graph_actions.clear_summary_writers()
self._output_dir = tempfile.mkdtemp()
testing.FakeSummaryWriter.install()
def tearDown(self):
testing.FakeSummaryWriter.uninstall()
if self._output_dir:
shutil.rmtree(self._output_dir)
learn.graph_actions.clear_summary_writers()
def _assert_summaries(self,
output_dir,
expected_summaries=None,
expected_graphs=None,
expected_meta_graphs=None,
expected_session_logs=None):
writer = learn.graph_actions.get_summary_writer(output_dir)
self.assertTrue(isinstance(writer, testing.FakeSummaryWriter))
writer.assert_summaries(
self,
expected_logdir=output_dir,
expected_graph=ops.get_default_graph(),
expected_summaries=expected_summaries,
expected_added_graphs=expected_graphs,
expected_added_meta_graphs=expected_meta_graphs,
expected_session_logs=expected_session_logs)
# TODO(ptucker): Test number and contents of checkpoint files.
def _assert_ckpt(self, output_dir, expected=True):
ckpt_state = saver_lib.get_checkpoint_state(output_dir)
if expected:
pattern = '%s/model.ckpt-.*' % output_dir
primary_ckpt_path = ckpt_state.model_checkpoint_path
self.assertRegexpMatches(primary_ckpt_path, pattern)
all_ckpt_paths = ckpt_state.all_model_checkpoint_paths
self.assertTrue(primary_ckpt_path in all_ckpt_paths)
for ckpt_path in all_ckpt_paths:
self.assertRegexpMatches(ckpt_path, pattern)
else:
self.assertTrue(ckpt_state is None)
def _build_inference_graph(self):
"""Build simple inference graph.
This includes a regular variable, local variable, and fake table.
Returns:
Tuple of 3 `Tensor` objects, 2 input and 1 output.
"""
variables_lib.create_global_step()
in0 = variables.Variable(1.0)
in1 = variables_lib.local_variable(2.0)
fake_table = variables.Variable(
3.0,
trainable=False,
collections=['fake_tables'],
name='fake_table_var')
in0.graph.add_to_collections([ops.GraphKeys.TABLE_INITIALIZERS],
fake_table.initializer)
out = in0 + in1 + fake_table
return in0, in1, out
def test_train_invalid_args(self):
with ops.Graph().as_default() as g, self.test_session(g):
train_op = constant_op.constant(1.0)
loss_op = constant_op.constant(2.0)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.train(
g, output_dir=None, train_op=train_op, loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.train(
g,
output_dir='',
train_op=constant_op.constant(1.0),
loss_op=constant_op.constant(2.0))
with self.assertRaisesRegexp(ValueError, 'train_op'):
learn.graph_actions.train(
g, output_dir=self._output_dir, train_op=None, loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'loss_op'):
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=None)
with self.assertRaisesRegexp(ValueError, 'global_step'):
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=loss_op)
# TODO(ptucker): Resume training from previous ckpt.
# TODO(ptucker): !supervisor_is_chief
# TODO(ptucker): Custom init op for training.
# TODO(ptucker): Mock supervisor, and assert all interactions.
def test_train(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
self._assert_summaries(self._output_dir)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=1)
# TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the
# SaverDef, so we can't add it to the summary assertion test below.
# meta_graph_def = meta_graph.create_meta_graph_def()
self.assertEqual(2.0, loss)
self._assert_summaries(self._output_dir, expected_graphs=[g])
self._assert_ckpt(self._output_dir, True)
def test_train_steps_is_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(25, step)
def test_train_max_steps_is_not_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(15, step)
def test_train_loss(self):
with ops.Graph().as_default() as g, self.test_session(g):
variables_lib.create_global_step()
loss_var = variables_lib.local_variable(10.0)
train_op = control_flow_ops.group(
state_ops.assign_add(variables_lib.get_global_step(), 1),
state_ops.assign_add(loss_var, -1.0))
self._assert_summaries(self._output_dir)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_var.value(),
steps=6)
# TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the
# SaverDef, so we can't add it to the summary assertion test below.
# meta_graph_def = meta_graph.create_meta_graph_def()
self.assertEqual(4.0, loss)
self._assert_summaries(self._output_dir, expected_graphs=[g])
self._assert_ckpt(self._output_dir, True)
def test_train_summaries(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
self._assert_summaries(self._output_dir)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_op,
steps=1)
# TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the
# SaverDef, so we can't add it to the summary assertion test below.
# meta_graph_def = meta_graph.create_meta_graph_def()
self.assertEqual(2.0, loss)
self._assert_summaries(
self._output_dir,
expected_graphs=[g],
expected_summaries={1: {
'loss': 2.0
}})
self._assert_ckpt(self._output_dir, True)
def test_train_chief_monitor(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
chief_exclusive_monitor = _BaseMonitorWrapper(False)
all_workers_monitor = _BaseMonitorWrapper(True)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_op,
supervisor_is_chief=True,
steps=1,
monitors=[chief_exclusive_monitor, all_workers_monitor])
self.assertEqual(2.0, loss)
self.assertTrue(chief_exclusive_monitor.is_active and
all_workers_monitor.is_active,
'All monitors must have been active.')
self.assertTrue(chief_exclusive_monitor.has_step and
all_workers_monitor.has_step,
'All monitors must have a step.')
def test_train_worker_monitor(self):
# We need to explicitly set device due to check on non-chief workers
# requiring all variables to have a device assigned.
with ops.Graph().as_default() as g, g.device('/cpu:0'):
global_step = variables_lib.create_global_step(g)
train_op = state_ops.assign_add(global_step, 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
# Add explicit "local" init op to initialize all variables
# as there's no chief to init here.
init_op = variables.global_variables_initializer()
ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, init_op)
# Create worker monitors where one should be active on the worker
# and the other chief exclusive.
chief_exclusive_monitor = _BaseMonitorWrapper(False)
all_workers_monitor = _BaseMonitorWrapper(True)
with self.test_session(g):
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
global_step_tensor=global_step,
train_op=train_op,
loss_op=loss_op,
supervisor_is_chief=False,
steps=1,
monitors=[chief_exclusive_monitor, all_workers_monitor])
self.assertEqual(2.0, loss)
self.assertTrue(not chief_exclusive_monitor.is_active and
all_workers_monitor.is_active,
'Only non-chief runnable monitor must have been active.')
self.assertTrue(not chief_exclusive_monitor.has_step and
all_workers_monitor.has_step,
'Only non-chief runnable monitor must have a step.')
if __name__ == '__main__':
test.main()
| strint/tensorflow | tensorflow/contrib/learn/python/learn/graph_actions_test.py | Python | apache-2.0 | 35,811 |
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os.path
from neutron.agent.linux import external_process as ep
from neutron.common import utils as common_utils
from neutron.tests import base
from neutron.tests import tools
TEST_UUID = 'test-uuid'
TEST_SERVICE = 'testsvc'
TEST_PID = 1234
class BaseTestProcessMonitor(base.BaseTestCase):
def setUp(self):
super(BaseTestProcessMonitor, self).setUp()
self.log_patch = mock.patch("neutron.agent.linux.external_process."
"LOG.error")
self.error_log = self.log_patch.start()
self.spawn_patch = mock.patch("eventlet.spawn")
self.eventlent_spawn = self.spawn_patch.start()
# create a default process monitor
self.create_child_process_monitor('respawn')
def create_child_process_monitor(self, action):
conf = mock.Mock()
conf.AGENT.check_child_processes_action = action
conf.AGENT.check_child_processes = True
self.pmonitor = ep.ProcessMonitor(
config=conf,
resource_type='test')
def get_monitored_process(self, uuid, service=None):
monitored_process = mock.Mock()
self.pmonitor.register(uuid=uuid,
service_name=service,
monitored_process=monitored_process)
return monitored_process
class TestProcessMonitor(BaseTestProcessMonitor):
def test_error_logged(self):
pm = self.get_monitored_process(TEST_UUID)
pm.active = False
self.pmonitor._check_child_processes()
self.assertTrue(self.error_log.called)
def test_exit_handler(self):
self.create_child_process_monitor('exit')
pm = self.get_monitored_process(TEST_UUID)
pm.active = False
with mock.patch.object(ep.ProcessMonitor,
'_exit_handler') as exit_handler:
self.pmonitor._check_child_processes()
exit_handler.assert_called_once_with(TEST_UUID, None)
def test_register(self):
pm = self.get_monitored_process(TEST_UUID)
self.assertEqual(len(self.pmonitor._monitored_processes), 1)
self.assertIn(pm, self.pmonitor._monitored_processes.values())
def test_register_same_service_twice(self):
self.get_monitored_process(TEST_UUID)
self.get_monitored_process(TEST_UUID)
self.assertEqual(len(self.pmonitor._monitored_processes), 1)
def test_register_different_service_types(self):
self.get_monitored_process(TEST_UUID)
self.get_monitored_process(TEST_UUID, TEST_SERVICE)
self.assertEqual(len(self.pmonitor._monitored_processes), 2)
def test_unregister(self):
self.get_monitored_process(TEST_UUID)
self.pmonitor.unregister(TEST_UUID, None)
self.assertEqual(len(self.pmonitor._monitored_processes), 0)
def test_unregister_unknown_process(self):
self.pmonitor.unregister(TEST_UUID, None)
self.assertEqual(len(self.pmonitor._monitored_processes), 0)
class TestProcessManager(base.BaseTestCase):
def setUp(self):
super(TestProcessManager, self).setUp()
self.execute_p = mock.patch('neutron.agent.common.utils.execute')
self.execute = self.execute_p.start()
self.delete_if_exists = mock.patch(
'oslo_utils.fileutils.delete_if_exists').start()
self.ensure_dir = mock.patch.object(
common_utils, 'ensure_dir').start()
self.conf = mock.Mock()
self.conf.external_pids = '/var/path'
def test_processmanager_ensures_pid_dir(self):
pid_file = os.path.join(self.conf.external_pids, 'pid')
ep.ProcessManager(self.conf, 'uuid', pid_file=pid_file)
self.ensure_dir.assert_called_once_with(self.conf.external_pids)
def test_enable_no_namespace(self):
callback = mock.Mock()
callback.return_value = ['the', 'cmd']
with mock.patch.object(ep.ProcessManager, 'get_pid_file_name') as name:
name.return_value = 'pidfile'
with mock.patch.object(ep.ProcessManager, 'active') as active:
active.__get__ = mock.Mock(return_value=False)
manager = ep.ProcessManager(self.conf, 'uuid')
manager.enable(callback)
callback.assert_called_once_with('pidfile')
self.execute.assert_called_once_with(['the', 'cmd'],
check_exit_code=True,
extra_ok_codes=None,
run_as_root=False,
log_fail_as_error=True)
def test_enable_with_namespace(self):
callback = mock.Mock()
callback.return_value = ['the', 'cmd']
with mock.patch.object(ep.ProcessManager, 'get_pid_file_name') as name:
name.return_value = 'pidfile'
with mock.patch.object(ep.ProcessManager, 'active') as active:
active.__get__ = mock.Mock(return_value=False)
manager = ep.ProcessManager(self.conf, 'uuid', namespace='ns')
with mock.patch.object(ep, 'ip_lib') as ip_lib:
manager.enable(callback)
callback.assert_called_once_with('pidfile')
ip_lib.assert_has_calls([
mock.call.IPWrapper(namespace='ns'),
mock.call.IPWrapper().netns.execute(
['the', 'cmd'], addl_env=None, run_as_root=False)])
def test_enable_with_namespace_process_active(self):
callback = mock.Mock()
callback.return_value = ['the', 'cmd']
with mock.patch.object(ep.ProcessManager, 'active') as active:
active.__get__ = mock.Mock(return_value=True)
manager = ep.ProcessManager(self.conf, 'uuid', namespace='ns')
with mock.patch.object(ep, 'ip_lib'):
manager.enable(callback)
self.assertFalse(callback.called)
def test_disable_no_namespace(self):
with mock.patch.object(ep.ProcessManager, 'pid') as pid:
pid.__get__ = mock.Mock(return_value=4)
with mock.patch.object(ep.ProcessManager, 'active') as active:
active.__get__ = mock.Mock(return_value=True)
manager = ep.ProcessManager(self.conf, 'uuid')
with mock.patch.object(ep, 'utils') as utils:
manager.disable()
utils.assert_has_calls([
mock.call.execute(['kill', '-9', 4],
run_as_root=True)])
def test_disable_namespace(self):
with mock.patch.object(ep.ProcessManager, 'pid') as pid:
pid.__get__ = mock.Mock(return_value=4)
with mock.patch.object(ep.ProcessManager, 'active') as active:
active.__get__ = mock.Mock(return_value=True)
manager = ep.ProcessManager(self.conf, 'uuid', namespace='ns')
with mock.patch.object(ep, 'utils') as utils:
manager.disable()
utils.assert_has_calls([
mock.call.execute(['kill', '-9', 4],
run_as_root=True)])
def test_disable_not_active(self):
with mock.patch.object(ep.ProcessManager, 'pid') as pid:
pid.__get__ = mock.Mock(return_value=4)
with mock.patch.object(ep.ProcessManager, 'active') as active:
active.__get__ = mock.Mock(return_value=False)
with mock.patch.object(ep.LOG, 'debug') as debug:
manager = ep.ProcessManager(self.conf, 'uuid')
manager.disable()
debug.assert_called_once_with(mock.ANY, mock.ANY)
def test_disable_no_pid(self):
with mock.patch.object(ep.ProcessManager, 'pid') as pid:
pid.__get__ = mock.Mock(return_value=None)
with mock.patch.object(ep.ProcessManager, 'active') as active:
active.__get__ = mock.Mock(return_value=False)
with mock.patch.object(ep.LOG, 'debug') as debug:
manager = ep.ProcessManager(self.conf, 'uuid')
manager.disable()
debug.assert_called_once_with(mock.ANY, mock.ANY)
def test_get_pid_file_name_default(self):
manager = ep.ProcessManager(self.conf, 'uuid')
retval = manager.get_pid_file_name()
self.assertEqual(retval, '/var/path/uuid.pid')
def test_pid(self):
self.useFixture(tools.OpenFixture('/var/path/uuid.pid', '5'))
manager = ep.ProcessManager(self.conf, 'uuid')
self.assertEqual(manager.pid, 5)
def test_pid_no_an_int(self):
self.useFixture(tools.OpenFixture('/var/path/uuid.pid', 'foo'))
manager = ep.ProcessManager(self.conf, 'uuid')
self.assertIsNone(manager.pid)
def test_pid_invalid_file(self):
with mock.patch.object(ep.ProcessManager, 'get_pid_file_name') as name:
name.return_value = '.doesnotexist/pid'
manager = ep.ProcessManager(self.conf, 'uuid')
self.assertIsNone(manager.pid)
def test_active(self):
mock_open = self.useFixture(
tools.OpenFixture('/proc/4/cmdline', 'python foo --router_id=uuid')
).mock_open
with mock.patch.object(ep.ProcessManager, 'pid') as pid:
pid.__get__ = mock.Mock(return_value=4)
manager = ep.ProcessManager(self.conf, 'uuid')
self.assertTrue(manager.active)
mock_open.assert_called_once_with('/proc/4/cmdline', 'r')
def test_active_none(self):
dummy_cmd_line = 'python foo --router_id=uuid'
self.execute.return_value = dummy_cmd_line
with mock.patch.object(ep.ProcessManager, 'pid') as pid:
pid.__get__ = mock.Mock(return_value=None)
manager = ep.ProcessManager(self.conf, 'uuid')
self.assertFalse(manager.active)
def test_active_cmd_mismatch(self):
mock_open = self.useFixture(
tools.OpenFixture('/proc/4/cmdline',
'python foo --router_id=anotherid')
).mock_open
with mock.patch.object(ep.ProcessManager, 'pid') as pid:
pid.__get__ = mock.Mock(return_value=4)
manager = ep.ProcessManager(self.conf, 'uuid')
self.assertFalse(manager.active)
mock_open.assert_called_once_with('/proc/4/cmdline', 'r')
| glove747/liberty-neutron | neutron/tests/unit/agent/linux/test_external_process.py | Python | apache-2.0 | 11,219 |
# -*- coding: utf-8 -*-
"""
github3.gists.comment
---------------------
Module containing the logic for a GistComment
"""
from __future__ import unicode_literals
from ..models import BaseComment
from ..users import User
class GistComment(BaseComment):
    """This object represents a comment on a gist.

    Two comment instances can be checked like so::

        c1 == c2
        c1 != c2

    And is equivalent to::

        c1.id == c2.id
        c1.id != c2.id

    See also: http://developer.github.com/v3/gists/comments/
    """

    def _update_attributes(self, comment):
        self._api = comment.get('url')
        #: :class:`User <github3.users.User>` who made the comment
        #: Unless it is not associated with an account
        self.user = None
        if comment.get('user'):
            self.user = User(comment.get('user'), self)  # (No coverage)

    def _repr(self):
        return '<Gist Comment [{0}]>'.format(self.user.login)
| h4ck3rm1k3/github3.py | github3/gists/comment.py | Python | bsd-3-clause | 961 |
import time
import os
import sys
import hashlib
import gc
import shutil
import platform
import errno
import logging
try:
    import cPickle as pickle
except:
    import pickle
from parso._compatibility import FileNotFoundError
LOG = logging.getLogger(__name__)
_PICKLE_VERSION = 30
"""
Version number (integer) for file system cache.
Increment this number when there are any incompatible changes in
the parser tree classes. For example, the following changes
are regarded as incompatible.
- A class name is changed.
- A class is moved to another module.
- A __slot__ of a class is changed.
"""
_VERSION_TAG = '%s-%s%s-%s' % (
    platform.python_implementation(),
    sys.version_info[0],
    sys.version_info[1],
    _PICKLE_VERSION
)
"""
Short name for distinguishing Python implementations and versions.
It's like `sys.implementation.cache_tag` but for Python < 3.3
we generate something similar. See:
http://docs.python.org/3/library/sys.html#sys.implementation
"""
def _get_default_cache_path():
    if platform.system().lower() == 'windows':
        dir_ = os.path.join(os.getenv('LOCALAPPDATA') or '~', 'Parso', 'Parso')
    elif platform.system().lower() == 'darwin':
        dir_ = os.path.join('~', 'Library', 'Caches', 'Parso')
    else:
        dir_ = os.path.join(os.getenv('XDG_CACHE_HOME') or '~/.cache', 'parso')
    return os.path.expanduser(dir_)
_default_cache_path = _get_default_cache_path()
"""
The path where the cache is stored.
On Linux, this defaults to ``~/.cache/parso/``, on OS X to
``~/Library/Caches/Parso/`` and on Windows to ``%LOCALAPPDATA%\\Parso\\Parso\\``.
On Linux, if environment variable ``$XDG_CACHE_HOME`` is set,
``$XDG_CACHE_HOME/parso`` is used instead of the default one.
"""
parser_cache = {}
class _NodeCacheItem(object):
    def __init__(self, node, lines, change_time=None):
        self.node = node
        self.lines = lines
        if change_time is None:
            change_time = time.time()
        self.change_time = change_time


def load_module(hashed_grammar, path, cache_path=None):
    """
    Returns a module or None, if it fails.
    """
    try:
        p_time = os.path.getmtime(path)
    except FileNotFoundError:
        return None

    try:
        module_cache_item = parser_cache[hashed_grammar][path]
        if p_time <= module_cache_item.change_time:
            return module_cache_item.node
    except KeyError:
        return _load_from_file_system(hashed_grammar, path, p_time, cache_path=cache_path)


def _load_from_file_system(hashed_grammar, path, p_time, cache_path=None):
    cache_path = _get_hashed_path(hashed_grammar, path, cache_path=cache_path)
    try:
        try:
            if p_time > os.path.getmtime(cache_path):
                # Cache is outdated
                return None
        except OSError as e:
            if e.errno == errno.ENOENT:
                # In Python 2 instead of an IOError here we get an OSError.
                raise FileNotFoundError
            else:
                raise

        with open(cache_path, 'rb') as f:
            gc.disable()
            try:
                module_cache_item = pickle.load(f)
            finally:
                gc.enable()
    except FileNotFoundError:
        return None
    else:
        parser_cache.setdefault(hashed_grammar, {})[path] = module_cache_item
        LOG.debug('pickle loaded: %s', path)
        return module_cache_item.node


def save_module(hashed_grammar, path, module, lines, pickling=True, cache_path=None):
    try:
        p_time = None if path is None else os.path.getmtime(path)
    except OSError:
        p_time = None
        pickling = False

    item = _NodeCacheItem(module, lines, p_time)
    parser_cache.setdefault(hashed_grammar, {})[path] = item
    if pickling and path is not None:
        _save_to_file_system(hashed_grammar, path, item, cache_path=cache_path)


def _save_to_file_system(hashed_grammar, path, item, cache_path=None):
    with open(_get_hashed_path(hashed_grammar, path, cache_path=cache_path), 'wb') as f:
        pickle.dump(item, f, pickle.HIGHEST_PROTOCOL)


def clear_cache(cache_path=None):
    if cache_path is None:
        cache_path = _default_cache_path
    shutil.rmtree(cache_path)
    parser_cache.clear()


def _get_hashed_path(hashed_grammar, path, cache_path=None):
    directory = _get_cache_directory_path(cache_path=cache_path)
    file_hash = hashlib.sha256(path.encode("utf-8")).hexdigest()
    return os.path.join(directory, '%s-%s.pkl' % (hashed_grammar, file_hash))


def _get_cache_directory_path(cache_path=None):
    if cache_path is None:
        cache_path = _default_cache_path
    directory = os.path.join(cache_path, _VERSION_TAG)
    if not os.path.exists(directory):
        os.makedirs(directory)
    return directory
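# A minimal usage sketch of the public helpers above (hypothetical grammar
# hash, path and parse tree; the real callers live in parso's grammar code):
#
#     save_module('abcdef123456', '/tmp/example.py', module_node, lines)
#     assert load_module('abcdef123456', '/tmp/example.py') is module_node
#
# The second call is served from the in-memory `parser_cache` as long as
# /tmp/example.py has not been modified since it was saved.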
| archifix/settings | sublime/Packages/Jedi - Python autocompletion/dependencies/parso/cache.py | Python | mit | 4,791 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
from os.path import splitext
from thumbor.ext.filters import _alpha
from thumbor.filters import BaseFilter, filter_method
import tornado.gen
class Filter(BaseFilter):
    regex = r'(?:watermark\((?P<url>.*?),(?P<x>-?[\d]*?),(?P<y>-?[\d]*?),(?P<alpha>[\d]*?)\))'

    def on_image_ready(self, buffer):
        self.watermark_engine.load(buffer, self.extension)
        self.watermark_engine.enable_alpha()

        mode, data = self.watermark_engine.image_data_as_rgb()
        imgdata = _alpha.apply(mode,
                               self.alpha,
                               data)
        self.watermark_engine.set_image_data(imgdata)

        inv_x = self.x[0] == '-'
        inv_y = self.y[0] == '-'
        x, y = int(self.x), int(self.y)

        sz = self.engine.size
        watermark_sz = self.watermark_engine.size

        if inv_x:
            x = (sz[0] - watermark_sz[0]) + x
        if inv_y:
            y = (sz[1] - watermark_sz[1]) + y

        self.engine.paste(self.watermark_engine, (x, y), merge=True)
        self.callback()

    def on_fetch_done(self, buffer):
        self.watermark_engine.load(buffer, self.extension)
        self.storage.put(self.url, self.watermark_engine.read())
        self.storage.put_crypto(self.url)
        self.on_image_ready(buffer)

    @filter_method(BaseFilter.String, r'-?[\d]+', r'-?[\d]+', BaseFilter.PositiveNumber, async=True)
    @tornado.gen.coroutine
    def watermark(self, callback, url, x, y, alpha):
        self.url = url
        self.x = x
        self.y = y
        self.alpha = alpha
        self.callback = callback
        self.extension = splitext(self.url)[-1].lower()
        self.watermark_engine = self.context.modules.engine.__class__(self.context)
        self.storage = self.context.modules.storage

        buffer = yield tornado.gen.maybe_future(self.storage.get(self.url))
        if buffer is not None:
            self.on_image_ready(buffer)
        else:
            self.context.modules.loader.load(self.context, self.url, self.on_fetch_done)
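# A minimal usage sketch (hypothetical watermark URL and offsets): a request
# containing the filter expression
#
#     watermark(http://example.com/logo.png,-10,-10,50)
#
# matches the regex above and pastes logo.png with an alpha value of 50,
# 10 pixels in from the right and bottom edges; non-negative offsets are
# measured from the left and top edges instead.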
| MaTriXy/thumbor | thumbor/filters/watermark.py | Python | mit | 2,292 |
class NotFound(object):
    pass
| siddhika1889/Pydev-Editor | tests/pysrc/extendable/grammar3/relative.py | Python | epl-1.0 | 32 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError
class AccountInvoice(models.Model):
    _inherit = 'account.invoice'

    transaction_ids = fields.Many2many('payment.transaction', 'account_invoice_transaction_rel', 'invoice_id', 'transaction_id',
                                       string='Transactions', copy=False, readonly=True)
    authorized_transaction_ids = fields.Many2many('payment.transaction', compute='_compute_authorized_transaction_ids',
                                                  string='Authorized Transactions', copy=False, readonly=True)

    @api.depends('transaction_ids')
    def _compute_authorized_transaction_ids(self):
        for trans in self:
            trans.authorized_transaction_ids = trans.transaction_ids.filtered(lambda t: t.state == 'authorized')

    @api.multi
    def get_portal_last_transaction(self):
        self.ensure_one()
        return self.transaction_ids.get_last_transaction()

    @api.multi
    def _create_payment_transaction(self, vals):
        '''Similar to self.env['payment.transaction'].create(vals) but the values are filled with the
        current invoices' fields (e.g. the partner or the currency).

        :param vals: The values to create a new payment.transaction.
        :return: The newly created payment.transaction record.
        '''
        # Ensure the currencies are the same.
        currency = self[0].currency_id
        if any([inv.currency_id != currency for inv in self]):
            raise ValidationError(_('A transaction can\'t be linked to invoices having different currencies.'))

        # Ensure the partners are the same.
        partner = self[0].partner_id
        if any([inv.partner_id != partner for inv in self]):
            raise ValidationError(_('A transaction can\'t be linked to invoices having different partners.'))

        # Try to retrieve the acquirer. However, fallback to the token's acquirer.
        acquirer_id = vals.get('acquirer_id')
        acquirer = None
        payment_token_id = vals.get('payment_token_id')
        if payment_token_id:
            payment_token = self.env['payment.token'].sudo().browse(payment_token_id)

            # Check payment_token/acquirer matching or take the acquirer from token
            if acquirer_id:
                acquirer = self.env['payment.acquirer'].browse(acquirer_id)
                if payment_token and payment_token.acquirer_id != acquirer:
                    raise ValidationError(_('Invalid token found! Token acquirer %s != %s') % (
                        payment_token.acquirer_id.name, acquirer.name))
                if payment_token and payment_token.partner_id != partner:
                    raise ValidationError(_('Invalid token found! Token partner %s != %s') % (
                        payment_token.partner.name, partner.name))
            else:
                acquirer = payment_token.acquirer_id

        # Check an acquirer is there.
        if not acquirer_id and not acquirer:
            raise ValidationError(_('A payment acquirer is required to create a transaction.'))

        if not acquirer:
            acquirer = self.env['payment.acquirer'].browse(acquirer_id)

        # Check a journal is set on acquirer.
        if not acquirer.journal_id:
            raise ValidationError(_('A journal must be specified for the acquirer %s.' % acquirer.name))

        if not acquirer_id and acquirer:
            vals['acquirer_id'] = acquirer.id

        vals.update({
            'amount': sum(self.mapped('residual')),
            'currency_id': currency.id,
            'partner_id': partner.id,
            'invoice_ids': [(6, 0, self.ids)],
        })
        transaction = self.env['payment.transaction'].create(vals)

        # Process directly if payment_token
        if transaction.payment_token_id:
            transaction.s2s_do_transaction()

        return transaction

    @api.multi
    def payment_action_capture(self):
        self.authorized_transaction_ids.s2s_capture_transaction()

    @api.multi
    def payment_action_void(self):
        self.authorized_transaction_ids.s2s_void_transaction()
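# A minimal usage sketch (hypothetical acquirer id; assumes `invoices` is an
# account.invoice recordset sharing one partner and one currency):
#
#     tx = invoices._create_payment_transaction({'acquirer_id': 7})
#     invoices.payment_action_capture()  # capture any authorized transactions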
| t3dev/odoo | addons/payment/models/account_invoice.py | Python | gpl-3.0 | 4,255 |
# -*- python -*-
# test_host_specific_configuration.py - Unit tests for
# swift_build_support.host_specific_configuration
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2019 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import unittest
from argparse import Namespace
from swift_build_support.host_specific_configuration import \
HostSpecificConfiguration
class ToolchainTestCase(unittest.TestCase):
def test_should_configure_and_build_when_deployment_is_all(self):
host_target = 'macosx-x86_64'
args = self.default_args()
args.build_osx = True
args.build_ios_device = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target, 'iphoneos-arm64']
args.build_stdlib_deployment_targets = 'all'
hsc = HostSpecificConfiguration(host_target, args)
self.assertEqual(len(hsc.sdks_to_configure), 2)
self.assertIn('OSX', hsc.sdks_to_configure)
self.assertIn('IOS', hsc.sdks_to_configure)
self.assertEqual(len(hsc.swift_stdlib_build_targets), 2)
self.assertIn('swift-test-stdlib-macosx-x86_64',
hsc.swift_stdlib_build_targets)
self.assertIn('swift-test-stdlib-iphoneos-arm64',
hsc.swift_stdlib_build_targets)
def test_should_only_deployment_if_specified(self):
host_target = 'macosx-x86_64'
args = self.default_args()
args.build_osx = True
args.build_ios_device = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target, 'iphoneos-arm64']
args.build_stdlib_deployment_targets = ['iphoneos-arm64']
hsc = HostSpecificConfiguration(host_target, args)
self.assertEqual(len(hsc.sdks_to_configure), 2)
self.assertIn('OSX', hsc.sdks_to_configure)
self.assertIn('IOS', hsc.sdks_to_configure)
self.assertEqual(len(hsc.swift_stdlib_build_targets), 1)
self.assertIn('swift-test-stdlib-iphoneos-arm64',
hsc.swift_stdlib_build_targets)
def test_should_configure_and_build_when_cross_compiling(self):
args = self.default_args()
args.build_ios_device = True
args.host_target = 'macosx-x86_64'
hsc = HostSpecificConfiguration('iphoneos-arm64', args)
self.assertEqual(len(hsc.sdks_to_configure), 1)
self.assertIn('IOS', hsc.sdks_to_configure)
self.assertEqual(len(hsc.swift_stdlib_build_targets), 1)
self.assertIn('swift-test-stdlib-iphoneos-arm64',
hsc.swift_stdlib_build_targets)
def generate_should_skip_building_platform(
host_target, sdk_name, build_target, build_arg_name):
def test(self):
args = self.default_args()
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
before = HostSpecificConfiguration(host_target, args)
self.assertIn(sdk_name, before.sdks_to_configure)
self.assertNotIn(build_target, before.swift_stdlib_build_targets)
setattr(args, build_arg_name, True)
after = HostSpecificConfiguration(host_target, args)
self.assertIn(sdk_name, after.sdks_to_configure)
self.assertIn(build_target, after.swift_stdlib_build_targets)
return test
test_should_skip_building_android =\
generate_should_skip_building_platform(
'android-armv7',
'ANDROID',
'swift-test-stdlib-android-armv7',
'build_android')
test_should_skip_building_cygwin =\
generate_should_skip_building_platform(
'cygwin-x86_64',
'CYGWIN',
'swift-test-stdlib-cygwin-x86_64',
'build_cygwin')
test_should_skip_building_freebsd =\
generate_should_skip_building_platform(
'freebsd-x86_64',
'FREEBSD',
'swift-test-stdlib-freebsd-x86_64',
'build_freebsd')
test_should_skip_building_ios =\
generate_should_skip_building_platform(
'iphoneos-arm64',
'IOS',
'swift-test-stdlib-iphoneos-arm64',
'build_ios_device')
test_should_skip_building_ios_sim =\
generate_should_skip_building_platform(
'iphonesimulator-x86_64',
'IOS_SIMULATOR',
'swift-test-stdlib-iphonesimulator-x86_64',
'build_ios_simulator')
test_should_skip_building_linux =\
generate_should_skip_building_platform(
'linux-x86_64',
'LINUX',
'swift-test-stdlib-linux-x86_64',
'build_linux')
test_should_skip_building_osx =\
generate_should_skip_building_platform(
'macosx-x86_64',
'OSX',
'swift-test-stdlib-macosx-x86_64',
'build_osx')
test_should_skip_building_tvos =\
generate_should_skip_building_platform(
'appletvos-arm64',
'TVOS',
'swift-test-stdlib-appletvos-arm64',
'build_tvos_device')
test_should_skip_building_tvos_sim =\
generate_should_skip_building_platform(
'appletvsimulator-x86_64', 'TVOS_SIMULATOR',
'swift-test-stdlib-appletvsimulator-x86_64',
'build_tvos_simulator')
test_should_skip_building_watchos =\
generate_should_skip_building_platform(
'watchos-armv7k',
'WATCHOS',
'swift-test-stdlib-watchos-armv7k',
'build_watchos_device')
test_should_skip_building_watchos_sim =\
generate_should_skip_building_platform(
'watchsimulator-i386',
'WATCHOS_SIMULATOR',
'swift-test-stdlib-watchsimulator-i386',
'build_watchos_simulator')
def generate_should_build_full_targets_when_test(test_arg_name):
def test(self):
host_target = 'macosx-x86_64'
args = self.default_args()
args.build_osx = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
before = HostSpecificConfiguration(host_target, args)
self.assertIn('swift-test-stdlib-macosx-x86_64',
before.swift_stdlib_build_targets)
self.assertNotIn('swift-stdlib-macosx-x86_64',
before.swift_stdlib_build_targets)
setattr(args, test_arg_name, True)
after = HostSpecificConfiguration(host_target, args)
self.assertIn('swift-stdlib-macosx-x86_64',
after.swift_stdlib_build_targets)
self.assertNotIn('swift-test-stdlib-macosx-x86_64',
after.swift_stdlib_build_targets)
return test
test_should_build_full_targets_when_unittest_extra =\
generate_should_build_full_targets_when_test(
'build_swift_stdlib_unittest_extra')
test_should_build_full_targets_when_validation_test =\
generate_should_build_full_targets_when_test(
'validation_test')
test_should_build_full_targets_when_long_test =\
generate_should_build_full_targets_when_test(
'long_test')
test_should_build_full_targets_when_stress_test =\
generate_should_build_full_targets_when_test(
'stress_test')
def generate_should_skip_testing_platform(
host_target, build_arg_name, test_arg_name):
def test(self):
args = self.default_args()
setattr(args, build_arg_name, True)
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
before = HostSpecificConfiguration(host_target, args)
self.assertEqual(len(before.swift_test_run_targets), 0)
setattr(args, test_arg_name, True)
after = HostSpecificConfiguration(host_target, args)
self.assertIn('check-swift-{}'.format(host_target),
after.swift_test_run_targets)
return test
test_should_skip_testing_android =\
generate_should_skip_testing_platform(
'android-armv7',
'build_android',
'test_android')
test_should_skip_testing_cygwin =\
generate_should_skip_testing_platform(
'cygwin-x86_64',
'build_cygwin',
'test_cygwin')
test_should_skip_testing_freebsd =\
generate_should_skip_testing_platform(
'freebsd-x86_64',
'build_freebsd',
'test_freebsd')
# NOTE: test_ios_host is not supported in open-source Swift
test_should_skip_testing_ios_sim =\
generate_should_skip_testing_platform(
'iphonesimulator-x86_64',
'build_ios_simulator',
'test_ios_simulator')
test_should_skip_testing_linux =\
generate_should_skip_testing_platform(
'linux-x86_64',
'build_linux',
'test_linux')
test_should_skip_testing_osx =\
generate_should_skip_testing_platform(
'macosx-x86_64',
'build_osx',
'test_osx')
# NOTE: test_tvos_host is not supported in open-source Swift
test_should_skip_testing_tvos_sim =\
generate_should_skip_testing_platform(
'appletvsimulator-x86_64',
'build_tvos_simulator',
'test_tvos_simulator')
# NOTE: test_watchos_host is not supported in open-source Swift
test_should_skip_testing_watchos_sim =\
generate_should_skip_testing_platform(
'watchsimulator-i386',
'build_watchos_simulator',
'test_watchos_simulator')
def test_should_skip_testing_32bit_ios(self):
host_target = 'iphonesimulator-i386'
args = self.default_args()
args.build_ios_simulator = True
args.test_ios_simulator = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
before = HostSpecificConfiguration(host_target, args)
self.assertEqual(len(before.swift_test_run_targets), 0)
args.test_ios_32bit_simulator = True
after = HostSpecificConfiguration(host_target, args)
self.assertIn('check-swift-iphonesimulator-i386',
after.swift_test_run_targets)
def generate_should_allow_testing_only_host(
host_target, build_arg_name, test_arg_name, host_test_arg_name):
def test(self):
args = self.default_args()
setattr(args, build_arg_name, True)
setattr(args, test_arg_name, True)
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
before = HostSpecificConfiguration(host_target, args)
self.assertIn('check-swift-{}'.format(host_target),
before.swift_test_run_targets)
setattr(args, host_test_arg_name, True)
after = HostSpecificConfiguration(host_target, args)
self.assertIn(
'check-swift-only_non_executable-{}'.format(host_target),
after.swift_test_run_targets)
return test
test_should_allow_testing_only_host_android =\
generate_should_allow_testing_only_host(
'android-armv7',
'build_android',
'test_android',
'test_android_host')
# NOTE: test_ios_host is not supported in open-source Swift
# NOTE: test_tvos_host is not supported in open-source Swift
# NOTE: test_watchos_host is not supported in open-source Swift
def test_should_allow_testing_only_executable_tests(self):
args = self.default_args()
args.build_osx = True
args.test_osx = True
args.host_target = 'macosx-x86_64'
args.stdlib_deployment_targets = ['macosx-x86_64']
args.build_stdlib_deployment_targets = 'all'
before = HostSpecificConfiguration('macosx-x86_64', args)
self.assertIn('check-swift-macosx-x86_64',
before.swift_test_run_targets)
args.only_executable_test = True
after = HostSpecificConfiguration('macosx-x86_64', args)
self.assertIn('check-swift-only_executable-macosx-x86_64',
after.swift_test_run_targets)
def test_should_allow_testing_only_non_executable_tests(self):
args = self.default_args()
args.build_osx = True
args.test_osx = True
args.host_target = 'macosx-x86_64'
args.stdlib_deployment_targets = ['macosx-x86_64']
args.build_stdlib_deployment_targets = 'all'
before = HostSpecificConfiguration('macosx-x86_64', args)
self.assertIn('check-swift-macosx-x86_64',
before.swift_test_run_targets)
args.only_non_executable_test = True
after = HostSpecificConfiguration('macosx-x86_64', args)
self.assertIn('check-swift-only_non_executable-macosx-x86_64',
after.swift_test_run_targets)
def generate_should_build_benchmarks(host_target, build_arg_name):
def test(self):
args = self.default_args()
setattr(args, build_arg_name, True)
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
with_benchmark = HostSpecificConfiguration(host_target, args)
self.assertIn('swift-benchmark-{}'.format(host_target),
with_benchmark.swift_benchmark_build_targets)
self.assertNotIn('check-swift-benchmark-{}'.format(host_target),
with_benchmark.swift_benchmark_run_targets)
args.benchmark = True
running_benchmarks = HostSpecificConfiguration(host_target, args)
self.assertIn('swift-benchmark-{}'.format(host_target),
running_benchmarks.swift_benchmark_build_targets)
self.assertIn('check-swift-benchmark-{}'.format(host_target),
running_benchmarks.swift_benchmark_run_targets)
args.build_external_benchmarks = True
with_external_benchmarks = HostSpecificConfiguration(host_target,
args)
self.assertIn(
'swift-benchmark-{}'.format(host_target),
with_external_benchmarks.swift_benchmark_build_targets)
self.assertIn(
'swift-benchmark-{}-external'.format(host_target),
with_external_benchmarks.swift_benchmark_build_targets)
self.assertIn('check-swift-benchmark-{}'.format(host_target),
with_external_benchmarks.swift_benchmark_run_targets)
self.assertIn(
'check-swift-benchmark-{}-external'.format(host_target),
with_external_benchmarks.swift_benchmark_run_targets)
args.benchmark = False
not_running_benchmarks = HostSpecificConfiguration(host_target,
args)
self.assertIn('swift-benchmark-{}'.format(host_target),
not_running_benchmarks.swift_benchmark_build_targets)
self.assertIn('swift-benchmark-{}-external'.format(host_target),
not_running_benchmarks.swift_benchmark_build_targets)
self.assertNotIn(
'check-swift-benchmark-{}'.format(host_target),
not_running_benchmarks.swift_benchmark_run_targets)
self.assertNotIn(
'check-swift-benchmark-{}-external'.format(host_target),
not_running_benchmarks.swift_benchmark_run_targets)
return test
test_should_build_and_run_benchmarks_osx_x86_64 =\
generate_should_build_benchmarks(
'macosx-x86_64',
'build_osx')
test_should_build_and_run_benchmarks_ios_armv7 =\
generate_should_build_benchmarks(
'iphoneos-armv7',
'build_ios_device')
test_should_build_and_run_benchmarks_ios_arm64 =\
generate_should_build_benchmarks(
'iphoneos-arm64',
'build_ios_device')
test_should_build_and_run_benchmarks_tvos_arm64 =\
generate_should_build_benchmarks(
'appletvos-arm64',
'build_tvos_device')
test_should_build_and_run_benchmarks_watchos_armv7k =\
generate_should_build_benchmarks(
'watchos-armv7k',
'build_watchos_device')
# NOTE: other platforms/architectures do not support benchmarks
def generate_should_test_only_subset(subset_name, subset_arg_name):
def test(self):
host_target = 'macosx-x86_64'
args = self.default_args()
args.build_osx = True
args.test_osx = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
all = 'check-swift-macosx-x86_64'
subset = 'check-swift-{}-macosx-x86_64'.format(subset_name)
before = HostSpecificConfiguration(host_target, args)
self.assertIn(all, before.swift_test_run_targets)
self.assertNotIn(subset, before.swift_test_run_targets)
setattr(args, subset_arg_name, True)
after = HostSpecificConfiguration(host_target, args)
self.assertIn(subset, after.swift_test_run_targets)
self.assertNotIn(all, after.swift_test_run_targets)
return test
test_should_test_only_subset_validation =\
generate_should_test_only_subset('validation', 'validation_test')
test_should_test_only_subset_long =\
generate_should_test_only_subset('only_long', 'long_test')
test_should_test_only_subset_stress =\
generate_should_test_only_subset('only_stress', 'stress_test')
def test_should_test_all_when_validation_long_and_stress(self):
host_target = 'macosx-x86_64'
args = self.default_args()
args.build_osx = True
args.test_osx = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
all = 'check-swift-macosx-x86_64'
subset = 'check-swift-all-macosx-x86_64'
before = HostSpecificConfiguration(host_target, args)
self.assertIn(all, before.swift_test_run_targets)
self.assertNotIn(subset, before.swift_test_run_targets)
args.validation_test = True
args.long_test = True
args.stress_test = True
after = HostSpecificConfiguration(host_target, args)
self.assertIn(subset, after.swift_test_run_targets)
self.assertNotIn(all, after.swift_test_run_targets)
def generate_should_test_only_subset_for_host_only_tests(
subset_name, subset_arg_name):
def test(self):
host_target = 'android-armv7'
args = self.default_args()
args.build_android = True
args.test_android = True
args.test_android_host = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
all = 'check-swift-only_non_executable-android-armv7'
subset = 'check-swift-{}-only_non_executable-android-armv7'\
.format(subset_name)
before = HostSpecificConfiguration(host_target, args)
self.assertIn(all, before.swift_test_run_targets)
self.assertNotIn(subset, before.swift_test_run_targets)
setattr(args, subset_arg_name, True)
after = HostSpecificConfiguration(host_target, args)
self.assertIn(subset, after.swift_test_run_targets)
self.assertNotIn(all, after.swift_test_run_targets)
return test
test_should_test_only_subset_for_host_only_tests_validation =\
generate_should_test_only_subset_for_host_only_tests(
'validation',
'validation_test')
test_should_test_only_subset_for_host_only_tests_long =\
generate_should_test_only_subset_for_host_only_tests(
'only_long',
'long_test')
test_should_test_only_subset_for_host_only_tests_stress =\
generate_should_test_only_subset_for_host_only_tests(
'only_stress',
'stress_test')
def test_should_test_all_when_validation_long_and_stress_with_host_only(
self):
host_target = 'android-armv7'
args = self.default_args()
args.build_android = True
args.test_android = True
args.test_android_host = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
all = 'check-swift-only_non_executable-android-armv7'
subset = 'check-swift-all-only_non_executable-android-armv7'
before = HostSpecificConfiguration(host_target, args)
self.assertIn(all, before.swift_test_run_targets)
self.assertNotIn(subset, before.swift_test_run_targets)
args.validation_test = True
args.long_test = True
args.stress_test = True
after = HostSpecificConfiguration(host_target, args)
self.assertIn(subset, after.swift_test_run_targets)
self.assertNotIn(all, after.swift_test_run_targets)
def generate_should_test_optimizations(
optimize_name, optimize_arg_name):
def test(self):
host_target = 'macosx-x86_64'
args = self.default_args()
args.build_osx = True
args.test_osx = True
args.host_target = host_target
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
target = 'check-swift-{}-macosx-x86_64'.format(optimize_name)
before = HostSpecificConfiguration(host_target, args)
self.assertNotIn(target, before.swift_test_run_targets)
setattr(args, optimize_arg_name, True)
after = HostSpecificConfiguration(host_target, args)
self.assertIn(target, after.swift_test_run_targets)
return test
test_should_test_optimizations =\
generate_should_test_optimizations(
'optimize',
'test_optimized')
test_should_test_optimizations_size =\
generate_should_test_optimizations(
'optimize_size',
'test_optimize_for_size')
test_should_test_optimizations_none_implicit_dynamic =\
generate_should_test_optimizations(
'optimize_none_with_implicit_dynamic',
'test_optimize_none_with_implicit_dynamic')
def test_should_not_test_optimizations_when_testing_only_host(self):
host_target = 'android-armv7'
args = self.default_args()
args.host_target = host_target
args.build_android = True
args.test_android = True
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
args.test_optimized = True
args.test_optimize_for_size = True
args.test_optimize_none_with_implicit_dynamic = True
before = HostSpecificConfiguration(host_target, args)
self.assertIn('check-swift-optimize-android-armv7',
before.swift_test_run_targets)
self.assertIn('check-swift-optimize_size-android-armv7',
before.swift_test_run_targets)
self.assertIn(
'check-swift-optimize_none_with_implicit_dynamic-android-armv7',
before.swift_test_run_targets)
args.test_android_host = True
after = HostSpecificConfiguration(host_target, args)
self.assertNotIn('check-swift-optimize-android-armv7',
after.swift_test_run_targets)
self.assertNotIn(
'check-swift-optimize_size-android-armv7',
after.swift_test_run_targets)
self.assertNotIn(
'check-swift-optimize_none_with_implicit_dynamic-android-armv7',
after.swift_test_run_targets)
def test_should_test_optimizations_with_subsets(self):
host_target = 'android-armv7'
args = self.default_args()
args.host_target = host_target
args.build_android = True
args.test_android = True
args.stdlib_deployment_targets = [host_target]
args.build_stdlib_deployment_targets = 'all'
args.test_optimized = True
args.test_optimize_for_size = True
args.test_optimize_none_with_implicit_dynamic = True
args.long_test = True
target_name = 'check-swift-only_long-{}-android-armv7'
before = HostSpecificConfiguration(host_target, args)
self.assertIn(target_name.format('optimize'),
before.swift_test_run_targets)
self.assertIn(target_name.format('optimize_size'),
before.swift_test_run_targets)
self.assertIn(target_name.format(
'optimize_none_with_implicit_dynamic'),
before.swift_test_run_targets)
def default_args(self):
return Namespace(
benchmark=False,
build_android=False,
build_cygwin=False,
build_external_benchmarks=False,
build_freebsd=False,
build_ios_device=False,
build_ios_simulator=False,
build_linux=False,
build_osx=False,
build_swift_stdlib_unittest_extra=False,
build_tvos_device=False,
build_tvos_simulator=False,
build_watchos_device=False,
build_watchos_simulator=False,
maccatalyst=False,
maccatalyst_ios_tests=False,
long_test=False,
only_executable_test=False,
only_non_executable_test=False,
stress_test=False,
test_android=False,
test_android_host=False,
test_cygwin=False,
test_freebsd=False,
test_ios_host=False,
test_ios_simulator=False,
test_ios_32bit_simulator=False,
test_linux=False,
test_optimize_for_size=False,
test_optimize_none_with_implicit_dynamic=False,
test_optimized=False,
test_osx=False,
test_tvos_host=False,
test_tvos_simulator=False,
test_watchos_host=False,
test_watchos_simulator=False,
validation_test=False)
if __name__ == '__main__':
unittest.main()
|
jckarter/swift
|
utils/swift_build_support/tests/test_host_specific_configuration.py
|
Python
|
apache-2.0
| 27,447
|
"""Auto-generated file, do not edit by hand. IO metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_IO = PhoneMetadata(id='IO', country_code=246, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='3\\d{6}', possible_number_pattern='\\d{7}'),
fixed_line=PhoneNumberDesc(national_number_pattern='37\\d{5}', possible_number_pattern='\\d{7}', example_number='3709100'),
mobile=PhoneNumberDesc(national_number_pattern='38\\d{5}', possible_number_pattern='\\d{7}', example_number='3801234'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
number_format=[NumberFormat(pattern='(\\d{3})(\\d{4})', format='\\1 \\2')])
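# Region 'IO' is the British Indian Ocean Territory (country calling code
# +246); per the patterns above, numbers are seven digits, with fixed lines
# starting 37 and mobiles starting 38.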
|
roubert/python-phonenumbers
|
python/phonenumbers/data/region_IO.py
|
Python
|
apache-2.0
| 1,488
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
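# NOTE: this is a convergence scenario script rather than an importable
# module; names such as Template, RsrcDef, GetAtt, GetRes, engine, reality,
# test and verify are assumed to be injected into the namespace by the
# scenario test runner before the file is executed. The scenario below
# creates a stack, updates it, rolls the update back, verifies the original
# template is restored, and finally deletes the stack.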
def check_c_count(expected_count):
test.assertEqual(expected_count,
len(reality.resources_by_logical_name('C')))
example_template = Template({
'A': RsrcDef({'a': 'initial'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(5)
engine.call(verify, example_template)
example_template2 = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.update_stack('foo', example_template2)
engine.noop(4)
engine.rollback_stack('foo')
engine.call(check_c_count, 2)
engine.noop(11)
engine.call(verify, example_template)
engine.delete_stack('foo')
engine.noop(12)
engine.call(verify, Template({}))
|
dims/heat
|
heat/tests/convergence/scenarios/update_replace_rollback.py
|
Python
|
apache-2.0
| 1,550
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for return_statements module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.converters import functions
from tensorflow.python.autograph.converters import return_statements
from tensorflow.python.autograph.core import converter_testing
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
class SingleReturnTest(converter_testing.TestCase):
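  # assertTransformedEquivalent applies the functions and return_statements
  # converters to f and checks that the transformed function returns the same
  # values as the original for the given inputs.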
def assertTransformedEquivalent(self, f, *inputs):
tr = self.transform(f, (functions, return_statements))
self.assertEqual(f(*inputs), tr(*inputs))
def test_straightline(self):
def f(x):
return x * x
self.assertTransformedEquivalent(f, 2)
def test_superfluous_returns(self):
def f():
retval = 1
return retval
retval = 2 # pylint:disable=unreachable
return retval
self.assertTransformedEquivalent(f)
def test_superfluous_returns_adjacent(self):
def f():
return 1
return 2 # pylint:disable=unreachable
self.assertTransformedEquivalent(f)
def test_conditional(self):
def f(x):
if x > 0:
return x
else:
return x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_missing_else(self):
def f(x):
if x > 0:
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_missing_else_then_default(self):
def f(x):
if x > 0:
return x
return x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_else_only_then_default(self):
def f(x):
if x < 0:
x *= x
else:
return x
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_nested(self):
def f(x):
if x > 0:
if x < 5:
return x
else:
return x * x
else:
return x * x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
self.assertTransformedEquivalent(f, 5)
def test_context_manager(self):
def f(x):
with ops.name_scope(''):
return x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_context_manager_in_conditional(self):
def f(x):
if x > 0:
with ops.name_scope(''):
return x * x
else:
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
  def test_conditional_in_context_manager(self):
def f(x):
with ops.name_scope(''):
if x > 0:
return x * x
else:
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_no_return(self):
def f(x):
x *= x
self.assertTransformedEquivalent(f, 2)
def test_nested_function(self):
def f(x):
def inner_fn(y):
if y > 0:
return y * y
else:
return y
return inner_fn(x)
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_nested_function_in_control_flow(self):
def f(x):
if x:
def inner_fn(y):
return y
inner_fn(x)
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_for_loop(self):
def f(n):
for _ in range(n):
return 1
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, 0)
def test_while_loop(self):
def f(n):
i = 0
s = 0
while i < n:
i += 1
s += i
if s > 4:
return s
return -1
self.assertTransformedEquivalent(f, 0)
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, 4)
def test_null_return(self):
def f(n):
if n > 4:
return
return
self.assertTransformedEquivalent(f, 4)
self.assertTransformedEquivalent(f, 5)
def test_nested_multiple_withs(self):
def f(x):
v = []
while x > 0:
x -= 1
with ops.name_scope(''):
if x % 2 == 0:
return v
with ops.name_scope(''):
v.append(x)
v.append(x)
return v
self.assertTransformedEquivalent(f, 0)
self.assertTransformedEquivalent(f, 1)
self.assertTransformedEquivalent(f, 3)
self.assertTransformedEquivalent(f, 4)
def test_multiple_returns_in_nested_scope(self):
def f(a):
v = []
for x in a:
x -= 1
if x > 100:
return v
try:
raise ValueError('intentional')
        except ValueError:
return v
v.append(x)
return v
self.assertTransformedEquivalent(f, [])
self.assertTransformedEquivalent(f, [1])
self.assertTransformedEquivalent(f, [2])
self.assertTransformedEquivalent(f, [1, 2, 3])
if __name__ == '__main__':
test.main()
|
frreiss/tensorflow-fred
|
tensorflow/python/autograph/converters/return_statements_test.py
|
Python
|
apache-2.0
| 5,879
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""FeatureColumns: tools for ingesting and representing features."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import,g-bad-import-order
from tensorflow.python.feature_column.feature_column import *
from tensorflow.python.feature_column.feature_column_v2 import *
from tensorflow.python.feature_column.sequence_feature_column import *
from tensorflow.python.feature_column.serialization import *
# We import dense_features_v2 first so that the V1 DenseFeatures is the default
# if users directly import feature_column_lib.
from tensorflow.python.keras.feature_column.dense_features_v2 import *
from tensorflow.python.keras.feature_column.dense_features import *
from tensorflow.python.keras.feature_column.sequence_feature_column import *
# pylint: enable=unused-import,line-too-long
|
annarev/tensorflow
|
tensorflow/python/feature_column/feature_column_lib.py
|
Python
|
apache-2.0
| 1,597
|
# $Id$
# Christopher Lee clee@users.sourceforge.net
# based upon pdfmetrics.py by Andy Robinson
from . import fontinfo
from . import latin1MetricsCache
##############################################################
#
# PDF Metrics
# This is a preamble that gives us a stringWidth function.
# It would normally load and cache AFM files, but it does not need to,
# since the metrics for the standard fonts are hard-coded below.
##############################################################
_stdenc_widths = {
'courier':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'courier-bold':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'courier-boldoblique':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'courier-oblique':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600,
600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 600, 600,
600, 0, 600, 600, 600, 600, 600, 600, 600, 600, 0, 600, 0, 600, 600, 600, 600, 600, 600, 600,
600, 0, 600, 600, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 600, 0,
600, 0, 0, 0, 0, 600, 600, 600, 600, 0, 0, 0, 0, 0, 600, 0, 0, 0, 600, 0, 0, 600, 600, 600, 600,
0, 0, 600],
'helvetica':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 278, 355, 556, 556, 889, 667, 222, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556,
556, 556, 556, 556, 556, 556, 556, 278, 278, 584, 584, 584, 556, 1015, 667, 667, 722, 722, 667,
611, 778, 722, 278, 500, 667, 556, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667,
667, 611, 278, 278, 278, 469, 556, 222, 556, 556, 500, 556, 556, 278, 556, 556, 222, 222, 500,
222, 833, 556, 556, 556, 556, 333, 500, 278, 556, 500, 722, 500, 500, 500, 334, 260, 334, 584, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 556, 556, 167, 556, 556, 556, 556, 191, 333, 556, 333, 333, 500, 500, 0, 556, 556, 556,
278, 0, 537, 350, 222, 333, 333, 556, 1000, 1000, 0, 611, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000,
0, 370, 0, 0, 0, 0, 556, 778, 1000, 365, 0, 0, 0, 0, 0, 889, 0, 0, 0, 278, 0, 0, 222, 611, 944,
611, 0, 0, 834],
'helvetica-bold':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 333, 474, 556, 556, 889, 722, 278, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556,
556, 556, 556, 556, 556, 556, 556, 333, 333, 584, 584, 584, 611, 975, 722, 722, 722, 722, 667,
611, 778, 722, 278, 556, 722, 611, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667,
667, 611, 333, 278, 333, 584, 556, 278, 556, 611, 556, 611, 556, 333, 611, 611, 278, 278, 556,
278, 889, 611, 611, 611, 611, 389, 556, 333, 611, 556, 778, 556, 556, 500, 389, 280, 389, 584, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 556, 556, 167, 556, 556, 556, 556, 238, 500, 556, 333, 333, 611, 611, 0, 556, 556, 556,
278, 0, 556, 350, 278, 500, 500, 556, 1000, 1000, 0, 611, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000,
0, 370, 0, 0, 0, 0, 611, 778, 1000, 365, 0, 0, 0, 0, 0, 889, 0, 0, 0, 278, 0, 0, 278, 611, 944,
611, 0, 0, 834],
'helvetica-boldoblique':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 333, 474, 556, 556, 889, 722, 278, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556,
556, 556, 556, 556, 556, 556, 556, 333, 333, 584, 584, 584, 611, 975, 722, 722, 722, 722, 667,
611, 778, 722, 278, 556, 722, 611, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667,
667, 611, 333, 278, 333, 584, 556, 278, 556, 611, 556, 611, 556, 333, 611, 611, 278, 278, 556,
278, 889, 611, 611, 611, 611, 389, 556, 333, 611, 556, 778, 556, 556, 500, 389, 280, 389, 584, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 556, 556, 167, 556, 556, 556, 556, 238, 500, 556, 333, 333, 611, 611, 0, 556, 556, 556,
278, 0, 556, 350, 278, 500, 500, 556, 1000, 1000, 0, 611, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000,
0, 370, 0, 0, 0, 0, 611, 778, 1000, 365, 0, 0, 0, 0, 0, 889, 0, 0, 0, 278, 0, 0, 278, 611, 944,
611, 0, 0, 834],
'helvetica-oblique':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 278, 355, 556, 556, 889, 667, 222, 333, 333, 389, 584, 278, 333, 278, 278, 556, 556, 556,
556, 556, 556, 556, 556, 556, 556, 278, 278, 584, 584, 584, 556, 1015, 667, 667, 722, 722, 667,
611, 778, 722, 278, 500, 667, 556, 833, 722, 778, 667, 778, 722, 667, 611, 722, 667, 944, 667,
667, 611, 278, 278, 278, 469, 556, 222, 556, 556, 500, 556, 556, 278, 556, 556, 222, 222, 500,
222, 833, 556, 556, 556, 556, 333, 500, 278, 556, 500, 722, 500, 500, 500, 334, 260, 334, 584, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 556, 556, 167, 556, 556, 556, 556, 191, 333, 556, 333, 333, 500, 500, 0, 556, 556, 556,
278, 0, 537, 350, 222, 333, 333, 556, 1000, 1000, 0, 611, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000,
0, 370, 0, 0, 0, 0, 556, 778, 1000, 365, 0, 0, 0, 0, 0, 889, 0, 0, 0, 278, 0, 0, 222, 611, 944,
611, 0, 0, 834],
'symbol':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
250, 333, 713, 500, 549, 833, 778, 439, 333, 333, 500, 549, 250, 549, 250, 278, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 278, 278, 549, 549, 549, 444, 549, 722, 667, 722, 612, 611,
763, 603, 722, 333, 631, 722, 686, 889, 722, 722, 768, 741, 556, 592, 611, 690, 439, 768, 645,
795, 611, 333, 863, 333, 658, 500, 500, 631, 549, 549, 494, 439, 521, 411, 603, 329, 603, 549,
549, 576, 521, 549, 549, 521, 549, 603, 439, 576, 713, 686, 493, 686, 494, 480, 200, 480, 549, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 620, 247, 549, 167, 713, 500, 753, 753, 753, 753, 1042, 987, 603, 987, 603, 400, 549, 411,
549, 549, 713, 494, 460, 549, 549, 549, 549, 1000, 603, 1000, 658, 823, 686, 795, 987, 768, 768,
823, 768, 768, 713, 713, 713, 713, 713, 713, 713, 768, 713, 790, 790, 890, 823, 549, 250, 713,
603, 603, 1042, 987, 603, 987, 603, 494, 329, 790, 790, 786, 713, 384, 384, 384, 384, 384, 384,
494, 494, 494, 494, 0, 329, 274, 686, 686, 686, 384, 384, 384, 384, 384, 384, 494, 494, 790],
'times-bold':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
250, 333, 555, 500, 500, 1000, 833, 333, 333, 333, 500, 570, 250, 333, 250, 278, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 333, 333, 570, 570, 570, 500, 930, 722, 667, 722, 722, 667,
611, 778, 778, 389, 500, 778, 667, 944, 722, 778, 611, 778, 722, 556, 667, 722, 722, 1000, 722,
722, 667, 333, 278, 333, 581, 500, 333, 500, 556, 444, 556, 444, 333, 500, 556, 278, 333, 556,
278, 833, 556, 500, 556, 556, 444, 389, 333, 556, 500, 722, 500, 500, 444, 394, 220, 394, 520, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 500, 500, 167, 500, 500, 500, 500, 278, 500, 500, 333, 333, 556, 556, 0, 500, 500, 500,
250, 0, 540, 350, 333, 500, 500, 500, 1000, 1000, 0, 500, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000,
0, 300, 0, 0, 0, 0, 667, 778, 1000, 330, 0, 0, 0, 0, 0, 722, 0, 0, 0, 278, 0, 0, 278, 500, 722,
556, 0, 0, 750],
'times-bolditalic':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
250, 389, 555, 500, 500, 833, 778, 333, 333, 333, 500, 570, 250, 333, 250, 278, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 333, 333, 570, 570, 570, 500, 832, 667, 667, 667, 722, 667,
667, 722, 778, 389, 500, 667, 611, 889, 722, 722, 611, 722, 667, 556, 611, 722, 667, 889, 667,
611, 611, 333, 278, 333, 570, 500, 333, 500, 500, 444, 500, 444, 333, 500, 556, 278, 278, 500,
278, 778, 556, 500, 500, 500, 389, 389, 278, 556, 444, 667, 500, 444, 389, 348, 220, 348, 570, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 389, 500, 500, 167, 500, 500, 500, 500, 278, 500, 500, 333, 333, 556, 556, 0, 500, 500, 500,
250, 0, 500, 350, 333, 500, 500, 500, 1000, 1000, 0, 500, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 944, 0,
266, 0, 0, 0, 0, 611, 722, 944, 300, 0, 0, 0, 0, 0, 722, 0, 0, 0, 278, 0, 0, 278, 500, 722, 500,
0, 0, 750],
'times-italic':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
250, 333, 420, 500, 500, 833, 778, 333, 333, 333, 500, 675, 250, 333, 250, 278, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 333, 333, 675, 675, 675, 500, 920, 611, 611, 667, 722, 611,
611, 722, 722, 333, 444, 667, 556, 833, 667, 722, 611, 722, 611, 500, 556, 722, 611, 833, 611,
556, 556, 389, 278, 389, 422, 500, 333, 500, 500, 444, 500, 444, 278, 500, 500, 278, 278, 444,
278, 722, 500, 500, 500, 500, 389, 389, 278, 500, 444, 667, 444, 444, 389, 400, 275, 400, 541, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 389, 500, 500, 167, 500, 500, 500, 500, 214, 556, 500, 333, 333, 500, 500, 0, 500, 500, 500,
250, 0, 523, 350, 333, 556, 556, 500, 889, 1000, 0, 500, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 889, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 889, 0,
276, 0, 0, 0, 0, 556, 722, 944, 310, 0, 0, 0, 0, 0, 667, 0, 0, 0, 278, 0, 0, 278, 500, 667, 500,
0, 0, 750],
'times-roman':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
250, 333, 408, 500, 500, 833, 778, 333, 333, 333, 500, 564, 250, 333, 250, 278, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 278, 278, 564, 564, 564, 444, 921, 722, 667, 667, 722, 611,
556, 722, 722, 333, 389, 722, 611, 889, 722, 722, 556, 722, 667, 556, 611, 722, 722, 944, 722,
722, 611, 333, 278, 333, 469, 500, 333, 444, 500, 444, 500, 444, 333, 500, 500, 278, 278, 500,
278, 778, 500, 500, 500, 500, 333, 389, 278, 500, 500, 722, 500, 500, 444, 480, 200, 480, 541, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 333, 500, 500, 167, 500, 500, 500, 500, 180, 444, 500, 333, 333, 556, 556, 0, 500, 500, 500,
250, 0, 453, 350, 333, 444, 444, 500, 1000, 1000, 0, 444, 0, 333, 333, 333, 333, 333, 333, 333,
333, 0, 333, 333, 0, 333, 333, 333, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 889, 0,
276, 0, 0, 0, 0, 611, 722, 889, 310, 0, 0, 0, 0, 0, 667, 0, 0, 0, 278, 0, 0, 278, 500, 722, 500,
0, 0, 750],
'zapfdingbats':
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
278, 974, 961, 974, 980, 719, 789, 790, 791, 690, 960, 939, 549, 855, 911, 933, 911, 945, 974,
755, 846, 762, 761, 571, 677, 763, 760, 759, 754, 494, 552, 537, 577, 692, 786, 788, 788, 790,
793, 794, 816, 823, 789, 841, 823, 833, 816, 831, 923, 744, 723, 749, 790, 792, 695, 776, 768,
792, 759, 707, 708, 682, 701, 826, 815, 789, 789, 707, 687, 696, 689, 786, 787, 713, 791, 785,
791, 873, 761, 762, 762, 759, 759, 892, 892, 788, 784, 438, 138, 277, 415, 392, 392, 668, 668, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 732, 544, 544, 910, 667, 760, 760, 776, 595, 694, 626, 788, 788, 788, 788, 788, 788, 788, 788,
788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788,
788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 788, 894, 838, 1016, 458, 748, 924,
748, 918, 927, 928, 928, 834, 873, 828, 924, 924, 917, 930, 931, 463, 883, 836, 836, 867, 867,
696, 696, 874, 0, 874, 760, 946, 771, 865, 771, 888, 967, 888, 831, 873, 927, 970, 234]
}
ascent_descent = {'Courier': (629, -157),
'Courier-Bold': (626, -142),
'Courier-BoldOblique': (626, -142),
'Courier-Oblique': (629, -157),
'Helvetica': (718, -207),
'Helvetica-Bold': (718, -207),
'Helvetica-BoldOblique': (718, -207),
'Helvetica-Oblique': (718, -207),
'Symbol': (0, 0),
'Times-Bold': (676, -205),
'Times-BoldItalic': (699, -205),
'Times-Italic': (683, -205),
'Times-Roman': (683, -217),
'ZapfDingbats': (0, 0)}
_Widths = {'StandardEncoding': _stdenc_widths, 'Latin1Encoding': latin1MetricsCache.FontWidths}
def stringwidth(text, font, encoding):
if font in fontinfo.NonRomanFonts:
widths = _Widths['StandardEncoding'][font.lower()]
else:
try:
widths = _Widths[encoding][font.lower()]
except Exception:
raise KeyError("Improper encoding {0} or font name {1}".format(encoding, font))
w = 0
for char in text:
chr_idx = ord(char)
if chr_idx < len(widths):
chr_width = widths[chr_idx]
else:
chr_width = max(widths)
w = w + chr_width
return w
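# Illustrative example (not part of the original module): with the table
# above, stringwidth('Hello', 'helvetica', 'StandardEncoding') sums the
# per-glyph widths 722 + 556 + 222 + 222 + 556 = 2278. Widths are expressed
# in 1/1000 of the point size, so multiply by the font size and divide by
# 1000 to get the width in points.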
|
bp-kelley/rdkit
|
rdkit/sping/PS/psmetrics.py
|
Python
|
bsd-3-clause
| 17,605
|
#!/usr/bin/env python
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities for working with threads and ``Futures``.
``Futures`` are a pattern for concurrent programming introduced in
Python 3.2 in the `concurrent.futures` package. This package defines
a mostly-compatible `Future` class designed for use from coroutines,
as well as some utility functions for interacting with the
`concurrent.futures` package.
"""
from __future__ import absolute_import, division, print_function, with_statement
import functools
import platform
import textwrap
import traceback
import sys
from tornado.log import app_log
from tornado.stack_context import ExceptionStackContext, wrap
from tornado.util import raise_exc_info, ArgReplacer
try:
from concurrent import futures
except ImportError:
futures = None
# Can the garbage collector handle cycles that include __del__ methods?
# This is true in cpython beginning with version 3.4 (PEP 442).
_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and
sys.version_info >= (3, 4))
class ReturnValueIgnoredError(Exception):
pass
# This class and associated code in the future object is derived
# from the Trollius project, a backport of asyncio to Python 2.x - 3.x
class _TracebackLogger(object):
"""Helper to log a traceback upon destruction if not cleared.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
    called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
reference the Future, which references the _TracebackLogger, and
then the _TracebackLogger would be included in a cycle, which is
what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
    activate() is called, a call that is delayed until after all the
    Future's callbacks have run. Usually a Future has at least one
    callback (typically set by 'yield From'), and that callback usually
    retrieves the exception, so the exception never needs to be
    formatted at all.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ('exc_info', 'formatted_tb')
def __init__(self, exc_info):
self.exc_info = exc_info
self.formatted_tb = None
def activate(self):
exc_info = self.exc_info
if exc_info is not None:
self.exc_info = None
self.formatted_tb = traceback.format_exception(*exc_info)
def clear(self):
self.exc_info = None
self.formatted_tb = None
def __del__(self):
if self.formatted_tb:
app_log.error('Future exception was never retrieved: %s',
''.join(self.formatted_tb).rstrip())
class Future(object):
"""Placeholder for an asynchronous result.
A ``Future`` encapsulates the result of an asynchronous
operation. In synchronous applications ``Futures`` are used
to wait for the result from a thread or process pool; in
Tornado they are normally used with `.IOLoop.add_future` or by
yielding them in a `.gen.coroutine`.
`tornado.concurrent.Future` is similar to
`concurrent.futures.Future`, but not thread-safe (and therefore
faster for use with single-threaded event loops).
In addition to ``exception`` and ``set_exception``, methods ``exc_info``
and ``set_exc_info`` are supported to capture tracebacks in Python 2.
The traceback is automatically available in Python 3, but in the
Python 2 futures backport this information is discarded.
This functionality was previously available in a separate class
``TracebackFuture``, which is now a deprecated alias for this class.
.. versionchanged:: 4.0
`tornado.concurrent.Future` is always a thread-unsafe ``Future``
with support for the ``exc_info`` methods. Previously it would
be an alias for the thread-safe `concurrent.futures.Future`
if that package was available and fall back to the thread-unsafe
implementation if it was not.
.. versionchanged:: 4.1
If a `.Future` contains an error but that error is never observed
(by calling ``result()``, ``exception()``, or ``exc_info()``),
a stack trace will be logged when the `.Future` is garbage collected.
This normally indicates an error in the application, but in cases
where it results in undesired logging it may be necessary to
suppress the logging by ensuring that the exception is observed:
``f.add_done_callback(lambda f: f.exception())``.
"""
def __init__(self):
self._done = False
self._result = None
self._exc_info = None
self._log_traceback = False # Used for Python >= 3.4
self._tb_logger = None # Used for Python <= 3.3
self._callbacks = []
# Implement the Python 3.5 Awaitable protocol if possible
# (we can't use return and yield together until py33).
if sys.version_info >= (3, 3):
exec(textwrap.dedent("""
def __await__(self):
return (yield self)
"""))
def cancel(self):
"""Cancel the operation, if possible.
Tornado ``Futures`` do not support cancellation, so this method always
returns False.
"""
return False
def cancelled(self):
"""Returns True if the operation has been cancelled.
Tornado ``Futures`` do not support cancellation, so this method
always returns False.
"""
return False
def running(self):
"""Returns True if this operation is currently running."""
return not self._done
def done(self):
"""Returns True if the future has finished running."""
return self._done
def _clear_tb_log(self):
self._log_traceback = False
if self._tb_logger is not None:
self._tb_logger.clear()
self._tb_logger = None
def result(self, timeout=None):
"""If the operation succeeded, return its result. If it failed,
re-raise its exception.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
"""
self._clear_tb_log()
if self._result is not None:
return self._result
if self._exc_info is not None:
raise_exc_info(self._exc_info)
self._check_done()
return self._result
def exception(self, timeout=None):
"""If the operation raised an exception, return the `Exception`
object. Otherwise returns None.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
"""
self._clear_tb_log()
if self._exc_info is not None:
return self._exc_info[1]
else:
self._check_done()
return None
def add_done_callback(self, fn):
"""Attaches the given callback to the `Future`.
It will be invoked with the `Future` as its argument when the Future
has finished running and its result is available. In Tornado
consider using `.IOLoop.add_future` instead of calling
`add_done_callback` directly.
"""
if self._done:
fn(self)
else:
self._callbacks.append(fn)
def set_result(self, result):
"""Sets the result of a ``Future``.
It is undefined to call any of the ``set`` methods more than once
on the same object.
"""
self._result = result
self._set_done()
def set_exception(self, exception):
"""Sets the exception of a ``Future.``"""
self.set_exc_info(
(exception.__class__,
exception,
getattr(exception, '__traceback__', None)))
def exc_info(self):
"""Returns a tuple in the same format as `sys.exc_info` or None.
.. versionadded:: 4.0
"""
self._clear_tb_log()
return self._exc_info
def set_exc_info(self, exc_info):
"""Sets the exception information of a ``Future.``
Preserves tracebacks on Python 2.
.. versionadded:: 4.0
"""
self._exc_info = exc_info
self._log_traceback = True
if not _GC_CYCLE_FINALIZERS:
self._tb_logger = _TracebackLogger(exc_info)
try:
self._set_done()
finally:
# Activate the logger after all callbacks have had a
# chance to call result() or exception().
if self._log_traceback and self._tb_logger is not None:
self._tb_logger.activate()
self._exc_info = exc_info
def _check_done(self):
if not self._done:
raise Exception("DummyFuture does not support blocking for results")
def _set_done(self):
self._done = True
for cb in self._callbacks:
try:
cb(self)
except Exception:
app_log.exception('Exception in callback %r for %r',
cb, self)
self._callbacks = None
    # On Python 3.3 and older, objects with a destructor that are part of a
    # reference cycle are never destroyed. This is no longer the case on
    # Python 3.4, thanks to PEP 442.
if _GC_CYCLE_FINALIZERS:
def __del__(self):
if not self._log_traceback:
# set_exception() was not called, or result() or exception()
# has consumed the exception
return
tb = traceback.format_exception(*self._exc_info)
app_log.error('Future %r exception was never retrieved: %s',
self, ''.join(tb).rstrip())
TracebackFuture = Future
if futures is None:
FUTURES = Future
else:
FUTURES = (futures.Future, Future)
def is_future(x):
return isinstance(x, FUTURES)
class DummyExecutor(object):
def submit(self, fn, *args, **kwargs):
future = TracebackFuture()
try:
future.set_result(fn(*args, **kwargs))
except Exception:
future.set_exc_info(sys.exc_info())
return future
def shutdown(self, wait=True):
pass
dummy_executor = DummyExecutor()
def run_on_executor(*args, **kwargs):
"""Decorator to run a synchronous method asynchronously on an executor.
The decorated method may be called with a ``callback`` keyword
argument and returns a future.
The `.IOLoop` and executor to be used are determined by the ``io_loop``
and ``executor`` attributes of ``self``. To use different attributes,
pass keyword arguments to the decorator::
@run_on_executor(executor='_thread_pool')
def foo(self):
pass
.. versionchanged:: 4.2
Added keyword arguments to use alternative attributes.
"""
def run_on_executor_decorator(fn):
executor = kwargs.get("executor", "executor")
io_loop = kwargs.get("io_loop", "io_loop")
@functools.wraps(fn)
def wrapper(self, *args, **kwargs):
callback = kwargs.pop("callback", None)
future = getattr(self, executor).submit(fn, self, *args, **kwargs)
if callback:
getattr(self, io_loop).add_future(
future, lambda future: callback(future.result()))
return future
return wrapper
if args and kwargs:
raise ValueError("cannot combine positional and keyword args")
if len(args) == 1:
return run_on_executor_decorator(args[0])
elif len(args) != 0:
raise ValueError("expected 1 argument, got %d", len(args))
return run_on_executor_decorator
_NO_RESULT = object()
def return_future(f):
"""Decorator to make a function that returns via callback return a
`Future`.
The wrapped function should take a ``callback`` keyword argument
and invoke it with one argument when it has finished. To signal failure,
the function can simply raise an exception (which will be
captured by the `.StackContext` and passed along to the ``Future``).
From the caller's perspective, the callback argument is optional.
If one is given, it will be invoked when the function is complete
with `Future.result()` as an argument. If the function fails, the
callback will not be run and an exception will be raised into the
surrounding `.StackContext`.
If no callback is given, the caller should use the ``Future`` to
wait for the function to complete (perhaps by yielding it in a
`.gen.engine` function, or passing it to `.IOLoop.add_future`).
Usage:
.. testcode::
@return_future
def future_func(arg1, arg2, callback):
# Do stuff (possibly asynchronous)
callback(result)
@gen.engine
def caller(callback):
yield future_func(arg1, arg2)
callback()
..
Note that ``@return_future`` and ``@gen.engine`` can be applied to the
same function, provided ``@return_future`` appears first. However,
consider using ``@gen.coroutine`` instead of this combination.
"""
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = TracebackFuture()
callback, args, kwargs = replacer.replace(
lambda value=_NO_RESULT: future.set_result(value),
args, kwargs)
def handle_error(typ, value, tb):
future.set_exc_info((typ, value, tb))
return True
exc_info = None
with ExceptionStackContext(handle_error):
try:
result = f(*args, **kwargs)
if result is not None:
raise ReturnValueIgnoredError(
"@return_future should not be used with functions "
"that return values")
except:
exc_info = sys.exc_info()
raise
if exc_info is not None:
# If the initial synchronous part of f() raised an exception,
# go ahead and raise it to the caller directly without waiting
# for them to inspect the Future.
future.result()
# If the caller passed in a callback, schedule it to be called
# when the future resolves. It is important that this happens
# just before we return the future, or else we risk confusing
# stack contexts with multiple exceptions (one here with the
# immediate exception, and again when the future resolves and
# the callback triggers its exception by calling future.result()).
if callback is not None:
def run_callback(future):
result = future.result()
if result is _NO_RESULT:
callback()
else:
callback(future.result())
future.add_done_callback(wrap(run_callback))
return future
return wrapper
def chain_future(a, b):
"""Chain two futures together so that when one completes, so does the other.
The result (success or failure) of ``a`` will be copied to ``b``, unless
``b`` has already been completed or cancelled by the time ``a`` finishes.
"""
def copy(future):
assert future is a
if b.done():
return
if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture)
and a.exc_info() is not None):
b.set_exc_info(a.exc_info())
elif a.exception() is not None:
b.set_exception(a.exception())
else:
b.set_result(a.result())
a.add_done_callback(copy)
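# A minimal illustrative sketch (not part of the original module):
# ``chain_future`` copies whatever ``a`` resolves to (result or exception)
# onto ``b``.
#
#     a, b = TracebackFuture(), TracebackFuture()
#     chain_future(a, b)
#     a.set_result(42)
#     assert b.result() == 42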
|
andyaguiar/tornado
|
tornado/concurrent.py
|
Python
|
apache-2.0
| 18,212
|
# coding: utf-8
#
# Copyright 2015 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from core.domain import classifier_services
from core.tests import test_utils
# pylint: disable=protected-access
class StringClassifierUnitTests(test_utils.GenericTestBase):
_EXAMPLES_TRAIN = [
['i eat fish and vegetables', ['food']],
['fish are pets', ['pets']]
]
_NEW_EXAMPLES_TRAIN = [
['my kitten eats fish', ['food', 'pets']]
]
_EXAMPLES_TEST = [
'i only eat fish and vegetables',
'pets are friends',
'a b c d e f g h i j k l m n o p q r s t u v w x y z'
]
def setUp(self):
super(StringClassifierUnitTests, self).setUp()
self.string_classifier = classifier_services.StringClassifier()
self.string_classifier.load_examples(self._EXAMPLES_TRAIN)
def _validate_instance(self):
self.assertIn('_alpha', dir(self.string_classifier))
self.assertIn('_beta', dir(self.string_classifier))
self.assertIn('_prediction_threshold', dir(self.string_classifier))
self.assertIn('_training_iterations', dir(self.string_classifier))
self.assertIn('_prediction_iterations', dir(self.string_classifier))
for d in xrange(self.string_classifier._num_docs):
self.assertEquals(
len(self.string_classifier._l_dp[d]),
len(self.string_classifier._w_dp[d]))
self.assertEquals(
len(self.string_classifier._label_to_id),
self.string_classifier._num_labels)
self.assertEquals(
len(self.string_classifier._word_to_id),
self.string_classifier._num_words)
self.assertEquals(
len(self.string_classifier._w_dp),
self.string_classifier._num_docs)
self.assertEquals(
len(self.string_classifier._b_dl),
self.string_classifier._num_docs)
if self.string_classifier._num_docs > 0:
self.assertEquals(
len(self.string_classifier._b_dl[0]),
self.string_classifier._num_labels)
self.assertEquals(
len(self.string_classifier._l_dp),
self.string_classifier._num_docs)
self.assertEquals(
len(self.string_classifier._c_dl),
self.string_classifier._num_docs)
if self.string_classifier._num_docs > 0:
self.assertEquals(
len(self.string_classifier._c_dl[0]),
self.string_classifier._num_labels)
self.assertEquals(
len(self.string_classifier._c_lw),
self.string_classifier._num_labels)
if self.string_classifier._num_labels > 0:
self.assertEquals(
len(self.string_classifier._c_lw[0]),
self.string_classifier._num_words)
self.assertEquals(
len(self.string_classifier._c_l),
self.string_classifier._num_labels)
def test_valid_state(self):
self.assertEquals(self.string_classifier._num_labels, 3)
self.assertEquals(self.string_classifier._num_docs, 2)
self.assertEquals(self.string_classifier._num_words, 7)
self._validate_instance()
def test_add_train_examples(self):
self.string_classifier.add_examples_for_training(
self._NEW_EXAMPLES_TRAIN)
self.assertEquals(self.string_classifier._num_labels, 3)
self.assertEquals(self.string_classifier._num_docs, 3)
self.assertEquals(self.string_classifier._num_words, 10)
self._validate_instance()
def test_add_test_examples(self):
self.string_classifier.add_examples_for_predicting(self._EXAMPLES_TEST)
self.assertEquals(self.string_classifier._num_labels, 3)
self.assertEquals(self.string_classifier._num_docs, 5)
self.assertEquals(self.string_classifier._num_words, 34)
self._validate_instance()
def test_empty_load(self):
self.string_classifier.load_examples([])
        # The default label should still be present.
self.assertEquals(self.string_classifier._num_labels, 1)
self.assertEquals(self.string_classifier._num_docs, 0)
self.assertEquals(self.string_classifier._num_words, 0)
self._validate_instance()
def test_empty_add(self):
self.string_classifier.add_examples_for_training([])
self.assertEquals(self.string_classifier._num_labels, 3)
self.assertEquals(self.string_classifier._num_docs, 2)
self.assertEquals(self.string_classifier._num_words, 7)
self._validate_instance()
def test_model_to_and_from_dict(self):
self.assertEquals(
self.string_classifier._num_docs,
len(self._EXAMPLES_TRAIN))
# When the model is converted into a dictionary, check that updating
# the dictionary does not alter the model.
model = self.string_classifier.to_dict()
model['_num_docs'] = 9
self.assertEquals(model['_num_docs'], 9)
self.assertEquals(
self.string_classifier._num_docs,
len(self._EXAMPLES_TRAIN))
# When the model is updated, check that the dictionary remains
# unchanged.
self.string_classifier.add_examples_for_predicting(self._EXAMPLES_TEST)
self.assertEquals(
self.string_classifier._num_docs,
len(self._EXAMPLES_TRAIN) + len(self._EXAMPLES_TEST))
self.assertEquals(model['_num_docs'], 9)
# When a dictionary is loaded into a model, check that the altered
# values are now consistent.
self.string_classifier.from_dict(model)
self.assertEquals(self.string_classifier._num_docs, 9)
self.assertEquals(model['_num_docs'], 9)
def test_get_word_id(self):
word_count = self.string_classifier._num_words
self.string_classifier._get_word_id('_non_existent_word_1')
self.assertEquals(self.string_classifier._num_words, word_count + 1)
self.string_classifier._get_word_id('i')
self.assertEquals(self.string_classifier._num_words, word_count + 1)
self.string_classifier._get_word_id('_non_existent_word_2')
self.assertEquals(self.string_classifier._num_words, word_count + 2)
def test_get_label_id(self):
label_count = self.string_classifier._num_labels
self.string_classifier._get_label_id('_non_existent_label_1')
self.assertEquals(self.string_classifier._num_labels, label_count + 1)
self.string_classifier._get_label_id('food')
self.assertEquals(self.string_classifier._num_labels, label_count + 1)
self.string_classifier._get_label_id('_non_existent_label_2')
self.assertEquals(self.string_classifier._num_labels, label_count + 2)
def test_get_label_name(self):
label_id = self.string_classifier._get_label_id('food')
label_name = self.string_classifier._get_label_name(label_id)
self.assertEquals(label_name, 'food')
with self.assertRaises(Exception):
label_id = self.string_classifier._get_label_name(-1)
def test_reload_valid_state(self):
self.string_classifier.load_examples(self._NEW_EXAMPLES_TRAIN)
self.assertEquals(self.string_classifier._num_labels, 3)
self.assertEquals(
self.string_classifier._num_docs,
len(self._NEW_EXAMPLES_TRAIN))
self.assertEquals(self.string_classifier._num_words, 4)
self._validate_instance()
def test_prediction_report(self):
def _mock_get_label_probabilities(d):
self.assertEquals(d, -1)
return [0.5, 0.3, 0.2]
def _mock_get_label_id(unused_label):
return 0
def _mock_get_label_name(unused_label):
return 'fake_label'
self.string_classifier._prediction_threshold = 0
self.string_classifier._get_label_probabilities = (
_mock_get_label_probabilities)
self.string_classifier._get_label_id = _mock_get_label_id
prediction_report = (
self.string_classifier._get_prediction_report_for_doc(-1))
self.assertEquals(prediction_report['prediction_label_id'], 1)
def test_predict_label_for_doc(self):
"""This test ensures that the predictor is predicting the labels that
are provided (in this case, 'food', 'pets', and the generic label
'_default'). This test does not cover prediction accuracy, so
_DEFAULT_MIN_DOCS_TO_PREDICT and _DEFAULT_MIN_LABELS_TO_PREDICT have
been set to zero. This allows the predictor to predict on smaller data
sets, which is useful for testing purposes. Setting the above constants
to zero is not recommended in a serving system.
"""
self.string_classifier._DEFAULT_MIN_DOCS_TO_PREDICT = 0
self.string_classifier._DEFAULT_MIN_LABELS_TO_PREDICT = 0
doc_ids = self.string_classifier.add_examples_for_predicting(
self._EXAMPLES_TEST)
predicted_label = self.string_classifier.predict_label_for_doc(
doc_ids[0])
self.assertEquals(predicted_label, 'food')
predicted_label = self.string_classifier.predict_label_for_doc(
doc_ids[1])
self.assertEquals(predicted_label, 'pets')
# Testing a doc predicted with the default label
self.string_classifier._prediction_threshold = 0.7
predicted_label = self.string_classifier.predict_label_for_doc(
doc_ids[2])
self.assertEquals(predicted_label, '_default')
self._validate_instance()
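# A minimal illustrative sketch (not part of the original test module), using
# only the calls exercised in the tests above: train on labelled documents,
# queue unlabelled documents for prediction, then ask for a label.
#
#     classifier = classifier_services.StringClassifier()
#     classifier.load_examples([['i eat fish and vegetables', ['food']],
#                               ['fish are pets', ['pets']]])
#     doc_ids = classifier.add_examples_for_predicting(['my cat eats fish'])
#     label = classifier.predict_label_for_doc(doc_ids[0])
#     # label is one of the trained labels ('food', 'pets') or '_default'.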
|
sdulal/oppia
|
core/domain/classifier_services_test.py
|
Python
|
apache-2.0
| 10,135
|
def foo(a, /, b):
print(a, b)
def egg():
a = 1
b = 2
foo(a, b)<caret>
|
siosio/intellij-community
|
python/testData/multipleArgumentsCompletion/slashParameter.after.py
|
Python
|
apache-2.0
| 87
|
# -*- coding: utf-8 -*-
from gluon import current
#from gluon.html import *
from gluon.storage import Storage
from s3 import S3CustomController
THEME = "historic.CRMT"
# =============================================================================
class index(S3CustomController):
""" Custom Home Page """
def __call__(self):
output = {}
# Latest Activities
db = current.db
s3db = current.s3db
atable = s3db.project_activity
query = (atable.deleted == False)
output["total_activities"] = db(query).count()
#gtable = s3db.gis_location
#query &= (atable.location_id == gtable.id)
ogtable = s3db.org_group
ltable = s3db.project_activity_group
query &= (atable.id == ltable.activity_id) & \
(ogtable.id == ltable.group_id)
rows = db(query).select(atable.id,
atable.name,
atable.date,
#gtable.L3,
ogtable.name,
limitby = (0, 3),
orderby = ~atable.date
)
latest_activities = []
current.deployment_settings.L10n.date_format = "%d %b %y"
drepresent = atable.date.represent
for row in rows:
date = row["project_activity.date"]
if date:
nice_date = drepresent(date)
else:
nice_date = ""
latest_activities.append(Storage(id = row["project_activity.id"],
name = row["project_activity.name"],
date = nice_date,
date_iso = date or "",
org_group = row["org_group.name"],
#location = row["gis_location.L3"],
))
output["latest_activities"] = latest_activities
# Which Map should we link to in "Know your community"?
auth = current.auth
table = s3db.gis_config
if auth.is_logged_in() and auth.user.org_group_id:
# Coalition Map
ogtable = s3db.org_group
og = db(ogtable.id == auth.user.org_group_id).select(ogtable.pe_id,
limitby=(0, 1)
).first()
query = (table.pe_id == og.pe_id)
else:
# Default Map
query = (table.uuid == "SITE_DEFAULT")
config = db(query).select(table.id,
limitby=(0, 1)
).first()
try:
output["config_id"] = config.id
except:
output["config_id"] = None
self._view(THEME, "index.html")
return output
# END =========================================================================
|
flavour/eden
|
modules/templates/historic/CRMT/controllers.py
|
Python
|
mit
| 3,111
|
# encoding: utf-8
import os
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
app_dir_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
class Migration(SchemaMigration):
def forwards(self, orm):
if app_dir_name == 'forum':
try:
db.rename_table('forum_anonymousanswer', 'askbot_anonymousanswer')
db.rename_table('forum_anonymousquestion', 'askbot_anonymousquestion')
db.rename_table('forum_emailfeedsetting', 'askbot_emailfeedsetting')
db.rename_table('forum_markedtag', 'askbot_markedtag')
db.rename_table('forum_questionview', 'askbot_questionview')
db.rename_table('forum_validationhash', 'askbot_validationhash')
except:
pass
def backwards(self, orm):
if app_dir_name == 'forum':
db.rename_table('askbot_anonymousanswer', 'forum_anonymousanswer')
db.rename_table('askbot_anonymousquestion', 'forum_anonymousquestion')
db.rename_table('askbot_emailfeedsetting', 'forum_emailfeedsetting')
db.rename_table('askbot_markedtag', 'forum_markedtag')
db.rename_table('askbot_questionview', 'forum_questionview')
db.rename_table('askbot_validationhash', 'forum_validationhash')
if app_dir_name == 'forum':
models = {
'forum.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'forum.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['forum.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'forum.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'forum.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['forum.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'forum.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['forum.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'forum.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['forum.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'forum.badge': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'Badge', 'db_table': "u'badge'"},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "'Award'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'forum.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'forum.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'forum.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'forum.flaggeditem': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'FlaggedItem', 'db_table': "u'flagged_item'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'flagged_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'to': "orm['auth.User']"})
},
'forum.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['forum.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'forum.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['forum.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'forum.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['forum.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'forum.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['forum.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'forum.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['forum.Question']"}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'forum.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'forum.validationhash': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'ValidationHash'},
'expiration': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 6, 13, 23, 16, 4, 680070)'}),
'hash_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'seed': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'forum.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
else:
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badge': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'Badge', 'db_table': "u'badge'"},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "'Award'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'askbot.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.flaggeditem': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'FlaggedItem', 'db_table': "u'flagged_item'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'flagged_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.validationhash': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'ValidationHash'},
'expiration': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 6, 13, 23, 16, 4, 680070)'}),
'hash_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'seed': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = [app_dir_name]
|
stianrh/askbot-nordic
|
askbot/migrations/0014_rename_schema_from_forum_to_askbot.py
|
Python
|
gpl-3.0
| 55,671
|
# -*- coding: utf-8 -*-
from openerp import models, api
class sale_order_line(models.Model):
_inherit = "sale.order.line"
@api.one
def button_confirm(self):
if self.product_id.recurring_invoice and self.order_id.project_id:
invoice_line_ids = [((0, 0, {
'product_id': self.product_id.id,
'analytic_account_id': self.order_id.project_id.id,
'name': self.name,
'quantity': self.product_uom_qty,
'uom_id': self.product_uom.id,
'price_unit': self.price_unit,
'price_subtotal': self.price_subtotal
}))]
analytic_values = {'recurring_invoices': True, 'recurring_invoice_line_ids': invoice_line_ids}
if not self.order_id.project_id.partner_id:
analytic_values['partner_id'] = self.order_id.partner_id.id
self.order_id.project_id.write(analytic_values)
return super(sale_order_line, self).button_confirm()
|
odoousers2014/odoo
|
addons/account_analytic_analysis/sale_order.py
|
Python
|
agpl-3.0
| 1,018
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c)2012 Rackspace US, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import sys
import pyrax
pyrax.set_setting("identity_type", "rackspace")
creds_file = os.path.expanduser("~/.rackspace_cloud_credentials")
pyrax.set_credential_file(creds_file)
clb = pyrax.cloud_loadbalancers
try:
lb = clb.list()[0]
except IndexError:
print("You do not have any load balancers yet.")
print("Please create one and then re-run this script.")
sys.exit()
print("Load Balancer:", lb)
orig = lb.session_persistence
print("Current setting of session persistence:", orig or '""')
print()
if orig:
print("Clearing...")
lb.session_persistence = ""
else:
print("Setting persistence to HTTP_COOKIE...")
lb.session_persistence = "HTTP_COOKIE"
print("New setting of session persistence:", lb.session_persistence or '""')
|
0dataloss/pyrax
|
samples/cloud_loadbalancers/session_persistence.py
|
Python
|
apache-2.0
| 1,492
|
"""
Hacks for the Django 1.0/1.0.2 releases.
"""
from django.conf import settings
from django.db.backends.creation import BaseDatabaseCreation
from django.db.models.loading import cache
from django.core import management
from django.core.management.commands.flush import Command as FlushCommand
from django.utils.datastructures import SortedDict
class SkipFlushCommand(FlushCommand):
def handle_noargs(self, **options):
# no-op to avoid calling flush
return
class Hacks:
def set_installed_apps(self, apps):
"""
Sets Django's INSTALLED_APPS setting to be effectively the list passed in.
"""
# Make sure it's a list.
apps = list(apps)
# Make sure it contains strings
if apps:
assert isinstance(apps[0], basestring), "The argument to set_installed_apps must be a list of strings."
# Monkeypatch in!
settings.INSTALLED_APPS, settings.OLD_INSTALLED_APPS = (
apps,
settings.INSTALLED_APPS,
)
self._redo_app_cache()
def reset_installed_apps(self):
"""
Undoes the effect of set_installed_apps.
"""
settings.INSTALLED_APPS = settings.OLD_INSTALLED_APPS
self._redo_app_cache()
def _redo_app_cache(self):
"""
Used to repopulate AppCache after fiddling with INSTALLED_APPS.
"""
cache.loaded = False
cache.handled = {}
cache.postponed = []
cache.app_store = SortedDict()
cache.app_models = SortedDict()
cache.app_errors = {}
cache._populate()
def clear_app_cache(self):
"""
Clears the contents of AppCache to a blank state, so new models
from the ORM can be added.
"""
self.old_app_models, cache.app_models = cache.app_models, {}
def unclear_app_cache(self):
"""
        Reverses the effects of clear_app_cache.
"""
cache.app_models = self.old_app_models
cache._get_models_cache = {}
def repopulate_app_cache(self):
"""
Rebuilds AppCache with the real model definitions.
"""
cache._populate()
def store_app_cache_state(self):
self.stored_app_cache_state = dict(**cache.__dict__)
def restore_app_cache_state(self):
cache.__dict__ = self.stored_app_cache_state
def patch_flush_during_test_db_creation(self):
"""
Patches BaseDatabaseCreation.create_test_db to not flush database
"""
def patch(f):
def wrapper(*args, **kwargs):
# hold onto the original and replace flush command with a no-op
original_flush_command = management._commands['flush']
try:
management._commands['flush'] = SkipFlushCommand()
# run create_test_db
f(*args, **kwargs)
finally:
# unpatch flush back to the original
management._commands['flush'] = original_flush_command
return wrapper
BaseDatabaseCreation.create_test_db = patch(BaseDatabaseCreation.create_test_db)
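# A minimal usage sketch, assuming the Hacks class above is used directly; the
# app labels and the helper name _example_set_installed_apps_usage are
# hypothetical and not part of the original module. The pattern is: patch
# INSTALLED_APPS, work against the rebuilt app cache, then always restore.
def _example_set_installed_apps_usage():
    hacks = Hacks()
    hacks.set_installed_apps(["django.contrib.contenttypes", "myapp"])
    try:
        pass  # run migrations or other ORM work against the patched app cache
    finally:
        hacks.reset_installed_apps()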
|
edisonlz/fruit
|
web_project/base/site-packages/south/hacks/django_1_0.py
|
Python
|
apache-2.0
| 3,274
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry import story
from telemetry import page as page_module
from telemetry import value
from telemetry.value import skip
class TestBase(unittest.TestCase):
def setUp(self):
story_set = story.StorySet(base_dir=os.path.dirname(__file__))
story_set.AddStory(
page_module.Page('http://www.bar.com/', story_set, story_set.base_dir))
self.story_set = story_set
@property
def pages(self):
return self.story_set.stories
class ValueTest(TestBase):
def testBuildbotAndRepresentativeValue(self):
v = skip.SkipValue(self.pages[0], 'page skipped for testing reason')
self.assertIsNone(v.GetBuildbotValue())
self.assertIsNone(v.GetBuildbotDataType(
value.COMPUTED_PER_PAGE_SUMMARY_OUTPUT_CONTEXT))
self.assertIsNone(v.GetChartAndTraceNameForPerPageResult())
self.assertIsNone(v.GetRepresentativeNumber())
self.assertIsNone(v.GetRepresentativeString())
def testAsDict(self):
v = skip.SkipValue(self.pages[0], 'page skipped for testing reason')
d = v.AsDictWithoutBaseClassEntries()
self.assertEquals(d['reason'], 'page skipped for testing reason')
def testFromDict(self):
d = {
'type': 'skip',
'name': 'skip',
'units': '',
'reason': 'page skipped for testing reason'
}
v = value.Value.FromDict(d, {})
self.assertTrue(isinstance(v, skip.SkipValue))
self.assertEquals(v.reason, 'page skipped for testing reason')
|
Chilledheart/chromium
|
tools/telemetry/telemetry/value/skip_unittest.py
|
Python
|
bsd-3-clause
| 1,626
|
from collections import namedtuple
import cx_Oracle
from django.db import models
from django.db.backends.base.introspection import (
BaseDatabaseIntrospection, FieldInfo as BaseFieldInfo, TableInfo,
)
from django.utils.functional import cached_property
FieldInfo = namedtuple('FieldInfo', BaseFieldInfo._fields + ('is_autofield', 'is_json'))
class DatabaseIntrospection(BaseDatabaseIntrospection):
cache_bust_counter = 1
# Maps type objects to Django Field types.
@cached_property
def data_types_reverse(self):
if self.connection.cx_oracle_version < (8,):
return {
cx_Oracle.BLOB: 'BinaryField',
cx_Oracle.CLOB: 'TextField',
cx_Oracle.DATETIME: 'DateField',
cx_Oracle.FIXED_CHAR: 'CharField',
cx_Oracle.FIXED_NCHAR: 'CharField',
cx_Oracle.INTERVAL: 'DurationField',
cx_Oracle.NATIVE_FLOAT: 'FloatField',
cx_Oracle.NCHAR: 'CharField',
cx_Oracle.NCLOB: 'TextField',
cx_Oracle.NUMBER: 'DecimalField',
cx_Oracle.STRING: 'CharField',
cx_Oracle.TIMESTAMP: 'DateTimeField',
}
else:
return {
cx_Oracle.DB_TYPE_DATE: 'DateField',
cx_Oracle.DB_TYPE_BINARY_DOUBLE: 'FloatField',
cx_Oracle.DB_TYPE_BLOB: 'BinaryField',
cx_Oracle.DB_TYPE_CHAR: 'CharField',
cx_Oracle.DB_TYPE_CLOB: 'TextField',
cx_Oracle.DB_TYPE_INTERVAL_DS: 'DurationField',
cx_Oracle.DB_TYPE_NCHAR: 'CharField',
cx_Oracle.DB_TYPE_NCLOB: 'TextField',
cx_Oracle.DB_TYPE_NVARCHAR: 'CharField',
cx_Oracle.DB_TYPE_NUMBER: 'DecimalField',
cx_Oracle.DB_TYPE_TIMESTAMP: 'DateTimeField',
cx_Oracle.DB_TYPE_VARCHAR: 'CharField',
}
def get_field_type(self, data_type, description):
if data_type == cx_Oracle.NUMBER:
precision, scale = description[4:6]
if scale == 0:
if precision > 11:
return 'BigAutoField' if description.is_autofield else 'BigIntegerField'
elif 1 < precision < 6 and description.is_autofield:
return 'SmallAutoField'
elif precision == 1:
return 'BooleanField'
elif description.is_autofield:
return 'AutoField'
else:
return 'IntegerField'
elif scale == -127:
return 'FloatField'
elif data_type == cx_Oracle.NCLOB and description.is_json:
return 'JSONField'
return super().get_field_type(data_type, description)
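    # Worked examples of the NUMBER mapping above (illustrative values only):
    #   NUMBER(19, 0) identity column   -> 'BigAutoField'
    #   NUMBER(19, 0) plain column      -> 'BigIntegerField'
    #   NUMBER(5, 0) identity column    -> 'SmallAutoField'
    #   NUMBER(1, 0)                    -> 'BooleanField'
    #   NUMBER(11, 0) plain column      -> 'IntegerField'
    #   scale of -127 (Oracle FLOAT)    -> 'FloatField'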
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
cursor.execute("""
SELECT table_name, 't'
FROM user_tables
WHERE
NOT EXISTS (
SELECT 1
FROM user_mviews
WHERE user_mviews.mview_name = user_tables.table_name
)
UNION ALL
SELECT view_name, 'v' FROM user_views
UNION ALL
SELECT mview_name, 'v' FROM user_mviews
""")
return [TableInfo(self.identifier_converter(row[0]), row[1]) for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
interface.
"""
# user_tab_columns gives data default for columns
cursor.execute("""
SELECT
user_tab_cols.column_name,
user_tab_cols.data_default,
CASE
WHEN user_tab_cols.collation = user_tables.default_collation
THEN NULL
ELSE user_tab_cols.collation
END collation,
CASE
WHEN user_tab_cols.char_used IS NULL
THEN user_tab_cols.data_length
ELSE user_tab_cols.char_length
END as internal_size,
CASE
WHEN user_tab_cols.identity_column = 'YES' THEN 1
ELSE 0
END as is_autofield,
CASE
WHEN EXISTS (
SELECT 1
FROM user_json_columns
WHERE
user_json_columns.table_name = user_tab_cols.table_name AND
user_json_columns.column_name = user_tab_cols.column_name
)
THEN 1
ELSE 0
END as is_json
FROM user_tab_cols
LEFT OUTER JOIN
user_tables ON user_tables.table_name = user_tab_cols.table_name
WHERE user_tab_cols.table_name = UPPER(%s)
""", [table_name])
field_map = {
column: (internal_size, default if default != 'NULL' else None, collation, is_autofield, is_json)
for column, default, collation, internal_size, is_autofield, is_json in cursor.fetchall()
}
self.cache_bust_counter += 1
cursor.execute("SELECT * FROM {} WHERE ROWNUM < 2 AND {} > 0".format(
self.connection.ops.quote_name(table_name),
self.cache_bust_counter))
description = []
for desc in cursor.description:
name = desc[0]
internal_size, default, collation, is_autofield, is_json = field_map[name]
name = name % {} # cx_Oracle, for some reason, doubles percent signs.
description.append(FieldInfo(
self.identifier_converter(name), *desc[1:3], internal_size, desc[4] or 0,
desc[5] or 0, *desc[6:], default, collation, is_autofield, is_json,
))
return description
def identifier_converter(self, name):
"""Identifier comparison is case insensitive under Oracle."""
return name.lower()
def get_sequences(self, cursor, table_name, table_fields=()):
cursor.execute("""
SELECT
user_tab_identity_cols.sequence_name,
user_tab_identity_cols.column_name
FROM
user_tab_identity_cols,
user_constraints,
user_cons_columns cols
WHERE
user_constraints.constraint_name = cols.constraint_name
AND user_constraints.table_name = user_tab_identity_cols.table_name
AND cols.column_name = user_tab_identity_cols.column_name
AND user_constraints.constraint_type = 'P'
AND user_tab_identity_cols.table_name = UPPER(%s)
""", [table_name])
# Oracle allows only one identity column per table.
row = cursor.fetchone()
if row:
return [{
'name': self.identifier_converter(row[0]),
'table': self.identifier_converter(table_name),
'column': self.identifier_converter(row[1]),
}]
# To keep backward compatibility for AutoFields that aren't Oracle
# identity columns.
for f in table_fields:
if isinstance(f, models.AutoField):
return [{'table': table_name, 'column': f.column}]
return []
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all relationships to the given table.
"""
table_name = table_name.upper()
cursor.execute("""
SELECT ca.column_name, cb.table_name, cb.column_name
FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb
WHERE user_constraints.table_name = %s AND
user_constraints.constraint_name = ca.constraint_name AND
user_constraints.r_constraint_name = cb.constraint_name AND
ca.position = cb.position""", [table_name])
return {
self.identifier_converter(field_name): (
self.identifier_converter(rel_field_name),
self.identifier_converter(rel_table_name),
) for field_name, rel_table_name, rel_field_name in cursor.fetchall()
}
def get_key_columns(self, cursor, table_name):
cursor.execute("""
SELECT ccol.column_name, rcol.table_name AS referenced_table, rcol.column_name AS referenced_column
FROM user_constraints c
JOIN user_cons_columns ccol
ON ccol.constraint_name = c.constraint_name
JOIN user_cons_columns rcol
ON rcol.constraint_name = c.r_constraint_name
WHERE c.table_name = %s AND c.constraint_type = 'R'""", [table_name.upper()])
return [
tuple(self.identifier_converter(cell) for cell in row)
for row in cursor.fetchall()
]
def get_primary_key_column(self, cursor, table_name):
cursor.execute("""
SELECT
cols.column_name
FROM
user_constraints,
user_cons_columns cols
WHERE
user_constraints.constraint_name = cols.constraint_name AND
user_constraints.constraint_type = 'P' AND
user_constraints.table_name = UPPER(%s) AND
cols.position = 1
""", [table_name])
row = cursor.fetchone()
return self.identifier_converter(row[0]) if row else None
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns.
"""
constraints = {}
# Loop over the constraints, getting PKs, uniques, and checks
cursor.execute("""
SELECT
user_constraints.constraint_name,
LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position),
CASE user_constraints.constraint_type
WHEN 'P' THEN 1
ELSE 0
END AS is_primary_key,
CASE
WHEN user_constraints.constraint_type IN ('P', 'U') THEN 1
ELSE 0
END AS is_unique,
CASE user_constraints.constraint_type
WHEN 'C' THEN 1
ELSE 0
END AS is_check_constraint
FROM
user_constraints
LEFT OUTER JOIN
user_cons_columns cols ON user_constraints.constraint_name = cols.constraint_name
WHERE
user_constraints.constraint_type = ANY('P', 'U', 'C')
AND user_constraints.table_name = UPPER(%s)
GROUP BY user_constraints.constraint_name, user_constraints.constraint_type
""", [table_name])
for constraint, columns, pk, unique, check in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
'columns': columns.split(','),
'primary_key': pk,
'unique': unique,
'foreign_key': None,
'check': check,
'index': unique, # All uniques come with an index
}
# Foreign key constraints
cursor.execute("""
SELECT
cons.constraint_name,
LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position),
LOWER(rcols.table_name),
LOWER(rcols.column_name)
FROM
user_constraints cons
INNER JOIN
user_cons_columns rcols ON rcols.constraint_name = cons.r_constraint_name AND rcols.position = 1
LEFT OUTER JOIN
user_cons_columns cols ON cons.constraint_name = cols.constraint_name
WHERE
cons.constraint_type = 'R' AND
cons.table_name = UPPER(%s)
GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name
""", [table_name])
for constraint, columns, other_table, other_column in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
'primary_key': False,
'unique': False,
'foreign_key': (other_table, other_column),
'check': False,
'index': False,
'columns': columns.split(','),
}
# Now get indexes
cursor.execute("""
SELECT
ind.index_name,
LOWER(ind.index_type),
LOWER(ind.uniqueness),
LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.column_position),
LISTAGG(cols.descend, ',') WITHIN GROUP (ORDER BY cols.column_position)
FROM
user_ind_columns cols, user_indexes ind
WHERE
cols.table_name = UPPER(%s) AND
NOT EXISTS (
SELECT 1
FROM user_constraints cons
WHERE ind.index_name = cons.index_name
) AND cols.index_name = ind.index_name
GROUP BY ind.index_name, ind.index_type, ind.uniqueness
""", [table_name])
for constraint, type_, unique, columns, orders in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
'primary_key': False,
'unique': unique == 'unique',
'foreign_key': None,
'check': False,
'index': True,
'type': 'idx' if type_ == 'normal' else type_,
'columns': columns.split(','),
'orders': orders.split(','),
}
return constraints
|
ar4s/django
|
django/db/backends/oracle/introspection.py
|
Python
|
bsd-3-clause
| 14,220
|
"""
General image database
An image database creates a list of relative image paths called image_set_index and
transforms an index into an absolute image path. For training, ground truth and
proposals need to be mixed together.
roidb
basic format [image_index]
['image', 'height', 'width', 'flipped',
'boxes', 'gt_classes', 'gt_overlaps', 'max_classes', 'max_overlaps', 'bbox_targets']
"""
from ..logger import logger
import os
import cPickle
import numpy as np
from ..processing.bbox_transform import bbox_overlaps
class IMDB(object):
def __init__(self, name, image_set, root_path, dataset_path):
"""
basic information about an image database
        :param name: name of the image database; it will be used for any output
        :param root_path: root path that stores cache and proposal data
        :param dataset_path: dataset path that stores images and image lists
"""
self.name = name + '_' + image_set
self.image_set = image_set
self.root_path = root_path
self.data_path = dataset_path
# abstract attributes
self.classes = []
self.num_classes = 0
self.image_set_index = []
self.num_images = 0
self.config = {}
def image_path_from_index(self, index):
raise NotImplementedError
def gt_roidb(self):
raise NotImplementedError
def evaluate_detections(self, detections):
raise NotImplementedError
@property
def cache_path(self):
"""
make a directory to store all caches
:return: cache path
"""
cache_path = os.path.join(self.root_path, 'cache')
if not os.path.exists(cache_path):
os.mkdir(cache_path)
return cache_path
def image_path_at(self, index):
"""
access image at index in image database
:param index: image index in image database
:return: image path
"""
return self.image_path_from_index(self.image_set_index[index])
def load_rpn_data(self, full=False):
if full:
rpn_file = os.path.join(self.root_path, 'rpn_data', self.name + '_full_rpn.pkl')
else:
rpn_file = os.path.join(self.root_path, 'rpn_data', self.name + '_rpn.pkl')
assert os.path.exists(rpn_file), '%s rpn data not found at %s' % (self.name, rpn_file)
logger.info('%s loading rpn data from %s' % (self.name, rpn_file))
with open(rpn_file, 'rb') as f:
box_list = cPickle.load(f)
return box_list
def load_rpn_roidb(self, gt_roidb):
"""
turn rpn detection boxes into roidb
:param gt_roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
:return: roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
"""
box_list = self.load_rpn_data()
return self.create_roidb_from_box_list(box_list, gt_roidb)
def rpn_roidb(self, gt_roidb, append_gt=False):
"""
get rpn roidb and ground truth roidb
:param gt_roidb: ground truth roidb
:param append_gt: append ground truth
:return: roidb of rpn
"""
if append_gt:
logger.info('%s appending ground truth annotations' % self.name)
rpn_roidb = self.load_rpn_roidb(gt_roidb)
roidb = IMDB.merge_roidbs(gt_roidb, rpn_roidb)
else:
roidb = self.load_rpn_roidb(gt_roidb)
return roidb
def create_roidb_from_box_list(self, box_list, gt_roidb):
"""
given ground truth, prepare roidb
        :param box_list: [image_index] ndarray of [box_index][x1, y1, x2, y2]
:param gt_roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
:return: roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
"""
assert len(box_list) == self.num_images, 'number of boxes matrix must match number of images'
roidb = []
for i in range(self.num_images):
roi_rec = dict()
roi_rec['image'] = gt_roidb[i]['image']
roi_rec['height'] = gt_roidb[i]['height']
roi_rec['width'] = gt_roidb[i]['width']
boxes = box_list[i]
if boxes.shape[1] == 5:
boxes = boxes[:, :4]
num_boxes = boxes.shape[0]
overlaps = np.zeros((num_boxes, self.num_classes), dtype=np.float32)
if gt_roidb is not None and gt_roidb[i]['boxes'].size > 0:
gt_boxes = gt_roidb[i]['boxes']
gt_classes = gt_roidb[i]['gt_classes']
# n boxes and k gt_boxes => n * k overlap
gt_overlaps = bbox_overlaps(boxes.astype(np.float), gt_boxes.astype(np.float))
# for each box in n boxes, select only maximum overlap (must be greater than zero)
argmaxes = gt_overlaps.argmax(axis=1)
maxes = gt_overlaps.max(axis=1)
I = np.where(maxes > 0)[0]
overlaps[I, gt_classes[argmaxes[I]]] = maxes[I]
roi_rec.update({'boxes': boxes,
'gt_classes': np.zeros((num_boxes,), dtype=np.int32),
'gt_overlaps': overlaps,
'max_classes': overlaps.argmax(axis=1),
'max_overlaps': overlaps.max(axis=1),
'flipped': False})
# background roi => background class
zero_indexes = np.where(roi_rec['max_overlaps'] == 0)[0]
assert all(roi_rec['max_classes'][zero_indexes] == 0)
# foreground roi => foreground class
nonzero_indexes = np.where(roi_rec['max_overlaps'] > 0)[0]
assert all(roi_rec['max_classes'][nonzero_indexes] != 0)
roidb.append(roi_rec)
return roidb
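    # Small worked example of the overlap bookkeeping above (hypothetical numbers):
    # with num_classes == 3, gt_classes == [1, 2] and a proposal whose IoU with the
    # two gt boxes is [0.7, 0.1], argmax picks gt box 0, so the proposal's overlaps
    # row becomes [0., 0.7, 0.] -- the score lands in the column of the
    # best-matching gt class, and max_classes/max_overlaps then read 1 and 0.7.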
def append_flipped_images(self, roidb):
"""
        append flipped images to a roidb
        flip box coordinates; images will actually be flipped when loaded into the network
:param roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
:return: roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
"""
logger.info('%s append flipped images to roidb' % self.name)
assert self.num_images == len(roidb)
for i in range(self.num_images):
roi_rec = roidb[i]
boxes = roi_rec['boxes'].copy()
oldx1 = boxes[:, 0].copy()
oldx2 = boxes[:, 2].copy()
boxes[:, 0] = roi_rec['width'] - oldx2 - 1
boxes[:, 2] = roi_rec['width'] - oldx1 - 1
assert (boxes[:, 2] >= boxes[:, 0]).all()
entry = {'image': roi_rec['image'],
'height': roi_rec['height'],
'width': roi_rec['width'],
'boxes': boxes,
'gt_classes': roidb[i]['gt_classes'],
'gt_overlaps': roidb[i]['gt_overlaps'],
'max_classes': roidb[i]['max_classes'],
'max_overlaps': roidb[i]['max_overlaps'],
'flipped': True}
roidb.append(entry)
self.image_set_index *= 2
return roidb
def evaluate_recall(self, roidb, candidate_boxes=None, thresholds=None):
"""
evaluate detection proposal recall metrics
record max overlap value for each gt box; return vector of overlap values
:param roidb: used to evaluate
:param candidate_boxes: if not given, use roidb's non-gt boxes
:param thresholds: array-like recall threshold
:return: None
ar: average recall, recalls: vector recalls at each IoU overlap threshold
thresholds: vector of IoU overlap threshold, gt_overlaps: vector of all ground-truth overlaps
"""
area_names = ['all', '0-25', '25-50', '50-100',
'100-200', '200-300', '300-inf']
area_ranges = [[0**2, 1e5**2], [0**2, 25**2], [25**2, 50**2], [50**2, 100**2],
[100**2, 200**2], [200**2, 300**2], [300**2, 1e5**2]]
area_counts = []
for area_name, area_range in zip(area_names[1:], area_ranges[1:]):
area_count = 0
for i in range(self.num_images):
if candidate_boxes is None:
# default is use the non-gt boxes from roidb
non_gt_inds = np.where(roidb[i]['gt_classes'] == 0)[0]
boxes = roidb[i]['boxes'][non_gt_inds, :]
else:
boxes = candidate_boxes[i]
boxes_areas = (boxes[:, 2] - boxes[:, 0] + 1) * (boxes[:, 3] - boxes[:, 1] + 1)
valid_range_inds = np.where((boxes_areas >= area_range[0]) & (boxes_areas < area_range[1]))[0]
area_count += len(valid_range_inds)
area_counts.append(area_count)
total_counts = float(sum(area_counts))
for area_name, area_count in zip(area_names[1:], area_counts):
logger.info('percentage of %s is %f' % (area_name, area_count / total_counts))
logger.info('average number of proposal is %f' % (total_counts / self.num_images))
for area_name, area_range in zip(area_names, area_ranges):
gt_overlaps = np.zeros(0)
num_pos = 0
for i in range(self.num_images):
# check for max_overlaps == 1 avoids including crowd annotations
max_gt_overlaps = roidb[i]['gt_overlaps'].max(axis=1)
gt_inds = np.where((roidb[i]['gt_classes'] > 0) & (max_gt_overlaps == 1))[0]
gt_boxes = roidb[i]['boxes'][gt_inds, :]
gt_areas = (gt_boxes[:, 2] - gt_boxes[:, 0] + 1) * (gt_boxes[:, 3] - gt_boxes[:, 1] + 1)
valid_gt_inds = np.where((gt_areas >= area_range[0]) & (gt_areas < area_range[1]))[0]
gt_boxes = gt_boxes[valid_gt_inds, :]
num_pos += len(valid_gt_inds)
if candidate_boxes is None:
# default is use the non-gt boxes from roidb
non_gt_inds = np.where(roidb[i]['gt_classes'] == 0)[0]
boxes = roidb[i]['boxes'][non_gt_inds, :]
else:
boxes = candidate_boxes[i]
if boxes.shape[0] == 0:
continue
overlaps = bbox_overlaps(boxes.astype(np.float), gt_boxes.astype(np.float))
_gt_overlaps = np.zeros((gt_boxes.shape[0]))
# choose whatever is smaller to iterate
rounds = min(boxes.shape[0], gt_boxes.shape[0])
for j in range(rounds):
# find which proposal maximally covers each gt box
argmax_overlaps = overlaps.argmax(axis=0)
# get the IoU amount of coverage for each gt box
max_overlaps = overlaps.max(axis=0)
# find which gt box is covered by most IoU
gt_ind = max_overlaps.argmax()
gt_ovr = max_overlaps.max()
assert (gt_ovr >= 0), '%s\n%s\n%s' % (boxes, gt_boxes, overlaps)
# find the proposal box that covers the best covered gt box
box_ind = argmax_overlaps[gt_ind]
# record the IoU coverage of this gt box
_gt_overlaps[j] = overlaps[box_ind, gt_ind]
assert (_gt_overlaps[j] == gt_ovr)
# mark the proposal box and the gt box as used
overlaps[box_ind, :] = -1
overlaps[:, gt_ind] = -1
# append recorded IoU coverage level
gt_overlaps = np.hstack((gt_overlaps, _gt_overlaps))
gt_overlaps = np.sort(gt_overlaps)
if thresholds is None:
step = 0.05
thresholds = np.arange(0.5, 0.95 + 1e-5, step)
recalls = np.zeros_like(thresholds)
# compute recall for each IoU threshold
for i, t in enumerate(thresholds):
recalls[i] = (gt_overlaps >= t).sum() / float(num_pos)
ar = recalls.mean()
# print results
print('average recall for {}: {:.3f}'.format(area_name, ar))
for threshold, recall in zip(thresholds, recalls):
print('recall @{:.2f}: {:.3f}'.format(threshold, recall))
@staticmethod
def merge_roidbs(a, b):
"""
merge roidbs into one
:param a: roidb to be merged into
:param b: roidb to be merged
:return: merged imdb
"""
assert len(a) == len(b)
for i in range(len(a)):
a[i]['boxes'] = np.vstack((a[i]['boxes'], b[i]['boxes']))
a[i]['gt_classes'] = np.hstack((a[i]['gt_classes'], b[i]['gt_classes']))
a[i]['gt_overlaps'] = np.vstack((a[i]['gt_overlaps'], b[i]['gt_overlaps']))
a[i]['max_classes'] = np.hstack((a[i]['max_classes'], b[i]['max_classes']))
a[i]['max_overlaps'] = np.hstack((a[i]['max_overlaps'], b[i]['max_overlaps']))
return a
|
deepakkumar1984/sia-cog
|
vis/rcnn/dataset/imdb.py
|
Python
|
mit
| 13,177
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Linus Unnebäck <linus@folkdatorn.se>
# Copyright: (c) 2017, Sébastien DA ROCHA <sebastien@da-rocha.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: iptables
short_description: Modify iptables rules
version_added: "2.0"
author:
- Linus Unnebäck (@LinusU) <linus@folkdatorn.se>
- Sébastien DA ROCHA (@sebastiendarocha)
description:
- C(iptables) is used to set up, maintain, and inspect the tables of IP packet
filter rules in the Linux kernel.
- This module does not handle the saving and/or loading of rules, but rather
only manipulates the current rules that are present in memory. This is the
same as the behaviour of the C(iptables) and C(ip6tables) command which
this module uses internally.
notes:
  - This module just deals with individual rules. If you need advanced
    chaining of rules, the recommended way is to template the iptables restore
file.
options:
table:
description:
- This option specifies the packet matching table which the command should operate on.
- If the kernel is configured with automatic module loading, an attempt will be made
to load the appropriate module for that table if it is not already there.
type: str
choices: [ filter, nat, mangle, raw, security ]
default: filter
state:
description:
- Whether the rule should be absent or present.
type: str
choices: [ absent, present ]
default: present
action:
description:
- Whether the rule should be appended at the bottom or inserted at the top.
- If the rule already exists the chain will not be modified.
type: str
choices: [ append, insert ]
default: append
version_added: "2.2"
rule_num:
description:
- Insert the rule as the given rule number.
- This works only with C(action=insert).
type: str
version_added: "2.5"
ip_version:
description:
- Which version of the IP protocol this rule should apply to.
type: str
choices: [ ipv4, ipv6 ]
default: ipv4
chain:
description:
- Specify the iptables chain to modify.
- This could be a user-defined chain or one of the standard iptables chains, like
C(INPUT), C(FORWARD), C(OUTPUT), C(PREROUTING), C(POSTROUTING), C(SECMARK) or C(CONNSECMARK).
type: str
protocol:
description:
- The protocol of the rule or of the packet to check.
- The specified protocol can be one of C(tcp), C(udp), C(udplite), C(icmp), C(esp),
C(ah), C(sctp) or the special keyword C(all), or it can be a numeric value,
representing one of these protocols or a different one.
- A protocol name from I(/etc/protocols) is also allowed.
- A C(!) argument before the protocol inverts the test.
- The number zero is equivalent to all.
- C(all) will match with all protocols and is taken as default when this option is omitted.
type: str
source:
description:
- Source specification.
- Address can be either a network name, a hostname, a network IP address
(with /mask), or a plain IP address.
- Hostnames will be resolved once only, before the rule is submitted to
the kernel. Please note that specifying any name to be resolved with
a remote query such as DNS is a really bad idea.
- The mask can be either a network mask or a plain number, specifying
the number of 1's at the left side of the network mask. Thus, a mask
of 24 is equivalent to 255.255.255.0. A C(!) argument before the
address specification inverts the sense of the address.
type: str
destination:
description:
- Destination specification.
- Address can be either a network name, a hostname, a network IP address
(with /mask), or a plain IP address.
- Hostnames will be resolved once only, before the rule is submitted to
the kernel. Please note that specifying any name to be resolved with
a remote query such as DNS is a really bad idea.
- The mask can be either a network mask or a plain number, specifying
the number of 1's at the left side of the network mask. Thus, a mask
of 24 is equivalent to 255.255.255.0. A C(!) argument before the
address specification inverts the sense of the address.
type: str
tcp_flags:
description:
- TCP flags specification.
- C(tcp_flags) expects a dict with the two keys C(flags) and C(flags_set).
type: dict
default: {}
version_added: "2.4"
suboptions:
flags:
description:
- List of flags you want to examine.
type: list
flags_set:
description:
- Flags to be set.
type: list
match:
description:
- Specifies a match to use, that is, an extension module that tests for
a specific property.
- The set of matches make up the condition under which a target is invoked.
- Matches are evaluated first to last if specified as an array and work in short-circuit
fashion, i.e. if one extension yields false, evaluation will stop.
type: list
default: []
jump:
description:
- This specifies the target of the rule; i.e., what to do if the packet matches it.
- The target can be a user-defined chain (other than the one
this rule is in), one of the special builtin targets which decide the
fate of the packet immediately, or an extension (see EXTENSIONS
below).
- If this option is omitted in a rule (and the goto parameter
is not used), then matching the rule will have no effect on the
packet's fate, but the counters on the rule will be incremented.
type: str
gateway:
description:
- This specifies the IP address of host to send the cloned packets.
- This option is only valid when C(jump) is set to C(TEE).
type: str
version_added: "2.8"
log_prefix:
description:
      - Specifies a log text for the rule. Only makes sense with a LOG jump.
type: str
version_added: "2.5"
log_level:
description:
- Logging level according to the syslogd-defined priorities.
- The value can be strings or numbers from 1-8.
- This parameter is only applicable if C(jump) is set to C(LOG).
type: str
version_added: "2.8"
choices: [ '0', '1', '2', '3', '4', '5', '6', '7', 'emerg', 'alert', 'crit', 'error', 'warning', 'notice', 'info', 'debug' ]
goto:
description:
      - This specifies that the processing should continue in a user-specified chain.
- Unlike the jump argument return will not continue processing in
this chain but instead in the chain that called us via jump.
type: str
in_interface:
description:
- Name of an interface via which a packet was received (only for packets
entering the C(INPUT), C(FORWARD) and C(PREROUTING) chains).
- When the C(!) argument is used before the interface name, the sense is inverted.
- If the interface name ends in a C(+), then any interface which begins with
this name will match.
- If this option is omitted, any interface name will match.
type: str
out_interface:
description:
- Name of an interface via which a packet is going to be sent (for
packets entering the C(FORWARD), C(OUTPUT) and C(POSTROUTING) chains).
- When the C(!) argument is used before the interface name, the sense is inverted.
- If the interface name ends in a C(+), then any interface which begins
with this name will match.
- If this option is omitted, any interface name will match.
type: str
fragment:
description:
- This means that the rule only refers to second and further fragments
of fragmented packets.
- Since there is no way to tell the source or destination ports of such
a packet (or ICMP type), such a packet will not match any rules which specify them.
- When the "!" argument precedes fragment argument, the rule will only match head fragments,
or unfragmented packets.
type: str
set_counters:
description:
- This enables the administrator to initialize the packet and byte
counters of a rule (during C(INSERT), C(APPEND), C(REPLACE) operations).
type: str
source_port:
description:
- Source port or port range specification.
- This can either be a service name or a port number.
- An inclusive range can also be specified, using the format C(first:last).
- If the first port is omitted, C(0) is assumed; if the last is omitted, C(65535) is assumed.
- If the first port is greater than the second one they will be swapped.
type: str
destination_port:
description:
- "Destination port or port range specification. This can either be
a service name or a port number. An inclusive range can also be
specified, using the format first:last. If the first port is omitted,
'0' is assumed; if the last is omitted, '65535' is assumed. If the
first port is greater than the second one they will be swapped.
This is only valid if the rule also specifies one of the following
protocols: tcp, udp, dccp or sctp."
type: str
to_ports:
description:
- This specifies a destination port or range of ports to use, without
this, the destination port is never altered.
- This is only valid if the rule also specifies one of the protocol
C(tcp), C(udp), C(dccp) or C(sctp).
type: str
to_destination:
description:
- This specifies a destination address to use with C(DNAT).
- Without this, the destination address is never altered.
type: str
version_added: "2.1"
to_source:
description:
- This specifies a source address to use with C(SNAT).
- Without this, the source address is never altered.
type: str
version_added: "2.2"
syn:
description:
- This allows matching packets that have the SYN bit set and the ACK
and RST bits unset.
- When negated, this matches all packets with the RST or the ACK bits set.
type: str
choices: [ ignore, match, negate ]
default: ignore
version_added: "2.5"
set_dscp_mark:
description:
- This allows specifying a DSCP mark to be added to packets.
It takes either an integer or hex value.
- Mutually exclusive with C(set_dscp_mark_class).
type: str
version_added: "2.1"
set_dscp_mark_class:
description:
- This allows specifying a predefined DiffServ class which will be
translated to the corresponding DSCP mark.
- Mutually exclusive with C(set_dscp_mark).
type: str
version_added: "2.1"
comment:
description:
- This specifies a comment that will be added to the rule.
type: str
ctstate:
description:
- C(ctstate) is a list of the connection states to match in the conntrack module.
      - Possible states are C(INVALID), C(NEW), C(ESTABLISHED), C(RELATED), C(UNTRACKED), C(SNAT), C(DNAT).
type: list
default: []
src_range:
description:
- Specifies the source IP range to match in the iprange module.
type: str
version_added: "2.8"
dst_range:
description:
- Specifies the destination IP range to match in the iprange module.
type: str
version_added: "2.8"
limit:
description:
- Specifies the maximum average number of matches to allow per second.
- The number can specify units explicitly, using `/second', `/minute',
`/hour' or `/day', or parts of them (so `5/second' is the same as
`5/s').
type: str
limit_burst:
description:
- Specifies the maximum burst before the above limit kicks in.
type: str
version_added: "2.1"
uid_owner:
description:
- Specifies the UID or username to use in match by owner rule.
      - From Ansible 2.6, when the C(!) argument is prepended, it inverts
        the rule to apply to all users except the one specified.
type: str
version_added: "2.1"
reject_with:
description:
- 'Specifies the error packet type to return while rejecting. It implies
"jump: REJECT"'
type: str
version_added: "2.1"
icmp_type:
description:
- This allows specification of the ICMP type, which can be a numeric
ICMP type, type/code pair, or one of the ICMP type names shown by the
command 'iptables -p icmp -h'
type: str
version_added: "2.2"
flush:
description:
- Flushes the specified table and chain of all rules.
- If no chain is specified then the entire table is purged.
- Ignores all other parameters.
type: bool
version_added: "2.2"
policy:
description:
- Set the policy for the chain to the given target.
- Only built-in chains can have policies.
- This parameter requires the C(chain) parameter.
- Ignores all other parameters.
type: str
choices: [ ACCEPT, DROP, QUEUE, RETURN ]
version_added: "2.2"
'''
EXAMPLES = r'''
- name: Block specific IP
iptables:
chain: INPUT
source: 8.8.8.8
jump: DROP
become: yes
- name: Forward port 80 to 8600
iptables:
table: nat
chain: PREROUTING
in_interface: eth0
protocol: tcp
match: tcp
destination_port: 80
jump: REDIRECT
to_ports: 8600
comment: Redirect web traffic to port 8600
become: yes
- name: Allow related and established connections
iptables:
chain: INPUT
ctstate: ESTABLISHED,RELATED
jump: ACCEPT
become: yes
- name: Allow new incoming SYN packets on TCP port 22 (SSH).
iptables:
chain: INPUT
protocol: tcp
destination_port: 22
ctstate: NEW
syn: match
jump: ACCEPT
comment: Accept new SSH connections.
- name: Match on IP ranges
iptables:
chain: FORWARD
src_range: 192.168.1.100-192.168.1.199
dst_range: 10.0.0.1-10.0.0.50
jump: ACCEPT
- name: Tag all outbound tcp packets with DSCP mark 8
iptables:
chain: OUTPUT
jump: DSCP
table: mangle
set_dscp_mark: 8
protocol: tcp
- name: Tag all outbound tcp packets with DSCP DiffServ class CS1
iptables:
chain: OUTPUT
jump: DSCP
table: mangle
set_dscp_mark_class: CS1
protocol: tcp
- name: Insert a rule on line 5
iptables:
chain: INPUT
protocol: tcp
destination_port: 8080
jump: ACCEPT
action: insert
rule_num: 5
- name: Set the policy for the INPUT chain to DROP
iptables:
chain: INPUT
policy: DROP
- name: Reject tcp with tcp-reset
iptables:
chain: INPUT
protocol: tcp
reject_with: tcp-reset
ip_version: ipv4
- name: Set tcp flags
iptables:
chain: OUTPUT
jump: DROP
protocol: tcp
tcp_flags:
flags: ALL
flags_set:
- ACK
- RST
- SYN
- FIN
- name: iptables flush filter
iptables:
chain: "{{ item }}"
flush: yes
with_items: [ 'INPUT', 'FORWARD', 'OUTPUT' ]
- name: iptables flush nat
iptables:
table: nat
chain: '{{ item }}'
flush: yes
with_items: [ 'INPUT', 'OUTPUT', 'PREROUTING', 'POSTROUTING' ]
- name: Log packets arriving into an user-defined chain
iptables:
chain: LOGGING
action: append
state: present
limit: 2/second
limit_burst: 20
log_prefix: "IPTABLES:INFO: "
log_level: info
'''
import re
from ansible.module_utils.basic import AnsibleModule
BINS = dict(
ipv4='iptables',
ipv6='ip6tables',
)
ICMP_TYPE_OPTIONS = dict(
ipv4='--icmp-type',
ipv6='--icmpv6-type',
)
def append_param(rule, param, flag, is_list):
if is_list:
for item in param:
append_param(rule, item, flag, False)
else:
if param is not None:
if param[0] == '!':
rule.extend(['!', flag, param[1:]])
else:
rule.extend([flag, param])
def append_tcp_flags(rule, param, flag):
if param:
if 'flags' in param and 'flags_set' in param:
rule.extend([flag, ','.join(param['flags']), ','.join(param['flags_set'])])
def append_match_flag(rule, param, flag, negatable):
if param == 'match':
rule.extend([flag])
elif negatable and param == 'negate':
rule.extend(['!', flag])
def append_csv(rule, param, flag):
if param:
rule.extend([flag, ','.join(param)])
def append_match(rule, param, match):
if param:
rule.extend(['-m', match])
def append_jump(rule, param, jump):
if param:
rule.extend(['-j', jump])
def construct_rule(params):
rule = []
append_param(rule, params['protocol'], '-p', False)
append_param(rule, params['source'], '-s', False)
append_param(rule, params['destination'], '-d', False)
append_param(rule, params['match'], '-m', True)
append_tcp_flags(rule, params['tcp_flags'], '--tcp-flags')
append_param(rule, params['jump'], '-j', False)
if params.get('jump') and params['jump'].lower() == 'tee':
append_param(rule, params['gateway'], '--gateway', False)
append_param(rule, params['log_prefix'], '--log-prefix', False)
append_param(rule, params['log_level'], '--log-level', False)
append_param(rule, params['to_destination'], '--to-destination', False)
append_param(rule, params['to_source'], '--to-source', False)
append_param(rule, params['goto'], '-g', False)
append_param(rule, params['in_interface'], '-i', False)
append_param(rule, params['out_interface'], '-o', False)
append_param(rule, params['fragment'], '-f', False)
append_param(rule, params['set_counters'], '-c', False)
append_param(rule, params['source_port'], '--source-port', False)
append_param(rule, params['destination_port'], '--destination-port', False)
append_param(rule, params['to_ports'], '--to-ports', False)
append_param(rule, params['set_dscp_mark'], '--set-dscp', False)
append_param(
rule,
params['set_dscp_mark_class'],
'--set-dscp-class',
False)
append_match_flag(rule, params['syn'], '--syn', True)
append_match(rule, params['comment'], 'comment')
append_param(rule, params['comment'], '--comment', False)
if 'conntrack' in params['match']:
append_csv(rule, params['ctstate'], '--ctstate')
elif 'state' in params['match']:
append_csv(rule, params['ctstate'], '--state')
elif params['ctstate']:
append_match(rule, params['ctstate'], 'conntrack')
append_csv(rule, params['ctstate'], '--ctstate')
if 'iprange' in params['match']:
append_param(rule, params['src_range'], '--src-range', False)
append_param(rule, params['dst_range'], '--dst-range', False)
elif params['src_range'] or params['dst_range']:
append_match(rule, params['src_range'] or params['dst_range'], 'iprange')
append_param(rule, params['src_range'], '--src-range', False)
append_param(rule, params['dst_range'], '--dst-range', False)
append_match(rule, params['limit'] or params['limit_burst'], 'limit')
append_param(rule, params['limit'], '--limit', False)
append_param(rule, params['limit_burst'], '--limit-burst', False)
append_match(rule, params['uid_owner'], 'owner')
append_match_flag(rule, params['uid_owner'], '--uid-owner', True)
append_param(rule, params['uid_owner'], '--uid-owner', False)
if params['jump'] is None:
append_jump(rule, params['reject_with'], 'REJECT')
append_param(rule, params['reject_with'], '--reject-with', False)
append_param(
rule,
params['icmp_type'],
ICMP_TYPE_OPTIONS[params['ip_version']],
False)
return rule
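# Illustrative example (hypothetical parameters): with protocol='tcp',
# jump='ACCEPT', destination_port='22' and every other option left at its
# default/empty value, construct_rule() returns
#   ['-p', 'tcp', '-j', 'ACCEPT', '--destination-port', '22']
# i.e. flags appear in the fixed order the function appends them, not in the
# order a user would normally type them on an iptables command line.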
def push_arguments(iptables_path, action, params, make_rule=True):
cmd = [iptables_path]
cmd.extend(['-t', params['table']])
cmd.extend([action, params['chain']])
if action == '-I' and params['rule_num']:
cmd.extend([params['rule_num']])
if make_rule:
cmd.extend(construct_rule(params))
return cmd
def check_present(iptables_path, module, params):
cmd = push_arguments(iptables_path, '-C', params)
rc, _, __ = module.run_command(cmd, check_rc=False)
return (rc == 0)
def append_rule(iptables_path, module, params):
cmd = push_arguments(iptables_path, '-A', params)
module.run_command(cmd, check_rc=True)
def insert_rule(iptables_path, module, params):
cmd = push_arguments(iptables_path, '-I', params)
module.run_command(cmd, check_rc=True)
def remove_rule(iptables_path, module, params):
cmd = push_arguments(iptables_path, '-D', params)
module.run_command(cmd, check_rc=True)
def flush_table(iptables_path, module, params):
cmd = push_arguments(iptables_path, '-F', params, make_rule=False)
module.run_command(cmd, check_rc=True)
def set_chain_policy(iptables_path, module, params):
cmd = push_arguments(iptables_path, '-P', params, make_rule=False)
cmd.append(params['policy'])
module.run_command(cmd, check_rc=True)
def get_chain_policy(iptables_path, module, params):
cmd = push_arguments(iptables_path, '-L', params)
rc, out, _ = module.run_command(cmd, check_rc=True)
chain_header = out.split("\n")[0]
result = re.search(r'\(policy ([A-Z]+)\)', chain_header)
if result:
return result.group(1)
return None
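# Example of what get_chain_policy() parses (hypothetical output): the first
# line of `iptables -L INPUT` looks like
#   Chain INPUT (policy ACCEPT)
# from which the regex above extracts and returns 'ACCEPT'.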
def main():
module = AnsibleModule(
supports_check_mode=True,
argument_spec=dict(
table=dict(type='str', default='filter', choices=['filter', 'nat', 'mangle', 'raw', 'security']),
state=dict(type='str', default='present', choices=['absent', 'present']),
action=dict(type='str', default='append', choices=['append', 'insert']),
ip_version=dict(type='str', default='ipv4', choices=['ipv4', 'ipv6']),
chain=dict(type='str'),
rule_num=dict(type='str'),
protocol=dict(type='str'),
source=dict(type='str'),
to_source=dict(type='str'),
destination=dict(type='str'),
to_destination=dict(type='str'),
match=dict(type='list', default=[]),
tcp_flags=dict(type='dict',
options=dict(
flags=dict(type='list'),
flags_set=dict(type='list'))
),
jump=dict(type='str'),
gateway=dict(type='str'),
log_prefix=dict(type='str'),
log_level=dict(type='str',
choices=['0', '1', '2', '3', '4', '5', '6', '7',
'emerg', 'alert', 'crit', 'error',
'warning', 'notice', 'info', 'debug'],
default=None,
),
goto=dict(type='str'),
in_interface=dict(type='str'),
out_interface=dict(type='str'),
fragment=dict(type='str'),
set_counters=dict(type='str'),
source_port=dict(type='str'),
destination_port=dict(type='str'),
to_ports=dict(type='str'),
set_dscp_mark=dict(type='str'),
set_dscp_mark_class=dict(type='str'),
comment=dict(type='str'),
ctstate=dict(type='list', default=[]),
src_range=dict(type='str'),
dst_range=dict(type='str'),
limit=dict(type='str'),
limit_burst=dict(type='str'),
uid_owner=dict(type='str'),
reject_with=dict(type='str'),
icmp_type=dict(type='str'),
syn=dict(type='str', default='ignore', choices=['ignore', 'match', 'negate']),
flush=dict(type='bool', default=False),
policy=dict(type='str', choices=['ACCEPT', 'DROP', 'QUEUE', 'RETURN']),
),
mutually_exclusive=(
['set_dscp_mark', 'set_dscp_mark_class'],
['flush', 'policy'],
),
required_if=[
['jump', 'TEE', ['gateway']],
['jump', 'tee', ['gateway']],
]
)
args = dict(
changed=False,
failed=False,
ip_version=module.params['ip_version'],
table=module.params['table'],
chain=module.params['chain'],
flush=module.params['flush'],
rule=' '.join(construct_rule(module.params)),
state=module.params['state'],
)
ip_version = module.params['ip_version']
iptables_path = module.get_bin_path(BINS[ip_version], True)
# Check if chain option is required
if args['flush'] is False and args['chain'] is None:
module.fail_json(msg="Either chain or flush parameter must be specified.")
if module.params.get('log_prefix', None) or module.params.get('log_level', None):
if module.params['jump'] is None:
module.params['jump'] = 'LOG'
elif module.params['jump'] != 'LOG':
module.fail_json(msg="Logging options can only be used with the LOG jump target.")
# Flush the table
if args['flush'] is True:
args['changed'] = True
if not module.check_mode:
flush_table(iptables_path, module, module.params)
# Set the policy
elif module.params['policy']:
current_policy = get_chain_policy(iptables_path, module, module.params)
if not current_policy:
module.fail_json(msg='Can\'t detect current policy')
changed = current_policy != module.params['policy']
args['changed'] = changed
if changed and not module.check_mode:
set_chain_policy(iptables_path, module, module.params)
else:
insert = (module.params['action'] == 'insert')
rule_is_present = check_present(iptables_path, module, module.params)
should_be_present = (args['state'] == 'present')
# Check if target is up to date
args['changed'] = (rule_is_present != should_be_present)
if args['changed'] is False:
# Target is already up to date
module.exit_json(**args)
# Check only; don't modify
if not module.check_mode:
if should_be_present:
if insert:
insert_rule(iptables_path, module, module.params)
else:
append_rule(iptables_path, module, module.params)
else:
remove_rule(iptables_path, module, module.params)
module.exit_json(**args)
if __name__ == '__main__':
main()
|
alxgu/ansible
|
lib/ansible/modules/system/iptables.py
|
Python
|
gpl-3.0
| 26,931
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_gtm_server
short_description: Manages F5 BIG-IP GTM servers
description:
- Manage BIG-IP server configuration. This module is able to manipulate the server
definitions in a BIG-IP.
version_added: 2.5
options:
name:
description:
- The name of the server.
required: True
state:
description:
- The server state. If C(absent), an attempt to delete the server will be made.
This will only succeed if this server is not in use by a virtual server.
C(present) creates the server and enables it. If C(enabled), enable the server
if it exists. If C(disabled), create the server if needed, and set state to
C(disabled).
default: present
choices:
- present
- absent
- enabled
- disabled
datacenter:
description:
- Data center the server belongs to. When creating a new GTM server, this value
is required.
devices:
description:
- Lists the self IP addresses and translations for each device. When creating a
new GTM server, this value is required. This list is a complex list that
specifies a number of keys.
- The C(name) key specifies a name for the device. The device name must
be unique per server. This key is required.
- The C(address) key contains an IP address, or list of IP addresses, for the
destination server. This key is required.
- The C(translation) key contains an IP address to translate the C(address)
value above to. This key is optional.
- Specifying duplicate C(name) fields is a supported means of providing device
addresses. In this scenario, the addresses will be assigned to the C(name)'s list
of addresses.
server_type:
description:
- Specifies the server type. The server type determines the metrics that the
system can collect from the server. When creating a new GTM server, the default
value C(bigip) is used.
choices:
- alteon-ace-director
- cisco-css
- cisco-server-load-balancer
- generic-host
- radware-wsd
- windows-nt-4.0
- bigip
- cisco-local-director-v2
- extreme
- generic-load-balancer
- sun-solaris
- cacheflow
- cisco-local-director-v3
- foundry-server-iron
- netapp
- windows-2000-server
aliases:
- product
link_discovery:
description:
- Specifies whether the system auto-discovers the links for this server. When
creating a new GTM server, if this parameter is not specified, the default
value C(disabled) is used.
- If you set this parameter to C(enabled) or C(enabled-no-delete), you must
also ensure that the C(virtual_server_discovery) parameter is also set to
C(enabled) or C(enabled-no-delete).
choices:
- enabled
- disabled
- enabled-no-delete
virtual_server_discovery:
description:
- Specifies whether the system auto-discovers the virtual servers for this server.
When creating a new GTM server, if this parameter is not specified, the default
value C(disabled) is used.
choices:
- enabled
- disabled
- enabled-no-delete
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
iquery_options:
description:
- Specifies whether the Global Traffic Manager uses this BIG-IP
system to conduct a variety of probes before delegating traffic to it.
suboptions:
allow_path:
description:
- Specifies that the system verifies the logical network route between a data
center server and a local DNS server.
type: bool
allow_service_check:
description:
- Specifies that the system verifies that an application on a server is running,
by remotely running the application using an external service checker program.
type: bool
allow_snmp:
description:
- Specifies that the system checks the performance of a server running an SNMP
agent.
type: bool
version_added: 2.7
monitors:
description:
- Specifies the health monitors that the system currently uses to monitor this resource.
- When C(availability_requirements.type) is C(require), you may only have a single monitor in the
C(monitors) list.
version_added: 2.8
availability_requirements:
description:
      - If you activate more than one health monitor, specifies the number of health
        monitors that must receive successful responses in order for the link to be
        considered available.
      - See the illustrative task at the end of the EXAMPLES section for how these
        suboptions fit together.
suboptions:
type:
description:
- Monitor rule type when C(monitors) is specified.
          - When creating a new server, if this value is not specified, the default of C(all) will be used.
choices: ['all', 'at_least', 'require']
at_least:
description:
- Specifies the minimum number of active health monitors that must be successful
before the link is considered up.
- This parameter is only relevant when a C(type) of C(at_least) is used.
- This parameter will be ignored if a type of either C(all) or C(require) is used.
number_of_probes:
description:
- Specifies the minimum number of probes that must succeed for this server to be declared up.
          - When creating a new server, if this parameter is specified, then the C(number_of_probers)
parameter must also be specified.
- The value of this parameter should always be B(lower) than, or B(equal to), the value of C(number_of_probers).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
number_of_probers:
description:
- Specifies the number of probers that should be used when running probes.
          - When creating a new server, if this parameter is specified, then the C(number_of_probes)
parameter must also be specified.
          - The value of this parameter should always be B(higher) than, or B(equal to), the value of C(number_of_probes).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
version_added: 2.8
prober_preference:
description:
- Specifies the type of prober to use to monitor this server's resources.
- This option is ignored in C(TMOS) version C(12.x).
- From C(TMOS) version C(13.x) and up, when prober_preference is set to C(pool)
a C(prober_pool) parameter must be specified.
choices:
- inside-datacenter
- outside-datacenter
- inherit
- pool
version_added: 2.8
prober_fallback:
description:
- Specifies the type of prober to use to monitor this server's resources
when the preferred prober is not available.
- This option is ignored in C(TMOS) version C(12.x).
- From C(TMOS) version C(13.x) and up, when prober_preference is set to C(pool)
a C(prober_pool) parameter must be specified.
      - The choices are mutually exclusive with the C(prober_preference) parameter,
        with the exception of the C(any-available) and C(none) options.
choices:
- any
- inside-datacenter
- outside-datacenter
- inherit
- pool
- none
version_added: 2.8
prober_pool:
description:
- Specifies the name of the prober pool to use to monitor this server's resources.
- From C(TMOS) version C(13.x) and up, this parameter is mandatory when C(prober_preference) is set to C(pool).
      - The format of the name can either be prepended by partition (C(/Common/foo)), or specified
        just as an object name (C(foo)).
      - In C(TMOS) version C(12.x), C(prober_pool) can be set to an empty string to revert to the default setting of C(inherit).
version_added: 2.8
limits:
description:
      - Specifies resource thresholds or limit requirements at the server level.
- When you enable one or more limit settings, the system then uses that data to take
members in and out of service.
      - You can define limits for any or all of the limit settings. However, when the
        server does not meet a resource threshold limit requirement, the system marks
        the server as unavailable and directs load-balancing traffic to another resource.
version_added: 2.8
suboptions:
bits_enabled:
description:
          - Whether the bits limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
packets_enabled:
description:
          - Whether the packets limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
connections_enabled:
description:
          - Whether the current connections limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
cpu_enabled:
description:
          - Whether the CPU limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
memory_enabled:
description:
          - Whether the memory limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
bits_limit:
description:
- Specifies the maximum allowable data throughput rate, in bits per second,
for the member.
- If the network traffic volume exceeds this limit, the system marks the
member as unavailable.
packets_limit:
description:
- Specifies the maximum allowable data transfer rate, in packets per second,
for the member.
- If the network traffic volume exceeds this limit, the system marks the
member as unavailable.
connections_limit:
description:
          - Specifies the maximum number of concurrent connections, combined, for all of
            the members.
- If the connections exceed this limit, the system marks the server as
unavailable.
cpu_limit:
description:
- Specifies the percent of CPU usage.
- If percent of CPU usage goes above the limit, the system marks the server as unavailable.
memory_limit:
description:
- Specifies the available memory required by the virtual servers on the server.
- If available memory falls below this limit, the system marks the server as unavailable.
extends_documentation_fragment: f5
author:
- Robert Teller (@r-teller)
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create server "GTM_Server"
bigip_gtm_server:
name: GTM_Server
datacenter: /Common/New York
server_type: bigip
link_discovery: disabled
virtual_server_discovery: disabled
devices:
- name: server_1
address: 1.1.1.1
- name: server_2
address: 2.2.2.1
translation: 192.168.2.1
- name: server_2
address: 2.2.2.2
- name: server_3
addresses:
- address: 3.3.3.1
- address: 3.3.3.2
- name: server_4
addresses:
- address: 4.4.4.1
translation: 192.168.14.1
- address: 4.4.4.2
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
- name: Create server "GTM_Server" with expanded keys
bigip_gtm_server:
server: lb.mydomain.com
user: admin
password: secret
name: GTM_Server
datacenter: /Common/New York
server_type: bigip
link_discovery: disabled
virtual_server_discovery: disabled
devices:
- name: server_1
address: 1.1.1.1
- name: server_2
address: 2.2.2.1
translation: 192.168.2.1
- name: server_2
address: 2.2.2.2
- name: server_3
addresses:
- address: 3.3.3.1
- address: 3.3.3.2
- name: server_4
addresses:
- address: 4.4.4.1
translation: 192.168.14.1
- address: 4.4.4.2
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
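# The task below is an illustrative sketch and is not part of the original examples. It assumes
# a pre-existing prober pool named /Common/prober_pool_1 and two hypothetical GTM monitors, and
# shows how monitors, availability_requirements, limits and the prober options fit together.
- name: Create server "GTM_Server" with monitors, limits and prober settings (illustrative)
  bigip_gtm_server:
    name: GTM_Server
    datacenter: /Common/New York
    server_type: bigip
    devices:
      - name: server_1
        address: 1.1.1.1
    monitors:
      - /Common/gtm_http
      - /Common/gtm_https
    availability_requirements:
      type: at_least
      at_least: 1
    limits:
      bits_enabled: yes
      bits_limit: 100000
      connections_enabled: yes
      connections_limit: 1024
    prober_preference: pool
    prober_fallback: outside-datacenter
    prober_pool: /Common/prober_pool_1
    provider:
      user: admin
      password: secret
      server: lb.mydomain.com
  delegate_to: localhost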
'''
RETURN = r'''
bits_enabled:
description: Whether the bits limit is enabled.
returned: changed
type: bool
sample: yes
bits_limit:
  description: The new bits_limit limit.
returned: changed
type: int
sample: 100
connections_enabled:
description: Whether the connections limit is enabled.
returned: changed
type: bool
sample: yes
connections_limit:
description: The new connections_limit limit.
returned: changed
type: int
sample: 100
monitors:
description: The new list of monitors for the resource.
returned: changed
type: list
sample: ['/Common/monitor1', '/Common/monitor2']
link_discovery:
description: The new C(link_discovery) configured on the remote device.
returned: changed
type: str
sample: enabled
virtual_server_discovery:
  description: The new C(virtual_server_discovery) configured on the remote device.
returned: changed
type: str
sample: disabled
server_type:
description: The new type of the server.
returned: changed
type: str
sample: bigip
datacenter:
description: The new C(datacenter) which the server is part of.
returned: changed
type: str
sample: datacenter01
packets_enabled:
description: Whether the packets limit is enabled.
returned: changed
type: bool
sample: yes
packets_limit:
description: The new packets_limit limit.
returned: changed
type: int
sample: 100
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from distutils.version import LooseVersion
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import is_empty_list
from library.module_utils.network.f5.icontrol import tmos_version
from library.module_utils.network.f5.icontrol import module_provisioned
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import is_empty_list
from ansible.module_utils.network.f5.icontrol import tmos_version
from ansible.module_utils.network.f5.icontrol import module_provisioned
try:
from collections import OrderedDict
except ImportError:
try:
from ordereddict import OrderedDict
except ImportError:
pass
class Parameters(AnsibleF5Parameters):
api_map = {
'product': 'server_type',
'virtualServerDiscovery': 'virtual_server_discovery',
'linkDiscovery': 'link_discovery',
'addresses': 'devices',
'iqAllowPath': 'iquery_allow_path',
'iqAllowServiceCheck': 'iquery_allow_service_check',
'iqAllowSnmp': 'iquery_allow_snmp',
'monitor': 'monitors',
'proberPreference': 'prober_preference',
'proberPool': 'prober_pool',
'proberFallback': 'prober_fallback',
'limitMaxBps': 'bits_limit',
'limitMaxBpsStatus': 'bits_enabled',
'limitMaxConnections': 'connections_limit',
'limitMaxConnectionsStatus': 'connections_enabled',
'limitMaxPps': 'packets_limit',
'limitMaxPpsStatus': 'packets_enabled',
'limitCpuUsage': 'cpu_limit',
'limitCpuUsageStatus': 'cpu_enabled',
'limitMemAvail': 'memory_limit',
'limitMemAvailStatus': 'memory_enabled',
}
api_attributes = [
'linkDiscovery',
'virtualServerDiscovery',
'product',
'addresses',
'datacenter',
'enabled',
'disabled',
'iqAllowPath',
'iqAllowServiceCheck',
'iqAllowSnmp',
'monitor',
'proberPreference',
'proberPool',
'proberFallback',
'limitMaxBps',
'limitMaxBpsStatus',
'limitMaxConnections',
'limitMaxConnectionsStatus',
'limitMaxPps',
'limitMaxPpsStatus',
'limitCpuUsage',
'limitCpuUsageStatus',
'limitMemAvail',
'limitMemAvailStatus',
]
updatables = [
'link_discovery',
'virtual_server_discovery',
'server_type_and_devices',
'datacenter',
'state',
'iquery_allow_path',
'iquery_allow_service_check',
'iquery_allow_snmp',
'monitors',
'prober_preference',
'prober_pool',
'prober_fallback',
'bits_enabled',
'bits_limit',
'connections_enabled',
'connections_limit',
'packets_enabled',
'packets_limit',
'cpu_enabled',
'cpu_limit',
'memory_enabled',
'memory_limit',
]
returnables = [
'link_discovery',
'virtual_server_discovery',
'server_type',
'datacenter',
'enabled',
'iquery_allow_path',
'iquery_allow_service_check',
'iquery_allow_snmp',
'devices',
'monitors',
'availability_requirements',
'prober_preference',
'prober_pool',
'prober_fallback',
'bits_enabled',
'bits_limit',
'connections_enabled',
'connections_limit',
'packets_enabled',
'packets_limit',
'cpu_enabled',
'cpu_limit',
'memory_enabled',
'memory_limit',
]
class ApiParameters(Parameters):
@property
def devices(self):
if self._values['devices'] is None:
return None
return self._values['devices']
@property
def server_type(self):
if self._values['server_type'] is None:
return None
elif self._values['server_type'] in ['single-bigip', 'redundant-bigip']:
return 'bigip'
else:
return self._values['server_type']
@property
def raw_server_type(self):
if self._values['server_type'] is None:
return None
return self._values['server_type']
@property
def enabled(self):
if self._values['enabled'] is None:
return None
return True
@property
def disabled(self):
if self._values['disabled'] is None:
return None
return True
@property
def iquery_allow_path(self):
if self._values['iquery_allow_path'] is None:
return None
elif self._values['iquery_allow_path'] == 'yes':
return True
return False
@property
def iquery_allow_service_check(self):
if self._values['iquery_allow_service_check'] is None:
return None
elif self._values['iquery_allow_service_check'] == 'yes':
return True
return False
@property
def iquery_allow_snmp(self):
if self._values['iquery_allow_snmp'] is None:
return None
elif self._values['iquery_allow_snmp'] == 'yes':
return True
return False
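    # BIG-IP encodes the availability rule inside the monitor string itself:
    # 'min N of { ... }' maps to at_least, 'require N from M { ... }' maps to require,
    # and a plain 'mon1 and mon2' expression means all monitors are required.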
@property
def availability_requirement_type(self):
if self._values['monitors'] is None:
return None
if 'min ' in self._values['monitors']:
return 'at_least'
elif 'require ' in self._values['monitors']:
return 'require'
else:
return 'all'
@property
def monitors_list(self):
if self._values['monitors'] is None:
return []
try:
result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
result.sort()
return result
except Exception:
return self._values['monitors']
@property
def monitors(self):
if self._values['monitors'] is None:
return None
if self._values['monitors'] == '/Common/bigip':
return '/Common/bigip'
monitors = [fq_name(self.partition, x) for x in self.monitors_list]
if self.availability_requirement_type == 'at_least':
monitors = ' '.join(monitors)
result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
elif self.availability_requirement_type == 'require':
monitors = ' '.join(monitors)
result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
else:
result = ' and '.join(monitors).strip()
return result
@property
def number_of_probes(self):
"""Returns the probes value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the first of the numeric values. This value represents
the "probes" value that can be updated in the module.
Returns:
int: The probes value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+(?P<probes>\d+)\s+from'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return matches.group('probes')
@property
def number_of_probers(self):
"""Returns the probers value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the second of the numeric values. This value represents
the "probers" value that can be updated in the module.
Returns:
int: The probers value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+\d+\s+from\s+(?P<probers>\d+)\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return matches.group('probers')
@property
def at_least(self):
"""Returns the 'at least' value from the monitor string.
        The monitor string for an 'at least' monitor looks like this.
min 1 of { /Common/gateway_icmp }
        This method parses out the numeric value. This value represents
the "at_least" value that can be updated in the module.
Returns:
int: The at_least value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'min\s+(?P<least>\d+)\s+of\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return matches.group('least')
class ModuleParameters(Parameters):
def _get_limit_value(self, type):
if self._values['limits'] is None:
return None
if self._values['limits'][type] is None:
return None
return int(self._values['limits'][type])
def _get_limit_status(self, type):
if self._values['limits'] is None:
return None
if self._values['limits'][type] is None:
return None
if self._values['limits'][type]:
return 'enabled'
return 'disabled'
@property
def devices(self):
if self._values['devices'] is None:
return None
result = []
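        # Each entry may provide a single 'address' or a list of 'addresses'; both forms are
        # flattened into {'name': <ip>, 'deviceName': <device name>, 'translation': ...} dicts,
        # the shape this module sends to the API as the 'addresses' collection (see api_map).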
for device in self._values['devices']:
if not any(x for x in ['address', 'addresses'] if x in device):
raise F5ModuleError(
"The specified device list must contain an 'address' or 'addresses' key"
)
if 'address' in device:
translation = self._determine_translation(device)
name = device['address']
device_name = device['name']
result.append({
'name': name,
'deviceName': device_name,
'translation': translation
})
elif 'addresses' in device:
for address in device['addresses']:
translation = self._determine_translation(address)
name = address['address']
device_name = device['name']
result.append({
'name': name,
'deviceName': device_name,
'translation': translation
})
return result
@property
def enabled(self):
if self._values['state'] in ['present', 'enabled']:
return True
return False
@property
def datacenter(self):
if self._values['datacenter'] is None:
return None
return fq_name(self.partition, self._values['datacenter'])
def _determine_translation(self, device):
if 'translation' not in device:
return 'none'
return device['translation']
@property
def state(self):
if self._values['state'] == 'enabled':
return 'present'
return self._values['state']
@property
def iquery_allow_path(self):
if self._values['iquery_options'] is None:
return None
elif self._values['iquery_options']['allow_path'] is None:
return None
return self._values['iquery_options']['allow_path']
@property
def iquery_allow_service_check(self):
if self._values['iquery_options'] is None:
return None
elif self._values['iquery_options']['allow_service_check'] is None:
return None
return self._values['iquery_options']['allow_service_check']
@property
def iquery_allow_snmp(self):
if self._values['iquery_options'] is None:
return None
elif self._values['iquery_options']['allow_snmp'] is None:
return None
return self._values['iquery_options']['allow_snmp']
@property
def monitors_list(self):
if self._values['monitors'] is None:
return []
try:
result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
result.sort()
return result
except Exception:
return self._values['monitors']
@property
def monitors(self):
if self._values['monitors'] is None:
return None
if is_empty_list(self._values['monitors']):
return '/Common/bigip'
monitors = [fq_name(self.partition, x) for x in self.monitors_list]
if self.availability_requirement_type == 'at_least':
if self.at_least > len(self.monitors_list):
raise F5ModuleError(
"The 'at_least' value must not exceed the number of 'monitors'."
)
monitors = ' '.join(monitors)
result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
elif self.availability_requirement_type == 'require':
monitors = ' '.join(monitors)
if self.number_of_probes > self.number_of_probers:
raise F5ModuleError(
"The 'number_of_probes' must not exceed the 'number_of_probers'."
)
result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
else:
result = ' and '.join(monitors).strip()
return result
def _get_availability_value(self, type):
if self._values['availability_requirements'] is None:
return None
if self._values['availability_requirements'][type] is None:
return None
return int(self._values['availability_requirements'][type])
@property
def availability_requirement_type(self):
if self._values['availability_requirements'] is None:
return None
return self._values['availability_requirements']['type']
@property
def number_of_probes(self):
return self._get_availability_value('number_of_probes')
@property
def number_of_probers(self):
return self._get_availability_value('number_of_probers')
@property
def at_least(self):
return self._get_availability_value('at_least')
@property
def prober_pool(self):
if self._values['prober_pool'] is None:
return None
if self._values['prober_pool'] == '':
return self._values['prober_pool']
result = fq_name(self.partition, self._values['prober_pool'])
return result
@property
def prober_fallback(self):
if self._values['prober_fallback'] == 'any':
return 'any-available'
return self._values['prober_fallback']
@property
def bits_limit(self):
return self._get_limit_value('bits_limit')
@property
def packets_limit(self):
return self._get_limit_value('packets_limit')
@property
def connections_limit(self):
return self._get_limit_value('connections_limit')
@property
def cpu_limit(self):
return self._get_limit_value('cpu_limit')
@property
def memory_limit(self):
return self._get_limit_value('memory_limit')
@property
def bits_enabled(self):
return self._get_limit_status('bits_enabled')
@property
def packets_enabled(self):
return self._get_limit_status('packets_enabled')
@property
def connections_enabled(self):
return self._get_limit_status('connections_enabled')
@property
def cpu_enabled(self):
return self._get_limit_status('cpu_enabled')
@property
def memory_enabled(self):
return self._get_limit_status('memory_enabled')
class Changes(Parameters):
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
class UsableChanges(Changes):
@property
def monitors(self):
monitor_string = self._values['monitors']
if monitor_string is None:
return None
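        # Strip the braces from an availability expression before it is sent to the device,
        # e.g. 'min 1 of { /Common/tcp }' is reduced to 'min 1 of /Common/tcp'.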
        if '{' in monitor_string and '}' in monitor_string:
tmp = monitor_string.strip('}').split('{')
monitor = ''.join(tmp).rstrip()
return monitor
return monitor_string
@property
def iquery_allow_path(self):
if self._values['iquery_allow_path'] is None:
return None
elif self._values['iquery_allow_path']:
return 'yes'
return 'no'
@property
def iquery_allow_service_check(self):
if self._values['iquery_allow_service_check'] is None:
return None
elif self._values['iquery_allow_service_check']:
return 'yes'
return 'no'
@property
def iquery_allow_snmp(self):
if self._values['iquery_allow_snmp'] is None:
return None
elif self._values['iquery_allow_snmp']:
return 'yes'
return 'no'
class ReportableChanges(Changes):
@property
def server_type(self):
if self._values['server_type'] in ['single-bigip', 'redundant-bigip']:
return 'bigip'
return self._values['server_type']
@property
def monitors(self):
if self._values['monitors'] is None:
return []
try:
result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
result.sort()
return result
except Exception:
return self._values['monitors']
@property
def availability_requirement_type(self):
if self._values['monitors'] is None:
return None
if 'min ' in self._values['monitors']:
return 'at_least'
elif 'require ' in self._values['monitors']:
return 'require'
else:
return 'all'
@property
def number_of_probes(self):
"""Returns the probes value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the first of the numeric values. This value represents
the "probes" value that can be updated in the module.
Returns:
int: The probes value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+(?P<probes>\d+)\s+from'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return int(matches.group('probes'))
@property
def number_of_probers(self):
"""Returns the probers value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the second of the numeric values. This value represents
the "probers" value that can be updated in the module.
Returns:
int: The probers value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+\d+\s+from\s+(?P<probers>\d+)\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return int(matches.group('probers'))
@property
def at_least(self):
"""Returns the 'at least' value from the monitor string.
        The monitor string for an 'at least' monitor looks like this.
min 1 of { /Common/gateway_icmp }
        This method parses out the numeric value. This value represents
the "at_least" value that can be updated in the module.
Returns:
int: The at_least value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'min\s+(?P<least>\d+)\s+of\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return int(matches.group('least'))
@property
def availability_requirements(self):
if self._values['monitors'] is None:
return None
result = dict()
result['type'] = self.availability_requirement_type
result['at_least'] = self.at_least
result['number_of_probers'] = self.number_of_probers
result['number_of_probes'] = self.number_of_probes
return result
@property
def prober_fallback(self):
if self._values['prober_fallback'] == 'any-available':
return 'any'
return self._values['prober_fallback']
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
want = getattr(self.want, param)
try:
have = getattr(self.have, param)
if want != have:
return want
except AttributeError:
return want
def _discovery_constraints(self):
if self.want.virtual_server_discovery is None:
virtual_server_discovery = self.have.virtual_server_discovery
else:
virtual_server_discovery = self.want.virtual_server_discovery
if self.want.link_discovery is None:
link_discovery = self.have.link_discovery
else:
link_discovery = self.want.link_discovery
if link_discovery in ['enabled', 'enabled-no-delete'] and virtual_server_discovery == 'disabled':
raise F5ModuleError(
"Virtual server discovery must be enabled if link discovery is enabled"
)
def _devices_changed(self):
if self.want.devices is None and self.want.server_type is None:
return None
if self.want.devices is None:
devices = self.have.devices
else:
devices = self.want.devices
if self.have.devices is None:
have_devices = []
else:
have_devices = self.have.devices
if len(devices) == 0:
raise F5ModuleError(
"A GTM server must have at least one device associated with it."
)
want = [OrderedDict(sorted(d.items())) for d in devices]
have = [OrderedDict(sorted(d.items())) for d in have_devices]
if want != have:
return True
return False
def _server_type_changed(self):
if self.want.server_type is None:
self.want.update({'server_type': self.have.server_type})
if self.want.server_type != self.have.server_type:
return True
return False
@property
def link_discovery(self):
self._discovery_constraints()
if self.want.link_discovery != self.have.link_discovery:
return self.want.link_discovery
@property
def virtual_server_discovery(self):
self._discovery_constraints()
if self.want.virtual_server_discovery != self.have.virtual_server_discovery:
return self.want.virtual_server_discovery
def _handle_current_server_type_and_devices(self, devices_change, server_change):
result = {}
if devices_change:
result['devices'] = self.want.devices
if server_change:
result['server_type'] = self.want.server_type
return result
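    # TMOS versions prior to 13.0.0 do not accept the generic 'bigip' product name; they
    # distinguish 'single-bigip' and 'redundant-bigip' depending on how many devices the
    # server has, so the desired server_type has to be translated before comparison.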
def _handle_legacy_server_type_and_devices(self, devices_change, server_change):
result = {}
if server_change and devices_change:
result['devices'] = self.want.devices
if len(self.want.devices) > 1 and self.want.server_type == 'bigip':
if self.have.raw_server_type != 'redundant-bigip':
result['server_type'] = 'redundant-bigip'
elif self.want.server_type == 'bigip':
if self.have.raw_server_type != 'single-bigip':
result['server_type'] = 'single-bigip'
else:
result['server_type'] = self.want.server_type
elif devices_change:
result['devices'] = self.want.devices
if len(self.want.devices) > 1 and self.have.server_type == 'bigip':
if self.have.raw_server_type != 'redundant-bigip':
result['server_type'] = 'redundant-bigip'
elif self.have.server_type == 'bigip':
if self.have.raw_server_type != 'single-bigip':
result['server_type'] = 'single-bigip'
else:
result['server_type'] = self.want.server_type
elif server_change:
if len(self.have.devices) > 1 and self.want.server_type == 'bigip':
if self.have.raw_server_type != 'redundant-bigip':
result['server_type'] = 'redundant-bigip'
elif self.want.server_type == 'bigip':
if self.have.raw_server_type != 'single-bigip':
result['server_type'] = 'single-bigip'
else:
result['server_type'] = self.want.server_type
return result
@property
def server_type_and_devices(self):
"""Compares difference between server type and devices list
These two parameters are linked with each other and, therefore, must be
compared together to ensure that the correct setting is sent to BIG-IP
        :return: A dict of settings that need to change, or None if no change is required.
"""
devices_change = self._devices_changed()
server_change = self._server_type_changed()
if not devices_change and not server_change:
return None
tmos = tmos_version(self.client)
if LooseVersion(tmos) >= LooseVersion('13.0.0'):
result = self._handle_current_server_type_and_devices(
devices_change, server_change
)
return result
else:
result = self._handle_legacy_server_type_and_devices(
devices_change, server_change
)
return result
@property
def state(self):
if self.want.state == 'disabled' and self.have.enabled:
return dict(disabled=True)
elif self.want.state in ['present', 'enabled'] and self.have.disabled:
return dict(enabled=True)
@property
def monitors(self):
if self.want.monitors is None:
return None
if self.want.monitors == '/Common/bigip' and self.have.monitors == '/Common/bigip':
return None
if self.want.monitors == '/Common/bigip' and self.have.monitors is None:
return None
if self.want.monitors == '/Common/bigip' and len(self.have.monitors) > 0:
return '/Common/bigip'
if self.have.monitors is None:
return self.want.monitors
if self.have.monitors != self.want.monitors:
return self.want.monitors
@property
def prober_pool(self):
if self.want.prober_pool is None:
return None
if self.have.prober_pool is None:
if self.want.prober_pool == '':
return None
if self.want.prober_pool != self.have.prober_pool:
return self.want.prober_pool
@property
def prober_preference(self):
if self.want.prober_preference is None:
return None
if self.want.prober_preference == self.have.prober_preference:
return None
if self.want.prober_preference == 'pool' and self.want.prober_pool is None:
raise F5ModuleError(
"A prober_pool needs to be set if prober_preference is set to 'pool'"
)
if self.want.prober_preference != 'pool' and self.have.prober_preference == 'pool':
if self.want.prober_fallback != 'pool' and self.want.prober_pool != '':
raise F5ModuleError(
"To change prober_preference from {0} to {1}, set prober_pool to an empty string".format(
self.have.prober_preference,
self.want.prober_preference
)
)
if self.want.prober_preference == self.want.prober_fallback:
raise F5ModuleError(
"Prober_preference and prober_fallback must not be equal."
)
if self.want.prober_preference == self.have.prober_fallback:
raise F5ModuleError(
"Cannot set prober_preference to {0} if prober_fallback on device is set to {1}.".format(
self.want.prober_preference,
self.have.prober_fallback
)
)
if self.want.prober_preference != self.have.prober_preference:
return self.want.prober_preference
@property
def prober_fallback(self):
if self.want.prober_fallback is None:
return None
if self.want.prober_fallback == self.have.prober_fallback:
return None
if self.want.prober_fallback == 'pool' and self.want.prober_pool is None:
raise F5ModuleError(
"A prober_pool needs to be set if prober_fallback is set to 'pool'"
)
if self.want.prober_fallback != 'pool' and self.have.prober_fallback == 'pool':
if self.want.prober_preference != 'pool' and self.want.prober_pool != '':
raise F5ModuleError(
"To change prober_fallback from {0} to {1}, set prober_pool to an empty string".format(
self.have.prober_fallback,
self.want.prober_fallback
)
)
if self.want.prober_preference == self.want.prober_fallback:
raise F5ModuleError(
"Prober_preference and prober_fallback must not be equal."
)
if self.want.prober_fallback == self.have.prober_preference:
raise F5ModuleError(
"Cannot set prober_fallback to {0} if prober_preference on device is set to {1}.".format(
self.want.prober_fallback,
self.have.prober_preference
)
)
if self.want.prober_fallback != self.have.prober_fallback:
return self.want.prober_fallback
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.kwargs = kwargs
def exec_module(self):
if not module_provisioned(self.client, 'gtm'):
raise F5ModuleError(
"GTM must be provisioned to use this module."
)
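        # Older TMOS releases (pre-13.0.0) use the legacy single/redundant-bigip product
        # names and ignore the prober settings, so a separate manager handles them.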
if self.version_is_less_than('13.0.0'):
manager = self.get_manager('v1')
else:
manager = self.get_manager('v2')
return manager.exec_module()
def get_manager(self, type):
if type == 'v1':
return V1Manager(**self.kwargs)
elif type == 'v2':
return V2Manager(**self.kwargs)
def version_is_less_than(self, version):
tmos = tmos_version(self.client)
if LooseVersion(tmos) < LooseVersion(version):
return True
else:
return False
class BaseManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.want.update(dict(client=self.client))
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
diff.client = self.client
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state in ['present', 'enabled', 'disabled']:
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def _check_link_discovery_requirements(self):
if self.want.link_discovery in ['enabled', 'enabled-no-delete'] and self.want.virtual_server_discovery == 'disabled':
raise F5ModuleError(
"Virtual server discovery must be enabled if link discovery is enabled"
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
if self.want.state == 'disabled':
self.want.update({'disabled': True})
elif self.want.state in ['present', 'enabled']:
self.want.update({'enabled': True})
self.adjust_server_type_by_version()
self.should_update()
if self.want.devices is None:
raise F5ModuleError(
"You must provide an initial device."
)
self._assign_creation_defaults()
self.handle_prober_settings()
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
if self.exists():
return True
else:
raise F5ModuleError("Failed to create the server")
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/gtm/server/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response['selfLink']
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent(self):
changed = False
if self.exists():
changed = self.remove()
return changed
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the server")
return True
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
class V1Manager(BaseManager):
def _assign_creation_defaults(self):
if self.want.server_type is None:
if len(self.want.devices) == 0:
raise F5ModuleError(
"You must provide at least one device."
)
elif len(self.want.devices) == 1:
self.want.update({'server_type': 'single-bigip'})
else:
self.want.update({'server_type': 'redundant-bigip'})
if self.want.link_discovery is None:
self.want.update({'link_discovery': 'disabled'})
if self.want.virtual_server_discovery is None:
self.want.update({'virtual_server_discovery': 'disabled'})
self._check_link_discovery_requirements()
def adjust_server_type_by_version(self):
if len(self.want.devices) == 1 and self.want.server_type == 'bigip':
self.want.update({'server_type': 'single-bigip'})
if len(self.want.devices) > 1 and self.want.server_type == 'bigip':
self.want.update({'server_type': 'redundant-bigip'})
def update(self):
self.have = self.read_current_from_device()
self.handle_prober_settings()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
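    # prober_preference and prober_fallback are ignored on TMOS 12.x (see DOCUMENTATION),
    # so they are dropped from the desired state instead of being sent to the API.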
def handle_prober_settings(self):
if self.want.prober_preference is not None:
self.want._values.pop('prober_preference')
if self.want.prober_fallback is not None:
self.want._values.pop('prober_fallback')
class V2Manager(BaseManager):
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def _assign_creation_defaults(self):
if self.want.server_type is None:
self.want.update({'server_type': 'bigip'})
if self.want.link_discovery is None:
self.want.update({'link_discovery': 'disabled'})
if self.want.virtual_server_discovery is None:
self.want.update({'virtual_server_discovery': 'disabled'})
self._check_link_discovery_requirements()
def adjust_server_type_by_version(self):
pass
def handle_prober_settings(self):
if self.want.prober_preference == 'pool' and self.want.prober_pool is None:
raise F5ModuleError(
"A prober_pool needs to be set if prober_preference is set to 'pool'"
)
if self.want.prober_preference is not None and self.want.prober_fallback is not None:
if self.want.prober_preference == self.want.prober_fallback:
raise F5ModuleError(
"The parameters for prober_preference and prober_fallback must not be the same."
)
if self.want.prober_fallback == 'pool' and self.want.prober_pool is None:
raise F5ModuleError(
"A prober_pool needs to be set if prober_fallback is set to 'pool'"
)
class ArgumentSpec(object):
def __init__(self):
self.states = ['absent', 'present', 'enabled', 'disabled']
self.server_types = [
'alteon-ace-director',
'cisco-css',
'cisco-server-load-balancer',
'generic-host',
'radware-wsd',
'windows-nt-4.0',
'bigip',
'cisco-local-director-v2',
'extreme',
'generic-load-balancer',
'sun-solaris',
'cacheflow',
'cisco-local-director-v3',
'foundry-server-iron',
'netapp',
'windows-2000-server'
]
self.supports_check_mode = True
argument_spec = dict(
state=dict(
default='present',
choices=self.states,
),
name=dict(required=True),
server_type=dict(
choices=self.server_types,
aliases=['product']
),
datacenter=dict(),
link_discovery=dict(
choices=['enabled', 'disabled', 'enabled-no-delete']
),
virtual_server_discovery=dict(
choices=['enabled', 'disabled', 'enabled-no-delete']
),
devices=dict(
type='list'
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
iquery_options=dict(
type='dict',
options=dict(
allow_path=dict(type='bool'),
allow_service_check=dict(type='bool'),
allow_snmp=dict(type='bool')
)
),
availability_requirements=dict(
type='dict',
options=dict(
type=dict(
choices=['all', 'at_least', 'require'],
required=True
),
at_least=dict(type='int'),
number_of_probes=dict(type='int'),
number_of_probers=dict(type='int')
),
mutually_exclusive=[
['at_least', 'number_of_probes'],
['at_least', 'number_of_probers'],
],
required_if=[
['type', 'at_least', ['at_least']],
['type', 'require', ['number_of_probes', 'number_of_probers']]
]
),
limits=dict(
type='dict',
options=dict(
bits_enabled=dict(type='bool'),
packets_enabled=dict(type='bool'),
connections_enabled=dict(type='bool'),
cpu_enabled=dict(type='bool'),
memory_enabled=dict(type='bool'),
bits_limit=dict(type='int'),
packets_limit=dict(type='int'),
connections_limit=dict(type='int'),
cpu_limit=dict(type='int'),
memory_limit=dict(type='int'),
)
),
monitors=dict(type='list'),
prober_preference=dict(
choices=['inside-datacenter', 'outside-datacenter', 'inherit', 'pool']
),
prober_fallback=dict(
choices=['inside-datacenter', 'outside-datacenter',
'inherit', 'pool', 'any', 'none']
),
prober_pool=dict()
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
|
Jorge-Rodriguez/ansible
|
lib/ansible/modules/network/f5/bigip_gtm_server.py
|
Python
|
gpl-3.0
| 61,691
|
import sys
import os
import platform
import re
import imp
from Tkinter import *
import tkSimpleDialog
import tkMessageBox
import webbrowser
from idlelib.MultiCall import MultiCallCreator
from idlelib import idlever
from idlelib import WindowList
from idlelib import SearchDialog
from idlelib import GrepDialog
from idlelib import ReplaceDialog
from idlelib import PyParse
from idlelib.configHandler import idleConf
from idlelib import aboutDialog, textView, configDialog
from idlelib import macosxSupport
# The default tab setting for a Text widget, in average-width characters.
TK_TABWIDTH_DEFAULT = 8
_py_version = ' (%s)' % platform.python_version()
def _sphinx_version():
"Format sys.version_info to produce the Sphinx version string used to install the chm docs"
major, minor, micro, level, serial = sys.version_info
release = '%s%s' % (major, minor)
if micro:
release += '%s' % (micro,)
if level == 'candidate':
release += 'rc%s' % (serial,)
elif level != 'final':
release += '%s%s' % (level[0], serial)
return release
def _find_module(fullname, path=None):
"""Version of imp.find_module() that handles hierarchical module names"""
file = None
for tgt in fullname.split('.'):
if file is not None:
file.close() # close intermediate files
(file, filename, descr) = imp.find_module(tgt, path)
if descr[2] == imp.PY_SOURCE:
break # find but not load the source file
module = imp.load_module(tgt, file, filename, descr)
try:
path = module.__path__
except AttributeError:
raise ImportError, 'No source for module ' + module.__name__
if descr[2] != imp.PY_SOURCE:
        # If all of the above fails and didn't raise an exception, fall back
# to a straight import which can find __init__.py in a package.
m = __import__(fullname)
try:
filename = m.__file__
except AttributeError:
pass
else:
file = None
base, ext = os.path.splitext(filename)
if ext == '.pyc':
ext = '.py'
filename = base + ext
descr = filename, None, imp.PY_SOURCE
return file, filename, descr
class HelpDialog(object):
def __init__(self):
self.parent = None # parent of help window
        self.dlg = None # the help window itself
def display(self, parent, near=None):
""" Display the help dialog.
parent - parent widget for the help window
near - a Toplevel widget (e.g. EditorWindow or PyShell)
to use as a reference for placing the help window
"""
if self.dlg is None:
self.show_dialog(parent)
if near:
self.nearwindow(near)
def show_dialog(self, parent):
self.parent = parent
fn=os.path.join(os.path.abspath(os.path.dirname(__file__)),'help.txt')
self.dlg = dlg = textView.view_file(parent,'Help',fn, modal=False)
dlg.bind('<Destroy>', self.destroy, '+')
def nearwindow(self, near):
# Place the help dialog near the window specified by parent.
# Note - this may not reposition the window in Metacity
# if "/apps/metacity/general/disable_workarounds" is enabled
dlg = self.dlg
geom = (near.winfo_rootx() + 10, near.winfo_rooty() + 10)
dlg.withdraw()
dlg.geometry("=+%d+%d" % geom)
dlg.deiconify()
dlg.lift()
def destroy(self, ev=None):
self.dlg = None
self.parent = None
helpDialog = HelpDialog() # singleton instance
def _help_dialog(parent): # wrapper for htest
helpDialog.show_dialog(parent)
class EditorWindow(object):
from idlelib.Percolator import Percolator
from idlelib.ColorDelegator import ColorDelegator
from idlelib.UndoDelegator import UndoDelegator
from idlelib.IOBinding import IOBinding, filesystemencoding, encoding
from idlelib import Bindings
from Tkinter import Toplevel
from idlelib.MultiStatusBar import MultiStatusBar
help_url = None
def __init__(self, flist=None, filename=None, key=None, root=None):
if EditorWindow.help_url is None:
dochome = os.path.join(sys.prefix, 'Doc', 'index.html')
if sys.platform.count('linux'):
# look for html docs in a couple of standard places
pyver = 'python-docs-' + '%s.%s.%s' % sys.version_info[:3]
if os.path.isdir('/var/www/html/python/'): # "python2" rpm
dochome = '/var/www/html/python/index.html'
else:
basepath = '/usr/share/doc/' # standard location
dochome = os.path.join(basepath, pyver,
'Doc', 'index.html')
elif sys.platform[:3] == 'win':
chmfile = os.path.join(sys.prefix, 'Doc',
'Python%s.chm' % _sphinx_version())
if os.path.isfile(chmfile):
dochome = chmfile
elif sys.platform == 'darwin':
# documentation may be stored inside a python framework
dochome = os.path.join(sys.prefix,
'Resources/English.lproj/Documentation/index.html')
dochome = os.path.normpath(dochome)
if os.path.isfile(dochome):
EditorWindow.help_url = dochome
if sys.platform == 'darwin':
# Safari requires real file:-URLs
EditorWindow.help_url = 'file://' + EditorWindow.help_url
else:
EditorWindow.help_url = "https://docs.python.org/%d.%d/" % sys.version_info[:2]
currentTheme=idleConf.CurrentTheme()
self.flist = flist
root = root or flist.root
self.root = root
try:
sys.ps1
except AttributeError:
sys.ps1 = '>>> '
self.menubar = Menu(root)
self.top = top = WindowList.ListedToplevel(root, menu=self.menubar)
if flist:
self.tkinter_vars = flist.vars
#self.top.instance_dict makes flist.inversedict available to
#configDialog.py so it can access all EditorWindow instances
self.top.instance_dict = flist.inversedict
else:
self.tkinter_vars = {} # keys: Tkinter event names
# values: Tkinter variable instances
self.top.instance_dict = {}
self.recent_files_path = os.path.join(idleConf.GetUserCfgDir(),
'recent-files.lst')
self.text_frame = text_frame = Frame(top)
self.vbar = vbar = Scrollbar(text_frame, name='vbar')
self.width = idleConf.GetOption('main','EditorWindow','width', type='int')
text_options = {
'name': 'text',
'padx': 5,
'wrap': 'none',
'width': self.width,
'height': idleConf.GetOption('main', 'EditorWindow', 'height', type='int')}
if TkVersion >= 8.5:
# Starting with tk 8.5 we have to set the new tabstyle option
# to 'wordprocessor' to achieve the same display of tabs as in
# older tk versions.
text_options['tabstyle'] = 'wordprocessor'
self.text = text = MultiCallCreator(Text)(text_frame, **text_options)
self.top.focused_widget = self.text
self.createmenubar()
self.apply_bindings()
self.top.protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<<close-window>>", self.close_event)
if macosxSupport.isAquaTk():
# Command-W on editorwindows doesn't work without this.
text.bind('<<close-window>>', self.close_event)
# Some OS X systems have only one mouse button,
# so use control-click for pulldown menus there.
# (Note, AquaTk defines <2> as the right button if
# present and the Tk Text widget already binds <2>.)
text.bind("<Control-Button-1>",self.right_menu_event)
else:
# Elsewhere, use right-click for pulldown menus.
text.bind("<3>",self.right_menu_event)
text.bind("<<cut>>", self.cut)
text.bind("<<copy>>", self.copy)
text.bind("<<paste>>", self.paste)
text.bind("<<center-insert>>", self.center_insert_event)
text.bind("<<help>>", self.help_dialog)
text.bind("<<python-docs>>", self.python_docs)
text.bind("<<about-idle>>", self.about_dialog)
text.bind("<<open-config-dialog>>", self.config_dialog)
text.bind("<<open-config-extensions-dialog>>",
self.config_extensions_dialog)
text.bind("<<open-module>>", self.open_module)
text.bind("<<do-nothing>>", lambda event: "break")
text.bind("<<select-all>>", self.select_all)
text.bind("<<remove-selection>>", self.remove_selection)
text.bind("<<find>>", self.find_event)
text.bind("<<find-again>>", self.find_again_event)
text.bind("<<find-in-files>>", self.find_in_files_event)
text.bind("<<find-selection>>", self.find_selection_event)
text.bind("<<replace>>", self.replace_event)
text.bind("<<goto-line>>", self.goto_line_event)
text.bind("<<smart-backspace>>",self.smart_backspace_event)
text.bind("<<newline-and-indent>>",self.newline_and_indent_event)
text.bind("<<smart-indent>>",self.smart_indent_event)
text.bind("<<indent-region>>",self.indent_region_event)
text.bind("<<dedent-region>>",self.dedent_region_event)
text.bind("<<comment-region>>",self.comment_region_event)
text.bind("<<uncomment-region>>",self.uncomment_region_event)
text.bind("<<tabify-region>>",self.tabify_region_event)
text.bind("<<untabify-region>>",self.untabify_region_event)
text.bind("<<toggle-tabs>>",self.toggle_tabs_event)
text.bind("<<change-indentwidth>>",self.change_indentwidth_event)
text.bind("<Left>", self.move_at_edge_if_selection(0))
text.bind("<Right>", self.move_at_edge_if_selection(1))
text.bind("<<del-word-left>>", self.del_word_left)
text.bind("<<del-word-right>>", self.del_word_right)
text.bind("<<beginning-of-line>>", self.home_callback)
if flist:
flist.inversedict[self] = key
if key:
flist.dict[key] = self
text.bind("<<open-new-window>>", self.new_callback)
text.bind("<<close-all-windows>>", self.flist.close_all_callback)
text.bind("<<open-class-browser>>", self.open_class_browser)
text.bind("<<open-path-browser>>", self.open_path_browser)
self.set_status_bar()
vbar['command'] = text.yview
vbar.pack(side=RIGHT, fill=Y)
text['yscrollcommand'] = vbar.set
fontWeight = 'normal'
if idleConf.GetOption('main', 'EditorWindow', 'font-bold', type='bool'):
fontWeight='bold'
text.config(font=(idleConf.GetOption('main', 'EditorWindow', 'font'),
idleConf.GetOption('main', 'EditorWindow',
'font-size', type='int'),
fontWeight))
text_frame.pack(side=LEFT, fill=BOTH, expand=1)
text.pack(side=TOP, fill=BOTH, expand=1)
text.focus_set()
# usetabs true -> literal tab characters are used by indent and
# dedent cmds, possibly mixed with spaces if
# indentwidth is not a multiple of tabwidth,
# which will cause Tabnanny to nag!
# false -> tab characters are converted to spaces by indent
# and dedent cmds, and ditto TAB keystrokes
# Although use-spaces=0 can be configured manually in config-main.def,
# configuration of tabs v. spaces is not supported in the configuration
# dialog. IDLE promotes the preferred Python indentation: use spaces!
usespaces = idleConf.GetOption('main', 'Indent', 'use-spaces', type='bool')
self.usetabs = not usespaces
# tabwidth is the display width of a literal tab character.
# CAUTION: telling Tk to use anything other than its default
# tab setting causes it to use an entirely different tabbing algorithm,
# treating tab stops as fixed distances from the left margin.
# Nobody expects this, so for now tabwidth should never be changed.
self.tabwidth = 8 # must remain 8 until Tk is fixed.
# indentwidth is the number of screen characters per indent level.
# The recommended Python indentation is four spaces.
self.indentwidth = self.tabwidth
self.set_notabs_indentwidth()
# If context_use_ps1 is true, parsing searches back for a ps1 line;
# else searches for a popular (if, def, ...) Python stmt.
self.context_use_ps1 = False
# When searching backwards for a reliable place to begin parsing,
# first start num_context_lines[0] lines back, then
# num_context_lines[1] lines back if that didn't work, and so on.
# The last value should be huge (larger than the # of lines in a
# conceivable file).
# Making the initial values larger slows things down more often.
self.num_context_lines = 50, 500, 5000000
self.per = per = self.Percolator(text)
self.undo = undo = self.UndoDelegator()
per.insertfilter(undo)
text.undo_block_start = undo.undo_block_start
text.undo_block_stop = undo.undo_block_stop
undo.set_saved_change_hook(self.saved_change_hook)
# IOBinding implements file I/O and printing functionality
self.io = io = self.IOBinding(self)
io.set_filename_change_hook(self.filename_change_hook)
# Create the recent files submenu
self.recent_files_menu = Menu(self.menubar)
self.menudict['file'].insert_cascade(3, label='Recent Files',
underline=0,
menu=self.recent_files_menu)
self.update_recent_files_list()
self.color = None # initialized below in self.ResetColorizer
if filename:
if os.path.exists(filename) and not os.path.isdir(filename):
io.loadfile(filename)
else:
io.set_filename(filename)
self.ResetColorizer()
self.saved_change_hook()
self.set_indentation_params(self.ispythonsource(filename))
self.load_extensions()
menu = self.menudict.get('windows')
if menu:
end = menu.index("end")
if end is None:
end = -1
if end >= 0:
menu.add_separator()
end = end + 1
self.wmenu_end = end
WindowList.register_callback(self.postwindowsmenu)
# Some abstractions so IDLE extensions are cross-IDE
self.askyesno = tkMessageBox.askyesno
self.askinteger = tkSimpleDialog.askinteger
self.showerror = tkMessageBox.showerror
self._highlight_workaround() # Fix selection tags on Windows
def _highlight_workaround(self):
        # On Windows, Tk stops painting the selection tags when the text
        # widget loses focus, which differs from the behavior on Linux and Mac.
# See issue14146 for more information.
if not sys.platform.startswith('win'):
return
text = self.text
text.event_add("<<Highlight-FocusOut>>", "<FocusOut>")
text.event_add("<<Highlight-FocusIn>>", "<FocusIn>")
def highlight_fix(focus):
sel_range = text.tag_ranges("sel")
if sel_range:
if focus == 'out':
HILITE_CONFIG = idleConf.GetHighlight(
idleConf.CurrentTheme(), 'hilite')
text.tag_config("sel_fix", HILITE_CONFIG)
text.tag_raise("sel_fix")
text.tag_add("sel_fix", *sel_range)
elif focus == 'in':
text.tag_remove("sel_fix", "1.0", "end")
text.bind("<<Highlight-FocusOut>>",
lambda ev: highlight_fix("out"))
text.bind("<<Highlight-FocusIn>>",
lambda ev: highlight_fix("in"))
def _filename_to_unicode(self, filename):
"""convert filename to unicode in order to display it in Tk"""
if isinstance(filename, unicode) or not filename:
return filename
else:
try:
return filename.decode(self.filesystemencoding)
except UnicodeDecodeError:
# XXX
try:
return filename.decode(self.encoding)
except UnicodeDecodeError:
# byte-to-byte conversion
return filename.decode('iso8859-1')
def new_callback(self, event):
dirname, basename = self.io.defaultfilename()
self.flist.new(dirname)
return "break"
def home_callback(self, event):
if (event.state & 4) != 0 and event.keysym == "Home":
# state&4==Control. If <Control-Home>, use the Tk binding.
return
if self.text.index("iomark") and \
self.text.compare("iomark", "<=", "insert lineend") and \
self.text.compare("insert linestart", "<=", "iomark"):
# In Shell on input line, go to just after prompt
insertpt = int(self.text.index("iomark").split(".")[1])
else:
line = self.text.get("insert linestart", "insert lineend")
for insertpt in xrange(len(line)):
if line[insertpt] not in (' ','\t'):
break
else:
insertpt=len(line)
lineat = int(self.text.index("insert").split('.')[1])
if insertpt == lineat:
insertpt = 0
dest = "insert linestart+"+str(insertpt)+"c"
if (event.state&1) == 0:
# shift was not pressed
self.text.tag_remove("sel", "1.0", "end")
else:
if not self.text.index("sel.first"):
self.text.mark_set("my_anchor", "insert") # there was no previous selection
else:
if self.text.compare(self.text.index("sel.first"), "<", self.text.index("insert")):
self.text.mark_set("my_anchor", "sel.first") # extend back
else:
self.text.mark_set("my_anchor", "sel.last") # extend forward
first = self.text.index(dest)
last = self.text.index("my_anchor")
if self.text.compare(first,">",last):
first,last = last,first
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", first, last)
self.text.mark_set("insert", dest)
self.text.see("insert")
return "break"
def set_status_bar(self):
self.status_bar = self.MultiStatusBar(self.top)
if sys.platform == "darwin":
# Insert some padding to avoid obscuring some of the statusbar
# by the resize widget.
self.status_bar.set_label('_padding1', ' ', side=RIGHT)
self.status_bar.set_label('column', 'Col: ?', side=RIGHT)
self.status_bar.set_label('line', 'Ln: ?', side=RIGHT)
self.status_bar.pack(side=BOTTOM, fill=X)
self.text.bind("<<set-line-and-column>>", self.set_line_and_column)
self.text.event_add("<<set-line-and-column>>",
"<KeyRelease>", "<ButtonRelease>")
self.text.after_idle(self.set_line_and_column)
def set_line_and_column(self, event=None):
line, column = self.text.index(INSERT).split('.')
self.status_bar.set_label('column', 'Col: %s' % column)
self.status_bar.set_label('line', 'Ln: %s' % line)
menu_specs = [
("file", "_File"),
("edit", "_Edit"),
("format", "F_ormat"),
("run", "_Run"),
("options", "_Options"),
("windows", "_Windows"),
("help", "_Help"),
]
if sys.platform == "darwin":
menu_specs[-2] = ("windows", "_Window")
def createmenubar(self):
mbar = self.menubar
self.menudict = menudict = {}
for name, label in self.menu_specs:
underline, label = prepstr(label)
menudict[name] = menu = Menu(mbar, name=name)
mbar.add_cascade(label=label, menu=menu, underline=underline)
if macosxSupport.isCarbonTk():
# Insert the application menu
menudict['application'] = menu = Menu(mbar, name='apple')
mbar.add_cascade(label='IDLE', menu=menu)
self.fill_menus()
self.base_helpmenu_length = self.menudict['help'].index(END)
self.reset_help_menu_entries()
def postwindowsmenu(self):
# Only called when Windows menu exists
menu = self.menudict['windows']
end = menu.index("end")
if end is None:
end = -1
if end > self.wmenu_end:
menu.delete(self.wmenu_end+1, end)
WindowList.add_windows_to_menu(menu)
rmenu = None
def right_menu_event(self, event):
self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
if not self.rmenu:
self.make_rmenu()
rmenu = self.rmenu
self.event = event
iswin = sys.platform[:3] == 'win'
if iswin:
self.text.config(cursor="arrow")
for item in self.rmenu_specs:
try:
label, eventname, verify_state = item
except ValueError: # see issue1207589
continue
if verify_state is None:
continue
state = getattr(self, verify_state)()
rmenu.entryconfigure(label, state=state)
rmenu.tk_popup(event.x_root, event.y_root)
if iswin:
self.text.config(cursor="ibeam")
rmenu_specs = [
# ("Label", "<<virtual-event>>", "statefuncname"), ...
("Close", "<<close-window>>", None), # Example
]
def make_rmenu(self):
rmenu = Menu(self.text, tearoff=0)
for item in self.rmenu_specs:
label, eventname = item[0], item[1]
if label is not None:
def command(text=self.text, eventname=eventname):
text.event_generate(eventname)
rmenu.add_command(label=label, command=command)
else:
rmenu.add_separator()
self.rmenu = rmenu
def rmenu_check_cut(self):
return self.rmenu_check_copy()
def rmenu_check_copy(self):
try:
indx = self.text.index('sel.first')
except TclError:
return 'disabled'
else:
return 'normal' if indx else 'disabled'
def rmenu_check_paste(self):
try:
self.text.tk.call('tk::GetSelection', self.text, 'CLIPBOARD')
except TclError:
return 'disabled'
else:
return 'normal'
def about_dialog(self, event=None):
aboutDialog.AboutDialog(self.top,'About IDLE')
def config_dialog(self, event=None):
configDialog.ConfigDialog(self.top,'Settings')
def config_extensions_dialog(self, event=None):
configDialog.ConfigExtensionsDialog(self.top)
def help_dialog(self, event=None):
if self.root:
parent = self.root
else:
parent = self.top
helpDialog.display(parent, near=self.top)
def python_docs(self, event=None):
if sys.platform[:3] == 'win':
try:
os.startfile(self.help_url)
except WindowsError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(self.help_url)
return "break"
def cut(self,event):
self.text.event_generate("<<Cut>>")
return "break"
def copy(self,event):
if not self.text.tag_ranges("sel"):
# There is no selection, so do nothing and maybe interrupt.
return
self.text.event_generate("<<Copy>>")
return "break"
def paste(self,event):
self.text.event_generate("<<Paste>>")
self.text.see("insert")
return "break"
def select_all(self, event=None):
self.text.tag_add("sel", "1.0", "end-1c")
self.text.mark_set("insert", "1.0")
self.text.see("insert")
return "break"
def remove_selection(self, event=None):
self.text.tag_remove("sel", "1.0", "end")
self.text.see("insert")
def move_at_edge_if_selection(self, edge_index):
"""Cursor move begins at start or end of selection
        When a left/right cursor key is pressed, create and return to Tkinter a
        function which causes a cursor move from the associated edge of the
selection.
"""
self_text_index = self.text.index
self_text_mark_set = self.text.mark_set
edges_table = ("sel.first+1c", "sel.last-1c")
def move_at_edge(event):
if (event.state & 5) == 0: # no shift(==1) or control(==4) pressed
try:
self_text_index("sel.first")
self_text_mark_set("insert", edges_table[edge_index])
except TclError:
pass
return move_at_edge
def del_word_left(self, event):
self.text.event_generate('<Meta-Delete>')
return "break"
def del_word_right(self, event):
self.text.event_generate('<Meta-d>')
return "break"
def find_event(self, event):
SearchDialog.find(self.text)
return "break"
def find_again_event(self, event):
SearchDialog.find_again(self.text)
return "break"
def find_selection_event(self, event):
SearchDialog.find_selection(self.text)
return "break"
def find_in_files_event(self, event):
GrepDialog.grep(self.text, self.io, self.flist)
return "break"
def replace_event(self, event):
ReplaceDialog.replace(self.text)
return "break"
def goto_line_event(self, event):
text = self.text
lineno = tkSimpleDialog.askinteger("Goto",
"Go to line number:",parent=text)
if lineno is None:
return "break"
if lineno <= 0:
text.bell()
return "break"
text.mark_set("insert", "%d.0" % lineno)
text.see("insert")
def open_module(self, event=None):
# XXX Shouldn't this be in IOBinding or in FileList?
try:
name = self.text.get("sel.first", "sel.last")
except TclError:
name = ""
else:
name = name.strip()
name = tkSimpleDialog.askstring("Module",
"Enter the name of a Python module\n"
"to search on sys.path and open:",
parent=self.text, initialvalue=name)
if name:
name = name.strip()
if not name:
return
# XXX Ought to insert current file's directory in front of path
try:
(f, file_path, (suffix, mode, mtype)) = _find_module(name)
except (NameError, ImportError) as msg:
tkMessageBox.showerror("Import error", str(msg), parent=self.text)
return
if mtype != imp.PY_SOURCE:
tkMessageBox.showerror("Unsupported type",
"%s is not a source module" % name, parent=self.text)
return
if f:
f.close()
if self.flist:
self.flist.open(file_path)
else:
self.io.loadfile(file_path)
return file_path
def open_class_browser(self, event=None):
filename = self.io.filename
if not (self.__class__.__name__ == 'PyShellEditorWindow'
and filename):
filename = self.open_module()
if filename is None:
return
head, tail = os.path.split(filename)
base, ext = os.path.splitext(tail)
from idlelib import ClassBrowser
ClassBrowser.ClassBrowser(self.flist, base, [head])
def open_path_browser(self, event=None):
from idlelib import PathBrowser
PathBrowser.PathBrowser(self.flist)
def gotoline(self, lineno):
if lineno is not None and lineno > 0:
self.text.mark_set("insert", "%d.0" % lineno)
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", "insert", "insert +1l")
self.center()
def ispythonsource(self, filename):
if not filename or os.path.isdir(filename):
return True
base, ext = os.path.splitext(os.path.basename(filename))
if os.path.normcase(ext) in (".py", ".pyw"):
return True
try:
f = open(filename)
line = f.readline()
f.close()
except IOError:
return False
return line.startswith('#!') and line.find('python') >= 0
def close_hook(self):
if self.flist:
self.flist.unregister_maybe_terminate(self)
self.flist = None
def set_close_hook(self, close_hook):
self.close_hook = close_hook
def filename_change_hook(self):
if self.flist:
self.flist.filename_changed_edit(self)
self.saved_change_hook()
self.top.update_windowlist_registry(self)
self.ResetColorizer()
def _addcolorizer(self):
if self.color:
return
if self.ispythonsource(self.io.filename):
self.color = self.ColorDelegator()
# can add more colorizers here...
if self.color:
self.per.removefilter(self.undo)
self.per.insertfilter(self.color)
self.per.insertfilter(self.undo)
def _rmcolorizer(self):
if not self.color:
return
self.color.removecolors()
self.per.removefilter(self.color)
self.color = None
def ResetColorizer(self):
"Update the color theme"
# Called from self.filename_change_hook and from configDialog.py
self._rmcolorizer()
self._addcolorizer()
theme = idleConf.GetOption('main','Theme','name')
normal_colors = idleConf.GetHighlight(theme, 'normal')
cursor_color = idleConf.GetHighlight(theme, 'cursor', fgBg='fg')
select_colors = idleConf.GetHighlight(theme, 'hilite')
self.text.config(
foreground=normal_colors['foreground'],
background=normal_colors['background'],
insertbackground=cursor_color,
selectforeground=select_colors['foreground'],
selectbackground=select_colors['background'],
)
def ResetFont(self):
"Update the text widgets' font if it is changed"
# Called from configDialog.py
fontWeight='normal'
if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'):
fontWeight='bold'
self.text.config(font=(idleConf.GetOption('main','EditorWindow','font'),
idleConf.GetOption('main','EditorWindow','font-size',
type='int'),
fontWeight))
def RemoveKeybindings(self):
"Remove the keybindings before they are changed."
# Called from configDialog.py
self.Bindings.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
for event, keylist in keydefs.items():
self.text.event_delete(event, *keylist)
for extensionName in self.get_standard_extension_names():
xkeydefs = idleConf.GetExtensionBindings(extensionName)
if xkeydefs:
for event, keylist in xkeydefs.items():
self.text.event_delete(event, *keylist)
def ApplyKeybindings(self):
"Update the keybindings after they are changed"
# Called from configDialog.py
self.Bindings.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
self.apply_bindings()
for extensionName in self.get_standard_extension_names():
xkeydefs = idleConf.GetExtensionBindings(extensionName)
if xkeydefs:
self.apply_bindings(xkeydefs)
#update menu accelerators
menuEventDict = {}
for menu in self.Bindings.menudefs:
menuEventDict[menu[0]] = {}
for item in menu[1]:
if item:
menuEventDict[menu[0]][prepstr(item[0])[1]] = item[1]
for menubarItem in self.menudict.keys():
menu = self.menudict[menubarItem]
end = menu.index(END)
if end is None:
# Skip empty menus
continue
end += 1
for index in range(0, end):
if menu.type(index) == 'command':
accel = menu.entrycget(index, 'accelerator')
if accel:
itemName = menu.entrycget(index, 'label')
event = ''
if menubarItem in menuEventDict:
if itemName in menuEventDict[menubarItem]:
event = menuEventDict[menubarItem][itemName]
if event:
accel = get_accelerator(keydefs, event)
menu.entryconfig(index, accelerator=accel)
def set_notabs_indentwidth(self):
"Update the indentwidth if changed and not using tabs in this window"
# Called from configDialog.py
if not self.usetabs:
self.indentwidth = idleConf.GetOption('main', 'Indent','num-spaces',
type='int')
def reset_help_menu_entries(self):
"Update the additional help entries on the Help menu"
help_list = idleConf.GetAllExtraHelpSourcesList()
helpmenu = self.menudict['help']
# first delete the extra help entries, if any
helpmenu_length = helpmenu.index(END)
if helpmenu_length > self.base_helpmenu_length:
helpmenu.delete((self.base_helpmenu_length + 1), helpmenu_length)
# then rebuild them
if help_list:
helpmenu.add_separator()
for entry in help_list:
cmd = self.__extra_help_callback(entry[1])
helpmenu.add_command(label=entry[0], command=cmd)
# and update the menu dictionary
self.menudict['help'] = helpmenu
def __extra_help_callback(self, helpfile):
"Create a callback with the helpfile value frozen at definition time"
def display_extra_help(helpfile=helpfile):
if not helpfile.startswith(('www', 'http')):
helpfile = os.path.normpath(helpfile)
if sys.platform[:3] == 'win':
try:
os.startfile(helpfile)
except WindowsError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(helpfile)
return display_extra_help
def update_recent_files_list(self, new_file=None):
"Load and update the recent files list and menus"
rf_list = []
if os.path.exists(self.recent_files_path):
with open(self.recent_files_path, 'r') as rf_list_file:
rf_list = rf_list_file.readlines()
if new_file:
new_file = os.path.abspath(new_file) + '\n'
if new_file in rf_list:
rf_list.remove(new_file) # move to top
rf_list.insert(0, new_file)
# clean and save the recent files list
bad_paths = []
for path in rf_list:
if '\0' in path or not os.path.exists(path[0:-1]):
bad_paths.append(path)
rf_list = [path for path in rf_list if path not in bad_paths]
ulchars = "1234567890ABCDEFGHIJK"
rf_list = rf_list[0:len(ulchars)]
try:
with open(self.recent_files_path, 'w') as rf_file:
rf_file.writelines(rf_list)
except IOError as err:
if not getattr(self.root, "recentfilelist_error_displayed", False):
self.root.recentfilelist_error_displayed = True
tkMessageBox.showerror(title='IDLE Error',
message='Unable to update Recent Files list:\n%s'
% str(err),
parent=self.text)
# for each edit window instance, construct the recent files menu
for instance in self.top.instance_dict.keys():
menu = instance.recent_files_menu
menu.delete(0, END) # clear, and rebuild:
for i, file_name in enumerate(rf_list):
file_name = file_name.rstrip() # zap \n
# make unicode string to display non-ASCII chars correctly
ufile_name = self._filename_to_unicode(file_name)
callback = instance.__recent_file_callback(file_name)
menu.add_command(label=ulchars[i] + " " + ufile_name,
command=callback,
underline=0)
def __recent_file_callback(self, file_name):
def open_recent_file(fn_closure=file_name):
self.io.open(editFile=fn_closure)
return open_recent_file
def saved_change_hook(self):
short = self.short_title()
long = self.long_title()
if short and long:
title = short + " - " + long + _py_version
elif short:
title = short
elif long:
title = long
else:
title = "Untitled"
icon = short or long or title
if not self.get_saved():
title = "*%s*" % title
icon = "*%s" % icon
self.top.wm_title(title)
self.top.wm_iconname(icon)
def get_saved(self):
return self.undo.get_saved()
def set_saved(self, flag):
self.undo.set_saved(flag)
def reset_undo(self):
self.undo.reset_undo()
def short_title(self):
filename = self.io.filename
if filename:
filename = os.path.basename(filename)
else:
filename = "Untitled"
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(filename)
def long_title(self):
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(self.io.filename or "")
def center_insert_event(self, event):
self.center()
def center(self, mark="insert"):
text = self.text
top, bot = self.getwindowlines()
lineno = self.getlineno(mark)
height = bot - top
newtop = max(1, lineno - height//2)
text.yview(float(newtop))
def getwindowlines(self):
text = self.text
top = self.getlineno("@0,0")
bot = self.getlineno("@0,65535")
if top == bot and text.winfo_height() == 1:
# Geometry manager hasn't run yet
height = int(text['height'])
bot = top + height - 1
return top, bot
def getlineno(self, mark="insert"):
text = self.text
return int(float(text.index(mark)))
def get_geometry(self):
"Return (width, height, x, y)"
geom = self.top.wm_geometry()
m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
        values = map(int, m.groups())
        return values
def close_event(self, event):
self.close()
def maybesave(self):
if self.io:
if not self.get_saved():
if self.top.state()!='normal':
self.top.deiconify()
self.top.lower()
self.top.lift()
return self.io.maybesave()
def close(self):
reply = self.maybesave()
if str(reply) != "cancel":
self._close()
return reply
def _close(self):
if self.io.filename:
self.update_recent_files_list(new_file=self.io.filename)
WindowList.unregister_callback(self.postwindowsmenu)
self.unload_extensions()
self.io.close()
self.io = None
self.undo = None
if self.color:
self.color.close(False)
self.color = None
self.text = None
self.tkinter_vars = None
self.per.close()
self.per = None
self.top.destroy()
if self.close_hook:
# unless override: unregister from flist, terminate if last window
self.close_hook()
def load_extensions(self):
self.extensions = {}
self.load_standard_extensions()
def unload_extensions(self):
for ins in self.extensions.values():
if hasattr(ins, "close"):
ins.close()
self.extensions = {}
def load_standard_extensions(self):
for name in self.get_standard_extension_names():
try:
self.load_extension(name)
except:
print "Failed to load extension", repr(name)
import traceback
traceback.print_exc()
def get_standard_extension_names(self):
return idleConf.GetExtensions(editor_only=True)
def load_extension(self, name):
try:
mod = __import__(name, globals(), locals(), [])
except ImportError:
print "\nFailed to import extension: ", name
return
cls = getattr(mod, name)
keydefs = idleConf.GetExtensionBindings(name)
if hasattr(cls, "menudefs"):
self.fill_menus(cls.menudefs, keydefs)
ins = cls(self)
self.extensions[name] = ins
if keydefs:
self.apply_bindings(keydefs)
for vevent in keydefs.keys():
methodname = vevent.replace("-", "_")
while methodname[:1] == '<':
methodname = methodname[1:]
while methodname[-1:] == '>':
methodname = methodname[:-1]
methodname = methodname + "_event"
if hasattr(ins, methodname):
self.text.bind(vevent, getattr(ins, methodname))
def apply_bindings(self, keydefs=None):
if keydefs is None:
keydefs = self.Bindings.default_keydefs
text = self.text
text.keydefs = keydefs
for event, keylist in keydefs.items():
if keylist:
text.event_add(event, *keylist)
def fill_menus(self, menudefs=None, keydefs=None):
"""Add appropriate entries to the menus and submenus
Menus that are absent or None in self.menudict are ignored.
"""
if menudefs is None:
menudefs = self.Bindings.menudefs
if keydefs is None:
keydefs = self.Bindings.default_keydefs
menudict = self.menudict
text = self.text
for mname, entrylist in menudefs:
menu = menudict.get(mname)
if not menu:
continue
for entry in entrylist:
if not entry:
menu.add_separator()
else:
label, eventname = entry
checkbutton = (label[:1] == '!')
if checkbutton:
label = label[1:]
underline, label = prepstr(label)
accelerator = get_accelerator(keydefs, eventname)
def command(text=text, eventname=eventname):
text.event_generate(eventname)
if checkbutton:
var = self.get_var_obj(eventname, BooleanVar)
menu.add_checkbutton(label=label, underline=underline,
command=command, accelerator=accelerator,
variable=var)
else:
menu.add_command(label=label, underline=underline,
command=command,
accelerator=accelerator)
def getvar(self, name):
var = self.get_var_obj(name)
if var:
value = var.get()
return value
else:
raise NameError, name
def setvar(self, name, value, vartype=None):
var = self.get_var_obj(name, vartype)
if var:
var.set(value)
else:
raise NameError, name
def get_var_obj(self, name, vartype=None):
var = self.tkinter_vars.get(name)
if not var and vartype:
# create a Tkinter variable object with self.text as master:
self.tkinter_vars[name] = var = vartype(self.text)
return var
# Tk implementations of "virtual text methods" -- each platform
# reusing IDLE's support code needs to define these for its GUI's
# flavor of widget.
# Is character at text_index in a Python string? Return 0 for
# "guaranteed no", true for anything else. This info is expensive
# to compute ab initio, but is probably already known by the
# platform's colorizer.
def is_char_in_string(self, text_index):
if self.color:
# Return true iff colorizer hasn't (re)gotten this far
# yet, or the character is tagged as being in a string
return self.text.tag_prevrange("TODO", text_index) or \
"STRING" in self.text.tag_names(text_index)
else:
# The colorizer is missing: assume the worst
return 1
# If a selection is defined in the text widget, return (start,
# end) as Tkinter text indices, otherwise return (None, None)
def get_selection_indices(self):
try:
first = self.text.index("sel.first")
last = self.text.index("sel.last")
return first, last
except TclError:
return None, None
# Return the text widget's current view of what a tab stop means
# (equivalent width in spaces).
def get_tabwidth(self):
current = self.text['tabs'] or TK_TABWIDTH_DEFAULT
return int(current)
# Set the text widget's current view of what a tab stop means.
def set_tabwidth(self, newtabwidth):
text = self.text
if self.get_tabwidth() != newtabwidth:
pixels = text.tk.call("font", "measure", text["font"],
"-displayof", text.master,
"n" * newtabwidth)
text.configure(tabs=pixels)
# If ispythonsource and guess are true, guess a good value for
# indentwidth based on file content (if possible), and if
# indentwidth != tabwidth set usetabs false.
# In any case, adjust the Text widget's view of what a tab
# character means.
def set_indentation_params(self, ispythonsource, guess=True):
if guess and ispythonsource:
i = self.guess_indent()
if 2 <= i <= 8:
self.indentwidth = i
if self.indentwidth != self.tabwidth:
self.usetabs = False
self.set_tabwidth(self.tabwidth)
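    # Illustrative effect: opening a Python file consistently indented with
    # two spaces sets indentwidth to 2 and, since 2 != tabwidth (8), turns
    # usetabs off for that window.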
def smart_backspace_event(self, event):
text = self.text
first, last = self.get_selection_indices()
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
return "break"
# Delete whitespace left, until hitting a real char or closest
# preceding virtual tab stop.
chars = text.get("insert linestart", "insert")
if chars == '':
if text.compare("insert", ">", "1.0"):
# easy: delete preceding newline
text.delete("insert-1c")
else:
text.bell() # at start of buffer
return "break"
if chars[-1] not in " \t":
# easy: delete preceding real char
text.delete("insert-1c")
return "break"
# Ick. It may require *inserting* spaces if we back up over a
# tab character! This is written to be clear, not fast.
tabwidth = self.tabwidth
have = len(chars.expandtabs(tabwidth))
assert have > 0
want = ((have - 1) // self.indentwidth) * self.indentwidth
# Debug prompt is multilined....
if self.context_use_ps1:
last_line_of_prompt = sys.ps1.split('\n')[-1]
else:
last_line_of_prompt = ''
ncharsdeleted = 0
while 1:
if chars == last_line_of_prompt:
break
chars = chars[:-1]
ncharsdeleted = ncharsdeleted + 1
have = len(chars.expandtabs(tabwidth))
if have <= want or chars[-1] not in " \t":
break
text.undo_block_start()
text.delete("insert-%dc" % ncharsdeleted, "insert")
if have < want:
text.insert("insert", ' ' * (want - have))
text.undo_block_stop()
return "break"
def smart_indent_event(self, event):
# if intraline selection:
# delete it
# elif multiline selection:
# do indent-region
# else:
# indent one level
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
if index2line(first) != index2line(last):
return self.indent_region_event(event)
text.delete(first, last)
text.mark_set("insert", first)
prefix = text.get("insert linestart", "insert")
raw, effective = classifyws(prefix, self.tabwidth)
if raw == len(prefix):
# only whitespace to the left
self.reindent_to(effective + self.indentwidth)
else:
# tab to the next 'stop' within or to right of line's text:
if self.usetabs:
pad = '\t'
else:
effective = len(prefix.expandtabs(self.tabwidth))
n = self.indentwidth
pad = ' ' * (n - effective % n)
text.insert("insert", pad)
text.see("insert")
return "break"
finally:
text.undo_block_stop()
def newline_and_indent_event(self, event):
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
line = text.get("insert linestart", "insert")
i, n = 0, len(line)
while i < n and line[i] in " \t":
i = i+1
if i == n:
# the cursor is in or at leading indentation in a continuation
# line; just inject an empty line at the start
text.insert("insert linestart", '\n')
return "break"
indent = line[:i]
# strip whitespace before insert point unless it's in the prompt
i = 0
last_line_of_prompt = sys.ps1.split('\n')[-1]
while line and line[-1] in " \t" and line != last_line_of_prompt:
line = line[:-1]
i = i+1
if i:
text.delete("insert - %d chars" % i, "insert")
# strip whitespace after insert point
while text.get("insert") in " \t":
text.delete("insert")
# start new line
text.insert("insert", '\n')
# adjust indentation for continuations and block
# open/close first need to find the last stmt
lno = index2line(text.index('insert'))
y = PyParse.Parser(self.indentwidth, self.tabwidth)
if not self.context_use_ps1:
for context in self.num_context_lines:
startat = max(lno - context, 1)
startatindex = repr(startat) + ".0"
rawtext = text.get(startatindex, "insert")
y.set_str(rawtext)
bod = y.find_good_parse_start(
self.context_use_ps1,
self._build_char_in_string_func(startatindex))
if bod is not None or startat == 1:
break
y.set_lo(bod or 0)
else:
r = text.tag_prevrange("console", "insert")
if r:
startatindex = r[1]
else:
startatindex = "1.0"
rawtext = text.get(startatindex, "insert")
y.set_str(rawtext)
y.set_lo(0)
c = y.get_continuation_type()
if c != PyParse.C_NONE:
# The current stmt hasn't ended yet.
if c == PyParse.C_STRING_FIRST_LINE:
# after the first line of a string; do not indent at all
pass
elif c == PyParse.C_STRING_NEXT_LINES:
# inside a string which started before this line;
# just mimic the current indent
text.insert("insert", indent)
elif c == PyParse.C_BRACKET:
# line up with the first (if any) element of the
# last open bracket structure; else indent one
# level beyond the indent of the line with the
# last open bracket
self.reindent_to(y.compute_bracket_indent())
elif c == PyParse.C_BACKSLASH:
# if more than one line in this stmt already, just
# mimic the current indent; else if initial line
# has a start on an assignment stmt, indent to
# beyond leftmost =; else to beyond first chunk of
# non-whitespace on initial line
if y.get_num_lines_in_stmt() > 1:
text.insert("insert", indent)
else:
self.reindent_to(y.compute_backslash_indent())
else:
assert 0, "bogus continuation type %r" % (c,)
return "break"
# This line starts a brand new stmt; indent relative to
# indentation of initial line of closest preceding
# interesting stmt.
indent = y.get_base_indent_string()
text.insert("insert", indent)
if y.is_block_opener():
self.smart_indent_event(event)
elif indent and y.is_block_closer():
self.smart_backspace_event(event)
return "break"
finally:
text.see("insert")
text.undo_block_stop()
# Our editwin provides a is_char_in_string function that works
# with a Tk text index, but PyParse only knows about offsets into
# a string. This builds a function for PyParse that accepts an
# offset.
def _build_char_in_string_func(self, startindex):
def inner(offset, _startindex=startindex,
_icis=self.is_char_in_string):
return _icis(_startindex + "+%dc" % offset)
return inner
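    # For instance, with startindex "12.0" the function built above maps
    # offset 5 to the Tk index "12.0+5c" before consulting is_char_in_string.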
def indent_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, self.tabwidth)
effective = effective + self.indentwidth
lines[pos] = self._make_blanks(effective) + line[raw:]
self.set_region(head, tail, chars, lines)
return "break"
def dedent_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, self.tabwidth)
effective = max(effective - self.indentwidth, 0)
lines[pos] = self._make_blanks(effective) + line[raw:]
self.set_region(head, tail, chars, lines)
return "break"
def comment_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines) - 1):
line = lines[pos]
lines[pos] = '##' + line
self.set_region(head, tail, chars, lines)
def uncomment_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if not line:
continue
if line[:2] == '##':
line = line[2:]
elif line[:1] == '#':
line = line[1:]
lines[pos] = line
self.set_region(head, tail, chars, lines)
def tabify_region_event(self, event):
head, tail, chars, lines = self.get_region()
tabwidth = self._asktabwidth()
if tabwidth is None: return
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, tabwidth)
ntabs, nspaces = divmod(effective, tabwidth)
lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:]
self.set_region(head, tail, chars, lines)
def untabify_region_event(self, event):
head, tail, chars, lines = self.get_region()
tabwidth = self._asktabwidth()
if tabwidth is None: return
for pos in range(len(lines)):
lines[pos] = lines[pos].expandtabs(tabwidth)
self.set_region(head, tail, chars, lines)
def toggle_tabs_event(self, event):
if self.askyesno(
"Toggle tabs",
"Turn tabs " + ("on", "off")[self.usetabs] +
"?\nIndent width " +
("will be", "remains at")[self.usetabs] + " 8." +
"\n Note: a tab is always 8 columns",
parent=self.text):
self.usetabs = not self.usetabs
# Try to prevent inconsistent indentation.
# User must change indent width manually after using tabs.
self.indentwidth = 8
return "break"
# XXX this isn't bound to anything -- see tabwidth comments
## def change_tabwidth_event(self, event):
## new = self._asktabwidth()
## if new != self.tabwidth:
## self.tabwidth = new
## self.set_indentation_params(0, guess=0)
## return "break"
def change_indentwidth_event(self, event):
new = self.askinteger(
"Indent width",
"New indent width (2-16)\n(Always use 8 when using tabs)",
parent=self.text,
initialvalue=self.indentwidth,
minvalue=2,
maxvalue=16)
if new and new != self.indentwidth and not self.usetabs:
self.indentwidth = new
return "break"
def get_region(self):
text = self.text
first, last = self.get_selection_indices()
if first and last:
head = text.index(first + " linestart")
tail = text.index(last + "-1c lineend +1c")
else:
head = text.index("insert linestart")
tail = text.index("insert lineend +1c")
chars = text.get(head, tail)
lines = chars.split("\n")
return head, tail, chars, lines
def set_region(self, head, tail, chars, lines):
text = self.text
newchars = "\n".join(lines)
if newchars == chars:
text.bell()
return
text.tag_remove("sel", "1.0", "end")
text.mark_set("insert", head)
text.undo_block_start()
text.delete(head, tail)
text.insert(head, newchars)
text.undo_block_stop()
text.tag_add("sel", head, "insert")
# Make string that displays as n leading blanks.
def _make_blanks(self, n):
if self.usetabs:
ntabs, nspaces = divmod(n, self.tabwidth)
return '\t' * ntabs + ' ' * nspaces
else:
return ' ' * n
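    # Example (assuming the default tabwidth of 8): with usetabs set,
    # _make_blanks(10) yields one tab plus two spaces; otherwise ten spaces.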
# Delete from beginning of line to insert point, then reinsert
# column logical (meaning use tabs if appropriate) spaces.
def reindent_to(self, column):
text = self.text
text.undo_block_start()
if text.compare("insert linestart", "!=", "insert"):
text.delete("insert linestart", "insert")
if column:
text.insert("insert", self._make_blanks(column))
text.undo_block_stop()
def _asktabwidth(self):
return self.askinteger(
"Tab width",
"Columns per tab? (2-16)",
parent=self.text,
initialvalue=self.indentwidth,
minvalue=2,
maxvalue=16)
# Guess indentwidth from text content.
# Return guessed indentwidth. This should not be believed unless
# it's in a reasonable range (e.g., it will be 0 if no indented
# blocks are found).
def guess_indent(self):
opener, indented = IndentSearcher(self.text, self.tabwidth).run()
if opener and indented:
raw, indentsmall = classifyws(opener, self.tabwidth)
raw, indentlarge = classifyws(indented, self.tabwidth)
else:
indentsmall = indentlarge = 0
return indentlarge - indentsmall
# "line.col" -> line, as an int
def index2line(index):
return int(float(index))
# Look at the leading whitespace in s.
# Return pair (# of leading ws characters,
# effective # of leading blanks after expanding
# tabs to width tabwidth)
def classifyws(s, tabwidth):
raw = effective = 0
for ch in s:
if ch == ' ':
raw = raw + 1
effective = effective + 1
elif ch == '\t':
raw = raw + 1
effective = (effective // tabwidth + 1) * tabwidth
else:
break
return raw, effective
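# Illustrative call: classifyws("  \tif x:", 8) returns (3, 8) -- three raw
# whitespace characters whose tab expansion reaches column 8.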
import tokenize
_tokenize = tokenize
del tokenize
class IndentSearcher(object):
# .run() chews over the Text widget, looking for a block opener
# and the stmt following it. Returns a pair,
# (line containing block opener, line containing stmt)
# Either or both may be None.
def __init__(self, text, tabwidth):
self.text = text
self.tabwidth = tabwidth
self.i = self.finished = 0
self.blkopenline = self.indentedline = None
def readline(self):
if self.finished:
return ""
i = self.i = self.i + 1
mark = repr(i) + ".0"
if self.text.compare(mark, ">=", "end"):
return ""
return self.text.get(mark, mark + " lineend+1c")
def tokeneater(self, type, token, start, end, line,
INDENT=_tokenize.INDENT,
NAME=_tokenize.NAME,
OPENERS=('class', 'def', 'for', 'if', 'try', 'while')):
if self.finished:
pass
elif type == NAME and token in OPENERS:
self.blkopenline = line
elif type == INDENT and self.blkopenline:
self.indentedline = line
self.finished = 1
def run(self):
save_tabsize = _tokenize.tabsize
_tokenize.tabsize = self.tabwidth
try:
try:
_tokenize.tokenize(self.readline, self.tokeneater)
except (_tokenize.TokenError, SyntaxError):
# since we cut off the tokenizer early, we can trigger
# spurious errors
pass
finally:
_tokenize.tabsize = save_tabsize
return self.blkopenline, self.indentedline
### end autoindent code ###
def prepstr(s):
# Helper to extract the underscore from a string, e.g.
# prepstr("Co_py") returns (2, "Copy").
i = s.find('_')
if i >= 0:
s = s[:i] + s[i+1:]
return i, s
keynames = {
'bracketleft': '[',
'bracketright': ']',
'slash': '/',
}
def get_accelerator(keydefs, eventname):
keylist = keydefs.get(eventname)
# issue10940: temporary workaround to prevent hang with OS X Cocoa Tk 8.5
# if not keylist:
if (not keylist) or (macosxSupport.isCocoaTk() and eventname in {
"<<open-module>>",
"<<goto-line>>",
"<<change-indentwidth>>"}):
return ""
s = keylist[0]
s = re.sub(r"-[a-z]\b", lambda m: m.group().upper(), s)
s = re.sub(r"\b\w+\b", lambda m: keynames.get(m.group(), m.group()), s)
s = re.sub("Key-", "", s)
s = re.sub("Cancel","Ctrl-Break",s) # dscherer@cmu.edu
s = re.sub("Control-", "Ctrl-", s)
s = re.sub("-", "+", s)
s = re.sub("><", " ", s)
s = re.sub("<", "", s)
s = re.sub(">", "", s)
return s
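# For example, a key definition list starting with "<Control-Key-z>" is
# rendered by the substitutions above as the accelerator string "Ctrl+Z".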
def fixwordbreaks(root):
# Make sure that Tk's double-click and next/previous word
# operations use our definition of a word (i.e. an identifier)
tk = root.tk
tk.call('tcl_wordBreakAfter', 'a b', 0) # make sure word.tcl is loaded
tk.call('set', 'tcl_wordchars', '[a-zA-Z0-9_]')
tk.call('set', 'tcl_nonwordchars', '[^a-zA-Z0-9_]')
def _editor_window(parent): # htest #
# error if close master window first - timer event, after script
root = parent
fixwordbreaks(root)
if sys.argv[1:]:
filename = sys.argv[1]
else:
filename = None
macosxSupport.setupApp(root, None)
edit = EditorWindow(root=root, filename=filename)
edit.text.bind("<<close-all-windows>>", edit.close_event)
# Does not stop error, neither does following
# edit.text.bind("<<close-window>>", edit.close_event)
if __name__ == '__main__':
from idlelib.idle_test.htest import run
run(_help_dialog, _editor_window)
|
sdlBasic/sdlbrt
|
win32/mingw/opt/lib/python2.7/idlelib/EditorWindow.py
|
Python
|
lgpl-2.1
| 66,626
|
from typing import TypeVar, Dict, Iterable, Any
T = TypeVar("T")
def foo(values: Dict[T, Iterable[Any]]):
for e in []:
values.setdefault(e, undefined)
|
allotria/intellij-community
|
python/testData/inspections/PyTypeCheckerInspection/UnresolvedReceiverGeneric.py
|
Python
|
apache-2.0
| 165
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Keystone UUID Token Provider"""
from __future__ import absolute_import
import uuid
from keystone.token.providers import common
class Provider(common.BaseProvider):
def __init__(self, *args, **kwargs):
super(Provider, self).__init__(*args, **kwargs)
def _get_token_id(self, token_data):
return uuid.uuid4().hex
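# Note: uuid.uuid4().hex is a random 32-character hexadecimal string, so each
# issued token ID is unpredictable and effectively collision free.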
|
atheendra/access_keys
|
keystone/token/providers/uuid.py
|
Python
|
apache-2.0
| 929
|
# -*- coding: utf-8 -*-
# Basic exporter for svg icons
from os import listdir
from os.path import isfile, join, dirname, realpath
import subprocess
import sys
import rsvg
import cairo
last_svg_path = None
last_svg_data = None
SCRIPT_FOLDER = dirname(realpath(__file__)) + '/'
theme_dir_base = SCRIPT_FOLDER + '../../scene/resources/default_theme/'
theme_dir_source = theme_dir_base + 'source/'
icons_dir_base = SCRIPT_FOLDER + '../editor/icons/'
icons_dir_2x = icons_dir_base + '2x/'
icons_dir_source = icons_dir_base + 'source/'
def svg_to_png(svg_path, png_path, dpi):
global last_svg_path, last_svg_data
zoom = int(dpi / 90)
if last_svg_path != svg_path:
last_svg_data = open(svg_path, 'r').read()
last_svg_path = svg_path
svg = rsvg.Handle(data=last_svg_data)
img = cairo.ImageSurface(
cairo.FORMAT_ARGB32,
svg.props.width * zoom,
svg.props.height * zoom
)
ctx = cairo.Context(img)
ctx.set_antialias(cairo.ANTIALIAS_DEFAULT)
ctx.scale(zoom, zoom)
svg.render_cairo(ctx)
img.write_to_png('%s.png' % png_path)
svg.close()
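# Illustrative call (hypothetical paths): svg_to_png('source/icon_add.svg',
# 'out/icon_add', 180) renders at 2x (zoom = 180 / 90) and writes
# 'out/icon_add.png'.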
def export_icons():
svgs_path = icons_dir_source
file_names = [f for f in listdir(svgs_path) if isfile(join(svgs_path, f))]
for file_name in file_names:
# name without extensions
name_only = file_name.replace('.svg', '')
out_icon_names = [name_only] # export to a png with the same file name
theme_out_icon_names = []
# special cases
        if name_only in special_icons:
special_icon = special_icons[name_only]
if type(special_icon) is dict:
if special_icon.get('avoid_self'):
out_icon_names = []
                if 'output_names' in special_icon:
out_icon_names += special_icon['output_names']
                if 'theme_output_names' in special_icon:
theme_out_icon_names += special_icon['theme_output_names']
source_path = '%s%s.svg' % (svgs_path, name_only)
for out_icon_name in out_icon_names:
svg_to_png(source_path, icons_dir_base + out_icon_name, 90)
svg_to_png(source_path, icons_dir_2x + out_icon_name, 180)
for theme_out_icon_name in theme_out_icon_names:
svg_to_png(source_path, theme_dir_base + theme_out_icon_name, 90)
def export_theme():
svgs_path = theme_dir_source
file_names = [f for f in listdir(svgs_path) if isfile(join(svgs_path, f))]
for file_name in file_names:
# name without extensions
name_only = file_name.replace('.svg', '')
out_icon_names = [name_only] # export to a png with the same file name
# special cases
        if name_only in theme_icons:
special_icon = theme_icons[name_only]
if type(special_icon) is dict:
                if 'output_names' in special_icon:
out_icon_names += special_icon['output_names']
source_path = '%s%s.svg' % (svgs_path, name_only)
for out_icon_name in out_icon_names:
svg_to_png(source_path, theme_dir_base + out_icon_name, 90)
# special cases for icons that will be exported to multiple target pngs or that require transforms.
special_icons = {
'icon_add_track': dict(
output_names=['icon_add'],
theme_output_names=['icon_add', 'icon_zoom_more']
),
'icon_new': dict(output_names=['icon_file']),
'icon_animation_tree_player': dict(output_names=['icon_animation_tree']),
'icon_tool_rotate': dict(
output_names=['icon_reload'],
theme_output_names=['icon_reload']
),
'icon_multi_edit': dict(output_names=['icon_multi_node_edit']),
'icon_folder': dict(
output_names=['icon_load', 'icon_open'],
theme_output_names=['icon_folder']
),
'icon_file_list': dict(output_names=['icon_enum']),
'icon_collision_2d': dict(output_names=['icon_collision_polygon_2d', 'icon_polygon_2d']),
'icon_class_list': dict(output_names=['icon_filesystem']),
'icon_color_ramp': dict(output_names=['icon_graph_color_ramp']),
'icon_translation': dict(output_names=['icon_p_hash_translation']),
'icon_shader': dict(output_names=['icon_shader_material', 'icon_material_shader']),
'icon_canvas_item_shader_graph': dict(output_names=['icon_material_shader_graph']),
'icon_color_pick': dict(theme_output_names=['icon_color_pick'], avoid_self=True),
'icon_play': dict(theme_output_names=['icon_play']),
'icon_stop': dict(theme_output_names=['icon_stop']),
'icon_zoom_less': dict(theme_output_names=['icon_zoom_less'], avoid_self=True),
'icon_zoom_reset': dict(theme_output_names=['icon_zoom_reset'], avoid_self=True),
'icon_snap': dict(theme_output_names=['icon_snap'])
}
theme_icons = {
'icon_close': dict(output_names=['close', 'close_hl']),
'tab_menu': dict(output_names=['tab_menu_hl'])
}
export_icons()
export_theme()
|
MrMaidx/godot
|
misc/scripts/svgs_2_pngs.py
|
Python
|
mit
| 4,991
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
class JavaProtobufLibrary(ImportJarsMixin, JvmTarget):
"""Generates a stub Java library from protobuf IDL files."""
def __init__(self, payload=None, buildflags=None, imports=None, **kwargs):
"""
:param buildflags: Unused, and will be removed in a future release.
:param list imports: List of addresses of `jar_library <#jar_library>`_
targets which contain .proto definitions.
"""
payload = payload or Payload()
# TODO(Eric Ayers): The target needs to incorporate the settings of --gen-protoc-version
    # and --gen-protoc-plugins into the fingerprint. Consider adding a custom FingerprintStrategy
# into ProtobufGen to get it.
payload.add_fields({
'import_specs': PrimitiveField(imports or ())
})
super(JavaProtobufLibrary, self).__init__(payload=payload, **kwargs)
if buildflags is not None:
logger.warn(" Target definition at {address} sets attribute 'buildflags' which is "
"ignored and will be removed in a future release"
.format(address=self.address.spec))
self.add_labels('codegen')
@property
def imported_jar_library_specs(self):
"""List of JarLibrary specs to import.
Required to implement the ImportJarsMixin.
"""
return self.payload.import_specs
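# A minimal sketch of a BUILD definition using this target type (names and
# paths are hypothetical):
#
#   java_protobuf_library(
#     name='proto',
#     sources=['foo.proto'],
#     imports=['3rdparty/jvm:external-protos'],
#   )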
|
sameerparekh/pants
|
src/python/pants/backend/codegen/targets/java_protobuf_library.py
|
Python
|
apache-2.0
| 1,867
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017, Thierry Sallé (@tsalle)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'
}
DOCUMENTATION = '''
---
module: grafana_dashboard
author:
- "Thierry Sallé (@tsalle)"
version_added: "2.5"
short_description: Manage grafana dashboards
description:
- Create, update, delete, export grafana dashboards via API
options:
grafana_url:
required: true
description:
- Grafana url
grafana_user:
required: false
default: admin
description:
- Grafana API user
grafana_password:
required: false
default: admin
description:
- Grafana API password
grafana_api_key:
required: false
description:
- Grafana API key
- If set, I(grafana_user) and I(grafana_password) will be ignored
org_id:
required: false
description:
- Grafana Organisation ID where the dashboard will be imported / exported
      - Not used when I(grafana_api_key) is set, because the grafana_api_key only belongs to one organisation.
default: 1
state:
required: true
default: present
description:
- State of the dashboard.
choices: ['present', 'absent', 'export']
slug:
description:
- slug of the dashboard. It's the friendly url name of the dashboard.
- When state is present, this parameter can override the slug in the meta section of the json file.
- If you want to import a json dashboard exported directly from the interface (not from the api),
- you have to specify the slug parameter because there is no meta section in the exported json.
path:
description:
- path to the json file containing the grafana dashboard to import or export.
overwrite:
default: false
description:
- override existing dashboard when state is present.
message:
description:
- Set a commit message for the version history.
- Only used when state is present
validate_certs:
default: true
type: bool
description:
- If C(no), SSL certificates will not be validated. This should only be used
- on personally controlled sites using self-signed certificates.
'''
EXAMPLES = '''
---
- name: import grafana dashboard foo
grafana_dashboard:
grafana_url: http://grafana.company.com
grafana_api_key: XXXXXXXXXXXX
state: present
message: "updated by ansible"
overwrite: true
path: /path/to/dashboards/foo.json
- name: export dashboard
grafana_dashboard:
grafana_url: http://grafana.company.com
grafana_api_key: XXXXXXXXXXXX
state: export
slug: foo
path: /path/to/dashboards/foo.json
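# A sketch of removing the same dashboard, assuming the same credentials as above:
- name: delete grafana dashboard foo
  grafana_dashboard:
    grafana_url: http://grafana.company.com
    grafana_api_key: XXXXXXXXXXXX
    state: absent
    slug: foo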
'''
RETURN = '''
---
slug:
description: slug of the created / deleted / exported dashboard.
returned: success
type: string
sample: foo
'''
import base64
import json
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
__metaclass__ = type
class GrafanaAPIException(Exception):
pass
class GrafanaMalformedJson(Exception):
pass
class GrafanaExportException(Exception):
pass
def grafana_switch_organisation(module, grafana_url, org_id, headers):
r, info = fetch_url(module, '%s/api/user/using/%s' % (grafana_url, org_id), headers=headers, method='POST')
if info['status'] != 200:
raise GrafanaAPIException('Unable to switch to organization %s : %s' % (org_id, info))
def grafana_dashboard_exists(module, grafana_url, slug, headers):
dashboard_exists = False
dashboard = {}
r, info = fetch_url(module, '%s/api/dashboards/db/%s' % (grafana_url, slug), headers=headers, method='GET')
if info['status'] == 200:
dashboard_exists = True
try:
dashboard = json.loads(r.read())
except Exception as e:
raise GrafanaMalformedJson(e)
elif info['status'] == 404:
dashboard_exists = False
else:
raise GrafanaAPIException('Unable to get dashboard %s : %s' % (slug, info))
return dashboard_exists, dashboard
def grafana_create_dashboard(module, data):
# define data payload for grafana API
try:
with open(data['path'], 'r') as json_file:
payload = json.load(json_file)
except Exception as e:
raise GrafanaMalformedJson("Can't load json file %s" % str(e))
# define http header
headers = {'content-type': 'application/json; charset=utf8'}
if 'grafana_api_key' in data and data['grafana_api_key']:
headers['Authorization'] = "Bearer %s" % data['grafana_api_key']
else:
auth = base64.encodestring('%s:%s' % (data['grafana_user'], data['grafana_password'])).replace('\n', '')
headers['Authorization'] = 'Basic %s' % auth
grafana_switch_organisation(module, data['grafana_url'], data['org_id'], headers)
if data.get('slug'):
slug = data['slug']
elif 'meta' in payload and 'slug' in payload['meta']:
slug = payload['meta']['slug']
else:
raise GrafanaMalformedJson('No slug found in json')
# test if dashboard already exists
dashboard_exists, dashboard = grafana_dashboard_exists(module, data['grafana_url'], slug, headers=headers)
result = {}
if dashboard_exists is True:
if dashboard == payload:
# unchanged
result['slug'] = data['slug']
result['msg'] = "Dashboard %s unchanged." % data['slug']
result['changed'] = False
else:
# update
if 'overwrite' in data and data['overwrite']:
payload['overwrite'] = True
if 'message' in data and data['message']:
payload['message'] = data['message']
r, info = fetch_url(module, '%s/api/dashboards/db' % data['grafana_url'], data=json.dumps(payload), headers=headers, method='POST')
if info['status'] == 200:
result['slug'] = slug
result['msg'] = "Dashboard %s updated" % slug
result['changed'] = True
else:
body = json.loads(info['body'])
raise GrafanaAPIException('Unable to update the dashboard %s : %s' % (slug, body['message']))
else:
# create
if 'dashboard' not in payload:
payload = {'dashboard': payload}
r, info = fetch_url(module, '%s/api/dashboards/db' % data['grafana_url'], data=json.dumps(payload), headers=headers, method='POST')
if info['status'] == 200:
result['msg'] = "Dashboard %s created" % slug
result['changed'] = True
result['slug'] = slug
else:
raise GrafanaAPIException('Unable to create the new dashboard %s : %s - %s.' % (slug, info['status'], info))
return result
def grafana_delete_dashboard(module, data):
# define http headers
headers = {'content-type': 'application/json'}
if 'grafana_api_key' in data and data['grafana_api_key']:
headers['Authorization'] = "Bearer %s" % data['grafana_api_key']
else:
auth = base64.encodestring('%s:%s' % (data['grafana_user'], data['grafana_password'])).replace('\n', '')
headers['Authorization'] = 'Basic %s' % auth
grafana_switch_organisation(module, data['grafana_url'], data['org_id'], headers)
# test if dashboard already exists
dashboard_exists, dashboard = grafana_dashboard_exists(module, data['grafana_url'], data['slug'], headers=headers)
result = {}
if dashboard_exists is True:
# delete
r, info = fetch_url(module, '%s/api/dashboards/db/%s' % (data['grafana_url'], data['slug']), headers=headers, method='DELETE')
if info['status'] == 200:
result['msg'] = "Dashboard %s deleted" % data['slug']
result['changed'] = True
result['slug'] = data['slug']
else:
            raise GrafanaAPIException('Unable to delete the dashboard %s : %s' % (data['slug'], info))
else:
        # dashboard does not exist: do nothing
        result = {'msg': "Dashboard %s does not exist" % data['slug'],
'changed': False,
'slug': data['slug']}
return result
def grafana_export_dashboard(module, data):
# define http headers
headers = {'content-type': 'application/json'}
if 'grafana_api_key' in data and data['grafana_api_key']:
headers['Authorization'] = "Bearer %s" % data['grafana_api_key']
else:
auth = base64.encodestring('%s:%s' % (data['grafana_user'], data['grafana_password'])).replace('\n', '')
headers['Authorization'] = 'Basic %s' % auth
grafana_switch_organisation(module, data['grafana_url'], data['org_id'], headers)
# test if dashboard already exists
dashboard_exists, dashboard = grafana_dashboard_exists(module, data['grafana_url'], data['slug'], headers=headers)
if dashboard_exists is True:
try:
with open(data['path'], 'w') as f:
f.write(json.dumps(dashboard))
except Exception as e:
raise GrafanaExportException("Can't write json file : %s" % str(e))
result = {'msg': "Dashboard %s exported to %s" % (data['slug'], data['path']),
'slug': data['slug'],
'changed': True}
else:
result = {'msg': "Dashboard %s does not exists" % data['slug'],
'slug': data['slug'],
'changed': False}
return result
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(choices=['present', 'absent', 'export'],
default='present'),
grafana_url=dict(required=True),
grafana_user=dict(default='admin'),
grafana_password=dict(default='admin', no_log=True),
grafana_api_key=dict(type='str', no_log=True),
org_id=dict(default=1, type='int'),
slug=dict(type='str'),
path=dict(type='str'),
overwrite=dict(type='bool', default=False),
message=dict(type='str'),
validate_certs=dict(type='bool', default=True)
),
supports_check_mode=False,
required_together=[['grafana_user', 'grafana_password', 'org_id']],
mutually_exclusive=[['grafana_user', 'grafana_api_key']],
)
try:
if module.params['state'] == 'present':
result = grafana_create_dashboard(module, module.params)
elif module.params['state'] == 'absent':
result = grafana_delete_dashboard(module, module.params)
else:
result = grafana_export_dashboard(module, module.params)
except GrafanaAPIException as e:
module.fail_json(
failed=True,
msg="error : %s" % e
)
return
except GrafanaMalformedJson as e:
module.fail_json(
failed=True,
msg="error : json file does not contain a meta section with a slug parameter, or you did'nt specify the slug parameter"
)
return
except GrafanaExportException as e:
module.fail_json(
failed=True,
msg="error : json file cannot be written : %s" % str(e)
)
return
module.exit_json(
failed=False,
**result
)
return
if __name__ == '__main__':
main()
|
ravibhure/ansible
|
lib/ansible/modules/monitoring/grafana_dashboard.py
|
Python
|
gpl-3.0
| 11,537
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Representation of a Task and related classes."""
__author__ = 'jeff.carollo@gmail.com (Jeff Carollo)'
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.api import taskqueue
from google.appengine.ext import db
from google.appengine.ext.blobstore import blobstore
import datetime
import json
import logging
import urllib
import webapp2
from third_party.prodeagle import counter
from util import db_properties
from util import parsetime
class Error(Exception):
pass
class TaskNotFoundError(Error):
pass
class TaskTimedOutError(Error):
pass
class TaskStates(object):
SCHEDULED = 'scheduled'
ASSIGNED = 'assigned'
COMPLETE = 'complete'
class TaskOutcomes(object):
SUCCESS = 'success'
TIMED_OUT = 'timed_out'
FAILED = 'failed'
class TaskResult(db.Model):
"""The results of a Task, including logs and execution time."""
exit_code = db.IntegerProperty(required=True)
execution_time = db.FloatProperty(required=False)
stdout = blobstore.BlobReferenceProperty(required=False)
stderr = blobstore.BlobReferenceProperty(required=False)
stdout_download_url = db.TextProperty(required=False)
stderr_download_url = db.TextProperty(required=True)
# Should be populated if task execution involved a device.
device_serial_number = db.StringProperty(required=False)
result_metadata = db.TextProperty(required=False)
worker_log = db.TextProperty(required=False)
class Task(db.Model):
"""MrTaskman's representation of a Task.
Includes metadata not needed in a Task config.
"""
# Set when a task is created.
name = db.StringProperty(required=True)
config = db_properties.JsonProperty(required=True)
scheduled_by = db.UserProperty(required=False)
scheduled_time = db.DateTimeProperty(required=False, auto_now_add=True)
state = db.StringProperty(
required=True,
choices=(TaskStates.SCHEDULED,
TaskStates.ASSIGNED,
TaskStates.COMPLETE),
default=TaskStates.SCHEDULED)
attempts = db.IntegerProperty(required=True, default=0)
max_attempts = db.IntegerProperty(required=True, default=3)
executor_requirements = db.StringListProperty(required=True)
priority = db.IntegerProperty(required=True, default=0)
webhook = db.StringProperty(required=False)
# Set once state == TaskStates.ASSIGNED.
assigned_time = db.DateTimeProperty(required=False)
assigned_worker = db.TextProperty(required=False)
# Set once state == TaskStates.COMPLETE.
completed_time = db.DateTimeProperty(required=False)
outcome = db.StringProperty(
required=False,
choices=(TaskOutcomes.SUCCESS,
TaskOutcomes.TIMED_OUT,
TaskOutcomes.FAILED))
result = db.ReferenceProperty(TaskResult)
def MakeParentKey():
return db.Key.from_path('TaskParent', '0')
def MakeExecutorPauseKey(executor):
"""Returns a db.Key corresponding to given executor."""
return db.Key.from_path('ExecutorPause', executor)
def PauseExecutor(executor):
"""Temporarily pauses execution for given executor."""
key = str(MakeExecutorPauseKey(executor))
memcache.set(key, 'paused')
def ResumeExecutor(executor):
"""Resumes execution for given paused executor.
Returns 0 on network failure, non-zero otherwise.
"""
key = str(MakeExecutorPauseKey(executor))
return memcache.delete(key)
def IsExecutorPaused(executor):
"""Returns True iff executor is paused. False otherwise."""
key = str(MakeExecutorPauseKey(executor))
paused = memcache.get(key)
  return bool(paused)
def Schedule(name, config, scheduled_by, executor_requirements, priority=0):
"""Adds a new Task with given name, config, user and requirements."""
webhook = json.loads(config)['task'].get('webhook', None)
task = Task(parent=MakeParentKey(),
name=name,
config=config,
scheduled_by=scheduled_by,
executor_requirements=executor_requirements,
priority=priority,
webhook=webhook)
db.put(task)
counter.incr('Tasks.Scheduled')
return task
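# Illustrative sketch (not part of the original module): Schedule() expects
# ``config`` to be a JSON string whose top-level 'task' object may carry an
# optional 'webhook' URL and 'timeout' value (the '15m' format below is an
# assumption about what parsetime.ParseTimeDelta accepts):
#
#   config = json.dumps({
#       'task': {
#           'webhook': 'https://example.com/taskman/callback',
#           'timeout': '15m',
#       },
#   })
#   Schedule('smoke-test', config, None, ['android'], priority=0)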
def GetById(task_id):
"""Retrieves Task with given integer task_id."""
key = db.Key.from_path('TaskParent', '0', 'Task', task_id)
return db.get(key)
def GetByName(task_name):
"""Returns list of Tasks with given name, or []."""
assert isinstance(task_name, basestring)
tasks = Task.all().filter('name =', task_name).order('-scheduled_time').fetch(limit=1000)
if not tasks:
return []
if not isinstance(tasks, list):
return [tasks]
return tasks
def DeleteById(task_id):
"""Deletes Task with given integer task_id."""
task = GetById(task_id)
if task is None:
return False
task.delete()
counter.incr('Tasks.Deleted')
return True
def GetByExecutor(executor, limit=1000, keys_only=False):
"""Retrieves a list of tasks waiting for a given executor."""
tasks = (Task.all(keys_only=keys_only)
.ancestor(MakeParentKey())
.filter('state =', TaskStates.SCHEDULED)
.filter('executor_requirements =', executor)
.fetch(limit=limit))
return tasks
def GetOldestTaskForCapability(executor_capability):
"""Retrieves front of the queue for given executor capability.
Args:
executor_capability: Executor capability to search for as str.
Returns:
First Task in the queue, or None.
"""
task = (Task.all()
.ancestor(MakeParentKey())
.filter('state =', TaskStates.SCHEDULED)
.filter('executor_requirements =', executor_capability)
.order('-priority')
.get())
return task
def GetRecentlyFinishedTasks(executor_capability, limit=5):
"""Retrieves most recently finished tasks.
Args:
executor_capability: Executor capability to search for as str.
Returns:
Most recently finished Tasks as list of Task.
"""
  tasks = (Task.all()
           .ancestor(MakeParentKey())
           .filter('state =', TaskStates.COMPLETE)
           .filter('executor_requirements =', executor_capability)
           .order('-completed_time')
           .fetch(limit=limit))
  return tasks
def GetCurrentTask(executor_capability):
"""Retrieves currently assigned Task for executor.
Args:
executor_capability: Executor capability to search for as str.
Returns:
Most recently assigned Task.
"""
task = (Task.all()
.ancestor(MakeParentKey())
.filter('state =', TaskStates.ASSIGNED)
.filter('executor_requirements =', executor_capability)
.get())
return task
def AssignTaskToWorker(task, worker):
"""Takes given Task and assigns to given worker.
Args:
task: Task to assign.
worker: Name of worker as str.
Returns:
None
"""
assert task
assert worker
task.state = TaskStates.ASSIGNED
task.assigned_time = datetime.datetime.now()
task.assigned_worker = worker
task.attempts += 1
db.put(task)
def Assign(worker, executor_capabilities):
"""Looks for Tasks worker can execute, assigning one if possible.
Args:
worker: Name of worker as str.
executor_capabilities: Capabilities as list of str.
Returns:
Task if a Task was assigned, None otherwise.
"""
assert worker
assert executor_capabilities
logging.info('Trying to assign task for %s', executor_capabilities)
def tx(executor_capability):
task = GetOldestTaskForCapability(executor_capability)
if task is None:
return None
logging.info('Assigning task %s to %s for %s.',
task.key().id_or_name(),
worker,
executor_capability)
AssignTaskToWorker(task, worker)
logging.info('Assignment successful.')
ScheduleTaskTimeout(task)
counter.incr('Tasks.Assigned')
counter.incr('Executors.%s.Assigned' % executor_capability)
return task
for executor_capability in executor_capabilities:
if not executor_capability:
continue
if IsExecutorPaused(executor_capability):
continue
task = db.run_in_transaction(tx, executor_capability)
if task:
return task
return None
def UploadTaskResult(task_id, attempt, exit_code,
execution_time, stdout, stderr,
stdout_download_url, stderr_download_url,
device_serial_number, result_metadata, worker_log):
logging.info('Trying to upload result for task %d attempt %d',
task_id, attempt)
def tx():
task = GetById(task_id)
counters = counter.Batch()
# Validate that task is in a state to accept results from worker.
if not task:
raise TaskNotFoundError()
counters.incr('Tasks.Completed')
if device_serial_number:
counters.incr('Executors.%s.Completed' % device_serial_number)
if task.attempts != attempt:
logging.info('Attempts: %d, attempt: %d', task.attempts, attempt)
counter.incr('Tasks.Completed.TimedOut')
if device_serial_number:
counter.incr('Executors.%s.TimedOut' % device_serial_number)
raise TaskTimedOutError()
# Here we allow a timed out task to publish results if it hasn't
# been scheduled to another worker yet.
if task.state not in [TaskStates.ASSIGNED, TaskStates.SCHEDULED]:
logging.info('task.state: %s', task.state)
counter.incr('Tasks.Completed.TimedOut')
if device_serial_number:
counter.incr('Executors.%s.TimedOut' % device_serial_number)
raise TaskTimedOutError()
# Mark task as complete and place results.
task.completed_time = datetime.datetime.now()
if exit_code == 0:
counters.incr('Tasks.Completed.Success')
if device_serial_number:
counters.incr('Executors.%s.Success' % device_serial_number)
task.outcome = TaskOutcomes.SUCCESS
else:
counters.incr('Tasks.Completed.Failed')
if device_serial_number:
counters.incr('Executors.%s.Failed' % device_serial_number)
task.outcome = TaskOutcomes.FAILED
task.state = TaskStates.COMPLETE
task_result = TaskResult(parent=task,
exit_code=exit_code,
execution_time=execution_time,
stdout=stdout,
stderr=stderr,
stdout_download_url=stdout_download_url,
stderr_download_url=stderr_download_url,
device_serial_number=device_serial_number,
result_metadata=result_metadata,
worker_log=worker_log)
task_result = db.put(task_result)
task.result = task_result
db.put(task)
taskqueue.add(url='/tasks/%d/invoke_webhook' % task.key().id(),
transactional=True)
return (task, counters)
(task, counters) = db.run_in_transaction(tx)
logging.info('Insert succeeded.')
counters.commit()
class InvokeWebhookHandler(webapp2.RequestHandler):
def post(self, task_id):
task = GetById(int(task_id))
if not task:
return
config = json.loads(task.config)
try:
webhook = config['task']['webhook']
    except Exception as e:
logging.exception(e)
logging.info('No webhook, or error invoking webhook.')
return
logging.info('invoking webhook: %s', webhook)
payload = urllib.urlencode({'task_id': task_id}).encode('utf-8')
fetched = urlfetch.fetch(url=webhook, method='POST', payload=payload,
headers={'Content-Type':
'application/x-www-form-urlencoded;encoding=utf-8'})
logging.info('Webhook invoked with status %d: %s.', fetched.status_code,
fetched.content)
self.response.set_status(fetched.status_code)
counter.incr('Tasks.WebhookInvoked%s' % fetched.status_code)
def GetTaskTimeout(task, default=datetime.timedelta(minutes=15)):
"""Returns task timeout as timedelta.
Defaults to 15 minutes if no timeout is specified.
A worker is given 3 minutes longer than the task timeout to allow for
the overhead of downloading and installing packages.
"""
config = task.config
parsed_config = json.loads(config)
timeout_str = parsed_config['task'].get('timeout', None)
if not timeout_str:
return default
return (parsetime.ParseTimeDelta(timeout_str) +
datetime.timedelta(minutes=3))
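# Worked example (illustrative, not part of the original module): for a
# config of '{"task": {"timeout": "10m"}}' the worker deadline would be
# roughly 10 + 3 = 13 minutes after assignment, assuming ParseTimeDelta
# accepts a value such as '10m'. With no 'timeout' key the 15 minute default
# is used as-is; the 3 minute grace period is only added to an explicit
# timeout.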
def ScheduleTaskTimeout(task):
"""Schedules a timeout for the given assigned Task.
Must be called inside of a datastore transaction.
Called by Assign to enforce Task timeouts.
"""
timeout = GetTaskTimeout(task)
timeout_task = taskqueue.Task(
eta=(datetime.datetime.now() + timeout),
method='POST',
params={'task_key': task.key(),
'task_attempt': task.attempts},
url='/tasks/timeout')
timeout_task.add(transactional=True)
class TaskTimeoutHandler(webapp2.RequestHandler):
"""Handles Task timeout firing.
A Task may have completed successfully before the handler fires,
in which case a timeout did not occur and this handler will not
modify the completed Task.
"""
def post(self):
task_key = self.request.get('task_key')
task_attempt = int(self.request.get('task_attempt'))
assert task_key
def tx():
task = db.get(task_key)
if not task:
logging.info('Timed out Task %s was deleted.', task_key)
return
if (task.state == TaskStates.ASSIGNED and
task.attempts == task_attempt):
if task.attempts >= task.max_attempts:
task.state = TaskStates.COMPLETE
task.outcome = TaskOutcomes.TIMED_OUT
db.put(task)
else:
          # Note: the result-upload path must check both state and
          # attempts before accepting a late result for this task.
task.state = TaskStates.SCHEDULED
# task.assigned_worker intentionally left so we can see
# who timed out.
db.put(task)
db.run_in_transaction(tx)
def DeleteByExecutor(executor):
delete_task = taskqueue.Task(
method='POST',
url='/executors/%s/deleteall' % executor)
delete_task.add()
class DeleteAllByExecutorHandler(webapp2.RequestHandler):
"""Deletes all tasks for a given executor."""
def post(self, executor):
count = 0
while True:
task_keys = GetByExecutor(executor, limit=1000, keys_only=True)
if not task_keys:
logging.info('Done. Deleted %d tasks total.', count)
return
logging.info('Deleting %d tasks.', len(task_keys))
count += len(task_keys)
db.delete(task_keys)
app = webapp2.WSGIApplication([
('/tasks/timeout', TaskTimeoutHandler),
('/executors/([a-zA-Z0-9]+)/deleteall', DeleteAllByExecutorHandler),
('/tasks/([0-9]+)/invoke_webhook', InvokeWebhookHandler),
], debug=True)
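# --- Illustrative flow (not part of the original module) --------------------
# Under the assumption that the surrounding MrTaskman handlers drive these
# functions, a task moves through roughly this sequence (values hypothetical):
#
#   task = Schedule('build', config, user, ['linux'])   # SCHEDULED
#   task = Assign('worker-1', ['linux'])                 # ASSIGNED + timeout
#   UploadTaskResult(task.key().id(), task.attempts,     # COMPLETE
#                    0, 12.5, None, None, None,
#                    'http://example.com/stderr.txt',
#                    None, None, 'worker log')
#
# If the worker never reports back, TaskTimeoutHandler either reschedules the
# task or marks it TIMED_OUT once max_attempts is reached.
# -----------------------------------------------------------------------------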
|
dbremner/bite-project
|
deps/mrtaskman/server/models/tasks.py
|
Python
|
apache-2.0
| 15,327
|
# Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# for py2/py3 compatibility
from __future__ import print_function
import signal
from . import base
from testrunner.local import utils
class SignalProc(base.TestProcObserver):
def __init__(self):
super(SignalProc, self).__init__()
self.exit_code = utils.EXIT_CODE_PASS
def setup(self, *args, **kwargs):
super(SignalProc, self).setup(*args, **kwargs)
    # This should be called after the processors are chained together so
    # that a caught signal is not lost.
signal.signal(signal.SIGINT, self._on_ctrlc)
signal.signal(signal.SIGTERM, self._on_sigterm)
def _on_ctrlc(self, _signum, _stack_frame):
print('>>> Ctrl-C detected, early abort...')
self.exit_code = utils.EXIT_CODE_INTERRUPTED
self.stop()
def _on_sigterm(self, _signum, _stack_frame):
print('>>> SIGTERM received, early abort...')
self.exit_code = utils.EXIT_CODE_TERMINATED
self.stop()
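# Illustrative sketch (not part of the original module): SignalProc is meant
# to be instantiated like any other test processor, with setup() called only
# after the processor chain is assembled (surrounding runner code assumed):
#
#   sigproc = SignalProc()
#   # ... chain sigproc together with the other processors ...
#   sigproc.setup()
#   # after the run, sigproc.exit_code reflects Ctrl-C / SIGTERM aborts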
|
weolar/miniblink49
|
v8_7_5/tools/testrunner/testproc/sigproc.py
|
Python
|
apache-2.0
| 1,059
|
from ..broker import Broker
class SubnetBroker(Broker):
controller = "subnets"
def show(self, **kwargs):
"""Shows the details for the specified subnet.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of subnet methods. The listed methods will be called on each subnet returned and included in the output. Available methods are: network_name, data_source, vlan.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, vlan.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return subnet: The subnet identified by the specified SubnetID.
:rtype subnet: Subnet
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
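    # Illustrative usage (not part of the generated broker): assuming a
    # NetMRI API client has been created elsewhere, show() is typically
    # reached through that client, e.g.
    #
    #   broker = client.get_broker('Subnet')      # hypothetical client setup
    #   subnet = broker.show(SubnetID=12, include=['vlan'])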
def index(self, **kwargs):
"""Lists the available subnets. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetCIDR: The subnet in CIDR format.
:type SubnetCIDR: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetCIDR: The subnet in CIDR format.
:type SubnetCIDR: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetIPNumeric: The numerical value of the network address.
:type SubnetIPNumeric: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetIPNumeric: The numerical value of the network address.
:type SubnetIPNumeric: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param VirtualNetworkID: The internal NetMRI identifier of the Virtual Network on which this subnet is defined, or blank if this cannot be determined.
:type VirtualNetworkID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param VirtualNetworkID: The internal NetMRI identifier of the Virtual Network on which this subnet is defined, or blank if this cannot be determined.
:type VirtualNetworkID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param VlanID: The internal NetMRI identifier of the VLAN on which this subnet is defined, or blank if this cannot be determined.
:type VlanID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param VlanID: The internal NetMRI identifier of the VLAN on which this subnet is defined, or blank if this cannot be determined.
:type VlanID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the subnets as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of subnet methods. The listed methods will be called on each subnet returned and included in the output. Available methods are: network_name, data_source, vlan.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, vlan.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
            :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` SubnetID
:param sort: The data field(s) to use for sorting the output. Default is SubnetID. Valid values are SubnetID, DataSourceID, SubnetStartTime, SubnetEndTime, SubnetChangedCols, SubnetTimestamp, SubnetSource, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetCIDR, SubnetLocation, VlanID, VirtualNetworkID, RouteTimestamp, SubnetSdnInd.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each Subnet. Valid values are SubnetID, DataSourceID, SubnetStartTime, SubnetEndTime, SubnetChangedCols, SubnetTimestamp, SubnetSource, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetCIDR, SubnetLocation, VlanID, VirtualNetworkID, RouteTimestamp, SubnetSdnInd. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return subnets: An array of the Subnet objects that match the specified input criteria.
:rtype subnets: Array of Subnet
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
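    # Illustrative usage (not part of the generated broker): index() supports
    # server-side paging and sorting through the generic start/limit/sort/dir
    # inputs documented above, e.g.
    #
    #   page = broker.index(start=0, limit=100,
    #                       sort=['SubnetIPNumeric'], dir=['asc'])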
def search(self, **kwargs):
"""Lists the available subnets matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param RouteTimestamp: The date and time that this subnet was last seen on any router.
:type RouteTimestamp: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param RouteTimestamp: The date and time that this subnet was last seen on any router.
:type RouteTimestamp: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetCIDR: The subnet in CIDR format.
:type SubnetCIDR: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetCIDR: The subnet in CIDR format.
:type SubnetCIDR: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetChangedCols: The fields that changed between this revision of the record and the previous revision.
:type SubnetChangedCols: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetChangedCols: The fields that changed between this revision of the record and the previous revision.
:type SubnetChangedCols: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetEndTime: The ending effective time of this revision of this record, or empty if still in effect.
:type SubnetEndTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetEndTime: The ending effective time of this revision of this record, or empty if still in effect.
:type SubnetEndTime: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetIPDotted: The subnet network address in dotted (or colon-delimited for IPv6) format.
:type SubnetIPDotted: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetIPDotted: The subnet network address in dotted (or colon-delimited for IPv6) format.
:type SubnetIPDotted: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetIPNumeric: The numerical value of the network address.
:type SubnetIPNumeric: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetIPNumeric: The numerical value of the network address.
:type SubnetIPNumeric: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetLastIPNumeric: The last IP address in the subnet. All IPs in the subnet will be between SubnetIPNumeric (Network Address, Numerical) and SubnetLastIPNumeric (Last IP Address, Numerical), inclusive.
:type SubnetLastIPNumeric: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetLastIPNumeric: The last IP address in the subnet. All IPs in the subnet will be between SubnetIPNumeric (Network Address, Numerical) and SubnetLastIPNumeric (Last IP Address, Numerical), inclusive.
:type SubnetLastIPNumeric: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetLocation: If the subnet is located within the NetMRI's configured CIDR blocks, it is identified as 'internal'. Otherwise, it is identified as 'external'.
:type SubnetLocation: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetLocation: If the subnet is located within the NetMRI's configured CIDR blocks, it is identified as 'internal'. Otherwise, it is identified as 'external'.
:type SubnetLocation: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetNetMaskDotted: The subnet network mask in dotted (or colon-delimited for IPv6) format.
:type SubnetNetMaskDotted: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetNetMaskDotted: The subnet network mask in dotted (or colon-delimited for IPv6) format.
:type SubnetNetMaskDotted: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetNetMaskNumeric: The numerical value of the network mask.
:type SubnetNetMaskNumeric: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetNetMaskNumeric: The numerical value of the network mask.
:type SubnetNetMaskNumeric: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
            :param SubnetSdnInd: A flag indicating whether this subnet was collected from an SDN controller.
:type SubnetSdnInd: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
            :param SubnetSdnInd: A flag indicating whether this subnet was collected from an SDN controller.
:type SubnetSdnInd: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetSource: Internal tracking information for NetMRI algorithms.
:type SubnetSource: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetSource: Internal tracking information for NetMRI algorithms.
:type SubnetSource: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetStartTime: The starting effective time of this revision of the record.
:type SubnetStartTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetStartTime: The starting effective time of this revision of the record.
:type SubnetStartTime: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param SubnetTimestamp: The date and time this record was collected or calculated.
:type SubnetTimestamp: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param SubnetTimestamp: The date and time this record was collected or calculated.
:type SubnetTimestamp: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param VirtualNetworkID: The internal NetMRI identifier of the Virtual Network on which this subnet is defined, or blank if this cannot be determined.
:type VirtualNetworkID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param VirtualNetworkID: The internal NetMRI identifier of the Virtual Network on which this subnet is defined, or blank if this cannot be determined.
:type VirtualNetworkID: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param VlanID: The internal NetMRI identifier of the VLAN on which this subnet is defined, or blank if this cannot be determined.
:type VlanID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param VlanID: The internal NetMRI identifier of the VLAN on which this subnet is defined, or blank if this cannot be determined.
:type VlanID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the subnets as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of subnet methods. The listed methods will be called on each subnet returned and included in the output. Available methods are: network_name, data_source, vlan.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, vlan.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
            :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` SubnetID
:param sort: The data field(s) to use for sorting the output. Default is SubnetID. Valid values are SubnetID, DataSourceID, SubnetStartTime, SubnetEndTime, SubnetChangedCols, SubnetTimestamp, SubnetSource, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetCIDR, SubnetLocation, VlanID, VirtualNetworkID, RouteTimestamp, SubnetSdnInd.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each Subnet. Valid values are SubnetID, DataSourceID, SubnetStartTime, SubnetEndTime, SubnetChangedCols, SubnetTimestamp, SubnetSource, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetCIDR, SubnetLocation, VlanID, VirtualNetworkID, RouteTimestamp, SubnetSdnInd. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against subnets, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, RouteTimestamp, SubnetCIDR, SubnetChangedCols, SubnetEndTime, SubnetID, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetLocation, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetSdnInd, SubnetSource, SubnetStartTime, SubnetTimestamp, VirtualNetworkID, VlanID.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
            :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return subnets: An array of the Subnet objects that match the specified input criteria.
:rtype subnets: Array of Subnet
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
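    # Illustrative usage (not part of the generated broker): search() adds a
    # free-text 'query' input on top of the index() filters, e.g.
    #
    #   matches = broker.search(query='10.0.', limit=50)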
def find(self, **kwargs):
"""Lists the available subnets matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, RouteTimestamp, SubnetCIDR, SubnetChangedCols, SubnetEndTime, SubnetID, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetLocation, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetSdnInd, SubnetSource, SubnetStartTime, SubnetTimestamp, VirtualNetworkID, VlanID.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_RouteTimestamp: The operator to apply to the field RouteTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. RouteTimestamp: The date and time that this subnet was last seen on any router. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_RouteTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_RouteTimestamp: If op_RouteTimestamp is specified, the field named in this input will be compared to the value in RouteTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_RouteTimestamp must be specified if op_RouteTimestamp is specified.
:type val_f_RouteTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_RouteTimestamp: If op_RouteTimestamp is specified, this value will be compared to the value in RouteTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_RouteTimestamp must be specified if op_RouteTimestamp is specified.
:type val_c_RouteTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetCIDR: The operator to apply to the field SubnetCIDR. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetCIDR: The subnet in CIDR format. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetCIDR: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetCIDR: If op_SubnetCIDR is specified, the field named in this input will be compared to the value in SubnetCIDR using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetCIDR must be specified if op_SubnetCIDR is specified.
:type val_f_SubnetCIDR: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetCIDR: If op_SubnetCIDR is specified, this value will be compared to the value in SubnetCIDR using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetCIDR must be specified if op_SubnetCIDR is specified.
:type val_c_SubnetCIDR: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetChangedCols: The operator to apply to the field SubnetChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetChangedCols: If op_SubnetChangedCols is specified, the field named in this input will be compared to the value in SubnetChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetChangedCols must be specified if op_SubnetChangedCols is specified.
:type val_f_SubnetChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetChangedCols: If op_SubnetChangedCols is specified, this value will be compared to the value in SubnetChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetChangedCols must be specified if op_SubnetChangedCols is specified.
:type val_c_SubnetChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetEndTime: The operator to apply to the field SubnetEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetEndTime: The ending effective time of this revision of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetEndTime: If op_SubnetEndTime is specified, the field named in this input will be compared to the value in SubnetEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetEndTime must be specified if op_SubnetEndTime is specified.
:type val_f_SubnetEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetEndTime: If op_SubnetEndTime is specified, this value will be compared to the value in SubnetEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetEndTime must be specified if op_SubnetEndTime is specified.
:type val_c_SubnetEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetID: The operator to apply to the field SubnetID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetID: The internal NetMRI identifier for this subnet. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetID: If op_SubnetID is specified, the field named in this input will be compared to the value in SubnetID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetID must be specified if op_SubnetID is specified.
:type val_f_SubnetID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetID: If op_SubnetID is specified, this value will be compared to the value in SubnetID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetID must be specified if op_SubnetID is specified.
:type val_c_SubnetID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetIPDotted: The operator to apply to the field SubnetIPDotted. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetIPDotted: The subnet network address in dotted (or colon-delimited for IPv6) format. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetIPDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetIPDotted: If op_SubnetIPDotted is specified, the field named in this input will be compared to the value in SubnetIPDotted using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetIPDotted must be specified if op_SubnetIPDotted is specified.
:type val_f_SubnetIPDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetIPDotted: If op_SubnetIPDotted is specified, this value will be compared to the value in SubnetIPDotted using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetIPDotted must be specified if op_SubnetIPDotted is specified.
:type val_c_SubnetIPDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetIPNumeric: The operator to apply to the field SubnetIPNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetIPNumeric: The numerical value of the network address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetIPNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetIPNumeric: If op_SubnetIPNumeric is specified, the field named in this input will be compared to the value in SubnetIPNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetIPNumeric must be specified if op_SubnetIPNumeric is specified.
:type val_f_SubnetIPNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetIPNumeric: If op_SubnetIPNumeric is specified, this value will be compared to the value in SubnetIPNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetIPNumeric must be specified if op_SubnetIPNumeric is specified.
:type val_c_SubnetIPNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetLastIPNumeric: The operator to apply to the field SubnetLastIPNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetLastIPNumeric: The last IP address in the subnet. All IPs in the subnet will be between SubnetIPNumeric (Network Address, Numerical) and SubnetLastIPNumeric (Last IP Address, Numerical), inclusive. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetLastIPNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetLastIPNumeric: If op_SubnetLastIPNumeric is specified, the field named in this input will be compared to the value in SubnetLastIPNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetLastIPNumeric must be specified if op_SubnetLastIPNumeric is specified.
:type val_f_SubnetLastIPNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetLastIPNumeric: If op_SubnetLastIPNumeric is specified, this value will be compared to the value in SubnetLastIPNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetLastIPNumeric must be specified if op_SubnetLastIPNumeric is specified.
:type val_c_SubnetLastIPNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetLocation: The operator to apply to the field SubnetLocation. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetLocation: If the subnet is located within the NetMRI's configured CIDR blocks, it is identified as 'internal'. Otherwise, it is identified as 'external'. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetLocation: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetLocation: If op_SubnetLocation is specified, the field named in this input will be compared to the value in SubnetLocation using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetLocation must be specified if op_SubnetLocation is specified.
:type val_f_SubnetLocation: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetLocation: If op_SubnetLocation is specified, this value will be compared to the value in SubnetLocation using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetLocation must be specified if op_SubnetLocation is specified.
:type val_c_SubnetLocation: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetNetMaskDotted: The operator to apply to the field SubnetNetMaskDotted. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetNetMaskDotted: The subnet network mask in dotted (or colon-delimited for IPv6) format. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetNetMaskDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetNetMaskDotted: If op_SubnetNetMaskDotted is specified, the field named in this input will be compared to the value in SubnetNetMaskDotted using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetNetMaskDotted must be specified if op_SubnetNetMaskDotted is specified.
:type val_f_SubnetNetMaskDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetNetMaskDotted: If op_SubnetNetMaskDotted is specified, this value will be compared to the value in SubnetNetMaskDotted using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetNetMaskDotted must be specified if op_SubnetNetMaskDotted is specified.
:type val_c_SubnetNetMaskDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetNetMaskNumeric: The operator to apply to the field SubnetNetMaskNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetNetMaskNumeric: The numerical value of the network mask. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetNetMaskNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetNetMaskNumeric: If op_SubnetNetMaskNumeric is specified, the field named in this input will be compared to the value in SubnetNetMaskNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetNetMaskNumeric must be specified if op_SubnetNetMaskNumeric is specified.
:type val_f_SubnetNetMaskNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetNetMaskNumeric: If op_SubnetNetMaskNumeric is specified, this value will be compared to the value in SubnetNetMaskNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetNetMaskNumeric must be specified if op_SubnetNetMaskNumeric is specified.
:type val_c_SubnetNetMaskNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
            :param op_SubnetSdnInd: The operator to apply to the field SubnetSdnInd. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetSdnInd: A flag indicating whether this subnet was collected from an SDN controller. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetSdnInd: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetSdnInd: If op_SubnetSdnInd is specified, the field named in this input will be compared to the value in SubnetSdnInd using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetSdnInd must be specified if op_SubnetSdnInd is specified.
:type val_f_SubnetSdnInd: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetSdnInd: If op_SubnetSdnInd is specified, this value will be compared to the value in SubnetSdnInd using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetSdnInd must be specified if op_SubnetSdnInd is specified.
:type val_c_SubnetSdnInd: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetSource: The operator to apply to the field SubnetSource. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetSource: Internal tracking information for NetMRI algorithms. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetSource: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetSource: If op_SubnetSource is specified, the field named in this input will be compared to the value in SubnetSource using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetSource must be specified if op_SubnetSource is specified.
:type val_f_SubnetSource: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetSource: If op_SubnetSource is specified, this value will be compared to the value in SubnetSource using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetSource must be specified if op_SubnetSource is specified.
:type val_c_SubnetSource: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetStartTime: The operator to apply to the field SubnetStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetStartTime: The starting effective time of this revision of the record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetStartTime: If op_SubnetStartTime is specified, the field named in this input will be compared to the value in SubnetStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetStartTime must be specified if op_SubnetStartTime is specified.
:type val_f_SubnetStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetStartTime: If op_SubnetStartTime is specified, this value will be compared to the value in SubnetStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetStartTime must be specified if op_SubnetStartTime is specified.
:type val_c_SubnetStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_SubnetTimestamp: The operator to apply to the field SubnetTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. SubnetTimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_SubnetTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_SubnetTimestamp: If op_SubnetTimestamp is specified, the field named in this input will be compared to the value in SubnetTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_SubnetTimestamp must be specified if op_SubnetTimestamp is specified.
:type val_f_SubnetTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_SubnetTimestamp: If op_SubnetTimestamp is specified, this value will be compared to the value in SubnetTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_SubnetTimestamp must be specified if op_SubnetTimestamp is specified.
:type val_c_SubnetTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_VirtualNetworkID: The operator to apply to the field VirtualNetworkID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. VirtualNetworkID: The internal NetMRI identifier of the Virtual Network on which this subnet is defined, or blank if this cannot be determined. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_VirtualNetworkID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_VirtualNetworkID: If op_VirtualNetworkID is specified, the field named in this input will be compared to the value in VirtualNetworkID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_VirtualNetworkID must be specified if op_VirtualNetworkID is specified.
:type val_f_VirtualNetworkID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_VirtualNetworkID: If op_VirtualNetworkID is specified, this value will be compared to the value in VirtualNetworkID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_VirtualNetworkID must be specified if op_VirtualNetworkID is specified.
:type val_c_VirtualNetworkID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_VlanID: The operator to apply to the field VlanID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. VlanID: The internal NetMRI identifier of the VLAN on which this subnet is defined, or blank if this cannot be determined. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_VlanID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_VlanID: If op_VlanID is specified, the field named in this input will be compared to the value in VlanID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_VlanID must be specified if op_VlanID is specified.
:type val_f_VlanID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_VlanID: If op_VlanID is specified, this value will be compared to the value in VlanID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_VlanID must be specified if op_VlanID is specified.
:type val_c_VlanID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the subnets as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of subnet methods. The listed methods will be called on each subnet returned and included in the output. Available methods are: network_name, data_source, vlan.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source, vlan.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` SubnetID
:param sort: The data field(s) to use for sorting the output. Default is SubnetID. Valid values are SubnetID, DataSourceID, SubnetStartTime, SubnetEndTime, SubnetChangedCols, SubnetTimestamp, SubnetSource, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetCIDR, SubnetLocation, VlanID, VirtualNetworkID, RouteTimestamp, SubnetSdnInd.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each Subnet. Valid values are SubnetID, DataSourceID, SubnetStartTime, SubnetEndTime, SubnetChangedCols, SubnetTimestamp, SubnetSource, SubnetIPDotted, SubnetIPNumeric, SubnetLastIPNumeric, SubnetNetMaskDotted, SubnetNetMaskNumeric, SubnetCIDR, SubnetLocation, VlanID, VirtualNetworkID, RouteTimestamp, SubnetSdnInd. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if it is not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return subnets: An array of the Subnet objects that match the specified input criteria.
:rtype subnets: Array of Subnet
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
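# A minimal usage sketch, assuming a configured InfobloxNetMRI client that
# exposes this broker through get_broker(); the host, credentials and filter
# values below are hypothetical. A filtered find() call pairs each op_*
# operator with either a val_c_* constant or a val_f_* field name:
#
#   from infoblox_netmri.client import InfobloxNetMRI
#   client = InfobloxNetMRI("netmri.example.com", "admin", "secret")
#   broker = client.get_broker("Subnet")
#   subnets = broker.find(op_SubnetCIDR="like", val_c_SubnetCIDR="10.%",
#                         limit=100, sort=["SubnetID"], dir=["asc"])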
def data_source(self, **kwargs):
"""The NetMRI device that collected this record.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The NetMRI device that collected this record.
:rtype : DataSource
"""
return self.api_request(self._get_method_fullname("data_source"), kwargs)
def vlan(self, **kwargs):
"""The VLAN on which this subnet is defined, or blank if this cannot be determined.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : The VLAN on which this subnet is defined, or blank if this cannot be determined.
:rtype : Vlan
"""
return self.api_request(self._get_method_fullname("vlan"), kwargs)
def network_name(self, **kwargs):
"""A Network View assigned to the subnet.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param SubnetID: The internal NetMRI identifier for this subnet.
:type SubnetID: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return : A Network View assigned to the subnet.
:rtype : String
"""
return self.api_request(self._get_method_fullname("network_name"), kwargs)
|
infobloxopen/infoblox-netmri
|
infoblox_netmri/api/broker/v3_8_0/subnet_broker.py
|
Python
|
apache-2.0
| 68,120
|
import re
import numpy as np
from scipy import special
from .common import with_attributes, safe_import
with safe_import():
from scipy.special import cython_special
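# FUNC_ARGS below maps each benchmark name to the sample arguments passed to
# the wrapped function. The suffix after the last underscore encodes the
# argument types matched by the '^(.*)_[dDl]+$' regex in setup(): 'd' is a
# real double, 'D' a complex double and 'l' an integer, so e.g. 'jv_dd'
# calls jv with two real arguments while 'erf_D' calls erf with a complex one.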
FUNC_ARGS = {
'airy_d': (1,),
'airy_D': (1,),
'beta_dd': (0.25, 0.75),
'erf_d': (1,),
'erf_D': (1+1j,),
'exprel_d': (1e-6,),
'gamma_d': (100,),
'gamma_D': (100+100j,),
'jv_dd': (1, 1),
'jv_dD': (1, (1+1j)),
'loggamma_D': (20,),
'logit_d': (0.5,),
'psi_d': (1,),
'psi_D': (1,),
}
class _CythonSpecialMeta(type):
"""
Add time_* benchmarks corresponding to cython_special._bench_*_cy
"""
def __new__(cls, cls_name, bases, dct):
params = [(10, 100, 1000), ('python', 'numpy', 'cython')]
param_names = ['N', 'api']
def get_time_func(name, args):
@with_attributes(params=[(name,), (args,)] + params,
param_names=['name', 'argument'] + param_names)
def func(self, name, args, N, api):
if api == 'python':
self.py_func(N, *args)
elif api == 'numpy':
self.np_func(*self.obj)
else:
self.cy_func(N, *args)
func.__name__ = 'time_' + name
return func
for name in FUNC_ARGS.keys():
func = get_time_func(name, FUNC_ARGS[name])
dct[func.__name__] = func
return type.__new__(cls, cls_name, bases, dct)
class CythonSpecial(metaclass=_CythonSpecialMeta):
def setup(self, name, args, N, api):
self.py_func = getattr(cython_special, '_bench_{}_py'.format(name))
self.cy_func = getattr(cython_special, '_bench_{}_cy'.format(name))
m = re.match('^(.*)_[dDl]+$', name)
self.np_func = getattr(special, m.group(1))
self.obj = []
for arg in args:
self.obj.append(arg*np.ones(N))
self.obj = tuple(self.obj)
|
WarrenWeckesser/scipy
|
benchmarks/benchmarks/cython_special.py
|
Python
|
bsd-3-clause
| 1,956
|
# encoding: UTF-8
__author__ = 'CHENXY'
from string import join
from sgit_struct import structDict
def processCallBack(line):
orignalLine = line
line = line.replace(' virtual void ', '') # remove the unwanted text at the start of the line
line = line.replace('{};\n', '') # remove the unwanted text at the end of the line
content = line.split('(')
cbName = content[0] # callback function name
cbArgs = content[1] # callback function arguments
if cbArgs[-1] == ' ':
cbArgs = cbArgs.replace(') ', '')
else:
cbArgs = cbArgs.replace(')', '')
cbArgsList = cbArgs.split(', ') # split the arguments into a list
cbArgsTypeList = []
cbArgsValueList = []
for arg in cbArgsList: # process each argument
content = arg.split(' ')
if len(content) > 1:
if 'struct' not in content:
cbArgsTypeList.append(content[0]) # argument type list
cbArgsValueList.append(content[1]) # argument value list
else:
print content
cbArgsTypeList.append(content[1]) # argument type list
cbArgsValueList.append(content[2]+content[3]) # argument value list
createTask(cbName, cbArgsTypeList, cbArgsValueList, orignalLine)
createProcess(cbName, cbArgsTypeList, cbArgsValueList)
# generate the process declarations for the .h file
process_line = 'void process' + cbName[2:] + '(Task task);\n'
fheaderprocess.write(process_line)
fheaderprocess.write('\n')
# generate the on-callback declarations for the .h file
if 'OnRspError' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict error) {};\n'
elif 'OnRspQry' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error, int id, bool last) {};\n'
elif 'OnRsp' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error, int id, bool last) {};\n'
elif 'OnRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data) {};\n'
elif 'OnErrRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error) {};\n'
else:
on_line = ''
fheaderon.write(on_line)
fheaderon.write('\n')
# generate the wrapper section
createWrap(cbName)
#----------------------------------------------------------------------
def createWrap(cbName):
"""在Python封装段代码中进行处理"""
# 生成.h文件中的on部分
if 'OnRspError' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict error)\n'
override_line = '("on' + cbName[2:] + '")(error);\n'
elif 'OnRsp' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error, int id, bool last)\n'
override_line = '("on' + cbName[2:] + '")(data, error, id, last);\n'
elif 'OnRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data)\n'
override_line = '("on' + cbName[2:] + '")(data);\n'
elif 'OnErrRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error)\n'
override_line = '("on' + cbName[2:] + '")(data, error);\n'
else:
on_line = ''
if on_line != '':
fwrap.write(on_line)
fwrap.write('{\n')
fwrap.write('\ttry\n')
fwrap.write('\t{\n')
fwrap.write('\t\tthis->get_override'+override_line)
fwrap.write('\t}\n')
fwrap.write('\tcatch (error_already_set const &)\n')
fwrap.write('\t{\n')
fwrap.write('\t\tPyErr_Print();\n')
fwrap.write('\t}\n')
fwrap.write('};\n')
fwrap.write('\n')
def createTask(cbName, cbArgsTypeList, cbArgsValueList, orignalLine):
# build a Task object from the callback and push it onto the task queue
funcline = orignalLine.replace(' virtual void ', 'void ' + apiName + '::')
funcline = funcline.replace('{};', '')
funcline = funcline.replace(' {}', '')
ftask.write(funcline)
ftask.write('{\n')
ftask.write("\tTask task = Task();\n")
ftask.write("\ttask.task_name = " + cbName.upper() + ";\n")
# #define constants
global define_count
fdefine.write("#define " + cbName.upper() + ' ' + str(define_count) + '\n')
define_count = define_count + 1
# switch-case block
fswitch.write("case " + cbName.upper() + ':\n')
fswitch.write("{\n")
fswitch.write("\tthis->" + cbName.replace('On', 'process') + '(task);\n')
fswitch.write("\tbreak;\n")
fswitch.write("}\n")
fswitch.write("\n")
for i, type_ in enumerate(cbArgsTypeList):
if type_ == 'int':
ftask.write("\ttask.task_id = " + cbArgsValueList[i] + ";\n")
elif type_ == 'bool':
ftask.write("\ttask.task_last = " + cbArgsValueList[i] + ";\n")
elif 'CSgitFtdcRspInfoField' in type_:
ftask.write("\n")
ftask.write("\tif (pRspInfo)\n")
ftask.write("\t{\n")
ftask.write("\t\ttask.task_error = " + cbArgsValueList[i] + ";\n")
ftask.write("\t}\n")
ftask.write("\telse\n")
ftask.write("\t{\n")
ftask.write("\t\tCSgitFtdcRspInfoField empty_error = CSgitFtdcRspInfoField();\n")
ftask.write("\t\tmemset(&empty_error, 0, sizeof(empty_error));\n")
ftask.write("\t\ttask.task_error = empty_error;\n")
ftask.write("\t}\n")
else:
ftask.write("\n")
ftask.write("\tif (" + cbArgsValueList[i][1:] + ")\n")
ftask.write("\t{\n")
ftask.write("\t\ttask.task_data = " + cbArgsValueList[i] + ";\n")
ftask.write("\t}\n")
ftask.write("\telse\n")
ftask.write("\t{\n")
ftask.write("\t\t" + type_ + " empty_data = " + type_ + "();\n")
ftask.write("\t\tmemset(&empty_data, 0, sizeof(empty_data));\n")
ftask.write("\t\ttask.task_data = empty_data;\n")
ftask.write("\t}\n")
ftask.write("\tthis->task_queue.push(task);\n")
ftask.write("};\n")
ftask.write("\n")
def createProcess(cbName, cbArgsTypeList, cbArgsValueList):
# pop the task from the queue and convert it into a Python dict
fprocess.write("void " + apiName + '::' + cbName.replace('On', 'process') + '(Task task)' + "\n")
fprocess.write("{\n")
fprocess.write("\tPyLock lock;\n")
onArgsList = []
for i, type_ in enumerate(cbArgsTypeList):
if 'CSgitFtdcRspInfoField' in type_:
fprocess.write("\t"+ type_ + ' task_error = any_cast<' + type_ + '>(task.task_error);\n')
fprocess.write("\t"+ "dict error;\n")
struct = structDict[type_]
for key in struct.keys():
fprocess.write("\t"+ 'error["' + key + '"] = task_error.' + key + ';\n')
fprocess.write("\n")
onArgsList.append('error')
elif type_ in structDict:
fprocess.write("\t"+ type_ + ' task_data = any_cast<' + type_ + '>(task.task_data);\n')
fprocess.write("\t"+ "dict data;\n")
struct = structDict[type_]
for key in struct.keys():
fprocess.write("\t"+ 'data["' + key + '"] = task_data.' + key + ';\n')
fprocess.write("\n")
onArgsList.append('data')
elif type_ == 'bool':
onArgsList.append('task.task_last')
elif type_ == 'int':
onArgsList.append('task.task_id')
onArgs = join(onArgsList, ', ')
fprocess.write('\tthis->' + cbName.replace('On', 'on') + '(' + onArgs +');\n')
fprocess.write("};\n")
fprocess.write("\n")
def processFunction(line):
line = line.replace(' virtual int ', '') # remove the unwanted text at the start of the line
line = line.replace(') = 0;\n', '') # remove the unwanted text at the end of the line
content = line.split('(')
fcName = content[0] # function name
fcArgs = content[1] # function arguments
fcArgs = fcArgs.replace(')', '')
fcArgsList = fcArgs.split(',') # split the arguments into a list
fcArgsTypeList = []
fcArgsValueList = []
for arg in fcArgsList: # process each argument
content = arg.split(' ')
if len(content) >= 2:
fcArgsTypeList.append(content[0]) # argument type list
fcArgsValueList.append(content[1]) # argument value list
print line
print fcArgs
print fcArgsList
print fcArgsTypeList
if len(fcArgsTypeList)>0 and fcArgsTypeList[0] in structDict:
createFunction(fcName, fcArgsTypeList, fcArgsValueList)
# generate the request-function declarations for the .h file
if 'Req' in fcName:
req_line = 'int req' + fcName[3:] + '(dict req, int nRequestID);\n'
fheaderfunction.write(req_line)
fheaderfunction.write('\n')
def createFunction(fcName, fcArgsTypeList, fcArgsValueList):
type_ = fcArgsTypeList[0]
struct = structDict[type_]
ffunction.write('int ' + apiName + '::req' + fcName[3:] + '(dict req, int nRequestID)\n')
ffunction.write('{\n')
ffunction.write('\t' + type_ +' myreq = ' + type_ + '();\n')
ffunction.write('\tmemset(&myreq, 0, sizeof(myreq));\n')
for key, value in struct.items():
if value == 'string':
line = '\tgetString(req, "' + key + '", myreq.' + key + ');\n'
elif value == 'char':
line = '\tgetChar(req, "' + key + '", &myreq.' + key + ');\n'
elif value == 'int':
line = '\tgetInt(req, "' + key + '", &myreq.' + key + ');\n'
elif value == 'long':
line = '\tgetLong(req, "' + key + '", &myreq.' + key + ');\n'
elif value == 'short':
line = '\tgetShort(req, "' + key + '", &myreq.' + key + ');\n'
elif value == 'double':
line = '\tgetDouble(req, "' + key + '", &myreq.' + key + ');\n'
ffunction.write(line)
ffunction.write('\tint i = this->api->' + fcName + '(&myreq, nRequestID);\n')
ffunction.write('\treturn i;\n')
ffunction.write('};\n')
ffunction.write('\n')
#########################################################
apiName = 'MdApi'
fcpp = open('SgitFtdcMdApi.h', 'r')
ftask = open('sgit_md_task.cpp', 'w')
fprocess = open('sgit_md_process.cpp', 'w')
ffunction = open('sgit_md_function.cpp', 'w')
fdefine = open('sgit_md_define.cpp', 'w')
fswitch = open('sgit_md_switch.cpp', 'w')
fheaderprocess = open('sgit_md_header_process.h', 'w')
fheaderon = open('sgit_md_header_on.h', 'w')
fheaderfunction = open('sgit_md_header_function.h', 'w')
fwrap = open('sgit_md_wrap.cpp', 'w')
define_count = 1
for line in fcpp:
if " virtual void On" in line:
print 'callback'
processCallBack(line)
elif " virtual int" in line:
print 'function'
processFunction(line)
fcpp.close()
ftask.close()
fprocess.close()
ffunction.close()
fswitch.close()
fdefine.close()
fheaderprocess.close()
fheaderon.close()
fheaderfunction.close()
fwrap.close()
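# A minimal usage sketch, assuming the input and output locations used above:
# run the script with Python 2 from a directory containing SgitFtdcMdApi.h;
# it writes the generated C++/header fragments (sgit_md_task.cpp,
# sgit_md_process.cpp, sgit_md_function.cpp, sgit_md_define.cpp,
# sgit_md_switch.cpp, the sgit_md_header_*.h files and sgit_md_wrap.cpp)
# into the same directory.
#
#   python2 generate_md_functions.py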
|
lukesummer/vnpy
|
vn.sgit/pyscript/generate_md_functions.py
|
Python
|
mit
| 11,152
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints reference to documents citing this one
"""
__revision__ = "$Id$"
import cgi
def format_element(bfo, separator='; '):
"""
Prints a list of records citing this record
@param separator: a separator between citations
"""
from urllib import quote
from invenio.config import CFG_SITE_URL
primary_report_numbers = bfo.fields('037__a')
additional_report_numbers = bfo.fields('088__a')
primary_citations = ['<a href="' + CFG_SITE_URL + \
'/search?f=reference&p=' + quote(report_number) + \
'&ln='+ bfo.lang +'">' + \
cgi.escape(report_number) + '</a>' \
for report_number in primary_report_numbers]
additional_citations = ['<a href="' + CFG_SITE_URL + \
'/search?f=reference&p=' + quote(report_number)+ \
'&ln='+ bfo.lang + '">' + \
cgi.escape(report_number) + '</a>' \
for report_number in additional_report_numbers]
citations = primary_citations
citations.extend(additional_citations)
return separator.join(citations)
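# A minimal output sketch (hypothetical record, assuming bfo.lang == 'en'):
# a record whose 037__a field holds "CERN-PH-EP-2012-001" would render as
#   <a href="<CFG_SITE_URL>/search?f=reference&p=CERN-PH-EP-2012-001&ln=en">CERN-PH-EP-2012-001</a>
# with multiple report numbers joined by the separator argument.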
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
|
kaplun/Invenio-OpenAIRE
|
modules/bibformat/lib/elements/bfe_cited_by.py
|
Python
|
gpl-2.0
| 2,193
|
"""HTTP Headers constants."""
from .multidict import upstr
METH_ANY = upstr('*')
METH_CONNECT = upstr('CONNECT')
METH_HEAD = upstr('HEAD')
METH_GET = upstr('GET')
METH_DELETE = upstr('DELETE')
METH_OPTIONS = upstr('OPTIONS')
METH_PATCH = upstr('PATCH')
METH_POST = upstr('POST')
METH_PUT = upstr('PUT')
METH_TRACE = upstr('TRACE')
ACCEPT = upstr('ACCEPT')
ACCEPT_CHARSET = upstr('ACCEPT-CHARSET')
ACCEPT_ENCODING = upstr('ACCEPT-ENCODING')
ACCEPT_LANGUAGE = upstr('ACCEPT-LANGUAGE')
ACCEPT_RANGES = upstr('ACCEPT-RANGES')
ACCESS_CONTROL_MAX_AGE = upstr('ACCESS-CONTROL-MAX-AGE')
ACCESS_CONTROL_ALLOW_CREDENTIALS = upstr('ACCESS-CONTROL-ALLOW-CREDENTIALS')
ACCESS_CONTROL_ALLOW_HEADERS = upstr('ACCESS-CONTROL-ALLOW-HEADERS')
ACCESS_CONTROL_ALLOW_METHODS = upstr('ACCESS-CONTROL-ALLOW-METHODS')
ACCESS_CONTROL_ALLOW_ORIGIN = upstr('ACCESS-CONTROL-ALLOW-ORIGIN')
ACCESS_CONTROL_EXPOSE_HEADERS = upstr('ACCESS-CONTROL-EXPOSE-HEADERS')
ACCESS_CONTROL_REQUEST_HEADERS = upstr('ACCESS-CONTROL-REQUEST-HEADERS')
ACCESS_CONTROL_REQUEST_METHOD = upstr('ACCESS-CONTROL-REQUEST-METHOD')
AGE = upstr('AGE')
ALLOW = upstr('ALLOW')
AUTHORIZATION = upstr('AUTHORIZATION')
CACHE_CONTROL = upstr('CACHE-CONTROL')
CONNECTION = upstr('CONNECTION')
CONTENT_DISPOSITION = upstr('CONTENT-DISPOSITION')
CONTENT_ENCODING = upstr('CONTENT-ENCODING')
CONTENT_LANGUAGE = upstr('CONTENT-LANGUAGE')
CONTENT_LENGTH = upstr('CONTENT-LENGTH')
CONTENT_LOCATION = upstr('CONTENT-LOCATION')
CONTENT_MD5 = upstr('CONTENT-MD5')
CONTENT_RANGE = upstr('CONTENT-RANGE')
CONTENT_TRANSFER_ENCODING = upstr('CONTENT-TRANSFER-ENCODING')
CONTENT_TYPE = upstr('CONTENT-TYPE')
COOKIE = upstr('COOKIE')
DATE = upstr('DATE')
DESTINATION = upstr('DESTINATION')
DIGEST = upstr('DIGEST')
ETAG = upstr('ETAG')
EXPECT = upstr('EXPECT')
EXPIRES = upstr('EXPIRES')
FROM = upstr('FROM')
HOST = upstr('HOST')
IF_MATCH = upstr('IF-MATCH')
IF_MODIFIED_SINCE = upstr('IF-MODIFIED-SINCE')
IF_NONE_MATCH = upstr('IF-NONE-MATCH')
IF_RANGE = upstr('IF-RANGE')
IF_UNMODIFIED_SINCE = upstr('IF-UNMODIFIED-SINCE')
KEEP_ALIVE = upstr('KEEP-ALIVE')
LAST_EVENT_ID = upstr('LAST-EVENT-ID')
LAST_MODIFIED = upstr('LAST-MODIFIED')
LINK = upstr('LINK')
LOCATION = upstr('LOCATION')
MAX_FORWARDS = upstr('MAX-FORWARDS')
ORIGIN = upstr('ORIGIN')
PRAGMA = upstr('PRAGMA')
PROXY_AUTHENTICATE = upstr('PROXY-AUTHENTICATE')
PROXY_AUTHORIZATION = upstr('PROXY-AUTHORIZATION')
RANGE = upstr('RANGE')
REFERER = upstr('REFERER')
RETRY_AFTER = upstr('RETRY-AFTER')
SEC_WEBSOCKET_ACCEPT = upstr('SEC-WEBSOCKET-ACCEPT')
SEC_WEBSOCKET_VERSION = upstr('SEC-WEBSOCKET-VERSION')
SEC_WEBSOCKET_PROTOCOL = upstr('SEC-WEBSOCKET-PROTOCOL')
SEC_WEBSOCKET_KEY = upstr('SEC-WEBSOCKET-KEY')
SEC_WEBSOCKET_KEY1 = upstr('SEC-WEBSOCKET-KEY1')
SERVER = upstr('SERVER')
SET_COOKIE = upstr('SET-COOKIE')
TE = upstr('TE')
TRAILER = upstr('TRAILER')
TRANSFER_ENCODING = upstr('TRANSFER-ENCODING')
UPGRADE = upstr('UPGRADE')
WEBSOCKET = upstr('WEBSOCKET')
URI = upstr('URI')
USER_AGENT = upstr('USER-AGENT')
VARY = upstr('VARY')
VIA = upstr('VIA')
WANT_DIGEST = upstr('WANT-DIGEST')
WARNING = upstr('WARNING')
WWW_AUTHENTICATE = upstr('WWW-AUTHENTICATE')
|
kehao95/Wechat_LearnHelper
|
src/env/lib/python3.5/site-packages/aiohttp/hdrs.py
|
Python
|
gpl-3.0
| 3,148
|
#!/usr/bin/env python
#
# Migration Stream Analyzer
#
# Copyright (c) 2015 Alexander Graf <agraf@suse.de>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <http://www.gnu.org/licenses/>.
import numpy as np
import json
import os
import argparse
import collections
import pprint
def mkdir_p(path):
try:
os.makedirs(path)
except OSError:
pass
class MigrationFile(object):
def __init__(self, filename):
self.filename = filename
self.file = open(self.filename, "rb")
def read64(self):
return np.asscalar(np.fromfile(self.file, count=1, dtype='>i8')[0])
def read32(self):
return np.asscalar(np.fromfile(self.file, count=1, dtype='>i4')[0])
def read16(self):
return np.asscalar(np.fromfile(self.file, count=1, dtype='>i2')[0])
def read8(self):
return np.asscalar(np.fromfile(self.file, count=1, dtype='>i1')[0])
def readstr(self, len = None):
if len is None:
len = self.read8()
if len == 0:
return ""
return np.fromfile(self.file, count=1, dtype=('S%d' % len))[0]
def readvar(self, size = None):
if size is None:
size = self.read8()
if size == 0:
return ""
value = self.file.read(size)
if len(value) != size:
raise Exception("Unexpected end of %s at 0x%x" % (self.filename, self.file.tell()))
return value
def tell(self):
return self.file.tell()
# The VMSD description is at the end of the file, after EOF. Look for
# the last NULL byte, then for the beginning brace of JSON.
def read_migration_debug_json(self):
QEMU_VM_VMDESCRIPTION = 0x06
# Remember the offset in the file when we started
entrypos = self.file.tell()
# Read the last 10MB
self.file.seek(0, os.SEEK_END)
endpos = self.file.tell()
self.file.seek(max(-endpos, -10 * 1024 * 1024), os.SEEK_END)
datapos = self.file.tell()
data = self.file.read()
# The full file read closed the file as well, reopen it
self.file = open(self.filename, "rb")
# Find the last NULL byte, then the first brace after that. This should
# be the beginning of our JSON data.
nulpos = data.rfind("\0")
jsonpos = data.find("{", nulpos)
# Check backwards from there and see whether we guessed right
self.file.seek(datapos + jsonpos - 5, 0)
if self.read8() != QEMU_VM_VMDESCRIPTION:
raise Exception("No Debug Migration device found")
jsonlen = self.read32()
# Seek back to where we were at the beginning
self.file.seek(entrypos, 0)
return data[jsonpos:jsonpos + jsonlen]
def close(self):
self.file.close()
class RamSection(object):
RAM_SAVE_FLAG_COMPRESS = 0x02
RAM_SAVE_FLAG_MEM_SIZE = 0x04
RAM_SAVE_FLAG_PAGE = 0x08
RAM_SAVE_FLAG_EOS = 0x10
RAM_SAVE_FLAG_CONTINUE = 0x20
RAM_SAVE_FLAG_XBZRLE = 0x40
RAM_SAVE_FLAG_HOOK = 0x80
def __init__(self, file, version_id, ramargs, section_key):
if version_id != 4:
raise Exception("Unknown RAM version %d" % version_id)
self.file = file
self.section_key = section_key
self.TARGET_PAGE_SIZE = ramargs['page_size']
self.dump_memory = ramargs['dump_memory']
self.write_memory = ramargs['write_memory']
self.sizeinfo = collections.OrderedDict()
self.data = collections.OrderedDict()
self.data['section sizes'] = self.sizeinfo
self.name = ''
if self.write_memory:
self.files = { }
if self.dump_memory:
self.memory = collections.OrderedDict()
self.data['memory'] = self.memory
def __repr__(self):
return self.data.__repr__()
def __str__(self):
return self.data.__str__()
def getDict(self):
return self.data
def read(self):
# Read all RAM sections
while True:
addr = self.file.read64()
flags = addr & (self.TARGET_PAGE_SIZE - 1)
addr &= ~(self.TARGET_PAGE_SIZE - 1)
if flags & self.RAM_SAVE_FLAG_MEM_SIZE:
while True:
namelen = self.file.read8()
# We assume that no RAM chunk is big enough to ever
# hit the first byte of the address, so when we see
# a zero here we know it has to be an address, not the
# length of the next block.
if namelen == 0:
self.file.file.seek(-1, 1)
break
self.name = self.file.readstr(len = namelen)
len = self.file.read64()
self.sizeinfo[self.name] = '0x%016x' % len
if self.write_memory:
print self.name
mkdir_p('./' + os.path.dirname(self.name))
f = open('./' + self.name, "wb")
f.truncate(0)
f.truncate(len)
self.files[self.name] = f
flags &= ~self.RAM_SAVE_FLAG_MEM_SIZE
if flags & self.RAM_SAVE_FLAG_COMPRESS:
if flags & self.RAM_SAVE_FLAG_CONTINUE:
flags &= ~self.RAM_SAVE_FLAG_CONTINUE
else:
self.name = self.file.readstr()
fill_char = self.file.read8()
# The page in question is filled with fill_char now
if self.write_memory and fill_char != 0:
self.files[self.name].seek(addr, os.SEEK_SET)
self.files[self.name].write(chr(fill_char) * self.TARGET_PAGE_SIZE)
if self.dump_memory:
self.memory['%s (0x%016x)' % (self.name, addr)] = 'Filled with 0x%02x' % fill_char
flags &= ~self.RAM_SAVE_FLAG_COMPRESS
elif flags & self.RAM_SAVE_FLAG_PAGE:
if flags & self.RAM_SAVE_FLAG_CONTINUE:
flags &= ~self.RAM_SAVE_FLAG_CONTINUE
else:
self.name = self.file.readstr()
if self.write_memory or self.dump_memory:
data = self.file.readvar(size = self.TARGET_PAGE_SIZE)
else: # Just skip RAM data
self.file.file.seek(self.TARGET_PAGE_SIZE, 1)
if self.write_memory:
self.files[self.name].seek(addr, os.SEEK_SET)
self.files[self.name].write(data)
if self.dump_memory:
hexdata = " ".join("{0:02x}".format(ord(c)) for c in data)
self.memory['%s (0x%016x)' % (self.name, addr)] = hexdata
flags &= ~self.RAM_SAVE_FLAG_PAGE
elif flags & self.RAM_SAVE_FLAG_XBZRLE:
raise Exception("XBZRLE RAM compression is not supported yet")
elif flags & self.RAM_SAVE_FLAG_HOOK:
raise Exception("RAM hooks don't make sense with files")
# End of RAM section
if flags & self.RAM_SAVE_FLAG_EOS:
break
if flags != 0:
raise Exception("Unknown RAM flags: %x" % flags)
def __del__(self):
if self.write_memory:
for key in self.files:
self.files[key].close()
class HTABSection(object):
HASH_PTE_SIZE_64 = 16
def __init__(self, file, version_id, device, section_key):
if version_id != 1:
raise Exception("Unknown HTAB version %d" % version_id)
self.file = file
self.section_key = section_key
def read(self):
header = self.file.read32()
if (header > 0):
# First section, just the hash shift
return
# Read until end marker
while True:
index = self.file.read32()
n_valid = self.file.read16()
n_invalid = self.file.read16()
if index == 0 and n_valid == 0 and n_invalid == 0:
break
self.file.readvar(n_valid * self.HASH_PTE_SIZE_64)
def getDict(self):
return ""
class ConfigurationSection(object):
def __init__(self, file):
self.file = file
def read(self):
name_len = self.file.read32()
name = self.file.readstr(len = name_len)
class VMSDFieldGeneric(object):
def __init__(self, desc, file):
self.file = file
self.desc = desc
self.data = ""
def __repr__(self):
return str(self.__str__())
def __str__(self):
return " ".join("{0:02x}".format(ord(c)) for c in self.data)
def getDict(self):
return self.__str__()
def read(self):
size = int(self.desc['size'])
self.data = self.file.readvar(size)
return self.data
class VMSDFieldInt(VMSDFieldGeneric):
def __init__(self, desc, file):
super(VMSDFieldInt, self).__init__(desc, file)
self.size = int(desc['size'])
self.format = '0x%%0%dx' % (self.size * 2)
self.sdtype = '>i%d' % self.size
self.udtype = '>u%d' % self.size
def __repr__(self):
if self.data < 0:
return ('%s (%d)' % ((self.format % self.udata), self.data))
else:
return self.format % self.data
def __str__(self):
return self.__repr__()
def getDict(self):
return self.__str__()
def read(self):
super(VMSDFieldInt, self).read()
self.sdata = np.fromstring(self.data, count=1, dtype=(self.sdtype))[0]
self.udata = np.fromstring(self.data, count=1, dtype=(self.udtype))[0]
self.data = self.sdata
return self.data
class VMSDFieldUInt(VMSDFieldInt):
def __init__(self, desc, file):
super(VMSDFieldUInt, self).__init__(desc, file)
def read(self):
super(VMSDFieldUInt, self).read()
self.data = self.udata
return self.data
class VMSDFieldIntLE(VMSDFieldInt):
def __init__(self, desc, file):
super(VMSDFieldIntLE, self).__init__(desc, file)
self.dtype = '<i%d' % self.size
class VMSDFieldBool(VMSDFieldGeneric):
def __init__(self, desc, file):
super(VMSDFieldBool, self).__init__(desc, file)
def __repr__(self):
return self.data.__repr__()
def __str__(self):
return self.data.__str__()
def getDict(self):
return self.data
def read(self):
super(VMSDFieldBool, self).read()
if self.data[0] == 0:
self.data = False
else:
self.data = True
return self.data
class VMSDFieldStruct(VMSDFieldGeneric):
QEMU_VM_SUBSECTION = 0x05
def __init__(self, desc, file):
super(VMSDFieldStruct, self).__init__(desc, file)
self.data = collections.OrderedDict()
# When we see compressed array elements, unfold them here
new_fields = []
for field in self.desc['struct']['fields']:
if not 'array_len' in field:
new_fields.append(field)
continue
array_len = field.pop('array_len')
field['index'] = 0
new_fields.append(field)
for i in xrange(1, array_len):
c = field.copy()
c['index'] = i
new_fields.append(c)
self.desc['struct']['fields'] = new_fields
def __repr__(self):
return self.data.__repr__()
def __str__(self):
return self.data.__str__()
def read(self):
for field in self.desc['struct']['fields']:
try:
reader = vmsd_field_readers[field['type']]
except:
reader = VMSDFieldGeneric
field['data'] = reader(field, self.file)
field['data'].read()
if 'index' in field:
if field['name'] not in self.data:
self.data[field['name']] = []
a = self.data[field['name']]
if len(a) != int(field['index']):
raise Exception("internal index of data field unmatched (%d/%d)" % (len(a), int(field['index'])))
a.append(field['data'])
else:
self.data[field['name']] = field['data']
if 'subsections' in self.desc['struct']:
for subsection in self.desc['struct']['subsections']:
if self.file.read8() != self.QEMU_VM_SUBSECTION:
raise Exception("Subsection %s not found at offset %x" % ( subsection['vmsd_name'], self.file.tell()))
name = self.file.readstr()
version_id = self.file.read32()
self.data[name] = VMSDSection(self.file, version_id, subsection, (name, 0))
self.data[name].read()
def getDictItem(self, value):
# Strings would fall into the array category, treat
# them specially
if value.__class__ is ''.__class__:
return value
try:
return self.getDictOrderedDict(value)
except:
try:
return self.getDictArray(value)
except:
try:
return value.getDict()
except:
return value
def getDictArray(self, array):
r = []
for value in array:
r.append(self.getDictItem(value))
return r
def getDictOrderedDict(self, dict):
r = collections.OrderedDict()
for (key, value) in dict.items():
r[key] = self.getDictItem(value)
return r
def getDict(self):
return self.getDictOrderedDict(self.data)
vmsd_field_readers = {
"bool" : VMSDFieldBool,
"int8" : VMSDFieldInt,
"int16" : VMSDFieldInt,
"int32" : VMSDFieldInt,
"int32 equal" : VMSDFieldInt,
"int32 le" : VMSDFieldIntLE,
"int64" : VMSDFieldInt,
"uint8" : VMSDFieldUInt,
"uint16" : VMSDFieldUInt,
"uint32" : VMSDFieldUInt,
"uint32 equal" : VMSDFieldUInt,
"uint64" : VMSDFieldUInt,
"int64 equal" : VMSDFieldInt,
"uint8 equal" : VMSDFieldInt,
"uint16 equal" : VMSDFieldInt,
"float64" : VMSDFieldGeneric,
"timer" : VMSDFieldGeneric,
"buffer" : VMSDFieldGeneric,
"unused_buffer" : VMSDFieldGeneric,
"bitmap" : VMSDFieldGeneric,
"struct" : VMSDFieldStruct,
"unknown" : VMSDFieldGeneric,
}
class VMSDSection(VMSDFieldStruct):
def __init__(self, file, version_id, device, section_key):
self.file = file
self.data = ""
self.vmsd_name = ""
self.section_key = section_key
desc = device
if 'vmsd_name' in device:
self.vmsd_name = device['vmsd_name']
# A section really is nothing but a FieldStruct :)
super(VMSDSection, self).__init__({ 'struct' : desc }, file)
###############################################################################
class MigrationDump(object):
QEMU_VM_FILE_MAGIC = 0x5145564d
QEMU_VM_FILE_VERSION = 0x00000003
QEMU_VM_EOF = 0x00
QEMU_VM_SECTION_START = 0x01
QEMU_VM_SECTION_PART = 0x02
QEMU_VM_SECTION_END = 0x03
QEMU_VM_SECTION_FULL = 0x04
QEMU_VM_SUBSECTION = 0x05
QEMU_VM_VMDESCRIPTION = 0x06
QEMU_VM_CONFIGURATION = 0x07
QEMU_VM_SECTION_FOOTER= 0x7e
def __init__(self, filename):
self.section_classes = { ( 'ram', 0 ) : [ RamSection, None ],
( 'spapr/htab', 0) : ( HTABSection, None ) }
self.filename = filename
self.vmsd_desc = None
def read(self, desc_only = False, dump_memory = False, write_memory = False):
# Read in the whole file
file = MigrationFile(self.filename)
# File magic
data = file.read32()
if data != self.QEMU_VM_FILE_MAGIC:
raise Exception("Invalid file magic %x" % data)
# Version (has to be v3)
data = file.read32()
if data != self.QEMU_VM_FILE_VERSION:
raise Exception("Invalid version number %d" % data)
self.load_vmsd_json(file)
# Read sections
self.sections = collections.OrderedDict()
if desc_only:
return
ramargs = {}
ramargs['page_size'] = self.vmsd_desc['page_size']
ramargs['dump_memory'] = dump_memory
ramargs['write_memory'] = write_memory
self.section_classes[('ram',0)][1] = ramargs
while True:
section_type = file.read8()
if section_type == self.QEMU_VM_EOF:
break
elif section_type == self.QEMU_VM_CONFIGURATION:
section = ConfigurationSection(file)
section.read()
elif section_type == self.QEMU_VM_SECTION_START or section_type == self.QEMU_VM_SECTION_FULL:
section_id = file.read32()
name = file.readstr()
instance_id = file.read32()
version_id = file.read32()
section_key = (name, instance_id)
classdesc = self.section_classes[section_key]
section = classdesc[0](file, version_id, classdesc[1], section_key)
self.sections[section_id] = section
section.read()
elif section_type == self.QEMU_VM_SECTION_PART or section_type == self.QEMU_VM_SECTION_END:
section_id = file.read32()
self.sections[section_id].read()
elif section_type == self.QEMU_VM_SECTION_FOOTER:
read_section_id = file.read32()
if read_section_id != section_id:
raise Exception("Mismatched section footer: %x vs %x" % (read_section_id, section_id))
else:
raise Exception("Unknown section type: %d" % section_type)
file.close()
def load_vmsd_json(self, file):
vmsd_json = file.read_migration_debug_json()
self.vmsd_desc = json.loads(vmsd_json, object_pairs_hook=collections.OrderedDict)
for device in self.vmsd_desc['devices']:
key = (device['name'], device['instance_id'])
value = ( VMSDSection, device )
self.section_classes[key] = value
def getDict(self):
r = collections.OrderedDict()
for (key, value) in self.sections.items():
key = "%s (%d)" % ( value.section_key[0], key )
r[key] = value.getDict()
return r
###############################################################################
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, VMSDFieldGeneric):
return str(o)
return json.JSONEncoder.default(self, o)
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file", help='migration dump to read from', required=True)
parser.add_argument("-m", "--memory", help='dump RAM contents as well', action='store_true')
parser.add_argument("-d", "--dump", help='what to dump ("state" or "desc")', default='state')
parser.add_argument("-x", "--extract", help='extract contents into individual files', action='store_true')
args = parser.parse_args()
jsonenc = JSONEncoder(indent=4, separators=(',', ': '))
if args.extract:
dump = MigrationDump(args.file)
dump.read(desc_only = True)
print "desc.json"
f = open("desc.json", "wb")
f.truncate()
f.write(jsonenc.encode(dump.vmsd_desc))
f.close()
dump.read(write_memory = True)
dict = dump.getDict()
print "state.json"
f = open("state.json", "wb")
f.truncate()
f.write(jsonenc.encode(dict))
f.close()
elif args.dump == "state":
dump = MigrationDump(args.file)
dump.read(dump_memory = args.memory)
dict = dump.getDict()
print jsonenc.encode(dict)
elif args.dump == "desc":
dump = MigrationDump(args.file)
dump.read(desc_only = True)
print jsonenc.encode(dump.vmsd_desc)
else:
raise Exception("Please specify either -x, -d state or -d dump")
|
gongleiarei/qemu
|
scripts/analyze-migration.py
|
Python
|
gpl-2.0
| 20,683
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
class XmlNamespace(object):
def __init__(self, namespace):
self._namespace = namespace
def name(self, tag):
return "{%s}%s" % (self._namespace, tag)
class XmlNamer(object):
"""Initialize me with a DOM node or a DOM document node (the
toplevel node you get when parsing an XML file). Then use me
to generate fully qualified XML names.
>>> xml = '<office:document-styles xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0"></office:document-styles>'
>>> from lxml import etree
>>> namer = XmlNamer(etree.fromstring(xml))
>>> namer.name('office', 'blah')
{urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah
>>> namer.name('office:blah')
{urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah
I can also give you XmlNamespace objects if you give me the abbreviated
namespace name. These are useful if you need to reference a namespace
continuously.
>>> office_ns = namer.namespace('office')
>>> office_ns.name('foo')
{urn:oasis:names:tc:opendocument:xmlns:office:1.0}foo
"""
def __init__(self, dom_node):
# Allow the user to pass either a DOM node or the
# XML document node
if hasattr(dom_node, 'nsmap'):
self.nsmap = dom_node.nsmap
else:
self.nsmap = dom_node.getroot().nsmap
def name(self, namespace_shortcut, tag=None):
# If the user doesn't pass an argument into 'tag'
# then namespace_shortcut contains a tag of the form
# 'short-namespace:tag'
if tag is None:
try:
namespace_shortcut, tag = namespace_shortcut.split(':')
except ValueError:
# If there is no namespace in namespace_shortcut.
tag = namespace_shortcut.lstrip("{}")
return tag
return "{%s}%s" % (self.nsmap[namespace_shortcut], tag)
def namespace(self, namespace_shortcut):
return XmlNamespace(self.nsmap[namespace_shortcut])
|
bluemini/kuma
|
vendor/packages/translate/storage/xml_name.py
|
Python
|
mpl-2.0
| 2,748
|
"""Tests for the Volumio integration."""
|
jawilson/home-assistant
|
tests/components/volumio/__init__.py
|
Python
|
apache-2.0
| 41
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Geometric distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util
class Geometric(distribution.Distribution):
"""Geometric distribution.
The Geometric distribution is parameterized by p, the probability of a
positive event. It represents the probability that in k + 1 Bernoulli trials,
the first k trials failed, before seeing a success.
The pmf of this distribution is:
#### Mathematical Details
```none
pmf(k; p) = (1 - p)**k * p
```
where:
* `p` is the success probability, `0 < p <= 1`, and,
* `k` is a non-negative integer.
"""
def __init__(self,
logits=None,
probs=None,
validate_args=False,
allow_nan_stats=True,
name="Geometric"):
"""Construct Geometric distributions.
Args:
logits: Floating-point `Tensor` with shape `[B1, ..., Bb]` where `b >= 0`
indicates the number of batch dimensions. Each entry represents logits
for the probability of success for independent Geometric distributions
and must be in the range `(-inf, inf]`. Only one of `logits` or `probs`
should be specified.
probs: Positive floating-point `Tensor` with shape `[B1, ..., Bb]`
where `b >= 0` indicates the number of batch dimensions. Each entry
represents the probability of success for independent Geometric
distributions and must be in the range `(0, 1]`. Only one of `logits`
or `probs` should be specified.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = locals()
with ops.name_scope(name, values=[logits, probs]):
self._logits, self._probs = distribution_util.get_logits_and_probs(
logits, probs, validate_args=validate_args, name=name)
with ops.control_dependencies(
[check_ops.assert_positive(self._probs)] if validate_args else []):
self._probs = array_ops.identity(self._probs, name="probs")
super(Geometric, self).__init__(
dtype=self._probs.dtype,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=[self._probs, self._logits],
name=name)
@property
def logits(self):
"""Log-odds of a `1` outcome (vs `0`)."""
return self._logits
@property
def probs(self):
"""Probability of a `1` outcome (vs `0`)."""
return self._probs
def _batch_shape_tensor(self):
return array_ops.shape(self._probs)
def _batch_shape(self):
return self.probs.get_shape()
def _event_shape_tensor(self):
return array_ops.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
def _sample_n(self, n, seed=None):
# Uniform variates must be sampled from the open-interval `(0, 1)` rather
# than `[0, 1)`. To do so, we use `np.finfo(self.dtype.as_numpy_dtype).tiny`
# because it is the smallest, positive, "normal" number. A "normal" number
# is such that the mantissa has an implicit leading 1. Normal, positive
# numbers x, y have the reasonable property that, `x + y >= max(x, y)`. In
# this case, a subnormal number (i.e., np.nextafter) can cause us to sample
# 0.
sampled = random_ops.random_uniform(
array_ops.concat([[n], array_ops.shape(self._probs)], 0),
minval=np.finfo(self.dtype.as_numpy_dtype).tiny,
maxval=1.,
seed=seed,
dtype=self.dtype)
return math_ops.floor(
math_ops.log(sampled) / math_ops.log1p(-self.probs))
def _cdf(self, counts):
if self.validate_args:
# We set `check_integer=False` since the CDF is defined on whole real
# line.
counts = math_ops.floor(
distribution_util.embed_check_nonnegative_discrete(
counts, check_integer=False))
counts *= array_ops.ones_like(self.probs)
return array_ops.where(
counts < 0.,
array_ops.zeros_like(counts),
-math_ops.expm1(
(counts + 1) * math_ops.log1p(-self.probs)))
def _log_prob(self, counts):
if self.validate_args:
counts = distribution_util.embed_check_nonnegative_discrete(
counts, check_integer=True)
counts *= array_ops.ones_like(self.probs)
probs = self.probs * array_ops.ones_like(counts)
safe_domain = array_ops.where(
math_ops.equal(counts, 0.),
array_ops.zeros_like(probs),
probs)
return counts * math_ops.log1p(-safe_domain) + math_ops.log(probs)
def _entropy(self):
probs = self._probs
if self.validate_args:
probs = control_flow_ops.with_dependencies(
[check_ops.assert_less(
probs,
constant_op.constant(1., probs.dtype),
message="Entropy is undefined when logits = inf or probs = 1.")],
probs)
# Claim: entropy(p) = softplus(s)/p - s
# where s=logits and p=probs.
#
# Proof:
#
# entropy(p)
# := -[(1-p)log(1-p) + plog(p)]/p
# = -[log(1-p) + plog(p/(1-p))]/p
# = -[-softplus(s) + ps]/p
# = softplus(s)/p - s
#
# since,
# log[1-sigmoid(s)]
# = log[1/(1+exp(s)]
# = -log[1+exp(s)]
# = -softplus(s)
#
# using the fact that,
# 1-sigmoid(s) = sigmoid(-s) = 1/(1+exp(s))
return nn.softplus(self.logits) / probs - self.logits
def _mean(self):
return math_ops.exp(-self.logits)
def _variance(self):
return self._mean() / self.probs
def _mode(self):
return array_ops.zeros(self.batch_shape_tensor(), dtype=self.dtype)
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/tensorflow/contrib/distributions/python/ops/geometric.py
|
Python
|
bsd-2-clause
| 7,545
|
#!/usr/bin/python
"""
A general tool for converting data from the
dictionary format to an (n x k) python list that's
ready for training an sklearn algorithm
n--no. of key-value pairs in dictionary
k--no. of features being extracted
dictionary keys are names of persons in dataset
dictionary values are dictionaries, where each
key-value pair in the dict is the name
of a feature, and its value for that person
In addition to converting a dictionary to a numpy
array, you may want to separate the labels from the
features--this is what targetFeatureSplit is for
so, if you want to have the poi label as the target,
and the features you want to use are the person's
salary and bonus, here's what you would do:
feature_list = ["poi", "salary", "bonus"]
data_array = featureFormat( data_dictionary, feature_list )
label, features = targetFeatureSplit(data_array)
the line above (targetFeatureSplit) assumes that the
label is the _first_ item in feature_list--very important
that poi is listed first!
"""
import numpy as np
def featureFormat( dictionary, features, remove_NaN=True, remove_all_zeroes=True, remove_any_zeroes=False, sort_keys = False):
""" convert dictionary to numpy array of features
remove_NaN = True will convert "NaN" string to 0.0
remove_all_zeroes = True will omit any data points for which
all the features you seek are 0.0
remove_any_zeroes = True will omit any data points for which
any of the features you seek are 0.0
sort_keys = True sorts keys by alphabetical order. Setting the value as
a string opens the corresponding pickle file with a preset key
order (this is used for Python 3 compatibility, and sort_keys
should be left as False for the course mini-projects).
NOTE: first feature is assumed to be 'poi' and is not checked for
removal for zero or missing values.
"""
return_list = []
# Key order - first branch is for Python 3 compatibility on mini-projects,
# second branch is for compatibility on final project.
if isinstance(sort_keys, str):
import pickle
keys = pickle.load(open(sort_keys, "rb"))
elif sort_keys:
keys = sorted(dictionary.keys())
else:
keys = dictionary.keys()
for key in keys:
tmp_list = []
for feature in features:
try:
dictionary[key][feature]
except KeyError:
print "error: key ", feature, " not present"
return
value = dictionary[key][feature]
if value=="NaN" and remove_NaN:
value = 0
tmp_list.append( float(value) )
# Logic for deciding whether or not to add the data point.
append = True
# exclude 'poi' class as criteria.
if features[0] == 'poi':
test_list = tmp_list[1:]
else:
test_list = tmp_list
### if all features are zero and you want to remove
### data points that are all zero, do that here
if remove_all_zeroes:
append = False
for item in test_list:
if item != 0 and item != "NaN":
append = True
break
### if any features for a given data point are zero
### and you want to remove data points with any zeroes,
### handle that here
if remove_any_zeroes:
if 0 in test_list or "NaN" in test_list:
append = False
### Append the data point if flagged for addition.
if append:
return_list.append( np.array(tmp_list) )
return np.array(return_list)
def targetFeatureSplit( data ):
"""
given a numpy array like the one returned from
featureFormat, separate out the first feature
and put it into its own list (this should be the
quantity you want to predict)
return targets and features as separate lists
(sklearn can generally handle both lists and numpy arrays as
input formats when training/predicting)
"""
target = []
features = []
for item in data:
target.append( item[0] )
features.append( item[1:] )
return target, features
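# Illustrative usage sketch: how featureFormat and targetFeatureSplit are chained.
# The dictionary below is hypothetical and only assumes the structure described
# in the module docstring above.
if __name__ == '__main__':
    example_dict = {
        "PERSON A": {"poi": 1, "salary": 100000, "bonus": 50000},
        "PERSON B": {"poi": 0, "salary": "NaN", "bonus": 20000},
    }
    feature_list = ["poi", "salary", "bonus"]
    data = featureFormat(example_dict, feature_list, sort_keys=True)
    labels, features = targetFeatureSplit(data)
    # labels -> [1.0, 0.0]; each entry of features holds [salary, bonus]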
|
harish-garg/Machine-Learning
|
udacity/enron/ud120-projects-master/tools/feature_format.py
|
Python
|
mit
| 4,390
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
import ansible.constants as C
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.inventory.expand_hosts import detect_range
from ansible.inventory.expand_hosts import expand_hostname_range
from ansible import errors
from ansible import utils
import shlex
import re
import ast
class InventoryParser(object):
"""
Host inventory for ansible.
"""
def __init__(self, filename=C.DEFAULT_HOST_LIST):
with open(filename) as fh:
self.lines = fh.readlines()
self.groups = {}
self.hosts = {}
self._parse()
def _parse(self):
self._parse_base_groups()
self._parse_group_children()
self._add_allgroup_children()
self._parse_group_variables()
return self.groups
@staticmethod
def _parse_value(v):
if "#" not in v:
try:
return ast.literal_eval(v)
# Using explicit exceptions.
            # Likely a string that literal_eval does not like. We will then just set it.
except ValueError:
                # literal_eval rejected the value as malformed; fall back to the raw string.
pass
except SyntaxError:
# Is this a hash with an equals at the end?
pass
return v
# [webservers]
# alpha
# beta:2345
# gamma sudo=True user=root
# delta asdf=jkl favcolor=red
def _add_allgroup_children(self):
for group in self.groups.values():
if group.depth == 0 and group.name != 'all':
self.groups['all'].add_child_group(group)
def _parse_base_groups(self):
# FIXME: refactor
ungrouped = Group(name='ungrouped')
all = Group(name='all')
all.add_child_group(ungrouped)
self.groups = dict(all=all, ungrouped=ungrouped)
active_group_name = 'ungrouped'
for line in self.lines:
line = utils.before_comment(line).strip()
if line.startswith("[") and line.endswith("]"):
active_group_name = line.replace("[","").replace("]","")
if ":vars" in line or ":children" in line:
active_group_name = active_group_name.rsplit(":", 1)[0]
if active_group_name not in self.groups:
new_group = self.groups[active_group_name] = Group(name=active_group_name)
active_group_name = None
elif active_group_name not in self.groups:
new_group = self.groups[active_group_name] = Group(name=active_group_name)
elif line.startswith(";") or line == '':
pass
elif active_group_name:
tokens = shlex.split(line)
if len(tokens) == 0:
continue
hostname = tokens[0]
port = C.DEFAULT_REMOTE_PORT
                # Two cases to check:
                # 0. A hostname that contains a range pseudo-code and a port
                # 1. A hostname that contains just a port
if hostname.count(":") > 1:
                    # Possibly an IPv6 address, or maybe a host line with multiple ranges
# IPv6 with Port XXX:XXX::XXX.port
# FQDN foo.example.com
if hostname.count(".") == 1:
(hostname, port) = hostname.rsplit(".", 1)
elif ("[" in hostname and
"]" in hostname and
":" in hostname and
(hostname.rindex("]") < hostname.rindex(":")) or
("]" not in hostname and ":" in hostname)):
(hostname, port) = hostname.rsplit(":", 1)
hostnames = []
if detect_range(hostname):
hostnames = expand_hostname_range(hostname)
else:
hostnames = [hostname]
for hn in hostnames:
host = None
if hn in self.hosts:
host = self.hosts[hn]
else:
host = Host(name=hn, port=port)
self.hosts[hn] = host
if len(tokens) > 1:
for t in tokens[1:]:
if t.startswith('#'):
break
try:
(k,v) = t.split("=", 1)
except ValueError, e:
raise errors.AnsibleError("Invalid ini entry: %s - %s" % (t, str(e)))
host.set_variable(k, self._parse_value(v))
self.groups[active_group_name].add_host(host)
# [southeast:children]
# atlanta
# raleigh
def _parse_group_children(self):
group = None
for line in self.lines:
line = line.strip()
if line is None or line == '':
continue
if line.startswith("[") and ":children]" in line:
line = line.replace("[","").replace(":children]","")
group = self.groups.get(line, None)
if group is None:
group = self.groups[line] = Group(name=line)
elif line.startswith("#") or line.startswith(";"):
pass
elif line.startswith("["):
group = None
elif group:
kid_group = self.groups.get(line, None)
if kid_group is None:
raise errors.AnsibleError("child group is not defined: (%s)" % line)
else:
group.add_child_group(kid_group)
# [webservers:vars]
# http_port=1234
# maxRequestsPerChild=200
def _parse_group_variables(self):
group = None
for line in self.lines:
line = line.strip()
if line.startswith("[") and ":vars]" in line:
line = line.replace("[","").replace(":vars]","")
group = self.groups.get(line, None)
if group is None:
raise errors.AnsibleError("can't add vars to undefined group: %s" % line)
elif line.startswith("#") or line.startswith(";"):
pass
elif line.startswith("["):
group = None
elif line == '':
pass
elif group:
if "=" not in line:
raise errors.AnsibleError("variables assigned to group must be in key=value form")
else:
(k, v) = [e.strip() for e in line.split("=", 1)]
group.set_variable(k, self._parse_value(v))
def get_host_variables(self, host):
return {}
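# Illustrative usage sketch (hypothetical inventory contents; assumes the legacy
# ansible 1.x package this module belongs to is importable, and uses the Group and
# Host classes imported above).
if __name__ == '__main__':
    import tempfile
    ini = "[webservers]\nalpha ansible_ssh_port=2222\nweb[01:03].example.com\n"
    with tempfile.NamedTemporaryFile(mode='w', suffix='.ini', delete=False) as tmp:
        tmp.write(ini)
    parser = InventoryParser(filename=tmp.name)
    # 'webservers' now holds alpha plus the expanded web01..web03 hosts
    print([h.name for h in parser.groups['webservers'].hosts])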
|
wulczer/ansible
|
lib/ansible/inventory/ini.py
|
Python
|
gpl-3.0
| 7,628
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: mso_schema_site_vrf_region
short_description: Manage site-local VRF regions in schema template
description:
- Manage site-local VRF regions in schema template on Cisco ACI Multi-Site.
author:
- Dag Wieers (@dagwieers)
version_added: '2.8'
options:
schema:
description:
- The name of the schema.
type: str
required: yes
site:
description:
- The name of the site.
type: str
required: yes
template:
description:
- The name of the template.
type: str
required: yes
vrf:
description:
- The name of the VRF.
type: str
region:
description:
- The name of the region to manage.
type: str
aliases: [ name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
seealso:
- module: mso_schema_site_vrf
- module: mso_schema_template_vrf
extends_documentation_fragment: mso
'''
EXAMPLES = r'''
- name: Add a new site VRF region
mso_schema_template_vrf_region:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema1
site: Site1
template: Template1
vrf: VRF1
region: us-west-1
state: present
delegate_to: localhost
- name: Remove a site VRF region
mso_schema_template_vrf_region:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema1
site: Site1
template: Template1
vrf: VRF1
region: us-west-1
state: absent
delegate_to: localhost
- name: Query a specific site VRF region
mso_schema_template_vrf_region:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema1
site: Site1
template: Template1
vrf: VRF1
region: us-west-1
state: query
delegate_to: localhost
register: query_result
- name: Query all site VRF regions
mso_schema_template_vrf_region:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema1
site: Site1
template: Template1
vrf: VRF1
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.mso import MSOModule, mso_argument_spec
def main():
argument_spec = mso_argument_spec()
argument_spec.update(
schema=dict(type='str', required=True),
site=dict(type='str', required=True),
template=dict(type='str', required=True),
vrf=dict(type='str', required=True),
region=dict(type='str', aliases=['name']), # This parameter is not required for querying all objects
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['region']],
['state', 'present', ['region']],
],
)
schema = module.params['schema']
site = module.params['site']
template = module.params['template']
vrf = module.params['vrf']
region = module.params['region']
state = module.params['state']
mso = MSOModule(module)
# Get schema_id
schema_obj = mso.get_obj('schemas', displayName=schema)
if not schema_obj:
mso.fail_json(msg="Provided schema '{0}' does not exist".format(schema))
schema_path = 'schemas/{id}'.format(**schema_obj)
schema_id = schema_obj['id']
# Get site
site_id = mso.lookup_site(site)
# Get site_idx
sites = [(s['siteId'], s['templateName']) for s in schema_obj['sites']]
if (site_id, template) not in sites:
mso.fail_json(msg="Provided site/template '{0}-{1}' does not exist. Existing sites/templates: {2}".format(site, template, ', '.join(sites)))
# Schema-access uses indexes
site_idx = sites.index((site_id, template))
# Path-based access uses site_id-template
site_template = '{0}-{1}'.format(site_id, template)
# Get VRF
vrf_ref = mso.vrf_ref(schema_id=schema_id, template=template, vrf=vrf)
vrfs = [v['vrfRef'] for v in schema_obj['sites'][site_idx]['vrfs']]
if vrf_ref not in vrfs:
mso.fail_json(msg="Provided vrf '{0}' does not exist. Existing vrfs: {1}".format(vrf, ', '.join(vrfs)))
vrf_idx = vrfs.index(vrf_ref)
# Get Region
regions = [r['name'] for r in schema_obj['sites'][site_idx]['vrfs'][vrf_idx]['regions']]
if region is not None and region in regions:
region_idx = regions.index(region)
region_path = '/sites/{0}/vrfs/{1}/regions/{2}'.format(site_template, vrf, region)
mso.existing = schema_obj['sites'][site_idx]['vrfs'][vrf_idx]['regions'][region_idx]
if state == 'query':
if region is None:
mso.existing = schema_obj['sites'][site_idx]['vrfs'][vrf_idx]['regions']
elif not mso.existing:
mso.fail_json(msg="Region '{region}' not found".format(region=region))
mso.exit_json()
regions_path = '/sites/{0}/vrfs/{1}/regions'.format(site_template, vrf)
ops = []
mso.previous = mso.existing
if state == 'absent':
if mso.existing:
mso.sent = mso.existing = {}
ops.append(dict(op='remove', path=region_path))
elif state == 'present':
payload = dict(
name=region,
)
mso.sanitize(payload, collate=True)
if mso.existing:
ops.append(dict(op='replace', path=region_path, value=mso.sent))
else:
ops.append(dict(op='add', path=regions_path + '/-', value=mso.sent))
mso.existing = mso.proposed
if not module.check_mode:
mso.request(schema_path, method='PATCH', data=ops)
mso.exit_json()
if __name__ == "__main__":
main()
|
alxgu/ansible
|
lib/ansible/modules/network/aci/mso_schema_site_vrf_region.py
|
Python
|
gpl-3.0
| 6,343
|
#!/usr/bin/env python
from modshogun import LongIntFeatures
from numpy import array, int64, all
# create dense matrix A
matrix=array([[1,2,3],[4,0,0],[0,0,0],[0,5,0],[0,0,6],[9,9,9]], dtype=int64)
parameter_list = [[matrix]]
# ... of type LongInt
def features_dense_longint_modular (A=matrix):
a=LongIntFeatures(A)
# get first feature vector and set it
a.set_feature_vector(array([1,4,0,0,0,9], dtype=int64), 0)
# get matrix
a_out = a.get_feature_matrix()
assert(all(a_out==A))
return a_out
if __name__=='__main__':
print('dense_longint')
features_dense_longint_modular(*parameter_list[0])
|
AzamYahya/shogun
|
examples/undocumented/python_modular/features_dense_longint_modular.py
|
Python
|
gpl-3.0
| 606
|
# Copyright Anne M. Archibald 2008
# Released under the scipy license
from __future__ import division, print_function, absolute_import
from numpy.testing import (assert_equal, assert_array_equal,
assert_almost_equal, assert_array_almost_equal, assert_, run_module_suite)
import numpy as np
from scipy.spatial import KDTree, Rectangle, distance_matrix, cKDTree
from scipy.spatial.ckdtree import cKDTreeNode
from scipy.spatial import minkowski_distance as distance
class ConsistencyTests:
def test_nearest(self):
x = self.x
d, i = self.kdtree.query(x, 1)
assert_almost_equal(d**2,np.sum((x-self.data[i])**2))
eps = 1e-8
assert_(np.all(np.sum((self.data-x[np.newaxis,:])**2,axis=1) > d**2-eps))
def test_m_nearest(self):
x = self.x
m = self.m
dd, ii = self.kdtree.query(x, m)
d = np.amax(dd)
i = ii[np.argmax(dd)]
assert_almost_equal(d**2,np.sum((x-self.data[i])**2))
eps = 1e-8
assert_equal(np.sum(np.sum((self.data-x[np.newaxis,:])**2,axis=1) < d**2+eps),m)
def test_points_near(self):
x = self.x
d = self.d
dd, ii = self.kdtree.query(x, k=self.kdtree.n, distance_upper_bound=d)
eps = 1e-8
hits = 0
for near_d, near_i in zip(dd,ii):
if near_d == np.inf:
continue
hits += 1
assert_almost_equal(near_d**2,np.sum((x-self.data[near_i])**2))
assert_(near_d < d+eps, "near_d=%g should be less than %g" % (near_d,d))
assert_equal(np.sum(np.sum((self.data-x[np.newaxis,:])**2,axis=1) < d**2+eps),hits)
def test_points_near_l1(self):
x = self.x
d = self.d
dd, ii = self.kdtree.query(x, k=self.kdtree.n, p=1, distance_upper_bound=d)
eps = 1e-8
hits = 0
for near_d, near_i in zip(dd,ii):
if near_d == np.inf:
continue
hits += 1
assert_almost_equal(near_d,distance(x,self.data[near_i],1))
assert_(near_d < d+eps, "near_d=%g should be less than %g" % (near_d,d))
assert_equal(np.sum(distance(self.data,x,1) < d+eps),hits)
def test_points_near_linf(self):
x = self.x
d = self.d
dd, ii = self.kdtree.query(x, k=self.kdtree.n, p=np.inf, distance_upper_bound=d)
eps = 1e-8
hits = 0
for near_d, near_i in zip(dd,ii):
if near_d == np.inf:
continue
hits += 1
assert_almost_equal(near_d,distance(x,self.data[near_i],np.inf))
assert_(near_d < d+eps, "near_d=%g should be less than %g" % (near_d,d))
assert_equal(np.sum(distance(self.data,x,np.inf) < d+eps),hits)
def test_approx(self):
x = self.x
k = self.k
eps = 0.1
d_real, i_real = self.kdtree.query(x, k)
d, i = self.kdtree.query(x, k, eps=eps)
assert_(np.all(d <= d_real*(1+eps)))
class test_random(ConsistencyTests):
def setUp(self):
self.n = 100
self.m = 4
np.random.seed(1234)
self.data = np.random.randn(self.n, self.m)
self.kdtree = KDTree(self.data,leafsize=2)
self.x = np.random.randn(self.m)
self.d = 0.2
self.k = 10
class test_random_far(test_random):
def setUp(self):
test_random.setUp(self)
self.x = np.random.randn(self.m)+10
class test_small(ConsistencyTests):
def setUp(self):
self.data = np.array([[0,0,0],
[0,0,1],
[0,1,0],
[0,1,1],
[1,0,0],
[1,0,1],
[1,1,0],
[1,1,1]])
self.kdtree = KDTree(self.data)
self.n = self.kdtree.n
self.m = self.kdtree.m
np.random.seed(1234)
self.x = np.random.randn(3)
self.d = 0.5
self.k = 4
def test_nearest(self):
assert_array_equal(
self.kdtree.query((0,0,0.1), 1),
(0.1,0))
def test_nearest_two(self):
assert_array_equal(
self.kdtree.query((0,0,0.1), 2),
([0.1,0.9],[0,1]))
class test_small_nonleaf(test_small):
def setUp(self):
test_small.setUp(self)
self.kdtree = KDTree(self.data,leafsize=1)
class test_small_compiled(test_small):
def setUp(self):
test_small.setUp(self)
self.kdtree = cKDTree(self.data)
class test_small_nonleaf_compiled(test_small):
def setUp(self):
test_small.setUp(self)
self.kdtree = cKDTree(self.data,leafsize=1)
class test_random_compiled(test_random):
def setUp(self):
test_random.setUp(self)
self.kdtree = cKDTree(self.data)
class test_random_far_compiled(test_random_far):
def setUp(self):
test_random_far.setUp(self)
self.kdtree = cKDTree(self.data)
class test_vectorization:
def setUp(self):
self.data = np.array([[0,0,0],
[0,0,1],
[0,1,0],
[0,1,1],
[1,0,0],
[1,0,1],
[1,1,0],
[1,1,1]])
self.kdtree = KDTree(self.data)
def test_single_query(self):
d, i = self.kdtree.query(np.array([0,0,0]))
assert_(isinstance(d,float))
assert_(np.issubdtype(i, int))
def test_vectorized_query(self):
d, i = self.kdtree.query(np.zeros((2,4,3)))
assert_equal(np.shape(d),(2,4))
assert_equal(np.shape(i),(2,4))
def test_single_query_multiple_neighbors(self):
s = 23
kk = self.kdtree.n+s
d, i = self.kdtree.query(np.array([0,0,0]),k=kk)
assert_equal(np.shape(d),(kk,))
assert_equal(np.shape(i),(kk,))
assert_(np.all(~np.isfinite(d[-s:])))
assert_(np.all(i[-s:] == self.kdtree.n))
def test_vectorized_query_multiple_neighbors(self):
s = 23
kk = self.kdtree.n+s
d, i = self.kdtree.query(np.zeros((2,4,3)),k=kk)
assert_equal(np.shape(d),(2,4,kk))
assert_equal(np.shape(i),(2,4,kk))
assert_(np.all(~np.isfinite(d[:,:,-s:])))
assert_(np.all(i[:,:,-s:] == self.kdtree.n))
def test_single_query_all_neighbors(self):
d, i = self.kdtree.query([0,0,0],k=None,distance_upper_bound=1.1)
assert_(isinstance(d,list))
assert_(isinstance(i,list))
def test_vectorized_query_all_neighbors(self):
d, i = self.kdtree.query(np.zeros((2,4,3)),k=None,distance_upper_bound=1.1)
assert_equal(np.shape(d),(2,4))
assert_equal(np.shape(i),(2,4))
assert_(isinstance(d[0,0],list))
assert_(isinstance(i[0,0],list))
class test_vectorization_compiled:
def setUp(self):
self.data = np.array([[0,0,0],
[0,0,1],
[0,1,0],
[0,1,1],
[1,0,0],
[1,0,1],
[1,1,0],
[1,1,1]])
self.kdtree = cKDTree(self.data)
def test_single_query(self):
d, i = self.kdtree.query([0,0,0])
assert_(isinstance(d,float))
assert_(isinstance(i,int))
def test_vectorized_query(self):
d, i = self.kdtree.query(np.zeros((2,4,3)))
assert_equal(np.shape(d),(2,4))
assert_equal(np.shape(i),(2,4))
def test_vectorized_query_noncontiguous_values(self):
np.random.seed(1234)
qs = np.random.randn(3,1000).T
ds, i_s = self.kdtree.query(qs)
for q, d, i in zip(qs,ds,i_s):
assert_equal(self.kdtree.query(q),(d,i))
def test_single_query_multiple_neighbors(self):
s = 23
kk = self.kdtree.n+s
d, i = self.kdtree.query([0,0,0],k=kk)
assert_equal(np.shape(d),(kk,))
assert_equal(np.shape(i),(kk,))
assert_(np.all(~np.isfinite(d[-s:])))
assert_(np.all(i[-s:] == self.kdtree.n))
def test_vectorized_query_multiple_neighbors(self):
s = 23
kk = self.kdtree.n+s
d, i = self.kdtree.query(np.zeros((2,4,3)),k=kk)
assert_equal(np.shape(d),(2,4,kk))
assert_equal(np.shape(i),(2,4,kk))
assert_(np.all(~np.isfinite(d[:,:,-s:])))
assert_(np.all(i[:,:,-s:] == self.kdtree.n))
class ball_consistency:
def test_in_ball(self):
l = self.T.query_ball_point(self.x, self.d, p=self.p, eps=self.eps)
for i in l:
assert_(distance(self.data[i],self.x,self.p) <= self.d*(1.+self.eps))
def test_found_all(self):
c = np.ones(self.T.n,dtype=bool)
l = self.T.query_ball_point(self.x, self.d, p=self.p, eps=self.eps)
c[l] = False
assert_(np.all(distance(self.data[c],self.x,self.p) >= self.d/(1.+self.eps)))
class test_random_ball(ball_consistency):
def setUp(self):
n = 100
m = 4
np.random.seed(1234)
self.data = np.random.randn(n,m)
self.T = KDTree(self.data,leafsize=2)
self.x = np.random.randn(m)
self.p = 2.
self.eps = 0
self.d = 0.2
class test_random_ball_compiled(ball_consistency):
def setUp(self):
n = 100
m = 4
np.random.seed(1234)
self.data = np.random.randn(n,m)
self.T = cKDTree(self.data,leafsize=2)
self.x = np.random.randn(m)
self.p = 2.
self.eps = 0
self.d = 0.2
class test_random_ball_approx(test_random_ball):
def setUp(self):
test_random_ball.setUp(self)
self.eps = 0.1
class test_random_ball_approx_compiled(test_random_ball_compiled):
def setUp(self):
test_random_ball_compiled.setUp(self)
self.eps = 0.1
class test_random_ball_far(test_random_ball):
def setUp(self):
test_random_ball.setUp(self)
self.d = 2.
class test_random_ball_far_compiled(test_random_ball_compiled):
def setUp(self):
test_random_ball_compiled.setUp(self)
self.d = 2.
class test_random_ball_l1(test_random_ball):
def setUp(self):
test_random_ball.setUp(self)
self.p = 1
class test_random_ball_l1_compiled(test_random_ball_compiled):
def setUp(self):
test_random_ball_compiled.setUp(self)
self.p = 1
class test_random_ball_linf(test_random_ball):
def setUp(self):
test_random_ball.setUp(self)
self.p = np.inf
class test_random_ball_linf_compiled(test_random_ball_compiled):
def setUp(self):
test_random_ball_compiled.setUp(self)
self.p = np.inf
def test_random_ball_vectorized():
n = 20
m = 5
T = KDTree(np.random.randn(n,m))
r = T.query_ball_point(np.random.randn(2,3,m),1)
assert_equal(r.shape,(2,3))
assert_(isinstance(r[0,0],list))
def test_random_ball_vectorized_compiled():
n = 20
m = 5
np.random.seed(1234)
T = cKDTree(np.random.randn(n,m))
r = T.query_ball_point(np.random.randn(2,3,m),1)
assert_equal(r.shape,(2,3))
assert_(isinstance(r[0,0],list))
class two_trees_consistency:
def test_all_in_ball(self):
r = self.T1.query_ball_tree(self.T2, self.d, p=self.p, eps=self.eps)
for i, l in enumerate(r):
for j in l:
assert_(distance(self.data1[i],self.data2[j],self.p) <= self.d*(1.+self.eps))
def test_found_all(self):
r = self.T1.query_ball_tree(self.T2, self.d, p=self.p, eps=self.eps)
for i, l in enumerate(r):
c = np.ones(self.T2.n,dtype=bool)
c[l] = False
assert_(np.all(distance(self.data2[c],self.data1[i],self.p) >= self.d/(1.+self.eps)))
class test_two_random_trees(two_trees_consistency):
def setUp(self):
n = 50
m = 4
np.random.seed(1234)
self.data1 = np.random.randn(n,m)
self.T1 = KDTree(self.data1,leafsize=2)
self.data2 = np.random.randn(n,m)
self.T2 = KDTree(self.data2,leafsize=2)
self.p = 2.
self.eps = 0
self.d = 0.2
class test_two_random_trees_compiled(two_trees_consistency):
def setUp(self):
n = 50
m = 4
np.random.seed(1234)
self.data1 = np.random.randn(n,m)
self.T1 = cKDTree(self.data1,leafsize=2)
self.data2 = np.random.randn(n,m)
self.T2 = cKDTree(self.data2,leafsize=2)
self.p = 2.
self.eps = 0
self.d = 0.2
class test_two_random_trees_far(test_two_random_trees):
def setUp(self):
test_two_random_trees.setUp(self)
self.d = 2
class test_two_random_trees_far_compiled(test_two_random_trees_compiled):
def setUp(self):
test_two_random_trees_compiled.setUp(self)
self.d = 2
class test_two_random_trees_linf(test_two_random_trees):
def setUp(self):
test_two_random_trees.setUp(self)
self.p = np.inf
class test_two_random_trees_linf_compiled(test_two_random_trees_compiled):
def setUp(self):
test_two_random_trees_compiled.setUp(self)
self.p = np.inf
class test_rectangle:
def setUp(self):
self.rect = Rectangle([0,0],[1,1])
def test_min_inside(self):
assert_almost_equal(self.rect.min_distance_point([0.5,0.5]),0)
def test_min_one_side(self):
assert_almost_equal(self.rect.min_distance_point([0.5,1.5]),0.5)
def test_min_two_sides(self):
assert_almost_equal(self.rect.min_distance_point([2,2]),np.sqrt(2))
def test_max_inside(self):
assert_almost_equal(self.rect.max_distance_point([0.5,0.5]),1/np.sqrt(2))
def test_max_one_side(self):
assert_almost_equal(self.rect.max_distance_point([0.5,1.5]),np.hypot(0.5,1.5))
def test_max_two_sides(self):
assert_almost_equal(self.rect.max_distance_point([2,2]),2*np.sqrt(2))
def test_split(self):
less, greater = self.rect.split(0,0.1)
assert_array_equal(less.maxes,[0.1,1])
assert_array_equal(less.mins,[0,0])
assert_array_equal(greater.maxes,[1,1])
assert_array_equal(greater.mins,[0.1,0])
def test_distance_l2():
assert_almost_equal(distance([0,0],[1,1],2),np.sqrt(2))
def test_distance_l1():
assert_almost_equal(distance([0,0],[1,1],1),2)
def test_distance_linf():
assert_almost_equal(distance([0,0],[1,1],np.inf),1)
def test_distance_vectorization():
np.random.seed(1234)
x = np.random.randn(10,1,3)
y = np.random.randn(1,7,3)
assert_equal(distance(x,y).shape,(10,7))
class test_count_neighbors:
def setUp(self):
n = 50
m = 2
np.random.seed(1234)
self.T1 = KDTree(np.random.randn(n,m),leafsize=2)
self.T2 = KDTree(np.random.randn(n,m),leafsize=2)
def test_one_radius(self):
r = 0.2
assert_equal(self.T1.count_neighbors(self.T2, r),
np.sum([len(l) for l in self.T1.query_ball_tree(self.T2,r)]))
def test_large_radius(self):
r = 1000
assert_equal(self.T1.count_neighbors(self.T2, r),
np.sum([len(l) for l in self.T1.query_ball_tree(self.T2,r)]))
def test_multiple_radius(self):
rs = np.exp(np.linspace(np.log(0.01),np.log(10),3))
results = self.T1.count_neighbors(self.T2, rs)
assert_(np.all(np.diff(results) >= 0))
for r,result in zip(rs, results):
assert_equal(self.T1.count_neighbors(self.T2, r), result)
class test_count_neighbors_compiled:
def setUp(self):
n = 50
m = 2
np.random.seed(1234)
self.T1 = cKDTree(np.random.randn(n,m),leafsize=2)
self.T2 = cKDTree(np.random.randn(n,m),leafsize=2)
def test_one_radius(self):
r = 0.2
assert_equal(self.T1.count_neighbors(self.T2, r),
np.sum([len(l) for l in self.T1.query_ball_tree(self.T2,r)]))
def test_large_radius(self):
r = 1000
assert_equal(self.T1.count_neighbors(self.T2, r),
np.sum([len(l) for l in self.T1.query_ball_tree(self.T2,r)]))
def test_multiple_radius(self):
rs = np.exp(np.linspace(np.log(0.01),np.log(10),3))
results = self.T1.count_neighbors(self.T2, rs)
assert_(np.all(np.diff(results) >= 0))
for r,result in zip(rs, results):
assert_equal(self.T1.count_neighbors(self.T2, r), result)
class test_sparse_distance_matrix:
def setUp(self):
n = 50
m = 4
np.random.seed(1234)
self.T1 = KDTree(np.random.randn(n,m),leafsize=2)
self.T2 = KDTree(np.random.randn(n,m),leafsize=2)
self.r = 0.5
def test_consistency_with_neighbors(self):
M = self.T1.sparse_distance_matrix(self.T2, self.r)
r = self.T1.query_ball_tree(self.T2, self.r)
for i,l in enumerate(r):
for j in l:
assert_almost_equal(M[i,j],
distance(self.T1.data[i], self.T2.data[j]),
decimal=14)
for ((i,j),d) in M.items():
assert_(j in r[i])
def test_zero_distance(self):
        # regression test for bug 870: this call used to raise an exception
self.T1.sparse_distance_matrix(self.T1, self.r)
class test_sparse_distance_matrix_compiled:
def setUp(self):
n = 50
m = 4
np.random.seed(0)
data1 = np.random.randn(n,m)
data2 = np.random.randn(n,m)
self.T1 = cKDTree(data1,leafsize=2)
self.T2 = cKDTree(data2,leafsize=2)
self.ref_T1 = KDTree(data1, leafsize=2)
self.ref_T2 = KDTree(data2, leafsize=2)
self.r = 0.5
def test_consistency_with_neighbors(self):
M = self.T1.sparse_distance_matrix(self.T2, self.r)
r = self.T1.query_ball_tree(self.T2, self.r)
for i,l in enumerate(r):
for j in l:
assert_almost_equal(M[i,j],
distance(self.T1.data[i], self.T2.data[j]),
decimal=14)
for ((i,j),d) in M.items():
assert_(j in r[i])
def test_zero_distance(self):
# raises an exception for bug 870 (FIXME: Does it?)
self.T1.sparse_distance_matrix(self.T1, self.r)
def test_consistency_with_python(self):
M1 = self.T1.sparse_distance_matrix(self.T2, self.r)
M2 = self.ref_T1.sparse_distance_matrix(self.ref_T2, self.r)
assert_array_almost_equal(M1.todense(), M2.todense(), decimal=14)
def test_distance_matrix():
m = 10
n = 11
k = 4
np.random.seed(1234)
xs = np.random.randn(m,k)
ys = np.random.randn(n,k)
ds = distance_matrix(xs,ys)
assert_equal(ds.shape, (m,n))
for i in range(m):
for j in range(n):
assert_almost_equal(distance(xs[i],ys[j]),ds[i,j])
def test_distance_matrix_looping():
m = 10
n = 11
k = 4
np.random.seed(1234)
xs = np.random.randn(m,k)
ys = np.random.randn(n,k)
ds = distance_matrix(xs,ys)
dsl = distance_matrix(xs,ys,threshold=1)
assert_equal(ds,dsl)
def check_onetree_query(T,d):
r = T.query_ball_tree(T, d)
s = set()
for i, l in enumerate(r):
for j in l:
if i < j:
s.add((i,j))
assert_(s == T.query_pairs(d))
def test_onetree_query():
np.random.seed(0)
n = 50
k = 4
points = np.random.randn(n,k)
T = KDTree(points)
yield check_onetree_query, T, 0.1
points = np.random.randn(3*n,k)
points[:n] *= 0.001
points[n:2*n] += 2
T = KDTree(points)
yield check_onetree_query, T, 0.1
yield check_onetree_query, T, 0.001
yield check_onetree_query, T, 0.00001
yield check_onetree_query, T, 1e-6
def test_onetree_query_compiled():
np.random.seed(0)
n = 100
k = 4
points = np.random.randn(n,k)
T = cKDTree(points)
yield check_onetree_query, T, 0.1
points = np.random.randn(3*n,k)
points[:n] *= 0.001
points[n:2*n] += 2
T = cKDTree(points)
yield check_onetree_query, T, 0.1
yield check_onetree_query, T, 0.001
yield check_onetree_query, T, 0.00001
yield check_onetree_query, T, 1e-6
def test_query_pairs_single_node():
tree = KDTree([[0, 1]])
assert_equal(tree.query_pairs(0.5), set())
def test_query_pairs_single_node_compiled():
tree = cKDTree([[0, 1]])
assert_equal(tree.query_pairs(0.5), set())
def test_ball_point_ints():
# Regression test for #1373.
x, y = np.mgrid[0:4, 0:4]
points = list(zip(x.ravel(), y.ravel()))
tree = KDTree(points)
assert_equal(sorted([4, 8, 9, 12]),
sorted(tree.query_ball_point((2, 0), 1)))
points = np.asarray(points, dtype=float)
tree = KDTree(points)
assert_equal(sorted([4, 8, 9, 12]),
sorted(tree.query_ball_point((2, 0), 1)))
def test_kdtree_comparisons():
# Regression test: node comparisons were done wrong in 0.12 w/Py3.
nodes = [KDTree.node() for _ in range(3)]
assert_equal(sorted(nodes), sorted(nodes[::-1]))
def test_ckdtree_build_modes():
# check if different build modes for cKDTree give
# similar query results
np.random.seed(0)
n = 5000
k = 4
points = np.random.randn(n, k)
T1 = cKDTree(points).query(points, k=5)[-1]
T2 = cKDTree(points, compact_nodes=False).query(points, k=5)[-1]
T3 = cKDTree(points, balanced_tree=False).query(points, k=5)[-1]
T4 = cKDTree(points, compact_nodes=False, balanced_tree=False).query(points, k=5)[-1]
assert_array_equal(T1, T2)
assert_array_equal(T1, T3)
assert_array_equal(T1, T4)
def test_ckdtree_pickle():
# test if it is possible to pickle
# a cKDTree
try:
import cPickle
# known failure on Python 2
# pickle currently only supported on Python 3
cPickle.dumps('pyflakes dummy')
except ImportError:
import pickle
np.random.seed(0)
n = 50
k = 4
points = np.random.randn(n, k)
T1 = cKDTree(points)
tmp = pickle.dumps(T1)
T2 = pickle.loads(tmp)
T1 = T1.query(points, k=5)[-1]
T2 = T2.query(points, k=5)[-1]
assert_array_equal(T1, T2)
def test_ckdtree_copy_data():
# check if copy_data=True makes the kd-tree
# impervious to data corruption by modification of
    # the data array
np.random.seed(0)
n = 5000
k = 4
points = np.random.randn(n, k)
T = cKDTree(points, copy_data=True)
q = points.copy()
T1 = T.query(q, k=5)[-1]
points[...] = np.random.randn(n, k)
T2 = T.query(q, k=5)[-1]
assert_array_equal(T1, T2)
def test_ckdtree_parallel():
# check if parallel=True also generates correct
# query results
np.random.seed(0)
n = 5000
k = 4
points = np.random.randn(n, k)
T = cKDTree(points)
T1 = T.query(points, k=5, n_jobs=64)[-1]
T2 = T.query(points, k=5, n_jobs=-1)[-1]
T3 = T.query(points, k=5)[-1]
assert_array_equal(T1, T2)
assert_array_equal(T1, T3)
def test_ckdtree_view():
# Check that the nodes can be correctly viewed from Python.
# This test also sanity checks each node in the cKDTree, and
# thus verifies the internal structure of the kd-tree.
np.random.seed(0)
n = 100
k = 4
points = np.random.randn(n, k)
kdtree = cKDTree(points)
# walk the whole kd-tree and sanity check each node
def recurse_tree(n):
assert_(isinstance(n, cKDTreeNode))
if n.split_dim == -1:
assert_(n.lesser is None)
assert_(n.greater is None)
assert_(n.indices.shape[0] <= kdtree.leafsize)
else:
recurse_tree(n.lesser)
recurse_tree(n.greater)
x = n.lesser.data_points[:, n.split_dim]
y = n.greater.data_points[:, n.split_dim]
assert_(x.max() < y.min())
recurse_tree(kdtree.tree)
    # check that indices are correctly retrieved
n = kdtree.tree
assert_array_equal(np.sort(n.indices), range(100))
    # check that data_points are correctly retrieved
assert_array_equal(kdtree.data[n.indices, :], n.data_points)
# cKDTree is specialized to type double points, so no need to make
# a unit test corresponding to test_ball_point_ints()
if __name__ == "__main__":
run_module_suite()
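# Illustrative usage sketch of the API exercised above (hypothetical points):
# build a tree, ask for the nearest neighbours of a point, and for all points
# within a given radius.
def _kdtree_usage_sketch():
    pts = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.]])
    tree = KDTree(pts)
    dist, idx = tree.query([0.1, 0.1], k=2)        # two nearest neighbours
    ball = tree.query_ball_point([0., 0.], 1.0)    # indices within distance 1.0
    return dist, idx, ball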
|
giorgiop/scipy
|
scipy/spatial/tests/test_kdtree.py
|
Python
|
bsd-3-clause
| 24,475
|
import unittest
import pystache
from pystache import Renderer
from examples.nested_context import NestedContext
from examples.complex import Complex
from examples.lambdas import Lambdas
from examples.template_partial import TemplatePartial
from examples.simple import Simple
from pystache.tests.common import EXAMPLES_DIR
from pystache.tests.common import AssertStringMixin
class TestSimple(unittest.TestCase, AssertStringMixin):
def test_nested_context(self):
renderer = Renderer()
view = NestedContext(renderer)
view.template = '{{#foo}}{{thing1}} and {{thing2}} and {{outer_thing}}{{/foo}}{{^foo}}Not foo!{{/foo}}'
actual = renderer.render(view)
self.assertString(actual, u"one and foo and two")
def test_looping_and_negation_context(self):
template = '{{#item}}{{header}}: {{name}} {{/item}}{{^item}} Shouldnt see me{{/item}}'
context = Complex()
renderer = Renderer()
actual = renderer.render(template, context)
self.assertEqual(actual, "Colors: red Colors: green Colors: blue ")
def test_empty_context(self):
template = '{{#empty_list}}Shouldnt see me {{/empty_list}}{{^empty_list}}Should see me{{/empty_list}}'
self.assertEqual(pystache.Renderer().render(template), "Should see me")
def test_callables(self):
view = Lambdas()
view.template = '{{#replace_foo_with_bar}}foo != bar. oh, it does!{{/replace_foo_with_bar}}'
renderer = Renderer()
actual = renderer.render(view)
self.assertString(actual, u'bar != bar. oh, it does!')
def test_rendering_partial(self):
renderer = Renderer(search_dirs=EXAMPLES_DIR)
view = TemplatePartial(renderer=renderer)
view.template = '{{>inner_partial}}'
actual = renderer.render(view)
self.assertString(actual, u'Again, Welcome!')
view.template = '{{#looping}}{{>inner_partial}} {{/looping}}'
actual = renderer.render(view)
self.assertString(actual, u"Again, Welcome! Again, Welcome! Again, Welcome! ")
def test_non_existent_value_renders_blank(self):
view = Simple()
template = '{{not_set}} {{blank}}'
self.assertEqual(pystache.Renderer().render(template), ' ')
def test_template_partial_extension(self):
"""
Side note:
From the spec--
Partial tags SHOULD be treated as standalone when appropriate.
In particular, this means that trailing newlines should be removed.
"""
renderer = Renderer(search_dirs=EXAMPLES_DIR, file_extension='txt')
view = TemplatePartial(renderer=renderer)
actual = renderer.render(view)
self.assertString(actual, u"""Welcome
-------
## Again, Welcome! ##""")
|
zzeleznick/zDjango
|
venv/lib/python2.7/site-packages/pystache/tests/test_simple.py
|
Python
|
mit
| 2,785
|
__author__ = 'bromix'
from .directory_item import DirectoryItem
from .. import constants
class SearchHistoryItem(DirectoryItem):
def __init__(self, context, query, image=None, fanart=None):
if image is None:
image = context.create_resource_path('media/search.png')
pass
DirectoryItem.__init__(self, query, context.create_uri([constants.paths.SEARCH, 'query'], {'q': query}),
image=image)
if fanart:
self.set_fanart(fanart)
pass
else:
self.set_fanart(context.get_fanart())
pass
context_menu = [(context.localize(constants.localize.SEARCH_REMOVE),
'RunPlugin(%s)' % context.create_uri([constants.paths.SEARCH, 'remove'], params={'q': query})),
(context.localize(constants.localize.SEARCH_RENAME),
'RunPlugin(%s)' % context.create_uri([constants.paths.SEARCH, 'rename'], params={'q': query})),
(context.localize(constants.localize.SEARCH_CLEAR),
'RunPlugin(%s)' % context.create_uri([constants.paths.SEARCH, 'clear']))]
self.set_context_menu(context_menu)
pass
pass
|
azumimuo/family-xbmc-addon
|
zips/plugin.video.youtube/resources/lib/kodion/items/search_history_item.py
|
Python
|
gpl-2.0
| 1,259
|
# -*- coding: utf-8 -*-
## $Id: webmessage_webinterface.py,v 1.13 2008/03/12 16:48:08 tibor Exp $
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""FieldExporter web interface"""
__revision__ = "$Id: webmessage_webinterface.py,v 1.13 2008/03/12 16:48:08 tibor Exp $"
__lastupdated__ = """$Date: 2008/03/12 16:48:08 $"""
import re
from invenio.webpage import page
from invenio.webinterface_handler import WebInterfaceDirectory, \
wash_urlargd
from invenio.urlutils import redirect_to_url
from invenio.config import CFG_SITE_URL, \
CFG_SITE_SECURE_URL
from invenio.dateutils import convert_datestruct_to_datetext, \
convert_datetext_to_datestruct
from invenio.messages import gettext_set_language
from invenio.bibexport_method_fieldexporter import get_css, \
get_navigation_menu, \
perform_request_edit_query, \
perform_request_edit_job, \
perform_request_jobs, \
perform_request_new_job, \
perform_request_save_job, \
perform_request_delete_jobs, \
perform_request_run_jobs, \
perform_request_job_queries, \
perform_request_new_query, \
perform_request_save_query, \
perform_request_delete_queries, \
perform_request_run_queries, \
perform_request_job_history, \
perform_request_job_results, \
perform_request_display_job_result, \
perform_request_download_job_result, \
AccessDeniedError
from invenio.bibexport_method_fieldexporter_dblayer import Job, \
Query, \
JobResult
from invenio.webuser import collect_user_info, \
page_not_authorized
from invenio.access_control_engine import acc_authorize_action
class WebInterfaceFieldExporterPages(WebInterfaceDirectory):
"""Defines the set of /fieldexporter pages."""
_exports = ["", "jobs", "edit_job",
"job_queries", "edit_query",
"job_results", "display_job_result", "download_job_result",
"history", "not_authorized"]
    # constants containing the URLs of the pages
_EXPORT_URL = "%s/exporter" % (CFG_SITE_URL, )
_JOBS_URL = "%s/exporter/jobs" % (CFG_SITE_URL, )
_EDIT_JOB_URL = "%s/exporter/edit_job" % (CFG_SITE_URL, )
_EDIT_QUERY_URL = "%s/exporter/edit_query" % (CFG_SITE_URL, )
_JOB_QUERIES_URL = "%s/exporter/job_queries" % (CFG_SITE_URL, )
_JOB_HISTORY_URL = "%s/exporter/history" % (CFG_SITE_URL, )
_NOT_AUTHORIZED_URL = "%s/exporter/not_authorized" % (CFG_SITE_URL, )
_LOGIN_URL = "%s/youraccount/login" % (CFG_SITE_SECURE_URL,)
_NAVTRAIL_EXPORT = """<a href="/exporter" class="navtrail">Export</a>"""
def index(self, req, form):
""" The function called by default"""
redirect_to_url(req, self._JOB_HISTORY_URL)
__call__ = index
def jobs(self, req, form):
"""Displays all the jobs of a given user
and allows creation, deletion and execution of jobs"""
argd = wash_urlargd(form, {
"new_button": (str, ""),
"run_button": (str, ""),
"delete_button": (str, ""),
"selected_jobs": (list, "")
})
# load the right message language
language = argd["ln"]
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
user_id = self._get_user_id(req)
try:
# if the form is submitted through some of the buttons
# we should perform the appropriate action
if argd["new_button"]:
self._redirect_to_page(req, self._EDIT_JOB_URL, language)
elif argd["delete_button"]:
job_ids = argd["selected_jobs"]
perform_request_delete_jobs(job_ids = job_ids,
user_id = user_id,
language = language)
elif argd["run_button"]:
job_ids = argd["selected_jobs"]
perform_request_run_jobs(job_ids = job_ids,
user_id = user_id,
language = language)
self._redirect_to_page(req, self._JOB_HISTORY_URL, language)
user_id = self._get_user_id(req)
body = perform_request_jobs(user_id = user_id,
language = language)
except AccessDeniedError:
self._redirect_to_not_authorised_page(req, language)
return page(title = _("Export Job Overview"),
metaheaderadd = get_css(),
body = body,
req = req,
navmenuid = "fieldexporter",
titleprologue = get_navigation_menu(language),
navtrail = self._NAVTRAIL_EXPORT,
language = language)
def edit_job(self, req, form):
"""Edits an existing job or creates a new one"""
# Create an empty job and use its default values
# to init missing parameters
job = Job()
argd = wash_urlargd(form,
{"job_name": (str, job.get_name()),
"output_directory": (str, job.get_output_directory()),
"job_frequency": (int, job.get_frequency()),
"output_format": (int, job.get_output_format()),
"last_run": (str, convert_datestruct_to_datetext(job.get_last_run())),
"id": (int, job.get_id()),
"save_button": (str, ""),
"cancel_button": (str, ""),
"edit_queries_button": (str, "")
})
language = argd["ln"]
# load the right message language
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
user_id = self._get_user_id(req)
job_id = argd["id"]
job = Job(job_id = job_id,
name = argd["job_name"],
frequency = argd["job_frequency"],
output_format = argd["output_format"],
last_run = convert_datetext_to_datestruct(argd["last_run"]),
output_directory = argd["output_directory"])
try:
if argd["cancel_button"]:
self._redirect_to_page(req, self._JOBS_URL, language)
elif argd["save_button"]:
perform_request_save_job(job = job,
user_id = user_id,
language = language)
self._redirect_to_page(req, self._JOBS_URL, language)
elif argd["edit_queries_button"]:
result_job_id = perform_request_save_job(job = job,
user_id = user_id,
language = language)
edit_queries_url = "%s?job_id=%s" % (self._JOB_QUERIES_URL, result_job_id)
self._redirect_to_page(req, edit_queries_url, language)
elif Job.ID_MISSING == job_id:
title = _("New Export Job")
body = perform_request_new_job(language = language)
else:
title = _("Edit Export Job")
body = perform_request_edit_job(job_id = job_id,
user_id = user_id,
language = language)
except AccessDeniedError:
self._redirect_to_not_authorised_page(req, language)
return page(title = title,
metaheaderadd=get_css(),
body = body,
req = req,
navmenuid = "fieldexporter",
titleprologue = get_navigation_menu(language),
navtrail = self._NAVTRAIL_EXPORT,
language = language)
def job_queries(self, req, form):
"""Allows edition and manipulations of the queries of a job"""
argd = wash_urlargd(form, {
"new_button": (str, ""),
"run_button": (str, ""),
"delete_button": (str, ""),
"selected_queries": (list, ""),
"job_id": (int, -1)
})
# load the right message language
language = argd["ln"]
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
user_id = self._get_user_id(req)
job_id = argd["job_id"]
try:
# if the form is submitted through some of the buttons
# we should perform the appropriate action
if argd["new_button"]:
new_query_url = "%s?job_id=%s" % (self._EDIT_QUERY_URL, job_id)
self._redirect_to_page(req, new_query_url, language)
if argd["delete_button"]:
query_ids = argd["selected_queries"]
perform_request_delete_queries(query_ids = query_ids,
user_id = user_id,
language = language)
if argd["run_button"]:
title = _("Query Results")
query_ids = argd["selected_queries"]
body = perform_request_run_queries(query_ids = query_ids,
user_id = user_id,
job_id = job_id,
language = language)
else:
title = _("Export Job Queries")
body = perform_request_job_queries(job_id = job_id,
user_id = user_id,
language = language)
except AccessDeniedError:
self._redirect_to_not_authorised_page(req, language)
return page(title = title,
metaheaderadd=get_css(),
body = body,
req = req,
navmenuid = "fieldexporter",
titleprologue = get_navigation_menu(language),
navtrail = self._NAVTRAIL_EXPORT,
language = language)
def edit_query(self, req, form):
"""Edits an existing query or creates a new one"""
# Create an empty job and use its default values
# to init missing parameters
query = Query()
name = query.get_name()
output_fields = ", ".join(query.get_output_fields())
search_criteria = query.get_search_criteria()
comment = query.get_comment()
query_id = query.get_id()
argd = wash_urlargd(form,
{"name": (str, name),
"search_criteria": (str, search_criteria),
"output_fields": (str, output_fields),
"comment": (str, comment),
"id": (int, query_id),
"job_id": (int, Job.ID_MISSING),
"save_button": (str, ""),
"cancel_button": (str, "")
})
# load the right message language
language = argd["ln"]
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
user_id = self._get_user_id(req)
query_id = argd["id"]
job_id = argd["job_id"]
current_job_queries_url = "%s?job_id=%s" %(self._JOB_QUERIES_URL, job_id)
try:
if argd["cancel_button"]:
self._redirect_to_page(req, current_job_queries_url, language)
elif argd["save_button"]:
name = argd["name"]
search_criteria = argd["search_criteria"]
comment = argd["comment"]
# split the text entered by user to different fields
outoutput_fields_text = argd["output_fields"]
re_split_pattern = re.compile(r',\s*')
output_fields = re_split_pattern.split(outoutput_fields_text)
query = Query(query_id,
name,
search_criteria,
comment,
output_fields)
perform_request_save_query(query = query,
job_id = job_id,
user_id = user_id,
language = language)
self._redirect_to_page(req, current_job_queries_url, language)
elif Query.ID_MISSING == query_id:
title = _("New Query")
body = perform_request_new_query(job_id = job_id,
user_id = user_id,
language = language)
else:
title = _("Edit Query")
body = perform_request_edit_query(query_id = query_id,
job_id = job_id,
user_id = user_id,
language = language)
except AccessDeniedError:
self._redirect_to_not_authorised_page(req, language)
return page(title = title,
metaheaderadd=get_css(),
body = body,
req = req,
navmenuid = "fieldexporter",
titleprologue = get_navigation_menu(language),
navtrail = self._NAVTRAIL_EXPORT,
language = language)
def job_results(self, req, form):
"""Displays information about the results of a job"""
argd = wash_urlargd(form, {
"result_id": (int, -1)
})
# load the right message language
language = argd["ln"]
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
user_id = self._get_user_id(req)
job_result_id = argd["result_id"]
title = _("Export Job Results")
try:
body = perform_request_job_results(job_result_id = job_result_id,
user_id = user_id,
language = language)
except AccessDeniedError:
self._redirect_to_not_authorised_page(req, language)
return page(title = title,
metaheaderadd = get_css(),
body = body,
req = req,
navmenuid = "fieldexporter",
titleprologue = get_navigation_menu(language),
navtrail = self._NAVTRAIL_EXPORT,
language = language)
def display_job_result(self, req, form):
"""Displays the results of a job"""
argd = wash_urlargd(form, {
"result_id": (int, JobResult.ID_MISSING),
"output_format" : (int, Job.OUTPUT_FORMAT_MISSING)
})
# load the right message language
language = argd["ln"]
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
user_id = self._get_user_id(req)
job_result_id = argd["result_id"]
output_format = argd["output_format"]
title = _("Export Job Result")
try:
body = perform_request_display_job_result(job_result_id = job_result_id,
output_format = output_format,
user_id = user_id,
language = language)
except AccessDeniedError:
self._redirect_to_not_authorised_page(req, language)
return page(title = title,
metaheaderadd = get_css(),
body = body,
req = req,
navmenuid = "fieldexporter",
titleprologue = get_navigation_menu(language),
navtrail = self._NAVTRAIL_EXPORT,
language = language)
def download_job_result(self, req, form):
"""Returns to the browser zip file containing the job result"""
argd = wash_urlargd(form, {
"result_id" : (int, JobResult.ID_MISSING),
"output_format" : (int, Job.OUTPUT_FORMAT_MISSING)
})
# load the right message language
language = argd["ln"]
job_result_id = argd["result_id"]
output_format = argd["output_format"]
user_id = self._get_user_id(req)
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
title = _("Export Job Result")
try:
perform_request_download_job_result(req = req,
job_result_id = job_result_id,
output_format = output_format,
user_id = user_id,
language = language)
except AccessDeniedError:
self._redirect_to_not_authorised_page(req, language)
def history(self, req, form):
"""Displays history of the jobs"""
argd = wash_urlargd(form, {})
# load the right message language
language = argd["ln"]
_ = gettext_set_language(language)
self._check_user_credentials(req, language)
title = _("Export Job History")
user_id = self._get_user_id(req)
body = perform_request_job_history(user_id, language)
return page(title = title,
metaheaderadd = get_css(),
body = body,
req = req,
navmenuid = "fieldexporter",
titleprologue = get_navigation_menu(language),
navtrail = self._NAVTRAIL_EXPORT,
language = language)
def not_authorized(self, req, form):
"""Displays page telling the user that
he is not authorised to access the resource"""
argd = wash_urlargd(form, {})
# load the right message language
language = argd["ln"]
_ = gettext_set_language(language)
text = _("You are not authorised to access this resource.")
return page_not_authorized(req = req, ln = language, text = text)
def _check_user_credentials(self, req, language):
"""Check if the user is allowed to use field exporter
@param req: request as received from apache
@param language: the language of the page
"""
user_info = collect_user_info(req)
#redirect guests to login page
if "1" == user_info["guest"]:
referer_url = "%s?ln=%s" % (self._EXPORT_URL, language)
redirect_url = "%s?ln=%s&referer=%s" % (self._LOGIN_URL,
language,
referer_url)
redirect_to_url(req, redirect_url)
#redirect unauthorized user to not_authorized page
(auth_code, auth_msg) = acc_authorize_action(user_info, 'cfgbibexport')
if 0 != auth_code:
self._redirect_to_not_authorised_page(req, language)
def _redirect_to_not_authorised_page(self, req, language):
"""Redirects user to page telling him that he is not
authorised to do something
@param req: request as received from apache
@param language: the language of the page
"""
self._redirect_to_page(req, self._NOT_AUTHORIZED_URL, language)
def _redirect_to_page(self, req, url, language):
"""Redirects user to a page with the given URL
and language.
@param req: request as received from apache
@param language: the language of the page
@param url: url to redirect to
"""
# check which symbol to use for appending the parameters
# if this is the only parameter use ?
if -1 == url.find("?"):
append_symbol = "?"
# if there are other parameters already appended, use &
else:
append_symbol = "&"
redirect_url = "%s%sln=%s" % (url, append_symbol, language)
redirect_to_url(req, redirect_url)
def _get_user_id(self, req):
"""Return the identifier of the currently loged user.
@param req: request as received from apache
@return: identifier of currently logged user
"""
user_info = collect_user_info(req)
user_id = user_info['uid']
return user_id
|
kaplun/Invenio-OpenAIRE
|
modules/bibexport/lib/bibexport_method_fieldexporter_webinterface.py
|
Python
|
gpl-2.0
| 22,858
|
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0103
from invenio.bibrecord import record_has_field, \
record_get_field_instances, \
record_delete_field, \
record_add_fields
from invenio.bibmerge_differ import record_field_diff, match_subfields, \
diff_subfields
from copy import deepcopy
def merge_record(rec1, rec2, merge_conflicting_fields=False):
"""Merges all non-conflicting fields from 'rec2' to 'rec1'
@param rec1: First record (a record dictionary structure)
@param rec2: Second record (a record dictionary structure)
"""
for fnum in rec2:
if fnum[:2] != "00": #if it's not a controlfield
merge_field_group(rec1, rec2, fnum, merge_conflicting_fields=merge_conflicting_fields)
def merge_field_group(rec1, rec2, fnum, ind1='', ind2='', merge_conflicting_fields=False):
"""Merges non-conflicting fields from 'rec2' to 'rec1' for a specific tag.
the second record.
@param rec1: First record (a record dictionary structure)
@param rec2: Second record (a record dictionary structure)
@param fnum: a 3 characters long string indicating field tag number
@param ind1: a 1 character long string
@param ind2: a 1 character long string
@param merge_conflicting_fields: whether to merge conflicting fields or not
"""
### Check if merging goes for all indicators and set a boolean
merging_all_indicators = not ind1 and not ind2
### check if there is no field in rec2 to be merged in rec1
if not record_has_field(rec2, fnum):
return
### get fields of rec2
if merging_all_indicators:
fields2 = record_get_field_instances(rec2, fnum, '%', '%')
else:
fields2 = record_get_field_instances(rec2, fnum, ind1, ind2)
if len(fields2)==0:
return
### check if field in rec1 doesn't even exist
if not record_has_field(rec1, fnum):
record_add_fields(rec1, fnum, fields2)
return
### compare the fields, get diffs for given indicators
alldiffs = record_field_diff(rec1[fnum], rec2[fnum], fnum, match_subfields, ind1, ind2)
### check if fields are the same
if alldiffs is None:
return #nothing to merge
### find the diffing for the fields of the given indicators
alldiffs = alldiffs[1] #keep only the list of diffs by indicators (without the 'c')
if merging_all_indicators:
#combine the diffs for each indicator to one list
diff = _combine_diffs(alldiffs)
else: #diffing for one indicator
for diff in alldiffs: #look for indicator pair in diff result
if diff[0] == (ind1, ind2):
break
else:
raise Exception, "Indicators not in diff result."
diff = diff[1] #keep only the list of diffs (without the indicator tuple)
### proceed to merging fields in a new field list
fields1, fields2 = rec1[fnum], rec2[fnum]
new_fields = []
if merge_conflicting_fields == False: #merge non-conflicting fields
for m in diff: #for every match of fields in the diff
if m[0] is not None: #if rec1 has a field in the diff, keep it
new_fields.append( deepcopy(fields1[m[0]]) )
else: #else take the field from rec2
new_fields.append( deepcopy(fields2[m[1]]) )
else: #merge all fields
for m in diff: #for every match of fields in the diff
if m[1] is not None: #if rec2 has a field, add it
new_fields.append( deepcopy(fields2[m[1]]) )
if m[0] is not None and fields1[m[0]][0] != fields2[m[1]][0]:
#if the fields are not the same then add the field of rec1
new_fields.append( deepcopy(fields1[m[0]]) )
else:
new_fields.append( deepcopy(fields1[m[0]]) )
### delete existing fields
record_delete_field(rec1, fnum, ind1, ind2)
## find where the new_fields should be inserted in rec1 (insert_index)
if merging_all_indicators:
insert_index = 0
else:
insert_index = None
ind_pair = (ind1, ind2)
first_last_dict = _first_and_last_index_for_each_indicator( rec1.get(fnum, []) )
#find the indicator pair which is just before the one which will be inserted
indicators = first_last_dict.keys()
indicators.sort()
ind_pair_before = None
for pair in indicators:
if pair > ind_pair:
break
else:
ind_pair_before = pair
if ind_pair_before is None: #if no smaller indicator pair exists
insert_index = 0 #insertion will take place at the beginning
else: #else insert after the last field index of the previous indicator pair
insert_index = first_last_dict[ind_pair_before][1] + 1
### add the new (merged) fields in correct 'in_field_index' position
record_add_fields(rec1, fnum, new_fields, insert_index)
return
def _combine_diffs(alldiffs):
"""Takes all diffs of a field-tag which are separated by indicators and
combine them in one list in correct index order."""
diff = []
for d in alldiffs:
diff.extend( d[1] )
return diff
def _first_and_last_index_for_each_indicator(fields):
"""return a dictionary with indicator pair tuples as keys and a pair as
value that contains the first and the last in_field_index of the fields that
have the specific indicators. Useful to find where to insert new fields."""
result = {}
for index, field in enumerate(fields):
indicators = (field[1], field[2])
if indicators not in result: #create first-last pair for indicator pair
result[indicators] = [index, index]
else: #if there is a first-last pair already, update the 'last' index
result[indicators][1] = index
return result
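# Illustrative sketch (hypothetical values): for a field list whose
# (ind1, ind2) pairs are, in order,
#     ('1', ' '), ('1', ' '), ('2', '3')
# the helper above would return {('1', ' '): [0, 1], ('2', '3'): [2, 2]},
# i.e. the first and last in_field_index seen for each indicator pair.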
def add_field(rec1, rec2, fnum, findex1, findex2):
"""Adds the field of rec2 into rec1 in a position that depends on the
diffing of rec1 with rec2.
@param rec1: First record (a record dictionary structure)
@param rec2: Second record (a record dictionary structure)
@param fnum: a 3 characters long string indicating field tag number
@param findex1: the rec1 field position in the group of fields it belongs
@param findex2: the rec2 field position in the group of fields it belongs
"""
field_to_add = rec2[fnum][findex2]
### if findex1 indicates an existing field in rec1, insert the field of rec2
### before the field of rec1
if findex1 is not None:
record_add_fields(rec1, fnum, [field_to_add], findex1)
return
### check if field tag does not exist in record1
if not record_has_field(rec1, fnum):
record_add_fields(rec1, fnum, [field_to_add]) #insert at the beginning
return
### if findex1 is None and the fieldtag already exists
#get diffs for all indicators of the field.
alldiffs = record_field_diff(rec1[fnum], rec2[fnum], fnum, match_subfields)
alldiffs = alldiffs[1] #keep only the list of diffs by indicators (without the 'c')
diff = _combine_diffs(alldiffs) #combine results in one list
#find the position of the field after which the insertion should take place
findex1 = -1
for m in diff:
if m[1] == findex2:
break
if m[0] is not None:
findex1 = m[0]
#finally add the field (one position after)
record_add_fields(rec1, fnum, [field_to_add], findex1+1)
def replace_field(rec1, rec2, fnum, findex1, findex2):
"""Replaces the contents of a field of rec1 with those of rec2.
@param rec1: First record (a record dictionary structure)
@param rec2: Second record (a record dictionary structure)
@param fnum: a 3 characters long string indicating field tag number
@param findex1: the rec1 field position in the group of fields it belongs
@param findex2: the rec2 field position in the group of fields it belongs
"""
#if there is no field in rec1 to replace, just add a new one
if findex1 is None:
add_field(rec1, rec2, fnum, findex1, findex2)
return
#replace list of subfields from rec2 to rec1
for i in range( len(rec1[fnum][findex1][0]) ):
rec1[fnum][findex1][0].pop()
rec1[fnum][findex1][0].extend(rec2[fnum][findex2][0])
def merge_field(rec1, rec2, fnum, findex1, findex2):
"""Merges the contents of a field of rec1 with those of rec2, inserting
them in the place of the field of rec1.
@param rec1: First record (a record dictionary structure)
@param rec2: Second record (a record dictionary structure)
@param fnum: a 3 characters long string indicating field tag number
@param findex1: the rec1 field position in the group of fields it belongs
@param findex2: the rec2 field position in the group of fields it belongs
"""
#if there is no field in rec1 to merge to, just add a new one
if findex1 is None:
add_field(rec1, rec2, fnum, findex1, findex2)
return
field1 = rec1[fnum][findex1]
sflist1 = field1[0]
sflist2 = rec2[fnum][findex2][0]
# diff the subfields
diffs = diff_subfields(sflist1, sflist2)
#merge subfields of field1 with those of field2
new_sflist = []
#for every match in the diff append the subfields of both fields
for m in diffs:
if m[1] is not None:
new_sflist.append( sflist2[m[1]] ) #append the subfield
if m[2] != 1.0 and m[0] is not None:
new_sflist.append( sflist1[m[0]] )
#replace list of subfields of rec1 with the new one
for i in range( len(sflist1) ):
sflist1.pop()
sflist1.extend(new_sflist)
def delete_field(rec, fnum, findex):
"""Delete a specific field.
@param rec: a record dictionary structure
@param fnum: a 3 characters long string indicating field tag number
@param findex: the rec field position in the group of fields it belongs
"""
record_delete_field(rec, fnum, field_position_local=findex)
def delete_subfield(rec, fnum, findex, sfindex):
"""Delete a specific subfield.
@param rec: a record dictionary structure
@param fnum: a 3 characters long string indicating field tag number
@param findex: the rec field position in the group of fields it belongs
@param sfindex: the index position of the subfield in the field
"""
field = rec[fnum][findex]
subfields = field[0]
if len(subfields) > 1:
del subfields[sfindex]
def replace_subfield(rec1, rec2, fnum, findex1, findex2, sfindex1, sfindex2):
"""Replaces a subfield of rec1 with a subfield of rec2.
@param rec1: First record (a record dictionary structure)
@param rec2: Second record (a record dictionary structure)
@param fnum: a 3 characters long string indicating field tag number
@param findex1: the rec1 field position in the group of fields it belongs
@param findex2: the rec2 field position in the group of fields it belongs
@param sfindex1: the index position of the subfield in the field of rec1
@param sfindex2: the index position of the subfield in the field of rec2
"""
subfields1 = rec1[fnum][findex1][0]
subfields2 = rec2[fnum][findex2][0]
subfields1[sfindex1] = subfields2[sfindex2]
def add_subfield(rec1, rec2, fnum, findex1, findex2, sfindex1, sfindex2):
"""Adds a subfield of rec2 in a field of rec1, before or after sfindex1.
@param rec1: First record (a record dictionary structure)
@param rec2: Second record (a record dictionary structure)
@param fnum: a 3 characters long string indicating field tag number
@param findex1: the rec1 field position in the group of fields it belongs
@param findex2: the rec2 field position in the group of fields it belongs
@param sfindex1: the index position of the subfield in the field of rec1
@param sfindex2: the index position of the subfield in the field of rec2
"""
subfield_to_insert = rec2[fnum][findex2][0][sfindex2]
#insert at the sfindex1 position
subfields1 = rec1[fnum][findex1][0]
subfields1[sfindex1:sfindex1] = [ subfield_to_insert ]
def copy_R2_to_R1(rec1, rec2):
"""Copies contents of R2 to R1 apart from the controlfields."""
tmprec = deepcopy(rec1)
for fnum in tmprec:
if fnum[:2] != '00': #if it's not a control field delete it from rec1
del rec1[fnum]
for fnum in rec2:
        if fnum[:2] != '00': #if it's not a control field add it to rec1
rec1[fnum] = rec2[fnum]
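# Illustrative sketch (hypothetical records, assuming the usual bibrecord
# layout {tag: [([subfields], ind1, ind2, value, pos), ...]}):
# with rec1 = {'001': [ctrl1], '100': [author1]} and
#      rec2 = {'001': [ctrl2], '245': [title2]},
# copy_R2_to_R1(rec1, rec2) leaves rec1['001'] untouched, drops tag '100' and
# copies tag '245' from rec2, giving rec1 = {'001': [ctrl1], '245': [title2]}.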
|
NikolaYolov/invenio_backup
|
modules/bibmerge/lib/bibmerge_merger.py
|
Python
|
gpl-2.0
| 13,389
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: digital_ocean_account_facts
short_description: Gather facts about DigitalOcean User account
description:
- This module can be used to gather facts about the user account.
author: "Abhijeet Kasurde (@Akasurde)"
version_added: "2.6"
requirements:
- "python >= 2.6"
extends_documentation_fragment: digital_ocean.documentation
'''
EXAMPLES = '''
- name: Gather facts about user account
digital_ocean_account_facts:
oauth_token: "{{ oauth_token }}"
'''
RETURN = '''
data:
description: DigitalOcean account facts
returned: success
type: dictionary
sample: {
"droplet_limit": 10,
"email": "testuser1@gmail.com",
"email_verified": true,
"floating_ip_limit": 3,
"status": "active",
"status_message": "",
"uuid": "aaaaaaaaaaaaaa"
}
'''
from traceback import format_exc
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.digital_ocean import DigitalOceanHelper
from ansible.module_utils._text import to_native
def core(module):
rest = DigitalOceanHelper(module)
response = rest.get("account")
if response.status_code != 200:
module.fail_json(msg="Failed to fetch 'account' facts due to error : %s" % response.json['message'])
module.exit_json(changed=False, data=response.json["account"])
def main():
argument_spec = DigitalOceanHelper.digital_ocean_argument_spec()
module = AnsibleModule(argument_spec=argument_spec)
try:
core(module)
except Exception as e:
module.fail_json(msg=to_native(e), exception=format_exc())
if __name__ == '__main__':
main()
|
hryamzik/ansible
|
lib/ansible/modules/cloud/digital_ocean/digital_ocean_account_facts.py
|
Python
|
gpl-3.0
| 2,100
|
#!/usr/bin/env python
# (c) 2013, Jesse Keating <jesse.keating@rackspace.com,
# Paul Durivage <paul.durivage@rackspace.com>,
# Matt Martz <matt@sivel.net>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Rackspace Cloud Inventory
Authors:
Jesse Keating <jesse.keating@rackspace.com,
Paul Durivage <paul.durivage@rackspace.com>,
Matt Martz <matt@sivel.net>
Description:
Generates inventory that Ansible can understand by making API request to
Rackspace Public Cloud API
When run against a specific host, this script returns variables similar to:
rax_os-ext-sts_task_state
rax_addresses
rax_links
rax_image
rax_os-ext-sts_vm_state
rax_flavor
rax_id
rax_rax-bandwidth_bandwidth
rax_user_id
rax_os-dcf_diskconfig
rax_accessipv4
rax_accessipv6
rax_progress
rax_os-ext-sts_power_state
rax_metadata
rax_status
rax_updated
rax_hostid
rax_name
rax_created
rax_tenant_id
rax_loaded
Configuration:
rax.py can be configured using a rax.ini file or via environment
    variables. The rax.ini file should live in the same directory alongside
this script.
The section header for configuration values related to this
inventory plugin is [rax]
[rax]
creds_file = ~/.rackspace_cloud_credentials
regions = IAD,ORD,DFW
env = prod
meta_prefix = meta
access_network = public
access_ip_version = 4
Each of these configurations also has a corresponding environment variable.
An environment variable will override a configuration file value.
creds_file:
Environment Variable: RAX_CREDS_FILE
An optional configuration that points to a pyrax-compatible credentials
file.
If not supplied, rax.py will look for a credentials file
at ~/.rackspace_cloud_credentials. It uses the Rackspace Python SDK,
and therefore requires a file formatted per the SDK's specifications.
https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md
regions:
Environment Variable: RAX_REGION
An optional environment variable to narrow inventory search
scope. If used, needs a value like ORD, DFW, SYD (a Rackspace
datacenter) and optionally accepts a comma-separated list.
environment:
Environment Variable: RAX_ENV
A configuration that will use an environment as configured in
~/.pyrax.cfg, see
https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md
meta_prefix:
Environment Variable: RAX_META_PREFIX
Default: meta
A configuration that changes the prefix used for meta key/value groups.
For compatibility with ec2.py set to "tag"
access_network:
Environment Variable: RAX_ACCESS_NETWORK
Default: public
A configuration that will tell the inventory script to use a specific
server network to determine the ansible_ssh_host value. If no address
is found, ansible_ssh_host will not be set. Accepts a comma-separated
list of network names, the first found wins.
access_ip_version:
Environment Variable: RAX_ACCESS_IP_VERSION
Default: 4
A configuration related to "access_network" that will attempt to
determine the ansible_ssh_host value for either IPv4 or IPv6. If no
address is found, ansible_ssh_host will not be set.
Acceptable values are: 4 or 6. Values other than 4 or 6
will be ignored, and 4 will be used. Accepts a comma-separated list,
the first found wins.
Examples:
List server instances
$ RAX_CREDS_FILE=~/.raxpub rax.py --list
List servers in ORD datacenter only
$ RAX_CREDS_FILE=~/.raxpub RAX_REGION=ORD rax.py --list
List servers in ORD and DFW datacenters
$ RAX_CREDS_FILE=~/.raxpub RAX_REGION=ORD,DFW rax.py --list
Get server details for server named "server.example.com"
$ RAX_CREDS_FILE=~/.raxpub rax.py --host server.example.com
Use the instance private IP to connect (instead of public IP)
$ RAX_CREDS_FILE=~/.raxpub RAX_ACCESS_NETWORK=private rax.py --list
"""
import os
import re
import sys
import argparse
import warnings
import collections
from ansible.module_utils.six import iteritems
from ansible.module_utils.six.moves import configparser as ConfigParser
import json
try:
import pyrax
from pyrax.utils import slugify
except ImportError:
sys.exit('pyrax is required for this module')
from time import time
from ansible.constants import get_config
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import text_type
NON_CALLABLES = (text_type, str, bool, dict, int, list, type(None))
def load_config_file():
p = ConfigParser.ConfigParser()
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'rax.ini')
try:
p.read(config_file)
except ConfigParser.Error:
return None
else:
return p
def rax_slugify(value):
return 'rax_%s' % (re.sub(r'[^\w-]', '_', value).lower().lstrip('_'))
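# Illustrative example: rax_slugify('OS-EXT-STS:task_state') returns
# 'rax_os-ext-sts_task_state', which is how the rax_* host variables listed in
# the module docstring above get their names.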
def to_dict(obj):
instance = {}
for key in dir(obj):
value = getattr(obj, key)
if isinstance(value, NON_CALLABLES) and not key.startswith('_'):
key = rax_slugify(key)
instance[key] = value
return instance
def host(regions, hostname):
hostvars = {}
for region in regions:
# Connect to the region
cs = pyrax.connect_to_cloudservers(region=region)
for server in cs.servers.list():
if server.name == hostname:
for key, value in to_dict(server).items():
hostvars[key] = value
# And finally, add an IP address
hostvars['ansible_ssh_host'] = server.accessIPv4
print(json.dumps(hostvars, sort_keys=True, indent=4))
def _list_into_cache(regions):
groups = collections.defaultdict(list)
hostvars = collections.defaultdict(dict)
images = {}
cbs_attachments = collections.defaultdict(dict)
prefix = get_config(p, 'rax', 'meta_prefix', 'RAX_META_PREFIX', 'meta')
try:
# Ansible 2.3+
networks = get_config(p, 'rax', 'access_network',
'RAX_ACCESS_NETWORK', 'public', value_type='list')
except TypeError:
# Ansible 2.2.x and below
# pylint: disable=unexpected-keyword-arg
networks = get_config(p, 'rax', 'access_network',
'RAX_ACCESS_NETWORK', 'public', islist=True)
try:
try:
# Ansible 2.3+
ip_versions = map(int, get_config(p, 'rax', 'access_ip_version',
'RAX_ACCESS_IP_VERSION', 4, value_type='list'))
except TypeError:
# Ansible 2.2.x and below
# pylint: disable=unexpected-keyword-arg
ip_versions = map(int, get_config(p, 'rax', 'access_ip_version',
'RAX_ACCESS_IP_VERSION', 4, islist=True))
except Exception:
ip_versions = [4]
else:
ip_versions = [v for v in ip_versions if v in [4, 6]]
if not ip_versions:
ip_versions = [4]
# Go through all the regions looking for servers
for region in regions:
# Connect to the region
cs = pyrax.connect_to_cloudservers(region=region)
if cs is None:
warnings.warn(
'Connecting to Rackspace region "%s" has caused Pyrax to '
'return None. Is this a valid region?' % region,
RuntimeWarning)
continue
for server in cs.servers.list():
# Create a group on region
groups[region].append(server.name)
# Check if group metadata key in servers' metadata
group = server.metadata.get('group')
if group:
groups[group].append(server.name)
for extra_group in server.metadata.get('groups', '').split(','):
if extra_group:
groups[extra_group].append(server.name)
# Add host metadata
for key, value in to_dict(server).items():
hostvars[server.name][key] = value
hostvars[server.name]['rax_region'] = region
for key, value in iteritems(server.metadata):
groups['%s_%s_%s' % (prefix, key, value)].append(server.name)
groups['instance-%s' % server.id].append(server.name)
groups['flavor-%s' % server.flavor['id']].append(server.name)
# Handle boot from volume
if not server.image:
if not cbs_attachments[region]:
cbs = pyrax.connect_to_cloud_blockstorage(region)
for vol in cbs.list():
if boolean(vol.bootable, strict=False):
for attachment in vol.attachments:
metadata = vol.volume_image_metadata
server_id = attachment['server_id']
cbs_attachments[region][server_id] = {
'id': metadata['image_id'],
'name': slugify(metadata['image_name'])
}
image = cbs_attachments[region].get(server.id)
if image:
server.image = {'id': image['id']}
hostvars[server.name]['rax_image'] = server.image
hostvars[server.name]['rax_boot_source'] = 'volume'
images[image['id']] = image['name']
else:
hostvars[server.name]['rax_boot_source'] = 'local'
try:
imagegroup = 'image-%s' % images[server.image['id']]
groups[imagegroup].append(server.name)
groups['image-%s' % server.image['id']].append(server.name)
except KeyError:
try:
image = cs.images.get(server.image['id'])
except cs.exceptions.NotFound:
groups['image-%s' % server.image['id']].append(server.name)
else:
images[image.id] = image.human_id
groups['image-%s' % image.human_id].append(server.name)
groups['image-%s' % server.image['id']].append(server.name)
# And finally, add an IP address
ansible_ssh_host = None
# use accessIPv[46] instead of looping address for 'public'
for network_name in networks:
if ansible_ssh_host:
break
if network_name == 'public':
for version_name in ip_versions:
if ansible_ssh_host:
break
if version_name == 6 and server.accessIPv6:
ansible_ssh_host = server.accessIPv6
elif server.accessIPv4:
ansible_ssh_host = server.accessIPv4
if not ansible_ssh_host:
addresses = server.addresses.get(network_name, [])
for address in addresses:
for version_name in ip_versions:
if ansible_ssh_host:
break
if address.get('version') == version_name:
ansible_ssh_host = address.get('addr')
break
if ansible_ssh_host:
hostvars[server.name]['ansible_ssh_host'] = ansible_ssh_host
if hostvars:
groups['_meta'] = {'hostvars': hostvars}
with open(get_cache_file_path(regions), 'w') as cache_file:
json.dump(groups, cache_file)
def get_cache_file_path(regions):
regions_str = '.'.join([reg.strip().lower() for reg in regions])
ansible_tmp_path = os.path.join(os.path.expanduser("~"), '.ansible', 'tmp')
if not os.path.exists(ansible_tmp_path):
os.makedirs(ansible_tmp_path)
return os.path.join(ansible_tmp_path,
'ansible-rax-%s-%s.cache' % (
pyrax.identity.username, regions_str))
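# Illustrative example (hypothetical username): with regions ['IAD', 'ORD']
# and a pyrax identity username of 'alice', this returns
# ~/.ansible/tmp/ansible-rax-alice-iad.ord.cache.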
def _list(regions, refresh_cache=True):
cache_max_age = int(get_config(p, 'rax', 'cache_max_age',
'RAX_CACHE_MAX_AGE', 600))
if (not os.path.exists(get_cache_file_path(regions)) or
refresh_cache or
(time() - os.stat(get_cache_file_path(regions))[-1]) > cache_max_age):
# Cache file doesn't exist or older than 10m or refresh cache requested
_list_into_cache(regions)
with open(get_cache_file_path(regions), 'r') as cache_file:
groups = json.load(cache_file)
print(json.dumps(groups, sort_keys=True, indent=4))
def parse_args():
parser = argparse.ArgumentParser(description='Ansible Rackspace Cloud '
'inventory module')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--list', action='store_true',
help='List active servers')
group.add_argument('--host', help='List details about the specific host')
parser.add_argument('--refresh-cache', action='store_true', default=False,
                        help=('Force refresh of cache, making API requests to '
'RackSpace (default: False - use cache files)'))
return parser.parse_args()
def setup():
default_creds_file = os.path.expanduser('~/.rackspace_cloud_credentials')
env = get_config(p, 'rax', 'environment', 'RAX_ENV', None)
if env:
pyrax.set_environment(env)
keyring_username = pyrax.get_setting('keyring_username')
# Attempt to grab credentials from environment first
creds_file = get_config(p, 'rax', 'creds_file',
'RAX_CREDS_FILE', None)
if creds_file is not None:
creds_file = os.path.expanduser(creds_file)
else:
# But if that fails, use the default location of
# ~/.rackspace_cloud_credentials
if os.path.isfile(default_creds_file):
creds_file = default_creds_file
elif not keyring_username:
sys.exit('No value in environment variable %s and/or no '
'credentials file at %s'
% ('RAX_CREDS_FILE', default_creds_file))
identity_type = pyrax.get_setting('identity_type')
pyrax.set_setting('identity_type', identity_type or 'rackspace')
region = pyrax.get_setting('region')
try:
if keyring_username:
pyrax.keyring_auth(keyring_username, region=region)
else:
pyrax.set_credential_file(creds_file, region=region)
except Exception as e:
sys.exit("%s: %s" % (e, e.message))
regions = []
if region:
regions.append(region)
else:
try:
# Ansible 2.3+
region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all',
value_type='list')
except TypeError:
# Ansible 2.2.x and below
# pylint: disable=unexpected-keyword-arg
region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all',
islist=True)
for region in region_list:
region = region.strip().upper()
if region == 'ALL':
regions = pyrax.regions
break
elif region not in pyrax.regions:
sys.exit('Unsupported region %s' % region)
elif region not in regions:
regions.append(region)
return regions
def main():
args = parse_args()
regions = setup()
if args.list:
_list(regions, refresh_cache=args.refresh_cache)
elif args.host:
host(regions, args.host)
sys.exit(0)
p = load_config_file()
if __name__ == '__main__':
main()
|
anryko/ansible
|
contrib/inventory/rax.py
|
Python
|
gpl-3.0
| 16,904
|
thing = "hello from formerly_testcoll_pkg.submod.thing"
|
j-carl/ansible
|
test/integration/targets/collections/collection_root_user/ansible_collections/testns/othercoll/plugins/module_utils/formerly_testcoll_pkg/submod.py
|
Python
|
gpl-3.0
| 56
|
from __future__ import division, absolute_import, print_function
import sys, re, inspect, textwrap, pydoc
import sphinx
import collections
from .docscrape import NumpyDocString, FunctionDoc, ClassDoc
if sys.version_info[0] >= 3:
sixu = lambda s: s
else:
sixu = lambda s: unicode(s, 'unicode_escape')
class SphinxDocString(NumpyDocString):
def __init__(self, docstring, config={}):
NumpyDocString.__init__(self, docstring, config=config)
self.load_config(config)
def load_config(self, config):
self.use_plots = config.get('use_plots', False)
self.class_members_toctree = config.get('class_members_toctree', True)
# string conversion routines
def _str_header(self, name, symbol='`'):
return ['.. rubric:: ' + name, '']
def _str_field_list(self, name):
return [':' + name + ':']
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' '*indent + line]
return out
def _str_signature(self):
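        # Note: the early return below effectively disables signature rendering
        # in the Sphinx output; the branch that follows it is unreachable and
        # appears to be kept only for reference.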
return ['']
if self['Signature']:
return ['``%s``' % self['Signature']] + ['']
else:
return ['']
def _str_summary(self):
return self['Summary'] + ['']
def _str_extended_summary(self):
return self['Extended Summary'] + ['']
def _str_returns(self):
out = []
if self['Returns']:
out += self._str_field_list('Returns')
out += ['']
for param, param_type, desc in self['Returns']:
if param_type:
out += self._str_indent(['**%s** : %s' % (param.strip(),
param_type)])
else:
out += self._str_indent([param.strip()])
if desc:
out += ['']
out += self._str_indent(desc, 8)
out += ['']
return out
def _str_param_list(self, name):
out = []
if self[name]:
out += self._str_field_list(name)
out += ['']
for param, param_type, desc in self[name]:
if param_type:
out += self._str_indent(['**%s** : %s' % (param.strip(),
param_type)])
else:
out += self._str_indent(['**%s**' % param.strip()])
if desc:
out += ['']
out += self._str_indent(desc, 8)
out += ['']
return out
@property
def _obj(self):
if hasattr(self, '_cls'):
return self._cls
elif hasattr(self, '_f'):
return self._f
return None
def _str_member_list(self, name):
"""
Generate a member listing, autosummary:: table where possible,
and a table where not.
"""
out = []
if self[name]:
out += ['.. rubric:: %s' % name, '']
prefix = getattr(self, '_name', '')
if prefix:
prefix = '~%s.' % prefix
autosum = []
others = []
for param, param_type, desc in self[name]:
param = param.strip()
# Check if the referenced member can have a docstring or not
param_obj = getattr(self._obj, param, None)
if not (callable(param_obj)
or isinstance(param_obj, property)
or inspect.isgetsetdescriptor(param_obj)):
param_obj = None
if param_obj and (pydoc.getdoc(param_obj) or not desc):
# Referenced object has a docstring
autosum += [" %s%s" % (prefix, param)]
else:
others.append((param, param_type, desc))
if autosum:
out += ['.. autosummary::']
if self.class_members_toctree:
out += [' :toctree:']
out += [''] + autosum
if others:
maxlen_0 = max(3, max([len(x[0]) for x in others]))
hdr = sixu("=")*maxlen_0 + sixu(" ") + sixu("=")*10
fmt = sixu('%%%ds %%s ') % (maxlen_0,)
out += ['', hdr]
for param, param_type, desc in others:
desc = sixu(" ").join(x.strip() for x in desc).strip()
if param_type:
desc = "(%s) %s" % (param_type, desc)
out += [fmt % (param.strip(), desc)]
out += [hdr]
out += ['']
return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
out += ['']
content = textwrap.dedent("\n".join(self[name])).split("\n")
out += content
out += ['']
return out
def _str_see_also(self, func_role):
out = []
if self['See Also']:
see_also = super(SphinxDocString, self)._str_see_also(func_role)
out = ['.. seealso::', '']
out += self._str_indent(see_also[2:])
return out
def _str_warnings(self):
out = []
if self['Warnings']:
out = ['.. warning::', '']
out += self._str_indent(self['Warnings'])
return out
def _str_index(self):
idx = self['index']
out = []
if len(idx) == 0:
return out
out += ['.. index:: %s' % idx.get('default','')]
for section, references in idx.items():
if section == 'default':
continue
elif section == 'refguide':
out += [' single: %s' % (', '.join(references))]
else:
out += [' %s: %s' % (section, ','.join(references))]
return out
def _str_references(self):
out = []
if self['References']:
out += self._str_header('References')
if isinstance(self['References'], str):
self['References'] = [self['References']]
out.extend(self['References'])
out += ['']
# Latex collects all references to a separate bibliography,
# so we need to insert links to it
if sphinx.__version__ >= "0.6":
out += ['.. only:: latex','']
else:
out += ['.. latexonly::','']
items = []
for line in self['References']:
m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
if m:
items.append(m.group(1))
out += [' ' + ", ".join(["[%s]_" % item for item in items]), '']
return out
def _str_examples(self):
examples_str = "\n".join(self['Examples'])
if (self.use_plots and 'import matplotlib' in examples_str
and 'plot::' not in examples_str):
out = []
out += self._str_header('Examples')
out += ['.. plot::', '']
out += self._str_indent(self['Examples'])
out += ['']
return out
else:
return self._str_section('Examples')
def __str__(self, indent=0, func_role="obj"):
out = []
out += self._str_signature()
out += self._str_index() + ['']
out += self._str_summary()
out += self._str_extended_summary()
out += self._str_param_list('Parameters')
out += self._str_returns()
for param_list in ('Other Parameters', 'Raises', 'Warns'):
out += self._str_param_list(param_list)
out += self._str_warnings()
out += self._str_see_also(func_role)
out += self._str_section('Notes')
out += self._str_references()
out += self._str_examples()
for param_list in ('Attributes', 'Methods'):
out += self._str_member_list(param_list)
out = self._str_indent(out,indent)
return '\n'.join(out)
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
def __init__(self, obj, doc=None, config={}):
self.load_config(config)
FunctionDoc.__init__(self, obj, doc=doc, config=config)
class SphinxClassDoc(SphinxDocString, ClassDoc):
def __init__(self, obj, doc=None, func_doc=None, config={}):
self.load_config(config)
ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config)
class SphinxObjDoc(SphinxDocString):
def __init__(self, obj, doc=None, config={}):
self._f = obj
self.load_config(config)
SphinxDocString.__init__(self, doc, config=config)
def get_doc_object(obj, what=None, doc=None, config={}):
if what is None:
if inspect.isclass(obj):
what = 'class'
elif inspect.ismodule(obj):
what = 'module'
elif isinstance(obj, collections.Callable):
what = 'function'
else:
what = 'object'
if what == 'class':
return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
config=config)
elif what in ('function', 'method'):
return SphinxFunctionDoc(obj, doc=doc, config=config)
else:
if doc is None:
doc = pydoc.getdoc(obj)
return SphinxObjDoc(obj, doc, config=config)
|
opotowsky/cyclus.github.com
|
source/numpydoc/docscrape_sphinx.py
|
Python
|
bsd-3-clause
| 9,437
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
class XNXXIE(InfoExtractor):
_VALID_URL = r'https?://(?:video|www)\.xnxx\.com/video-?(?P<id>[0-9a-z]+)/'
_TESTS = [{
'url': 'http://www.xnxx.com/video-55awb78/skyrim_test_video',
'md5': 'ef7ecee5af78f8b03dca2cf31341d3a0',
'info_dict': {
'id': '55awb78',
'ext': 'flv',
'title': 'Skyrim Test Video',
'age_limit': 18,
},
}, {
'url': 'http://video.xnxx.com/video1135332/lida_naked_funny_actress_5_',
'only_matching': True,
}, {
'url': 'http://www.xnxx.com/video-55awb78/',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_url = self._search_regex(r'flv_url=(.*?)&',
webpage, 'video URL')
video_url = compat_urllib_parse_unquote(video_url)
video_title = self._html_search_regex(r'<title>(.*?)\s+-\s+XNXX.COM',
webpage, 'title')
video_thumbnail = self._search_regex(r'url_bigthumb=(.*?)&',
webpage, 'thumbnail', fatal=False)
return {
'id': video_id,
'url': video_url,
'title': video_title,
'ext': 'flv',
'thumbnail': video_thumbnail,
'age_limit': 18,
}
|
Tithen-Firion/youtube-dl
|
youtube_dl/extractor/xnxx.py
|
Python
|
unlicense
| 1,596
|
"""Tests for the fido component."""
|
fbradyirl/home-assistant
|
tests/components/fido/__init__.py
|
Python
|
apache-2.0
| 36
|
class A(object):
def __init__(self, bar):
self._x = 1 ; self._bar = bar
def __getX(self):
return self._x
def __setX(self, x):
self._x = x
def __delX(self):
pass
x1 = property(__getX, __setX, __delX, "doc of x1")
x2 = property(__setX) # should return
x3 = property(__getX, __getX) # should not return
x4 = property(__getX, fdel=__getX) # should not return
x5 = property(__getX, doc=123) # bad doc
x6 = property(lambda self: self._x)
x7 = property(lambda self: self._x, lambda self: self._x) # setter should not return
@property
def foo(self):
return self._x
@foo.setter # ignored in 2.5
def foo(self, x):
self._x = x
@foo.deleter # ignored in 2.5
def foo(self):
pass
@property
def boo(self):
return self._x
@boo.setter
def boo1(self, x): # ignored in 2.5
self._x = x
@boo.deleter
    def boo2(self): # ignored in 2.5
pass
@property
def moo(self): # should return
pass
@moo.setter
def foo(self, x):
return 1 # ignored in 2.5
@foo.deleter
def foo(self):
return self._x # ignored in 2.5
@qoo.setter # unknown qoo is reported in ref inspection
def qoo(self, v):
self._x = v
@property
def bar(self):
return None
class Ghostbusters(object):
def __call__(self):
return "Who do you call?"
gb = Ghostbusters()
class B(object):
x = property(gb) # pass
y = property(Ghostbusters()) # pass
z = property(Ghostbusters) # pass
class Eternal(object):
def give(self):
while True:
yield 1
def giveAndTake(self):
x = 1
while True:
x = (yield x)
one = property(give) # should pass
anything = property(giveAndTake) # should pass
|
caot/intellij-community
|
python/testData/inspections/PyPropertyDefinitionInspection25/src/prop_test.py
|
Python
|
apache-2.0
| 1,700
|
from __future__ import absolute_import
from django.conf.urls import patterns, include
from . import views, customadmin, admin
urlpatterns = patterns('',
(r'^test_admin/admin/doc/', include('django.contrib.admindocs.urls')),
(r'^test_admin/admin/secure-view/$', views.secure_view),
(r'^test_admin/admin/', include(admin.site.urls)),
(r'^test_admin/admin2/', include(customadmin.site.urls)),
(r'^test_admin/admin3/', include(admin.site.urls), dict(form_url='pony')),
(r'^test_admin/admin4/', include(customadmin.simple_site.urls)),
(r'^test_admin/admin5/', include(admin.site2.urls)),
)
|
dex4er/django
|
tests/admin_views/urls.py
|
Python
|
bsd-3-clause
| 617
|
from enigma import eTimer
from Converter import Converter
class ConditionalShowHide(Converter, object):
def __init__(self, argstr):
Converter.__init__(self, argstr)
args = argstr.split(',')
self.invert = "Invert" in args
self.blink = "Blink" in args
if self.blink:
self.blinktime = len(args) == 2 and args[1].isdigit() and int(args[1]) or 500
self.timer = eTimer()
self.timer.callback.append(self.blinkFunc)
else:
self.timer = None
def blinkFunc(self):
if self.blinking:
for x in self.downstream_elements:
x.visible = not x.visible
def startBlinking(self):
self.blinking = True
self.timer.start(self.blinktime)
def stopBlinking(self):
self.blinking = False
for x in self.downstream_elements:
if x.visible:
x.hide()
self.timer.stop()
def calcVisibility(self):
b = self.source.boolean
if b is None:
return True
b ^= self.invert
return b
def changed(self, what):
vis = self.calcVisibility()
if self.blink:
if vis:
self.startBlinking()
else:
self.stopBlinking()
else:
for x in self.downstream_elements:
x.visible = vis
def connectDownstream(self, downstream):
Converter.connectDownstream(self, downstream)
vis = self.calcVisibility()
if self.blink:
if vis:
self.startBlinking()
else:
self.stopBlinking()
else:
downstream.visible = self.calcVisibility()
def destroy(self):
if self.timer:
self.timer.callback.remove(self.blinkFunc)
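# Illustrative sketch (the skin snippet is an assumption, not taken from this
# file): in an Enigma2 skin this converter would typically be attached to a
# renderer, e.g.
#     <widget source="session.RecordState" render="Pixmap">
#         <convert type="ConditionalShowHide">Blink,500</convert>
#     </widget>
# where "Blink,500" toggles the downstream element's visibility every 500 ms
# while the source boolean is true, and adding "Invert" flips that condition.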
|
popazerty/EG-2
|
lib/python/Components/Converter/ConditionalShowHide.py
|
Python
|
gpl-2.0
| 1,460
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# Copyright (c) 2013, Michael Scherer <misc@zarb.org>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Michael Scherer (@msherer) <misc@zarb.org>
connection: funcd
short_description: Use funcd to connect to target
description:
- This transport permits you to use Ansible over Func.
- For people who have already setup func and that wish to play with ansible,
          this permits moving gradually to Ansible without having to completely redo the network setup.
version_added: "1.1"
options:
remote_addr:
description:
- The path of the chroot you want to access.
default: inventory_hostname
vars:
- name: ansible_host
- name: ansible_func_host
"""
HAVE_FUNC = False
try:
import func.overlord.client as fc
HAVE_FUNC = True
except ImportError:
pass
import os
import tempfile
import shutil
from ansible.errors import AnsibleError
from ansible.utils.display import Display
display = Display()
class Connection(object):
''' Func-based connections '''
has_pipelining = False
def __init__(self, runner, host, port, *args, **kwargs):
self.runner = runner
self.host = host
        # port is unused; everything goes over func
self.port = port
def connect(self, port=None):
if not HAVE_FUNC:
raise AnsibleError("func is not installed")
self.client = fc.Client(self.host)
return self
def exec_command(self, cmd, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
''' run a command on the remote minion '''
if in_data:
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
        # totally ignores privilege escalation
display.vvv("EXEC %s" % (cmd), host=self.host)
p = self.client.command.run(cmd)[self.host]
return (p[0], p[1], p[2])
def _normalize_path(self, path, prefix):
if not path.startswith(os.path.sep):
path = os.path.join(os.path.sep, path)
normpath = os.path.normpath(path)
return os.path.join(prefix, normpath[1:])
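        # Illustrative example: _normalize_path('etc/hosts', '/') and
        # _normalize_path('/etc/hosts', '/') both return '/etc/hosts', so
        # put_file/fetch_file below always operate on an absolute remote path.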
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
out_path = self._normalize_path(out_path, '/')
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
self.client.local.copyfile.send(in_path, out_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
in_path = self._normalize_path(in_path, '/')
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
        # need to use a tmp dir because of the difference in semantics between
        # getfile (which takes a directory as destination) and fetch_file,
        # which takes a file directly
tmpdir = tempfile.mkdtemp(prefix="func_ansible")
self.client.local.getfile.get(in_path, tmpdir)
shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)), out_path)
shutil.rmtree(tmpdir)
def close(self):
''' terminate the connection; nothing to do here '''
pass
|
alxgu/ansible
|
lib/ansible/plugins/connection/funcd.py
|
Python
|
gpl-3.0
| 3,533
|
import unittest
import unittest.mock
import random
import time
import pickle
import warnings
from functools import partial
from math import log, exp, pi, fsum, sin
from test import support
class TestBasicOps:
# Superclass with tests common to all generators.
# Subclasses must arrange for self.gen to retrieve the Random instance
# to be tested.
def randomlist(self, n):
"""Helper function to make a list of random numbers"""
return [self.gen.random() for i in range(n)]
def test_autoseed(self):
self.gen.seed()
state1 = self.gen.getstate()
time.sleep(0.1)
        self.gen.seed()   # different seeds at different times
state2 = self.gen.getstate()
self.assertNotEqual(state1, state2)
def test_saverestore(self):
N = 1000
self.gen.seed()
state = self.gen.getstate()
randseq = self.randomlist(N)
self.gen.setstate(state) # should regenerate the same sequence
self.assertEqual(randseq, self.randomlist(N))
def test_seedargs(self):
# Seed value with a negative hash.
class MySeed(object):
def __hash__(self):
return -1729
for arg in [None, 0, 0, 1, 1, -1, -1, 10**20, -(10**20),
3.14, 1+2j, 'a', tuple('abc'), MySeed()]:
self.gen.seed(arg)
for arg in [list(range(3)), dict(one=1)]:
self.assertRaises(TypeError, self.gen.seed, arg)
self.assertRaises(TypeError, self.gen.seed, 1, 2, 3, 4)
self.assertRaises(TypeError, type(self.gen), [])
@unittest.mock.patch('random._urandom') # os.urandom
def test_seed_when_randomness_source_not_found(self, urandom_mock):
# Random.seed() uses time.time() when an operating system specific
        # randomness source is not found. To test this on machines where it
# exists, run the above test, test_seedargs(), again after mocking
# os.urandom() so that it raises the exception expected when the
# randomness source is not available.
urandom_mock.side_effect = NotImplementedError
self.test_seedargs()
def test_shuffle(self):
shuffle = self.gen.shuffle
lst = []
shuffle(lst)
self.assertEqual(lst, [])
lst = [37]
shuffle(lst)
self.assertEqual(lst, [37])
seqs = [list(range(n)) for n in range(10)]
shuffled_seqs = [list(range(n)) for n in range(10)]
for shuffled_seq in shuffled_seqs:
shuffle(shuffled_seq)
for (seq, shuffled_seq) in zip(seqs, shuffled_seqs):
self.assertEqual(len(seq), len(shuffled_seq))
self.assertEqual(set(seq), set(shuffled_seq))
# The above tests all would pass if the shuffle was a
# no-op. The following non-deterministic test covers that. It
# asserts that the shuffled sequence of 1000 distinct elements
# must be different from the original one. Although there is
# mathematically a non-zero probability that this could
# actually happen in a genuinely random shuffle, it is
# completely negligible, given that the number of possible
# permutations of 1000 objects is 1000! (factorial of 1000),
# which is considerably larger than the number of atoms in the
# universe...
lst = list(range(1000))
shuffled_lst = list(range(1000))
shuffle(shuffled_lst)
self.assertTrue(lst != shuffled_lst)
shuffle(lst)
self.assertTrue(lst != shuffled_lst)
def test_choice(self):
choice = self.gen.choice
with self.assertRaises(IndexError):
choice([])
self.assertEqual(choice([50]), 50)
self.assertIn(choice([25, 75]), [25, 75])
def test_sample(self):
# For the entire allowable range of 0 <= k <= N, validate that
# the sample is of the correct length and contains only unique items
N = 100
population = range(N)
for k in range(N+1):
s = self.gen.sample(population, k)
self.assertEqual(len(s), k)
uniq = set(s)
self.assertEqual(len(uniq), k)
self.assertTrue(uniq <= set(population))
self.assertEqual(self.gen.sample([], 0), []) # test edge case N==k==0
# Exception raised if size of sample exceeds that of population
self.assertRaises(ValueError, self.gen.sample, population, N+1)
def test_sample_distribution(self):
# For the entire allowable range of 0 <= k <= N, validate that
# sample generates all possible permutations
n = 5
pop = range(n)
trials = 10000 # large num prevents false negatives without slowing normal case
def factorial(n):
if n == 0:
return 1
return n * factorial(n - 1)
for k in range(n):
expected = factorial(n) // factorial(n-k)
perms = {}
for i in range(trials):
perms[tuple(self.gen.sample(pop, k))] = None
if len(perms) == expected:
break
else:
self.fail()
def test_sample_inputs(self):
# SF bug #801342 -- population can be any iterable defining __len__()
self.gen.sample(set(range(20)), 2)
self.gen.sample(range(20), 2)
self.gen.sample(range(20), 2)
self.gen.sample(str('abcdefghijklmnopqrst'), 2)
self.gen.sample(tuple('abcdefghijklmnopqrst'), 2)
def test_sample_on_dicts(self):
self.assertRaises(TypeError, self.gen.sample, dict.fromkeys('abcdef'), 2)
def test_gauss(self):
# Ensure that the seed() method initializes all the hidden state. In
# particular, through 2.2.1 it failed to reset a piece of state used
# by (and only by) the .gauss() method.
for seed in 1, 12, 123, 1234, 12345, 123456, 654321:
self.gen.seed(seed)
x1 = self.gen.random()
y1 = self.gen.gauss(0, 1)
self.gen.seed(seed)
x2 = self.gen.random()
y2 = self.gen.gauss(0, 1)
self.assertEqual(x1, x2)
self.assertEqual(y1, y2)
def test_pickling(self):
state = pickle.dumps(self.gen)
origseq = [self.gen.random() for i in range(10)]
newgen = pickle.loads(state)
restoredseq = [newgen.random() for i in range(10)]
self.assertEqual(origseq, restoredseq)
def test_bug_1727780(self):
# verify that version-2-pickles can be loaded
# fine, whether they are created on 32-bit or 64-bit
# platforms, and that version-3-pickles load fine.
files = [("randv2_32.pck", 780),
("randv2_64.pck", 866),
("randv3.pck", 343)]
for file, value in files:
f = open(support.findfile(file),"rb")
r = pickle.load(f)
f.close()
self.assertEqual(int(r.random()*1000), value)
def test_bug_9025(self):
# Had problem with an uneven distribution in int(n*random())
# Verify the fix by checking that distributions fall within expectations.
n = 100000
randrange = self.gen.randrange
k = sum(randrange(6755399441055744) % 3 == 2 for i in range(n))
self.assertTrue(0.30 < k/n < .37, (k/n))
try:
random.SystemRandom().random()
except NotImplementedError:
SystemRandom_available = False
else:
SystemRandom_available = True
@unittest.skipUnless(SystemRandom_available, "random.SystemRandom not available")
class SystemRandom_TestBasicOps(TestBasicOps, unittest.TestCase):
gen = random.SystemRandom()
def test_autoseed(self):
# Doesn't need to do anything except not fail
self.gen.seed()
def test_saverestore(self):
self.assertRaises(NotImplementedError, self.gen.getstate)
self.assertRaises(NotImplementedError, self.gen.setstate, None)
def test_seedargs(self):
# Doesn't need to do anything except not fail
self.gen.seed(100)
def test_gauss(self):
self.gen.gauss_next = None
self.gen.seed(100)
self.assertEqual(self.gen.gauss_next, None)
def test_pickling(self):
self.assertRaises(NotImplementedError, pickle.dumps, self.gen)
def test_53_bits_per_float(self):
# This should pass whenever a C double has 53 bit precision.
span = 2 ** 53
cum = 0
for i in range(100):
cum |= int(self.gen.random() * span)
self.assertEqual(cum, span-1)
def test_bigrand(self):
# The randrange routine should build-up the required number of bits
# in stages so that all bit positions are active.
span = 2 ** 500
cum = 0
for i in range(100):
r = self.gen.randrange(span)
self.assertTrue(0 <= r < span)
cum |= r
self.assertEqual(cum, span-1)
def test_bigrand_ranges(self):
for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
start = self.gen.randrange(2 ** (i-2))
stop = self.gen.randrange(2 ** i)
if stop <= start:
continue
self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
def test_rangelimits(self):
for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
self.assertEqual(set(range(start,stop)),
set([self.gen.randrange(start,stop) for i in range(100)]))
def test_randrange_nonunit_step(self):
rint = self.gen.randrange(0, 10, 2)
self.assertIn(rint, (0, 2, 4, 6, 8))
rint = self.gen.randrange(0, 2, 2)
self.assertEqual(rint, 0)
def test_randrange_errors(self):
raises = partial(self.assertRaises, ValueError, self.gen.randrange)
# Empty range
raises(3, 3)
raises(-721)
raises(0, 100, -12)
# Non-integer start/stop
raises(3.14159)
raises(0, 2.71828)
# Zero and non-integer step
raises(0, 42, 0)
raises(0, 42, 3.14159)
def test_genrandbits(self):
# Verify ranges
for k in range(1, 1000):
self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
# Verify all bits active
getbits = self.gen.getrandbits
for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
cum = 0
for i in range(100):
cum |= getbits(span)
self.assertEqual(cum, 2**span-1)
# Verify argument checking
self.assertRaises(TypeError, self.gen.getrandbits)
self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
self.assertRaises(ValueError, self.gen.getrandbits, 0)
self.assertRaises(ValueError, self.gen.getrandbits, -1)
self.assertRaises(TypeError, self.gen.getrandbits, 10.1)
def test_randbelow_logic(self, _log=log, int=int):
# check bitcount transition points: 2**i and 2**(i+1)-1
# show that: k = int(1.001 + _log(n, 2))
# is equal to or one greater than the number of bits in n
for i in range(1, 1000):
n = 1 << i # check an exact power of two
numbits = i+1
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits)
self.assertEqual(n, 2**(k-1))
n += n - 1 # check 1 below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertIn(k, [numbits, numbits+1])
self.assertTrue(2**k > n > 2**(k-2))
n -= n >> 15 # check a little farther below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits) # note the stronger assertion
self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion
class MersenneTwister_TestBasicOps(TestBasicOps, unittest.TestCase):
gen = random.Random()
def test_guaranteed_stable(self):
# These sequences are guaranteed to stay the same across versions of python
self.gen.seed(3456147, version=1)
self.assertEqual([self.gen.random().hex() for i in range(4)],
['0x1.ac362300d90d2p-1', '0x1.9d16f74365005p-1',
'0x1.1ebb4352e4c4dp-1', '0x1.1a7422abf9c11p-1'])
self.gen.seed("the quick brown fox", version=2)
self.assertEqual([self.gen.random().hex() for i in range(4)],
['0x1.1239ddfb11b7cp-3', '0x1.b3cbb5c51b120p-4',
'0x1.8c4f55116b60fp-1', '0x1.63eb525174a27p-1'])
def test_setstate_first_arg(self):
self.assertRaises(ValueError, self.gen.setstate, (1, None, None))
def test_setstate_middle_arg(self):
# Wrong type, s/b tuple
self.assertRaises(TypeError, self.gen.setstate, (2, None, None))
# Wrong length, s/b 625
self.assertRaises(ValueError, self.gen.setstate, (2, (1,2,3), None))
# Wrong type, s/b tuple of 625 ints
self.assertRaises(TypeError, self.gen.setstate, (2, ('a',)*625, None))
# Last element s/b an int also
self.assertRaises(TypeError, self.gen.setstate, (2, (0,)*624+('a',), None))
# Little trick to make "tuple(x % (2**32) for x in internalstate)"
# raise ValueError. I cannot think of a simple way to achieve this, so
# I am opting for using a generator as the middle argument of setstate
# which attempts to cast a NaN to integer.
state_values = self.gen.getstate()[1]
state_values = list(state_values)
state_values[-1] = float('nan')
state = (int(x) for x in state_values)
self.assertRaises(TypeError, self.gen.setstate, (2, state, None))
def test_referenceImplementation(self):
# Compare the python implementation with results from the original
# code. Create 2000 53-bit precision random floats. Compare only
# the last ten entries to show that the independent implementations
# are tracking. Here is the main() function needed to create the
# list of expected random numbers:
# void main(void){
# int i;
# unsigned long init[4]={61731, 24903, 614, 42143}, length=4;
# init_by_array(init, length);
# for (i=0; i<2000; i++) {
# printf("%.15f ", genrand_res53());
# if (i%5==4) printf("\n");
# }
# }
expected = [0.45839803073713259,
0.86057815201978782,
0.92848331726782152,
0.35932681119782461,
0.081823493762449573,
0.14332226470169329,
0.084297823823520024,
0.53814864671831453,
0.089215024911993401,
0.78486196105372907]
self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
actual = self.randomlist(2000)[-10:]
for a, e in zip(actual, expected):
self.assertAlmostEqual(a,e,places=14)
def test_strong_reference_implementation(self):
# Like test_referenceImplementation, but checks for exact bit-level
# equality. This should pass on any box where C double contains
# at least 53 bits of precision (the underlying algorithm suffers
# no rounding errors -- all results are exact).
from math import ldexp
expected = [0x0eab3258d2231f,
0x1b89db315277a5,
0x1db622a5518016,
0x0b7f9af0d575bf,
0x029e4c4db82240,
0x04961892f5d673,
0x02b291598e4589,
0x11388382c15694,
0x02dad977c9e1fe,
0x191d96d4d334c6]
self.gen.seed(61731 + (24903<<32) + (614<<64) + (42143<<96))
actual = self.randomlist(2000)[-10:]
for a, e in zip(actual, expected):
self.assertEqual(int(ldexp(a, 53)), e)
def test_long_seed(self):
# This is most interesting to run in debug mode, just to make sure
# nothing blows up. Under the covers, a dynamically resized array
# is allocated, consuming space proportional to the number of bits
# in the seed. Unfortunately, that's a quadratic-time algorithm,
# so don't make this horribly big.
seed = (1 << (10000 * 8)) - 1 # about 10K bytes
self.gen.seed(seed)
def test_53_bits_per_float(self):
# This should pass whenever a C double has 53 bit precision.
span = 2 ** 53
cum = 0
for i in range(100):
cum |= int(self.gen.random() * span)
self.assertEqual(cum, span-1)
def test_bigrand(self):
# The randrange routine should build-up the required number of bits
# in stages so that all bit positions are active.
span = 2 ** 500
cum = 0
for i in range(100):
r = self.gen.randrange(span)
self.assertTrue(0 <= r < span)
cum |= r
self.assertEqual(cum, span-1)
def test_bigrand_ranges(self):
for i in [40,80, 160, 200, 211, 250, 375, 512, 550]:
start = self.gen.randrange(2 ** (i-2))
stop = self.gen.randrange(2 ** i)
if stop <= start:
continue
self.assertTrue(start <= self.gen.randrange(start, stop) < stop)
def test_rangelimits(self):
for start, stop in [(-2,0), (-(2**60)-2,-(2**60)), (2**60,2**60+2)]:
self.assertEqual(set(range(start,stop)),
set([self.gen.randrange(start,stop) for i in range(100)]))
def test_genrandbits(self):
# Verify cross-platform repeatability
self.gen.seed(1234567)
self.assertEqual(self.gen.getrandbits(100),
97904845777343510404718956115)
# Verify ranges
for k in range(1, 1000):
self.assertTrue(0 <= self.gen.getrandbits(k) < 2**k)
# Verify all bits active
getbits = self.gen.getrandbits
for span in [1, 2, 3, 4, 31, 32, 32, 52, 53, 54, 119, 127, 128, 129]:
cum = 0
for i in range(100):
cum |= getbits(span)
self.assertEqual(cum, 2**span-1)
# Verify argument checking
self.assertRaises(TypeError, self.gen.getrandbits)
self.assertRaises(TypeError, self.gen.getrandbits, 'a')
self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
self.assertRaises(ValueError, self.gen.getrandbits, 0)
self.assertRaises(ValueError, self.gen.getrandbits, -1)
def test_randbelow_logic(self, _log=log, int=int):
# check bitcount transition points: 2**i and 2**(i+1)-1
# show that: k = int(1.001 + _log(n, 2))
# is equal to or one greater than the number of bits in n
for i in range(1, 1000):
n = 1 << i # check an exact power of two
numbits = i+1
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits)
self.assertEqual(n, 2**(k-1))
n += n - 1 # check 1 below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertIn(k, [numbits, numbits+1])
self.assertTrue(2**k > n > 2**(k-2))
n -= n >> 15 # check a little farther below the next power of two
k = int(1.00001 + _log(n, 2))
self.assertEqual(k, numbits) # note the stronger assertion
self.assertTrue(2**k > n > 2**(k-1)) # note the stronger assertion
@unittest.mock.patch('random.Random.random')
def test_randbelow_overriden_random(self, random_mock):
# Random._randbelow() can only use random() when the built-in one
# has been overridden but no new getrandbits() method was supplied.
random_mock.side_effect = random.SystemRandom().random
maxsize = 1<<random.BPF
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
# Population range too large (n >= maxsize)
self.gen._randbelow(maxsize+1, maxsize = maxsize)
self.gen._randbelow(5640, maxsize = maxsize)
# This might be going too far to test a single line, but because of our
# noble aim of achieving 100% test coverage we need to write a case in
# which the following line in Random._randbelow() gets executed:
#
# rem = maxsize % n
# limit = (maxsize - rem) / maxsize
# r = random()
# while r >= limit:
# r = random() # <== *This line* <==<
#
# Therefore, to guarantee that the while loop is executed at least
# once, we need to mock random() so that it returns a number greater
# than 'limit' the first time it gets called.
n = 42
epsilon = 0.01
limit = (maxsize - (maxsize % n)) / maxsize
random_mock.side_effect = [limit + epsilon, limit - epsilon]
self.gen._randbelow(n, maxsize = maxsize)
def test_randrange_bug_1590891(self):
start = 1000000000000
stop = -100000000000000000000
step = -200
x = self.gen.randrange(start, stop, step)
self.assertTrue(stop < x <= start)
self.assertEqual((x+stop)%step, 0)
def gamma(z, sqrt2pi=(2.0*pi)**0.5):
# Reflection to right half of complex plane
if z < 0.5:
return pi / sin(pi*z) / gamma(1.0-z)
# Lanczos approximation with g=7
az = z + (7.0 - 0.5)
return az ** (z-0.5) / exp(az) * sqrt2pi * fsum([
0.9999999999995183,
676.5203681218835 / z,
-1259.139216722289 / (z+1.0),
771.3234287757674 / (z+2.0),
-176.6150291498386 / (z+3.0),
12.50734324009056 / (z+4.0),
-0.1385710331296526 / (z+5.0),
0.9934937113930748e-05 / (z+6.0),
0.1659470187408462e-06 / (z+7.0),
])
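# Sanity-check examples: with this Lanczos approximation, gamma(5.0) is 24.0
# to within roughly 1e-13 and gamma(0.5) is sqrt(pi) (about 1.7724538509);
# test_avg_std below relies on this helper for the Weibull mean/variance
# expectations.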
class TestDistributions(unittest.TestCase):
def test_zeroinputs(self):
        # Verify that distributions can handle a series of zero inputs
g = random.Random()
x = [g.random() for i in range(50)] + [0.0]*5
g.random = x[:].pop; g.uniform(1,10)
g.random = x[:].pop; g.paretovariate(1.0)
g.random = x[:].pop; g.expovariate(1.0)
g.random = x[:].pop; g.weibullvariate(1.0, 1.0)
g.random = x[:].pop; g.vonmisesvariate(1.0, 1.0)
g.random = x[:].pop; g.normalvariate(0.0, 1.0)
g.random = x[:].pop; g.gauss(0.0, 1.0)
g.random = x[:].pop; g.lognormvariate(0.0, 1.0)
g.random = x[:].pop; g.vonmisesvariate(0.0, 1.0)
g.random = x[:].pop; g.gammavariate(0.01, 1.0)
g.random = x[:].pop; g.gammavariate(1.0, 1.0)
g.random = x[:].pop; g.gammavariate(200.0, 1.0)
g.random = x[:].pop; g.betavariate(3.0, 3.0)
g.random = x[:].pop; g.triangular(0.0, 1.0, 1.0/3.0)
def test_avg_std(self):
# Use integration to test distribution average and standard deviation.
# Only works for distributions which do not consume variates in pairs
g = random.Random()
N = 5000
x = [i/float(N) for i in range(1,N)]
for variate, args, mu, sigmasqrd in [
(g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12),
(g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0),
(g.expovariate, (1.5,), 1/1.5, 1/1.5**2),
(g.vonmisesvariate, (1.23, 0), pi, pi**2/3),
(g.paretovariate, (5.0,), 5.0/(5.0-1),
5.0/((5.0-1)**2*(5.0-2))),
(g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0),
gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]:
g.random = x[:].pop
y = []
for i in range(len(x)):
try:
y.append(variate(*args))
except IndexError:
pass
s1 = s2 = 0
for e in y:
s1 += e
s2 += (e - mu) ** 2
N = len(y)
self.assertAlmostEqual(s1/N, mu, places=2,
msg='%s%r' % (variate.__name__, args))
self.assertAlmostEqual(s2/(N-1), sigmasqrd, places=2,
msg='%s%r' % (variate.__name__, args))
def test_constant(self):
g = random.Random()
N = 100
for variate, args, expected in [
(g.uniform, (10.0, 10.0), 10.0),
(g.triangular, (10.0, 10.0), 10.0),
(g.triangular, (10.0, 10.0, 10.0), 10.0),
(g.expovariate, (float('inf'),), 0.0),
(g.vonmisesvariate, (3.0, float('inf')), 3.0),
(g.gauss, (10.0, 0.0), 10.0),
(g.lognormvariate, (0.0, 0.0), 1.0),
(g.lognormvariate, (-float('inf'), 0.0), 0.0),
(g.normalvariate, (10.0, 0.0), 10.0),
(g.paretovariate, (float('inf'),), 1.0),
(g.weibullvariate, (10.0, float('inf')), 10.0),
(g.weibullvariate, (0.0, 10.0), 0.0),
]:
for i in range(N):
self.assertEqual(variate(*args), expected)
def test_von_mises_range(self):
# Issue 17149: von mises variates were not consistently in the
# range [0, 2*PI].
g = random.Random()
N = 100
for mu in 0.0, 0.1, 3.1, 6.2:
for kappa in 0.0, 2.3, 500.0:
for _ in range(N):
sample = g.vonmisesvariate(mu, kappa)
self.assertTrue(
0 <= sample <= random.TWOPI,
msg=("vonmisesvariate({}, {}) produced a result {} out"
" of range [0, 2*pi]").format(mu, kappa, sample))
def test_von_mises_large_kappa(self):
        # Issue #17141: vonmisesvariate() would hang for large kappas
random.vonmisesvariate(0, 1e15)
random.vonmisesvariate(0, 1e100)
def test_gammavariate_errors(self):
# Both alpha and beta must be > 0.0
self.assertRaises(ValueError, random.gammavariate, -1, 3)
self.assertRaises(ValueError, random.gammavariate, 0, 2)
self.assertRaises(ValueError, random.gammavariate, 2, 0)
self.assertRaises(ValueError, random.gammavariate, 1, -3)
@unittest.mock.patch('random.Random.random')
def test_gammavariate_full_code_coverage(self, random_mock):
# There are three different possibilities in the current implementation
# of random.gammavariate(), depending on the value of 'alpha'. What we
# are going to do here is to fix the values returned by random() to
# generate test cases that provide 100% line coverage of the method.
# #1: alpha > 1.0: we want the first random number to be outside the
# [1e-7, .9999999] range, so that the continue statement executes
# once. The values of u1 and u2 will be 0.5 and 0.3, respectively.
random_mock.side_effect = [1e-8, 0.5, 0.3]
returned_value = random.gammavariate(1.1, 2.3)
self.assertAlmostEqual(returned_value, 2.53)
        # #2: alpha == 1: first random number less than 1e-7 so that the body
# of the while loop executes once. Then random.random() returns 0.45,
# which causes while to stop looping and the algorithm to terminate.
random_mock.side_effect = [1e-8, 0.45]
returned_value = random.gammavariate(1.0, 3.14)
self.assertAlmostEqual(returned_value, 2.507314166123803)
# #3: 0 < alpha < 1. This is the most complex region of code to cover,
# as there are multiple if-else statements. Let's take a look at the
# source code, and determine the values that we need accordingly:
#
# while 1:
# u = random()
# b = (_e + alpha)/_e
# p = b*u
# if p <= 1.0: # <=== (A)
# x = p ** (1.0/alpha)
# else: # <=== (B)
# x = -_log((b-p)/alpha)
# u1 = random()
# if p > 1.0: # <=== (C)
# if u1 <= x ** (alpha - 1.0): # <=== (D)
# break
# elif u1 <= _exp(-x): # <=== (E)
# break
# return x * beta
#
# First, we want (A) to be True. For that we need that:
# b*random() <= 1.0
# r1 = random() <= 1.0 / b
#
# We now get to the second if-else branch, and here, since p <= 1.0,
# (C) is False and we take the elif branch, (E). For it to be True,
# so that the break is executed, we need that:
# r2 = random() <= _exp(-x)
# r2 <= _exp(-(p ** (1.0/alpha)))
# r2 <= _exp(-((b*r1) ** (1.0/alpha)))
_e = random._e
_exp = random._exp
_log = random._log
alpha = 0.35
beta = 1.45
b = (_e + alpha)/_e
epsilon = 0.01
r1 = 0.8859296441566 # 1.0 / b
r2 = 0.3678794411714 # _exp(-((b*r1) ** (1.0/alpha)))
# These four "random" values result in the following trace:
# (A) True, (E) False --> [next iteration of while]
# (A) True, (E) True --> [while loop breaks]
random_mock.side_effect = [r1, r2 + epsilon, r1, r2]
returned_value = random.gammavariate(alpha, beta)
self.assertAlmostEqual(returned_value, 1.4499999999997544)
# Let's now make (A) be False. If this is the case, when we get to the
# second if-else 'p' is greater than 1, so (C) evaluates to True. We
# now encounter a second if statement, (D), which in order to execute
# must satisfy the following condition:
# r2 <= x ** (alpha - 1.0)
# r2 <= (-_log((b-p)/alpha)) ** (alpha - 1.0)
# r2 <= (-_log((b-(b*r1))/alpha)) ** (alpha - 1.0)
r1 = 0.8959296441566 # (1.0 / b) + epsilon -- so that (A) is False
r2 = 0.9445400408898141
# And these four values result in the following trace:
# (B) and (C) True, (D) False --> [next iteration of while]
        # (B) and (C) True, (D) True --> [while loop breaks]
random_mock.side_effect = [r1, r2 + epsilon, r1, r2]
returned_value = random.gammavariate(alpha, beta)
self.assertAlmostEqual(returned_value, 1.5830349561760781)
@unittest.mock.patch('random.Random.gammavariate')
def test_betavariate_return_zero(self, gammavariate_mock):
# betavariate() returns zero when the Gamma distribution
# that it uses internally returns this same value.
gammavariate_mock.return_value = 0.0
self.assertEqual(0.0, random.betavariate(2.71828, 3.14159))
class TestModule(unittest.TestCase):
def testMagicConstants(self):
self.assertAlmostEqual(random.NV_MAGICCONST, 1.71552776992141)
self.assertAlmostEqual(random.TWOPI, 6.28318530718)
self.assertAlmostEqual(random.LOG4, 1.38629436111989)
self.assertAlmostEqual(random.SG_MAGICCONST, 2.50407739677627)
def test__all__(self):
# tests validity but not completeness of the __all__ list
self.assertTrue(set(random.__all__) <= set(dir(random)))
def test_random_subclass_with_kwargs(self):
# SF bug #1486663 -- this used to erroneously raise a TypeError
class Subclass(random.Random):
def __init__(self, newarg=None):
random.Random.__init__(self)
Subclass(newarg=1)
if __name__ == "__main__":
unittest.main()
|
jiangzhuo/kbengine
|
kbe/src/lib/python/Lib/test/test_random.py
|
Python
|
lgpl-3.0
| 31,638
|
"""Defines serializers used by the Team API."""
from copy import deepcopy
from django.contrib.auth.models import User
from django.db.models import Count
from django.conf import settings
from django_countries import countries
from rest_framework import serializers
from openedx.core.lib.api.serializers import CollapsedReferenceSerializer
from openedx.core.lib.api.fields import ExpandableField
from openedx.core.djangoapps.user_api.accounts.serializers import UserReadOnlySerializer
from lms.djangoapps.teams.models import CourseTeam, CourseTeamMembership
class CountryField(serializers.Field):
"""
Field to serialize a country code.
"""
COUNTRY_CODES = dict(countries).keys()
def to_representation(self, obj):
"""
Represent the country as a 2-character unicode identifier.
"""
return unicode(obj)
def to_internal_value(self, data):
"""
Check that the code is a valid country code.
We leave the data in its original format so that the Django model's
CountryField can convert it to the internal representation used
by the django-countries library.
"""
if data and data not in self.COUNTRY_CODES:
raise serializers.ValidationError(
u"{code} is not a valid country code".format(code=data)
)
return data
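# Behaviour sketch for CountryField (illustrative, not exhaustive): to_internal_value('US')
# returns 'US' unchanged, empty values pass straight through, and an unassigned code such
# as 'ZZ' is expected to raise serializers.ValidationError.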
class UserMembershipSerializer(serializers.ModelSerializer):
"""Serializes CourseTeamMemberships with only user and date_joined
Used for listing team members.
"""
profile_configuration = deepcopy(settings.ACCOUNT_VISIBILITY_CONFIGURATION)
profile_configuration['shareable_fields'].append('url')
profile_configuration['public_fields'].append('url')
user = ExpandableField(
collapsed_serializer=CollapsedReferenceSerializer(
model_class=User,
id_source='username',
view_name='accounts_api',
read_only=True,
),
expanded_serializer=UserReadOnlySerializer(configuration=profile_configuration),
)
class Meta(object):
model = CourseTeamMembership
fields = ("user", "date_joined", "last_activity_at")
read_only_fields = ("date_joined", "last_activity_at")
class CourseTeamSerializer(serializers.ModelSerializer):
"""Serializes a CourseTeam with membership information."""
id = serializers.CharField(source='team_id', read_only=True) # pylint: disable=invalid-name
membership = UserMembershipSerializer(many=True, read_only=True)
country = CountryField()
class Meta(object):
model = CourseTeam
fields = (
"id",
"discussion_topic_id",
"name",
"course_id",
"topic_id",
"date_created",
"description",
"country",
"language",
"last_activity_at",
"membership",
)
read_only_fields = ("course_id", "date_created", "discussion_topic_id", "last_activity_at")
class CourseTeamCreationSerializer(serializers.ModelSerializer):
"""Deserializes a CourseTeam for creation."""
country = CountryField(required=False)
class Meta(object):
model = CourseTeam
fields = (
"name",
"course_id",
"description",
"topic_id",
"country",
"language",
)
def create(self, validated_data):
team = CourseTeam.create(
name=validated_data.get("name", ''),
course_id=validated_data.get("course_id"),
description=validated_data.get("description", ''),
topic_id=validated_data.get("topic_id", ''),
country=validated_data.get("country", ''),
language=validated_data.get("language", ''),
)
team.save()
return team
class CourseTeamSerializerWithoutMembership(CourseTeamSerializer):
"""The same as the `CourseTeamSerializer`, but elides the membership field.
Intended to be used as a sub-serializer for serializing team
memberships, since the membership field is redundant in that case.
"""
def __init__(self, *args, **kwargs):
super(CourseTeamSerializerWithoutMembership, self).__init__(*args, **kwargs)
del self.fields['membership']
class MembershipSerializer(serializers.ModelSerializer):
"""Serializes CourseTeamMemberships with information about both teams and users."""
profile_configuration = deepcopy(settings.ACCOUNT_VISIBILITY_CONFIGURATION)
profile_configuration['shareable_fields'].append('url')
profile_configuration['public_fields'].append('url')
user = ExpandableField(
collapsed_serializer=CollapsedReferenceSerializer(
model_class=User,
id_source='username',
view_name='accounts_api',
read_only=True,
),
expanded_serializer=UserReadOnlySerializer(configuration=profile_configuration)
)
team = ExpandableField(
collapsed_serializer=CollapsedReferenceSerializer(
model_class=CourseTeam,
id_source='team_id',
view_name='teams_detail',
read_only=True,
),
expanded_serializer=CourseTeamSerializerWithoutMembership(read_only=True),
)
class Meta(object):
model = CourseTeamMembership
fields = ("user", "team", "date_joined", "last_activity_at")
read_only_fields = ("date_joined", "last_activity_at")
class BaseTopicSerializer(serializers.Serializer):
"""Serializes a topic without team_count."""
description = serializers.CharField()
name = serializers.CharField()
id = serializers.CharField() # pylint: disable=invalid-name
class TopicSerializer(BaseTopicSerializer):
"""
Adds team_count to the basic topic serializer, checking if team_count
is already present in the topic data, and if not, querying the CourseTeam
model to get the count. Requires that `context` is provided with a valid course_id
in order to filter teams within the course.
"""
team_count = serializers.SerializerMethodField()
def get_team_count(self, topic):
"""Get the number of teams associated with this topic"""
# If team_count is already present (possible if topic data was pre-processed for sorting), return it.
if 'team_count' in topic:
return topic['team_count']
else:
return CourseTeam.objects.filter(course_id=self.context['course_id'], topic_id=topic['id']).count()
class BulkTeamCountTopicListSerializer(serializers.ListSerializer): # pylint: disable=abstract-method
"""
List serializer for efficiently serializing a set of topics.
"""
def to_representation(self, obj):
"""Adds team_count to each topic. """
data = super(BulkTeamCountTopicListSerializer, self).to_representation(obj)
add_team_count(data, self.context["course_id"])
return data
class BulkTeamCountTopicSerializer(BaseTopicSerializer): # pylint: disable=abstract-method
"""
Serializes a set of topics, adding the team_count field to each topic as a bulk operation.
Requires that `context` is provided with a valid course_id in order to filter teams within the course.
"""
class Meta(object):
list_serializer_class = BulkTeamCountTopicListSerializer
def add_team_count(topics, course_id):
"""
Helper method to add team_count for a list of topics.
This allows for a more efficient single query.
"""
topic_ids = [topic['id'] for topic in topics]
teams_per_topic = CourseTeam.objects.filter(
course_id=course_id,
topic_id__in=topic_ids
).values('topic_id').annotate(team_count=Count('topic_id'))
topics_to_team_count = {d['topic_id']: d['team_count'] for d in teams_per_topic}
for topic in topics:
topic['team_count'] = topics_to_team_count.get(topic['id'], 0)
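# Illustrative call (hypothetical topic dicts and course_id): each dict gains a
# 'team_count' key, computed with one aggregated query instead of a query per topic.
#
#     topics = [{'id': 'algebra', 'name': 'Algebra', 'description': '...'}]
#     add_team_count(topics, course_id)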
|
solashirai/edx-platform
|
lms/djangoapps/teams/serializers.py
|
Python
|
agpl-3.0
| 7,981
|
from dogpile.cache.region import register_backend
register_backend(
"dogpile.cache.null", "dogpile.cache.backends.null", "NullBackend")
register_backend(
"dogpile.cache.dbm", "dogpile.cache.backends.file", "DBMBackend")
register_backend(
"dogpile.cache.pylibmc", "dogpile.cache.backends.memcached",
"PylibmcBackend")
register_backend(
"dogpile.cache.bmemcached", "dogpile.cache.backends.memcached",
"BMemcachedBackend")
register_backend(
"dogpile.cache.memcached", "dogpile.cache.backends.memcached",
"MemcachedBackend")
register_backend(
"dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend")
register_backend(
"dogpile.cache.memory_pickle", "dogpile.cache.backends.memory",
"MemoryPickleBackend")
register_backend(
"dogpile.cache.redis", "dogpile.cache.backends.redis", "RedisBackend")
|
ctrlaltdel/neutrinator
|
vendor/dogpile/cache/backends/__init__.py
|
Python
|
gpl-3.0
| 856
|
"""
Utilities for dealing with JSON.
"""
import simplejson
from xmodule.modulestore import EdxJSONEncoder
class EscapedEdxJSONEncoder(EdxJSONEncoder):
"""
Class for encoding edx JSON which will be printed inline into HTML
templates.
"""
def encode(self, obj):
"""
Encodes JSON that is safe to be embedded in HTML.
"""
return simplejson.dumps(
simplejson.loads(super(EscapedEdxJSONEncoder, self).encode(obj)),
cls=simplejson.JSONEncoderForHTML
)
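# Minimal usage sketch (hypothetical payload): simplejson.JSONEncoderForHTML escapes
# '<', '>' and '&', so the encoded string is safe to embed in an inline <script> block.
#
#     encoded = EscapedEdxJSONEncoder().encode({'title': '</script><b>x</b>'})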
|
Semi-global/edx-platform
|
openedx/core/lib/json_utils.py
|
Python
|
agpl-3.0
| 533
|
def f():
a = 1
b = 1
foo(a, b)
def foo(a_new, b_new):
print(a_new + b_new * 123)
|
caot/intellij-community
|
python/testData/refactoring/extractmethod/Statement.after.py
|
Python
|
apache-2.0
| 98
|
"""
Twilio auth backend, docs at:
http://psa.matiasaguirre.net/docs/backends/twilio.html
"""
from re import sub
from social.p3 import urlencode
from social.backends.base import BaseAuth
class TwilioAuth(BaseAuth):
name = 'twilio'
ID_KEY = 'AccountSid'
def get_user_details(self, response):
"""Return twilio details, Twilio only provides AccountSID as
parameters."""
# /complete/twilio/?AccountSid=ACc65ea16c9ebd4d4684edf814995b27e
return {'username': response['AccountSid'],
'email': '',
'fullname': '',
'first_name': '',
'last_name': ''}
def auth_url(self):
"""Return authorization redirect url."""
key, secret = self.get_key_and_secret()
callback = self.strategy.absolute_uri(self.redirect_uri)
callback = sub(r'^https', 'http', callback)
query = urlencode({'cb': callback})
return 'https://www.twilio.com/authorize/{0}?{1}'.format(key, query)
def auth_complete(self, *args, **kwargs):
"""Completes loging process, must return user instance"""
account_sid = self.data.get('AccountSid')
if not account_sid:
raise ValueError('No AccountSid returned')
kwargs.update({'response': self.data, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
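# Flow sketch (hypothetical key): auth_url() sends the user to
# https://www.twilio.com/authorize/<key>?cb=<callback>; Twilio then redirects the
# browser back to /complete/twilio/?AccountSid=..., which auth_complete() hands to
# self.strategy.authenticate().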
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/social/backends/twilio.py
|
Python
|
agpl-3.0
| 1,384
|
"""
Tests for open ended grading interfaces
./manage.py lms --settings test test lms/djangoapps/open_ended_grading
"""
import ddt
import json
import logging
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from edxmako.shortcuts import render_to_string
from edxmako.tests import mako_middleware_process_request
from mock import MagicMock, patch, Mock
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from config_models.models import cache
from courseware.tests import factories
from courseware.tests.helpers import LoginEnrollmentTestCase
from lms.djangoapps.lms_xblock.runtime import LmsModuleSystem
from student.roles import CourseStaffRole
from student.models import unique_id_for_user
from xblock_django.models import XBlockDisableConfig
from xmodule import peer_grading_module
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import TEST_DATA_MIXED_TOY_MODULESTORE, ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.xml_importer import import_course_from_xml
from xmodule.open_ended_grading_classes import peer_grading_service, controller_query_service
from xmodule.tests import test_util_open_ended
from open_ended_grading import staff_grading_service, views, utils
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
log = logging.getLogger(__name__)
class EmptyStaffGradingService(object):
"""
A staff grading service that does not return a problem list from get_problem_list.
    Used in tests to verify that the error message for an empty problem list is displayed correctly.
"""
def get_problem_list(self, course_id, user_id):
"""
Return a staff grading response that is missing a problem list key.
"""
return {'success': True, 'error': 'No problems found.'}
def make_instructor(course, user_email):
"""
Makes a given user an instructor in a course.
"""
CourseStaffRole(course.id).add_users(User.objects.get(email=user_email))
class StudentProblemListMockQuery(object):
"""
Mock controller query service for testing student problem list functionality.
"""
def get_grading_status_list(self, *args, **kwargs):
"""
Get a mock grading status list with locations from the open_ended test course.
@returns: grading status message dictionary.
"""
return {
"version": 1,
"problem_list": [
{
"problem_name": "Test1",
"grader_type": "IN",
"eta_available": True,
"state": "Finished",
"eta": 259200,
"location": "i4x://edX/open_ended/combinedopenended/SampleQuestion1Attempt"
},
{
"problem_name": "Test2",
"grader_type": "NA",
"eta_available": True,
"state": "Waiting to be Graded",
"eta": 259200,
"location": "i4x://edX/open_ended/combinedopenended/SampleQuestion"
},
{
"problem_name": "Test3",
"grader_type": "PE",
"eta_available": True,
"state": "Waiting to be Graded",
"eta": 259200,
"location": "i4x://edX/open_ended/combinedopenended/SampleQuestion454"
},
],
"success": True
}
class TestStaffGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
'''
Check that staff grading service proxy works. Basically just checking the
access control and error handling logic -- all the actual work is on the
backend.
'''
MODULESTORE = TEST_DATA_MIXED_TOY_MODULESTORE
def setUp(self):
super(TestStaffGradingService, self).setUp()
self.student = 'view@test.com'
self.instructor = 'view2@test.com'
self.password = 'foo'
self.create_account('u1', self.student, self.password)
self.create_account('u2', self.instructor, self.password)
self.activate_user(self.student)
self.activate_user(self.instructor)
self.course_id = SlashSeparatedCourseKey("edX", "toy", "2012_Fall")
self.location_string = self.course_id.make_usage_key('html', 'TestLocation').to_deprecated_string()
self.toy = modulestore().get_course(self.course_id)
make_instructor(self.toy, self.instructor)
self.mock_service = staff_grading_service.staff_grading_service()
self.logout()
def test_access(self):
"""
Make sure only staff have access.
"""
self.login(self.student, self.password)
# both get and post should return 404
for view_name in ('staff_grading_get_next', 'staff_grading_save_grade'):
url = reverse(view_name, kwargs={'course_id': self.course_id.to_deprecated_string()})
self.assert_request_status_code(404, url, method="GET")
self.assert_request_status_code(404, url, method="POST")
def test_get_next(self):
self.login(self.instructor, self.password)
url = reverse('staff_grading_get_next', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {'location': self.location_string}
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
self.assertTrue(content['success'])
self.assertEquals(content['submission_id'], self.mock_service.cnt)
self.assertIsNotNone(content['submission'])
self.assertIsNotNone(content['num_graded'])
self.assertIsNotNone(content['min_for_ml'])
self.assertIsNotNone(content['num_pending'])
self.assertIsNotNone(content['prompt'])
self.assertIsNotNone(content['ml_error_info'])
self.assertIsNotNone(content['max_score'])
self.assertIsNotNone(content['rubric'])
def save_grade_base(self, skip=False):
self.login(self.instructor, self.password)
url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {'score': '12',
'feedback': 'great!',
'submission_id': '123',
'location': self.location_string,
'submission_flagged': "true",
'rubric_scores[]': ['1', '2']}
if skip:
data.update({'skipped': True})
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
self.assertTrue(content['success'], str(content))
self.assertEquals(content['submission_id'], self.mock_service.cnt)
def test_save_grade(self):
self.save_grade_base(skip=False)
def test_save_grade_skip(self):
self.save_grade_base(skip=True)
def test_get_problem_list(self):
self.login(self.instructor, self.password)
url = reverse('staff_grading_get_problem_list', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {}
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
self.assertTrue(content['success'])
self.assertEqual(content['problem_list'], [])
@patch('open_ended_grading.staff_grading_service._service', EmptyStaffGradingService())
def test_get_problem_list_missing(self):
"""
Test to see if a staff grading response missing a problem list is given the appropriate error.
Mock the staff grading service to enable the key to be missing.
"""
# Get a valid user object.
instructor = User.objects.get(email=self.instructor)
# Mock a request object.
request = Mock(
user=instructor,
)
# Get the response and load its content.
response = json.loads(staff_grading_service.get_problem_list(request, self.course_id.to_deprecated_string()).content)
# A valid response will have an "error" key.
self.assertTrue('error' in response)
# Check that the error text is correct.
self.assertIn("Cannot find", response['error'])
def test_save_grade_with_long_feedback(self):
"""
        Test that save_grade() returns an error message when the feedback is too long.
"""
self.login(self.instructor, self.password)
url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id.to_deprecated_string()})
data = {
'score': '12',
'feedback': '',
'submission_id': '123',
'location': self.location_string,
'submission_flagged': "false",
'rubric_scores[]': ['1', '2']
}
feedback_fragment = "This is very long feedback."
data["feedback"] = feedback_fragment * (
(staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH / len(feedback_fragment) + 1)
)
response = self.assert_request_status_code(200, url, method="POST", data=data)
content = json.loads(response.content)
# Should not succeed.
self.assertEquals(content['success'], False)
self.assertEquals(
content['error'],
"Feedback is too long, Max length is {0} characters.".format(
staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH
)
)
class TestPeerGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
'''
    Check that the peer grading service proxy works. Basically just checking the
access control and error handling logic -- all the actual work is on the
backend.
'''
def setUp(self):
super(TestPeerGradingService, self).setUp()
self.student = 'view@test.com'
self.instructor = 'view2@test.com'
self.password = 'foo'
self.create_account('u1', self.student, self.password)
self.create_account('u2', self.instructor, self.password)
self.activate_user(self.student)
self.activate_user(self.instructor)
self.course_id = SlashSeparatedCourseKey("edX", "toy", "2012_Fall")
self.location_string = self.course_id.make_usage_key('html', 'TestLocation').to_deprecated_string()
self.toy = modulestore().get_course(self.course_id)
location = "i4x://edX/toy/peergrading/init"
field_data = DictFieldData({'data': "<peergrading/>", 'location': location, 'category': 'peergrading'})
self.mock_service = peer_grading_service.MockPeerGradingService()
self.system = LmsModuleSystem(
static_url=settings.STATIC_URL,
track_function=None,
get_module=None,
render_template=render_to_string,
replace_urls=None,
s3_interface=test_util_open_ended.S3_INTERFACE,
open_ended_grading_interface=test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
mixins=settings.XBLOCK_MIXINS,
error_descriptor_class=ErrorDescriptor,
descriptor_runtime=None,
)
self.descriptor = peer_grading_module.PeerGradingDescriptor(self.system, field_data, ScopeIds(None, None, None, None))
self.descriptor.xmodule_runtime = self.system
self.peer_module = self.descriptor
self.peer_module.peer_gs = self.mock_service
self.logout()
def test_get_next_submission_success(self):
data = {'location': self.location_string}
response = self.peer_module.get_next_submission(data)
content = response
self.assertTrue(content['success'])
self.assertIsNotNone(content['submission_id'])
self.assertIsNotNone(content['prompt'])
self.assertIsNotNone(content['submission_key'])
self.assertIsNotNone(content['max_score'])
def test_get_next_submission_missing_location(self):
data = {}
d = self.peer_module.get_next_submission(data)
self.assertFalse(d['success'])
self.assertEqual(d['error'], "Missing required keys: location")
def test_save_grade_success(self):
data = {
'rubric_scores[]': [0, 0],
'location': self.location_string,
'submission_id': 1,
'submission_key': 'fake key',
'score': 2,
'feedback': 'feedback',
'submission_flagged': 'false',
'answer_unknown': 'false',
'rubric_scores_complete': 'true'
}
qdict = MagicMock()
def fake_get_item(key):
return data[key]
qdict.__getitem__.side_effect = fake_get_item
qdict.getlist = fake_get_item
qdict.keys = data.keys
response = self.peer_module.save_grade(qdict)
self.assertTrue(response['success'])
def test_save_grade_missing_keys(self):
data = {}
d = self.peer_module.save_grade(data)
self.assertFalse(d['success'])
self.assertTrue(d['error'].find('Missing required keys:') > -1)
def test_is_calibrated_success(self):
data = {'location': self.location_string}
response = self.peer_module.is_student_calibrated(data)
self.assertTrue(response['success'])
self.assertTrue('calibrated' in response)
def test_is_calibrated_failure(self):
data = {}
response = self.peer_module.is_student_calibrated(data)
self.assertFalse(response['success'])
self.assertFalse('calibrated' in response)
def test_show_calibration_essay_success(self):
data = {'location': self.location_string}
response = self.peer_module.show_calibration_essay(data)
self.assertTrue(response['success'])
self.assertIsNotNone(response['submission_id'])
self.assertIsNotNone(response['prompt'])
self.assertIsNotNone(response['submission_key'])
self.assertIsNotNone(response['max_score'])
def test_show_calibration_essay_missing_key(self):
data = {}
response = self.peer_module.show_calibration_essay(data)
self.assertFalse(response['success'])
self.assertEqual(response['error'], "Missing required keys: location")
def test_save_calibration_essay_success(self):
data = {
'rubric_scores[]': [0, 0],
'location': self.location_string,
'submission_id': 1,
'submission_key': 'fake key',
'score': 2,
'feedback': 'feedback',
'submission_flagged': 'false'
}
qdict = MagicMock()
def fake_get_item(key):
return data[key]
qdict.__getitem__.side_effect = fake_get_item
qdict.getlist = fake_get_item
qdict.keys = data.keys
response = self.peer_module.save_calibration_essay(qdict)
self.assertTrue(response['success'])
self.assertTrue('actual_score' in response)
def test_save_calibration_essay_missing_keys(self):
data = {}
response = self.peer_module.save_calibration_essay(data)
self.assertFalse(response['success'])
self.assertTrue(response['error'].find('Missing required keys:') > -1)
self.assertFalse('actual_score' in response)
def test_save_grade_with_long_feedback(self):
"""
        Test that save_grade() returns an error message when the feedback is too long.
"""
data = {
'rubric_scores[]': [0, 0],
'location': self.location_string,
'submission_id': 1,
'submission_key': 'fake key',
'score': 2,
'feedback': '',
'submission_flagged': 'false',
'answer_unknown': 'false',
'rubric_scores_complete': 'true'
}
feedback_fragment = "This is very long feedback."
data["feedback"] = feedback_fragment * (
(staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH / len(feedback_fragment) + 1)
)
response_dict = self.peer_module.save_grade(data)
# Should not succeed.
self.assertEquals(response_dict['success'], False)
self.assertEquals(
response_dict['error'],
"Feedback is too long, Max length is {0} characters.".format(
staff_grading_service.MAX_ALLOWED_FEEDBACK_LENGTH
)
)
class TestPanel(ModuleStoreTestCase):
"""
Run tests on the open ended panel
"""
def setUp(self):
super(TestPanel, self).setUp()
self.user = factories.UserFactory()
store = modulestore()
course_items = import_course_from_xml(store, self.user.id, TEST_DATA_DIR, ['open_ended']) # pylint: disable=maybe-no-member
self.course = course_items[0]
self.course_key = self.course.id
def test_open_ended_panel(self):
"""
Test to see if the peer grading module in the demo course is found
@return:
"""
found_module, peer_grading_module = views.find_peer_grading_module(self.course)
self.assertTrue(found_module)
@patch(
'open_ended_grading.utils.create_controller_query_service',
Mock(
return_value=controller_query_service.MockControllerQueryService(
settings.OPEN_ENDED_GRADING_INTERFACE,
utils.render_to_string
)
)
)
def test_problem_list(self):
"""
Ensure that the problem list from the grading controller server can be rendered properly locally
@return:
"""
request = RequestFactory().get(
reverse("open_ended_problems", kwargs={'course_id': self.course_key})
)
request.user = self.user
mako_middleware_process_request(request)
response = views.student_problem_list(request, self.course.id.to_deprecated_string())
self.assertRegexpMatches(response.content, "Here is a list of open ended problems for this course.")
class TestPeerGradingFound(ModuleStoreTestCase):
"""
Test to see if peer grading modules can be found properly.
"""
def setUp(self):
super(TestPeerGradingFound, self).setUp()
self.user = factories.UserFactory()
store = modulestore()
course_items = import_course_from_xml(store, self.user.id, TEST_DATA_DIR, ['open_ended_nopath']) # pylint: disable=maybe-no-member
self.course = course_items[0]
self.course_key = self.course.id
def test_peer_grading_nopath(self):
"""
The open_ended_nopath course contains a peer grading module with no path to it.
Ensure that the exception is caught.
"""
found, url = views.find_peer_grading_module(self.course)
self.assertEqual(found, False)
class TestStudentProblemList(ModuleStoreTestCase):
"""
Test if the student problem list correctly fetches and parses problems.
"""
def setUp(self):
super(TestStudentProblemList, self).setUp()
# Load an open ended course with several problems.
self.user = factories.UserFactory()
store = modulestore()
course_items = import_course_from_xml(store, self.user.id, TEST_DATA_DIR, ['open_ended']) # pylint: disable=maybe-no-member
self.course = course_items[0]
self.course_key = self.course.id
# Enroll our user in our course and make them an instructor.
make_instructor(self.course, self.user.email)
@patch(
'open_ended_grading.utils.create_controller_query_service',
Mock(return_value=StudentProblemListMockQuery())
)
def test_get_problem_list(self):
"""
Test to see if the StudentProblemList class can get and parse a problem list from ORA.
Mock the get_grading_status_list function using StudentProblemListMockQuery.
"""
# Initialize a StudentProblemList object.
student_problem_list = utils.StudentProblemList(self.course.id, unique_id_for_user(self.user))
# Get the initial problem list from ORA.
success = student_problem_list.fetch_from_grading_service()
# Should be successful, and we should have three problems. See mock class for details.
self.assertTrue(success)
self.assertEqual(len(student_problem_list.problem_list), 3)
# See if the problem locations are valid.
valid_problems = student_problem_list.add_problem_data(reverse('courses'))
# One location is invalid, so we should now have two.
self.assertEqual(len(valid_problems), 2)
# Ensure that human names are being set properly.
self.assertEqual(valid_problems[0]['grader_type_display_name'], "Instructor Assessment")
@ddt.ddt
class TestTabs(ModuleStoreTestCase):
"""
Test tabs.
"""
def setUp(self):
super(TestTabs, self).setUp()
        self.course = CourseFactory(advanced_modules=('combinedopenended',))
self.addCleanup(lambda: self._enable_xblock_disable_config(False))
def _enable_xblock_disable_config(self, enabled):
""" Enable or disable xblocks disable. """
config = XBlockDisableConfig.current()
config.enabled = enabled
config.disabled_blocks = "\n".join(('combinedopenended', 'peergrading'))
config.save()
cache.clear()
@ddt.data(
views.StaffGradingTab,
views.PeerGradingTab,
views.OpenEndedGradingTab,
)
def test_tabs_enabled(self, tab):
self.assertTrue(tab.is_enabled(self.course))
@ddt.data(
views.StaffGradingTab,
views.PeerGradingTab,
views.OpenEndedGradingTab,
)
def test_tabs_disabled(self, tab):
self._enable_xblock_disable_config(True)
self.assertFalse(tab.is_enabled(self.course))
|
ahmadiga/min_edx
|
lms/djangoapps/open_ended_grading/tests.py
|
Python
|
agpl-3.0
| 22,144
|
mapsize = 35
def test(x, y):
if <weak_warning descr="Simplify chained comparison">0 <= x < <caret>mapsize and y >= 0 and y < mapsize</weak_warning>:
return 1
|
asedunov/intellij-community
|
python/testData/inspections/ChainedComparison5.py
|
Python
|
apache-2.0
| 164
|
from networkx.algorithms.chordal.chordal_alg import *
|
LumPenPacK/NetworkExtractionFromImages
|
win_build/nefi2_win_amd64_msvc_2015/site-packages/networkx/algorithms/chordal/__init__.py
|
Python
|
bsd-2-clause
| 56
|
from django.conf import settings
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
from django.utils.functional import cached_property
class PostGISCreation(DatabaseCreation):
geom_index_type = 'GIST'
geom_index_ops = 'GIST_GEOMETRY_OPS'
geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND'
@cached_property
def template_postgis(self):
template_postgis = getattr(settings, 'POSTGIS_TEMPLATE', 'template_postgis')
cursor = self.connection.cursor()
cursor.execute('SELECT 1 FROM pg_database WHERE datname = %s LIMIT 1;', (template_postgis,))
if cursor.fetchone():
return template_postgis
return None
def sql_indexes_for_field(self, model, f, style):
"Return any spatial index creation SQL for the field."
from django.contrib.gis.db.models.fields import GeometryField
output = super(PostGISCreation, self).sql_indexes_for_field(model, f, style)
if isinstance(f, GeometryField):
gqn = self.connection.ops.geo_quote_name
qn = self.connection.ops.quote_name
db_table = model._meta.db_table
if f.geography or self.connection.ops.geometry:
# Geography and Geometry (PostGIS 2.0+) columns are
# created normally.
pass
else:
# Geometry columns are created by `AddGeometryColumn`
# stored procedure.
output.append(style.SQL_KEYWORD('SELECT ') +
style.SQL_TABLE('AddGeometryColumn') + '(' +
style.SQL_TABLE(gqn(db_table)) + ', ' +
style.SQL_FIELD(gqn(f.column)) + ', ' +
style.SQL_FIELD(str(f.srid)) + ', ' +
style.SQL_COLTYPE(gqn(f.geom_type)) + ', ' +
style.SQL_KEYWORD(str(f.dim)) + ');')
if not f.null:
# Add a NOT NULL constraint to the field
output.append(style.SQL_KEYWORD('ALTER TABLE ') +
style.SQL_TABLE(qn(db_table)) +
style.SQL_KEYWORD(' ALTER ') +
style.SQL_FIELD(qn(f.column)) +
style.SQL_KEYWORD(' SET NOT NULL') + ';')
if f.spatial_index:
                # Spatial indexes are created the same way for both Geometry and
                # Geography columns.
                # PostGIS 2.0 does not support GIST_GEOMETRY_OPS, so on 1.5 we use
                # GIST_GEOMETRY_OPS, while on 2.0 we use either the "nd" ops (fast
                # for the multidimensional case) or a plain GiST index for the 2D
                # case.
if f.geography:
index_ops = ''
elif self.connection.ops.geometry:
if f.dim > 2:
index_ops = ' ' + style.SQL_KEYWORD(self.geom_index_ops_nd)
else:
index_ops = ''
else:
index_ops = ' ' + style.SQL_KEYWORD(self.geom_index_ops)
output.append(style.SQL_KEYWORD('CREATE INDEX ') +
style.SQL_TABLE(qn('%s_%s_id' % (db_table, f.column))) +
style.SQL_KEYWORD(' ON ') +
style.SQL_TABLE(qn(db_table)) +
style.SQL_KEYWORD(' USING ') +
style.SQL_COLTYPE(self.geom_index_type) + ' ( ' +
style.SQL_FIELD(qn(f.column)) + index_ops + ' );')
return output
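    # Example of the index statement emitted by sql_indexes_for_field above for a 2-D
    # geometry column named "geom" on table "places" (illustrative output only):
    #
    #     CREATE INDEX "places_geom_id" ON "places" USING GIST ( "geom" );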
def sql_table_creation_suffix(self):
if self.template_postgis is not None:
return ' TEMPLATE %s' % (
self.connection.ops.quote_name(self.template_postgis),)
return ''
def _create_test_db(self, verbosity, autoclobber):
test_database_name = super(PostGISCreation, self)._create_test_db(verbosity, autoclobber)
if self.template_postgis is None:
# Connect to the test database in order to create the postgis extension
self.connection.close()
self.connection.settings_dict["NAME"] = test_database_name
cursor = self.connection.cursor()
cursor.execute("CREATE EXTENSION postgis")
cursor.connection.commit()
return test_database_name
|
edisonlz/fruit
|
web_project/base/site-packages/django/contrib/gis/db/backends/postgis/creation.py
|
Python
|
apache-2.0
| 4,498
|
from __future__ import division
# When true division is the default, get rid of this and add it to
# test_long.py instead. In the meantime, it's too obscure to try to
# trick just part of test_long into using future division.
import sys
import random
import math
import unittest
from test.test_support import run_unittest
# decorator for skipping tests on non-IEEE 754 platforms
requires_IEEE_754 = unittest.skipUnless(
float.__getformat__("double").startswith("IEEE"),
"test requires IEEE 754 doubles")
DBL_MAX = sys.float_info.max
DBL_MAX_EXP = sys.float_info.max_exp
DBL_MIN_EXP = sys.float_info.min_exp
DBL_MANT_DIG = sys.float_info.mant_dig
DBL_MIN_OVERFLOW = 2**DBL_MAX_EXP - 2**(DBL_MAX_EXP - DBL_MANT_DIG - 1)
# pure Python version of correctly-rounded true division
def truediv(a, b):
"""Correctly-rounded true division for integers."""
negative = a^b < 0
a, b = abs(a), abs(b)
# exceptions: division by zero, overflow
if not b:
raise ZeroDivisionError("division by zero")
if a >= DBL_MIN_OVERFLOW * b:
raise OverflowError("int/int too large to represent as a float")
# find integer d satisfying 2**(d - 1) <= a/b < 2**d
d = a.bit_length() - b.bit_length()
if d >= 0 and a >= 2**d * b or d < 0 and a * 2**-d >= b:
d += 1
# compute 2**-exp * a / b for suitable exp
exp = max(d, DBL_MIN_EXP) - DBL_MANT_DIG
a, b = a << max(-exp, 0), b << max(exp, 0)
q, r = divmod(a, b)
# round-half-to-even: fractional part is r/b, which is > 0.5 iff
# 2*r > b, and == 0.5 iff 2*r == b.
if 2*r > b or 2*r == b and q % 2 == 1:
q += 1
result = math.ldexp(float(q), exp)
return -result if negative else result
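# Quick sanity examples derived from the definition above (not part of the original
# test file):
#     truediv(1, 2) == 0.5
#     truediv(-1, 2) == -0.5
#     truediv(1, 0) raises ZeroDivisionError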
class TrueDivisionTests(unittest.TestCase):
def test(self):
huge = 1L << 40000
mhuge = -huge
self.assertEqual(huge / huge, 1.0)
self.assertEqual(mhuge / mhuge, 1.0)
self.assertEqual(huge / mhuge, -1.0)
self.assertEqual(mhuge / huge, -1.0)
self.assertEqual(1 / huge, 0.0)
self.assertEqual(1L / huge, 0.0)
self.assertEqual(1 / mhuge, 0.0)
self.assertEqual(1L / mhuge, 0.0)
self.assertEqual((666 * huge + (huge >> 1)) / huge, 666.5)
self.assertEqual((666 * mhuge + (mhuge >> 1)) / mhuge, 666.5)
self.assertEqual((666 * huge + (huge >> 1)) / mhuge, -666.5)
self.assertEqual((666 * mhuge + (mhuge >> 1)) / huge, -666.5)
self.assertEqual(huge / (huge << 1), 0.5)
self.assertEqual((1000000 * huge) / huge, 1000000)
namespace = {'huge': huge, 'mhuge': mhuge}
for overflow in ["float(huge)", "float(mhuge)",
"huge / 1", "huge / 2L", "huge / -1", "huge / -2L",
"mhuge / 100", "mhuge / 100L"]:
# If the "eval" does not happen in this module,
# true division is not enabled
with self.assertRaises(OverflowError):
eval(overflow, namespace)
for underflow in ["1 / huge", "2L / huge", "-1 / huge", "-2L / huge",
"100 / mhuge", "100L / mhuge"]:
result = eval(underflow, namespace)
self.assertEqual(result, 0.0, 'expected underflow to 0 '
'from {!r}'.format(underflow))
for zero in ["huge / 0", "huge / 0L", "mhuge / 0", "mhuge / 0L"]:
with self.assertRaises(ZeroDivisionError):
eval(zero, namespace)
def check_truediv(self, a, b, skip_small=True):
"""Verify that the result of a/b is correctly rounded, by
comparing it with a pure Python implementation of correctly
rounded division. b should be nonzero."""
a, b = long(a), long(b)
# skip check for small a and b: in this case, the current
# implementation converts the arguments to float directly and
# then applies a float division. This can give doubly-rounded
# results on x87-using machines (particularly 32-bit Linux).
if skip_small and max(abs(a), abs(b)) < 2**DBL_MANT_DIG:
return
try:
# use repr so that we can distinguish between -0.0 and 0.0
expected = repr(truediv(a, b))
except OverflowError:
expected = 'overflow'
except ZeroDivisionError:
expected = 'zerodivision'
try:
got = repr(a / b)
except OverflowError:
got = 'overflow'
except ZeroDivisionError:
got = 'zerodivision'
self.assertEqual(expected, got, "Incorrectly rounded division {}/{}: "
"expected {}, got {}".format(a, b, expected, got))
@requires_IEEE_754
def test_correctly_rounded_true_division(self):
# more stringent tests than those above, checking that the
# result of true division of ints is always correctly rounded.
# This test should probably be considered CPython-specific.
# Exercise all the code paths not involving Gb-sized ints.
# ... divisions involving zero
self.check_truediv(123, 0)
self.check_truediv(-456, 0)
self.check_truediv(0, 3)
self.check_truediv(0, -3)
self.check_truediv(0, 0)
# ... overflow or underflow by large margin
self.check_truediv(671 * 12345 * 2**DBL_MAX_EXP, 12345)
self.check_truediv(12345, 345678 * 2**(DBL_MANT_DIG - DBL_MIN_EXP))
# ... a much larger or smaller than b
self.check_truediv(12345*2**100, 98765)
self.check_truediv(12345*2**30, 98765*7**81)
# ... a / b near a boundary: one of 1, 2**DBL_MANT_DIG, 2**DBL_MIN_EXP,
# 2**DBL_MAX_EXP, 2**(DBL_MIN_EXP-DBL_MANT_DIG)
bases = (0, DBL_MANT_DIG, DBL_MIN_EXP,
DBL_MAX_EXP, DBL_MIN_EXP - DBL_MANT_DIG)
for base in bases:
for exp in range(base - 15, base + 15):
self.check_truediv(75312*2**max(exp, 0), 69187*2**max(-exp, 0))
self.check_truediv(69187*2**max(exp, 0), 75312*2**max(-exp, 0))
# overflow corner case
for m in [1, 2, 7, 17, 12345, 7**100,
-1, -2, -5, -23, -67891, -41**50]:
for n in range(-10, 10):
self.check_truediv(m*DBL_MIN_OVERFLOW + n, m)
self.check_truediv(m*DBL_MIN_OVERFLOW + n, -m)
# check detection of inexactness in shifting stage
for n in range(250):
# (2**DBL_MANT_DIG+1)/(2**DBL_MANT_DIG) lies halfway
# between two representable floats, and would usually be
# rounded down under round-half-to-even. The tiniest of
# additions to the numerator should cause it to be rounded
# up instead.
self.check_truediv((2**DBL_MANT_DIG + 1)*12345*2**200 + 2**n,
2**DBL_MANT_DIG*12345)
# 1/2731 is one of the smallest division cases that's subject
# to double rounding on IEEE 754 machines working internally with
# 64-bit precision. On such machines, the next check would fail,
# were it not explicitly skipped in check_truediv.
self.check_truediv(1, 2731)
# a particularly bad case for the old algorithm: gives an
# error of close to 3.5 ulps.
self.check_truediv(295147931372582273023, 295147932265116303360)
for i in range(1000):
self.check_truediv(10**(i+1), 10**i)
self.check_truediv(10**i, 10**(i+1))
# test round-half-to-even behaviour, normal result
for m in [1, 2, 4, 7, 8, 16, 17, 32, 12345, 7**100,
-1, -2, -5, -23, -67891, -41**50]:
for n in range(-10, 10):
self.check_truediv(2**DBL_MANT_DIG*m + n, m)
# test round-half-to-even, subnormal result
for n in range(-20, 20):
self.check_truediv(n, 2**1076)
# largeish random divisions: a/b where |a| <= |b| <=
# 2*|a|; |ans| is between 0.5 and 1.0, so error should
# always be bounded by 2**-54 with equality possible only
# if the least significant bit of q=ans*2**53 is zero.
for M in [10**10, 10**100, 10**1000]:
for i in range(1000):
a = random.randrange(1, M)
b = random.randrange(a, 2*a+1)
self.check_truediv(a, b)
self.check_truediv(-a, b)
self.check_truediv(a, -b)
self.check_truediv(-a, -b)
# and some (genuinely) random tests
for _ in range(10000):
a_bits = random.randrange(1000)
b_bits = random.randrange(1, 1000)
x = random.randrange(2**a_bits)
y = random.randrange(1, 2**b_bits)
self.check_truediv(x, y)
self.check_truediv(x, -y)
self.check_truediv(-x, y)
self.check_truediv(-x, -y)
def test_main():
run_unittest(TrueDivisionTests)
if __name__ == "__main__":
test_main()
|
teeple/pns_server
|
work/install/Python-2.7.4/Lib/test/test_long_future.py
|
Python
|
gpl-2.0
| 9,026
|
from .sitemap import *
|
grupydf/grupybr-template
|
{{cookiecutter.repo_name}}/.plugins/sitemap/__init__.py
|
Python
|
gpl-3.0
| 22
|
"""
Tools for sending email.
"""
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
# Imported for backwards compatibility, and for the sake
# of a cleaner namespace. These symbols used to be in
# django/core/mail.py before the introduction of email
# backends and the subsequent reorganization (See #10355)
from django.core.mail.utils import CachedDnsName, DNS_NAME
from django.core.mail.message import \
EmailMessage, EmailMultiAlternatives, \
SafeMIMEText, SafeMIMEMultipart, \
DEFAULT_ATTACHMENT_MIME_TYPE, make_msgid, \
BadHeaderError, forbid_multi_line_headers
from django.core.mail.backends.smtp import EmailBackend as _SMTPConnection
def get_connection(backend=None, fail_silently=False, **kwds):
"""Load an e-mail backend and return an instance of it.
If backend is None (default) settings.EMAIL_BACKEND is used.
Both fail_silently and other keyword arguments are used in the
constructor of the backend.
"""
path = backend or settings.EMAIL_BACKEND
try:
mod_name, klass_name = path.rsplit('.', 1)
mod = import_module(mod_name)
except ImportError, e:
raise ImproperlyConfigured(('Error importing email backend module %s: "%s"'
% (mod_name, e)))
try:
klass = getattr(mod, klass_name)
except AttributeError:
raise ImproperlyConfigured(('Module "%s" does not define a '
'"%s" class' % (mod_name, klass_name)))
return klass(fail_silently=fail_silently, **kwds)
def send_mail(subject, message, from_email, recipient_list,
fail_silently=False, auth_user=None, auth_password=None,
connection=None):
"""
Easy wrapper for sending a single message to a recipient list. All members
of the recipient list will see the other recipients in the 'To' field.
If auth_user is None, the EMAIL_HOST_USER setting is used.
If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = connection or get_connection(username=auth_user,
password=auth_password,
fail_silently=fail_silently)
return EmailMessage(subject, message, from_email, recipient_list,
connection=connection).send()
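# Usage sketch (hypothetical addresses), assuming EMAIL_BACKEND and the related
# settings are configured:
#
#     from django.core.mail import send_mail
#     send_mail('Subject', 'Message body.', 'from@example.com',
#               ['to@example.com'], fail_silently=False)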
def send_mass_mail(datatuple, fail_silently=False, auth_user=None,
auth_password=None, connection=None):
"""
Given a datatuple of (subject, message, from_email, recipient_list), sends
each message to each recipient list. Returns the number of e-mails sent.
If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
If auth_user and auth_password are set, they're used to log in.
If auth_user is None, the EMAIL_HOST_USER setting is used.
If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = connection or get_connection(username=auth_user,
password=auth_password,
fail_silently=fail_silently)
messages = [EmailMessage(subject, message, sender, recipient)
for subject, message, sender, recipient in datatuple]
return connection.send_messages(messages)
def mail_admins(subject, message, fail_silently=False, connection=None,
html_message=None):
"""Sends a message to the admins, as defined by the ADMINS setting."""
if not settings.ADMINS:
return
mail = EmailMultiAlternatives(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS],
connection=connection)
if html_message:
mail.attach_alternative(html_message, 'text/html')
mail.send(fail_silently=fail_silently)
def mail_managers(subject, message, fail_silently=False, connection=None,
html_message=None):
"""Sends a message to the managers, as defined by the MANAGERS setting."""
if not settings.MANAGERS:
return
mail = EmailMultiAlternatives(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
message, settings.SERVER_EMAIL, [a[1] for a in settings.MANAGERS],
connection=connection)
if html_message:
mail.attach_alternative(html_message, 'text/html')
mail.send(fail_silently=fail_silently)
class SMTPConnection(_SMTPConnection):
def __init__(self, *args, **kwds):
import warnings
warnings.warn(
'mail.SMTPConnection is deprecated; use mail.get_connection() instead.',
DeprecationWarning
)
super(SMTPConnection, self).__init__(*args, **kwds)
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.3/django/core/mail/__init__.py
|
Python
|
bsd-3-clause
| 5,072
|
import re
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SecurityMiddleware(object):
def __init__(self):
self.sts_seconds = settings.SECURE_HSTS_SECONDS
self.sts_include_subdomains = settings.SECURE_HSTS_INCLUDE_SUBDOMAINS
self.content_type_nosniff = settings.SECURE_CONTENT_TYPE_NOSNIFF
self.xss_filter = settings.SECURE_BROWSER_XSS_FILTER
self.redirect = settings.SECURE_SSL_REDIRECT
self.redirect_host = settings.SECURE_SSL_HOST
self.redirect_exempt = [re.compile(r) for r in settings.SECURE_REDIRECT_EXEMPT]
def process_request(self, request):
path = request.path.lstrip("/")
if (self.redirect and not request.is_secure() and
not any(pattern.search(path)
for pattern in self.redirect_exempt)):
host = self.redirect_host or request.get_host()
return HttpResponsePermanentRedirect(
"https://%s%s" % (host, request.get_full_path())
)
def process_response(self, request, response):
if (self.sts_seconds and request.is_secure() and
'strict-transport-security' not in response):
sts_header = "max-age=%s" % self.sts_seconds
if self.sts_include_subdomains:
sts_header = sts_header + "; includeSubDomains"
response["strict-transport-security"] = sts_header
if self.content_type_nosniff and 'x-content-type-options' not in response:
response["x-content-type-options"] = "nosniff"
if self.xss_filter and 'x-xss-protection' not in response:
response["x-xss-protection"] = "1; mode=block"
return response
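# Settings sketch (illustrative values) for the options this middleware reads; the
# names mirror the attributes assigned in __init__ above:
#
#     SECURE_HSTS_SECONDS = 31536000
#     SECURE_HSTS_INCLUDE_SUBDOMAINS = True
#     SECURE_CONTENT_TYPE_NOSNIFF = True
#     SECURE_BROWSER_XSS_FILTER = True
#     SECURE_SSL_REDIRECT = True
#     SECURE_SSL_HOST = None
#     SECURE_REDIRECT_EXEMPT = [r'^healthz/$']  # hypothetical exempt path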
|
BitWriters/Zenith_project
|
zango/lib/python3.5/site-packages/django/middleware/security.py
|
Python
|
mit
| 1,753
|
"""add post media
Revision ID: 04da9abf37e2
Revises: 2e3a2882e5a4
Create Date: 2017-08-08 15:15:50.911420
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '04da9abf37e2'
down_revision = '2e3a2882e5a4'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('accounts', sa.Column('policy_keep_media', sa.Boolean(), server_default='FALSE', nullable=False))
op.add_column('posts', sa.Column('has_media', sa.Boolean(), server_default='FALSE', nullable=False))
# ### end Alembic commands ###
def downgrade():
op.drop_column('posts', 'has_media')
op.drop_column('accounts', 'policy_keep_media')
|
codl/forget
|
migrations/versions/04da9abf37e2_add_post_media.py
|
Python
|
isc
| 683
|
# Generated by Django 2.0.13 on 2020-01-04 17:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import modoboa.limits.models
class Migration(migrations.Migration):
replaces = [
('limits', '0001_initial'),
('limits', '0002_auto_20151114_1518'),
('limits', '0003_auto_20160413_1046'),
('limits', '0004_auto_20160413_1312'),
]
initial = True
dependencies = [
('admin', '0001_initial'),
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserObjectLimit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=254)),
('max_value', models.IntegerField(default=0)),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterUniqueTogether(
name='userobjectlimit',
unique_together=set([('user', 'name')]),
),
migrations.CreateModel(
name='DomainObjectLimit',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=254)),
('max_value', models.IntegerField(default=0)),
('domain', models.ForeignKey(to='admin.Domain', on_delete=models.CASCADE)),
],
bases=(modoboa.limits.models.ObjectLimitMixin, models.Model),
),
migrations.AlterUniqueTogether(
name='domainobjectlimit',
unique_together=set([('domain', 'name')]),
),
]
|
modoboa/modoboa
|
modoboa/limits/migrations/0001_squashed_0004_auto_20160413_1312.py
|
Python
|
isc
| 2,070
|
'''
Copyright (c) 2017 Yogesh Khatri
This file is part of mac_apt (macOS Artifact Parsing Tool).
Usage or distribution of this software/code is subject to the
terms of the MIT License.
'''
import io
import os
import logging
import nska_deserialize as nd
from plugins.helpers import macinfo
import plugins.helpers.ccl_bplist as ccl_bplist
from enum import IntEnum
from plugins.helpers.common import CommonFunctions
from plugins.helpers.macinfo import *
from plugins.helpers.writer import *
__Plugin_Name = "SAFARI"
__Plugin_Friendly_Name = "Internet history, downloaded file information, cookies and more from Safari caches"
__Plugin_Version = "2.0"
__Plugin_Description = "Gets internet history, downloaded file information, cookies and more from Safari caches"
__Plugin_Author = "Yogesh Khatri"
__Plugin_Author_Email = "yogesh@swiftforensics.com"
__Plugin_Modes = "IOS,MACOS,ARTIFACTONLY"
__Plugin_ArtifactOnly_Usage = ''
log = logging.getLogger('MAIN.' + __Plugin_Name) # Do not rename or remove this ! This is the logger object
#---- Do not change the variable names in above section ----#
''' Mavericks had History.plist, Yosemite has History.db
<Home_DIR>/Library/Preferences/com.apple.safari.plist
RecentSearchStrings[], SuccessfulLaunchTimestamp, DownloadsPath, HomePage, FrequentlyVisitedSitesCache
<Home_DIR>/Library/Safari/ --> Bookmarks.plist, Downloads.plist, History.plist, Form Values (Encrypted!),
UserNotificationPermissions.plist, RecentlyClosedTabs.plist
LastSession.plist <-- SessionVersion, SessionWindows\[xx]\TabStates\[xx]\[TabTitle & TabURL]
TopSites.plist <-- [BannedURLStrings] , DisplayedSitesLastModified, TopSites\[xx][TopSiteTitle & TopSiteURLString]
Extensions\Extensions.plist <-- Installed Extensions\[xx][Archive File Name & Enabled]
ReadingListArchives/<UUID>/Page.webarchive <-- Plist, get WebResourceURL
BrowserState.db
CloudTabs.db
'''
class SafariItemType(IntEnum):
UNKNOWN = 0
HISTORY = 1
TOPSITE = 2
BOOKMARK = 3
DOWNLOAD = 4
LASTSESSION = 5
RECENTCLOSEDTAB = 6
EXTENSION = 7
GENERAL = 8 # From com.apple.safari.plist
HISTORYDOMAINS = 9
TOPSITE_BANNED = 10
FREQUENTLY_VISITED = 11 # From com.apple.safari.plist
CLOUDTAB = 12
TAB = 13 # From BrowserState
TABHISTORY = 14 # Tab session history from BrowserState
def __str__(self):
return self.name
class SafariItem:
def __init__(self, type, url, name, date, other, user, source):
self.type = type
self.url = url
self.name = name
self.date = date
self.other_info = other
self.user = user
self.source = source
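# Illustrative construction (hypothetical values) matching the columns written out by
# PrintAll() below:
#
#     item = SafariItem(SafariItemType.HISTORY, 'https://example.com/', 'Example',
#                       None, '', 'testuser', '<path to History.db>')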
def PrintAll(safari_items, output_params, source_path):
safari_info = [ ('Type',DataType.TEXT),('Name_or_Title',DataType.TEXT),('URL',DataType.TEXT),
('Date', DataType.DATE),('Other_Info', DataType.TEXT),('User', DataType.TEXT),
('Source',DataType.TEXT)
]
data_list = []
for item in safari_items:
url = item.url
if url.startswith('file://'):
url = url[7:]
data_list.append( [ str(item.type), item.name, url, item.date, item.other_info, item.user, item.source ] )
WriteList("safari information", "Safari", data_list, safari_info, output_params, source_path)
def ReadSafariPlist(plist, safari_items, source, user):
'''Read com.apple.safari.plist'''
try:
searches = plist['RecentSearchStrings'] # Mavericks
try:
for search in searches:
si = SafariItem(SafariItemType.GENERAL, '', search, None, 'RECENT_SEARCH', user, source)
safari_items.append(si)
except ValueError as ex:
log.exception('Error reading RecentSearchStrings from plist')
except KeyError: # Not found
pass
try:
searches = plist['RecentWebSearches'] # Yosemite
try:
for search in searches:
si = SafariItem(SafariItemType.GENERAL, '', search.get('SearchString',''),
search.get('Date', None), 'RECENT_SEARCH', user, source)
safari_items.append(si)
except ValueError as ex:
log.exception('Error reading RecentWebSearches from plist')
except KeyError: # Not found
pass
try:
freq_sites = plist['FrequentlyVisitedSitesCache'] # seen in El Capitan
try:
for site in freq_sites:
si = SafariItem(SafariItemType.FREQUENTLY_VISITED, site.get('URL', ''), site.get('Title',''),
None, 'FrequentlyVisitedSitesCache', user, source)
safari_items.append(si)
except ValueError as ex:
log.exception('Error reading FrequentlyVisitedSitesCache from plist')
except KeyError: # Not found
pass
try:
download_path = plist['DownloadsPath']
si = SafariItem(SafariItemType.GENERAL, '', download_path, None, 'DOWNLOADS_PATH', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
home = plist['HomePage']
si = SafariItem(SafariItemType.GENERAL, home, '', None, 'HOME_PAGE', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
last_ext_pref_selected = plist['LastExtensionSelectedInPreferences']
si = SafariItem(SafariItemType.EXTENSION, '', last_ext_pref_selected, None, 'LastExtensionSelectedInPreferences', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
last_root_dir = plist['NSNavLastRootDirectory']
si = SafariItem(SafariItemType.GENERAL, last_root_dir, '', None, 'NSNavLastRootDirectory', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
time = CommonFunctions.ReadMacAbsoluteTime(plist['SuccessfulLaunchTimestamp'])
si = SafariItem(SafariItemType.GENERAL, '', '', time, 'SuccessfulLaunchTimestamp', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
def ProcessSafariPlist(mac_info, source_path, user, safari_items, read_plist_function):
mac_info.ExportFile(source_path, __Plugin_Name, user + "_", False)
success, plist, error = mac_info.ReadPlist(source_path)
if success:
read_plist_function(plist, safari_items, source_path, user)
else:
log.info('Failed to open plist: {}'.format(source_path))
def ReadHistoryDb(conn, safari_items, source_path, user):
try:
conn.row_factory = sqlite3.Row
cursor = conn.execute("select title, url, load_successful, visit_time as time_utc from "
"history_visits left join history_items on history_visits.history_item = history_items.id")
try:
for row in cursor:
try:
si = SafariItem(SafariItemType.HISTORY, row['url'], row['title'],
CommonFunctions.ReadMacAbsoluteTime(row['time_utc']),'', user, source_path)
safari_items.append(si)
except sqlite3.Error as ex:
log.exception ("Error while fetching row data")
except sqlite3.Error as ex:
log.exception ("Db cursor error while reading file " + source_path)
conn.close()
except sqlite3.Error as ex:
log.exception ("Sqlite error")
def GetItemFromCloudDbPlist(plist, item_name):
for dic_item in plist:
for k, v in dic_item.items():
if k == item_name:
return v
return None
def ReadCloudTabsDb(conn, safari_items, source_path, user):
try:
conn.row_factory = sqlite3.Row
cursor = conn.execute(
"""SELECT device_name, tab_uuid, t.system_fields, title, url, is_showing_reader, is_pinned
FROM cloud_tabs t LEFT JOIN cloud_tab_devices d on d.device_uuid=t.device_uuid
ORDER BY device_name""")
try:
for row in cursor:
try:
pinned = row['is_pinned']
system_fields = row['system_fields']
created = ''
modified = ''
if system_fields:
serialized_plist_file_obj = io.BytesIO(system_fields)
try:
deserialized_plist = nd.deserialize_plist(serialized_plist_file_obj)
created = GetItemFromCloudDbPlist(deserialized_plist, 'RecordCtime')
modified = GetItemFromCloudDbPlist(deserialized_plist, 'RecordMtime')
except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
nd.biplist.InvalidPlistException, plistlib.InvalidFileException,
nd.ccl_bplist.BplistError, ValueError, TypeError, OSError, OverflowError) as ex:
log.exception('plist deserialization error')
si = SafariItem(SafariItemType.CLOUDTAB, row['url'], row['title'], created,
f'Modified={modified}' + (' pinned=1' if pinned else ''),
user, source_path)
safari_items.append(si)
except sqlite3.Error as ex:
log.exception ("Error while fetching row data")
except sqlite3.Error as ex:
log.exception ("Db cursor error while reading file " + source_path)
conn.close()
except sqlite3.Error as ex:
log.exception ("Sqlite error")
def ReadBrowserStateDb(conn, safari_items, source_path, user):
try:
conn.row_factory = sqlite3.Row
cursor = conn.execute(
"""SELECT t.id, url, title, session_data, t.uuid
FROM tabs t LEFT JOIN tab_sessions s on s.tab_uuid=t.uuid""")
try:
for row in cursor:
try:
si = SafariItem(SafariItemType.TAB, row['url'], row['title'], '',
f'Tab UUID={row["uuid"]}', user, source_path)
safari_items.append(si)
plist_data = row['session_data']
if plist_data and len(plist_data) > 10:
                        f = io.BytesIO(plist_data[4:]) # the first 4 bytes appear to be a header preceding the embedded binary plist
success, plist, error = CommonFunctions.ReadPlist(f)
if success:
history = plist.get('SessionHistory', None)
if history:
#current_session = history.get('SessionHistoryCurrentIndex', 0)
entries = history.get('SessionHistoryEntries', [])
for index, entry in enumerate(entries):
url = entry.get('SessionHistoryEntryURL', '')
title = entry.get('SessionHistoryEntryTitle', '')
if url == row['url']:
continue # same as current tab, skip it
si = SafariItem(SafariItemType.TABHISTORY, url, title, '',
f'Tab UUID={row["uuid"]} index={index}', user, source_path)
safari_items.append(si)
else:
log.error(f'Failed to read plist for tab {row["uuid"]}, {row["id"]}. {error}')
except sqlite3.Error as ex:
log.exception ("Error while fetching row data")
except sqlite3.Error as ex:
log.exception ("Db cursor error while reading file " + source_path)
conn.close()
except sqlite3.Error as ex:
log.exception ("Sqlite error")
def ReadExtensionsPlist(plist, safari_items, source_path, user):
try:
extensions = plist['Installed Extensions']
for item in extensions:
info = item.get('Enabled', '')
if info != '':
info = 'Enabled: ' + str(info)
apple_signed = item.get('Apple-signed', '')
if apple_signed != '':
info = ', '.join([info, 'Apple-signed: ' + str(apple_signed)])
si = SafariItem(SafariItemType.EXTENSION, '', item.get('Archive File Name', ''),
None, info, user, source_path)
safari_items.append(si)
return
except KeyError:
pass
'''Safari 14 extension plist parser'''
try:
for ext_name, ext in plist.items():
info = ''
enabled = ext.get('Enabled', '')
if enabled != '':
info += 'Enabled:' + str(enabled)
for key, val in ext.get('WebsiteAccess', {}).items():
info += f', {key}:{val}'
si = SafariItem(SafariItemType.EXTENSION, '', ext_name,
None, info, user, source_path)
safari_items.append(si)
except (KeyError, ValueError, TypeError) as ex:
log.error("Error reading extensions plist: " + source_path)
def ReadHistoryPlist(plist, safari_items, source_path, user):
try:
version = plist['WebHistoryFileVersion']
if version != 1:
log.warning('WebHistoryFileVersion is {}, this may not parse properly!'.format(version))
except KeyError:
log.error('WebHistoryFileVersion not found')
try:
history_dates = plist['WebHistoryDates']
for item in history_dates:
try:
redirect_urls = ",".join(item.get('redirectURLs', ''))
                si = SafariItem(SafariItemType.HISTORY, item.get('',''), item.get('title', ''), \
                    CommonFunctions.ReadMacAbsoluteTime(item.get('lastVisitedDate', '')), \
                    '' if (redirect_urls == '') else ('REDIRECT_URLS:' + redirect_urls), user, source_path) # URL is stored under the '' (empty string) key; visitCount skipped
safari_items.append(si)
except ValueError as ex:
log.error(str(ex))
except KeyError:
log.error('WebHistoryDates not found')
try:
history_domains = plist['WebHistoryDomains.v2']
for item in history_domains:
si = SafariItem(SafariItemType.HISTORYDOMAINS, '', item.get('', ''), None,
'ITEMCOUNT:' + str(item.get('itemCount', 0)) , user, source_path)
safari_items.append(si)
except KeyError:
log.error('WebHistoryDomains.v2 not found')
def ReadDownloadsPlist(plist, safari_items, source_path, user):
try:
downloads = plist['DownloadHistory']
for item in downloads:
si = SafariItem(SafariItemType.DOWNLOAD, item.get('DownloadEntryURL', ''), os.path.basename(item.get('DownloadEntryPath', '')),
None, item.get('DownloadEntryPath', ''), user, source_path) # Skipping bookmark and file sizes
safari_items.append(si)
except KeyError:
log.error('DownloadHistory not found')
def ReadBookmark(bm, path, safari_items, source_path, user):
'''Recursive function'''
bm_title = bm.get('Title', '')
bm_type = bm.get('WebBookmarkType','')
if bm_type == 'WebBookmarkTypeList':
if path == '': # To remove extra '/' at the first one
path = bm_title
else:
path = path + "/" + bm_title
try:
children = bm['Children']
for item in children:
ReadBookmark(item, path, safari_items, source_path, user)
except KeyError:
pass#log.debug('Error fetching bookmark children @ {}'.format(path))
elif bm_type == 'WebBookmarkTypeProxy':
pass# do nothing
elif bm_type == 'WebBookmarkTypeLeaf':
bm_url = bm.get('URLString', '')
bm_title = bm.get('URIDictionary', {}).get('title', '')
bm_date = None
if path.find('com.apple.ReadingList') > 0:
try:
bm_date = bm['ReadingList']['DateAdded']
except KeyError: pass
si = SafariItem(SafariItemType.BOOKMARK, bm_url, bm_title, bm_date, path, user, source_path)
safari_items.append(si)
else:
log.info('Unknown type found in bookmark : {} @ {}'.format(bm_title, path))
def ReadBookmarksPlist(plist, safari_items, source_path, user):
try:
version = plist['WebBookmarkFileVersion']
if version != 1:
log.warning('WebBookmarkFileVersion is {}, this may not parse properly!'.format(version))
except KeyError:
log.error('WebBookmarkFileVersion not found')
ReadBookmark(plist, '', safari_items, source_path, user)
def ReadTopSitesPlist(plist, safari_items, source_path, user):
ts_last_mod_date = None
try:
ts_last_mod_date = plist['DisplayedSitesLastModified']
log.info('Topsites last modified on {}'.format(ts_last_mod_date))
except KeyError:
log.error('DisplayedSitesLastModified not found')
try:
banned = plist['BannedURLStrings']
for item in banned:
si = SafariItem(SafariItemType.TOPSITE_BANNED, item, '', ts_last_mod_date,
'Date represents DisplayedSitesLastModified for all Topsites', user, source_path)
safari_items.append(si)
except KeyError:
log.error('BannedURLStrings not found')
try:
downloads = plist['TopSites']
for item in downloads:
si = SafariItem(SafariItemType.TOPSITE, item.get('TopSiteURLString', ''), item.get('TopSiteTitle', ''),
ts_last_mod_date, 'Date represents DisplayedSitesLastModified for all Topsites', user, source_path)
safari_items.append(si)
except KeyError:
log.error('TopSites not found')
def ReadLastSessionPlist(plist, safari_items, source_path, user):
try:
version = plist['SessionVersion']
if version != '1.0':
log.warning('SessionVersion is {}, this may not parse properly!'.format(version))
except KeyError:
log.error('SessionVersion not found')
try:
session_windows = plist['SessionWindows']
for windows in session_windows:
selectedIndex = windows.get('SelectedTabIndex', None)
index = 0
for tab in windows.get('TabStates', []):
info = 'SELECTED WINDOW' if index == selectedIndex else ''
date_closed = tab.get('DateClosed', '')
                log.debug('Tab DateClosed = {}'.format(date_closed))
if date_closed:
if info:
info += ', TAB_CLOSED_DATE=' + str(date_closed)
else:
info = 'TAB_CLOSED_DATE=' + str(date_closed)
si = SafariItem(SafariItemType.LASTSESSION, tab.get('TabURL', ''), tab.get('TabTitle', ''),
CommonFunctions.ReadMacAbsoluteTime(tab.get('LastVisitTime', '')),
info, user, source_path) # Skipping SessionState(its encrypted) & TabIdentifier
safari_items.append(si)
index += 1
except KeyError as ex:
log.error('SessionWindows not found or unable to parse. Error was {}'.format(str(ex)))
def ReadRecentlyClosedTabsPlist(plist, safari_items, source_path, user):
try:
version = plist['ClosedTabOrWindowPersistentStatesVersion']
if version != '1':
log.warning('ClosedTabOrWindowPersistentStatesVersion is {}, this may not parse properly!'.format(version))
except KeyError:
log.error('ClosedTabOrWindowPersistentStatesVersion not found')
try:
tabs = plist['ClosedTabOrWindowPersistentStates']
for tab in tabs:
state_type = tab.get('PersistentStateType', None)
if state_type not in [0, 1]:
log.warning('Unknown PersistentStateType: {}'.format(state_type))
state = tab.get('PersistentState', None)
if state:
date_closed = state.get('DateClosed', None)
private_mode = state.get('IsPrivateWindow', False)
if state_type == 0:
si = SafariItem(SafariItemType.RECENTCLOSEDTAB, state.get('TabURL', ''), state.get('TabTitle', ''),
date_closed, 'PRIVATE MODE' if private_mode else '', user, source_path)
safari_items.append(si)
else: # assume 1 or higher
tab_states = state.get('TabStates', [])
for ts in tab_states:
date_closed = ts.get('DateClosed', date_closed)
si = SafariItem(SafariItemType.RECENTCLOSEDTAB, ts.get('TabURL', ''), ts.get('TabTitle', ''),
date_closed, 'PRIVATE MODE' if private_mode else '', user, source_path)
safari_items.append(si)
else:
log.error('Key PersistentState not present!')
except KeyError as ex:
log.error('ClosedTabOrWindowPersistentStates not found or unable to parse. Error was {}'.format(str(ex)))
def ProcessSafariFolder(mac_info, folder_path, user, safari_items):
files_list = [ ['History.plist', ReadHistoryPlist] , ['Downloads.plist', ReadDownloadsPlist],
['Bookmarks.plist', ReadBookmarksPlist], ['TopSites.plist', ReadTopSitesPlist],
['LastSession.plist', ReadLastSessionPlist], ['Extensions/Extensions.plist', ReadExtensionsPlist],
['RecentlyClosedTabs.plist', ReadRecentlyClosedTabsPlist] ]
for item in files_list:
source_path = folder_path + '/' + item[0]
if mac_info.IsValidFilePath(source_path):
ProcessSafariPlist(mac_info, source_path, user, safari_items, item[1])
else:
log.debug('Safari File not found : {}'.format(source_path))
# Yosemite onwards there is History.db
ReadDbFromImage(mac_info, folder_path + '/History.db', user, safari_items, ReadHistoryDb, 'safari history')
ReadDbFromImage(mac_info, folder_path + '/CloudTabs.db', user, safari_items, ReadCloudTabsDb, 'safari CloudTabs')
ReadDbFromImage(mac_info, folder_path + '/BrowserState.db', user, safari_items, ReadBrowserStateDb, 'safari BrowserState')
def ReadDbFromImage(mac_info, source_path, user, safari_items, processing_func, description):
if mac_info.IsValidFilePath(source_path) and mac_info.GetFileSize(source_path, 0) > 0:
mac_info.ExportFile(source_path, __Plugin_Name, user + "_")
try:
sqlite = SqliteWrapper(mac_info)
conn = sqlite.connect(source_path)
if conn:
processing_func(conn, safari_items, source_path, user)
except (sqlite3.Error, OSError) as ex:
log.exception ("Failed to open {} database '{}', is it a valid SQLITE DB?".format(description, source_path))
def Plugin_Start(mac_info):
'''Main Entry point function for plugin'''
safari_items = []
user_safari_plist_paths = ('{}/Library/Preferences/com.apple.safari.plist',\
'{}/Library/Containers/com.apple.Safari/Data/Library/Preferences/com.apple.Safari.plist')
user_safari_path = '{}/Library/Safari'
user_safari_extensions = ('{}/Library/Containers/com.apple.Safari/Data/Library/Safari/AppExtensions/Extensions.plist',\
'{}/Library/Containers/com.apple.Safari/Data/Library/Safari/WebExtensions/Extensions.plist')
processed_paths = []
for user in mac_info.users:
user_name = user.user_name
if user.home_dir == '/private/var/empty': continue # Optimization, nothing should be here!
        elif user.home_dir == '/private/var/root': user_name = 'root' # Some other users use the same root folder, we will list all such users as 'root', as there is no way to tell
if user.home_dir in processed_paths: continue # Avoid processing same folder twice (some users have same folder! (Eg: root & daemon))
processed_paths.append(user.home_dir)
for user_safari_plist_path in user_safari_plist_paths:
source_path = user_safari_plist_path.format(user.home_dir)
if mac_info.IsValidFilePath(source_path):
ProcessSafariPlist(mac_info, source_path, user_name, safari_items, ReadSafariPlist)
#else:
# if not user_name.startswith('_'):
# log.debug('File not found: {}'.format(source_path))
source_path = user_safari_path.format(user.home_dir)
if mac_info.IsValidFolderPath(source_path):
ProcessSafariFolder(mac_info, source_path, user_name, safari_items)
for ext_path in user_safari_extensions:
source_path = ext_path.format(user.home_dir)
if mac_info.IsValidFilePath(source_path):
ProcessSafariPlist(mac_info, source_path, user_name, safari_items, ReadExtensionsPlist)
if len(safari_items) > 0:
PrintAll(safari_items, mac_info.output_params, '')
else:
log.info('No safari items were found!')
def Plugin_Start_Standalone(input_files_list, output_params):
log.info("Module Started as standalone")
for input_path in input_files_list:
log.debug("Input file passed was: " + input_path)
safari_items = []
if input_path.endswith('.plist'):
try:
success, plist, error = CommonFunctions.ReadPlist(input_path)
if success:
if input_path.lower().endswith('com.apple.safari.plist'):
ReadSafariPlist(plist, safari_items, input_path, '')
elif input_path.endswith('History.plist'):
ReadHistoryPlist(plist, safari_items, input_path, '')
elif input_path.endswith('Downloads.plist'):
ReadDownloadsPlist(plist, safari_items, input_path, '')
elif input_path.endswith('Bookmarks.plist'):
ReadBookmarksPlist(plist, safari_items, input_path, '')
elif input_path.endswith('TopSites.plist'):
ReadTopSitesPlist(plist, safari_items, input_path, '')
elif input_path.endswith('LastSession.plist'):
ReadLastSessionPlist(plist, safari_items, input_path, '')
elif input_path.endswith('Extensions.plist') and not input_path.endswith('KnownExtensions.plist'):
ReadExtensionsPlist(plist, safari_items, input_path, '')
elif input_path.endswith('RecentlyClosedTabs.plist'):
ReadRecentlyClosedTabsPlist(plist, safari_items, input_path, '')
else:
log.error("Unknown plist type encountered: {}".format(os.path.basename(input_path)))
else:
log.error(f'Failed to read plist: {os.path.basename(input_path)} : {error}')
except ValueError as ex:
log.exception('Failed to open file: {}'.format(input_path))
elif input_path.endswith('History.db'):
log.info ("Processing file " + input_path)
try:
conn = CommonFunctions.open_sqlite_db_readonly(input_path)
log.debug ("Opened database successfully")
ReadHistoryDb(conn, safari_items, input_path, '')
except (sqlite3.Error, OSError) as ex:
log.exception ("Failed to open database, is it a valid SQLITE DB?")
elif input_path.endswith('CloudTabs.db'):
log.info ("Processing file " + input_path)
try:
conn = CommonFunctions.open_sqlite_db_readonly(input_path)
log.debug ("Opened database successfully")
ReadCloudTabsDb(conn, safari_items, input_path, '')
except (sqlite3.Error, OSError) as ex:
log.exception ("Failed to open database, is it a valid SQLITE DB?")
elif input_path.endswith('BrowserState.db'):
log.info ("Processing file " + input_path)
try:
conn = CommonFunctions.open_sqlite_db_readonly(input_path)
log.debug ("Opened database successfully")
ReadBrowserStateDb(conn, safari_items, input_path, '')
except (sqlite3.Error, OSError) as ex:
log.exception ("Failed to open database, is it a valid SQLITE DB?")
else:
log.error('Input file {} is not a recognized name of a Safari artifact!'.format(input_path))
if len(safari_items) > 0:
PrintAll(safari_items, output_params, input_path)
else:
log.info('No safari items found in {}'.format(input_path))
def Plugin_Start_Ios(ios_info):
'''Entry point for ios_apt plugin'''
safari_items = []
for app in ios_info.apps:
if app.bundle_display_name.lower() == "safari":
log.debug(f'Safari version {app.bundle_version} found at {app.sandbox_path}')
safari_plist_path = f'{app.sandbox_path}/Library/Preferences/com.apple.mobilesafari.plist'
if ios_info.IsValidFilePath(safari_plist_path):
ProcessSafariPlist(ios_info, safari_plist_path, 'mobile', safari_items, ReadSafariPlist)
break
source_path = '/private/var/mobile/Library/Safari'
if ios_info.IsValidFolderPath(source_path):
ReadDbFromImage(ios_info, source_path + '/History.db', 'mobile', safari_items, ReadHistoryDb, 'safari History')
ReadDbFromImage(ios_info, source_path + '/CloudTabs.db', 'mobile', safari_items, ReadCloudTabsDb, 'safari CloudTabs')
ReadDbFromImage(ios_info, source_path + '/BrowserState.db', 'mobile', safari_items, ReadBrowserStateDb, 'safari BrowserState')
if len(safari_items) > 0:
PrintAll(safari_items, ios_info.output_params, '')
else:
log.info('No safari items were found!')
if __name__ == '__main__':
print ("This plugin is a part of a framework and does not run independently on its own!")
|
ydkhatri/mac_apt
|
plugins/safari.py
|
Python
|
mit
| 30,422
|
#!/usr/bin/env python
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter as ADHF
from sys import stdout, stderr, exit, maxint, argv
from os.path import join, dirname, basename, isfile
from itertools import izip, chain
from bisect import insort
import logging
import json
import re
import matplotlib; matplotlib.use('Agg')
from matplotlib import pylab as plt
from matplotlib.patches import Rectangle, Polygon
from psycho import dict2hierarchy
from pairwise_similarities import readDists, reverseDistMap, readGenomeMap, \
PAT_POS, PAT_CHR, GENOME_MAP_FILE, GM_ACTV_GNS_KEY
DRAW_SEG_HEIGHT = 100
DRAW_SCALE = 0.1
DEFAULT_OUT_FILENAME_PREFIX = 'overlapping_SB_'
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)
LOG_FILENAME = '%s' %basename(argv[0]).rsplit('.py', 1)[0]
def computeOverlaps(mlists, ciss):
res = dict()
for x in xrange(len(mlists)):
queue = list()
goss = mlists[x]
#
        # XXX this whole procedure can be significantly sped up by sorting start
        # positions beforehand and then using a pointer within this list.
#
for chrx, start, end, i in goss:
# only iterate until an interval is found that ends before new interval start
y = len(queue)-1
while y >= 0 and queue[y][0] == chrx and queue[y][1] >= start:
j = queue[y][3]
if queue[y][1] < end and queue[y][2] < start and \
len(ciss[i][0]) == len(ciss[j][0]):
ii, jj = i < j and (i, j) or (j, i)
if not res.has_key((ii, jj)):
res[(ii, jj)] = set()
res[(ii, jj)].add(x)
y -= 1
insort(queue, (chrx, end, start, i))
return res
def parseMarkerList(marker_seq_list):
res = list()
    for x in xrange(len(marker_seq_list)):
mlist = list()
queue = list()
goss = marker_seq_list[x]
for i in xrange(len(goss)):
start = int(PAT_POS.match(goss[i][1]).group(1))
end = int(PAT_POS.match(goss[i][-2]).group(2))
chrx = PAT_CHR.match(goss[i][1]).group(1)
mlist.append((chrx, start, end, i))
mlist.sort()
res.append(mlist)
return res
def drawSegments(ax, gos, color, scale=1, offset=(0, 0)):
xoff, yoff = offset
for g in gos:
start, end = map(int, PAT_POS.match(g).groups()[:2])
r = Rectangle(((start+xoff)*scale, (yoff-DRAW_SEG_HEIGHT/2.)*scale),
(end-start)*scale, DRAW_SEG_HEIGHT * scale, fill=True,
edgecolor='none', facecolor=color)
ax.add_patch(r)
def drawOverlappingSBFS(genomes, msl, dists, gMap, i, j, source, target, ax):
start_si= int(PAT_POS.match(msl[source][i][1]).group(1))
end_si = int(PAT_POS.match(msl[source][i][-2]).group(2))
start_sj = int(PAT_POS.match(msl[source][j][1]).group(1))
end_sj = int(PAT_POS.match(msl[source][j][-2]).group(2))
start_ti= int(PAT_POS.match(msl[target][i][1]).group(1))
end_ti = int(PAT_POS.match(msl[target][i][-2]).group(2))
start_tj = int(PAT_POS.match(msl[target][j][1]).group(1))
end_tj = int(PAT_POS.match(msl[target][j][-2]).group(2))
chrs1 = PAT_CHR.match(msl[source][i][1]).group(1)
chrs2 = PAT_CHR.match(msl[source][j][1]).group(1)
chrt1 = PAT_CHR.match(msl[target][i][1]).group(1)
chrt2 = PAT_CHR.match(msl[target][j][1]).group(1)
if chrs1 == chrs2:
OFFSET_SI = OFFSET_SJ = - min(start_si, start_sj)
else:
gap = (end_si-start_si + end_sj-start_sj)/10.
if chrt1 == chrt2 and start_tj < start_ti:
OFFSET_SJ = -start_sj-gap
OFFSET_SI = OFFSET_SJ + end_sj + 2*gap - start_si
else:
OFFSET_SI = -start_si-gap
OFFSET_SJ = OFFSET_SI + end_si + 2*gap - start_sj
if chrt1 == chrt2:
OFFSET_TI = OFFSET_TJ = (min(end_si, end_sj)+max(start_si, start_sj))/2. \
+ min(OFFSET_SI, OFFSET_SJ) - min(end_ti, end_tj)
else:
if start_sj > start_si:
OFFSET_TI = end_si + min(OFFSET_SI, OFFSET_SJ) - end_ti
OFFSET_TJ = OFFSET_TI + end_ti - start_tj
gap = end_tj+OFFSET_TJ/10.
else:
OFFSET_TJ = end_sj + min(OFFSET_SI, OFFSET_SJ) - end_tj
OFFSET_TI = OFFSET_TJ + end_tj - start_ti
gap = end_ti+OFFSET_TI/10.
OFFSET_TI -= gap
OFFSET_TJ += gap
GAP_Y = 2000
sgi = set(msl[source][i][1:-1])
sgj = set(msl[source][j][1:-1])
tgi = set(msl[target][i][1:-1])
tgj = set(msl[target][j][1:-1])
r = Rectangle(((start_si+OFFSET_SI)*DRAW_SCALE,
((GAP_Y-DRAW_SEG_HEIGHT)/2.)*DRAW_SCALE), (end_si-start_si)*DRAW_SCALE,
DRAW_SEG_HEIGHT* DRAW_SCALE, fill=True, edgecolor='none', facecolor='m',
alpha=0.2)
ax.add_patch(r)
drawSegments(ax, sgi.difference(sgj), 'm', scale=DRAW_SCALE,
offset=(OFFSET_SI, GAP_Y/2.))
r = Rectangle(((start_sj+OFFSET_SJ)*DRAW_SCALE,
((GAP_Y-DRAW_SEG_HEIGHT)/2.)*DRAW_SCALE), (end_sj-start_sj)*DRAW_SCALE,
DRAW_SEG_HEIGHT* DRAW_SCALE, fill=True, edgecolor='none', facecolor='g',
alpha=0.2)
ax.add_patch(r)
drawSegments(ax, sgj.difference(sgi), 'g', scale=DRAW_SCALE,
offset=(OFFSET_SJ, GAP_Y/2.))
drawSegments(ax, sgj.intersection(sgi), 'b', scale=DRAW_SCALE,
offset=(OFFSET_SJ, GAP_Y/2.))
r = Rectangle(((start_ti+OFFSET_TI) * DRAW_SCALE,
((-GAP_Y-DRAW_SEG_HEIGHT)/2.) * DRAW_SCALE),
(end_ti-start_ti) * DRAW_SCALE, DRAW_SEG_HEIGHT * DRAW_SCALE, fill=True,
edgecolor='none', facecolor='m', alpha=0.2)
ax.add_patch(r)
drawSegments(ax, tgi.difference(tgj), 'm', scale=DRAW_SCALE,
offset=(OFFSET_TI, -GAP_Y/2.))
r = Rectangle(((start_tj+OFFSET_TJ)*DRAW_SCALE,
((-GAP_Y-DRAW_SEG_HEIGHT)/2.)*DRAW_SCALE), (end_tj-start_tj)*DRAW_SCALE,
DRAW_SEG_HEIGHT* DRAW_SCALE, fill=True, edgecolor='none', facecolor='g',
alpha=0.2)
ax.add_patch(r)
drawSegments(ax, tgj.difference(tgi), 'g', scale=DRAW_SCALE,
offset=(OFFSET_TJ, -GAP_Y/2.))
drawSegments(ax, tgj.intersection(tgi), 'b', scale=DRAW_SCALE,
offset=(OFFSET_TI, -GAP_Y/2.))
ax.set_xlim(min((OFFSET_SI+start_si, OFFSET_SJ+start_sj,
OFFSET_TI+start_ti, OFFSET_TJ+start_tj))*DRAW_SCALE,
max((end_si+OFFSET_SI, end_sj+OFFSET_SJ, end_ti+OFFSET_TI,
end_tj+OFFSET_TJ)) * DRAW_SCALE)
ax.set_ylim((-GAP_Y/2. - DRAW_SEG_HEIGHT) * DRAW_SCALE, (GAP_Y/2. +
DRAW_SEG_HEIGHT) * DRAW_SCALE)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
plt.tick_params(top='off', bottom='on', right='off', labelbottom='on')
ax.set_yticks([-GAP_Y/2.*DRAW_SCALE, GAP_Y/2.*DRAW_SCALE])
ax.set_yticklabels([genomes[target], genomes[source]], fontsize=16)
gene2id = dict()
for g in genomes:
gene2id[g] = dict((gMap[g][GM_ACTV_GNS_KEY][i],
(PAT_CHR.match(gMap[g][GM_ACTV_GNS_KEY][i]).group(1), i+1)) for i in
xrange(len(gMap[g][GM_ACTV_GNS_KEY])))
pwDist = dists[(genomes[source], genomes[target])]
for gs in chain(sgi, sgj):
sstart, send = map(int, PAT_POS.match(gs).groups()[:2])
csi = gs in sgi
csj = gs in sgj
OFFSET_S = OFFSET_SI
if csj:
OFFSET_S = OFFSET_SJ
for gt_id, (_, w) in pwDist[gene2id[genomes[source]][gs]].items():
gt = gMap[genomes[target]][GM_ACTV_GNS_KEY][gt_id[1]-1]
tstart, tend = map(int, PAT_POS.match(gt).groups()[:2])
cti = gt in tgi
ctj = gt in tgj
OFFSET_T = OFFSET_TI
if ctj:
OFFSET_T = OFFSET_TJ
if (csi and csj) or (cti and ctj):
OFFSET_S = OFFSET_SI
xy = [((sstart+OFFSET_S)*DRAW_SCALE, \
(GAP_Y-DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((send+OFFSET_S)*DRAW_SCALE, \
(GAP_Y-DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((tend+OFFSET_T)*DRAW_SCALE, \
(-GAP_Y+DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((tstart+OFFSET_T)*DRAW_SCALE,\
(-GAP_Y+DRAW_SEG_HEIGHT)/2.*DRAW_SCALE)]
p = Polygon(plt.array(xy), closed=True, fill=True, \
edgecolor='none', facecolor='b', alpha=w)
ax.add_patch(p)
elif csi and cti:
xy = [((sstart+OFFSET_S)*DRAW_SCALE, \
(GAP_Y-DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((send+OFFSET_S)*DRAW_SCALE, \
(GAP_Y-DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((tend+OFFSET_T)*DRAW_SCALE, \
(-GAP_Y+DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((tstart+OFFSET_T)*DRAW_SCALE,\
(-GAP_Y+DRAW_SEG_HEIGHT)/2.*DRAW_SCALE)]
p = Polygon(plt.array(xy), closed=True, fill=True, \
edgecolor='none', facecolor='m', alpha=w)
ax.add_patch(p)
elif csj and ctj:
xy = [((sstart+OFFSET_S)*DRAW_SCALE, \
(GAP_Y-DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((send+OFFSET_S)*DRAW_SCALE, \
(GAP_Y-DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((tend+OFFSET_T)*DRAW_SCALE, \
(-GAP_Y+DRAW_SEG_HEIGHT)/2.*DRAW_SCALE), \
((tstart+OFFSET_T)*DRAW_SCALE,\
(-GAP_Y+DRAW_SEG_HEIGHT)/2.*DRAW_SCALE)]
p = Polygon(plt.array(xy), closed=True, fill=True, \
edgecolor='none', facecolor='g', alpha=w)
ax.add_patch(p)
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ADHF)
parser.add_argument('-o', '--out_filename_prefix',
default=DEFAULT_OUT_FILENAME_PREFIX, type=str,
help='prefix of output filename')
parser.add_argument('sb_hierarchy', type=str,
help='PSyCHO JSON output file')
args = parser.parse_args()
# setup logging
ch = logging.StreamHandler(stderr)
ch.setLevel(logging.ERROR)
ch.setFormatter(logging.Formatter('!! %(message)s'))
cf = logging.FileHandler('%s.log' %LOG_FILENAME, mode='w', delay=True)
cf.setLevel(logging.DEBUG)
cf.setFormatter(logging.Formatter('%(levelname)s\t%(asctime)s\t%(message)s'))
LOG.addHandler(cf)
LOG.addHandler(ch)
#
    # load hierarchy data
#
LOG.info('reading SB hierarchy from file %s' %args.sb_hierarchy)
jsDict = json.load(open(args.sb_hierarchy))
ref = jsDict['ref_id']
genomes = jsDict['genome_names']
marker_seq_list = jsDict['marker_seq_list']
mlists = parseMarkerList(marker_seq_list)
ciss = jsDict['raw_sbfs']
pwsim_files = jsDict['pwsim_files']
gMap = readGenomeMap(open(join(dirname(pwsim_files[0]), GENOME_MAP_FILE)))
LOG.info('reading pairwise similarities... ')
dists = dict()
for f in pwsim_files:
LOG.info(' %s' %f)
_ , dist = readDists(open(f))
gname1, gname2 = basename(f).split('.', 1)[0].split('_', 1)
dists[(gname1, gname2)] = dist
dists[(gname2, gname1)] = reverseDistMap(dist)
LOG.info('identifying overlaps between syntenic blocks (SBs)')
overlapping_sbs = computeOverlaps(mlists, ciss)
LOG.info('drawing figures...')
for (i, j), o in overlapping_sbs.items():
        out = open('%s%s-%s.pdf' %(args.out_filename_prefix, i, j), 'wb')
LOG.info((' plotting overlap between SBs %s and %s and storing ' + \
'figure in %s') %(i, j, out.name))
source = ref
f, axs = plt.subplots(len(genomes)-1, 1, sharey=False, sharex=False)
ax_it = iter(axs)
for target in xrange(len(genomes)):
if source == target:
continue
ax = next(ax_it)
drawOverlappingSBFS(genomes, marker_seq_list, dists, gMap, i, j, source,
target, ax)
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
f.savefig(out, format='pdf')
out.close()
#raw_input('Press Enter to continue...')
LOG.info('done!')
|
danydoerr/PSyCHO
|
scripts/visualize_overlapping_sbs.py
|
Python
|
mit
| 12,540
|
import abc
try:
import collections.abc as collectionsabc
except ImportError:
import collections as collectionsabc
import decimal
import io
import json
import locale
import os
import os.path
import subprocess
import threading
import time
def parse_version_string():
path = os.path.abspath(__file__)
while os.path.islink(path):
path = os.path.join(os.path.dirname(path), os.readlink(path))
path = os.path.dirname(path) # go up one level, from repo/lazyjson.py to repo, where README.md is located
while os.path.islink(path):
path = os.path.join(os.path.dirname(path), os.readlink(path))
try:
version = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], cwd=path).decode('utf-8').strip('\n')
if version == 'master':
try:
with open(os.path.join(path, 'README.md')) as readme:
for line in readme.read().splitlines():
if line.startswith('This is `lazyjson` version '):
return line.split(' ')[4]
except:
pass
return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=path).decode('utf-8').strip('\n')
except:
pass
__version__ = str(parse_version_string())
try:
import builtins
import pathlib
except ImportError:
pass
else:
def open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None):
if isinstance(file, pathlib.Path):
return file.open(mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline)
else:
return builtins.open(file, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, closefd=closefd, opener=opener)
class DecimalEncoder(json.JSONEncoder): #FROM http://stackoverflow.com/a/3885198/667338
def default(self, o):
if isinstance(o, decimal.Decimal):
return float(o) # do not use str as that would enclose the value in quotes
return super().default(o)
class Node(collectionsabc.MutableMapping, collectionsabc.MutableSequence):
def __init__(self, root, key_path=None):
if not isinstance(root, BaseFile):
root = File(root)
self.root = root
self.key_path = [] if key_path is None else key_path[:]
def __contains__(self, item):
if isinstance(item, Node):
item = item.value()
return item in self.value()
def __deepcopy__(self, memodict={}):
return self.value()
def __delitem__(self, key):
self.root.delete_value_at_key_path(self.key_path + [key])
def __eq__(self, other):
return self.root == other.root and self.key_path == other.key_path
def __format__(self, format_spec):
return format(self.value(), format_spec)
def __getitem__(self, key):
return Node(self.root, self.key_path + [key])
def __hash__(self):
return hash((self.root, self.key_path))
def __iter__(self):
v = self.value()
if isinstance(v, dict):
for item in v:
yield self[item]
else:
for i in range(len(v)):
yield self[i]
def __len__(self):
return len(self.value())
def __str__(self):
return str(self.value())
def __repr__(self):
return 'lazyjson.Node(' + repr(self.root) + ', ' + repr(self.key_path) + ')'
def __setitem__(self, key, value):
if isinstance(value, Node):
value = value.value()
self.root.set_value_at_key_path(self.key_path + [key], value)
def get(self, key, default=None):
try:
return self[key].value()
except:
if isinstance(default, Node):
return default.value()
else:
return default
def insert(self, key, value):
self.root.insert_value_at_key_path(self.key_path + [key], value)
@property
def key(self):
if len(self.key_path) == 0:
return None
else:
return self.key_path[-1]
@property
def parent(self):
if len(self.key_path) == 0:
return None
elif len(self.key_path) == 1:
return self.root
else:
return Node(self.root, self.key_path[:-1])
def set(self, new_value):
if isinstance(new_value, Node):
new_value = new_value.value()
self.root.set_value_at_key_path(self.key_path, new_value)
def value(self):
return self.root.value_at_key_path(self.key_path)
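# A minimal usage sketch (file name is an assumption): Node objects are lazy, so the
# underlying file is only read when .value() (or another evaluating call) is invoked.
#
#   f = File('config.json')
#   host = f['servers'][0]['host']   # builds a Node with key_path ['servers', 0, 'host']
#   print(host.value())              # the JSON file is opened and parsed only here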
class BaseFile(Node, metaclass=abc.ABCMeta):
"""ABC for lazyjson files (root values)."""
def __init__(self):
super().__init__(self)
@abc.abstractmethod
def __eq__(self, other):
raise NotImplementedError()
@abc.abstractmethod
def __hash__(self):
raise NotImplementedError()
def delete_value_at_key_path(self, key_path):
json_value = self.value()
item = json_value
if len(key_path) == 0:
json_value = None
else:
for key in key_path[:-1]:
item = item[key]
del item[key_path[-1]]
self.set(json_value)
def insert_value_at_key_path(self, key_path, value):
json_value = self.value()
item = json_value
if len(key_path) == 0:
json_value = value
else:
for key in key_path[:-1]:
item = item[key]
item.insert(key_path[-1], value)
self.set(json_value)
@abc.abstractmethod
def set(self, new_value):
pass
def set_value_at_key_path(self, key_path, new_value):
json_value = self.value()
item = json_value
if len(key_path) == 0:
json_value = new_value
else:
for key in key_path[:-1]:
item = item[key]
item[key_path[-1]] = new_value
self.set(json_value)
@abc.abstractmethod
    def value(self):
pass
def value_at_key_path(self, key_path):
ret = self.value()
for key in key_path:
ret = ret[key]
return ret
class File(BaseFile):
"""A file based on a file-like object, a pathlib.Path, or anything that can be opened."""
def __init__(self, file_info, file_is_open=None, tries=10, init=..., **kwargs):
super().__init__()
self.open_args = dict(kwargs)
self.file_is_open = isinstance(file_info, io.IOBase) if file_is_open is None else bool(file_is_open)
self.tries = tries
self.file_info = file_info
self.lock = threading.Lock()
if init != ... and not self.file_is_open and not pathlib.Path(self.file_info).exists():
with open(self.file_info, 'w', **self.open_args) as json_file:
json.dump(init, json_file, sort_keys=True, indent=4, separators=(',', ': '), cls=DecimalEncoder)
print(file=json_file) # json.dump doesn't end the file in a newline, so add it manually
def __eq__(self, other):
return self.file_info == other.file_info
def __hash__(self):
return hash(self.file_info)
def __repr__(self):
return 'lazyjson.File(' + repr(self.file_info) + ('' if self.file_is_open and isinstance(self.file_info, io.IOBase) or (not self.file_is_open) and not isinstance(self.file_info, io.IOBase) else ', file_is_open=' + repr(self.file_is_open)) + ('' if self.tries == 10 else ', tries=' + repr(self.tries)) + (', **' + repr(self.open_args) if self.open_args else '') + ')'
def set(self, new_value):
if isinstance(new_value, Node):
new_value = new_value.value()
json.dumps(new_value, cls=DecimalEncoder) # try writing the value to a string first to prevent corrupting the file if the value is not JSON serializable
with self.lock:
if self.file_is_open:
json.dump(new_value, self.file_info, sort_keys=True, indent=4, separators=(',', ': '), cls=DecimalEncoder)
print(file=self.file_info) # json.dump doesn't end the file in a newline, so add it manually
else:
with open(self.file_info, 'w', **self.open_args) as json_file:
json.dump(new_value, json_file, sort_keys=True, indent=4, separators=(',', ': '), cls=DecimalEncoder)
print(file=json_file) # json.dump doesn't end the file in a newline, so add it manually
def value(self):
if self.file_is_open:
return json.load(self.file_info, parse_float=decimal.Decimal)
else:
tried = 0
while True:
try:
with open(self.file_info, **self.open_args) as json_file:
return json.load(json_file, parse_float=decimal.Decimal)
except json.decoder.JSONDecodeError:
tried += 1
if tried >= self.tries:
raise
else:
time.sleep(1)
class CachedFile(BaseFile):
"""A file that wraps an inner file. The contents of the inner file are cached in a user-provided cache, which must be a mutable mapping.
Cache invalidation must be handled externally, for example by storing the cache inside flask.g when working with the Flask framework.
"""
def __init__(self, cache, inner):
super().__init__()
self.cache = cache
self.inner = inner
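    # A minimal sketch, assuming a local 'config.json': any mutable mapping (here a
    # plain dict) works as the cache, and repeated reads are served from it until the
    # cached entry is removed externally.
    #
    #   cache = {}
    #   cfg = CachedFile(cache, File('config.json'))
    #   cfg['key'].value()   # first access reads the file and fills the cache
    #   cfg['key'].value()   # second access comes from the dict, not the disk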
def __eq__(self, other):
return self.inner == other.inner
def __hash__(self):
return hash(self.inner)
def __repr__(self):
return 'lazyjson.CachedFile(' + repr(self.cache) + ', ' + repr(self.inner) + ')'
def set(self, new_value):
if self.inner in self.cache:
del self.cache[self.inner]
self.inner.set(new_value)
def value(self):
if self.inner not in self.cache:
self.cache[self.inner] = self.inner.value()
return self.cache[self.inner]
class HTTPFile(BaseFile):
def __init__(self, url, post_url=None, **kwargs):
super().__init__()
self.url = url
self.post_url = url if post_url is None else post_url
self.request_params = kwargs
def __eq__(self, other):
return self.url == other.url and self.post_url == other.post_url
def __hash__(self):
return hash((self.url, self.post_url))
def __repr__(self):
return 'lazyjson.HTTPFile(' + repr(self.url) + ('' if self.post_url == self.url else ', post_url=' + repr(self.post_url)) + ''.join(', {}={}'.format(k, repr(v)) for k, v in self.request_params.items()) + ')'
def set(self, new_value):
import requests
if isinstance(new_value, Node):
new_value = new_value.value()
request_params = self.request_params.copy()
request_params['json'] = new_value
requests.post(self.post_url, **request_params)
def value(self):
import requests
return requests.get(self.url, **self.request_params).json()
class MultiFile(BaseFile):
def __init__(self, *args):
super().__init__()
self.files = [arg if isinstance(arg, BaseFile) else File(arg) for arg in args]
def __eq__(self, other):
return self.files == other.files
def __hash__(self):
        return hash(tuple(self.files))
def __repr__(self):
return 'lazyjson.MultiFile(' + ', '.join(repr(f) for f in self.files) + ')'
@staticmethod
def json_recursive_merge(json_values):
try:
first = next(json_values)
except StopIteration:
return None
if isinstance(first, dict):
objects_prefix = [first]
for value in json_values:
if isinstance(value, dict):
objects_prefix.append(value)
else:
break
return {k: MultiFile.json_recursive_merge(value[k] for value in objects_prefix if isinstance(value, dict) and k in value) for k in set.union(*(set(d.keys()) for d in objects_prefix))}
else:
return first
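    # Merge behaviour sketch (hypothetical inputs): leading JSON objects are merged
    # key by key, earlier files win for non-object values, and the first non-object
    # value ends the merge for that key.
    #
    #   MultiFile.json_recursive_merge(iter([{'a': 1, 'b': {'x': 1}},
    #                                        {'b': {'y': 2}, 'c': 3}]))
    #   # -> {'a': 1, 'b': {'x': 1, 'y': 2}, 'c': 3}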
def set(self, new_value):
self.files[0].set(new_value)
def value(self):
return self.json_recursive_merge(f.value() for f in self.files)
class PythonFile(BaseFile):
"""A file based on a Python object. Can be used with MultiFile to provide fallback values."""
def __init__(self, value=None):
super().__init__()
self._value = value
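    # A minimal fallback sketch (file name is an assumption; 'config.json' is expected
    # to hold a JSON object for the merge to apply): values missing from the file fall
    # through to the in-memory defaults.
    #
    #   defaults = PythonFile({'verbose': False, 'retries': 3})
    #   cfg = MultiFile(File('config.json'), defaults)
    #   cfg['retries'].value()   # 3 unless config.json overrides it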
def __eq__(self, other):
return self._value == other._value
def __hash__(self):
return hash(self._value)
def __repr__(self):
return 'lazyjson.PythonFile(' + repr(self._value) + ')'
def set(self, new_value):
if isinstance(new_value, Node):
new_value = new_value.value()
json.dumps(new_value, cls=DecimalEncoder) # try writing the value to a string first to make sure it is JSON serializable
self._value = new_value
def value(self):
return self._value
class SFTPFile(BaseFile):
def __init__(self, host, port, path, **kwargs):
import paramiko
import paramiko.util
super().__init__()
self.hostname = host
self.port = port
self.remote_path = path
self.connection_args = kwargs.copy()
if 'pkey' not in self.connection_args:
self.connection_args['pkey'] = paramiko.RSAKey.from_private_key_file(os.path.expanduser('~/.ssh/id_rsa'))
if 'hostkey' not in self.connection_args:
host_keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
            self.connection_args['hostkey'] = host_keys[self.hostname][list(host_keys[self.hostname].keys())[0]]
def __eq__(self, other):
return self.hostname == other.hostname and self.port == other.port and self.remote_path == other.remote_path
def __hash__(self):
return hash((self.hostname, self.port, self.remote_path))
def __repr__(self):
return 'lazyjson.SFTPFile(' + repr(self.hostname) + ', ' + repr(self.port) + ', ' + repr(self.remote_path) + ''.join(', {}={}'.format(k, repr(v)) for k, v in self.connection_args.items()) + ')'
def set(self, new_value):
import paramiko
if isinstance(new_value, Node):
new_value = new_value.value()
with paramiko.Transport((self.hostname, self.port)) as transport:
transport.connect(**self.connection_args)
with transport.open_sftp_client() as sftp_client:
with sftp_client.file(self.remote_path, 'w') as sftp_file:
json_string = json.dumps(new_value, sort_keys=True, indent=4, separators=(',', ': '), cls=DecimalEncoder)
sftp_file.write(json_string.encode('utf-8') + b'\n')
def value(self):
import paramiko
with paramiko.Transport((self.hostname, self.port)) as transport:
transport.connect(**self.connection_args)
with transport.open_sftp_client() as sftp_client:
with sftp_client.file(self.remote_path) as sftp_file:
return json.loads(sftp_file.read().decode('utf-8'), parse_float=decimal.Decimal)
|
fenhl/lazyjson
|
lazyjson/__init__.py
|
Python
|
mit
| 15,325
|
from pyh import *
rows = [[1, 'Lucy', 25], [2, 'Tom', 30], [3, 'Lily', 20]]  # table rows: (id, name, age)
page = PyH('Test')
page << div(style="text-align:center") << h4('Test table')
mytab = page << table(border="1", cellpadding="3", cellspacing="0", style="margin:auto")
tr1 = mytab << tr(bgcolor="lightgrey")
tr1 << th('id') + th('name') + th('age')
for i in range(len(rows)):
    tr2 = mytab << tr()
    for j in range(3):
        tr2 << td(rows[i][j])
        if rows[i][j] == 'Tom':
            tr2.attributes['bgcolor'] = 'yellow'      # highlight Tom's whole row
        if rows[i][j] == 'Lily':
            tr2[1].attributes['style'] = 'color:red'  # color only the name cell
page.printOut('/Users/miraclewong/github/PythonBasic/PyH/demo.html')
|
MiracleWong/PythonBasic
|
PyH/demo.py
|
Python
|
mit
| 635
|
from app import Handler
from handlers.auth import Auth
class LogoutHandler(Handler):
def get(self):
Auth.logout(self.response)
self.redirect("/")
|
diegopettengill/multiuserblog
|
handlers/logout.py
|
Python
|
mit
| 168
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/example/example.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow.core.example import feature_pb2 as tensorflow_dot_core_dot_example_dot_feature__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/core/example/example.proto',
package='tensorflow',
syntax='proto3',
serialized_pb=_b('\n%tensorflow/core/example/example.proto\x12\ntensorflow\x1a%tensorflow/core/example/feature.proto\"1\n\x07\x45xample\x12&\n\x08\x66\x65\x61tures\x18\x01 \x01(\x0b\x32\x14.tensorflow.Features\"i\n\x0fSequenceExample\x12%\n\x07\x63ontext\x18\x01 \x01(\x0b\x32\x14.tensorflow.Features\x12/\n\rfeature_lists\x18\x02 \x01(\x0b\x32\x18.tensorflow.FeatureListsB,\n\x16org.tensorflow.exampleB\rExampleProtosP\x01\xf8\x01\x01\x62\x06proto3')
,
dependencies=[tensorflow_dot_core_dot_example_dot_feature__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_EXAMPLE = _descriptor.Descriptor(
name='Example',
full_name='tensorflow.Example',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='features', full_name='tensorflow.Example.features', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=92,
serialized_end=141,
)
_SEQUENCEEXAMPLE = _descriptor.Descriptor(
name='SequenceExample',
full_name='tensorflow.SequenceExample',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='context', full_name='tensorflow.SequenceExample.context', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='feature_lists', full_name='tensorflow.SequenceExample.feature_lists', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=143,
serialized_end=248,
)
_EXAMPLE.fields_by_name['features'].message_type = tensorflow_dot_core_dot_example_dot_feature__pb2._FEATURES
_SEQUENCEEXAMPLE.fields_by_name['context'].message_type = tensorflow_dot_core_dot_example_dot_feature__pb2._FEATURES
_SEQUENCEEXAMPLE.fields_by_name['feature_lists'].message_type = tensorflow_dot_core_dot_example_dot_feature__pb2._FEATURELISTS
DESCRIPTOR.message_types_by_name['Example'] = _EXAMPLE
DESCRIPTOR.message_types_by_name['SequenceExample'] = _SEQUENCEEXAMPLE
Example = _reflection.GeneratedProtocolMessageType('Example', (_message.Message,), dict(
DESCRIPTOR = _EXAMPLE,
__module__ = 'tensorflow.core.example.example_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.Example)
))
_sym_db.RegisterMessage(Example)
SequenceExample = _reflection.GeneratedProtocolMessageType('SequenceExample', (_message.Message,), dict(
DESCRIPTOR = _SEQUENCEEXAMPLE,
__module__ = 'tensorflow.core.example.example_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.SequenceExample)
))
_sym_db.RegisterMessage(SequenceExample)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026org.tensorflow.exampleB\rExampleProtosP\001\370\001\001'))
try:
# THESE ELEMENTS WILL BE DEPRECATED.
# Please use the generated *_pb2_grpc.py files instead.
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
except ImportError:
pass
# @@protoc_insertion_point(module_scope)
|
nubbel/swift-tensorflow
|
PythonGenerated/tensorflow/core/example/example_pb2.py
|
Python
|
mit
| 4,799
|
from abc import ABCMeta, abstractmethod
import numpy as np
class Illustration2VecBase(object):
__metaclass__ = ABCMeta
def __init__(self, net, tags=None, threshold=None):
self.net = net
if tags is not None:
self.tags = np.array(tags)
self.index = {t: i for i, t in enumerate(tags)}
else:
self.tags = None
        self.threshold = threshold
@abstractmethod
def _extract(self, inputs, layername):
pass
def _convert_image(self, image):
arr = np.asarray(image, dtype=np.float32)
if arr.ndim == 2:
# convert a monochrome image to a color one
ret = np.empty((arr.shape[0], arr.shape[1], 3), dtype=np.float32)
ret[:] = arr.reshape(arr.shape[0], arr.shape[1], 1)
return ret
elif arr.ndim == 3:
# if arr contains alpha channel, remove it
return arr[:,:,:3]
else:
raise TypeError('unsupported image specified')
def _estimate(self, images):
assert(self.tags is not None)
imgs = [self._convert_image(img) for img in images]
prob = self._extract(imgs, layername='prob')
prob = prob.reshape(prob.shape[0], -1)
return prob
def estimate_specific_tags(self, images, tags):
prob = self._estimate(images)
return [{t: float(prob[i, self.index[t]]) for t in tags}
for i in range(prob.shape[0])]
def estimate_top_tags(self, images, n_tag=10):
prob = self._estimate(images)
general_prob = prob[:, :512]
character_prob = prob[:, 512:1024]
copyright_prob = prob[:, 1024:1536]
rating_prob = prob[:, 1536:]
general_arg = np.argsort(-general_prob, axis=1)[:, :n_tag]
character_arg = np.argsort(-character_prob, axis=1)[:, :n_tag]
copyright_arg = np.argsort(-copyright_prob, axis=1)[:, :n_tag]
rating_arg = np.argsort(-rating_prob, axis=1)
result = []
for i in range(prob.shape[0]):
result.append({
'general': list(zip(
self.tags[general_arg[i]],
general_prob[i, general_arg[i]].tolist())),
'character': list(zip(
self.tags[512 + character_arg[i]],
character_prob[i, character_arg[i]].tolist())),
'copyright': list(zip(
self.tags[1024 + copyright_arg[i]],
copyright_prob[i, copyright_arg[i]].tolist())),
'rating': list(zip(
self.tags[1536 + rating_arg[i]],
rating_prob[i, rating_arg[i]].tolist())),
})
return result
def __extract_plausible_tags(self, preds, f):
result = []
for pred in preds:
general = [(t, p) for t, p in pred['general'] if f(t, p)]
character = [(t, p) for t, p in pred['character'] if f(t, p)]
copyright = [(t, p) for t, p in pred['copyright'] if f(t, p)]
result.append({
'general': general,
'character': character,
'copyright': copyright,
'rating': pred['rating'],
})
return result
def estimate_plausible_tags(
self, images, threshold=0.25, threshold_rule='constant'):
preds = self.estimate_top_tags(images, n_tag=512)
if threshold_rule == 'constant':
return self.__extract_plausible_tags(
preds, lambda t, p: p > threshold)
elif threshold_rule == 'f0.5':
if self.threshold is None:
raise TypeError(
'please specify threshold option during init.')
return self.__extract_plausible_tags(
preds, lambda t, p: p > self.threshold[self.index[t], 0])
elif threshold_rule == 'f1':
if self.threshold is None:
raise TypeError(
'please specify threshold option during init.')
return self.__extract_plausible_tags(
preds, lambda t, p: p > self.threshold[self.index[t], 1])
elif threshold_rule == 'f2':
if self.threshold is None:
raise TypeError(
'please specify threshold option during init.')
return self.__extract_plausible_tags(
preds, lambda t, p: p > self.threshold[self.index[t], 2])
else:
raise TypeError('unknown rule specified')
def extract_feature(self, images):
imgs = [self._convert_image(img) for img in images]
feature = self._extract(imgs, layername='encode1')
feature = feature.reshape(feature.shape[0], -1)
return feature
def extract_binary_feature(self, images):
imgs = [self._convert_image(img) for img in images]
feature = self._extract(imgs, layername='encode1neuron')
feature = feature.reshape(feature.shape[0], -1)
binary_feature = np.zeros_like(feature, dtype=np.uint8)
binary_feature[feature > 0.5] = 1
return np.packbits(binary_feature, axis=1)
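# A minimal usage sketch for a concrete subclass that implements _extract() (the
# model object and image path are assumptions):
#
#   from PIL import Image
#   img = Image.open('example.jpg')
#   tags = model.estimate_plausible_tags([img], threshold=0.5)
#   feats = model.extract_feature([img])   # float features from the 'encode1' layer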
|
rezoo/illustration2vec
|
i2v/base.py
|
Python
|
mit
| 5,296
|
# Copyright (c) 2015-2019 Jack Morton <jhm@jemscout.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import random
from bs4 import BeautifulSoup
from urllib.request import urlopen, Request
import nhlscrappo.constants as C
from nhlscrappo import GameType, ReportType
class ReportFetcher(object):
"""Responsible for fetching and validating the report fields"""
__docroot = "http://www.nhl.com/"
def __init__(self, season, game_num, game_type, report_type):
self.season = season
self.game_num = game_num
self.game_type = game_type
self.report_type = report_type
self.soup = None
def __random_user_agent(self):
user_agent_list = [ \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, " \
"like Gecko) Chrome/22.0.1207.1 Safari/537.1", \
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 " \
"(KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 "\
"(KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like " \
"Gecko) Chrome/20.0.1090.0 Safari/536.6", \
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, " \
"like Gecko) Chrome/19.77.34.5 Safari/537.1", \
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like " \
"Gecko) Chrome/19.0.1084.9 Safari/536.5", \
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like " \
"Gecko) Chrome/19.0.1084.36 Safari/536.5", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, " \
"like Gecko) Chrome/19.0.1063.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1063.0 Safari/536.3",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3" \
" (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1062.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, " \
"like Gecko) Chrome/19.0.1062.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1061.1 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, " \
"like Gecko) Chrome/19.0.1061.1 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1061.1 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1061.0 Safari/536.3", \
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like " \
"Gecko) Chrome/19.0.1055.1 Safari/535.24", \
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, " \
"like Gecko) Chrome/19.0.1055.1 Safari/535.24"]
return random.choice(user_agent_list)
def __load_html(self, url):
if "http://" in url:
req = Request(url, headers = {
"User-Agent": self.__random_user_agent(), \
"Accept": "text/html,application/xhtml+xml,application/" \
"xml;q=0.9,*/*;q=0.8", \
"Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.3", \
"Accept-Encoding": "none", \
"Accept-Language": "en-US,en;q=0.8", \
"Connection": "keep-alive"})
with urlopen(req) as handle:
html = handle.read()
handle.close()
            return BeautifulSoup(html.decode("utf-8"), features="lxml")
else:
with open(url, "r") as handle:
html = handle.read()
handle.close()
return BeautifulSoup(html, features="lxml")
def make_soup(self, local = None):
if local:
self.soup = self.__load_html(local)
else:
url = self.__docroot + "scores/htmlreports/" + str(self.season) + \
str(self.season + 1) + "/" + self.report_type.value + "0" + \
str(self.game_type.value) + ("%04i" % self.game_num) + ".HTM"
self.soup = self.__load_html(url)
return self.soup
@property
def season(self):
return self._season
@season.setter
def season(self, value):
if not isinstance(value, int):
raise TypeError("season must be of type int")
if value < C.MIN_SEASON or value > C.MAX_SEASON:
raise ValueError("Only seasons starting from " + \
str(C.MIN_SEASON) + " until " + str(C.MAX_SEASON) + \
" are supported")
self._season = int(value)
@property
def game_num(self):
return self._game_num
@game_num.setter
def game_num(self, value):
if not isinstance(value, int):
raise TypeError("game_num must be of type int")
self._game_num = value
@property
def game_type(self):
return self._game_type
@game_type.setter
def game_type(self, value):
if value in GameType:
self._game_type = value
else:
raise TypeError("game_type must be of type GameType")
@property
def report_type(self):
return self._report_type
@report_type.setter
def report_type(self, value):
if value in ReportType:
self._report_type = value
else:
raise TypeError("report_type must be of type ReportType")
@property
def soup(self):
return self._soup
@soup.setter
def soup(self, value):
if value is not None and not isinstance(value, BeautifulSoup):
raise TypeError("soup must be of type BeautifulSoup")
self._soup = value
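# --- Hedged usage sketch (not part of the original module) ---
# The season and enum members below are assumptions for illustration only;
# the actual GameType/ReportType members are defined elsewhere in nhlscrappo.
#
# fetcher = ReportFetcher(season=2013, game_num=20,
#                         game_type=GameType.Regular,      # hypothetical member
#                         report_type=ReportType.Roster)   # hypothetical member
# soup = fetcher.make_soup()            # fetch the report from nhl.com
# soup = fetcher.make_soup("game.HTM")  # or parse a saved local report instead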
|
jhm-/nhlscrappo
|
nhlscrappo/fetcher.py
|
Python
|
mit
| 7,149
|
class Stats:
"""
Contains the stats that a character or monster may have.
The stats tied to an agent are:
* Health: The amount of damage the agent can withstand. The agent dies when their health falls to zero. This is
the only stat that will persist after a battle ends.
* Stamina: An arbitrary measure of ability to use special actions such as magic or other techniques. This stat is
meant to be consumed through the battle.
* Strength: Influences the effectiveness of physical attacks.
* Magic: Influences the effectiveness of magical attacks.
* Endurance: Influences defence and resistance to attacks, as well as resistance to status ailments.
* Agility: Influences speed in battle and chance to evade attacks.
"""
def __init__(self, health, stamina, strength, magic, endurance, agility):
"""
Initializes stats with specific values.
"""
self.health = health
self.stamina = stamina
self.strength = strength
self.magic = magic
self.endurance = endurance
self.agility = agility
def __str__(self):
return str(self.__dict__)
class EquipmentStats:
"""
Contains the stats that come from equipment. Only characters have equipment.
* Attack: Heavily influences physical attacks.
* Magic attack: Heavily influences magic attacks.
* Armour: Heavily influences physical defence.
* Magic armour: Heavily influences magic defence.
"""
def __init__(self, attack, magic_attack, armour, magic_armour):
"""
Initializes equipment stats with specific values.
"""
self.attack = attack
self.magic_attack = magic_attack
self.armour = armour
self.magic_armour = magic_armour
def __str__(self):
return str(self.__dict__)
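# --- Hedged usage sketch (not part of the original module) ---
# The concrete numbers are illustrative only; they show how the two stat
# containers defined above are meant to be instantiated.
#
# hero_stats = Stats(health=100, stamina=30, strength=12,
#                    magic=8, endurance=10, agility=9)
# hero_gear = EquipmentStats(attack=15, magic_attack=5,
#                            armour=12, magic_armour=6)
# print(hero_stats)  # prints the attribute dictionary via __str__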
|
MikeHoffert/caladbolg-engine
|
caladbolg/agents/stats.py
|
Python
|
mit
| 1,898
|
class URIError(Exception):
pass
|
sergeyglazyrindev/amigrations
|
amigrations/adapters/exceptions.py
|
Python
|
mit
| 36
|
from select_multiple_field.models import SelectMultipleField
class CommaSeparatedCharField(SelectMultipleField):
def contribute_to_class(self, cls, name, **kwargs):
"""Contribute to the Model subclass.
We just set our custom get_FIELD_display(),
which returns a comma-separated list of displays.
"""
super(CommaSeparatedCharField, self).contribute_to_class(cls, name,
**kwargs)
def _get_FIELD_display(instance):
choices = dict(self.choices)
values = getattr(instance, self.attname)
return ", ".join(unicode(choices.get(c, c)) for c in values if c)
setattr(cls, 'get_%s_display' % self.name, _get_FIELD_display)
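# --- Hedged usage sketch (not part of the original module) ---
# The model and choices are hypothetical; they only illustrate the
# get_FIELD_display() accessor installed by contribute_to_class above.
#
# class Document(models.Model):
#     TAG_CHOICES = (('sci', 'Science'), ('lit', 'Literature'))
#     tags = CommaSeparatedCharField(choices=TAG_CHOICES, max_length=32)
#
# # With tags == ['sci', 'lit'] the generated accessor returns a
# # comma-separated list of display values:
# # document.get_tags_display()  ->  "Science, Literature"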
|
Lcaracol/ideasbox.lan
|
ideasbox/fields.py
|
Python
|
mit
| 778
|
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 10 17:27:49 2016
@author: camacho
"""
import Kernel;reload(Kernel);kl = Kernel
import numpy as np
from time import time
import inspect as i
##### LIKELIHOOD
def likelihood(kernel, x, xcalc, y, yerr): #covariance matrix calculations
K = np.zeros((len(x),len(x))) #covariance matrix K
for i in range(len(x)):
x1 = x[i]
for j in range(len(xcalc)):
x2 = xcalc[j]
K[i,j] = kernel(x1, x2)
K=K+yerr**2*np.identity(len(x))
log_p_correct = lnlike(K, y)
print 'likelihood ->', log_p_correct
return K
def lnlike(K, r): #log-likelihood calculations
from scipy.linalg import cho_factor, cho_solve
L1 = cho_factor(K) # tuple (L, lower)
sol = cho_solve(L1, r) # this is K^-1*(r)
n = r.size
logLike = -0.5*np.dot(r, sol) \
- np.sum(np.log(np.diag(L1[0]))) \
- n*0.5*np.log(2*np.pi)
return logLike
##### LIKELIHOOD GRADIENT
def grad_logp(kernel,x,xcalc,y,yerr,cov_matrix):
K_grad = np.zeros((len(x),len(x)))
for i in range(len(x)):
x1 = x[i]
for j in range(len(xcalc)):
x2 = xcalc[j]
K_grad[i,j] = kernel(x1, x2)
K_inv = np.linalg.inv(cov_matrix)
alpha = np.dot(K_inv,y)
alpha_trans = alpha.T
    # gradient formula taken from Rasmussen & Williams, chapter 5, equation (5.9)
grad = 0.5 * np.dot(y.T,np.dot(K_inv,np.dot(K_grad,np.dot(K_inv,y)))) \
-0.5 * np.einsum('ij,ij',K_inv,K_grad)
return grad
def gradient_likelihood(kernel,x,xcalc,y,yerr):
import inspect
    cov_matrix = likelihood(kernel, x, xcalc, y, yerr)  # this prints the likelihood again,
    # I think because of this call, but the original
    # covariance matrix is needed here
if isinstance(kernel,kl.ExpSquared):
grad1=grad_logp(kernel.dES_dtheta, x, xcalc, y, yerr, cov_matrix)
grad2=grad_logp(kernel.dES_dl, x, xcalc, y, yerr, cov_matrix)
print 'gradient ->', grad1, grad2
elif isinstance(kernel,kl.ExpSineSquared):
grad1=grad_logp(kernel.dESS_dtheta,x,xcalc,y,yerr,cov_matrix)
grad2=grad_logp(kernel.dESS_dl,x,xcalc,y,yerr,cov_matrix)
grad3=grad_logp(kernel.dESS_dP,x,xcalc,y,yerr,cov_matrix)
print 'gradient ->', grad1, grad2, grad3
elif isinstance(kernel,kl.RatQuadratic):
grad1=grad_logp(kernel.dRQ_dtheta,x,xcalc,y,yerr,cov_matrix)
grad2=grad_logp(kernel.dRQ_dalpha,x,xcalc,y,yerr,cov_matrix)
grad3=grad_logp(kernel.dRQ_dl,x,xcalc,y,yerr,cov_matrix)
print 'gradient ->', grad1, grad2, grad3
elif isinstance(kernel,kl.Exponential):
grad1=grad_logp(kernel.dExp_dtheta,x,xcalc,y,yerr,cov_matrix)
grad2=grad_logp(kernel.dExp_dl,x,xcalc,y,yerr,cov_matrix)
print 'gradient ->', grad1, grad2
# elif isinstance(kernel,Sum) is True:
# initial=kernel.__dict__
# for i in range(len(initial)):
# cond_i = initial.popitem()
#            print 'gradient -> Look, look, it is a sum with', cond_i
#
else:
print 'gradient -> We dont need no calculation \n We dont need no optimization control'
    # I did not apply the same logic to the exponential and matern kernels because
    # it is not worth doing until this works as it should
    # "works as it should" = knowing the gradient is being computed correctly
    # and finding a way for this to work with sums and products of kernels
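# --- Hedged usage sketch (not part of the original file) ---
# lnlike() only needs a positive-definite covariance matrix and the data
# vector, so it can be checked without the Kernel module; the squared
# exponential below is written out by hand purely for illustration.
#
# x = np.array([0.0, 1.0, 2.0])
# y = np.array([0.1, -0.2, 0.05])
# yerr = 0.1
# K = np.exp(-0.5 * (x[:, None] - x[None, :]) ** 2) \
#     + yerr ** 2 * np.identity(len(x))
# print lnlike(K, y)   # scalar log-likelihood of y under N(0, K)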
|
jdavidrcamacho/Tests_GP
|
01 - Trials and attempts/Cheat_attempt/Likelihood.py
|
Python
|
mit
| 3,613
|
# This file is part of pyplink.
#
# The MIT License (MIT)
#
# Copyright (c) 2014 Louis-Philippe Lemieux Perreault
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import unittest
from . import test_suite
__author__ = "Louis-Philippe Lemieux Perreault"
__copyright__ = "Copyright 2014 Louis-Philippe Lemieux Perreault"
__license__ = "MIT"
unittest.TextTestRunner(verbosity=1).run(test_suite)
|
lemieuxl/pyplink
|
pyplink/tests/__main__.py
|
Python
|
mit
| 1,406
|
#!/usr/bin/env python
import doctest
import unittest
import sys
def test_suite(docs):
suite = unittest.TestSuite()
for doc in docs:
suite.addTest(doctest.DocFileSuite(doc, optionflags=flags()))
return suite
def flags():
flags = doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS
if sys.version_info >= (3,):
flags |= doctest.IGNORE_EXCEPTION_DETAIL
return flags
def run(docs):
suite = test_suite(docs)
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(suite)
sys.exit(int(bool(result.failures or result.errors)))
if __name__ == '__main__':
    run(sys.argv[1:])
|
nsi-iff/should-dsl
|
run_examples.py
|
Python
|
mit
| 630
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-07-27 17:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0008_sponsor_primary_sponsor'),
]
operations = [
migrations.CreateModel(
name='Footer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('facebook_url', models.URLField(blank=True, null=True)),
('twitch_url', models.URLField(blank=True, null=True)),
],
),
]
|
davidjrichardson/uwcs-zarya
|
blog/migrations/0009_footer.py
|
Python
|
mit
| 666
|
from __future__ import unicode_literals
from django.apps import AppConfig
class ToolboxConfig(AppConfig):
name = 'toolbox'
|
california-civic-data-coalition/django-calaccess-downloads-website
|
toolbox/apps.py
|
Python
|
mit
| 130
|
# -*- coding: utf-8 -*-
from rdflib import Namespace
ONTOLEX = Namespace("http://www.w3.org/ns/lemon/ontolex#")
LEXINFO = Namespace("http://www.lexinfo.net/ontology/2.0/lexinfo#")
DECOMP = Namespace("http://www.w3.org/ns/lemon/decomp#")
ISOCAT = Namespace("http://www.isocat.org/datcat/")
LIME = Namespace("http://www.w3.org/ns/lemon/lime#")
|
wimmuskee/ontolex-db
|
format/namespace.py
|
Python
|
mit
| 343
|
from __future__ import absolute_import
from pyti import catch_errors
from pyti.function_helper import fill_for_noncomputable_vals
from six.moves import range
def linear_weighted_moving_average(data, period):
"""
Linear Weighted Moving Average.
Formula:
    LWMA = SUM(DATA[i] * i) / SUM(i)
"""
catch_errors.check_for_period_error(data, period)
idx_period = list(range(1, period+1))
lwma = [(sum([i * idx_period[data[idx-(period-1):idx+1].index(i)]
for i in data[idx-(period-1):idx+1]])) /
sum(range(1, len(data[idx+1-period:idx+1])+1)) for idx in range(period-1, len(data))]
lwma = fill_for_noncomputable_vals(data, lwma)
return lwma
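# --- Hedged worked example (not part of the original module) ---
# With period 3, each output value is the weighted mean of the last three
# points, the most recent point getting the largest weight:
#
#   data = [1.0, 2.0, 3.0, 4.0, 5.0]
#   linear_weighted_moving_average(data, 3)
#   # third value: (1*1 + 2*2 + 3*3) / (1 + 2 + 3) = 14/6 ≈ 2.33
#   # the leading entries are padded by fill_for_noncomputable_vals
#   # as non-computable placeholders.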
|
kylejusticemagnuson/pyti
|
pyti/linear_weighted_moving_average.py
|
Python
|
mit
| 700
|
from typing import List
class Solution:
def maxNumEdgesToRemove(self, n: int, edges: List[List[int]]) -> int:
parent = list(range(n + 1))
def findParent(i):
while parent[i] != i:
parent[i] = parent[parent[i]]
i = parent[i]
return i
def union(u, v):
pu = findParent(u)
pv = findParent(v)
if pu != pv:
parent[pv] = pu
return 1
else:
return 0
e1 = e2 = result = 0
for t, u, v in edges:
if t == 3:
if union(u, v):
e1 += 1
e2 += 1
else:
result += 1
parentOrig = parent[:]
for t, u, v in edges:
if t == 1:
if union(u, v):
e1 += 1
else:
result += 1
parent = parentOrig
for t, u, v in edges:
if t == 2:
if union(u, v):
e2 += 1
else:
result += 1
return result if e1 == e2 == n - 1 else -1
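# --- Hedged usage sketch (not part of the original solution) ---
# n = 2 with one edge of each type: the type-3 edge alone keeps the graph
# traversable for both players, so the other two edges are removable.
#
# Solution().maxNumEdgesToRemove(2, [[3, 1, 2], [1, 1, 2], [2, 1, 2]])  # -> 2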
|
jiadaizhao/LeetCode
|
1501-1600/1579-Remove Max Number of Edges to Keep Graph Fully Traversable/1579-Remove Max Number of Edges to Keep Graph Fully Traversable.py
|
Python
|
mit
| 1,211
|
# orm/interfaces.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Contains various base classes used throughout the ORM.
Defines the now deprecated ORM extension classes as well
as ORM internals.
Other than the deprecated extensions, this module and the
classes within should be considered mostly private.
"""
from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
orm_util = util.importlater('sqlalchemy.orm', 'util')
collections = util.importlater('sqlalchemy.orm', 'collections')
__all__ = (
'AttributeExtension',
'EXT_CONTINUE',
'EXT_STOP',
'ExtensionOption',
'InstrumentationManager',
'LoaderStrategy',
'MapperExtension',
'MapperOption',
'MapperProperty',
'PropComparator',
'PropertyOption',
'SessionExtension',
'StrategizedOption',
'StrategizedProperty',
)
EXT_CONTINUE = util.symbol('EXT_CONTINUE')
EXT_STOP = util.symbol('EXT_STOP')
ONETOMANY = util.symbol('ONETOMANY')
MANYTOONE = util.symbol('MANYTOONE')
MANYTOMANY = util.symbol('MANYTOMANY')
from .deprecated_interfaces import AttributeExtension, \
SessionExtension, \
MapperExtension
NOT_EXTENSION = util.symbol('NOT_EXTENSION')
"""Symbol indicating an :class:`_InspectionAttr` that's
not part of sqlalchemy.ext.
Is assigned to the :attr:`._InspectionAttr.extension_type`
attribute.
"""
class _InspectionAttr(object):
"""A base class applied to all ORM objects that can be returned
by the :func:`.inspect` function.
The attributes defined here allow the usage of simple boolean
checks to test basic facts about the object returned.
While the boolean checks here are basically the same as using
the Python isinstance() function, the flags here can be used without
the need to import all of these classes, and also such that
the SQLAlchemy class system can change while leaving the flags
here intact for forwards-compatibility.
"""
is_selectable = False
"""Return True if this object is an instance of :class:`.Selectable`."""
is_aliased_class = False
"""True if this object is an instance of :class:`.AliasedClass`."""
is_instance = False
"""True if this object is an instance of :class:`.InstanceState`."""
is_mapper = False
"""True if this object is an instance of :class:`.Mapper`."""
is_property = False
"""True if this object is an instance of :class:`.MapperProperty`."""
is_attribute = False
"""True if this object is a Python :term:`descriptor`.
This can refer to one of many types. Usually a
:class:`.QueryableAttribute` which handles attributes events on behalf
of a :class:`.MapperProperty`. But can also be an extension type
such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
The :attr:`._InspectionAttr.extension_type` will refer to a constant
identifying the specific subtype.
.. seealso::
:attr:`.Mapper.all_orm_descriptors`
"""
is_clause_element = False
"""True if this object is an instance of :class:`.ClauseElement`."""
extension_type = NOT_EXTENSION
"""The extension type, if any.
Defaults to :data:`.interfaces.NOT_EXTENSION`
.. versionadded:: 0.8.0
.. seealso::
:data:`.HYBRID_METHOD`
:data:`.HYBRID_PROPERTY`
:data:`.ASSOCIATION_PROXY`
"""
class _MappedAttribute(object):
"""Mixin for attributes which should be replaced by mapper-assigned
attributes.
"""
class MapperProperty(_MappedAttribute, _InspectionAttr):
"""Manage the relationship of a ``Mapper`` to a single class
attribute, as well as that attribute as it appears on individual
instances of the class, including attribute instrumentation,
attribute access, loading behavior, and dependency calculations.
The most common occurrences of :class:`.MapperProperty` are the
mapped :class:`.Column`, which is represented in a mapping as
an instance of :class:`.ColumnProperty`,
and a reference to another class produced by :func:`.relationship`,
represented in the mapping as an instance of
:class:`.RelationshipProperty`.
"""
cascade = frozenset()
"""The set of 'cascade' attribute names.
This collection is checked before the 'cascade_iterator' method is called.
"""
is_property = True
def setup(self, context, entity, path, adapter, **kwargs):
"""Called by Query for the purposes of constructing a SQL statement.
Each MapperProperty associated with the target mapper processes the
statement referenced by the query context, adding columns and/or
criterion as appropriate.
"""
pass
def create_row_processor(self, context, path,
mapper, row, adapter):
"""Return a 3-tuple consisting of three row processing functions.
"""
return None, None, None
def cascade_iterator(self, type_, state, visited_instances=None,
halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
        Return an iterator of 3-tuples (instance, mapper, state).
Note that the 'cascade' collection on this MapperProperty is
checked first for the given type before cascade_iterator is called.
See PropertyLoader for the related instance implementation.
"""
return iter(())
def set_parent(self, parent, init):
self.parent = parent
def instrument_class(self, mapper): # pragma: no-coverage
raise NotImplementedError()
@util.memoized_property
def info(self):
"""Info dictionary associated with the object, allowing user-defined
data to be associated with this :class:`.MapperProperty`.
The dictionary is generated when first accessed. Alternatively,
it can be specified as a constructor argument to the
:func:`.column_property`, :func:`.relationship`, or :func:`.composite`
functions.
.. versionadded:: 0.8 Added support for .info to all
:class:`.MapperProperty` subclasses.
.. seealso::
:attr:`.QueryableAttribute.info`
:attr:`.SchemaItem.info`
"""
return {}
_configure_started = False
_configure_finished = False
def init(self):
"""Called after all mappers are created to assemble
relationships between mappers and perform other post-mapper-creation
initialization steps.
"""
self._configure_started = True
self.do_init()
self._configure_finished = True
@property
def class_attribute(self):
"""Return the class-bound descriptor corresponding to this
MapperProperty."""
return getattr(self.parent.class_, self.key)
def do_init(self):
"""Perform subclass-specific initialization post-mapper-creation
steps.
This is a template method called by the ``MapperProperty``
object's init() method.
"""
pass
def post_instrument_class(self, mapper):
"""Perform instrumentation adjustments that need to occur
after init() has completed.
"""
pass
def is_primary(self):
"""Return True if this ``MapperProperty``'s mapper is the
primary mapper for its class.
This flag is used to indicate that the ``MapperProperty`` can
define attribute instrumentation for the class at the class
level (as opposed to the individual instance level).
"""
return not self.parent.non_primary
def merge(self, session, source_state, source_dict, dest_state,
dest_dict, load, _recursive):
"""Merge the attribute represented by this ``MapperProperty``
from source to destination object"""
pass
def compare(self, operator, value, **kw):
"""Return a compare operation for the columns represented by
this ``MapperProperty`` to the given value, which may be a
column value or an instance. 'operator' is an operator from
the operators module, or from sql.Comparator.
By default uses the PropComparator attached to this MapperProperty
under the attribute name "comparator".
"""
return operator(self.comparator, value)
def __repr__(self):
return '<%s at 0x%x; %s>' % (
self.__class__.__name__,
id(self), getattr(self, 'key', 'no key'))
class PropComparator(operators.ColumnOperators):
"""Defines boolean, comparison, and other operators for
:class:`.MapperProperty` objects.
SQLAlchemy allows for operators to
be redefined at both the Core and ORM level. :class:`.PropComparator`
is the base class of operator redefinition for ORM-level operations,
including those of :class:`.ColumnProperty`,
:class:`.RelationshipProperty`, and :class:`.CompositeProperty`.
.. note:: With the advent of Hybrid properties introduced in SQLAlchemy
0.7, as well as Core-level operator redefinition in
SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator`
instances is extremely rare. See :ref:`hybrids_toplevel` as well
as :ref:`types_operators`.
User-defined subclasses of :class:`.PropComparator` may be created. The
built-in Python comparison and math operator methods, such as
:meth:`.operators.ColumnOperators.__eq__`,
:meth:`.operators.ColumnOperators.__lt__`, and
:meth:`.operators.ColumnOperators.__add__`, can be overridden to provide
new operator behavior. The custom :class:`.PropComparator` is passed to
the :class:`.MapperProperty` instance via the ``comparator_factory``
argument. In each case,
the appropriate subclass of :class:`.PropComparator` should be used::
# definition of custom PropComparator subclasses
from sqlalchemy.orm.properties import \\
ColumnProperty,\\
CompositeProperty,\\
RelationshipProperty
class MyColumnComparator(ColumnProperty.Comparator):
def __eq__(self, other):
return self.__clause_element__() == other
class MyRelationshipComparator(RelationshipProperty.Comparator):
def any(self, expression):
"define the 'any' operation"
# ...
class MyCompositeComparator(CompositeProperty.Comparator):
def __gt__(self, other):
"redefine the 'greater than' operation"
return sql.and_(*[a>b for a, b in
zip(self.__clause_element__().clauses,
other.__composite_values__())])
# application of custom PropComparator subclasses
from sqlalchemy.orm import column_property, relationship, composite
from sqlalchemy import Column, String
class SomeMappedClass(Base):
some_column = column_property(Column("some_column", String),
comparator_factory=MyColumnComparator)
some_relationship = relationship(SomeOtherClass,
comparator_factory=MyRelationshipComparator)
some_composite = composite(
Column("a", String), Column("b", String),
comparator_factory=MyCompositeComparator
)
Note that for column-level operator redefinition, it's usually
simpler to define the operators at the Core level, using the
:attr:`.TypeEngine.comparator_factory` attribute. See
:ref:`types_operators` for more detail.
See also:
:class:`.ColumnProperty.Comparator`
:class:`.RelationshipProperty.Comparator`
:class:`.CompositeProperty.Comparator`
:class:`.ColumnOperators`
:ref:`types_operators`
:attr:`.TypeEngine.comparator_factory`
"""
def __init__(self, prop, parentmapper, adapt_to_entity=None):
self.prop = self.property = prop
self._parentmapper = parentmapper
self._adapt_to_entity = adapt_to_entity
def __clause_element__(self):
raise NotImplementedError("%r" % self)
def adapt_to_entity(self, adapt_to_entity):
"""Return a copy of this PropComparator which will use the given
:class:`.AliasedInsp` to produce corresponding expressions.
"""
return self.__class__(self.prop, self._parentmapper, adapt_to_entity)
@property
def adapter(self):
"""Produce a callable that adapts column expressions
to suit an aliased version of this comparator.
"""
if self._adapt_to_entity is None:
return None
else:
return self._adapt_to_entity._adapt_element
@util.memoized_property
def info(self):
return self.property.info
@staticmethod
def any_op(a, b, **kwargs):
return a.any(b, **kwargs)
@staticmethod
def has_op(a, b, **kwargs):
return a.has(b, **kwargs)
@staticmethod
def of_type_op(a, class_):
return a.of_type(class_)
def of_type(self, class_):
"""Redefine this object in terms of a polymorphic subclass.
Returns a new PropComparator from which further criterion can be
evaluated.
e.g.::
query.join(Company.employees.of_type(Engineer)).\\
filter(Engineer.name=='foo')
:param \class_: a class or mapper indicating that criterion will be
against this specific subclass.
"""
return self.operate(PropComparator.of_type_op, class_)
def any(self, criterion=None, **kwargs):
"""Return true if this collection contains any member that meets the
given criterion.
The usual implementation of ``any()`` is
:meth:`.RelationshipProperty.Comparator.any`.
:param criterion: an optional ClauseElement formulated against the
member class' table or attributes.
:param \**kwargs: key/value pairs corresponding to member class
attribute names which will be compared via equality to the
corresponding values.
"""
return self.operate(PropComparator.any_op, criterion, **kwargs)
def has(self, criterion=None, **kwargs):
"""Return true if this element references a member which meets the
given criterion.
The usual implementation of ``has()`` is
:meth:`.RelationshipProperty.Comparator.has`.
:param criterion: an optional ClauseElement formulated against the
member class' table or attributes.
:param \**kwargs: key/value pairs corresponding to member class
attribute names which will be compared via equality to the
corresponding values.
"""
return self.operate(PropComparator.has_op, criterion, **kwargs)
class StrategizedProperty(MapperProperty):
"""A MapperProperty which uses selectable strategies to affect
loading behavior.
There is a single strategy selected by default. Alternate
strategies can be selected at Query time through the usage of
``StrategizedOption`` objects via the Query.options() method.
"""
strategy_wildcard_key = None
@util.memoized_property
def _wildcard_path(self):
if self.strategy_wildcard_key:
return ('loaderstrategy', (self.strategy_wildcard_key,))
else:
return None
def _get_context_strategy(self, context, path):
strategy_cls = path._inlined_get_for(self, context, 'loaderstrategy')
if not strategy_cls:
wc_key = self._wildcard_path
if wc_key and wc_key in context.attributes:
strategy_cls = context.attributes[wc_key]
if strategy_cls:
try:
return self._strategies[strategy_cls]
except KeyError:
return self.__init_strategy(strategy_cls)
return self.strategy
def _get_strategy(self, cls):
try:
return self._strategies[cls]
except KeyError:
return self.__init_strategy(cls)
def __init_strategy(self, cls):
self._strategies[cls] = strategy = cls(self)
return strategy
def setup(self, context, entity, path, adapter, **kwargs):
self._get_context_strategy(context, path).\
setup_query(context, entity, path,
adapter, **kwargs)
def create_row_processor(self, context, path, mapper, row, adapter):
return self._get_context_strategy(context, path).\
create_row_processor(context, path,
mapper, row, adapter)
def do_init(self):
self._strategies = {}
self.strategy = self.__init_strategy(self.strategy_class)
def post_instrument_class(self, mapper):
if self.is_primary() and \
not mapper.class_manager._attr_has_impl(self.key):
self.strategy.init_class_attribute(mapper)
class MapperOption(object):
"""Describe a modification to a Query."""
propagate_to_loaders = False
"""if True, indicate this option should be carried along
Query object generated by scalar or object lazy loaders.
"""
def process_query(self, query):
pass
def process_query_conditionally(self, query):
"""same as process_query(), except that this option may not
apply to the given query.
Used when secondary loaders resend existing options to a new
Query."""
self.process_query(query)
class PropertyOption(MapperOption):
"""A MapperOption that is applied to a property off the mapper or
one of its child mappers, identified by a dot-separated key
or list of class-bound attributes. """
def __init__(self, key, mapper=None):
self.key = key
self.mapper = mapper
def process_query(self, query):
self._process(query, True)
def process_query_conditionally(self, query):
self._process(query, False)
def _process(self, query, raiseerr):
paths = self._process_paths(query, raiseerr)
if paths:
self.process_query_property(query, paths)
def process_query_property(self, query, paths):
pass
def __getstate__(self):
d = self.__dict__.copy()
d['key'] = ret = []
for token in util.to_list(self.key):
if isinstance(token, PropComparator):
ret.append((token._parentmapper.class_, token.key))
else:
ret.append(token)
return d
def __setstate__(self, state):
ret = []
for key in state['key']:
if isinstance(key, tuple):
cls, propkey = key
ret.append(getattr(cls, propkey))
else:
ret.append(key)
state['key'] = tuple(ret)
self.__dict__ = state
def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
if orm_util._is_aliased_class(mapper):
searchfor = mapper
else:
searchfor = orm_util._class_to_mapper(mapper)
for ent in query._mapper_entities:
if ent.corresponds_to(searchfor):
return ent
else:
if raiseerr:
if not list(query._mapper_entities):
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
% (token, )
)
else:
raise sa_exc.ArgumentError(
"Can't find property '%s' on any entity "
"specified in this Query. Note the full path "
"from root (%s) to target entity must be specified."
% (token, ",".join(str(x) for
x in query._mapper_entities))
)
else:
return None
def _find_entity_basestring(self, query, token, raiseerr):
for ent in query._mapper_entities:
# return only the first _MapperEntity when searching
# based on string prop name. Ideally object
# attributes are used to specify more exactly.
return ent
else:
if raiseerr:
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
% (token, )
)
else:
return None
def _process_paths(self, query, raiseerr):
"""reconcile the 'key' for this PropertyOption with
the current path and entities of the query.
Return a list of affected paths.
"""
path = orm_util.PathRegistry.root
entity = None
paths = []
no_result = []
# _current_path implies we're in a
# secondary load with an existing path
current_path = list(query._current_path.path)
tokens = deque(self.key)
while tokens:
token = tokens.popleft()
if isinstance(token, str):
# wildcard token
if token.endswith(':*'):
return [path.token(token)]
sub_tokens = token.split(".", 1)
token = sub_tokens[0]
tokens.extendleft(sub_tokens[1:])
# exhaust current_path before
# matching tokens to entities
if current_path:
if current_path[1].key == token:
current_path = current_path[2:]
continue
else:
return no_result
if not entity:
entity = self._find_entity_basestring(
query,
token,
raiseerr)
if entity is None:
return no_result
path_element = entity.entity_zero
mapper = entity.mapper
if hasattr(mapper.class_, token):
prop = getattr(mapper.class_, token).property
else:
if raiseerr:
raise sa_exc.ArgumentError(
"Can't find property named '%s' on the "
"mapped entity %s in this Query. " % (
token, mapper)
)
else:
return no_result
elif isinstance(token, PropComparator):
prop = token.property
# exhaust current_path before
# matching tokens to entities
if current_path:
if current_path[0:2] == \
[token._parententity, prop]:
current_path = current_path[2:]
continue
else:
return no_result
if not entity:
entity = self._find_entity_prop_comparator(
query,
prop.key,
token._parententity,
raiseerr)
if not entity:
return no_result
path_element = entity.entity_zero
mapper = entity.mapper
else:
raise sa_exc.ArgumentError(
"mapper option expects "
"string key or list of attributes")
assert prop is not None
if raiseerr and not prop.parent.common_parent(mapper):
raise sa_exc.ArgumentError("Attribute '%s' does not "
"link from element '%s'" % (token, path_element))
path = path[path_element][prop]
paths.append(path)
if getattr(token, '_of_type', None):
ac = token._of_type
ext_info = inspect(ac)
path_element = mapper = ext_info.mapper
if not ext_info.is_aliased_class:
ac = orm_util.with_polymorphic(
ext_info.mapper.base_mapper,
ext_info.mapper, aliased=True,
_use_mapper_path=True)
ext_info = inspect(ac)
path.set(query._attributes, "path_with_polymorphic", ext_info)
else:
path_element = mapper = getattr(prop, 'mapper', None)
if mapper is None and tokens:
raise sa_exc.ArgumentError(
"Attribute '%s' of entity '%s' does not "
"refer to a mapped entity" %
(token, entity)
)
if current_path:
# ran out of tokens before
# current_path was exhausted.
assert not tokens
return no_result
return paths
class StrategizedOption(PropertyOption):
"""A MapperOption that affects which LoaderStrategy will be used
for an operation by a StrategizedProperty.
"""
chained = False
def process_query_property(self, query, paths):
strategy = self.get_strategy_class()
if self.chained:
for path in paths:
path.set(
query._attributes,
"loaderstrategy",
strategy
)
else:
paths[-1].set(
query._attributes,
"loaderstrategy",
strategy
)
def get_strategy_class(self):
raise NotImplementedError()
class LoaderStrategy(object):
"""Describe the loading behavior of a StrategizedProperty object.
The ``LoaderStrategy`` interacts with the querying process in three
ways:
* it controls the configuration of the ``InstrumentedAttribute``
placed on a class to handle the behavior of the attribute. this
may involve setting up class-level callable functions to fire
off a select operation when the attribute is first accessed
(i.e. a lazy load)
* it processes the ``QueryContext`` at statement construction time,
where it can modify the SQL statement that is being produced.
Simple column attributes may add their represented column to the
list of selected columns, *eager loading* properties may add
``LEFT OUTER JOIN`` clauses to the statement.
* It produces "row processor" functions at result fetching time.
These "row processor" functions populate a particular attribute
on a particular mapped instance.
"""
def __init__(self, parent):
self.parent_property = parent
self.is_class_level = False
self.parent = self.parent_property.parent
self.key = self.parent_property.key
def init_class_attribute(self, mapper):
pass
def setup_query(self, context, entity, path, adapter, **kwargs):
pass
def create_row_processor(self, context, path, mapper,
row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
StrategizedProperty delegates its create_row_processor method
directly to this method. """
return None, None, None
def __str__(self):
return str(self.parent_property)
|
sauloal/PiCastPy
|
sqlalchemy/orm/interfaces.py
|
Python
|
mit
| 28,330
|
import cgi
import email.utils
import hashlib
import getpass
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile
import pip
from pip.backwardcompat import urllib, urlparse, raw_input
from pip.exceptions import InstallationError, HashMismatch
from pip.util import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file,
create_download_cache_folder, cache_download)
from pip.vcs import vcs
from pip.log import logger
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.compat import IncompleteRead
from pip._vendor.requests.exceptions import InvalidURL, ChunkedEncodingError
from pip._vendor.requests.models import Response
from pip._vendor.requests.structures import CaseInsensitiveDict
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']
def user_agent():
"""Return a string representing the user agent."""
_implementation = platform.python_implementation()
if _implementation == 'CPython':
_implementation_version = platform.python_version()
elif _implementation == 'PyPy':
_implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro)
if sys.pypy_version_info.releaselevel != 'final':
_implementation_version = ''.join([
_implementation_version,
sys.pypy_version_info.releaselevel,
])
elif _implementation == 'Jython':
_implementation_version = platform.python_version() # Complete Guess
elif _implementation == 'IronPython':
_implementation_version = platform.python_version() # Complete Guess
else:
_implementation_version = 'Unknown'
try:
p_system = platform.system()
p_release = platform.release()
except IOError:
p_system = 'Unknown'
p_release = 'Unknown'
return " ".join(['pip/%s' % pip.__version__,
'%s/%s' % (_implementation, _implementation_version),
'%s/%s' % (p_system, p_release)])
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True):
self.prompting = prompting
self.passwords = {}
def __call__(self, req):
parsed = urlparse.urlparse(req.url)
# Get the netloc without any embedded credentials
netloc = parsed.netloc.split("@", 1)[-1]
# Set the url of the request to the url without any credentials
req.url = urlparse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
# Extract credentials embedded in the url if we have none stored
if username is None:
username, password = self.parse_credentials(parsed.netloc)
if username or password:
# Store the username and password
self.passwords[netloc] = (username, password)
# Send the basic auth with this request
req = HTTPBasicAuth(username or "", password or "")(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
def handle_401(self, resp, **kwargs):
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
        # We are not able to prompt the user, so simply return the response
if not self.prompting:
return resp
parsed = urlparse.urlparse(resp.url)
# Prompt the user for a new username and password
username = raw_input("User for %s: " % parsed.netloc)
password = getpass.getpass("Password: ")
# Store the new username and password to use for future requests
if username or password:
self.passwords[parsed.netloc] = (username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def parse_credentials(self, netloc):
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
return userinfo.split(":", 1)
return userinfo, None
return None, None
class LocalFSResponse(object):
def __init__(self, fileobj):
self.fileobj = fileobj
def __getattr__(self, name):
return getattr(self.fileobj, name)
def read(self, amt=None, decode_content=None, cache_content=False):
return self.fileobj.read(amt)
# Insert Hacks to Make Cookie Jar work w/ Requests
@property
def _original_response(self):
class FakeMessage(object):
def getheaders(self, header):
return []
def get_all(self, header, default):
return []
class FakeResponse(object):
@property
def msg(self):
return FakeMessage()
return FakeResponse()
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
parsed_url = urlparse.urlparse(request.url)
# We only work for requests with a host of localhost
if parsed_url.netloc.lower() != "localhost":
raise InvalidURL("Invalid URL %r: Only localhost is allowed" %
request.url)
real_url = urlparse.urlunparse(parsed_url[:1] + ("",) + parsed_url[2:])
pathname = url_to_path(real_url)
resp = Response()
resp.status_code = 200
resp.url = real_url
stats = os.stat(pathname)
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
resp.headers = CaseInsensitiveDict({
"Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain",
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = LocalFSResponse(open(pathname, "rb"))
resp.close = resp.raw.close
return resp
def close(self):
pass
class PipSession(requests.Session):
timeout = None
def __init__(self, *args, **kwargs):
super(PipSession, self).__init__(*args, **kwargs)
# Attach our User Agent to the request
self.headers["User-Agent"] = user_agent()
# Attach our Authentication handler to the session
self.auth = MultiDomainBasicAuth()
# Enable file:// urls
self.mount("file://", LocalFSAdapter())
def request(self, method, url, *args, **kwargs):
# Make file:// urls not fail due to lack of a hostname
parsed = urlparse.urlparse(url)
if parsed.scheme == "file":
url = urlparse.urlunparse(parsed[:1] + ("localhost",) + parsed[2:])
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)
# Dispatch the actual request
return super(PipSession, self).request(method, url, *args, **kwargs)
def get_file_content(url, comes_from=None, session=None):
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode."""
if session is None:
session = PipSession()
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from
and comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
if scheme == 'file':
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
else:
# # FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
if six.PY3:
return resp.url, resp.text
else:
return resp.url, resp.content
try:
f = open(url)
content = f.read()
except IOError:
e = sys.exc_info()[1]
raise InstallationError('Could not open requirements file: %s' % str(e))
else:
f.close()
return url, content
_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
"""Returns true if the name looks like a URL"""
if ':' not in name:
return False
scheme = name.split(':', 1)[0].lower()
return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
"""
Convert a file: URL to a path.
"""
assert url.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % url)
path = url[len('file:'):].lstrip('/')
path = urllib.unquote(path)
if _url_drive_re.match(path):
path = path[0] + ':' + path[2:]
else:
path = '/' + path
return path
_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
def path_to_url(path):
"""
Convert a path to a file: URL. The path will be made absolute and have
quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
drive, path = os.path.splitdrive(path)
filepath = path.split(os.path.sep)
url = '/'.join([urllib.quote(part) for part in filepath])
if not drive:
url = url.lstrip('/')
return 'file:///' + drive + url
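# --- Hedged illustration (not part of the original module) ---
# On a POSIX system the two helpers above round-trip a plain path:
#
#   path_to_url('/tmp/pkg.tar.gz')          # -> 'file:///tmp/pkg.tar.gz'
#   url_to_path('file:///tmp/pkg.tar.gz')   # -> '/tmp/pkg.tar.gz'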
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle',
'.whl')
ext = splitext(name)[1].lower()
if ext in archives:
return True
return False
def unpack_vcs_link(link, location, only_download=False):
vcs_backend = _get_used_vcs_backend(link)
if only_download:
vcs_backend.export(location)
else:
vcs_backend.unpack(location)
def _get_used_vcs_backend(link):
for backend in vcs.backends:
if link.scheme in backend.schemes:
vcs_backend = backend(link.url)
return vcs_backend
def is_vcs_url(link):
return bool(_get_used_vcs_backend(link))
def is_file_url(link):
return link.url.lower().startswith('file:')
def _check_hash(download_hash, link):
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
logger.fatal("Hash digest size of the package %d (%s) doesn't match the expected hash name %s!"
% (download_hash.digest_size, link, link.hash_name))
raise HashMismatch('Hash name mismatch for package %s' % link)
if download_hash.hexdigest() != link.hash:
logger.fatal("Hash of the package %s (%s) doesn't match the expected hash %s!"
% (link, download_hash.hexdigest(), link.hash))
raise HashMismatch('Bad %s hash for package %s' % (link.hash_name, link))
def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, TypeError):
logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
return None
fp = open(target_file, 'rb')
while True:
chunk = fp.read(4096)
if not chunk:
break
download_hash.update(chunk)
fp.close()
return download_hash
def _download_url(resp, link, temp_location):
fp = open(temp_location, 'wb')
download_hash = None
if link.hash and link.hash_name:
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
downloaded = 0
show_progress = total_length > 40 * 1000 or not total_length
show_url = link.show_url
try:
if show_progress:
# # FIXME: the URL can get really long in this message:
if total_length:
logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
else:
logger.start_progress('Downloading %s (unknown size): ' % show_url)
else:
logger.notify('Downloading %s' % show_url)
logger.info('Downloading from URL %s' % link)
def resp_read(chunk_size):
try:
# Special case for urllib3.
try:
for chunk in resp.raw.stream(
chunk_size, decode_content=False):
yield chunk
except IncompleteRead as e:
raise ChunkedEncodingError(e)
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
for chunk in resp_read(4096):
downloaded += len(chunk)
if show_progress:
if not total_length:
logger.show_progress('%s' % format_size(downloaded))
else:
logger.show_progress('%3i%% %s' % (100 * downloaded / total_length, format_size(downloaded)))
if download_hash is not None:
download_hash.update(chunk)
fp.write(chunk)
fp.close()
finally:
if show_progress:
logger.end_progress('%s downloaded' % format_size(downloaded))
return download_hash
def _copy_file(filename, location, content_type, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
display_path(download_location), ('i', 'w', 'b'))
if response == 'i':
copy = False
elif response == 'w':
logger.warn('Deleting %s' % display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warn('Backing up %s to %s'
% (display_path(download_location), display_path(dest_file)))
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
logger.notify('Saved %s' % display_path(download_location))
def unpack_http_url(link, location, download_cache, download_dir=None,
session=None):
if session is None:
session = PipSession()
temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
temp_location = None
target_url = link.url.split('#', 1)[0]
already_cached = False
cache_file = None
cache_content_type_file = None
download_hash = None
# If a download cache is specified, is the file cached there?
if download_cache:
cache_file = os.path.join(download_cache,
urllib.quote(target_url, ''))
cache_content_type_file = cache_file + '.content-type'
already_cached = (
os.path.exists(cache_file) and
os.path.exists(cache_content_type_file)
)
if not os.path.isdir(download_cache):
create_download_cache_folder(download_cache)
# If a download dir is specified, is the file already downloaded there?
already_downloaded = None
if download_dir:
already_downloaded = os.path.join(download_dir, link.filename)
if not os.path.exists(already_downloaded):
already_downloaded = None
    # If already downloaded, does its hash match?
if already_downloaded:
temp_location = already_downloaded
content_type = mimetypes.guess_type(already_downloaded)[0]
logger.notify('File was already downloaded %s' % already_downloaded)
if link.hash:
download_hash = _get_hash_from_file(temp_location, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warn(
'Previously-downloaded file %s has bad hash, '
're-downloading.' % temp_location
)
temp_location = None
os.unlink(already_downloaded)
already_downloaded = None
# If not a valid download, let's confirm the cached file is valid
if already_cached and not temp_location:
with open(cache_content_type_file) as fp:
content_type = fp.read().strip()
temp_location = cache_file
logger.notify('Using download cache from %s' % cache_file)
if link.hash and link.hash_name:
download_hash = _get_hash_from_file(cache_file, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warn(
'Cached file %s has bad hash, '
're-downloading.' % temp_location
)
temp_location = None
os.unlink(cache_file)
os.unlink(cache_content_type_file)
already_cached = False
# We don't have either a cached or a downloaded copy
# let's download to a tmp dir
if not temp_location:
try:
resp = session.get(target_url, stream=True)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.fatal("HTTP error %s while getting %s" %
(exc.response.status_code, link))
raise
content_type = resp.headers.get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get('content-disposition')
if content_disposition:
type, params = cgi.parse_header(content_disposition)
# We use ``or`` here because we don't want to use an "empty" value
# from the filename param.
filename = params.get('filename') or filename
ext = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
temp_location = os.path.join(temp_dir, filename)
download_hash = _download_url(resp, link, temp_location)
if link.hash and link.hash_name:
_check_hash(download_hash, link)
# a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded:
_copy_file(temp_location, download_dir, content_type, link)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(temp_location, location, content_type, link)
# if using a download cache, cache it, if needed
if cache_file and not already_cached:
cache_download(cache_file, temp_location, content_type)
if not (already_cached or already_downloaded):
os.unlink(temp_location)
os.rmdir(temp_dir)
def unpack_file_url(link, location, download_dir=None):
link_path = url_to_path(link.url_without_fragment)
already_downloaded = False
# If it's a url to a local directory
if os.path.isdir(link_path):
if os.path.isdir(location):
rmtree(location)
shutil.copytree(link_path, location, symlinks=True)
return
# if link has a hash, let's confirm it matches
if link.hash:
link_path_hash = _get_hash_from_file(link_path, link)
_check_hash(link_path_hash, link)
# If a download dir is specified, is the file already there and valid?
if download_dir:
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
content_type = mimetypes.guess_type(download_path)[0]
logger.notify('File was already downloaded %s' % download_path)
if link.hash:
download_hash = _get_hash_from_file(download_path, link)
try:
_check_hash(download_hash, link)
already_downloaded = True
except HashMismatch:
logger.warn(
'Previously-downloaded file %s has bad hash, '
're-downloading.' % link_path
)
os.unlink(download_path)
else:
already_downloaded = True
if already_downloaded:
from_path = download_path
else:
from_path = link_path
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified and not already downloaded
if download_dir and not already_downloaded:
_copy_file(from_path, download_dir, content_type, link)
|
ppyordanov/HCI_4_Future_Cities
|
Server/src/virtualenv/Lib/site-packages/pip/download.py
|
Python
|
mit
| 22,573
|