| code (string, 2-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (string, 1 value) | license (string, 15 values) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
from trac.test import Mock
from trac.web.api import Request, RequestDone
from StringIO import StringIO
import unittest
class RequestTestCase(unittest.TestCase):
def _make_environ(self, scheme='http', server_name='example.org',
server_port=80, method='GET', script_name='/trac',
**kwargs):
environ = {'wsgi.url_scheme': scheme, 'wsgi.input': StringIO(''),
'REQUEST_METHOD': method, 'SERVER_NAME': server_name,
'SERVER_PORT': server_port, 'SCRIPT_NAME': script_name}
environ.update(kwargs)
return environ
def test_base_url(self):
environ = self._make_environ()
req = Request(environ, None)
self.assertEqual('http://example.org/trac', req.base_url)
def test_base_url_host(self):
environ = self._make_environ(server_port=8080, HTTP_HOST='example.com')
req = Request(environ, None)
self.assertEqual('http://example.com/trac', req.base_url)
def test_base_url_nondefaultport(self):
environ = self._make_environ(server_port=8080)
req = Request(environ, None)
self.assertEqual('http://example.org:8080/trac', req.base_url)
def test_base_url_https(self):
environ = self._make_environ(scheme='https', server_port=443)
req = Request(environ, None)
self.assertEqual('https://example.org/trac', req.base_url)
def test_base_url_https_host(self):
environ = self._make_environ(scheme='https', server_port=443,
HTTP_HOST='example.com')
req = Request(environ, None)
self.assertEqual('https://example.com/trac', req.base_url)
def test_base_url_https_nondefaultport(self):
environ = self._make_environ(scheme='https', server_port=8443)
req = Request(environ, None)
self.assertEqual('https://example.org:8443/trac', req.base_url)
def test_base_url_proxy(self):
environ = self._make_environ(HTTP_HOST='localhost',
HTTP_X_FORWARDED_HOST='example.com')
req = Request(environ, None)
self.assertEqual('http://localhost/trac', req.base_url)
def test_languages(self):
environ = self._make_environ()
environ['HTTP_ACCEPT_LANGUAGE'] = 'en-us,en;q=0.5'
req = Request(environ, None)
self.assertEqual(['en-us', 'en'], req.languages)
def test_redirect(self):
status_sent = []
headers_sent = {}
def start_response(status, headers):
status_sent.append(status)
headers_sent.update(dict(headers))
environ = self._make_environ(method='HEAD')
req = Request(environ, start_response)
req.session = Mock(save=lambda: None)
self.assertRaises(RequestDone, req.redirect, '/trac/test')
self.assertEqual('302 Found', status_sent[0])
self.assertEqual('http://example.org/trac/test',
headers_sent['Location'])
def test_redirect_absolute(self):
status_sent = []
headers_sent = {}
def start_response(status, headers):
status_sent.append(status)
headers_sent.update(dict(headers))
environ = self._make_environ(method='HEAD')
req = Request(environ, start_response,)
req.session = Mock(save=lambda: None)
self.assertRaises(RequestDone, req.redirect,
'http://example.com/trac/test')
self.assertEqual('302 Found', status_sent[0])
self.assertEqual('http://example.com/trac/test',
headers_sent['Location'])
def test_write_unicode(self):
buf = StringIO()
def write(data):
buf.write(data)
def start_response(status, headers):
return write
environ = self._make_environ(method='HEAD')
req = Request(environ, start_response)
req.send_header('Content-Type', 'text/plain;charset=utf-8')
# we didn't set Content-Length, so we get a RuntimeError for that
self.assertRaises(RuntimeError, req.write, u'Föö')
req = Request(environ, start_response)
req.send_header('Content-Type', 'text/plain;charset=utf-8')
req.send_header('Content-Length', 0)
# anyway we're not supposed to send unicode, so we get a ValueError
self.assertRaises(ValueError, req.write, u'Föö')
def test_invalid_cookies(self):
environ = self._make_environ(HTTP_COOKIE='bad:key=value;')
req = Request(environ, None)
self.assertEqual('', str(req.incookie))
def test_multiple_cookies(self):
environ = self._make_environ(HTTP_COOKIE='key=value1; key=value2;')
req = Request(environ, None)
self.assertEqual('Set-Cookie: key=value1',
str(req.incookie).rstrip(';'))
def test_read(self):
environ = self._make_environ(**{'wsgi.input': StringIO('test input')})
req = Request(environ, None)
self.assertEqual('test input', req.read())
def test_read_size(self):
environ = self._make_environ(**{'wsgi.input': StringIO('test input')})
req = Request(environ, None)
self.assertEqual('test', req.read(size=4))
def test_qs_on_post(self):
"""Make sure req.args parsing is consistent even after the backwards
incompatible change introduced in Python 2.6.
"""
environ = self._make_environ(method='GET',
**{'QUERY_STRING': 'action=foo'})
req = Request(environ, None)
self.assertEqual('foo', req.args['action'])
environ = self._make_environ(method='POST',
**{'wsgi.input': StringIO('action=bar'),
'CONTENT_LENGTH': '10',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'QUERY_STRING': 'action=foo'})
req = Request(environ, None)
self.assertEqual('bar', req.args['action'])
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(RequestTestCase, 'test'))
return suite
if __name__ == '__main__':
unittest.main()
| zjj/trac_hack | trac/web/tests/api.py | Python | bsd-3-clause | 6,314 |
from distutils.version import LooseVersion
import pytest
import numpy.testing as nptest
from wqio.tests import helpers
import scipy
import pandas
from wqio.features import Location, Dataset
OLD_SCIPY = LooseVersion(scipy.version.version) < LooseVersion("0.19")
@pytest.fixture(params=[True, False])
def location(request):
data = helpers.getTestROSData()
return Location(
data,
station_type="inflow",
bsiter=1500,
rescol="res",
qualcol="qual",
useros=request.param,
)
@pytest.mark.parametrize(
"attr",
[
"name",
"station_type",
"analysis_space",
"rescol",
"qualcol",
"plot_marker",
"scatter_marker",
"hasData",
"all_positive",
"include",
"exclude",
"NUnique",
"bsiter",
"N",
"ND",
],
)
def test_locations_strings_ints(location, attr):
expected = {
"name": "Influent",
"station_type": "inflow",
"analysis_space": "lognormal",
"rescol": "res",
"qualcol": "qual",
"plot_marker": "o",
"scatter_marker": "v",
"hasData": True,
"all_positive": True,
"exclude": False,
"include": True,
"NUnique": 30,
"bsiter": 1500,
"N": 35,
"ND": 7,
}
assert getattr(location, attr) == expected[attr]
@pytest.mark.parametrize("attr", ["fractionND", "min", "min_DL", "min_detect", "max"])
def test_locations_numbers(location, attr):
expected = {
"fractionND": 0.2,
"min": 2.0,
"min_DL": 5.0,
"min_detect": 2.0,
"max": 22.97,
}
nptest.assert_approx_equal(getattr(location, attr), expected[attr])
@pytest.mark.parametrize(
"attr",
[
"useros",
"cov",
"geomean",
"geostd",
"logmean",
"logstd",
"mean",
"median",
"pctl10",
"pctl25",
"pctl75",
"pctl90",
"skew",
"std",
],
)
@helpers.seed
def test_location_stats_scalars(location, attr):
expected = {
"useros": {True: True, False: False},
"cov": {True: 0.5887644, False: 0.5280314},
"geomean": {True: 8.0779865, False: 8.8140731},
"geostd": {True: 1.8116975, False: 1.7094616},
"logmean": {True: 2.0891426, False: 2.1763497},
"logstd": {True: 0.5942642, False: 0.5361785},
"mean": {True: 9.5888515, False: 10.120571},
"median": {True: 7.5000000, False: 8.7100000},
"pctl10": {True: 4.0460279, False: 5.0000000},
"pctl25": {True: 5.6150000, False: 5.8050000},
"pctl75": {True: 11.725000, False: 11.725000},
"pctl90": {True: 19.178000, False: 19.178000},
"skew": {True: 0.8692107, False: 0.8537566},
"std": {True: 5.6455746, False: 5.3439797},
}
nptest.assert_approx_equal(
getattr(location, attr), expected[attr][location.useros], significant=5
)
@pytest.mark.parametrize(
"attr",
[
"geomean_conf_interval",
"logmean_conf_interval",
"mean_conf_interval",
"median_conf_interval",
"shapiro",
"shapiro_log",
"lilliefors",
"lilliefors_log",
"color",
],
)
def test_location_stats_arrays(location, attr):
expected = {
"color": {
True: [0.32157, 0.45271, 0.66667],
False: [0.32157, 0.45271, 0.66667],
},
"geomean_conf_interval": {True: [6.55572, 9.79677], False: [7.25255, 10.34346]},
"logmean_conf_interval": {True: [1.88631, 2.27656], False: [1.97075, 2.34456]},
"mean_conf_interval": {True: [7.74564, 11.49393], False: [8.52743, 11.97627]},
"median_conf_interval": {True: [5.66000, 8.71000], False: [6.65000, 9.850000]},
"shapiro": {True: [0.886889, 0.001789], False: [0.896744, 0.003236]},
"shapiro_log": {True: [0.972679, 0.520949], False: [0.964298, 0.306435]},
"lilliefors": {True: [0.185180, 0.003756], False: [0.160353, 0.023078]},
"lilliefors_log": {True: [0.091855, 0.64099], False: [0.08148, 0.80351]},
}
nptest.assert_array_almost_equal(
getattr(location, attr), expected[attr][location.useros], decimal=5
)
@pytest.mark.parametrize("attr", ["anderson", "anderson_log"])
@pytest.mark.parametrize("index", range(4))
def test_location_anderson(location, attr, index):
expected = {
"anderson": {
True: (
1.54388800,
[0.527, 0.6, 0.719, 0.839, 0.998],
[15.0, 10.0, 5.0, 2.5, 1.0],
0.000438139,
),
False: (
1.4392085,
[0.527, 0.6, 0.719, 0.839, 0.998],
[15.0, 10.0, 5.0, 2.5, 1.0],
0.00080268,
),
},
"anderson_log": {
True: (
0.30409634,
[0.527, 0.6, 0.719, 0.839, 0.998],
[15.0, 10.0, 5.0, 2.5, 1.0],
0.552806894,
),
False: (
0.3684061,
[0.527, 0.6, 0.719, 0.839, 0.998],
[15.0, 10.0, 5.0, 2.5, 1.0],
0.41004028,
),
},
}
result = expected[attr][location.useros][index]
if index in [0, 3]:
nptest.assert_approx_equal(
getattr(location, attr)[index], result, significant=5
)
else:
nptest.assert_array_almost_equal(
getattr(location, attr)[index], result, decimal=5
)
class Test_Dataset(object):
def setup(self):
self.maxDiff = None
# basic test data
self.tolerance = 0.05
self.known_bsiter = 750
in_data = helpers.getTestROSData()
in_data["res"] += 3
out_data = helpers.getTestROSData()
out_data["res"] -= 1.5
self.influent = Location(
in_data,
station_type="inflow",
bsiter=self.known_bsiter,
rescol="res",
qualcol="qual",
useros=False,
)
self.effluent = Location(
out_data,
station_type="outflow",
bsiter=self.known_bsiter,
rescol="res",
qualcol="qual",
useros=False,
)
self.ds = Dataset(self.influent, self.effluent)
self.known_dumpFile = None
self.known_kendall_stats = (1.00, 5.482137e-17)
self.known_kendall_tau = self.known_kendall_stats[0]
self.known_kendall_p = self.known_kendall_stats[1]
self.known_mannwhitney_stats = (927.0, 2.251523e-04)
self.known_mannwhitney_u = self.known_mannwhitney_stats[0]
self.known_mannwhitney_p = self.known_mannwhitney_stats[1]
self.known_spearman_stats = (1.0, 0.0)
self.known_spearman_rho = self.known_spearman_stats[0]
self.known_spearman_p = self.known_spearman_stats[1]
self.known_theil_stats = (1.0, -4.5, 1.0, 1.0)
self.known_theil_hislope = self.known_theil_stats[0]
self.known_theil_intercept = self.known_theil_stats[1]
self.known_theil_loslope = self.known_theil_stats[2]
self.known_theil_medslope = self.known_theil_stats[3]
self.known_wilcoxon_stats = (0.0, 2.4690274207037342e-07)
self.known_wilcoxon_z = self.known_wilcoxon_stats[0]
self.known_wilcoxon_p = self.known_wilcoxon_stats[1]
self.known__non_paired_stats = True
self.known__paired_stats = True
self.known_definition = {"attr1": "test1", "attr2": "test2"}
self.known_include = True
self.known_exclude = not self.known_include
self.known_medianCIsOverlap = False
def test_data(self):
assert hasattr(self.ds, "data")
assert isinstance(self.ds.data, pandas.DataFrame)
def test_paired_data(self):
assert hasattr(self.ds, "paired_data")
assert isinstance(self.ds.paired_data, pandas.DataFrame)
def test__non_paired_stats(self):
assert hasattr(self.ds, "_non_paired_stats")
assert self.ds._non_paired_stats == self.known__non_paired_stats
def test__paired_stats(self):
assert hasattr(self.ds, "_paired_stats")
assert self.ds._paired_stats == self.known__paired_stats
def test_name(self):
assert hasattr(self.ds, "name")
assert self.ds.name is None
def test_name_set(self):
assert hasattr(self.ds, "name")
testname = "Test Name"
self.ds.name = testname
assert self.ds.name == testname
def test_definition_default(self):
assert hasattr(self.ds, "definition")
assert self.ds.definition == {}
def test_definition_set(self):
assert hasattr(self.ds, "definition")
self.ds.definition = self.known_definition
assert self.ds.definition == self.known_definition
def test_include(self):
assert hasattr(self.ds, "include")
assert self.ds.include == self.known_include
def test_exclude(self):
assert hasattr(self.ds, "exclude")
assert self.ds.exclude == self.known_exclude
def test_wilcoxon_z(self):
assert hasattr(self.ds, "wilcoxon_z")
nptest.assert_allclose(
self.ds.wilcoxon_z, self.known_wilcoxon_z, rtol=self.tolerance
)
def test_wilcoxon_p(self):
assert hasattr(self.ds, "wilcoxon_p")
nptest.assert_allclose(
self.ds.wilcoxon_p, self.known_wilcoxon_p, rtol=self.tolerance
)
def test_mannwhitney_u(self):
assert hasattr(self.ds, "mannwhitney_u")
nptest.assert_allclose(
self.ds.mannwhitney_u, self.known_mannwhitney_u, rtol=self.tolerance
)
def test_mannwhitney_p(self):
assert hasattr(self.ds, "mannwhitney_p")
nptest.assert_allclose(
self.ds.mannwhitney_p, self.known_mannwhitney_p, rtol=self.tolerance
)
@pytest.mark.xfail(OLD_SCIPY, reason="Scipy < 0.19")
def test_kendall_tau(self):
assert hasattr(self.ds, "kendall_tau")
nptest.assert_allclose(
self.ds.kendall_tau, self.known_kendall_tau, rtol=self.tolerance
)
@pytest.mark.xfail(OLD_SCIPY, reason="Scipy < 0.19")
def test_kendall_p(self):
assert hasattr(self.ds, "kendall_p")
nptest.assert_allclose(
self.ds.kendall_p, self.known_kendall_p, rtol=self.tolerance
)
def test_spearman_rho(self):
assert hasattr(self.ds, "spearman_rho")
nptest.assert_allclose(
self.ds.spearman_rho, self.known_spearman_rho, atol=0.0001
)
def test_spearman_p(self):
assert hasattr(self.ds, "spearman_p")
nptest.assert_allclose(self.ds.spearman_p, self.known_spearman_p, atol=0.0001)
def test_theil_medslope(self):
assert hasattr(self.ds, "theil_medslope")
nptest.assert_allclose(
self.ds.theil_medslope, self.known_theil_medslope, rtol=self.tolerance
)
def test_theil_intercept(self):
assert hasattr(self.ds, "theil_intercept")
nptest.assert_allclose(
self.ds.theil_intercept, self.known_theil_intercept, rtol=self.tolerance
)
def test_theil_loslope(self):
assert hasattr(self.ds, "theil_loslope")
nptest.assert_allclose(
self.ds.theil_loslope, self.known_theil_loslope, rtol=self.tolerance
)
def test_theil_hislope(self):
assert hasattr(self.ds, "theil_hislope")
nptest.assert_allclose(
self.ds.theil_hislope, self.known_theil_hislope, rtol=self.tolerance
)
def test_wilcoxon_stats(self):
assert hasattr(self.ds, "_wilcoxon_stats")
nptest.assert_allclose(
self.ds._wilcoxon_stats, self.known_wilcoxon_stats, rtol=self.tolerance
)
def test_mannwhitney_stats(self):
assert hasattr(self.ds, "_mannwhitney_stats")
nptest.assert_allclose(
self.ds._mannwhitney_stats,
self.known_mannwhitney_stats,
rtol=self.tolerance,
)
@pytest.mark.xfail(OLD_SCIPY, reason="Scipy < 0.19")
def test_kendall_stats(self):
assert hasattr(self.ds, "_kendall_stats")
nptest.assert_allclose(
self.ds._kendall_stats, self.known_kendall_stats, rtol=self.tolerance
)
def test_spearman_stats(self):
assert hasattr(self.ds, "_spearman_stats")
nptest.assert_allclose(
self.ds._spearman_stats, self.known_spearman_stats, atol=0.0001
)
def test_theil_stats(self):
assert hasattr(self.ds, "_theil_stats")
nptest.assert_almost_equal(
self.ds._theil_stats["medslope"], self.known_theil_stats[0], decimal=4
)
nptest.assert_almost_equal(
self.ds._theil_stats["intercept"], self.known_theil_stats[1], decimal=4
)
nptest.assert_almost_equal(
self.ds._theil_stats["loslope"], self.known_theil_stats[2], decimal=4
)
nptest.assert_almost_equal(
self.ds._theil_stats["hislope"], self.known_theil_stats[3], decimal=4
)
assert not self.ds._theil_stats["is_inverted"]
assert "estimated_effluent" in list(self.ds._theil_stats.keys())
assert "estimate_error" in list(self.ds._theil_stats.keys())
def test_medianCIsOverlap(self):
assert self.known_medianCIsOverlap == self.ds.medianCIsOverlap
def test__repr__normal(self):
repr(self.ds)
def test_repr__None(self):
self.ds.definition = None
repr(self.ds)
| phobson/wqio | wqio/tests/test_features.py | Python | bsd-3-clause | 13,698 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
import mock
import raven
import time
import six
import os
from raven.base import Client, ClientState
from raven.exceptions import RateLimited
from raven.transport import AsyncTransport
from raven.transport.http import HTTPTransport
from raven.utils.stacks import iter_stack_frames
from raven.utils.testutils import TestCase
class TempStoreClient(Client):
def __init__(self, **kwargs):
self.events = []
super(TempStoreClient, self).__init__(**kwargs)
def is_enabled(self):
return True
def send(self, **kwargs):
self.events.append(kwargs)
class ClientStateTest(TestCase):
def test_should_try_online(self):
state = ClientState()
self.assertEquals(state.should_try(), True)
def test_should_try_new_error(self):
state = ClientState()
state.status = state.ERROR
state.last_check = time.time()
state.retry_number = 1
self.assertEquals(state.should_try(), False)
def test_should_try_time_passed_error(self):
state = ClientState()
state.status = state.ERROR
state.last_check = time.time() - 10
state.retry_number = 1
self.assertEquals(state.should_try(), True)
def test_set_fail(self):
state = ClientState()
state.set_fail()
self.assertEquals(state.status, state.ERROR)
self.assertNotEquals(state.last_check, None)
self.assertEquals(state.retry_number, 1)
def test_set_success(self):
state = ClientState()
state.status = state.ERROR
state.last_check = 'foo'
state.retry_number = 0
state.set_success()
self.assertEquals(state.status, state.ONLINE)
self.assertEquals(state.last_check, None)
self.assertEquals(state.retry_number, 0)
def test_should_try_retry_after(self):
state = ClientState()
state.status = state.ERROR
state.last_check = time.time()
state.retry_number = 1
state.retry_after = 1
self.assertFalse(state.should_try())
def test_should_try_retry_after_passed(self):
state = ClientState()
state.status = state.ERROR
state.last_check = time.time() - 1
state.retry_number = 1
state.retry_after = 1
self.assertTrue(state.should_try())
class ClientTest(TestCase):
def setUp(self):
self.client = TempStoreClient()
def test_first_client_is_singleton(self):
from raven import base
base.Raven = None
client = Client()
client2 = Client()
assert base.Raven is client
assert client is not client2
def test_client_picks_up_env_dsn(self):
DSN = 'sync+http://public:secret@example.com/1'
PUBLIC_DSN = '//public@example.com/1'
with mock.patch.dict(os.environ, {'SENTRY_DSN': DSN}):
client = Client()
assert client.remote.get_public_dsn() == PUBLIC_DSN
client = Client('')
assert client.remote.get_public_dsn() == PUBLIC_DSN
@mock.patch('raven.transport.http.HTTPTransport.send')
@mock.patch('raven.base.ClientState.should_try')
def test_send_remote_failover(self, should_try, send):
should_try.return_value = True
client = Client(
dsn='sync+http://public:secret@example.com/1'
)
# test error
send.side_effect = Exception()
client.send_remote('sync+http://example.com/api/store', client.encode({}))
self.assertEquals(client.state.status, client.state.ERROR)
# test recovery
send.side_effect = None
client.send_remote('sync+http://example.com/api/store', client.encode({}))
self.assertEquals(client.state.status, client.state.ONLINE)
@mock.patch('raven.transport.http.HTTPTransport.send')
@mock.patch('raven.base.ClientState.should_try')
def test_send_remote_failover_with_retry_after(self, should_try, send):
should_try.return_value = True
client = Client(
dsn='sync+http://public:secret@example.com/1'
)
# test error
send.side_effect = RateLimited('foo', 5)
client.send_remote('sync+http://example.com/api/1/store/', client.encode({}))
self.assertEquals(client.state.status, client.state.ERROR)
self.assertEqual(client.state.retry_after, 5)
# test recovery
send.side_effect = None
client.send_remote('sync+http://example.com/api/1/store/', client.encode({}))
self.assertEquals(client.state.status, client.state.ONLINE)
self.assertEqual(client.state.retry_after, 0)
@mock.patch('raven.conf.remote.RemoteConfig.get_transport')
@mock.patch('raven.base.ClientState.should_try')
def test_async_send_remote_failover(self, should_try, get_transport):
should_try.return_value = True
async_transport = AsyncTransport()
async_transport.async_send = async_send = mock.Mock()
get_transport.return_value = async_transport
client = Client(
dsn='http://public:secret@example.com/1',
)
# test immediate raise of error
async_send.side_effect = Exception()
client.send_remote('http://example.com/api/1/store/', client.encode({}))
self.assertEquals(client.state.status, client.state.ERROR)
# test recovery
client.send_remote('http://example.com/api/1/store/', client.encode({}))
success_cb = async_send.call_args[0][2]
success_cb()
self.assertEquals(client.state.status, client.state.ONLINE)
# test delayed raise of error
client.send_remote('http://example.com/api/1/store/', client.encode({}))
failure_cb = async_send.call_args[0][3]
failure_cb(Exception())
self.assertEquals(client.state.status, client.state.ERROR)
@mock.patch('raven.base.Client.send_remote')
@mock.patch('raven.base.time.time')
def test_send(self, time, send_remote):
time.return_value = 1328055286.51
client = Client(
dsn='http://public:secret@example.com/1',
)
client.send(**{
'foo': 'bar',
})
send_remote.assert_called_once_with(
url='http://example.com/api/1/store/',
data=client.encode({'foo': 'bar'}),
headers={
'User-Agent': 'raven-python/%s' % (raven.VERSION,),
'Content-Type': 'application/octet-stream',
'Content-Encoding': client.get_content_encoding(),
'X-Sentry-Auth': (
'Sentry sentry_timestamp=1328055286.51, '
'sentry_client=raven-python/%s, sentry_version=6, '
'sentry_key=public, '
'sentry_secret=secret' % (raven.VERSION,))
},
)
@mock.patch('raven.base.Client.send_remote')
@mock.patch('raven.base.time.time')
def test_send_with_auth_header(self, time, send_remote):
time.return_value = 1328055286.51
client = Client(
dsn='http://public:secret@example.com/1',
)
client.send(auth_header='foo', **{
'foo': 'bar',
})
send_remote.assert_called_once_with(
url='http://example.com/api/1/store/',
data=client.encode({'foo': 'bar'}),
headers={
'User-Agent': 'raven-python/%s' % (raven.VERSION,),
'Content-Type': 'application/octet-stream',
'Content-Encoding': client.get_content_encoding(),
'X-Sentry-Auth': 'foo',
},
)
@mock.patch('raven.transport.http.HTTPTransport.send')
@mock.patch('raven.base.ClientState.should_try')
def test_raise_exception_on_send_error(self, should_try, _send_remote):
should_try.return_value = True
client = Client(
dsn='sync+http://public:secret@example.com/1',
)
# Test for the default behaviour in which a send error is handled by the client
_send_remote.side_effect = Exception()
client.capture('Message', data={}, date=None, time_spent=10,
extra={}, stack=None, tags=None, message='Test message')
assert client.state.status == client.state.ERROR
# Test for the case in which a send error is raised to the calling frame.
client = Client(
dsn='sync+http://public:secret@example.com/1',
raise_send_errors=True,
)
with self.assertRaises(Exception):
client.capture('Message', data={}, date=None, time_spent=10,
extra={}, stack=None, tags=None, message='Test message')
def test_encode_decode(self):
data = {'foo': 'bar'}
encoded = self.client.encode(data)
self.assertTrue(type(encoded), str)
self.assertEquals(data, self.client.decode(encoded))
def test_get_public_dsn(self):
client = Client('http://public:secret@example.com/1')
public_dsn = client.get_public_dsn()
self.assertEquals(public_dsn, '//public@example.com/1')
def test_explicit_message_on_message_event(self):
self.client.captureMessage(message='test', data={
'message': 'foo'
})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'foo')
def test_message_from_kwargs(self):
try:
raise ValueError('foo')
except ValueError:
self.client.captureException(message='test', data={})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'test')
def test_explicit_message_on_exception_event(self):
try:
raise ValueError('foo')
except ValueError:
self.client.captureException(data={'message': 'foobar'})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'foobar')
def test_exception_event(self):
try:
raise ValueError('foo')
except ValueError:
self.client.captureException()
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'ValueError: foo')
self.assertTrue('exception' in event)
exc = event['exception']['values'][0]
self.assertEquals(exc['type'], 'ValueError')
self.assertEquals(exc['value'], 'foo')
self.assertEquals(exc['module'], ValueError.__module__) # this differs in some Python versions
assert 'stacktrace' not in event
stacktrace = exc['stacktrace']
self.assertEquals(len(stacktrace['frames']), 1)
frame = stacktrace['frames'][0]
self.assertEquals(frame['abs_path'], __file__.replace('.pyc', '.py'))
self.assertEquals(frame['filename'], 'tests/base/tests.py')
self.assertEquals(frame['module'], __name__)
self.assertEquals(frame['function'], 'test_exception_event')
self.assertTrue('timestamp' in event)
def test_exception_event_true_exc_info(self):
try:
raise ValueError('foo')
except ValueError:
self.client.captureException(exc_info=True)
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'ValueError: foo')
self.assertTrue('exception' in event)
exc = event['exception']['values'][0]
stacktrace = exc['stacktrace']
self.assertEquals(len(stacktrace['frames']), 1)
frame = stacktrace['frames'][0]
self.assertEquals(frame['abs_path'], __file__.replace('.pyc', '.py'))
self.assertEquals(frame['filename'], 'tests/base/tests.py')
self.assertEquals(frame['module'], __name__)
def test_decorator_preserves_function(self):
@self.client.capture_exceptions
def test1():
return 'foo'
self.assertEquals(test1(), 'foo')
class DecoratorTestException(Exception):
pass
def test_decorator_functionality(self):
@self.client.capture_exceptions
def test2():
raise self.DecoratorTestException()
try:
test2()
except self.DecoratorTestException:
pass
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'DecoratorTestException')
exc = event['exception']['values'][0]
self.assertEquals(exc['type'], 'DecoratorTestException')
self.assertEquals(exc['module'], self.DecoratorTestException.__module__)
stacktrace = exc['stacktrace']
# this is a wrapped class object with __call__ so three frames are expected
self.assertEquals(len(stacktrace['frames']), 3)
frame = stacktrace['frames'][-1]
self.assertEquals(frame['module'], __name__)
self.assertEquals(frame['function'], 'test2')
def test_decorator_filtering(self):
@self.client.capture_exceptions(self.DecoratorTestException)
def test3():
raise Exception()
try:
test3()
except Exception:
pass
self.assertEquals(len(self.client.events), 0)
def test_context_manager_functionality(self):
def test4():
raise self.DecoratorTestException()
try:
with self.client.capture_exceptions():
test4()
except self.DecoratorTestException:
pass
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'DecoratorTestException')
exc = event['exception']['values'][0]
self.assertEquals(exc['type'], 'DecoratorTestException')
self.assertEquals(exc['module'], self.DecoratorTestException.__module__)
stacktrace = exc['stacktrace']
# three frames are expected: test4, `with` block and context manager internals
self.assertEquals(len(stacktrace['frames']), 3)
frame = stacktrace['frames'][-1]
self.assertEquals(frame['module'], __name__)
self.assertEquals(frame['function'], 'test4')
def test_context_manager_filtering(self):
def test5():
raise Exception()
try:
with self.client.capture_exceptions(self.DecoratorTestException):
test5()
except Exception:
pass
self.assertEquals(len(self.client.events), 0)
def test_message_event(self):
self.client.captureMessage(message='test')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'test')
assert 'stacktrace' not in event
self.assertTrue('timestamp' in event)
def test_fingerprint(self):
self.client.captureMessage(
message='test',
fingerprint=['{{ default }}', 'foobar'],
)
assert len(self.client.events) == 1
event = self.client.events.pop(0)
assert event['fingerprint'] == ['{{ default }}', 'foobar']
def test_context(self):
self.client.context.merge({
'tags': {'foo': 'bar'},
})
try:
raise ValueError('foo')
except ValueError:
self.client.captureException()
else:
self.fail('Exception should have been raised')
assert len(self.client.events) == 1
event = self.client.events.pop(0)
assert event['tags'] == {'foo': 'bar'}
def test_stack_explicit_frames(self):
def bar():
return inspect.stack()
frames = bar()
self.client.captureMessage('test', stack=iter_stack_frames(frames))
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'test')
assert 'stacktrace' in event
self.assertEquals(len(frames), len(event['stacktrace']['frames']))
for frame, frame_i in zip(frames, event['stacktrace']['frames']):
self.assertEquals(frame[0].f_code.co_filename, frame_i['abs_path'])
self.assertEquals(frame[0].f_code.co_name, frame_i['function'])
def test_stack_auto_frames(self):
self.client.captureMessage('test', stack=True)
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'test')
self.assertTrue('stacktrace' in event)
self.assertTrue('timestamp' in event)
def test_site(self):
self.client.captureMessage(message='test', data={'site': 'test'})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
assert 'site' in event['tags']
assert event['tags']['site'] == 'test'
def test_implicit_site(self):
self.client = TempStoreClient(site='foo')
self.client.captureMessage(message='test')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
assert 'site' in event['tags']
assert event['tags']['site'] == 'foo'
def test_logger(self):
self.client.captureMessage(message='test', data={'logger': 'test'})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['logger'], 'test')
self.assertTrue('timestamp' in event)
def test_tags(self):
self.client.captureMessage(message='test', tags={'logger': 'test'})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['tags'], {'logger': 'test'})
def test_client_extra_context(self):
self.client.extra = {
'foo': 'bar',
'logger': 'baz',
}
self.client.captureMessage(message='test', extra={'logger': 'test'})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
if six.PY3:
expected = {'logger': "'test'", 'foo': "'bar'"}
else:
expected = {'logger': "u'test'", 'foo': "u'bar'"}
self.assertEquals(event['extra'], expected)
def test_transport_registration(self):
client = Client('http://public:secret@example.com/1',
transport=HTTPTransport)
assert type(client.remote.get_transport()) is HTTPTransport
client = Client('sync+http://public:secret@example.com/1')
assert type(client.remote.get_transport()) is HTTPTransport
def test_marks_in_app_frames_for_stacktrace(self):
client = TempStoreClient(
include_paths=['foo'],
exclude_paths=['foo.bar'],
)
client.captureMessage('hello', data={
'stacktrace': {
'frames': [
{'module': 'foo'},
{'module': 'bar'},
{'module': 'foo.bar'},
{'module': 'foo.baz'},
]
}
})
event = client.events.pop(0)
frames = event['stacktrace']['frames']
assert frames[0]['in_app']
assert not frames[1]['in_app']
assert not frames[2]['in_app']
assert frames[3]['in_app']
def test_marks_in_app_frames_for_exception(self):
client = TempStoreClient(
include_paths=['foo'],
exclude_paths=['foo.bar'],
)
client.captureMessage('hello', data={
'exception': {
'values': [{
'stacktrace': {
'frames': [
{'module': 'foo'},
{'module': 'bar'},
{'module': 'foo.bar'},
{'module': 'foo.baz'},
]
}
}]
}
})
event = client.events.pop(0)
frames = event['exception']['values'][0]['stacktrace']['frames']
assert frames[0]['in_app']
assert not frames[1]['in_app']
assert not frames[2]['in_app']
assert frames[3]['in_app']
| johansteffner/raven-python | tests/base/tests.py | Python | bsd-3-clause | 20,540 |
from django.core.management import call_command
from django.core.management.base import BaseCommand
from ._utils import add_arguments
class Command(BaseCommand):
help = "Rebuilds the search index by clearing the search index and then performing an update."
add_arguments = add_arguments
def handle(self, **options):
call_command('clear_index', **options)
call_command('search_index', action='update', **options)
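# Typical invocation (assumed; the exact optional arguments depend on what
# ._utils.add_arguments exposes):
#
#     python manage.py rebuild_index
#
# which first clears the existing search index and then runs a full update,
# as the two call_command() calls above show.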
| Sparrho/bungiesearch | bungiesearch/management/commands/rebuild_index.py | Python | bsd-3-clause | 444 |
import json
import logging
from django.db import transaction
from rest_framework import mixins, serializers, status, viewsets
from rest_framework.response import Response
from mkt.api.authentication import RestOAuthAuthentication
from mkt.api.authorization import GroupPermission
from mkt.api.base import CORSMixin
from .forms import MonolithForm
from .models import MonolithRecord
logger = logging.getLogger('z.monolith')
class MonolithSerializer(serializers.ModelSerializer):
class Meta:
model = MonolithRecord
def transform_value(self, obj, value):
return json.loads(value)
class MonolithViewSet(CORSMixin, mixins.DestroyModelMixin,
mixins.ListModelMixin, mixins.RetrieveModelMixin,
viewsets.GenericViewSet):
cors_allowed_methods = ('get', 'delete')
permission_classes = [GroupPermission('Monolith', 'API')]
authentication_classes = [RestOAuthAuthentication]
serializer_class = MonolithSerializer
def get_queryset(self):
form = MonolithForm(self.request.QUERY_PARAMS)
if not form.is_valid():
return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
key = form.cleaned_data['key']
start = form.cleaned_data['start']
end = form.cleaned_data['end']
qs = MonolithRecord.objects.all()
if key:
qs = qs.filter(key=key)
if start is not None:
qs = qs.filter(recorded__gte=start)
if end is not None:
qs = qs.filter(recorded__lt=end)
return qs
@transaction.commit_on_success
def delete(self, request, *args, **kwargs):
qs = self.filter_queryset(self.get_queryset())
logger.info('Deleting %d monolith resources' % qs.count())
qs.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
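# Example requests (illustrative only; the URL prefix depends on how the
# viewset is routed, and 'apps_added' is a made-up key):
#
#     GET    /api/monolith/?key=apps_added&start=2014-01-01&end=2014-02-01
#     DELETE /api/monolith/?key=apps_added&start=2014-01-01&end=2014-02-01
#
# get_queryset() validates key/start/end through MonolithForm and filters the
# records accordingly; DELETE removes the filtered records in one transaction.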
| wagnerand/zamboni | mkt/monolith/resources.py | Python | bsd-3-clause | 1,858 |
#!/usr/bin/env python
"""
This file defines a set of system_info classes for getting
information about various resources (libraries, library directories,
include directories, etc.) in the system. Currently, the following
classes are available:
atlas_info
atlas_threads_info
atlas_blas_info
atlas_blas_threads_info
lapack_atlas_info
lapack_atlas_threads_info
atlas_3_10_info
atlas_3_10_threads_info
atlas_3_10_blas_info,
atlas_3_10_blas_threads_info,
lapack_atlas_3_10_info
lapack_atlas_3_10_threads_info
blas_info
lapack_info
openblas_info
blas_opt_info # usage recommended
lapack_opt_info # usage recommended
fftw_info,dfftw_info,sfftw_info
fftw_threads_info,dfftw_threads_info,sfftw_threads_info
djbfft_info
x11_info
lapack_src_info
blas_src_info
numpy_info
numarray_info
numpy_info
boost_python_info
agg2_info
wx_info
gdk_pixbuf_xlib_2_info
gdk_pixbuf_2_info
gdk_x11_2_info
gtkp_x11_2_info
gtkp_2_info
xft_info
freetype2_info
umfpack_info
Usage:
info_dict = get_info(<name>)
where <name> is a string such as 'atlas', 'x11', 'fftw', 'lapack', 'blas',
'lapack_src', 'blas_src', etc. For a complete list of allowed names,
see the definition of the get_info() function below; a short commented
usage sketch also follows this docstring.
The returned info_dict is a dictionary that is compatible with
distutils.setup keyword arguments. If info_dict == {}, then the
requested resource is not available (system_info could not find it).
Several *_info classes specify an environment variable for overriding
the locations of software. When the corresponding environment variable
is set to 'None', the software will be ignored, even when it
is available in the system.
Global parameters:
system_info.search_static_first - search static libraries (.a)
in precedence to shared ones (.so, .sl) if enabled.
system_info.verbosity - output the results to stdout if enabled.
The file 'site.cfg' is looked for in
1) Directory of main setup.py file being run.
2) Home directory of user running the setup.py file as ~/.numpy-site.cfg
3) System-wide directory (location of this file...)
The first one found is used to get system configuration options. The
format is that used by ConfigParser (i.e., Windows .INI style). The
section ALL has options that are the default for each section. The
available sections are fftw, atlas, and x11. Appropriate defaults are
used if nothing is specified.
The order of finding the locations of resources is the following:
1. environment variable
2. section in site.cfg
3. ALL section in site.cfg
Only the first complete match is returned.
Example:
----------
[ALL]
library_dirs = /usr/lib:/usr/local/lib:/opt/lib
include_dirs = /usr/include:/usr/local/include:/opt/include
src_dirs = /usr/local/src:/opt/src
# search static libraries (.a) in preference to shared ones (.so)
search_static_first = 0
[fftw]
fftw_libs = rfftw, fftw
fftw_opt_libs = rfftw_threaded, fftw_threaded
# if the above aren't found, look for {s,d}fftw_libs and {s,d}fftw_opt_libs
[atlas]
library_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas
# for overriding the names of the atlas libraries
atlas_libs = lapack, f77blas, cblas, atlas
[x11]
library_dirs = /usr/X11R6/lib
include_dirs = /usr/X11R6/include
----------
Authors:
Pearu Peterson <pearu@cens.ioc.ee>, February 2002
David M. Cooke <cookedm@physics.mcmaster.ca>, April 2002
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
"""
from __future__ import division, absolute_import, print_function
import sys
import os
import re
import copy
import warnings
from glob import glob
from functools import reduce
if sys.version_info[0] < 3:
from ConfigParser import NoOptionError, ConfigParser
else:
from configparser import NoOptionError, ConfigParser
from distutils.errors import DistutilsError
from distutils.dist import Distribution
import distutils.sysconfig
from distutils import log
from distutils.util import get_platform
from numpy.distutils.exec_command import \
find_executable, exec_command, get_pythonexe
from numpy.distutils.misc_util import is_sequence, is_string, \
get_shared_lib_extension
from numpy.distutils.command.config import config as cmd_config
from numpy.distutils.compat import get_exception
import distutils.ccompiler
import tempfile
import shutil
# Determine number of bits
import platform
_bits = {'32bit': 32, '64bit': 64}
platform_bits = _bits[platform.architecture()[0]]
def libpaths(paths, bits):
"""Return a list of library paths valid on 32 or 64 bit systems.
Inputs:
paths : sequence
A sequence of strings (typically paths)
bits : int
An integer, the only valid values are 32 or 64. A ValueError exception
is raised otherwise.
Examples:
Consider a list of directories
>>> paths = ['/usr/X11R6/lib','/usr/X11/lib','/usr/lib']
For a 32-bit platform, this is already valid:
>>> np.distutils.system_info.libpaths(paths,32)
['/usr/X11R6/lib', '/usr/X11/lib', '/usr/lib']
On 64 bits, a '64'-suffixed copy of each path is added ahead of the original:
>>> np.distutils.system_info.libpaths(paths,64)
['/usr/X11R6/lib64', '/usr/X11R6/lib', '/usr/X11/lib64', '/usr/X11/lib',
'/usr/lib64', '/usr/lib']
"""
if bits not in (32, 64):
raise ValueError("Invalid bit size in libpaths: 32 or 64 only")
# Handle 32bit case
if bits == 32:
return paths
# Handle 64bit case
out = []
for p in paths:
out.extend([p + '64', p])
return out
if sys.platform == 'win32':
default_lib_dirs = ['C:\\',
os.path.join(distutils.sysconfig.EXEC_PREFIX,
'libs')]
default_runtime_dirs = []
default_include_dirs = []
default_src_dirs = ['.']
default_x11_lib_dirs = []
default_x11_include_dirs = []
else:
default_lib_dirs = libpaths(['/usr/local/lib', '/opt/lib', '/usr/lib',
'/opt/local/lib', '/sw/lib'], platform_bits)
default_runtime_dirs = []
default_include_dirs = ['/usr/local/include',
'/opt/include', '/usr/include',
# path of umfpack under macports
'/opt/local/include/ufsparse',
'/opt/local/include', '/sw/include',
'/usr/include/suitesparse']
default_src_dirs = ['.', '/usr/local/src', '/opt/src', '/sw/src']
default_x11_lib_dirs = libpaths(['/usr/X11R6/lib', '/usr/X11/lib',
'/usr/lib'], platform_bits)
default_x11_include_dirs = ['/usr/X11R6/include', '/usr/X11/include',
'/usr/include']
if os.path.exists('/usr/lib/X11'):
globbed_x11_dir = glob('/usr/lib/*/libX11.so')
if globbed_x11_dir:
x11_so_dir = os.path.split(globbed_x11_dir[0])[0]
default_x11_lib_dirs.extend([x11_so_dir, '/usr/lib/X11'])
default_x11_include_dirs.extend(['/usr/lib/X11/include',
'/usr/include/X11'])
import subprocess as sp
tmp = None
try:
# Explicitly open/close file to avoid ResourceWarning when
# tests are run in debug mode on Python 3.
tmp = open(os.devnull, 'w')
p = sp.Popen(["gcc", "-print-multiarch"], stdout=sp.PIPE,
stderr=tmp)
except (OSError, DistutilsError):
# OSError if gcc is not installed, or SandboxViolation (DistutilsError
# subclass) if an old setuptools bug is triggered (see gh-3160).
pass
else:
triplet = str(p.communicate()[0].decode().strip())
if p.returncode == 0:
# gcc supports the "-print-multiarch" option
default_x11_lib_dirs += [os.path.join("/usr/lib/", triplet)]
default_lib_dirs += [os.path.join("/usr/lib/", triplet)]
finally:
if tmp is not None:
tmp.close()
if os.path.join(sys.prefix, 'lib') not in default_lib_dirs:
default_lib_dirs.insert(0, os.path.join(sys.prefix, 'lib'))
default_include_dirs.append(os.path.join(sys.prefix, 'include'))
default_src_dirs.append(os.path.join(sys.prefix, 'src'))
default_lib_dirs = [_m for _m in default_lib_dirs if os.path.isdir(_m)]
default_runtime_dirs = [_m for _m in default_runtime_dirs if os.path.isdir(_m)]
default_include_dirs = [_m for _m in default_include_dirs if os.path.isdir(_m)]
default_src_dirs = [_m for _m in default_src_dirs if os.path.isdir(_m)]
so_ext = get_shared_lib_extension()
def get_standard_file(fname):
"""Returns a list of files named 'fname' from
1) System-wide directory (directory-location of this module)
2) Users HOME directory (os.environ['HOME'])
3) Local directory
"""
# System-wide file
filenames = []
try:
f = __file__
except NameError:
f = sys.argv[0]
else:
sysfile = os.path.join(os.path.split(os.path.abspath(f))[0],
fname)
if os.path.isfile(sysfile):
filenames.append(sysfile)
# Home directory
# And look for the user config file
try:
f = os.path.expanduser('~')
except KeyError:
pass
else:
user_file = os.path.join(f, fname)
if os.path.isfile(user_file):
filenames.append(user_file)
# Local file
if os.path.isfile(fname):
filenames.append(os.path.abspath(fname))
return filenames
def get_info(name, notfound_action=0):
"""
notfound_action:
0 - do nothing
1 - display warning message
2 - raise error
"""
cl = {'atlas': atlas_info, # use lapack_opt or blas_opt instead
'atlas_threads': atlas_threads_info, # ditto
'atlas_blas': atlas_blas_info,
'atlas_blas_threads': atlas_blas_threads_info,
'lapack_atlas': lapack_atlas_info, # use lapack_opt instead
'lapack_atlas_threads': lapack_atlas_threads_info, # ditto
'atlas_3_10': atlas_3_10_info, # use lapack_opt or blas_opt instead
'atlas_3_10_threads': atlas_3_10_threads_info, # ditto
'atlas_3_10_blas': atlas_3_10_blas_info,
'atlas_3_10_blas_threads': atlas_3_10_blas_threads_info,
'lapack_atlas_3_10': lapack_atlas_3_10_info, # use lapack_opt instead
'lapack_atlas_3_10_threads': lapack_atlas_3_10_threads_info, # ditto
'mkl': mkl_info,
# openblas which may or may not have embedded lapack
'openblas': openblas_info, # use blas_opt instead
# openblas with embedded lapack
'openblas_lapack': openblas_lapack_info, # use blas_opt instead
'lapack_mkl': lapack_mkl_info, # use lapack_opt instead
'blas_mkl': blas_mkl_info, # use blas_opt instead
'x11': x11_info,
'fft_opt': fft_opt_info,
'fftw': fftw_info,
'fftw2': fftw2_info,
'fftw3': fftw3_info,
'dfftw': dfftw_info,
'sfftw': sfftw_info,
'fftw_threads': fftw_threads_info,
'dfftw_threads': dfftw_threads_info,
'sfftw_threads': sfftw_threads_info,
'djbfft': djbfft_info,
'blas': blas_info, # use blas_opt instead
'lapack': lapack_info, # use lapack_opt instead
'lapack_src': lapack_src_info,
'blas_src': blas_src_info,
'numpy': numpy_info,
'f2py': f2py_info,
'Numeric': Numeric_info,
'numeric': Numeric_info,
'numarray': numarray_info,
'numerix': numerix_info,
'lapack_opt': lapack_opt_info,
'blas_opt': blas_opt_info,
'boost_python': boost_python_info,
'agg2': agg2_info,
'wx': wx_info,
'gdk_pixbuf_xlib_2': gdk_pixbuf_xlib_2_info,
'gdk-pixbuf-xlib-2.0': gdk_pixbuf_xlib_2_info,
'gdk_pixbuf_2': gdk_pixbuf_2_info,
'gdk-pixbuf-2.0': gdk_pixbuf_2_info,
'gdk': gdk_info,
'gdk_2': gdk_2_info,
'gdk-2.0': gdk_2_info,
'gdk_x11_2': gdk_x11_2_info,
'gdk-x11-2.0': gdk_x11_2_info,
'gtkp_x11_2': gtkp_x11_2_info,
'gtk+-x11-2.0': gtkp_x11_2_info,
'gtkp_2': gtkp_2_info,
'gtk+-2.0': gtkp_2_info,
'xft': xft_info,
'freetype2': freetype2_info,
'umfpack': umfpack_info,
'amd': amd_info,
}.get(name.lower(), system_info)
return cl().get_info(notfound_action)
class NotFoundError(DistutilsError):
"""Some third-party program or library is not found."""
class AtlasNotFoundError(NotFoundError):
"""
Atlas (http://math-atlas.sourceforge.net/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [atlas]) or by setting
the ATLAS environment variable."""
class LapackNotFoundError(NotFoundError):
"""
Lapack (http://www.netlib.org/lapack/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [lapack]) or by setting
the LAPACK environment variable."""
class LapackSrcNotFoundError(LapackNotFoundError):
"""
Lapack (http://www.netlib.org/lapack/) sources not found.
Directories to search for the sources can be specified in the
numpy/distutils/site.cfg file (section [lapack_src]) or by setting
the LAPACK_SRC environment variable."""
class BlasNotFoundError(NotFoundError):
"""
Blas (http://www.netlib.org/blas/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [blas]) or by setting
the BLAS environment variable."""
class BlasSrcNotFoundError(BlasNotFoundError):
"""
Blas (http://www.netlib.org/blas/) sources not found.
Directories to search for the sources can be specified in the
numpy/distutils/site.cfg file (section [blas_src]) or by setting
the BLAS_SRC environment variable."""
class FFTWNotFoundError(NotFoundError):
"""
FFTW (http://www.fftw.org/) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [fftw]) or by setting
the FFTW environment variable."""
class DJBFFTNotFoundError(NotFoundError):
"""
DJBFFT (http://cr.yp.to/djbfft.html) libraries not found.
Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [djbfft]) or by setting
the DJBFFT environment variable."""
class NumericNotFoundError(NotFoundError):
"""
Numeric (http://www.numpy.org/) module not found.
Get it from above location, install it, and retry setup.py."""
class X11NotFoundError(NotFoundError):
"""X11 libraries not found."""
class UmfpackNotFoundError(NotFoundError):
"""
UMFPACK sparse solver (http://www.cise.ufl.edu/research/sparse/umfpack/)
not found. Directories to search for the libraries can be specified in the
numpy/distutils/site.cfg file (section [umfpack]) or by setting
the UMFPACK environment variable."""
class system_info(object):
""" get_info() is the only public method. Don't use others.
"""
section = 'ALL'
dir_env_var = None
search_static_first = 0 # XXX: disabled by default, may disappear in
# future unless it is proved to be useful.
verbosity = 1
saved_results = {}
notfounderror = NotFoundError
def __init__(self,
default_lib_dirs=default_lib_dirs,
default_include_dirs=default_include_dirs,
verbosity=1,
):
self.__class__.info = {}
self.local_prefixes = []
defaults = {'library_dirs': os.pathsep.join(default_lib_dirs),
'include_dirs': os.pathsep.join(default_include_dirs),
'runtime_library_dirs': os.pathsep.join(default_runtime_dirs),
'rpath': '',
'src_dirs': os.pathsep.join(default_src_dirs),
'search_static_first': str(self.search_static_first),
'extra_compile_args': '', 'extra_link_args': ''}
self.cp = ConfigParser(defaults)
self.files = []
self.files.extend(get_standard_file('.numpy-site.cfg'))
self.files.extend(get_standard_file('site.cfg'))
self.parse_config_files()
if self.section is not None:
self.search_static_first = self.cp.getboolean(
self.section, 'search_static_first')
assert isinstance(self.search_static_first, int)
def parse_config_files(self):
self.cp.read(self.files)
if not self.cp.has_section(self.section):
if self.section is not None:
self.cp.add_section(self.section)
def calc_libraries_info(self):
libs = self.get_libraries()
dirs = self.get_lib_dirs()
# The extensions use runtime_library_dirs
r_dirs = self.get_runtime_lib_dirs()
# Intrinsic distutils uses rpath; we simply append both entries
# as though they were one entry
r_dirs.extend(self.get_runtime_lib_dirs(key='rpath'))
info = {}
for lib in libs:
i = self.check_libs(dirs, [lib])
if i is not None:
dict_append(info, **i)
else:
log.info('Library %s was not found. Ignoring' % (lib))
i = self.check_libs(r_dirs, [lib])
if i is not None:
# Swap library keywords found to runtime_library_dirs:
# these libraries are expected to have been defined by the user
# via library_dirs, and not necessarily via
# runtime_library_dirs
del i['libraries']
i['runtime_library_dirs'] = i.pop('library_dirs')
dict_append(info, **i)
else:
log.info('Runtime library %s was not found. Ignoring' % (lib))
return info
def set_info(self, **info):
if info:
lib_info = self.calc_libraries_info()
dict_append(info, **lib_info)
# Update extra information
extra_info = self.calc_extra_info()
dict_append(info, **extra_info)
self.saved_results[self.__class__.__name__] = info
def has_info(self):
return self.__class__.__name__ in self.saved_results
def calc_extra_info(self):
""" Updates the information in the current information with
respect to these flags:
extra_compile_args
extra_link_args
"""
info = {}
for key in ['extra_compile_args', 'extra_link_args']:
# Get values
opt = self.cp.get(self.section, key)
if opt:
tmp = {key : [opt]}
dict_append(info, **tmp)
return info
def get_info(self, notfound_action=0):
""" Return a dictonary with items that are compatible
with numpy.distutils.setup keyword arguments.
"""
flag = 0
if not self.has_info():
flag = 1
log.info(self.__class__.__name__ + ':')
if hasattr(self, 'calc_info'):
self.calc_info()
if notfound_action:
if not self.has_info():
if notfound_action == 1:
warnings.warn(self.notfounderror.__doc__)
elif notfound_action == 2:
raise self.notfounderror(self.notfounderror.__doc__)
else:
raise ValueError(repr(notfound_action))
if not self.has_info():
log.info(' NOT AVAILABLE')
self.set_info()
else:
log.info(' FOUND:')
res = self.saved_results.get(self.__class__.__name__)
if self.verbosity > 0 and flag:
for k, v in res.items():
v = str(v)
if k in ['sources', 'libraries'] and len(v) > 270:
v = v[:120] + '...\n...\n...' + v[-120:]
log.info(' %s = %s', k, v)
log.info('')
return copy.deepcopy(res)
def get_paths(self, section, key):
dirs = self.cp.get(section, key).split(os.pathsep)
env_var = self.dir_env_var
if env_var:
if is_sequence(env_var):
e0 = env_var[-1]
for e in env_var:
if e in os.environ:
e0 = e
break
if not env_var[0] == e0:
log.info('Setting %s=%s' % (env_var[0], e0))
env_var = e0
if env_var and env_var in os.environ:
d = os.environ[env_var]
if d == 'None':
log.info('Disabled %s: %s',
self.__class__.__name__, '(%s is None)'
% (env_var,))
return []
if os.path.isfile(d):
dirs = [os.path.dirname(d)] + dirs
l = getattr(self, '_lib_names', [])
if len(l) == 1:
b = os.path.basename(d)
b = os.path.splitext(b)[0]
if b[:3] == 'lib':
log.info('Replacing _lib_names[0]==%r with %r' \
% (self._lib_names[0], b[3:]))
self._lib_names[0] = b[3:]
else:
ds = d.split(os.pathsep)
ds2 = []
for d in ds:
if os.path.isdir(d):
ds2.append(d)
for dd in ['include', 'lib']:
d1 = os.path.join(d, dd)
if os.path.isdir(d1):
ds2.append(d1)
dirs = ds2 + dirs
default_dirs = self.cp.get(self.section, key).split(os.pathsep)
dirs.extend(default_dirs)
ret = []
for d in dirs:
if not os.path.isdir(d):
warnings.warn('Specified path %s is invalid.' % d)
continue
if d not in ret:
ret.append(d)
log.debug('( %s = %s )', key, ':'.join(ret))
return ret
def get_lib_dirs(self, key='library_dirs'):
return self.get_paths(self.section, key)
def get_runtime_lib_dirs(self, key='runtime_library_dirs'):
return self.get_paths(self.section, key)
def get_include_dirs(self, key='include_dirs'):
return self.get_paths(self.section, key)
def get_src_dirs(self, key='src_dirs'):
return self.get_paths(self.section, key)
def get_libs(self, key, default):
try:
libs = self.cp.get(self.section, key)
except NoOptionError:
if not default:
return []
if is_string(default):
return [default]
return default
return [b for b in [a.strip() for a in libs.split(',')] if b]
def get_libraries(self, key='libraries'):
return self.get_libs(key, '')
def library_extensions(self):
static_exts = ['.a']
if sys.platform == 'win32':
static_exts.append('.lib') # .lib is used by MSVC
if self.search_static_first:
exts = static_exts + [so_ext]
else:
exts = [so_ext] + static_exts
if sys.platform == 'cygwin':
exts.append('.dll.a')
if sys.platform == 'darwin':
exts.append('.dylib')
return exts
def check_libs(self, lib_dirs, libs, opt_libs=[]):
"""If static or shared libraries are available then return
their info dictionary.
Checks for all libraries as shared libraries first, then
static (or vice versa if self.search_static_first is True).
"""
exts = self.library_extensions()
info = None
for ext in exts:
info = self._check_libs(lib_dirs, libs, opt_libs, [ext])
if info is not None:
break
if not info:
log.info(' libraries %s not found in %s', ','.join(libs),
lib_dirs)
return info
def check_libs2(self, lib_dirs, libs, opt_libs=[]):
"""If static or shared libraries are available then return
their info dictionary.
Checks each library for shared or static.
"""
exts = self.library_extensions()
info = self._check_libs(lib_dirs, libs, opt_libs, exts)
if not info:
log.info(' libraries %s not found in %s', ','.join(libs),
lib_dirs)
return info
def _lib_list(self, lib_dir, libs, exts):
assert is_string(lib_dir)
liblist = []
# under windows first try without 'lib' prefix
if sys.platform == 'win32':
lib_prefixes = ['', 'lib']
else:
lib_prefixes = ['lib']
# for each library name, see if we can find a file for it.
for l in libs:
for ext in exts:
for prefix in lib_prefixes:
p = self.combine_paths(lib_dir, prefix + l + ext)
if p:
break
if p:
assert len(p) == 1
# ??? splitext on p[0] would do this for cygwin
# doesn't seem correct
if ext == '.dll.a':
l += '.dll'
liblist.append(l)
break
return liblist
def _check_libs(self, lib_dirs, libs, opt_libs, exts):
"""Find mandatory and optional libs in expected paths.
Missing optional libraries are silently forgotten.
"""
# First, try to find the mandatory libraries
if is_sequence(lib_dirs):
found_libs, found_dirs = [], []
for dir_ in lib_dirs:
found_libs1 = self._lib_list(dir_, libs, exts)
# It's possible that we'll find the same library in multiple
# directories. It's also possible that we'll find some
# libraries in one directory, and some in another. So the
# obvious thing would be to use a set instead of a list, but I
# don't know if preserving order matters (does it?).
for found_lib in found_libs1:
if found_lib not in found_libs:
found_libs.append(found_lib)
if dir_ not in found_dirs:
found_dirs.append(dir_)
else:
found_libs = self._lib_list(lib_dirs, libs, exts)
found_dirs = [lib_dirs]
if len(found_libs) > 0 and len(found_libs) == len(libs):
info = {'libraries': found_libs, 'library_dirs': found_dirs}
# Now, check for optional libraries
if is_sequence(lib_dirs):
for dir_ in lib_dirs:
opt_found_libs = self._lib_list(dir_, opt_libs, exts)
if opt_found_libs:
if dir_ not in found_dirs:
found_dirs.extend(dir_)
found_libs.extend(opt_found_libs)
else:
opt_found_libs = self._lib_list(lib_dirs, opt_libs, exts)
if opt_found_libs:
found_libs.extend(opt_found_libs)
return info
else:
return None
def combine_paths(self, *args):
"""Return a list of existing paths composed by all combinations
of items from the arguments.
"""
return combine_paths(*args, **{'verbosity': self.verbosity})
class fft_opt_info(system_info):
def calc_info(self):
info = {}
fftw_info = get_info('fftw3') or get_info('fftw2') or get_info('dfftw')
djbfft_info = get_info('djbfft')
if fftw_info:
dict_append(info, **fftw_info)
if djbfft_info:
dict_append(info, **djbfft_info)
self.set_info(**info)
return
class fftw_info(system_info):
#variables to override
section = 'fftw'
dir_env_var = 'FFTW'
notfounderror = FFTWNotFoundError
ver_info = [{'name':'fftw3',
'libs':['fftw3'],
'includes':['fftw3.h'],
'macros':[('SCIPY_FFTW3_H', None)]},
{'name':'fftw2',
'libs':['rfftw', 'fftw'],
'includes':['fftw.h', 'rfftw.h'],
'macros':[('SCIPY_FFTW_H', None)]}]
def calc_ver_info(self, ver_param):
"""Returns True on successful version detection, else False"""
lib_dirs = self.get_lib_dirs()
incl_dirs = self.get_include_dirs()
incl_dir = None
libs = self.get_libs(self.section + '_libs', ver_param['libs'])
info = self.check_libs(lib_dirs, libs)
if info is not None:
flag = 0
for d in incl_dirs:
if len(self.combine_paths(d, ver_param['includes'])) \
== len(ver_param['includes']):
dict_append(info, include_dirs=[d])
flag = 1
incl_dirs = [d]
break
if flag:
dict_append(info, define_macros=ver_param['macros'])
else:
info = None
if info is not None:
self.set_info(**info)
return True
else:
log.info(' %s not found' % (ver_param['name']))
return False
def calc_info(self):
for i in self.ver_info:
if self.calc_ver_info(i):
break
class fftw2_info(fftw_info):
#variables to override
section = 'fftw'
dir_env_var = 'FFTW'
notfounderror = FFTWNotFoundError
ver_info = [{'name':'fftw2',
'libs':['rfftw', 'fftw'],
'includes':['fftw.h', 'rfftw.h'],
'macros':[('SCIPY_FFTW_H', None)]}
]
class fftw3_info(fftw_info):
#variables to override
section = 'fftw3'
dir_env_var = 'FFTW3'
notfounderror = FFTWNotFoundError
ver_info = [{'name':'fftw3',
'libs':['fftw3'],
'includes':['fftw3.h'],
'macros':[('SCIPY_FFTW3_H', None)]},
]
class dfftw_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'dfftw',
'libs':['drfftw', 'dfftw'],
'includes':['dfftw.h', 'drfftw.h'],
'macros':[('SCIPY_DFFTW_H', None)]}]
class sfftw_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'sfftw',
'libs':['srfftw', 'sfftw'],
'includes':['sfftw.h', 'srfftw.h'],
'macros':[('SCIPY_SFFTW_H', None)]}]
class fftw_threads_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'fftw threads',
'libs':['rfftw_threads', 'fftw_threads'],
'includes':['fftw_threads.h', 'rfftw_threads.h'],
'macros':[('SCIPY_FFTW_THREADS_H', None)]}]
class dfftw_threads_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'dfftw threads',
'libs':['drfftw_threads', 'dfftw_threads'],
'includes':['dfftw_threads.h', 'drfftw_threads.h'],
'macros':[('SCIPY_DFFTW_THREADS_H', None)]}]
class sfftw_threads_info(fftw_info):
section = 'fftw'
dir_env_var = 'FFTW'
ver_info = [{'name':'sfftw threads',
'libs':['srfftw_threads', 'sfftw_threads'],
'includes':['sfftw_threads.h', 'srfftw_threads.h'],
'macros':[('SCIPY_SFFTW_THREADS_H', None)]}]
class djbfft_info(system_info):
section = 'djbfft'
dir_env_var = 'DJBFFT'
notfounderror = DJBFFTNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend(self.combine_paths(d, ['djbfft']) + [d])
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
lib_dirs = self.get_lib_dirs()
incl_dirs = self.get_include_dirs()
info = None
for d in lib_dirs:
p = self.combine_paths(d, ['djbfft.a'])
if p:
info = {'extra_objects': p}
break
p = self.combine_paths(d, ['libdjbfft.a', 'libdjbfft' + so_ext])
if p:
info = {'libraries': ['djbfft'], 'library_dirs': [d]}
break
if info is None:
return
for d in incl_dirs:
if len(self.combine_paths(d, ['fftc8.h', 'fftfreq.h'])) == 2:
dict_append(info, include_dirs=[d],
define_macros=[('SCIPY_DJBFFT_H', None)])
self.set_info(**info)
return
return
class mkl_info(system_info):
section = 'mkl'
dir_env_var = 'MKL'
_lib_mkl = ['mkl', 'vml', 'guide']
def get_mkl_rootdir(self):
mklroot = os.environ.get('MKLROOT', None)
if mklroot is not None:
return mklroot
paths = os.environ.get('LD_LIBRARY_PATH', '').split(os.pathsep)
ld_so_conf = '/etc/ld.so.conf'
if os.path.isfile(ld_so_conf):
for d in open(ld_so_conf, 'r'):
d = d.strip()
if d:
paths.append(d)
intel_mkl_dirs = []
for path in paths:
path_atoms = path.split(os.sep)
for m in path_atoms:
if m.startswith('mkl'):
d = os.sep.join(path_atoms[:path_atoms.index(m) + 2])
intel_mkl_dirs.append(d)
break
for d in paths:
dirs = glob(os.path.join(d, 'mkl', '*'))
dirs += glob(os.path.join(d, 'mkl*'))
for d in dirs:
if os.path.isdir(os.path.join(d, 'lib')):
return d
return None
def __init__(self):
mklroot = self.get_mkl_rootdir()
if mklroot is None:
system_info.__init__(self)
else:
from .cpuinfo import cpu
l = 'mkl' # use shared library
if cpu.is_Itanium():
plt = '64'
elif cpu.is_Xeon():
plt = 'intel64'
else:
plt = '32'
if l not in self._lib_mkl:
self._lib_mkl.insert(0, l)
system_info.__init__(
self,
default_lib_dirs=[os.path.join(mklroot, 'lib', plt)],
default_include_dirs=[os.path.join(mklroot, 'include')])
def calc_info(self):
lib_dirs = self.get_lib_dirs()
incl_dirs = self.get_include_dirs()
mkl_libs = self.get_libs('mkl_libs', self._lib_mkl)
info = self.check_libs2(lib_dirs, mkl_libs)
if info is None:
return
dict_append(info,
define_macros=[('SCIPY_MKL_H', None),
('HAVE_CBLAS', None)],
include_dirs=incl_dirs)
if sys.platform == 'win32':
pass # win32 has no pthread library
else:
dict_append(info, libraries=['pthread'])
self.set_info(**info)
class lapack_mkl_info(mkl_info):
def calc_info(self):
mkl = get_info('mkl')
if not mkl:
return
if sys.platform == 'win32':
lapack_libs = self.get_libs('lapack_libs', ['mkl_lapack'])
else:
lapack_libs = self.get_libs('lapack_libs',
['mkl_lapack32', 'mkl_lapack64'])
info = {'libraries': lapack_libs}
dict_append(info, **mkl)
self.set_info(**info)
class blas_mkl_info(mkl_info):
pass
class atlas_info(system_info):
section = 'atlas'
dir_env_var = 'ATLAS'
_lib_names = ['f77blas', 'cblas']
if sys.platform[:7] == 'freebsd':
_lib_atlas = ['atlas_r']
_lib_lapack = ['alapack_r']
else:
_lib_atlas = ['atlas']
_lib_lapack = ['lapack']
notfounderror = AtlasNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend(self.combine_paths(d, ['atlas*', 'ATLAS*',
'sse', '3dnow', 'sse2']) + [d])
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
lib_dirs = self.get_lib_dirs()
info = {}
atlas_libs = self.get_libs('atlas_libs',
self._lib_names + self._lib_atlas)
lapack_libs = self.get_libs('lapack_libs', self._lib_lapack)
atlas = None
lapack = None
atlas_1 = None
for d in lib_dirs:
atlas = self.check_libs2(d, atlas_libs, [])
lapack_atlas = self.check_libs2(d, ['lapack_atlas'], [])
if atlas is not None:
lib_dirs2 = [d] + self.combine_paths(d, ['atlas*', 'ATLAS*'])
lapack = self.check_libs2(lib_dirs2, lapack_libs, [])
if lapack is not None:
break
if atlas:
atlas_1 = atlas
log.info(self.__class__)
if atlas is None:
atlas = atlas_1
if atlas is None:
return
include_dirs = self.get_include_dirs()
h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
h = h[0]
if h:
h = os.path.dirname(h)
dict_append(info, include_dirs=[h])
info['language'] = 'c'
if lapack is not None:
dict_append(info, **lapack)
dict_append(info, **atlas)
elif 'lapack_atlas' in atlas['libraries']:
dict_append(info, **atlas)
dict_append(info,
define_macros=[('ATLAS_WITH_LAPACK_ATLAS', None)])
self.set_info(**info)
return
else:
dict_append(info, **atlas)
dict_append(info, define_macros=[('ATLAS_WITHOUT_LAPACK', None)])
message = """
*********************************************************************
Could not find lapack library within the ATLAS installation.
*********************************************************************
"""
warnings.warn(message)
self.set_info(**info)
return
# Check if lapack library is complete, only warn if it is not.
lapack_dir = lapack['library_dirs'][0]
lapack_name = lapack['libraries'][0]
lapack_lib = None
lib_prefixes = ['lib']
if sys.platform == 'win32':
lib_prefixes.append('')
for e in self.library_extensions():
for prefix in lib_prefixes:
fn = os.path.join(lapack_dir, prefix + lapack_name + e)
if os.path.exists(fn):
lapack_lib = fn
break
if lapack_lib:
break
if lapack_lib is not None:
sz = os.stat(lapack_lib)[6]
if sz <= 4000 * 1024:
message = """
*********************************************************************
Lapack library (from ATLAS) is probably incomplete:
size of %s is %sk (expected >4000k)
Follow the instructions in the KNOWN PROBLEMS section of the file
numpy/INSTALL.txt.
*********************************************************************
""" % (lapack_lib, sz / 1024)
warnings.warn(message)
else:
info['language'] = 'f77'
atlas_version, atlas_extra_info = get_atlas_version(**atlas)
dict_append(info, **atlas_extra_info)
self.set_info(**info)
class atlas_blas_info(atlas_info):
_lib_names = ['f77blas', 'cblas']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
info = {}
atlas_libs = self.get_libs('atlas_libs',
self._lib_names + self._lib_atlas)
atlas = self.check_libs2(lib_dirs, atlas_libs, [])
if atlas is None:
return
include_dirs = self.get_include_dirs()
h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
h = h[0]
if h:
h = os.path.dirname(h)
dict_append(info, include_dirs=[h])
info['language'] = 'c'
info['define_macros'] = [('HAVE_CBLAS', None)]
atlas_version, atlas_extra_info = get_atlas_version(**atlas)
dict_append(atlas, **atlas_extra_info)
dict_append(info, **atlas)
self.set_info(**info)
return
class atlas_threads_info(atlas_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['ptf77blas', 'ptcblas']
class atlas_blas_threads_info(atlas_blas_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['ptf77blas', 'ptcblas']
class lapack_atlas_info(atlas_info):
_lib_names = ['lapack_atlas'] + atlas_info._lib_names
class lapack_atlas_threads_info(atlas_threads_info):
_lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names
class atlas_3_10_info(atlas_info):
_lib_names = ['satlas']
_lib_atlas = _lib_names
_lib_lapack = _lib_names
class atlas_3_10_blas_info(atlas_3_10_info):
_lib_names = ['satlas']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
info = {}
atlas_libs = self.get_libs('atlas_libs',
self._lib_names)
atlas = self.check_libs2(lib_dirs, atlas_libs, [])
if atlas is None:
return
include_dirs = self.get_include_dirs()
h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
h = h[0]
if h:
h = os.path.dirname(h)
dict_append(info, include_dirs=[h])
info['language'] = 'c'
info['define_macros'] = [('HAVE_CBLAS', None)]
atlas_version, atlas_extra_info = get_atlas_version(**atlas)
dict_append(atlas, **atlas_extra_info)
dict_append(info, **atlas)
self.set_info(**info)
return
class atlas_3_10_threads_info(atlas_3_10_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['tatlas']
_lib_atlas = _lib_names
_lib_lapack = _lib_names
class atlas_3_10_blas_threads_info(atlas_3_10_blas_info):
dir_env_var = ['PTATLAS', 'ATLAS']
_lib_names = ['tatlas']
class lapack_atlas_3_10_info(atlas_3_10_info):
pass
class lapack_atlas_3_10_threads_info(atlas_3_10_threads_info):
pass
class lapack_info(system_info):
section = 'lapack'
dir_env_var = 'LAPACK'
_lib_names = ['lapack']
notfounderror = LapackNotFoundError
def calc_info(self):
lib_dirs = self.get_lib_dirs()
lapack_libs = self.get_libs('lapack_libs', self._lib_names)
info = self.check_libs(lib_dirs, lapack_libs, [])
if info is None:
return
info['language'] = 'f77'
self.set_info(**info)
class lapack_src_info(system_info):
section = 'lapack_src'
dir_env_var = 'LAPACK_SRC'
notfounderror = LapackSrcNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['LAPACK*/SRC', 'SRC']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'dgesv.f')):
src_dir = d
break
if not src_dir:
#XXX: Get sources from netlib. Maybe ask first.
return
# The following is extracted from LAPACK-3.0/SRC/Makefile.
# Added missing names from lapack-lite-3.1.1/SRC/Makefile
# while keeping removed names for Lapack-3.0 compatibility.
allaux = '''
ilaenv ieeeck lsame lsamen xerbla
iparmq
''' # *.f
laux = '''
bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1
laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2
lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre
larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4
lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1
lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf
stebz stedc steqr sterf
larra larrc larrd larr larrk larrj larrr laneg laisnan isnan
lazq3 lazq4
''' # [s|d]*.f
lasrc = '''
gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak
gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv
gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2
geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd
gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal
gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd
ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein
hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0
lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb
lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp
laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv
lartv larz larzb larzt laswp lasyf latbs latdf latps latrd
latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv
pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2
potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri
pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs
spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv
sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2
tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs
trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs
tzrqf tzrzf
lacn2 lahr2 stemr laqr0 laqr1 laqr2 laqr3 laqr4 laqr5
''' # [s|c|d|z]*.f
sd_lasrc = '''
laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l
org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr
orm2l orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3
ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx
sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd
stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd
sygvx sytd2 sytrd
''' # [s|d]*.f
cz_lasrc = '''
bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev
heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv
hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd
hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf
hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7
laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe
laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv
spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq
ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2
unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr
''' # [c|z]*.f
#######
sclaux = laux + ' econd ' # s*.f
dzlaux = laux + ' secnd ' # d*.f
slasrc = lasrc + sd_lasrc # s*.f
dlasrc = lasrc + sd_lasrc # d*.f
clasrc = lasrc + cz_lasrc + ' srot srscl ' # c*.f
zlasrc = lasrc + cz_lasrc + ' drot drscl ' # z*.f
oclasrc = ' icmax1 scsum1 ' # *.f
ozlasrc = ' izmax1 dzsum1 ' # *.f
sources = ['s%s.f' % f for f in (sclaux + slasrc).split()] \
+ ['d%s.f' % f for f in (dzlaux + dlasrc).split()] \
+ ['c%s.f' % f for f in (clasrc).split()] \
+ ['z%s.f' % f for f in (zlasrc).split()] \
+ ['%s.f' % f for f in (allaux + oclasrc + ozlasrc).split()]
sources = [os.path.join(src_dir, f) for f in sources]
# Lapack 3.1:
src_dir2 = os.path.join(src_dir, '..', 'INSTALL')
sources += [os.path.join(src_dir2, p + 'lamch.f') for p in 'sdcz']
# Lapack 3.2.1:
sources += [os.path.join(src_dir, p + 'larfp.f') for p in 'sdcz']
sources += [os.path.join(src_dir, 'ila' + p + 'lr.f') for p in 'sdcz']
sources += [os.path.join(src_dir, 'ila' + p + 'lc.f') for p in 'sdcz']
# Should we check here actual existence of source files?
# Yes, the file listing is different between 3.0 and 3.1
# versions.
sources = [f for f in sources if os.path.isfile(f)]
info = {'sources': sources, 'language': 'f77'}
self.set_info(**info)
atlas_version_c_text = r'''
/* This file is generated from numpy/distutils/system_info.py */
void ATL_buildinfo(void);
int main(void) {
ATL_buildinfo();
return 0;
}
'''
_cached_atlas_version = {}
def get_atlas_version(**config):
libraries = config.get('libraries', [])
library_dirs = config.get('library_dirs', [])
key = (tuple(libraries), tuple(library_dirs))
if key in _cached_atlas_version:
return _cached_atlas_version[key]
c = cmd_config(Distribution())
atlas_version = None
info = {}
try:
s, o = c.get_output(atlas_version_c_text,
libraries=libraries, library_dirs=library_dirs,
use_tee=(system_info.verbosity > 0))
if s and re.search(r'undefined reference to `_gfortran', o, re.M):
s, o = c.get_output(atlas_version_c_text,
libraries=libraries + ['gfortran'],
library_dirs=library_dirs,
use_tee=(system_info.verbosity > 0))
if not s:
warnings.warn("""
*****************************************************
Linkage with ATLAS requires gfortran. Use
python setup.py config_fc --fcompiler=gnu95 ...
when building extension libraries that use ATLAS.
Make sure that -lgfortran is used for C++ extensions.
*****************************************************
""")
dict_append(info, language='f90',
define_macros=[('ATLAS_REQUIRES_GFORTRAN', None)])
except Exception: # failed to get version from file -- maybe on Windows
# look at directory name
for o in library_dirs:
m = re.search(r'ATLAS_(?P<version>\d+[.]\d+[.]\d+)_', o)
if m:
atlas_version = m.group('version')
if atlas_version is not None:
break
# final choice --- look at ATLAS_VERSION environment
# variable
if atlas_version is None:
atlas_version = os.environ.get('ATLAS_VERSION', None)
if atlas_version:
dict_append(info, define_macros=[(
'ATLAS_INFO', '"\\"%s\\""' % atlas_version)
])
else:
dict_append(info, define_macros=[('NO_ATLAS_INFO', -1)])
return atlas_version or '?.?.?', info
if not s:
m = re.search(r'ATLAS version (?P<version>\d+[.]\d+[.]\d+)', o)
if m:
atlas_version = m.group('version')
if atlas_version is None:
if re.search(r'undefined symbol: ATL_buildinfo', o, re.M):
atlas_version = '3.2.1_pre3.3.6'
else:
log.info('Status: %d', s)
log.info('Output: %s', o)
if atlas_version == '3.2.1_pre3.3.6':
dict_append(info, define_macros=[('NO_ATLAS_INFO', -2)])
else:
dict_append(info, define_macros=[(
'ATLAS_INFO', '"\\"%s\\""' % atlas_version)
])
result = _cached_atlas_version[key] = atlas_version, info
return result
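# Illustrative usage (not in the original file; paths and names hypothetical):
#
#     version, extra_info = get_atlas_version(
#         libraries=['f77blas', 'cblas', 'atlas'],
#         library_dirs=['/usr/lib/atlas'])
#     # version is e.g. '3.10.2' (or '?.?.?' if detection failed) and
#     # extra_info carries define_macros such as ('ATLAS_INFO', ...) or
#     # ('NO_ATLAS_INFO', ...); results are cached per (libraries, dirs) key.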
class lapack_opt_info(system_info):
notfounderror = LapackNotFoundError
def calc_info(self):
openblas_info = get_info('openblas_lapack')
if openblas_info:
self.set_info(**openblas_info)
return
lapack_mkl_info = get_info('lapack_mkl')
if lapack_mkl_info:
self.set_info(**lapack_mkl_info)
return
atlas_info = get_info('atlas_3_10_threads')
if not atlas_info:
atlas_info = get_info('atlas_3_10')
if not atlas_info:
atlas_info = get_info('atlas_threads')
if not atlas_info:
atlas_info = get_info('atlas')
if sys.platform == 'darwin' and not atlas_info:
# Use the system lapack from Accelerate or vecLib under OSX
args = []
link_args = []
if get_platform()[-4:] == 'i386' or 'intel' in get_platform() or \
'x86_64' in get_platform() or \
'i386' in platform.platform():
intel = 1
else:
intel = 0
if os.path.exists('/System/Library/Frameworks'
'/Accelerate.framework/'):
if intel:
args.extend(['-msse3'])
else:
args.extend(['-faltivec'])
link_args.extend(['-Wl,-framework', '-Wl,Accelerate'])
elif os.path.exists('/System/Library/Frameworks'
'/vecLib.framework/'):
if intel:
args.extend(['-msse3'])
else:
args.extend(['-faltivec'])
link_args.extend(['-Wl,-framework', '-Wl,vecLib'])
if args:
self.set_info(extra_compile_args=args,
extra_link_args=link_args,
define_macros=[('NO_ATLAS_INFO', 3),
('HAVE_CBLAS', None)])
return
need_lapack = 0
need_blas = 0
info = {}
if atlas_info:
l = atlas_info.get('define_macros', [])
if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \
or ('ATLAS_WITHOUT_LAPACK', None) in l:
need_lapack = 1
info = atlas_info
else:
warnings.warn(AtlasNotFoundError.__doc__)
need_blas = 1
need_lapack = 1
dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
if need_lapack:
lapack_info = get_info('lapack')
#lapack_info = {} ## uncomment for testing
if lapack_info:
dict_append(info, **lapack_info)
else:
warnings.warn(LapackNotFoundError.__doc__)
lapack_src_info = get_info('lapack_src')
if not lapack_src_info:
warnings.warn(LapackSrcNotFoundError.__doc__)
return
dict_append(info, libraries=[('flapack_src', lapack_src_info)])
if need_blas:
blas_info = get_info('blas')
if blas_info:
dict_append(info, **blas_info)
else:
warnings.warn(BlasNotFoundError.__doc__)
blas_src_info = get_info('blas_src')
if not blas_src_info:
warnings.warn(BlasSrcNotFoundError.__doc__)
return
dict_append(info, libraries=[('fblas_src', blas_src_info)])
self.set_info(**info)
return
class blas_opt_info(system_info):
notfounderror = BlasNotFoundError
def calc_info(self):
blas_mkl_info = get_info('blas_mkl')
if blas_mkl_info:
self.set_info(**blas_mkl_info)
return
openblas_info = get_info('openblas')
if openblas_info:
self.set_info(**openblas_info)
return
atlas_info = get_info('atlas_3_10_blas_threads')
if not atlas_info:
atlas_info = get_info('atlas_3_10_blas')
if not atlas_info:
atlas_info = get_info('atlas_blas_threads')
if not atlas_info:
atlas_info = get_info('atlas_blas')
if sys.platform == 'darwin' and not atlas_info:
# Use the system BLAS from Accelerate or vecLib under OSX
args = []
link_args = []
if get_platform()[-4:] == 'i386' or 'intel' in get_platform() or \
'x86_64' in get_platform() or \
'i386' in platform.platform():
intel = 1
else:
intel = 0
if os.path.exists('/System/Library/Frameworks'
'/Accelerate.framework/'):
if intel:
args.extend(['-msse3'])
else:
args.extend(['-faltivec'])
args.extend([
'-I/System/Library/Frameworks/vecLib.framework/Headers'])
link_args.extend(['-Wl,-framework', '-Wl,Accelerate'])
elif os.path.exists('/System/Library/Frameworks'
'/vecLib.framework/'):
if intel:
args.extend(['-msse3'])
else:
args.extend(['-faltivec'])
args.extend([
'-I/System/Library/Frameworks/vecLib.framework/Headers'])
link_args.extend(['-Wl,-framework', '-Wl,vecLib'])
if args:
self.set_info(extra_compile_args=args,
extra_link_args=link_args,
define_macros=[('NO_ATLAS_INFO', 3),
('HAVE_CBLAS', None)])
return
need_blas = 0
info = {}
if atlas_info:
info = atlas_info
else:
warnings.warn(AtlasNotFoundError.__doc__)
need_blas = 1
dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
if need_blas:
blas_info = get_info('blas')
if blas_info:
dict_append(info, **blas_info)
else:
warnings.warn(BlasNotFoundError.__doc__)
blas_src_info = get_info('blas_src')
if not blas_src_info:
warnings.warn(BlasSrcNotFoundError.__doc__)
return
dict_append(info, libraries=[('fblas_src', blas_src_info)])
self.set_info(**info)
return
class blas_info(system_info):
section = 'blas'
dir_env_var = 'BLAS'
_lib_names = ['blas']
notfounderror = BlasNotFoundError
def calc_info(self):
lib_dirs = self.get_lib_dirs()
blas_libs = self.get_libs('blas_libs', self._lib_names)
info = self.check_libs(lib_dirs, blas_libs, [])
if info is None:
return
if platform.system() == 'Windows':
# The check for windows is needed because has_cblas uses the
# same compiler that was used to compile Python and msvc is
# often not installed when mingw is being used. This rough
# treatment is not desirable, but windows is tricky.
info['language'] = 'f77' # XXX: is it generally true?
else:
lib = self.has_cblas(info)
if lib is not None:
info['language'] = 'c'
info['libraries'] = [lib]
info['define_macros'] = [('HAVE_CBLAS', None)]
self.set_info(**info)
def has_cblas(self, info):
# primitive cblas check by looking for the header and trying to link
# cblas or blas
res = False
c = distutils.ccompiler.new_compiler()
tmpdir = tempfile.mkdtemp()
s = """#include <cblas.h>
int main(int argc, const char *argv[])
{
double a[4] = {1,2,3,4};
double b[4] = {5,6,7,8};
return cblas_ddot(4, a, 1, b, 1) > 10;
}"""
src = os.path.join(tmpdir, 'source.c')
try:
with open(src, 'wt') as f:
f.write(s)
try:
# check we can compile (find headers)
obj = c.compile([src], output_dir=tmpdir,
include_dirs=self.get_include_dirs())
# check we can link (find library)
# some systems have separate cblas and blas libs. First
# check for cblas lib, and if not present check for blas lib.
try:
c.link_executable(obj, os.path.join(tmpdir, "a.out"),
libraries=["cblas"],
library_dirs=info['library_dirs'],
extra_postargs=info.get('extra_link_args', []))
res = "cblas"
except distutils.ccompiler.LinkError:
c.link_executable(obj, os.path.join(tmpdir, "a.out"),
libraries=["blas"],
library_dirs=info['library_dirs'],
extra_postargs=info.get('extra_link_args', []))
res = "blas"
except distutils.ccompiler.CompileError:
res = None
finally:
shutil.rmtree(tmpdir)
return res
class openblas_info(blas_info):
section = 'openblas'
dir_env_var = 'OPENBLAS'
_lib_names = ['openblas']
notfounderror = BlasNotFoundError
def check_embedded_lapack(self, info):
return True
def calc_info(self):
lib_dirs = self.get_lib_dirs()
openblas_libs = self.get_libs('libraries', self._lib_names)
if openblas_libs == self._lib_names: # backward compat with 1.8.0
openblas_libs = self.get_libs('openblas_libs', self._lib_names)
info = self.check_libs(lib_dirs, openblas_libs, [])
if info is None:
return
# Add extra info for OpenBLAS
extra_info = self.calc_extra_info()
dict_append(info, **extra_info)
if not self.check_embedded_lapack(info):
return
info['language'] = 'c'
info['define_macros'] = [('HAVE_CBLAS', None)]
self.set_info(**info)
class openblas_lapack_info(openblas_info):
section = 'openblas'
dir_env_var = 'OPENBLAS'
_lib_names = ['openblas']
notfounderror = BlasNotFoundError
def check_embedded_lapack(self, info):
res = False
c = distutils.ccompiler.new_compiler()
tmpdir = tempfile.mkdtemp()
s = """void zungqr();
int main(int argc, const char *argv[])
{
zungqr_();
return 0;
}"""
src = os.path.join(tmpdir, 'source.c')
out = os.path.join(tmpdir, 'a.out')
# Add the additional "extra" arguments
try:
extra_args = info['extra_link_args']
except KeyError:
extra_args = []
try:
with open(src, 'wt') as f:
f.write(s)
obj = c.compile([src], output_dir=tmpdir)
try:
c.link_executable(obj, out, libraries=info['libraries'],
library_dirs=info['library_dirs'],
extra_postargs=extra_args)
res = True
except distutils.ccompiler.LinkError:
res = False
finally:
shutil.rmtree(tmpdir)
return res
class blas_src_info(system_info):
section = 'blas_src'
dir_env_var = 'BLAS_SRC'
notfounderror = BlasSrcNotFoundError
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['blas']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'daxpy.f')):
src_dir = d
break
if not src_dir:
#XXX: Get sources from netlib. Maybe ask first.
return
blas1 = '''
caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot
dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2
srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg
dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax
snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap
scabs1
'''
blas2 = '''
cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv
chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv
dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv
sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger
stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc
zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2
ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv
'''
blas3 = '''
cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k
dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm
ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm
'''
sources = [os.path.join(src_dir, f + '.f') \
for f in (blas1 + blas2 + blas3).split()]
#XXX: should we check here actual existence of source files?
sources = [f for f in sources if os.path.isfile(f)]
info = {'sources': sources, 'language': 'f77'}
self.set_info(**info)
class x11_info(system_info):
section = 'x11'
notfounderror = X11NotFoundError
def __init__(self):
system_info.__init__(self,
default_lib_dirs=default_x11_lib_dirs,
default_include_dirs=default_x11_include_dirs)
def calc_info(self):
if sys.platform in ['win32']:
return
lib_dirs = self.get_lib_dirs()
include_dirs = self.get_include_dirs()
x11_libs = self.get_libs('x11_libs', ['X11'])
info = self.check_libs(lib_dirs, x11_libs, [])
if info is None:
return
inc_dir = None
for d in include_dirs:
if self.combine_paths(d, 'X11/X.h'):
inc_dir = d
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir])
self.set_info(**info)
class _numpy_info(system_info):
section = 'Numeric'
modulename = 'Numeric'
notfounderror = NumericNotFoundError
def __init__(self):
include_dirs = []
try:
module = __import__(self.modulename)
prefix = []
for name in module.__file__.split(os.sep):
if name == 'lib':
break
prefix.append(name)
# Ask numpy for its own include path before attempting
# anything else
try:
include_dirs.append(getattr(module, 'get_include')())
except AttributeError:
pass
include_dirs.append(distutils.sysconfig.get_python_inc(
prefix=os.sep.join(prefix)))
except ImportError:
pass
py_incl_dir = distutils.sysconfig.get_python_inc()
include_dirs.append(py_incl_dir)
py_pincl_dir = distutils.sysconfig.get_python_inc(plat_specific=True)
if py_pincl_dir not in include_dirs:
include_dirs.append(py_pincl_dir)
for d in default_include_dirs:
d = os.path.join(d, os.path.basename(py_incl_dir))
if d not in include_dirs:
include_dirs.append(d)
system_info.__init__(self,
default_lib_dirs=[],
default_include_dirs=include_dirs)
def calc_info(self):
try:
module = __import__(self.modulename)
except ImportError:
return
info = {}
macros = []
for v in ['__version__', 'version']:
vrs = getattr(module, v, None)
if vrs is None:
continue
macros = [(self.modulename.upper() + '_VERSION',
'"\\"%s\\""' % (vrs)),
(self.modulename.upper(), None)]
break
dict_append(info, define_macros=macros)
include_dirs = self.get_include_dirs()
inc_dir = None
for d in include_dirs:
if self.combine_paths(d,
os.path.join(self.modulename,
'arrayobject.h')):
inc_dir = d
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir])
if info:
self.set_info(**info)
return
class numarray_info(_numpy_info):
section = 'numarray'
modulename = 'numarray'
class Numeric_info(_numpy_info):
section = 'Numeric'
modulename = 'Numeric'
class numpy_info(_numpy_info):
section = 'numpy'
modulename = 'numpy'
class numerix_info(system_info):
section = 'numerix'
def calc_info(self):
which = None, None
if os.getenv("NUMERIX"):
which = os.getenv("NUMERIX"), "environment var"
# If the NUMERIX environment variable is not set, default to numpy.
if which[0] is None:
which = "numpy", "defaulted"
try:
import numpy
which = "numpy", "defaulted"
except ImportError:
msg1 = str(get_exception())
try:
import Numeric
which = "numeric", "defaulted"
except ImportError:
msg2 = str(get_exception())
try:
import numarray
which = "numarray", "defaulted"
except ImportError:
msg3 = str(get_exception())
log.info(msg1)
log.info(msg2)
log.info(msg3)
which = which[0].strip().lower(), which[1]
if which[0] not in ["numeric", "numarray", "numpy"]:
raise ValueError("numerix selector must be either 'Numeric' "
"or 'numarray' or 'numpy' but the value obtained"
" from the %s was '%s'." % (which[1], which[0]))
os.environ['NUMERIX'] = which[0]
self.set_info(**get_info(which[0]))
class f2py_info(system_info):
def calc_info(self):
try:
import numpy.f2py as f2py
except ImportError:
return
f2py_dir = os.path.join(os.path.dirname(f2py.__file__), 'src')
self.set_info(sources=[os.path.join(f2py_dir, 'fortranobject.c')],
include_dirs=[f2py_dir])
return
class boost_python_info(system_info):
section = 'boost_python'
dir_env_var = 'BOOST'
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['boost*']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'libs', 'python', 'src',
'module.cpp')):
src_dir = d
break
if not src_dir:
return
py_incl_dirs = [distutils.sysconfig.get_python_inc()]
py_pincl_dir = distutils.sysconfig.get_python_inc(plat_specific=True)
if py_pincl_dir not in py_incl_dirs:
py_incl_dirs.append(py_pincl_dir)
srcs_dir = os.path.join(src_dir, 'libs', 'python', 'src')
bpl_srcs = glob(os.path.join(srcs_dir, '*.cpp'))
bpl_srcs += glob(os.path.join(srcs_dir, '*', '*.cpp'))
info = {'libraries': [('boost_python_src',
{'include_dirs': [src_dir] + py_incl_dirs,
'sources':bpl_srcs}
)],
'include_dirs': [src_dir],
}
if info:
self.set_info(**info)
return
class agg2_info(system_info):
section = 'agg2'
dir_env_var = 'AGG2'
def get_paths(self, section, key):
pre_dirs = system_info.get_paths(self, section, key)
dirs = []
for d in pre_dirs:
dirs.extend([d] + self.combine_paths(d, ['agg2*']))
return [d for d in dirs if os.path.isdir(d)]
def calc_info(self):
src_dirs = self.get_src_dirs()
src_dir = ''
for d in src_dirs:
if os.path.isfile(os.path.join(d, 'src', 'agg_affine_matrix.cpp')):
src_dir = d
break
if not src_dir:
return
if sys.platform == 'win32':
agg2_srcs = glob(os.path.join(src_dir, 'src', 'platform',
'win32', 'agg_win32_bmp.cpp'))
else:
agg2_srcs = glob(os.path.join(src_dir, 'src', '*.cpp'))
agg2_srcs += [os.path.join(src_dir, 'src', 'platform',
'X11',
'agg_platform_support.cpp')]
info = {'libraries':
[('agg2_src',
{'sources': agg2_srcs,
'include_dirs': [os.path.join(src_dir, 'include')],
}
)],
'include_dirs': [os.path.join(src_dir, 'include')],
}
if info:
self.set_info(**info)
return
class _pkg_config_info(system_info):
section = None
config_env_var = 'PKG_CONFIG'
default_config_exe = 'pkg-config'
append_config_exe = ''
version_macro_name = None
release_macro_name = None
version_flag = '--modversion'
cflags_flag = '--cflags'
def get_config_exe(self):
if self.config_env_var in os.environ:
return os.environ[self.config_env_var]
return self.default_config_exe
def get_config_output(self, config_exe, option):
cmd = config_exe + ' ' + self.append_config_exe + ' ' + option
s, o = exec_command(cmd, use_tee=0)
if not s:
return o
def calc_info(self):
config_exe = find_executable(self.get_config_exe())
if not config_exe:
log.warn('File not found: %s. Cannot determine %s info.' \
% (self.get_config_exe(), self.section))
return
info = {}
macros = []
libraries = []
library_dirs = []
include_dirs = []
extra_link_args = []
extra_compile_args = []
version = self.get_config_output(config_exe, self.version_flag)
if version:
macros.append((self.__class__.__name__.split('.')[-1].upper(),
'"\\"%s\\""' % (version)))
if self.version_macro_name:
macros.append((self.version_macro_name + '_%s'
% (version.replace('.', '_')), None))
if self.release_macro_name:
release = self.get_config_output(config_exe, '--release')
if release:
macros.append((self.release_macro_name + '_%s'
% (release.replace('.', '_')), None))
opts = self.get_config_output(config_exe, '--libs')
if opts:
for opt in opts.split():
if opt[:2] == '-l':
libraries.append(opt[2:])
elif opt[:2] == '-L':
library_dirs.append(opt[2:])
else:
extra_link_args.append(opt)
opts = self.get_config_output(config_exe, self.cflags_flag)
if opts:
for opt in opts.split():
if opt[:2] == '-I':
include_dirs.append(opt[2:])
elif opt[:2] == '-D':
if '=' in opt:
n, v = opt[2:].split('=')
macros.append((n, v))
else:
macros.append((opt[2:], None))
else:
extra_compile_args.append(opt)
if macros:
dict_append(info, define_macros=macros)
if libraries:
dict_append(info, libraries=libraries)
if library_dirs:
dict_append(info, library_dirs=library_dirs)
if include_dirs:
dict_append(info, include_dirs=include_dirs)
if extra_link_args:
dict_append(info, extra_link_args=extra_link_args)
if extra_compile_args:
dict_append(info, extra_compile_args=extra_compile_args)
if info:
self.set_info(**info)
return
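# Illustrative sketch (not in the original file): a new pkg-config backed
# wrapper only needs to name its package; e.g. a hypothetical libpng wrapper:
#
#     class png_info(_pkg_config_info):
#         section = 'png'
#         append_config_exe = 'libpng'
#         version_macro_name = 'PNG_VERSION'
#
# calc_info() then runs "pkg-config libpng --modversion", "--libs" and
# "--cflags" and translates the output into define_macros, libraries,
# library_dirs, include_dirs and extra_*_args.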
class wx_info(_pkg_config_info):
section = 'wx'
config_env_var = 'WX_CONFIG'
default_config_exe = 'wx-config'
append_config_exe = ''
version_macro_name = 'WX_VERSION'
release_macro_name = 'WX_RELEASE'
version_flag = '--version'
cflags_flag = '--cxxflags'
class gdk_pixbuf_xlib_2_info(_pkg_config_info):
section = 'gdk_pixbuf_xlib_2'
append_config_exe = 'gdk-pixbuf-xlib-2.0'
version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'
class gdk_pixbuf_2_info(_pkg_config_info):
section = 'gdk_pixbuf_2'
append_config_exe = 'gdk-pixbuf-2.0'
version_macro_name = 'GDK_PIXBUF_VERSION'
class gdk_x11_2_info(_pkg_config_info):
section = 'gdk_x11_2'
append_config_exe = 'gdk-x11-2.0'
version_macro_name = 'GDK_X11_VERSION'
class gdk_2_info(_pkg_config_info):
section = 'gdk_2'
append_config_exe = 'gdk-2.0'
version_macro_name = 'GDK_VERSION'
class gdk_info(_pkg_config_info):
section = 'gdk'
append_config_exe = 'gdk'
version_macro_name = 'GDK_VERSION'
class gtkp_x11_2_info(_pkg_config_info):
section = 'gtkp_x11_2'
append_config_exe = 'gtk+-x11-2.0'
version_macro_name = 'GTK_X11_VERSION'
class gtkp_2_info(_pkg_config_info):
section = 'gtkp_2'
append_config_exe = 'gtk+-2.0'
version_macro_name = 'GTK_VERSION'
class xft_info(_pkg_config_info):
section = 'xft'
append_config_exe = 'xft'
version_macro_name = 'XFT_VERSION'
class freetype2_info(_pkg_config_info):
section = 'freetype2'
append_config_exe = 'freetype2'
version_macro_name = 'FREETYPE2_VERSION'
class amd_info(system_info):
section = 'amd'
dir_env_var = 'AMD'
_lib_names = ['amd']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
amd_libs = self.get_libs('amd_libs', self._lib_names)
info = self.check_libs(lib_dirs, amd_libs, [])
if info is None:
return
include_dirs = self.get_include_dirs()
inc_dir = None
for d in include_dirs:
p = self.combine_paths(d, 'amd.h')
if p:
inc_dir = os.path.dirname(p[0])
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir],
define_macros=[('SCIPY_AMD_H', None)],
swig_opts=['-I' + inc_dir])
self.set_info(**info)
return
class umfpack_info(system_info):
section = 'umfpack'
dir_env_var = 'UMFPACK'
notfounderror = UmfpackNotFoundError
_lib_names = ['umfpack']
def calc_info(self):
lib_dirs = self.get_lib_dirs()
umfpack_libs = self.get_libs('umfpack_libs', self._lib_names)
info = self.check_libs(lib_dirs, umfpack_libs, [])
if info is None:
return
include_dirs = self.get_include_dirs()
inc_dir = None
for d in include_dirs:
p = self.combine_paths(d, ['', 'umfpack'], 'umfpack.h')
if p:
inc_dir = os.path.dirname(p[0])
break
if inc_dir is not None:
dict_append(info, include_dirs=[inc_dir],
define_macros=[('SCIPY_UMFPACK_H', None)],
swig_opts=['-I' + inc_dir])
amd = get_info('amd')
dict_append(info, **get_info('amd'))
self.set_info(**info)
return
def combine_paths(*args, **kws):
""" Return a list of existing paths composed by all combinations of
items from arguments.
"""
r = []
for a in args:
if not a:
continue
if is_string(a):
a = [a]
r.append(a)
args = r
if not args:
return []
if len(args) == 1:
result = reduce(lambda a, b: a + b, map(glob, args[0]), [])
elif len(args) == 2:
result = []
for a0 in args[0]:
for a1 in args[1]:
result.extend(glob(os.path.join(a0, a1)))
else:
result = combine_paths(*(combine_paths(args[0], args[1]) + args[2:]))
verbosity = kws.get('verbosity', 1)
log.debug('(paths: %s)', ','.join(result))
return result
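# Illustrative example (not in the original file; paths hypothetical):
#
#     combine_paths('/usr/local', ['include', 'lib'], 'fftw3*')
#
# globs every combination, i.e. '/usr/local/include/fftw3*' and
# '/usr/local/lib/fftw3*', and returns only the paths that actually exist.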
language_map = {'c': 0, 'c++': 1, 'f77': 2, 'f90': 3}
inv_language_map = {0: 'c', 1: 'c++', 2: 'f77', 3: 'f90'}
def dict_append(d, **kws):
languages = []
for k, v in kws.items():
if k == 'language':
languages.append(v)
continue
if k in d:
if k in ['library_dirs', 'include_dirs',
'extra_compile_args', 'extra_link_args',
'runtime_library_dirs', 'define_macros']:
[d[k].append(vv) for vv in v if vv not in d[k]]
else:
d[k].extend(v)
else:
d[k] = v
if languages:
l = inv_language_map[max([language_map.get(l, 0) for l in languages])]
d['language'] = l
return
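# Illustrative example (not in the original file):
#
#     d = {'libraries': ['blas'], 'include_dirs': ['/usr/include']}
#     dict_append(d, libraries=['lapack'], include_dirs=['/usr/include'],
#                 language='f77')
#     # d['libraries']    -> ['blas', 'lapack']   (extended)
#     # d['include_dirs'] -> ['/usr/include']     (duplicates dropped)
#     # d['language']     -> 'f77'                (highest-ranked of the passed languages)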
def parseCmdLine(argv=(None,)):
import optparse
parser = optparse.OptionParser("usage: %prog [-v] [info objs]")
parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
default=False,
help='be verbose and print more messages')
opts, args = parser.parse_args(args=argv[1:])
return opts, args
def show_all(argv=None):
import inspect
if argv is None:
argv = sys.argv
opts, args = parseCmdLine(argv)
if opts.verbose:
log.set_threshold(log.DEBUG)
else:
log.set_threshold(log.INFO)
show_only = []
for n in args:
if n[-5:] != '_info':
n = n + '_info'
show_only.append(n)
show_all = not show_only
_gdict_ = globals().copy()
for name, c in _gdict_.items():
if not inspect.isclass(c):
continue
if not issubclass(c, system_info) or c is system_info:
continue
if not show_all:
if name not in show_only:
continue
del show_only[show_only.index(name)]
conf = c()
conf.verbosity = 2
r = conf.get_info()
if show_only:
log.info('Info classes not defined: %s', ','.join(show_only))
if __name__ == "__main__":
show_all()
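# Illustrative usage of this module as a script (invocation is hypothetical):
#
#     python numpy/distutils/system_info.py            # probe every *_info class
#     python numpy/distutils/system_info.py -v lapack_opt atlas
#
# Positional names have '_info' appended automatically; -v raises the log
# threshold to DEBUG.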
| mwiebe/numpy | numpy/distutils/system_info.py | Python | bsd-3-clause | 84,440 |
r"""
Diametrically point loaded 2-D disk with postprocessing and probes. See
:ref:`sec-primer`.
Find :math:`\ul{u}` such that:
.. math::
\int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
= 0
\;, \quad \forall \ul{v} \;,
where
.. math::
D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) +
\lambda \ \delta_{ij} \delta_{kl}
\;.
"""
from __future__ import absolute_import
from examples.linear_elasticity.its2D_1 import *
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.postprocess.probes_vtk import Probe
import os
from six.moves import range
def stress_strain(out, pb, state, extend=False):
"""
Calculate and output strain and stress for given displacements.
"""
from sfepy.base.base import Struct
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
ev = pb.evaluate
strain = ev('ev_cauchy_strain.2.Omega(u)', mode='el_avg')
stress = ev('ev_cauchy_stress.2.Omega(Asphalt.D, u)', mode='el_avg')
out['cauchy_strain'] = Struct(name='output_data', mode='cell',
data=strain, dofs=None)
out['cauchy_stress'] = Struct(name='output_data', mode='cell',
data=stress, dofs=None)
probe = Probe(out, pb.domain.mesh, probe_view=True)
ps0 = [[0.0, 0.0, 0.0], [ 0.0, 0.0, 0.0]]
ps1 = [[75.0, 0.0, 0.0], [ 0.0, 75.0, 0.0]]
n_point = 10
labels = ['%s -> %s' % (p0, p1) for p0, p1 in zip(ps0, ps1)]
probes = []
for ip in range(len(ps0)):
p0, p1 = ps0[ip], ps1[ip]
probes.append('line%d' % ip)
probe.add_line_probe('line%d' % ip, p0, p1, n_point)
for ip, label in zip(probes, labels):
fig = plt.figure()
plt.clf()
fig.subplots_adjust(hspace=0.4)
plt.subplot(311)
pars, vals = probe(ip, 'u')
for ic in range(vals.shape[1] - 1):
plt.plot(pars, vals[:,ic], label=r'$u_{%d}$' % (ic + 1),
lw=1, ls='-', marker='+', ms=3)
plt.ylabel('displacements')
plt.xlabel('probe %s' % label, fontsize=8)
plt.legend(loc='best', prop=fm.FontProperties(size=10))
sym_labels = ['11', '22', '12']
plt.subplot(312)
pars, vals = probe(ip, 'cauchy_strain')
for ii in range(vals.shape[1]):
plt.plot(pars, vals[:, ii], label=r'$e_{%s}$' % sym_labels[ii],
lw=1, ls='-', marker='+', ms=3)
plt.ylabel('Cauchy strain')
plt.xlabel('probe %s' % label, fontsize=8)
plt.legend(loc='best', prop=fm.FontProperties(size=8))
plt.subplot(313)
pars, vals = probe(ip, 'cauchy_stress')
for ii in range(vals.shape[1]):
plt.plot(pars, vals[:, ii], label=r'$\sigma_{%s}$' % sym_labels[ii],
lw=1, ls='-', marker='+', ms=3)
plt.ylabel('Cauchy stress')
plt.xlabel('probe %s' % label, fontsize=8)
plt.legend(loc='best', prop=fm.FontProperties(size=8))
opts = pb.conf.options
filename_results = os.path.join(opts.get('output_dir'),
'its2D_probe_%s.png' % ip)
fig.savefig(filename_results)
return out
materials['Asphalt'][0].update({'D' : stiffness_from_youngpoisson(2, young, poisson)})
options.update({
'post_process_hook' : 'stress_strain',
})
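# Illustrative note (not part of the original example): 'stress_strain' is hooked
# in through options['post_process_hook'], so it runs after the solve, adds the
# strain/stress fields to the output and saves the probe figures. Running the
# example (the command is an assumption and depends on the sfepy version):
#
#     python simple.py examples/linear_elasticity/its2D_5.py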
| rc/sfepy | examples/linear_elasticity/its2D_5.py | Python | bsd-3-clause | 3,399 |
#!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
from django.core.management import execute_from_command_line
from django.conf import settings, global_settings as default_settings
from os.path import dirname, realpath, abspath
# Give feedback on used versions
sys.stderr.write('Using Python version {0} from {1}\n'.format(sys.version[:5], sys.executable))
sys.stderr.write('Using Django version {0} from {1}\n'.format(
django.get_version(),
dirname(abspath(django.__file__)))
)
if not settings.configured:
if django.VERSION >= (1, 8):
template_settings = dict(
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': (),
'OPTIONS': {
'loaders': (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
),
'context_processors': (
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.request',
'django.template.context_processors.static',
'django.contrib.messages.context_processors.messages',
'django.contrib.auth.context_processors.auth',
),
},
},
]
)
else:
template_settings = dict(
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
'django.template.loaders.filesystem.Loader',
),
TEMPLATE_CONTEXT_PROCESSORS = list(default_settings.TEMPLATE_CONTEXT_PROCESSORS) + [
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
],
)
settings.configure(
DEBUG=False,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:'
},
'secondary': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:'
}
},
TEST_RUNNER = 'django.test.runner.DiscoverRunner' if django.VERSION >= (1, 7) else 'django.test.simple.DjangoTestSuiteRunner',
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'polymorphic',
),
MIDDLEWARE_CLASSES = (),
SITE_ID = 3,
**template_settings
)
DEFAULT_TEST_APPS = [
'polymorphic',
]
def runtests():
other_args = list(filter(lambda arg: arg.startswith('-'), sys.argv[1:]))
test_apps = list(filter(lambda arg: not arg.startswith('-'), sys.argv[1:])) or DEFAULT_TEST_APPS
argv = sys.argv[:1] + ['test', '--traceback'] + other_args + test_apps
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
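# Illustrative invocations (not part of the original script):
#
#     python runtests.py                              # runs the default 'polymorphic' app tests
#     python runtests.py --verbosity=2 polymorphic
#
# Arguments starting with '-' are passed through to Django's test command;
# everything else is treated as an app label.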
| pombredanne/django_polymorphic | runtests.py | Python | bsd-3-clause | 3,374 |
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('',
# serve static files
(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, 'show_indexes': True }),
# reverse url lookups
# (r'^', include('djangobaselibrary.sample.urls')),
)
| Almad/rpgrules | tests/example_project/urls.py | Python | bsd-3-clause | 304 |
#!/usr/bin/python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import types
import pyauto_functional # Must be imported before pyauto
import pyauto
class BrowserTest(pyauto.PyUITest):
"""TestCase for Browser info."""
def Debug(self):
"""Test method for experimentation.
This method will not run automatically.
"""
import pprint
pp = pprint.PrettyPrinter(indent=2)
while True:
raw_input('Hit <enter> to dump info.. ')
info = self.GetBrowserInfo()
pp.pprint(info)
def _GetUniqProcesses(self, total_tabs, renderer_processes):
""" Returns a count of uniq processes of opened tabs
Args:
total_tabs: opened tabs count
renderer_processes: opened renderers info data dictionary
"""
pid_list = []
for tab_index in range(total_tabs):
pid = renderer_processes[tab_index]['renderer_pid']
if pid not in pid_list:
pid_list.append(pid)
return len(pid_list)
def _VerifyUniqueRendererProcesses(self, browser_info):
"""Verify that each tab has a unique renderer process.
This cannot be used for large number of renderers since there actually is
a cap, depending on memory on the machine.
Args:
browser_info: browser info data dictionary as returned by GetBrowserInfo()
"""
seen_pids = {} # lookup table of PIDs we've seen
for window in browser_info['windows']:
for tab in window['tabs']:
renderer_pid = tab['renderer_pid']
self.assertEqual(types.IntType, type(renderer_pid))
# Verify unique renderer pid
self.assertFalse(renderer_pid in seen_pids, 'renderer pid not unique')
seen_pids[renderer_pid] = True
def testBasics(self):
"""Verify basic browser info at startup."""
url = self.GetFileURLForPath(os.path.join(self.DataDir(), 'title2.html'))
self.NavigateToURL(url)
info = self.GetBrowserInfo()
# Verify valid version string
version_string = info['properties']['ChromeVersion']
self.assertTrue(re.match(r'\d+\.\d+\.\d+\.\d+', version_string))
# Verify browser process
self.assertEqual(types.IntType, type(info['browser_pid']))
self.assertEqual(1, len(info['windows'])) # one window
self.assertEqual(1, len(info['windows'][0]['tabs'])) # one tab
self.assertEqual(0, info['windows'][0]['selected_tab']) # 1st tab selected
self.assertEqual(url, info['windows'][0]['tabs'][0]['url'])
self.assertFalse(info['windows'][0]['fullscreen']) # not fullscreen
self._VerifyUniqueRendererProcesses(info)
def testProcessesForMultipleWindowsAndTabs(self):
"""Verify processes for multiple windows and tabs"""
url = self.GetFileURLForPath(os.path.join(self.DataDir(), 'title2.html'))
self.NavigateToURL(url)
for _ in range(2):
self.AppendTab(pyauto.GURL(url))
for windex in range(1, 3): # 2 new windows
self.OpenNewBrowserWindow(True)
self.NavigateToURL(url, windex, 0)
self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW) # incognito window
self.NavigateToURL(url, 3, 0)
self._VerifyUniqueRendererProcesses(self.GetBrowserInfo())
def testWindowResize(self):
"""Verify window resizing and persistence after restart."""
def _VerifySize(x, y, width, height):
info = self.GetBrowserInfo()
self.assertEqual(x, info['windows'][0]['x'])
self.assertEqual(y, info['windows'][0]['y'])
self.assertEqual(width, info['windows'][0]['width'])
self.assertEqual(height, info['windows'][0]['height'])
self.SetWindowDimensions(x=20, y=40, width=600, height=300)
_VerifySize(20, 40, 600, 300)
self.RestartBrowser(clear_profile=False)
_VerifySize(20, 40, 600, 300)
def testCanLoadFlash(self):
"""Verify that we can play Flash.
We merely check that the flash process kicks in.
"""
flash_url = self.GetFileURLForPath(os.path.join(self.DataDir(),
'plugin', 'flash.swf'))
self.NavigateToURL(flash_url)
child_processes = self.GetBrowserInfo()['child_processes']
self.assertTrue([x for x in child_processes
if x['type'] == 'Plug-in' and x['name'] == 'Shockwave Flash'])
def _GetFlashProcessesInfo(self):
"""Get info about flash processes, if any."""
return [x for x in self.GetBrowserInfo()['child_processes']
if x['type'] == 'Plug-in' and x['name'] == 'Shockwave Flash']
def testSingleFlashPluginProcess(self):
"""Verify there's only one flash plugin process shared across all uses."""
flash_url = self.GetFileURLForPath(os.path.join(self.DataDir(),
'plugin', 'flash.swf'))
self.NavigateToURL(flash_url)
for _ in range(2):
self.AppendTab(pyauto.GURL(flash_url))
# Open flash in new window
self.OpenNewBrowserWindow(True)
self.NavigateToURL(flash_url, 1, 0)
# Open flash in new incognito window
self.RunCommand(pyauto.IDC_NEW_INCOGNITO_WINDOW)
self.NavigateToURL(flash_url, 1, 0)
# Verify there's only 1 flash process
self.assertEqual(1, len(self._GetFlashProcessesInfo()))
def testFlashLoadsAfterKill(self):
"""Verify that Flash process reloads after crashing (or being killed)."""
flash_url = self.GetFileURLForPath(os.path.join(self.DataDir(),
'plugin', 'flash.swf'))
self.NavigateToURL(flash_url)
flash_process_id1 = self._GetFlashProcessesInfo()[0]['pid']
self.Kill(flash_process_id1)
self.ReloadActiveTab()
flash_processes = self._GetFlashProcessesInfo()
self.assertEqual(1, len(flash_processes))
self.assertNotEqual(flash_process_id1, flash_processes[0]['pid'])
def testMaxProcess(self):
"""Verify that opening 100 tabs doesn't create 100 child processes"""
total_tabs = 100
test_url = self.GetFileURLForDataPath('english_page.html')
# Opening tabs
for tab_index in range(total_tabs - 1):
self.AppendTab(pyauto.GURL(test_url))
tabs = self.GetBrowserInfo()['windows'][0]['tabs']
# Count how many distinct renderer processes back the opened tabs.
unique_renderers = self._GetUniqProcesses(len(tabs), tabs)
# Verify that opening new tabs stops creating new renderer processes once
# Chrome reaches its maximum process limit.
if len(tabs) > unique_renderers:
return
# If every one of the 100 tabs got its own renderer process, the test fails.
self.fail(msg='Got 100 renderer processes')
def testKillAndReloadRenderer(self):
"""Verify that reloading of renderer is possible,
after renderer is killed"""
test_url = self.GetFileURLForDataPath('english_page.html')
self.NavigateToURL(test_url)
pid1 = self.GetBrowserInfo()['windows'][0]['tabs'][0]['renderer_pid']
self.KillRendererProcess(pid1)
self.ReloadActiveTab()
pid2 = self.GetBrowserInfo()['windows'][0]['tabs'][0]['renderer_pid']
self.assertNotEqual(pid1, pid2)
def testPopupSharesProcess(self):
"""Verify that parent tab and popup share a process."""
file_url = self.GetFileURLForPath(os.path.join(
self.DataDir(), 'popup_blocker', 'popup-window-open.html'))
self.NavigateToURL(file_url)
blocked_popups = self.GetBlockedPopupsInfo()
self.assertEqual(1, len(blocked_popups), msg='Popup not blocked')
self.UnblockAndLaunchBlockedPopup(0)
self.assertEquals(2, self.GetBrowserWindowCount())
parent_pid = self.GetBrowserInfo()['windows'][0]['tabs'][0]['renderer_pid']
popup_pid = self.GetBrowserInfo()['windows'][1]['tabs'][0]['renderer_pid']
self.assertEquals(popup_pid, parent_pid,
msg='Parent and popup are not sharing a process.')
def testKillAndReloadSharedProcess(self):
"""Verify that killing a shared process kills all associated renderers.
In this case we are killing a process shared by a parent and
its popup process. Reloading both should share a process again.
"""
file_url = self.GetFileURLForPath(os.path.join(
self.DataDir(), 'popup_blocker', 'popup-window-open.html'))
self.NavigateToURL(file_url)
blocked_popups = self.GetBlockedPopupsInfo()
self.assertEqual(1, len(blocked_popups), msg='Popup not blocked')
self.UnblockAndLaunchBlockedPopup(0)
self.assertEquals(2, self.GetBrowserWindowCount())
# Check that the renderers are alive.
self.assertEquals(1, self.FindInPage('pop-up')['match_count'])
self.assertEquals(1,
self.FindInPage('popup', tab_index=0, windex=1)['match_count'])
# Check if they are sharing a process id.
self.assertEquals(
self.GetBrowserInfo()['windows'][0]['tabs'][0]['renderer_pid'],
self.GetBrowserInfo()['windows'][1]['tabs'][0]['renderer_pid'])
shared_pid = self.GetBrowserInfo()['windows'][0]['tabs'][0]['renderer_pid']
# This method would fail if the renderers are not killed.
self.KillRendererProcess(shared_pid)
# Reload the parent and popup windows.
self.GetBrowserWindow(0).GetTab(0).Reload()
self.GetBrowserWindow(1).GetTab(0).Reload()
# Check if both are sharing a process id.
self.assertEquals(
self.GetBrowserInfo()['windows'][0]['tabs'][0]['renderer_pid'],
self.GetBrowserInfo()['windows'][1]['tabs'][0]['renderer_pid'])
# The shared process id should be different from the previous one.
self.assertNotEqual(shared_pid,
self.GetBrowserInfo()['windows'][0]['tabs'][0]['renderer_pid'])
if __name__ == '__main__':
pyauto_functional.Main()
| meego-tablet-ux/meego-app-browser | chrome/test/functional/browser.py | Python | bsd-3-clause | 9,680 |
from django.contrib import admin
from pressgang.actions.revert.models import ReversionRecord
class ReversionRecordAdmin(admin.ModelAdmin):
list_display = ('blog_name', 'blog_path', 'started', 'ended', 'succeeded')
ordering = ('ended',)
admin.site.register(ReversionRecord, ReversionRecordAdmin)
| oberlin/pressgang | pressgang/actions/revert/admin.py | Python | bsd-3-clause | 302 |
# Copyright (c) 2012-2015 The GPy authors (see AUTHORS.txt)
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from scipy import stats,special
import scipy as sp
from . import link_functions
from ..util.misc import chain_1, chain_2, chain_3, blockify_dhess_dtheta, blockify_third, blockify_hessian, safe_exp
from ..util.quad_integrate import quadgk_int
from scipy.integrate import quad
from functools import partial
import warnings
from ..core.parameterization import Parameterized
class Likelihood(Parameterized):
"""
    Likelihood base class, used to define p(y|f).
    All instances use _inverse_ link functions, which can be swapped out. It is
    expected that inheriting classes define a default inverse link function.
    To use this class, inherit and define missing functionality.
Inheriting classes *must* implement:
pdf_link : a bound method which turns the output of the link function into the pdf
logpdf_link : the logarithm of the above
To enable use with EP, inheriting classes *must* define:
TODO: a suitable derivative function for any parameters of the class
It is also desirable to define:
    moments_match_ep : a function to compute the EP moments. If this isn't defined, the moments will be computed using 1D quadrature.
To enable use with Laplace approximation, inheriting classes *must* define:
Some derivative functions *AS TODO*
For exact Gaussian inference, define *JH TODO*
"""
def __init__(self, gp_link, name):
super(Likelihood, self).__init__(name)
assert isinstance(gp_link,link_functions.GPTransformation), "gp_link is not a valid GPTransformation."
self.gp_link = gp_link
self.log_concave = False
self.not_block_really = False
self.name = name
def to_dict(self):
raise NotImplementedError
def _to_dict(self):
input_dict = {}
input_dict["name"] = self.name
input_dict["gp_link_dict"] = self.gp_link.to_dict()
return input_dict
@staticmethod
def from_dict(input_dict):
import copy
input_dict = copy.deepcopy(input_dict)
likelihood_class = input_dict.pop('class')
input_dict["name"] = str(input_dict["name"])
name = input_dict.pop('name')
import GPy
likelihood_class = eval(likelihood_class)
return likelihood_class._from_dict(likelihood_class, input_dict)
@staticmethod
def _from_dict(likelihood_class, input_dict):
import copy
input_dict = copy.deepcopy(input_dict)
gp_link_dict = input_dict.pop('gp_link_dict')
import GPy
gp_link = GPy.likelihoods.link_functions.GPTransformation.from_dict(gp_link_dict)
input_dict["gp_link"] = gp_link
return likelihood_class(**input_dict)
def request_num_latent_functions(self, Y):
"""
The likelihood should infer how many latent functions are needed for the likelihood
Default is the number of outputs
"""
return Y.shape[1]
def exact_inference_gradients(self, dL_dKdiag,Y_metadata=None):
return np.zeros(self.size)
def update_gradients(self, partial):
if self.size > 0:
raise NotImplementedError('Must be implemented for likelihoods with parameters to be optimized')
def _preprocess_values(self,Y):
"""
        In case it is needed, this function assesses the output values or applies any pertinent transformation to them.
        :param Y: observed output
        :type Y: Nx1 numpy.ndarray
"""
return Y
def conditional_mean(self, gp):
"""
The mean of the random variable conditioned on one value of the GP
"""
raise NotImplementedError
def conditional_variance(self, gp):
"""
The variance of the random variable conditioned on one value of the GP
"""
raise NotImplementedError
def log_predictive_density(self, y_test, mu_star, var_star, Y_metadata=None):
"""
Calculation of the log predictive density
.. math:
            p(y_{*}|D) = \\int p(y_{*}|f_{*})p(f_{*}|\\mu_{*},\\sigma^{2}_{*}) df_{*}
:param y_test: test observations (y_{*})
:type y_test: (Nx1) array
:param mu_star: predictive mean of gaussian p(f_{*}|mu_{*}, var_{*})
:type mu_star: (Nx1) array
:param var_star: predictive variance of gaussian p(f_{*}|mu_{*}, var_{*})
:type var_star: (Nx1) array
"""
assert y_test.shape==mu_star.shape
assert y_test.shape==var_star.shape
assert y_test.shape[1] == 1
flat_y_test = y_test.flatten()
flat_mu_star = mu_star.flatten()
flat_var_star = var_star.flatten()
if Y_metadata is not None:
            #Need to zip individual elements of Y_metadata as well
Y_metadata_flat = {}
if Y_metadata is not None:
for key, val in Y_metadata.items():
Y_metadata_flat[key] = np.atleast_1d(val).reshape(-1,1)
zipped_values = []
for i in range(y_test.shape[0]):
y_m = {}
for key, val in Y_metadata_flat.items():
if np.isscalar(val) or val.shape[0] == 1:
y_m[key] = val
else:
#Won't broadcast yet
y_m[key] = val[i]
zipped_values.append((flat_y_test[i], flat_mu_star[i], flat_var_star[i], y_m))
else:
#Otherwise just pass along None's
zipped_values = zip(flat_y_test, flat_mu_star, flat_var_star, [None]*y_test.shape[0])
def integral_generator(yi, mi, vi, yi_m):
"""Generate a function which can be integrated
to give p(Y*|Y) = int p(Y*|f*)p(f*|Y) df*"""
def f(fi_star):
#exponent = np.exp(-(1./(2*vi))*np.square(mi-fi_star))
#from GPy.util.misc import safe_exp
#exponent = safe_exp(exponent)
#res = safe_exp(self.logpdf(fi_star, yi, yi_m))*exponent
#More stable in the log space
res = np.exp(self.logpdf(fi_star, yi, yi_m)
- 0.5*np.log(2*np.pi*vi)
- 0.5*np.square(fi_star-mi)/vi)
if not np.isfinite(res):
                    warnings.warn("Non-finite value encountered in the predictive density integrand")
return res
return f
p_ystar, _ = zip(*[quad(integral_generator(yi, mi, vi, yi_m), -np.inf, np.inf)
for yi, mi, vi, yi_m in zipped_values])
p_ystar = np.array(p_ystar).reshape(*y_test.shape)
return np.log(p_ystar)
def log_predictive_density_sampling(self, y_test, mu_star, var_star, Y_metadata=None, num_samples=1000):
"""
Calculation of the log predictive density via sampling
.. math:
            \\log p(y_{*}|D) \\approx \\log \\frac{1}{S}\\sum^{S}_{s=1} p(y_{*}|f_{*s})
            f_{*s} \\sim p(f_{*}|\\mu_{*},\\sigma^{2}_{*})
:param y_test: test observations (y_{*})
:type y_test: (Nx1) array
:param mu_star: predictive mean of gaussian p(f_{*}|mu_{*}, var_{*})
:type mu_star: (Nx1) array
:param var_star: predictive variance of gaussian p(f_{*}|mu_{*}, var_{*})
:type var_star: (Nx1) array
:param num_samples: num samples of p(f_{*}|mu_{*}, var_{*}) to take
:type num_samples: int
"""
assert y_test.shape==mu_star.shape
assert y_test.shape==var_star.shape
assert y_test.shape[1] == 1
#Take samples of p(f*|y)
#fi_samples = np.random.randn(num_samples)*np.sqrt(var_star) + mu_star
fi_samples = np.random.normal(mu_star, np.sqrt(var_star), size=(mu_star.shape[0], num_samples))
from scipy.misc import logsumexp
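        # The next line uses the identity log((1/S) * sum_s p_s) = logsumexp(log p_s) - log(S),
        # which avoids underflow when the individual densities p(y*|f_{*s}) are tiny.
        # Note: in newer SciPy releases logsumexp lives in scipy.special rather than scipy.misc.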
log_p_ystar = -np.log(num_samples) + logsumexp(self.logpdf(fi_samples, y_test, Y_metadata=Y_metadata), axis=1)
log_p_ystar = np.array(log_p_ystar).reshape(*y_test.shape)
return log_p_ystar
def moments_match_ep(self,obs,tau,v,Y_metadata_i=None):
"""
Calculation of moments using quadrature
:param obs: observed output
:param tau: cavity distribution 1st natural parameter (precision)
        :param v: cavity distribution 2nd natural parameter (mu*precision)
"""
#Compute first integral for zeroth moment.
#NOTE constant np.sqrt(2*pi/tau) added at the end of the function
mu = v/tau
sigma2 = 1./tau
        #Let's do these for now based on the same idea as Gaussian quadrature,
        # i.e. anything multiplied by something close to zero is effectively zero.
f_min = mu - 20*np.sqrt(sigma2)
f_max = mu + 20*np.sqrt(sigma2)
def int_1(f):
return self.pdf(f, obs, Y_metadata=Y_metadata_i)*np.exp(-0.5*tau*np.square(mu-f))
z_scaled, accuracy = quad(int_1, f_min, f_max)
#Compute second integral for first moment
def int_2(f):
return f*self.pdf(f, obs, Y_metadata=Y_metadata_i)*np.exp(-0.5*tau*np.square(mu-f))
mean, accuracy = quad(int_2, f_min, f_max)
mean /= z_scaled
#Compute integral for variance
def int_3(f):
return (f**2)*self.pdf(f, obs, Y_metadata=Y_metadata_i)*np.exp(-0.5*tau*np.square(mu-f))
Ef2, accuracy = quad(int_3, f_min, f_max)
Ef2 /= z_scaled
variance = Ef2 - mean**2
#Add constant to the zeroth moment
        #NOTE: this constant is not needed in the other moments because it cancels out.
z = z_scaled/np.sqrt(2*np.pi/tau)
return z, mean, variance
#only compute gh points if required
__gh_points = None
def _gh_points(self, T=20):
if self.__gh_points is None:
self.__gh_points = np.polynomial.hermite.hermgauss(T)
return self.__gh_points
def ep_gradients(self, Y, cav_tau, cav_v, dL_dKdiag, Y_metadata=None, quad_mode='gk', boost_grad=1.):
if self.size > 0:
shape = Y.shape
tau,v,Y = cav_tau.flatten(), cav_v.flatten(),Y.flatten()
mu = v/tau
sigma2 = 1./tau
# assert Y.shape == v.shape
dlik_dtheta = np.empty((self.size, Y.shape[0]))
# for j in range(self.size):
Y_metadata_list = []
for index in range(len(Y)):
Y_metadata_i = {}
if Y_metadata is not None:
for key in Y_metadata.keys():
Y_metadata_i[key] = Y_metadata[key][index,:]
Y_metadata_list.append(Y_metadata_i)
if quad_mode == 'gk':
f = partial(self.integrate_gk)
quads = zip(*map(f, Y.flatten(), mu.flatten(), np.sqrt(sigma2.flatten()), Y_metadata_list))
quads = np.vstack(quads)
quads.reshape(self.size, shape[0], shape[1])
elif quad_mode == 'gh':
f = partial(self.integrate_gh)
quads = zip(*map(f, Y.flatten(), mu.flatten(), np.sqrt(sigma2.flatten())))
quads = np.hstack(quads)
quads = quads.T
else:
raise Exception("no other quadrature mode available")
# do a gaussian-hermite integration
dL_dtheta_avg = boost_grad * np.nanmean(quads, axis=1)
dL_dtheta = boost_grad * np.nansum(quads, axis=1)
# dL_dtheta = boost_grad * np.nansum(dlik_dtheta, axis=1)
else:
dL_dtheta = np.zeros(self.num_params)
return dL_dtheta
def integrate_gk(self, Y, mu, sigma, Y_metadata_i=None):
# gaussian-kronrod integration.
fmin = -np.inf
fmax = np.inf
SQRT_2PI = np.sqrt(2.*np.pi)
def generate_integral(f):
a = np.exp(self.logpdf_link(f, Y, Y_metadata_i)) * np.exp(-0.5 * np.square((f - mu) / sigma)) / (
SQRT_2PI * sigma)
fn1 = a * self.dlogpdf_dtheta(f, Y, Y_metadata_i)
fn = fn1
return fn
dF_dtheta_i = quadgk_int(generate_integral, fmin=fmin, fmax=fmax)
return dF_dtheta_i
def integrate_gh(self, Y, mu, sigma, Y_metadata_i=None, gh_points=None):
# gaussian-hermite quadrature.
# "calculate site derivatives E_f{d logp(y_i|f_i)/da} where a is a likelihood parameter
# and the expectation is over the exact marginal posterior, which is not gaussian- and is
# unnormalised product of the cavity distribution(a Gaussian) and the exact likelihood term.
#
# calculate the expectation wrt the approximate marginal posterior, which should be approximately the same.
# . This term is needed for evaluating the
# gradients of the marginal likelihood estimate Z_EP wrt likelihood parameters."
# "writing it explicitly "
# use them for gaussian-hermite quadrature
SQRT_2PI = np.sqrt(2.*np.pi)
if gh_points is None:
gh_x, gh_w = self._gh_points(32)
else:
gh_x, gh_w = gh_points
X = gh_x[None,:]*np.sqrt(2.)*sigma + mu
# Here X is a grid vector of possible fi values, while Y is just a single value which will be broadcasted.
a = np.exp(self.logpdf_link(X, Y, Y_metadata_i))
a = a.repeat(self.num_params,0)
b = self.dlogpdf_dtheta(X, Y, Y_metadata_i)
old_shape = b.shape
fn = np.array([i*j for i,j in zip(a.flatten(), b.flatten())])
fn = fn.reshape(old_shape)
dF_dtheta_i = np.dot(fn, gh_w)/np.sqrt(np.pi)
return dF_dtheta_i
def variational_expectations(self, Y, m, v, gh_points=None, Y_metadata=None):
"""
Use Gauss-Hermite Quadrature to compute
E_p(f) [ log p(y|f) ]
d/dm E_p(f) [ log p(y|f) ]
d/dv E_p(f) [ log p(y|f) ]
where p(f) is a Gaussian with mean m and variance v. The shapes of Y, m and v should match.
        If no gh_points are passed, we construct them using default options.
"""
if gh_points is None:
gh_x, gh_w = self._gh_points()
else:
gh_x, gh_w = gh_points
shape = m.shape
m,v,Y = m.flatten(), v.flatten(), Y.flatten()
#make a grid of points
X = gh_x[None,:]*np.sqrt(2.*v[:,None]) + m[:,None]
        #evaluate the likelihood for the grid. The first axis indexes the data (and mu, var) and the second indexes the grid.
# broadcast needs to be handled carefully.
logp = self.logpdf(X,Y[:,None], Y_metadata=Y_metadata)
dlogp_dx = self.dlogpdf_df(X, Y[:,None], Y_metadata=Y_metadata)
d2logp_dx2 = self.d2logpdf_df2(X, Y[:,None], Y_metadata=Y_metadata)
#clipping for numerical stability
#logp = np.clip(logp,-1e9,1e9)
#dlogp_dx = np.clip(dlogp_dx,-1e9,1e9)
#d2logp_dx2 = np.clip(d2logp_dx2,-1e9,1e9)
        #average over the grid to get derivatives of the Gaussian's parameters
        #the division by sqrt(pi) comes from the fact that each Gauss-Hermite quadrature needs to be scaled by 1/sqrt(pi)
F = np.dot(logp, gh_w)/np.sqrt(np.pi)
dF_dm = np.dot(dlogp_dx, gh_w)/np.sqrt(np.pi)
dF_dv = np.dot(d2logp_dx2, gh_w)/np.sqrt(np.pi)
dF_dv /= 2.
        if np.any(np.isnan(dF_dv)) or np.any(np.isinf(dF_dv)):
            raise ValueError("variational_expectations: dF_dv contains NaN or inf values")
        if np.any(np.isnan(dF_dm)) or np.any(np.isinf(dF_dm)):
            raise ValueError("variational_expectations: dF_dm contains NaN or inf values")
if self.size:
dF_dtheta = self.dlogpdf_dtheta(X, Y[:,None], Y_metadata=Y_metadata) # Ntheta x (orig size) x N_{quad_points}
dF_dtheta = np.dot(dF_dtheta, gh_w)/np.sqrt(np.pi)
dF_dtheta = dF_dtheta.reshape(self.size, shape[0], shape[1])
else:
dF_dtheta = None # Not yet implemented
return F.reshape(*shape), dF_dm.reshape(*shape), dF_dv.reshape(*shape), dF_dtheta
def predictive_mean(self, mu, variance, Y_metadata=None):
"""
Quadrature calculation of the predictive mean: E(Y_star|Y) = E( E(Y_star|f_star, Y) )
:param mu: mean of posterior
        :param variance: variance of the posterior
        """
        #conditional_mean: the expected value of y given some f, under this likelihood
fmin = -np.inf
fmax = np.inf
def int_mean(f,m,v):
exponent = -(0.5/v)*np.square(f - m)
            #If exponent is under -30 then exp(exponent) will be very small, so don't exp it!
            #If p is (numerically) zero then conditional_mean(f)*p is simply returned as 0. below
            assert np.all(v > 0)
            p = safe_exp(exponent)
if p < 1e-10:
return 0.
else:
return self.conditional_mean(f)*p
scaled_mean = [quad(int_mean, fmin, fmax,args=(mj,s2j))[0] for mj,s2j in zip(mu,variance)]
mean = np.array(scaled_mean)[:,None] / np.sqrt(2*np.pi*(variance))
return mean
def predictive_variance(self, mu,variance, predictive_mean=None, Y_metadata=None):
"""
Approximation to the predictive variance: V(Y_star)
The following variance decomposition is used:
        V(Y_star) = E( V(Y_star|f_star) ) + V( E(Y_star|f_star) )
:param mu: mean of posterior
        :param variance: variance of the posterior
        :param predictive_mean: the output's predictive mean; if None, the predictive_mean function will be called.
"""
#sigma2 = sigma**2
normalizer = np.sqrt(2*np.pi*variance)
fmin_v = -np.inf
        fmin_m = -np.inf
fmin = -np.inf
fmax = np.inf
from ..util.misc import safe_exp
# E( V(Y_star|f_star) )
def int_var(f,m,v):
exponent = -(0.5/v)*np.square(f - m)
p = safe_exp(exponent)
#If p is zero then conditional_variance will overflow
if p < 1e-10:
return 0.
else:
return self.conditional_variance(f)*p
scaled_exp_variance = [quad(int_var, fmin_v, fmax,args=(mj,s2j))[0] for mj,s2j in zip(mu,variance)]
exp_var = np.array(scaled_exp_variance)[:,None] / normalizer
#V( E(Y_star|f_star) ) = E( E(Y_star|f_star)**2 ) - E( E(Y_star|f_star) )**2
#E( E(Y_star|f_star) )**2
if predictive_mean is None:
predictive_mean = self.predictive_mean(mu,variance)
predictive_mean_sq = predictive_mean**2
#E( E(Y_star|f_star)**2 )
def int_pred_mean_sq(f,m,v,predictive_mean_sq):
exponent = -(0.5/v)*np.square(f - m)
p = np.exp(exponent)
#If p is zero then conditional_mean**2 will overflow
if p < 1e-10:
return 0.
else:
return self.conditional_mean(f)**2*p
scaled_exp_exp2 = [quad(int_pred_mean_sq, fmin_m, fmax,args=(mj,s2j,pm2j))[0] for mj,s2j,pm2j in zip(mu,variance,predictive_mean_sq)]
exp_exp2 = np.array(scaled_exp_exp2)[:,None] / normalizer
var_exp = exp_exp2 - predictive_mean_sq
# V(Y_star) = E[ V(Y_star|f_star) ] + V[ E(Y_star|f_star) ]
# V(Y_star) = E[ V(Y_star|f_star) ] + E(Y_star**2|f_star) - E[Y_star|f_star]**2
return exp_var + var_exp
def pdf_link(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def logpdf_link(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def dlogpdf_dlink(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def d2logpdf_dlink2(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def d3logpdf_dlink3(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def dlogpdf_link_dtheta(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def dlogpdf_dlink_dtheta(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def d2logpdf_dlink2_dtheta(self, inv_link_f, y, Y_metadata=None):
raise NotImplementedError
def pdf(self, f, y, Y_metadata=None):
"""
Evaluates the link function link(f) then computes the likelihood (pdf) using it
.. math:
p(y|\\lambda(f))
:param f: latent variables f
:type f: Nx1 array
:param y: data
:type y: Nx1 array
        :param Y_metadata: metadata for the observations, passed through to the subclass implementation
:returns: likelihood evaluated for this point
:rtype: float
"""
if isinstance(self.gp_link, link_functions.Identity):
return self.pdf_link(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
return self.pdf_link(inv_link_f, y, Y_metadata=Y_metadata)
def logpdf_sum(self, f, y, Y_metadata=None):
"""
Convenience function that can overridden for functions where this could
be computed more efficiently
"""
return np.sum(self.logpdf(f, y, Y_metadata=Y_metadata))
def logpdf(self, f, y, Y_metadata=None):
"""
Evaluates the link function link(f) then computes the log likelihood (log pdf) using it
.. math:
\\log p(y|\\lambda(f))
:param f: latent variables f
:type f: Nx1 array
:param y: data
:type y: Nx1 array
        :param Y_metadata: metadata for the observations, passed through to the subclass implementation
:returns: log likelihood evaluated for this point
:rtype: float
"""
if isinstance(self.gp_link, link_functions.Identity):
return self.logpdf_link(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
return self.logpdf_link(inv_link_f, y, Y_metadata=Y_metadata)
def dlogpdf_df(self, f, y, Y_metadata=None):
"""
Evaluates the link function link(f) then computes the derivative of log likelihood using it
Uses the Faa di Bruno's formula for the chain rule
.. math::
\\frac{d\\log p(y|\\lambda(f))}{df} = \\frac{d\\log p(y|\\lambda(f))}{d\\lambda(f)}\\frac{d\\lambda(f)}{df}
:param f: latent variables f
:type f: Nx1 array
:param y: data
:type y: Nx1 array
        :param Y_metadata: metadata for the observations, passed through to the subclass implementation
:returns: derivative of log likelihood evaluated for this point
:rtype: 1xN array
"""
if isinstance(self.gp_link, link_functions.Identity):
return self.dlogpdf_dlink(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
dlogpdf_dlink = self.dlogpdf_dlink(inv_link_f, y, Y_metadata=Y_metadata)
dlink_df = self.gp_link.dtransf_df(f)
return chain_1(dlogpdf_dlink, dlink_df)
@blockify_hessian
def d2logpdf_df2(self, f, y, Y_metadata=None):
"""
Evaluates the link function link(f) then computes the second derivative of log likelihood using it
Uses the Faa di Bruno's formula for the chain rule
.. math::
\\frac{d^{2}\\log p(y|\\lambda(f))}{df^{2}} = \\frac{d^{2}\\log p(y|\\lambda(f))}{d^{2}\\lambda(f)}\\left(\\frac{d\\lambda(f)}{df}\\right)^{2} + \\frac{d\\log p(y|\\lambda(f))}{d\\lambda(f)}\\frac{d^{2}\\lambda(f)}{df^{2}}
:param f: latent variables f
:type f: Nx1 array
:param y: data
:type y: Nx1 array
        :param Y_metadata: metadata for the observations, passed through to the subclass implementation
:returns: second derivative of log likelihood evaluated for this point (diagonal only)
:rtype: 1xN array
"""
if isinstance(self.gp_link, link_functions.Identity):
d2logpdf_df2 = self.d2logpdf_dlink2(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
d2logpdf_dlink2 = self.d2logpdf_dlink2(inv_link_f, y, Y_metadata=Y_metadata)
dlink_df = self.gp_link.dtransf_df(f)
dlogpdf_dlink = self.dlogpdf_dlink(inv_link_f, y, Y_metadata=Y_metadata)
d2link_df2 = self.gp_link.d2transf_df2(f)
d2logpdf_df2 = chain_2(d2logpdf_dlink2, dlink_df, dlogpdf_dlink, d2link_df2)
return d2logpdf_df2
@blockify_third
def d3logpdf_df3(self, f, y, Y_metadata=None):
"""
Evaluates the link function link(f) then computes the third derivative of log likelihood using it
Uses the Faa di Bruno's formula for the chain rule
.. math::
\\frac{d^{3}\\log p(y|\\lambda(f))}{df^{3}} = \\frac{d^{3}\\log p(y|\\lambda(f)}{d\\lambda(f)^{3}}\\left(\\frac{d\\lambda(f)}{df}\\right)^{3} + 3\\frac{d^{2}\\log p(y|\\lambda(f)}{d\\lambda(f)^{2}}\\frac{d\\lambda(f)}{df}\\frac{d^{2}\\lambda(f)}{df^{2}} + \\frac{d\\log p(y|\\lambda(f)}{d\\lambda(f)}\\frac{d^{3}\\lambda(f)}{df^{3}}
:param f: latent variables f
:type f: Nx1 array
:param y: data
:type y: Nx1 array
        :param Y_metadata: metadata for the observations, passed through to the subclass implementation
:returns: third derivative of log likelihood evaluated for this point
:rtype: float
"""
if isinstance(self.gp_link, link_functions.Identity):
d3logpdf_df3 = self.d3logpdf_dlink3(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
d3logpdf_dlink3 = self.d3logpdf_dlink3(inv_link_f, y, Y_metadata=Y_metadata)
dlink_df = self.gp_link.dtransf_df(f)
d2logpdf_dlink2 = self.d2logpdf_dlink2(inv_link_f, y, Y_metadata=Y_metadata)
d2link_df2 = self.gp_link.d2transf_df2(f)
dlogpdf_dlink = self.dlogpdf_dlink(inv_link_f, y, Y_metadata=Y_metadata)
d3link_df3 = self.gp_link.d3transf_df3(f)
d3logpdf_df3 = chain_3(d3logpdf_dlink3, dlink_df, d2logpdf_dlink2, d2link_df2, dlogpdf_dlink, d3link_df3)
return d3logpdf_df3
def dlogpdf_dtheta(self, f, y, Y_metadata=None):
"""
TODO: Doc strings
"""
if self.size > 0:
if self.not_block_really:
raise NotImplementedError("Need to make a decorator for this!")
if isinstance(self.gp_link, link_functions.Identity):
return self.dlogpdf_link_dtheta(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
return self.dlogpdf_link_dtheta(inv_link_f, y, Y_metadata=Y_metadata)
else:
# There are no parameters so return an empty array for derivatives
return np.zeros((0, f.shape[0], f.shape[1]))
def dlogpdf_df_dtheta(self, f, y, Y_metadata=None):
"""
TODO: Doc strings
"""
if self.size > 0:
if self.not_block_really:
raise NotImplementedError("Need to make a decorator for this!")
if isinstance(self.gp_link, link_functions.Identity):
return self.dlogpdf_dlink_dtheta(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
dlink_df = self.gp_link.dtransf_df(f)
dlogpdf_dlink_dtheta = self.dlogpdf_dlink_dtheta(inv_link_f, y, Y_metadata=Y_metadata)
dlogpdf_df_dtheta = np.zeros((self.size, f.shape[0], f.shape[1]))
                #Chain each parameter of the likelihood separately
for p in range(self.size):
dlogpdf_df_dtheta[p, :, :] = chain_1(dlogpdf_dlink_dtheta[p,:,:], dlink_df)
return dlogpdf_df_dtheta
#return chain_1(dlogpdf_dlink_dtheta, dlink_df)
else:
# There are no parameters so return an empty array for derivatives
return np.zeros((0, f.shape[0], f.shape[1]))
def d2logpdf_df2_dtheta(self, f, y, Y_metadata=None):
"""
TODO: Doc strings
"""
if self.size > 0:
if self.not_block_really:
raise NotImplementedError("Need to make a decorator for this!")
if isinstance(self.gp_link, link_functions.Identity):
return self.d2logpdf_dlink2_dtheta(f, y, Y_metadata=Y_metadata)
else:
inv_link_f = self.gp_link.transf(f)
dlink_df = self.gp_link.dtransf_df(f)
d2link_df2 = self.gp_link.d2transf_df2(f)
d2logpdf_dlink2_dtheta = self.d2logpdf_dlink2_dtheta(inv_link_f, y, Y_metadata=Y_metadata)
dlogpdf_dlink_dtheta = self.dlogpdf_dlink_dtheta(inv_link_f, y, Y_metadata=Y_metadata)
d2logpdf_df2_dtheta = np.zeros((self.size, f.shape[0], f.shape[1]))
                #Chain each parameter of the likelihood separately
for p in range(self.size):
d2logpdf_df2_dtheta[p, :, :] = chain_2(d2logpdf_dlink2_dtheta[p,:,:], dlink_df, dlogpdf_dlink_dtheta[p,:,:], d2link_df2)
return d2logpdf_df2_dtheta
#return chain_2(d2logpdf_dlink2_dtheta, dlink_df, dlogpdf_dlink_dtheta, d2link_df2)
else:
# There are no parameters so return an empty array for derivatives
return np.zeros((0, f.shape[0], f.shape[1]))
def _laplace_gradients(self, f, y, Y_metadata=None):
dlogpdf_dtheta = self.dlogpdf_dtheta(f, y, Y_metadata=Y_metadata)
dlogpdf_df_dtheta = self.dlogpdf_df_dtheta(f, y, Y_metadata=Y_metadata)
d2logpdf_df2_dtheta = self.d2logpdf_df2_dtheta(f, y, Y_metadata=Y_metadata)
#Parameters are stacked vertically. Must be listed in same order as 'get_param_names'
# ensure we have gradients for every parameter we want to optimize
assert dlogpdf_dtheta.shape[0] == self.size #num_param array x f, d
assert dlogpdf_df_dtheta.shape[0] == self.size #num_param x f x d x matrix or just num_param x f
assert d2logpdf_df2_dtheta.shape[0] == self.size #num_param x f matrix or num_param x f x d x matrix, num_param x f x f or num_param x f x f x d
return dlogpdf_dtheta, dlogpdf_df_dtheta, d2logpdf_df2_dtheta
def predictive_values(self, mu, var, full_cov=False, Y_metadata=None):
"""
        Compute mean, variance of the predictive distribution.
:param mu: mean of the latent variable, f, of posterior
:param var: variance of the latent variable, f, of posterior
:param full_cov: whether to use the full covariance or just the diagonal
:type full_cov: Boolean
"""
try:
pred_mean = self.predictive_mean(mu, var, Y_metadata=Y_metadata)
pred_var = self.predictive_variance(mu, var, pred_mean, Y_metadata=Y_metadata)
except NotImplementedError:
print("Finding predictive mean and variance via sampling rather than quadrature")
Nf_samp = 300
Ny_samp = 1
s = np.random.randn(mu.shape[0], Nf_samp)*np.sqrt(var) + mu
ss_y = self.samples(s, Y_metadata, samples=Ny_samp)
pred_mean = np.mean(ss_y, axis=1)[:, None]
pred_var = np.var(ss_y, axis=1)[:, None]
return pred_mean, pred_var
def predictive_quantiles(self, mu, var, quantiles, Y_metadata=None):
#compute the quantiles by sampling!!!
Nf_samp = 300
Ny_samp = 1
s = np.random.randn(mu.shape[0], Nf_samp)*np.sqrt(var) + mu
ss_y = self.samples(s, Y_metadata)#, samples=Ny_samp)
#ss_y = ss_y.reshape(mu.shape[0], mu.shape[1], Nf_samp*Ny_samp)
pred_quantiles = [np.percentile(ss_y, q, axis=1)[:,None] for q in quantiles]
return pred_quantiles
def samples(self, gp, Y_metadata=None, samples=1):
"""
Returns a set of samples of observations based on a given value of the latent variable.
:param gp: latent variable
:param samples: number of samples to take for each f location
"""
raise NotImplementedError("""May be possible to use MCMC with user-tuning, see
MCMC_pdf_samples in likelihood.py and write samples function
using this, beware this is a simple implementation
of Metropolis and will not work well for all likelihoods""")
def MCMC_pdf_samples(self, fNew, num_samples=1000, starting_loc=None, stepsize=0.1, burn_in=1000, Y_metadata=None):
"""
Simple implementation of Metropolis sampling algorithm
Will run a parallel chain for each input dimension (treats each f independently)
        Thus assumes f*_1 is independent of f*_2 etc.
:param num_samples: Number of samples to take
:param fNew: f at which to sample around
        :param starting_loc: Starting locations of the independent chains (usually will be conditional_mean of likelihood), often link_f
:param stepsize: Stepsize for the normal proposal distribution (will need modifying)
        :param burn_in: number of samples to use for burn-in (will need modifying)
:param Y_metadata: Y_metadata for pdf
"""
print("Warning, using MCMC for sampling y*, needs to be tuned!")
if starting_loc is None:
starting_loc = fNew
from functools import partial
logpdf = partial(self.logpdf, f=fNew, Y_metadata=Y_metadata)
pdf = lambda y_star: np.exp(logpdf(y=y_star[:, None]))
        #The link function of f is usually a good starting point
        #(i.e. the value before it is corrupted by the likelihood)
par_chains = starting_loc.shape[0]
chain_values = np.zeros((par_chains, num_samples))
chain_values[:, 0][:,None] = starting_loc
#Use same stepsize for all par_chains
stepsize = np.ones(par_chains)*stepsize
accepted = np.zeros((par_chains, num_samples+burn_in))
accept_ratio = np.zeros(num_samples+burn_in)
#Whilst burning in, only need to keep the previous lot
burnin_cache = np.zeros(par_chains)
burnin_cache[:] = starting_loc.flatten()
burning_in = True
for i in range(burn_in+num_samples):
next_ind = i-burn_in
if burning_in:
old_y = burnin_cache
else:
old_y = chain_values[:,next_ind-1]
old_lik = pdf(old_y)
#Propose new y from Gaussian proposal
new_y = np.random.normal(loc=old_y, scale=stepsize)
new_lik = pdf(new_y)
#Accept using Metropolis (not hastings) acceptance
#Always accepts if new_lik > old_lik
accept_probability = np.minimum(1, new_lik/old_lik)
u = np.random.uniform(0,1,par_chains)
#print "Accept prob: ", accept_probability
accepts = u < accept_probability
if burning_in:
burnin_cache[accepts] = new_y[accepts]
burnin_cache[~accepts] = old_y[~accepts]
if i == burn_in:
burning_in = False
chain_values[:,0] = burnin_cache
else:
#If it was accepted then new_y becomes the latest sample
chain_values[accepts, next_ind] = new_y[accepts]
#Otherwise use old y as the sample
chain_values[~accepts, next_ind] = old_y[~accepts]
accepted[~accepts, i] = 0
accepted[accepts, i] = 1
accept_ratio[i] = np.sum(accepted[:,i])/float(par_chains)
#Show progress
if i % int((burn_in+num_samples)*0.1) == 0:
print("{}% of samples taken ({})".format((i/int((burn_in+num_samples)*0.1)*10), i))
print("Last run accept ratio: ", accept_ratio[i])
print("Average accept ratio: ", np.mean(accept_ratio))
return chain_values
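if __name__ == '__main__':
    # Illustrative sketch only (not part of GPy): a minimal subclass showing the two
    # methods the class docstring says inheriting classes *must* implement
    # (pdf_link and logpdf_link), here for a toy fixed-variance Gaussian noise model.
    class _ToyGaussianLikelihood(Likelihood):
        def __init__(self, variance=1.0, gp_link=None):
            if gp_link is None:
                gp_link = link_functions.Identity()
            super(_ToyGaussianLikelihood, self).__init__(gp_link, name='toy_gaussian')
            self.fixed_variance = float(variance)
        def logpdf_link(self, inv_link_f, y, Y_metadata=None):
            err = y - inv_link_f
            return -0.5*(np.log(2*np.pi*self.fixed_variance) + np.square(err)/self.fixed_variance)
        def pdf_link(self, inv_link_f, y, Y_metadata=None):
            return np.exp(self.logpdf_link(inv_link_f, y, Y_metadata=Y_metadata))
    lik = _ToyGaussianLikelihood(variance=0.1)
    f_latent = np.zeros((5, 1))
    y_obs = 0.3*np.random.randn(5, 1)
    # logpdf() applies the (identity) link and then calls logpdf_link defined above.
    print(lik.logpdf(f_latent, y_obs))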
|
befelix/GPy
|
GPy/likelihoods/likelihood.py
|
Python
|
bsd-3-clause
| 35,968
|
#! /usr/bin/env python
#
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: khmer-project@idyll.org
#
"""
Accept or discard sequences XXX, based on the given counting
hash table. Output sequences will be placed in 'infile.medfilt'.
% python sandbox/filter-median.py <counting.ct> <data1> [ <data2> <...> ]
Use '-h' for parameter help.
"""
from __future__ import print_function
import sys
import screed.fasta
import os
import khmer
from khmer.thread_utils import ThreadedSequenceProcessor, verbose_loader
from khmer.khmer_args import build_counting_args
import random
###
DEFAULT_COVERAGE = 20
def main():
parser = build_counting_args()
parser.add_argument('--coverage', '-C', dest='coverage',
default=DEFAULT_COVERAGE, type=int)
args = parser.parse_args()
counting_ht = args.input_table
infiles = args.input_filenames
print('file with ht: %s' % counting_ht)
print('loading hashtable')
ht = khmer.load_counting_hash(counting_ht)
K = ht.ksize()
print("K:", K)
# the filtering function.
def process_fn(record):
name = record['name']
seq = record['sequence']
med, avg, dev = ht.get_median_count(seq)
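        # Subsample by estimated coverage: a read whose median k-mer count is 'med' is kept
        # with probability roughly min(1, args.coverage / med), so reads from high-coverage
        # regions are discarded more often (illustrative reading of the check below).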
if random.randint(1, med) > args.coverage:
return None, None
return name, seq
# the filtering loop
for infile in infiles:
print('filtering', infile)
outfile = os.path.basename(infile) + '.medfilt'
outfp = open(outfile, 'w')
tsp = ThreadedSequenceProcessor(process_fn)
tsp.start(verbose_loader(infile), outfp)
print('output in', outfile)
if __name__ == '__main__':
main()
|
Winterflower/khmer
|
sandbox/filter-median.py
|
Python
|
bsd-3-clause
| 1,824
|
# Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Loads a PYPOWER case dictionary.
"""
import sys
from os.path import basename, splitext, exists
from copy import deepcopy
from numpy import array, zeros, ones, c_
from scipy.io import loadmat
from pypower._compat import PY2
from pypower.idx_gen import PMIN, MU_PMAX, MU_PMIN, MU_QMAX, MU_QMIN, APF
from pypower.idx_brch import PF, QF, PT, QT, MU_SF, MU_ST, BR_STATUS
if not PY2:
basestring = str
def loadcase(casefile,
return_as_obj=True, expect_gencost=True, expect_areas=True):
"""Returns the individual data matrices or an dict containing them
as values.
Here C{casefile} is either a dict containing the keys C{baseMVA}, C{bus},
C{gen}, C{branch}, C{areas}, C{gencost}, or a string containing the name
of the file. If C{casefile} contains the extension '.mat' or '.py', then
    the explicit file is searched. If C{casefile} contains no extension, then
L{loadcase} looks for a '.mat' file first, then for a '.py' file. If the
file does not exist or doesn't define all matrices, the function returns
an exit code as follows:
0. all variables successfully defined
1. input argument is not a string or dict
2. specified extension-less file name does not exist
3. specified .mat file does not exist
4. specified .py file does not exist
5. specified file fails to define all matrices or contains syntax
error
If the input data is not a dict containing a 'version' key, it is
assumed to be a PYPOWER case file in version 1 format, and will be
converted to version 2 format.
@author: Carlos E. Murillo-Sanchez (PSERC Cornell & Universidad
Autonoma de Manizales)
@author: Ray Zimmerman (PSERC Cornell)
"""
    if return_as_obj:
expect_gencost = False
expect_areas = False
info = 0
# read data into case object
if isinstance(casefile, basestring):
# check for explicit extension
if casefile.endswith(('.py', '.mat')):
rootname, extension = splitext(casefile)
fname = basename(rootname)
else:
# set extension if not specified explicitly
rootname = casefile
if exists(casefile + '.mat'):
extension = '.mat'
elif exists(casefile + '.py'):
extension = '.py'
else:
info = 2
fname = basename(rootname)
lasterr = ''
## attempt to read file
if info == 0:
if extension == '.mat': ## from MAT file
try:
d = loadmat(rootname + extension, struct_as_record=True)
if 'ppc' in d or 'mpc' in d: ## it's a MAT/PYPOWER dict
if 'ppc' in d:
struct = d['ppc']
else:
struct = d['mpc']
val = struct[0, 0]
s = {}
for a in val.dtype.names:
s[a] = val[a]
else: ## individual data matrices
d['version'] = '1'
s = {}
for k, v in d.items():
s[k] = v
s['baseMVA'] = s['baseMVA'][0] # convert array to float
except IOError as e:
info = 3
lasterr = str(e)
elif extension == '.py': ## from Python file
try:
if PY2:
execfile(rootname + extension)
else:
exec(compile(open(rootname + extension).read(),
rootname + extension, 'exec'))
try: ## assume it returns an object
s = eval(fname)()
except ValueError as e:
info = 4
lasterr = str(e)
## if not try individual data matrices
if info == 0 and not isinstance(s, dict):
s = {}
s['version'] = '1'
if expect_gencost:
try:
s['baseMVA'], s['bus'], s['gen'], s['branch'], \
s['areas'], s['gencost'] = eval(fname)()
except IOError as e:
info = 4
lasterr = str(e)
else:
if return_as_obj:
try:
s['baseMVA'], s['bus'], s['gen'], \
s['branch'], s['areas'], \
s['gencost'] = eval(fname)()
except ValueError as e:
try:
s['baseMVA'], s['bus'], s['gen'], \
s['branch'] = eval(fname)()
except ValueError as e:
info = 4
lasterr = str(e)
else:
try:
s['baseMVA'], s['bus'], s['gen'], \
s['branch'] = eval(fname)()
except ValueError as e:
info = 4
lasterr = str(e)
except IOError as e:
info = 4
lasterr = str(e)
if info == 4 and exists(rootname + '.py'):
info = 5
err5 = lasterr
elif isinstance(casefile, dict):
s = deepcopy(casefile)
else:
info = 1
# check contents of dict
if info == 0:
# check for required keys
if (s['baseMVA'] is None or s['bus'] is None \
or s['gen'] is None or s['branch'] is None) or \
(expect_gencost and s['gencost'] is None) or \
(expect_areas and s['areas'] is None):
info = 5 ## missing some expected fields
err5 = 'missing data'
else:
## remove empty areas if not needed
            if ('areas' in s) and (len(s['areas']) == 0) and (not expect_areas):
del s['areas']
## all fields present, copy to ppc
ppc = deepcopy(s)
            if 'version' not in ppc: ## hmm, struct with no 'version' field
if ppc['gen'].shape[1] < 21: ## version 2 has 21 or 25 cols
ppc['version'] = '1'
else:
ppc['version'] = '2'
if (ppc['version'] == '1'):
# convert from version 1 to version 2
ppc['gen'], ppc['branch'] = ppc_1to2(ppc['gen'], ppc['branch']);
ppc['version'] = '2'
if info == 0: # no errors
if return_as_obj:
return ppc
else:
result = [ppc['baseMVA'], ppc['bus'], ppc['gen'], ppc['branch']]
if expect_gencost:
if expect_areas:
result.extend([ppc['areas'], ppc['gencost']])
else:
result.extend([ppc['gencost']])
return result
else: # error encountered
if info == 1:
sys.stderr.write('Input arg should be a case or a string '
'containing a filename\n')
elif info == 2:
sys.stderr.write('Specified case not a valid file\n')
elif info == 3:
sys.stderr.write('Specified MAT file does not exist\n')
elif info == 4:
sys.stderr.write('Specified Python file does not exist\n')
elif info == 5:
sys.stderr.write('Syntax error or undefined data '
'matrix(ices) in the file\n')
else:
sys.stderr.write('Unknown error encountered loading case.\n')
sys.stderr.write(lasterr + '\n')
return info
def ppc_1to2(gen, branch):
##----- gen -----
## use the version 1 values for column names
if gen.shape[1] >= APF:
sys.stderr.write('ppc_1to2: gen matrix appears to already be in '
'version 2 format\n')
return gen, branch
shift = MU_PMAX - PMIN - 1
tmp = array([MU_PMAX, MU_PMIN, MU_QMAX, MU_QMIN]) - shift
mu_Pmax, mu_Pmin, mu_Qmax, mu_Qmin = tmp
## add extra columns to gen
tmp = zeros((gen.shape[0], shift))
if gen.shape[1] >= mu_Qmin:
gen = c_[ gen[:, 0:PMIN + 1], tmp, gen[:, mu_Pmax:mu_Qmin] ]
else:
gen = c_[ gen[:, 0:PMIN + 1], tmp ]
##----- branch -----
## use the version 1 values for column names
shift = PF - BR_STATUS - 1
tmp = array([PF, QF, PT, QT, MU_SF, MU_ST]) - shift
Pf, Qf, Pt, Qt, mu_Sf, mu_St = tmp
## add extra columns to branch
tmp = ones((branch.shape[0], 1)) * array([-360, 360])
tmp2 = zeros((branch.shape[0], 2))
if branch.shape[1] >= mu_St - 1:
branch = c_[ branch[:, 0:BR_STATUS + 1], tmp, branch[:, PF - 1:MU_ST + 1], tmp2 ]
elif branch.shape[1] >= QT - 1:
branch = c_[ branch[:, 0:BR_STATUS + 1], tmp, branch[:, PF - 1:QT + 1] ]
else:
branch = c_[ branch[:, 0:BR_STATUS + 1], tmp ]
return gen, branch
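if __name__ == '__main__':
    # Usage sketch (the case name below is hypothetical; a 'case9.mat' or 'case9.py'
    # file must exist next to this script for the call to succeed).
    ppc = loadcase('case9')
    if isinstance(ppc, dict):
        print('baseMVA: %s, %d buses' % (ppc['baseMVA'], ppc['bus'].shape[0]))
    else:
        print('loadcase failed with exit code %d' % ppc)
    # Individual matrices instead of a dict:
    # baseMVA, bus, gen, branch = loadcase('case9', return_as_obj=False,
    #                                      expect_gencost=False, expect_areas=False)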
|
praba230890/PYPOWER
|
pypower/loadcase.py
|
Python
|
bsd-3-clause
| 9,819
|
"""Application base, containing global templates."""
import datetime
from django.contrib.sitemaps import Sitemap
from django.core.urlresolvers import reverse
class PathsSitemap(Sitemap):
    def __init__(self, pages=None, priority=None, changefreq=None):
        self.pages = pages or []
self.changefreq = changefreq
self.priority = priority
def items(self):
return self.pages
def lastmod(self, item):
# The day sitemap generated
return datetime.datetime.now()
def location(self, item):
return item
class ViewsSitemap(Sitemap):
    def __init__(self, pages=None, priority=None, changefreq=None):
        self.pages = pages or []
self.changefreq = changefreq
self.priority = priority
def items(self):
return self.pages
def lastmod(self, item):
# The day sitemap generated
return datetime.datetime.now()
def location(self, item):
return reverse(item)
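# Wiring sketch (the URL names and routes below are hypothetical, shown only for illustration):
# from django.conf.urls import url
# from django.contrib.sitemaps.views import sitemap
# sitemaps = {
#     'pages': PathsSitemap(pages=['/about/', '/contact/'], changefreq='monthly', priority=0.5),
#     'views': ViewsSitemap(pages=['home'], changefreq='weekly'),
# }
# urlpatterns = [url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps})]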
|
elin-moco/ffclub
|
ffclub/base/__init__.py
|
Python
|
bsd-3-clause
| 965
|
DBNAME = ''
DBUSER = ''
DBPASSWORD = ''
DBHOST = ''
DBPORT = ''
DJ_SECRET_KEY = ''
DJ_DEBUG = True
DJ_ALLOWED_HOSTS = []
DJ_EMAIL_HOST = ''
DJ_EMAIL_PORT = ''
DJ_EMAIL_HOST_USER = ''
DJ_EMAIL_HOST_PASSWORD = ''
|
alfegupe/retro
|
retro/parameters-dist.py
|
Python
|
bsd-3-clause
| 213
|
# -*- coding: utf-8 -*-
"""Copyright (c) 2010-2012 David Rio Vierra
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE."""
import time
t= time.time()
import sys
import os
import json
import glob
import shutil
def win32_utf8_argv():
"""Uses shell32.GetCommandLineArgvW to get sys.argv as a list of UTF-8
strings.
Versions 2.5 and older of Python don't support Unicode in sys.argv on
Windows, with the underlying Windows API instead replacing multi-byte
characters with '?'.
Returns None on failure.
Example usage:
>>> def main(argv=None):
... if argv is None:
... argv = win32_utf8_argv() or sys.argv
...
"""
try:
from ctypes import POINTER, byref, cdll, c_int, windll
from ctypes.wintypes import LPCWSTR, LPWSTR
GetCommandLineW = cdll.kernel32.GetCommandLineW
GetCommandLineW.argtypes = []
GetCommandLineW.restype = LPCWSTR
CommandLineToArgvW = windll.shell32.CommandLineToArgvW
CommandLineToArgvW.argtypes = [LPCWSTR, POINTER(c_int)]
CommandLineToArgvW.restype = POINTER(LPWSTR)
cmd = GetCommandLineW()
argc = c_int(0)
argv = CommandLineToArgvW(cmd, byref(argc))
if argc.value > 0:
# # Remove Python executable if present
# if argc.value - len(sys.argv) == 1:
# start = 1
# else:
# start = 0
return [argv[i] for i in
xrange(0, argc.value)]
except Exception:
pass
def getDataDir(path=""):
"""
Returns the folder where the executable is located.
:return unicode
"""
# if sys.platform == "win32":
# def fsdecode(x):
# return x.decode(sys.getfilesystemencoding())
#
# dataDir = os.getcwdu()
# '''
# if getattr(sys, 'frozen', False):
# dataDir = os.path.dirname(sys._MEIPASS)
# else:
# dataDir = os.path.dirname(__file__)
# '''
#
# else:
dataDir = os.getcwdu()
if len(path) > 0:
return os.path.join(dataDir, path)
return dataDir
def win32_appdata():
# try to use win32 api to get the AppData folder since python doesn't populate os.environ with unicode strings.
try:
import win32com.client
objShell = win32com.client.Dispatch("WScript.Shell")
return objShell.SpecialFolders("AppData")
except Exception, e:
print "Error while getting AppData folder using WScript.Shell.SpecialFolders: {0!r}".format(e)
try:
from win32com.shell import shell, shellcon
return shell.SHGetPathFromIDListEx(
shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA)
)
except Exception, e:
print "Error while getting AppData folder using SHGetSpecialFolderLocation: {0!r}".format(e)
return os.environ['APPDATA'].decode(sys.getfilesystemencoding())
def getMinecraftProfileJSON():
"""Returns a dictionary object with the minecraft profile information"""
if os.path.isfile(os.path.join(getMinecraftLauncherDirectory(), "launcher_profiles.json")):
try:
with open(os.path.join(getMinecraftLauncherDirectory(), "launcher_profiles.json")) as jsonString:
minecraftProfilesJSON = json.load(jsonString)
return minecraftProfilesJSON
except:
return None
def getMinecraftProfileDirectory(profileName):
"""Returns the path to the sent minecraft profile directory"""
try:
profileDir = getMinecraftProfileJSON()['profiles'][profileName][
'gameDir'] # profileDir update to correct location.
return profileDir
except:
return os.path.join(getMinecraftLauncherDirectory())
def getMinecraftLauncherDirectory():
'''
Returns the /minecraft directory, note: may not contain the /saves folder!
'''
if sys.platform == "win32":
return os.path.join(win32_appdata(), ".minecraft")
elif sys.platform == "darwin":
return os.path.expanduser("~/Library/Application Support/minecraft")
else:
return os.path.expanduser("~/.minecraft")
def getDocumentsFolder():
if sys.platform == "win32":
try:
import win32com.client
from win32com.shell import shell, shellcon
objShell = win32com.client.Dispatch("WScript.Shell")
docsFolder = objShell.SpecialFolders("MyDocuments")
except Exception, e:
print e
try:
docsFolder = shell.SHGetFolderPath(0, shellcon.CSIDL_MYDOCUMENTS, 0, 0)
except Exception:
userprofile = os.environ['USERPROFILE'].decode(sys.getfilesystemencoding())
docsFolder = os.path.join(userprofile, "Documents")
elif sys.platform == "darwin":
docsFolder = os.path.expanduser("~/Documents")
else:
docsFolder = os.path.expanduser("~/.mcedit")
try:
os.mkdir(docsFolder)
except:
pass
return docsFolder
def getSelectedProfile():
"""
Gets the selected profile from the Minecraft Launcher
"""
try:
selectedProfile = getMinecraftProfileJSON()['selectedProfile']
return selectedProfile
except:
return None
_minecraftSaveFileDir = None
def getMinecraftSaveFileDir():
global _minecraftSaveFileDir
if _minecraftSaveFileDir is None:
_minecraftSaveFileDir = os.path.join(getMinecraftProfileDirectory(getSelectedProfile()), "saves")
return _minecraftSaveFileDir
minecraftSaveFileDir = getMinecraftSaveFileDir()
ini = u"mcedit.ini"
cache = u"usercache.json"
parentDir = os.path.dirname(getDataDir())
docsFolder = os.path.join(getDocumentsFolder(),'MCEdit')
if sys.platform != "darwin":
portableConfigFilePath = os.path.join(parentDir, ini)
portableCacheFilePath = os.path.join(parentDir, cache)
portableGenericSupportPath = os.path.join(parentDir)
portableSchematicsDir = os.path.join(parentDir, u"Schematics")
portableBrushesDir = os.path.join(parentDir, u"Brushes")
portableJarStorageDir = os.path.join(parentDir, u"ServerJarStorage")
portableFiltersDir = os.path.join(parentDir, u"Filters")
if not os.path.exists(parentDir):
os.makedirs(parentDir)
fixedCacheFilePath = os.path.join(docsFolder, cache)
fixedConfigFilePath = os.path.join(docsFolder, ini)
fixedGenericSupportPath = os.path.join(docsFolder)
fixedSchematicsDir = os.path.join(docsFolder, u"Schematics")
fixedBrushesDir = os.path.join(docsFolder, u"Brushes")
fixedJarStorageDir = os.path.join(docsFolder, u"ServerJarStorage")
fixedFiltersDir = os.path.join(docsFolder, u"Filters")
if not os.path.exists(docsFolder):
os.makedirs(docsFolder)
def goPortable():
if sys.platform == "darwin":
return False
global configFilePath, schematicsDir, filtersDir, portable, brushesDir
if os.path.exists(fixedSchematicsDir):
move_displace(fixedSchematicsDir, portableSchematicsDir)
if os.path.exists(fixedBrushesDir):
move_displace(fixedBrushesDir, portableBrushesDir)
if os.path.exists(fixedConfigFilePath):
move_displace(fixedConfigFilePath, portableConfigFilePath)
if os.path.exists(fixedFiltersDir):
move_displace(fixedFiltersDir, portableFiltersDir)
if os.path.exists(fixedCacheFilePath):
move_displace(fixedCacheFilePath, portableCacheFilePath)
if os.path.exists(fixedJarStorageDir):
move_displace(fixedJarStorageDir, portableJarStorageDir)
if filtersDir in sys.path:
sys.path.remove(filtersDir)
schematicsDir = portableSchematicsDir
brushesDir = portableBrushesDir
configFilePath = portableConfigFilePath
filtersDir = portableFiltersDir
sys.path.append(filtersDir)
portable = True
return True
def move_displace(src, dst):
dstFolder = os.path.basename(os.path.dirname(dst))
if not os.path.exists(dst):
print "Moving {0} to {1}".format(os.path.basename(src), dstFolder)
shutil.move(src, dst)
else:
olddst = dst + ".old"
i = 0
while os.path.exists(olddst):
olddst = dst + ".old" + str(i)
i += 1
print "{0} already found in {1}! Renamed it to {2}.".format(os.path.basename(src), dstFolder, dst)
os.rename(dst, olddst)
shutil.move(src, dst)
return True
def goFixed():
if sys.platform == "darwin":
return False
global configFilePath, schematicsDir, filtersDir, portable, cacheDir, brushesDir
if os.path.exists(portableSchematicsDir):
move_displace(portableSchematicsDir, fixedSchematicsDir)
if os.path.exists(portableBrushesDir):
move_displace(portableBrushesDir, fixedBrushesDir)
if os.path.exists(portableConfigFilePath):
move_displace(portableConfigFilePath, fixedConfigFilePath)
if os.path.exists(portableFiltersDir):
move_displace(portableFiltersDir, fixedFiltersDir)
if os.path.exists(portableCacheFilePath):
move_displace(portableCacheFilePath, fixedCacheFilePath)
if os.path.exists(portableJarStorageDir):
move_displace(portableJarStorageDir, fixedJarStorageDir)
if filtersDir in sys.path:
sys.path.remove(filtersDir)
schematicsDir = fixedSchematicsDir
brushesDir = fixedBrushesDir
configFilePath = fixedConfigFilePath
filtersDir = fixedFiltersDir
sys.path.append(filtersDir)
portable = False
def fixedConfigExists():
if sys.platform == "darwin":
return True
# Check for files at portable locations. Cannot be Mac because config doesn't move
return os.path.exists(fixedConfigFilePath) or not os.path.exists(portableConfigFilePath)
if fixedConfigExists():
print "Running in fixed mode. Support files are in your " + (
sys.platform == "darwin" and "App Support Folder (Available from the main menu of MCEdit)"
or "Documents folder.")
portable = False
if not sys.platform == "darwin":
schematicsDir = fixedSchematicsDir
brushesDir = fixedBrushesDir
configFilePath = fixedConfigFilePath
filtersDir = fixedFiltersDir
jarStorageDir = fixedJarStorageDir
genericSupportDir = fixedGenericSupportPath
else:
print "Running in portable mode. Support files are stored next to the MCEdit directory."
if not sys.platform == "darwin":
schematicsDir = portableSchematicsDir
brushesDir = portableBrushesDir
configFilePath = portableConfigFilePath
filtersDir = portableFiltersDir
jarStorageDir = portableJarStorageDir
genericSupportDir = portableGenericSupportPath
portable = True
#if portable:
# serverJarStorageDir = portableJarStorageDir
# ServerJarStorage.defaultCacheDir = serverJarStorageDir
# jarStorage = ServerJarStorage(serverJarStorageDir)
#else:
# serverJarStorageDir = fixedJarStorageDir
def getAllOfAFile(file_dir, ext):
'''
    Returns a list of all the files in the directory with the specified file extension
:param file_dir: Directory to search
:param ext: The file extension (IE: ".py")
'''
return glob.glob(file_dir+"/*"+ext)
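# Example (illustrative): getAllOfAFile(filtersDir, ".py") returns the paths of every
# filter script sitting directly inside the Filters directory.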
def getCacheDir():
"""
Returns the path to the cache folder.
This folder is the Application Support folder on OS X, and the Documents Folder on Windows.
:return unicode
"""
if sys.platform == "win32":
return genericSupportDir
elif sys.platform == "darwin":
return os.path.expanduser("~/Library/Application Support/pymclevel")
else:
try:
return genericSupportDir
except:
return os.path.expanduser("~/.pymclevel")
if sys.platform == "darwin":
configFilePath = os.path.expanduser("~/Library/Preferences/mcedit.ini")
schematicsDir = os.path.join(getCacheDir(), u"Schematics")
brushesDir = os.path.join(getCacheDir(), u"Brushes")
filtersDir = os.path.join(getCacheDir(), u"Filters")
if not os.path.exists(getCacheDir()):
os.makedirs(getCacheDir())
# Create pymclevel folder as needed
if not os.path.exists(getCacheDir()):
os.makedirs(getCacheDir())
# build the structures of directories if they don't exists
for directory in (filtersDir, brushesDir, schematicsDir):
if not os.path.exists(directory):
os.makedirs(directory)
bundledLibsDir = os.path.join(filtersDir, 'lib', 'Bundled Libraries')
if not os.path.exists(bundledLibsDir):
os.makedirs(bundledLibsDir)
# set userCachePath
userCachePath = os.path.join(getCacheDir(),'usercache.json')
# Make sure it exists
try:
if not os.path.exists(userCachePath):
f = open(userCachePath,'w')
f.write('{}')
f.close()
except:
print "Unable to make usercache.json at {}".format(userCachePath)
def getFiltersDir():
return filtersDir
|
gpmidi/MCEdit-Unified
|
directories.py
|
Python
|
isc
| 13,619
|
# coding: utf-8
import os
import json
from unipath import FSPath as Path
BASE_DIR = Path(__file__).absolute().parent.parent.parent
SECRETS_PATH = BASE_DIR.parent.child('conf').child('secrets.json')
try:
with open(SECRETS_PATH) as handle:
SECRETS = json.load(handle)
except IOError:
SECRETS = {
'secret_key': 'a',
}
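# Expected shape of conf/secrets.json (the values below are placeholders); the db_* keys
# feed the DATABASES setting further down:
# {
#     "secret_key": "long-random-string",
#     "db_name": "djangomx",
#     "db_user": "djangomx",
#     "db_password": "...",
#     "db_host": "localhost"
# }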
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Admin', 'me@netoxico.com'),
)
MANAGERS = ADMINS
ALLOWED_HOSTS = ['django.mx']
TIME_ZONE = 'America/Mexico_City'
LANGUAGE_CODE = 'es-mx'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'public', 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'djangomx.urls'
SECRET_KEY = str(SECRETS['secret_key'])
WSGI_APPLICATION = 'djangomx.wsgi.application'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': SECRETS.get('db_name', ''),
'USER': SECRETS.get('db_user', ''),
'PASSWORD': SECRETS.get('db_password', ''),
'HOST': SECRETS.get('db_host', ''),
}
}
INSTALLED_APPS = (
'suit',
'suit_redactor',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'compressor',
'sorl.thumbnail',
'blog',
'contact',
'courses',
'jobs',
'utilities',
'newsletter',
'accounts'
)
COMPRESS_ENABLED = True
COMPRESS_OFFLINE = False
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc {infile} {outfile}'),
)
LOGGING = {
"version": 1,
"disable_existing_loggers": True,
"formatters": {
"simple": {"format": "[%(name)s] %(levelname)s: %(message)s"},
"full": {"format": "%(asctime)s [%(name)s] %(levelname)s: %(message)s"},
},
"filters": {
"require_debug_false": {
"()": "django.utils.log.RequireDebugFalse",
},
},
"handlers": {
"mail_admins": {
"level": "ERROR",
"include_html": True,
"filters": ['require_debug_false'],
"class": "django.utils.log.AdminEmailHandler",
},
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "simple",
},
},
"loggers": {
"django.request": {
"handlers": ["mail_admins"],
"level": "ERROR",
"propagate": False,
},
}
}
DEFAULT_FROM_EMAIL = 'Django Mexico <no-reply@django.mx>'
# Sorl thumbnail settings
THUMBNAIL_DEBUG = True
THUMBNAIL_KVSTORE = 'sorl.thumbnail.kvstores.redis_kvstore.KVStore'
THUMBNAIL_QUALITY = 100
# Django SUIT configuration
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as TCP
TEMPLATE_CONTEXT_PROCESSORS = TCP + (
'django.core.context_processors.request',
)
|
kushiahu/djangomx
|
djangomx/djangomx/settings/common.py
|
Python
|
mit
| 4,490
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import os, base64, re
import hashlib
import mimetypes
from frappe.utils import get_site_path, get_hook_method, get_files_path, random_string, encode, cstr, call_hook_method, cint
from frappe import _
from frappe import conf
from copy import copy
import urllib
class MaxFileSizeReachedError(frappe.ValidationError): pass
def get_file_url(file_data_name):
data = frappe.db.get_value("File", file_data_name, ["file_name", "file_url"], as_dict=True)
return data.file_url or data.file_name
def upload():
# get record details
dt = frappe.form_dict.doctype
dn = frappe.form_dict.docname
folder = frappe.form_dict.folder
file_url = frappe.form_dict.file_url
filename = frappe.form_dict.filename
is_private = cint(frappe.form_dict.is_private)
if not filename and not file_url:
frappe.msgprint(_("Please select a file or url"),
raise_exception=True)
# save
if filename:
filedata = save_uploaded(dt, dn, folder, is_private)
elif file_url:
filedata = save_url(file_url, dt, dn, folder)
comment = {}
if dt and dn:
comment = frappe.get_doc(dt, dn).add_comment("Attachment",
_("Added {0}").format("<a href='{file_url}' target='_blank'>{file_name}</a>{icon}".format(**{
"icon": ' <i class="icon icon-lock text-warning"></i>' if filedata.is_private else "",
"file_url": filedata.file_url.replace("#", "%23") if filedata.file_name else filedata.file_url,
"file_name": filedata.file_name or filedata.file_url
})))
return {
"name": filedata.name,
"file_name": filedata.file_name,
"file_url": filedata.file_url,
"comment": comment.as_dict() if comment else {}
}
def save_uploaded(dt, dn, folder, is_private):
fname, content = get_uploaded_content()
if content:
        return save_file(fname, content, dt, dn, folder, is_private=is_private)
else:
raise Exception
def save_url(file_url, dt, dn, folder):
# if not (file_url.startswith("http://") or file_url.startswith("https://")):
# frappe.msgprint("URL must start with 'http://' or 'https://'")
# return None, None
file_url = urllib.unquote(file_url)
f = frappe.get_doc({
"doctype": "File",
"file_url": file_url,
"attached_to_doctype": dt,
"attached_to_name": dn,
"folder": folder
})
f.flags.ignore_permissions = True
try:
        f.insert()
except frappe.DuplicateEntryError:
return frappe.get_doc("File", f.duplicate_entry)
return f
def get_uploaded_content():
# should not be unicode when reading a file, hence using frappe.form
if 'filedata' in frappe.form_dict:
if "," in frappe.form_dict.filedata:
frappe.form_dict.filedata = frappe.form_dict.filedata.rsplit(",", 1)[1]
frappe.uploaded_content = base64.b64decode(frappe.form_dict.filedata)
frappe.uploaded_filename = frappe.form_dict.filename
return frappe.uploaded_filename, frappe.uploaded_content
else:
frappe.msgprint(_('No file attached'))
return None, None
def extract_images_from_doc(doc, fieldname):
content = doc.get(fieldname)
content = extract_images_from_html(doc, content)
if frappe.flags.has_dataurl:
doc.set(fieldname, content)
def extract_images_from_html(doc, content):
frappe.flags.has_dataurl = False
def _save_file(match):
data = match.group(1)
data = data.split("data:")[1]
headers, content = data.split(",")
if "filename=" in headers:
filename = headers.split("filename=")[-1]
# decode filename
if not isinstance(filename, unicode):
filename = unicode(filename, 'utf-8')
else:
mtype = headers.split(";")[0]
filename = get_random_filename(content_type=mtype)
doctype = doc.parenttype if doc.parent else doc.doctype
name = doc.parent or doc.name
# TODO fix this
file_url = save_file(filename, content, doctype, name, decode=True).get("file_url")
if not frappe.flags.has_dataurl:
frappe.flags.has_dataurl = True
return '<img src="{file_url}"'.format(file_url=file_url)
if content:
content = re.sub('<img[^>]*src\s*=\s*["\'](?=data:)(.*?)["\']', _save_file, content)
return content
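# Illustrative sketch of what extract_images_from_html does (the data URL and
# resulting path below are made up, not from the original source): an embedded
# image such as
#   <img src="data:image/png;base64,iVBORw0KGgo...">
# is base64-decoded, stored through save_file(), and the matched src portion of
# the tag is rewritten to point at the stored file, e.g.
#   <img src="/files/abc1234.png">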
def get_random_filename(extn=None, content_type=None):
if extn:
if not extn.startswith("."):
extn = "." + extn
elif content_type:
extn = mimetypes.guess_extension(content_type)
return random_string(7) + (extn or "")
def save_file(fname, content, dt, dn, folder=None, decode=False, is_private=0):
if decode:
if isinstance(content, unicode):
content = content.encode("utf-8")
if "," in content:
content = content.split(",")[1]
content = base64.b64decode(content)
file_size = check_max_file_size(content)
content_hash = get_content_hash(content)
content_type = mimetypes.guess_type(fname)[0]
fname = get_file_name(fname, content_hash[-6:])
file_data = get_file_data_from_hash(content_hash, is_private=is_private)
if not file_data:
call_hook_method("before_write_file", file_size=file_size)
write_file_method = get_hook_method('write_file', fallback=save_file_on_filesystem)
file_data = write_file_method(fname, content, content_type=content_type, is_private=is_private)
file_data = copy(file_data)
file_data.update({
"doctype": "File",
"attached_to_doctype": dt,
"attached_to_name": dn,
"folder": folder,
"file_size": file_size,
"content_hash": content_hash,
"is_private": is_private
})
f = frappe.get_doc(file_data)
f.flags.ignore_permissions = True
try:
f.insert()
except frappe.DuplicateEntryError:
return frappe.get_doc("File", f.duplicate_entry)
return f
def get_file_data_from_hash(content_hash, is_private=0):
for name in frappe.db.sql_list("select name from `tabFile` where content_hash=%s and is_private=%s", (content_hash, is_private)):
b = frappe.get_doc('File', name)
return {k:b.get(k) for k in frappe.get_hooks()['write_file_keys']}
return False
def save_file_on_filesystem(fname, content, content_type=None, is_private=0):
fpath = write_file(content, fname, is_private)
if is_private:
file_url = "/private/files/{0}".format(fname)
else:
file_url = "/files/{0}".format(fname)
return {
'file_name': os.path.basename(fpath),
'file_url': file_url
}
def check_max_file_size(content):
max_file_size = conf.get('max_file_size') or 10485760
file_size = len(content)
if file_size > max_file_size:
frappe.msgprint(_("File size exceeded the maximum allowed size of {0} MB").format(
max_file_size / 1048576),
raise_exception=MaxFileSizeReachedError)
return file_size
def write_file(content, fname, is_private=0):
"""write file to disk with a random name (to compare)"""
file_path = get_files_path(is_private=is_private)
# create directory (if not exists)
frappe.create_folder(file_path)
# write the file
with open(os.path.join(file_path.encode('utf-8'), fname.encode('utf-8')), 'w+') as f:
f.write(content)
return get_files_path(fname, is_private=is_private)
def remove_all(dt, dn):
"""remove all files in a transaction"""
try:
for fid in frappe.db.sql_list("""select name from `tabFile` where
attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)):
remove_file(fid, dt, dn)
except Exception, e:
		if e.args[0]!=1054: raise # temporary: tolerate missing column (MySQL error 1054) till patched
def remove_file_by_url(file_url, doctype=None, name=None):
if doctype and name:
fid = frappe.db.get_value("File", {"file_url": file_url,
"attached_to_doctype": doctype, "attached_to_name": name})
else:
fid = frappe.db.get_value("File", {"file_url": file_url})
if fid:
return remove_file(fid)
def remove_file(fid, attached_to_doctype=None, attached_to_name=None):
"""Remove file and File entry"""
file_name = None
if not (attached_to_doctype and attached_to_name):
attached = frappe.db.get_value("File", fid,
["attached_to_doctype", "attached_to_name", "file_name"])
if attached:
attached_to_doctype, attached_to_name, file_name = attached
ignore_permissions, comment = False, None
if attached_to_doctype and attached_to_name:
doc = frappe.get_doc(attached_to_doctype, attached_to_name)
ignore_permissions = doc.has_permission("write") or False
if not file_name:
file_name = frappe.db.get_value("File", fid, "file_name")
comment = doc.add_comment("Attachment Removed", _("Removed {0}").format(file_name))
frappe.delete_doc("File", fid, ignore_permissions=ignore_permissions)
return comment
def delete_file_data_content(doc, only_thumbnail=False):
method = get_hook_method('delete_file_data_content', fallback=delete_file_from_filesystem)
method(doc, only_thumbnail=only_thumbnail)
def delete_file_from_filesystem(doc, only_thumbnail=False):
"""Delete file, thumbnail from File document"""
if only_thumbnail:
delete_file(doc.thumbnail_url)
else:
delete_file(doc.file_url)
delete_file(doc.thumbnail_url)
def delete_file(path):
"""Delete file from `public folder`"""
if path:
if ".." in path.split("/"):
frappe.msgprint(_("It is risky to delete this file: {0}. Please contact your System Manager.").format(path))
parts = os.path.split(path.strip("/"))
if parts[0]=="files":
path = frappe.utils.get_site_path("public", "files", parts[-1])
else:
path = frappe.utils.get_site_path("private", "files", parts[-1])
path = encode(path)
if os.path.exists(path):
os.remove(path)
def get_file(fname):
"""Returns [`file_name`, `content`] for given file name `fname`"""
file_path = get_file_path(fname)
# read the file
with open(encode(file_path), 'r') as f:
content = f.read()
return [file_path.rsplit("/", 1)[-1], content]
def get_file_path(file_name):
"""Returns file path from given file name"""
f = frappe.db.sql("""select file_url from `tabFile`
where name=%s or file_name=%s""", (file_name, file_name))
if f:
file_name = f[0][0]
file_path = file_name
if "/" not in file_path:
file_path = "/files/" + file_path
if file_path.startswith("/private/files/"):
file_path = get_files_path(*file_path.split("/private/files/", 1)[1].split("/"), is_private=1)
elif file_path.startswith("/files/"):
file_path = get_files_path(*file_path.split("/files/", 1)[1].split("/"))
else:
frappe.throw(_("There is some problem with the file url: {0}").format(file_path))
return file_path
def get_content_hash(content):
return hashlib.md5(content).hexdigest()
def get_file_name(fname, optional_suffix):
# convert to unicode
fname = cstr(fname)
n_records = frappe.db.sql("select name from `tabFile` where file_name=%s", fname)
if len(n_records) > 0 or os.path.exists(encode(get_files_path(fname))):
f = fname.rsplit('.', 1)
if len(f) == 1:
partial, extn = f[0], ""
else:
partial, extn = f[0], "." + f[1]
return '{partial}{suffix}{extn}'.format(partial=partial, extn=extn, suffix=optional_suffix)
return fname
|
vqw/frappe
|
frappe/utils/file_manager.py
|
Python
|
mit
| 10,724
|
"""Test case for autocomplete implementations."""
import uuid
from django import VERSION
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.utils import six
from sbo_selenium import SeleniumTestCase
from selenium.common.exceptions import NoSuchElementException
class AutocompleteTestCase(SeleniumTestCase):
"""Provide a class-persistent selenium instance and assertions."""
class AdminMixin(object):
"""Mixin for tests that should happen in ModelAdmin."""
def get(self, url):
"""Get a URL, logs in if necessary."""
super(AdminMixin, self).get(url)
try:
self.sel.find_element_by_css_selector('input[value="Log in"]')
except NoSuchElementException:
return
username = self.sel.find_element_by_name('username')
if username.get_attribute('value') != 'test':
username.send_keys('test')
        password = self.sel.find_element_by_name('password')
if password.get_attribute('value') != 'test':
password.send_keys('test')
self.sel.find_element_by_css_selector('input[value="Log in"]').click()
def get_modeladmin_url(self, action, **kwargs):
"""Return a modeladmin url for a model and action."""
return reverse('admin:%s_%s_%s' % (
self.model._meta.app_label,
self.model._meta.model_name,
action
), kwargs=kwargs)
def fill_name(self):
"""Fill in the name input."""
i = self.id()
        half = len(i) // 2  # rotate by half so the generated name differs from the test id
not_id = i[half:] + i[:half]
self.enter_text('[name=name]', not_id)
class OptionMixin(object):
"""Mixin to make a unique option per test."""
def create_option(self):
"""Create a unique option from self.model into self.option."""
unique_name = six.text_type(uuid.uuid1())
if VERSION < (1, 10):
# Support for the name to be changed through a popup in the admin.
unique_name = unique_name.replace('-', '')
option, created = self.model.objects.get_or_create(
name=unique_name)
return option
class ContentTypeOptionMixin(OptionMixin):
"""Same as option mixin, with content type."""
def create_option(self):
"""Return option, content type."""
option = super(ContentTypeOptionMixin, self).create_option()
ctype = ContentType.objects.get_for_model(option)
return option, ctype
|
Eraldo/django-autocomplete-light
|
src/dal/test/case.py
|
Python
|
mit
| 2,502
|
"""Test script for poplib module."""
# Modified by Giampaolo Rodola' to give poplib.POP3 and poplib.POP3_SSL
# a real test suite
import poplib
import asyncore
import asynchat
import socket
import os
import time
import errno
from unittest import TestCase, skipUnless
from test import support as test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0
# the dummy data returned by server when LIST and RETR commands are issued
LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n'
RETR_RESP = b"""From: postmaster@python.org\
\r\nContent-Type: text/plain\r\n\
MIME-Version: 1.0\r\n\
Subject: Dummy\r\n\
\r\n\
line1\r\n\
line2\r\n\
line3\r\n\
.\r\n"""
class DummyPOP3Handler(asynchat.async_chat):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = b''.join(self.in_buffer)
line = str(line, 'ISO-8859-1')
self.in_buffer = []
cmd = line.split(' ')[0].lower()
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
self.push('-ERR unrecognized POP3 command "%s".' %cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data.encode("ISO-8859-1") + b'\r\n')
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
if arg != "guido":
self.push("-ERR no such user")
self.push('+OK password required')
def cmd_pass(self, arg):
if arg != "python":
self.push("-ERR wrong password")
self.push('+OK 10 messages')
def cmd_stat(self, arg):
self.push('+OK 10 100')
def cmd_list(self, arg):
if arg:
self.push('+OK %s %s' % (arg, arg))
else:
self.push('+OK')
asynchat.async_chat.push(self, LIST_RESP)
cmd_uidl = cmd_list
def cmd_retr(self, arg):
self.push('+OK %s bytes' %len(RETR_RESP))
asynchat.async_chat.push(self, RETR_RESP)
cmd_top = cmd_retr
def cmd_dele(self, arg):
self.push('+OK message marked for deletion.')
def cmd_noop(self, arg):
self.push('+OK done nothing.')
def cmd_rpop(self, arg):
self.push('+OK done nothing.')
def cmd_apop(self, arg):
self.push('+OK done nothing.')
def cmd_quit(self, arg):
self.push('+OK closing.')
self.close_when_done()
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
handler = DummyPOP3Handler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accepted(self, conn, addr):
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
class TestPOP3Class(TestCase):
def assertOK(self, resp):
self.assertTrue(resp.startswith(b"+OK"))
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
def tearDown(self):
self.client.close()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(),
b'+OK dummy pop3 server ready. <timestamp>')
def test_exceptions(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd, 'echo -err')
def test_user(self):
self.assertOK(self.client.user('guido'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_pass_(self):
self.assertOK(self.client.pass_('python'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_stat(self):
self.assertEqual(self.client.stat(), (10, 100))
def test_list(self):
self.assertEqual(self.client.list()[1:],
([b'1 1', b'2 2', b'3 3', b'4 4', b'5 5'],
25))
self.assertTrue(self.client.list('1').endswith(b"OK 1 1"))
def test_retr(self):
expected = (b'+OK 116 bytes',
[b'From: postmaster@python.org', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy',
b'', b'line1', b'line2', b'line3'],
113)
foo = self.client.retr('foo')
self.assertEqual(foo, expected)
def test_too_long_lines(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd,
'echo +%s' % ((poplib._MAXLINE + 10) * 'a'))
def test_dele(self):
self.assertOK(self.client.dele('foo'))
def test_noop(self):
self.assertOK(self.client.noop())
def test_rpop(self):
self.assertOK(self.client.rpop('foo'))
def test_apop(self):
self.assertOK(self.client.apop('foo', 'dummypassword'))
def test_top(self):
expected = (b'+OK 116 bytes',
[b'From: postmaster@python.org', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy', b'',
b'line1', b'line2', b'line3'],
113)
self.assertEqual(self.client.top(1, 1), expected)
def test_uidl(self):
self.client.uidl()
self.client.uidl('foo')
def test_quit(self):
resp = self.client.quit()
self.assertTrue(resp)
self.assertIsNone(self.client.sock)
self.assertIsNone(self.client.file)
SUPPORTS_SSL = False
if hasattr(poplib, 'POP3_SSL'):
import ssl
SUPPORTS_SSL = True
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem")
class DummyPOP3_SSLHandler(DummyPOP3Handler):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
ssl_socket = ssl.wrap_socket(self.socket, certfile=CERTFILE,
server_side=True,
do_handshake_on_connect=False)
self.del_channel()
self.set_socket(ssl_socket)
# Must try handshake before calling push()
self._ssl_accepting = True
self._do_ssl_handshake()
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except socket.error as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self._ssl_accepting = False
def handle_read(self):
if self._ssl_accepting:
self._do_ssl_handshake()
else:
DummyPOP3Handler.handle_read(self)
requires_ssl = skipUnless(SUPPORTS_SSL, 'SSL not supported')
@requires_ssl
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.handler = DummyPOP3_SSLHandler
self.server.start()
self.client = poplib.POP3_SSL(self.server.host, self.server.port)
def test__all__(self):
self.assertIn('POP3_SSL', poplib.__all__)
def test_context(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, certfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE,
certfile=CERTFILE, context=ctx)
self.client.quit()
self.client = poplib.POP3_SSL(self.server.host, self.server.port,
context=ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.assertIs(self.client.sock.context, ctx)
self.assertTrue(self.client.noop().startswith(b'+OK'))
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(60) # Safety net. Look issue 11812
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=self.server, args=(self.evt,self.sock))
self.thread.setDaemon(True)
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
del self.thread # Clear out any dangling Thread objects.
def server(self, evt, serv):
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
conn.send(b"+ Hola mundo\n")
conn.close()
except socket.timeout:
pass
finally:
serv.close()
def testTimeoutDefault(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def testTimeoutNone(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(pop.sock.gettimeout())
pop.sock.close()
def testTimeoutValue(self):
pop = poplib.POP3(HOST, self.port, timeout=30)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def test_main():
tests = [TestPOP3Class, TestTimeouts,
TestPOP3_SSLClass]
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
|
timm/timmnix
|
pypy3-v5.5.0-linux64/lib-python/3/test/test_poplib.py
|
Python
|
mit
| 11,954
|
#!/usr/bin/env python
# Helpful little script that spits out a comma-separated list of
# language codes for Qt translations that should be included
# in binary bytecoin distributions
import glob
import os
import re
import sys
if len(sys.argv) != 3:
sys.exit("Usage: %s $QTDIR/translations $BYTECOINDIR/src/qt/locale"%sys.argv[0])
d1 = sys.argv[1]
d2 = sys.argv[2]
l1 = set([ re.search(r'qt_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d1, 'qt_*.qm')) ])
l2 = set([ re.search(r'bytecoin_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d2, 'bytecoin_*.qm')) ])
print ",".join(sorted(l1.intersection(l2)))
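# Illustrative run (file names made up, not part of the original script): if
# $QTDIR/translations holds qt_de.qm and qt_es.qm, and the bytecoin locale
# directory holds bytecoin_de.qm and bytecoin_es.qm, the script prints "de,es".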
|
ahmedbodi/bytecoin
|
contrib/qt_translations.py
|
Python
|
mit
| 620
|
'''Visualization of the filters of VGG16, via gradient ascent in input space.
This script can run on CPU in a few minutes.
Results example: http://i.imgur.com/4nj4KjN.jpg
'''
from __future__ import print_function
from scipy.misc import imsave
import numpy as np
import time
from keras.applications import vgg16
from keras import backend as K
# dimensions of the generated pictures for each filter.
img_width = 128
img_height = 128
# the name of the layer we want to visualize
# (see model definition at keras/applications/vgg16.py)
layer_name = 'block5_conv1'
# util function to convert a tensor into a valid image
def deprocess_image(x):
# normalize tensor: center on 0., ensure std is 0.1
x -= x.mean()
x /= (x.std() + 1e-5)
x *= 0.1
# clip to [0, 1]
x += 0.5
x = np.clip(x, 0, 1)
# convert to RGB array
x *= 255
if K.image_data_format() == 'channels_first':
x = x.transpose((1, 2, 0))
x = np.clip(x, 0, 255).astype('uint8')
return x
# build the VGG16 network with ImageNet weights
model = vgg16.VGG16(weights='imagenet', include_top=False)
print('Model loaded.')
model.summary()
# this is the placeholder for the input images
input_img = model.input
# get the symbolic outputs of each "key" layer (we gave them unique names).
layer_dict = dict([(layer.name, layer) for layer in model.layers[1:]])
def normalize(x):
# utility function to normalize a tensor by its L2 norm
return x / (K.sqrt(K.mean(K.square(x))) + 1e-5)
kept_filters = []
for filter_index in range(200):
# we only scan through the first 200 filters,
# but there are actually 512 of them
print('Processing filter %d' % filter_index)
start_time = time.time()
# we build a loss function that maximizes the activation
# of the nth filter of the layer considered
layer_output = layer_dict[layer_name].output
if K.image_data_format() == 'channels_first':
loss = K.mean(layer_output[:, filter_index, :, :])
else:
loss = K.mean(layer_output[:, :, :, filter_index])
# we compute the gradient of the input picture wrt this loss
grads = K.gradients(loss, input_img)[0]
# normalization trick: we normalize the gradient
grads = normalize(grads)
# this function returns the loss and grads given the input picture
iterate = K.function([input_img], [loss, grads])
# step size for gradient ascent
step = 1.
# we start from a gray image with some random noise
if K.image_data_format() == 'channels_first':
input_img_data = np.random.random((1, 3, img_width, img_height))
else:
input_img_data = np.random.random((1, img_width, img_height, 3))
input_img_data = (input_img_data - 0.5) * 20 + 128
# we run gradient ascent for 20 steps
for i in range(20):
loss_value, grads_value = iterate([input_img_data])
input_img_data += grads_value * step
print('Current loss value:', loss_value)
if loss_value <= 0.:
# some filters get stuck to 0, we can skip them
break
# decode the resulting input image
if loss_value > 0:
img = deprocess_image(input_img_data[0])
kept_filters.append((img, loss_value))
end_time = time.time()
print('Filter %d processed in %ds' % (filter_index, end_time - start_time))
# we will stitch the best 64 filters on an 8 x 8 grid.
n = 8
# the filters that have the highest loss are assumed to be better-looking.
# we will only keep the top 64 filters.
kept_filters.sort(key=lambda x: x[1], reverse=True)
kept_filters = kept_filters[:n * n]
# build a black picture with enough space for
# our 8 x 8 filters of size 128 x 128, with a 5px margin in between
margin = 5
width = n * img_width + (n - 1) * margin
height = n * img_height + (n - 1) * margin
stitched_filters = np.zeros((width, height, 3))
# fill the picture with our saved filters
for i in range(n):
for j in range(n):
img, loss = kept_filters[i * n + j]
stitched_filters[(img_width + margin) * i: (img_width + margin) * i + img_width,
(img_height + margin) * j: (img_height + margin) * j + img_height, :] = img
# save the result to disk
imsave('stitched_filters_%dx%d.png' % (n, n), stitched_filters)
|
infilect/ml-course1
|
keras-scripts/conv_filter_visualization.py
|
Python
|
mit
| 4,274
|
class Series(object):
def __init__(self, name, data):
self.name = name
self.data = data
def to_javascript(self):
jsc = "{"
jsc += "name: '" + self.name + "',"
jsc += "data: " + str(self.data) + "}"
return jsc
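# Usage sketch (illustrative, not part of the original file): the series name
# and data below are placeholders.
#   Series('sales', [1, 2, 3]).to_javascript()
# returns the JavaScript object literal "{name: 'sales',data: [1, 2, 3]}".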
|
jpmfribeiro/PyCharts
|
pycharts/fields/series/series.py
|
Python
|
mit
| 268
|
#!/usr/bin/env python
'''
NAME
latex2png - Converts LaTeX source to PNG file
SYNOPSIS
latex2png [options] INFILE
DESCRIPTION
This filter reads LaTeX source text from the input file
INFILE (or stdin if INFILE is -) and renders it to PNG image file.
Typically used to render math equations.
Requires latex(1), dvipng(1) commands and LaTeX math packages.
OPTIONS
-D DPI
Set the output resolution to DPI dots per inch. Use this option to
scale the output image size.
-o OUTFILE
The file name of the output file. If not specified the output file is
named like INFILE but with a .png file name extension.
-m
        Skip if the PNG output file is newer than the INFILE.
Compares timestamps on INFILE and OUTFILE. If
INFILE is - (stdin) then compares MD5 checksum stored in file
named like OUTFILE but with a .md5 file name extension.
The .md5 file is created if the -m option is used and the
INFILE is - (stdin).
-v
Verbosely print processing information to stderr.
--help, -h
Print this documentation.
--version
Print program version number.
SEE ALSO
latex(1), dvipng(1)
AUTHOR
Written by Stuart Rackham, <srackham@gmail.com>
The code was inspired by Kjell Magne Fauske's code:
http://fauskes.net/nb/htmleqII/
See also:
http://www.amk.ca/python/code/mt-math
http://code.google.com/p/latexmath2png/
COPYING
Copyright (C) 2010 Stuart Rackham. Free use of this software is
granted under the terms of the MIT License.
'''
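# Illustrative invocations based on the OPTIONS above (file names are
# placeholders, not taken from the original script):
#   latex2png -D 120 -o equation.png equation.tex
#   cat equation.tex | latex2png -m -o equation.png -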
# Suppress warning: "the md5 module is deprecated; use hashlib instead"
import warnings
warnings.simplefilter('ignore',DeprecationWarning)
import os, sys, tempfile, md5
VERSION = '0.1.0'
# Include LaTeX packages and commands here.
TEX_HEADER = r'''\documentclass{article}
\usepackage{amsmath}
\usepackage{amsthm}
\usepackage{amssymb}
\usepackage{bm}
\newcommand{\mx}[1]{\mathbf{\bm{#1}}} % Matrix command
\newcommand{\vc}[1]{\mathbf{\bm{#1}}} % Vector command
\newcommand{\T}{\text{T}} % Transpose
\pagestyle{empty}
\begin{document}'''
TEX_FOOTER = r'''\end{document}'''
# Globals.
verbose = False
class EApp(Exception): pass # Application specific exception.
def print_stderr(line):
sys.stderr.write(line + os.linesep)
def print_verbose(line):
if verbose:
print_stderr(line)
def run(cmd):
global verbose
if verbose:
cmd += ' 1>&2'
else:
cmd += ' 2>%s 1>&2' % os.devnull
print_verbose('executing: %s' % cmd)
if os.system(cmd):
raise EApp, 'failed command: %s' % cmd
def latex2png(infile, outfile, dpi, modified):
'''Convert LaTeX input file infile to PNG file named outfile.'''
outfile = os.path.abspath(outfile)
outdir = os.path.dirname(outfile)
if not os.path.isdir(outdir):
raise EApp, 'directory does not exist: %s' % outdir
texfile = tempfile.mktemp(suffix='.tex', dir=os.path.dirname(outfile))
basefile = os.path.splitext(texfile)[0]
dvifile = basefile + '.dvi'
temps = [basefile + ext for ext in ('.tex','.dvi', '.aux', '.log')]
skip = False
if infile == '-':
tex = sys.stdin.read()
checksum = md5.new(tex).digest()
f = os.path.splitext(outfile)[0] + '.md5'
if modified:
if os.path.isfile(f) and os.path.isfile(outfile) and \
checksum == open(f,'rb').read():
skip = True
open(f,'wb').write(checksum)
else:
if not os.path.isfile(infile):
raise EApp, 'input file does not exist: %s' % infile
tex = open(infile).read()
if modified and os.path.isfile(outfile) and \
os.path.getmtime(infile) <= os.path.getmtime(outfile):
skip = True
if skip:
print_verbose('skipped: no change: %s' % outfile)
return
tex = '%s\n%s\n%s\n' % (TEX_HEADER, tex.strip(), TEX_FOOTER)
print_verbose('tex:\n%s' % tex)
open(texfile, 'w').write(tex)
saved_pwd = os.getcwd()
os.chdir(outdir)
try:
# Compile LaTeX document to DVI file.
run('latex %s' % texfile)
# Convert DVI file to PNG.
cmd = 'dvipng'
if dpi:
cmd += ' -D %s' % dpi
cmd += ' -T tight -x 1000 -z 9 -bg Transparent -o "%s" "%s"' \
% (outfile,dvifile)
run(cmd)
finally:
os.chdir(saved_pwd)
for f in temps:
if os.path.isfile(f):
print_verbose('deleting: %s' % f)
os.remove(f)
def usage(msg=''):
if msg:
print_stderr(msg)
print_stderr('\n'
'usage:\n'
' latex2png [options] INFILE\n'
'\n'
'options:\n'
' -D DPI\n'
' -o OUTFILE\n'
' -m\n'
' -v\n'
' --help\n'
' --version')
def main():
# Process command line options.
global verbose
dpi = None
outfile = None
modified = False
import getopt
opts,args = getopt.getopt(sys.argv[1:], 'D:o:mhv', ['help','version'])
for o,v in opts:
if o in ('--help','-h'):
print __doc__
sys.exit(0)
if o =='--version':
print('latex2png version %s' % (VERSION,))
sys.exit(0)
if o == '-D': dpi = v
if o == '-o': outfile = v
if o == '-m': modified = True
if o == '-v': verbose = True
if len(args) != 1:
usage()
sys.exit(1)
infile = args[0]
if dpi and not dpi.isdigit():
usage('invalid DPI')
sys.exit(1)
if outfile is None:
if infile == '-':
usage('OUTFILE must be specified')
sys.exit(1)
outfile = os.path.splitext(infile)[0] + '.png'
# Do the work.
latex2png(infile, outfile, dpi, modified)
# Print something to suppress asciidoc 'no output from filter' warnings.
if infile == '-':
sys.stdout.write(' ')
if __name__ == "__main__":
try:
main()
except SystemExit:
raise
except KeyboardInterrupt:
sys.exit(1)
except Exception, e:
print_stderr("%s: %s" % (os.path.basename(sys.argv[0]), str(e)))
sys.exit(1)
|
tectronics/evergreen-mobile-opac
|
dev/asciidoc/filters/latex/latex2png.py
|
Python
|
mit
| 6,352
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core import AzCommandsLoader
import azure.cli.command_modules.batch._help # pylint: disable=unused-import
from azure.cli.command_modules.batch._exception_handler import batch_exception_handler
from azure.cli.command_modules.batch._command_type import BatchCommandGroup
class BatchCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from azure.cli.core.commands import CliCommandType
from azure.cli.core.profiles import ResourceType
batch_custom = CliCommandType(
operations_tmpl='azure.cli.command_modules.batch.custom#{}',
exception_handler=batch_exception_handler)
super(BatchCommandsLoader, self).__init__(cli_ctx=cli_ctx,
custom_command_type=batch_custom,
command_group_cls=BatchCommandGroup,
resource_type=ResourceType.MGMT_BATCH)
self.module_name = __name__
def load_command_table(self, args):
from azure.cli.command_modules.batch.commands import load_command_table
load_command_table(self, args)
return self.command_table
def load_arguments(self, command):
from azure.cli.command_modules.batch._params import load_arguments
load_arguments(self, command)
COMMAND_LOADER_CLS = BatchCommandsLoader
|
yugangw-msft/azure-cli
|
src/azure-cli/azure/cli/command_modules/batch/__init__.py
|
Python
|
mit
| 1,754
|
from .core import UnitedStates
from ..registry_tools import iso_register
from ..core import MON
@iso_register('US-IL')
class Illinois(UnitedStates):
"""Illinois"""
include_thanksgiving_friday = True
include_lincoln_birthday = True
include_election_day_even = True
class ChicagoIllinois(Illinois):
"Chicago, Illinois"
include_thanksgiving_friday = False
def get_pulaski_day(self, year):
"""
Return Casimir Pulaski Day.
Defined on the first MON of March.
ref: https://en.wikipedia.org/wiki/Casimir_Pulaski_Day
"""
day = self.get_nth_weekday_in_month(year, 3, MON)
return (
day,
"Casimir Pulaski Day"
)
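    # Worked example (assuming the calendar helpers behave as their names
    # suggest): the first Monday of March 2021 is March 1, so
    # get_pulaski_day(2021) yields (date(2021, 3, 1), "Casimir Pulaski Day").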
def get_variable_days(self, year):
days = super().get_variable_days(year)
days.append(self.get_pulaski_day(year))
return days
|
novafloss/workalendar
|
workalendar/usa/illinois.py
|
Python
|
mit
| 877
|
from sqlalchemy import Integer, func
from alembic.testing.fixtures import TestBase
from alembic.testing import config
from sqlalchemy import TIMESTAMP, MetaData, Table, Column, text
from sqlalchemy.engine.reflection import Inspector
from alembic import op, util
from alembic.testing import eq_, assert_raises_message
from alembic.testing.fixtures import capture_context_buffer, op_fixture
from alembic.testing.env import staging_env, _no_sql_testing_config, \
three_rev_fixture, clear_staging_env
from alembic.migration import MigrationContext
class MySQLOpTest(TestBase):
def test_rename_column(self):
context = op_fixture('mysql')
op.alter_column(
't1', 'c1', new_column_name="c2", existing_type=Integer)
context.assert_(
'ALTER TABLE t1 CHANGE c1 c2 INTEGER NULL'
)
def test_rename_column_quotes_needed_one(self):
context = op_fixture('mysql')
op.alter_column('MyTable', 'ColumnOne', new_column_name="ColumnTwo",
existing_type=Integer)
context.assert_(
'ALTER TABLE `MyTable` CHANGE `ColumnOne` `ColumnTwo` INTEGER NULL'
)
def test_rename_column_quotes_needed_two(self):
context = op_fixture('mysql')
op.alter_column('my table', 'column one', new_column_name="column two",
existing_type=Integer)
context.assert_(
'ALTER TABLE `my table` CHANGE `column one` '
'`column two` INTEGER NULL'
)
def test_rename_column_serv_default(self):
context = op_fixture('mysql')
op.alter_column(
't1', 'c1', new_column_name="c2", existing_type=Integer,
existing_server_default="q")
context.assert_(
"ALTER TABLE t1 CHANGE c1 c2 INTEGER NULL DEFAULT 'q'"
)
def test_rename_column_serv_compiled_default(self):
context = op_fixture('mysql')
op.alter_column(
't1', 'c1', existing_type=Integer,
server_default=func.utc_thing(func.current_timestamp()))
# this is not a valid MySQL default but the point is to just
# test SQL expression rendering
context.assert_(
"ALTER TABLE t1 ALTER COLUMN c1 "
"SET DEFAULT utc_thing(CURRENT_TIMESTAMP)"
)
def test_rename_column_autoincrement(self):
context = op_fixture('mysql')
op.alter_column(
't1', 'c1', new_column_name="c2", existing_type=Integer,
existing_autoincrement=True)
context.assert_(
'ALTER TABLE t1 CHANGE c1 c2 INTEGER NULL AUTO_INCREMENT'
)
def test_col_add_autoincrement(self):
context = op_fixture('mysql')
op.alter_column('t1', 'c1', existing_type=Integer,
autoincrement=True)
context.assert_(
'ALTER TABLE t1 MODIFY c1 INTEGER NULL AUTO_INCREMENT'
)
def test_col_remove_autoincrement(self):
context = op_fixture('mysql')
op.alter_column('t1', 'c1', existing_type=Integer,
existing_autoincrement=True,
autoincrement=False)
context.assert_(
'ALTER TABLE t1 MODIFY c1 INTEGER NULL'
)
def test_col_dont_remove_server_default(self):
context = op_fixture('mysql')
op.alter_column('t1', 'c1', existing_type=Integer,
existing_server_default='1',
server_default=False)
context.assert_()
def test_alter_column_drop_default(self):
context = op_fixture('mysql')
op.alter_column("t", "c", existing_type=Integer, server_default=None)
context.assert_(
'ALTER TABLE t ALTER COLUMN c DROP DEFAULT'
)
def test_alter_column_modify_default(self):
context = op_fixture('mysql')
        # notice we don't need the existing type on this one...
op.alter_column("t", "c", server_default='1')
context.assert_(
"ALTER TABLE t ALTER COLUMN c SET DEFAULT '1'"
)
def test_col_not_nullable(self):
context = op_fixture('mysql')
op.alter_column('t1', 'c1', nullable=False, existing_type=Integer)
context.assert_(
'ALTER TABLE t1 MODIFY c1 INTEGER NOT NULL'
)
def test_col_not_nullable_existing_serv_default(self):
context = op_fixture('mysql')
op.alter_column('t1', 'c1', nullable=False, existing_type=Integer,
existing_server_default='5')
context.assert_(
"ALTER TABLE t1 MODIFY c1 INTEGER NOT NULL DEFAULT '5'"
)
def test_col_nullable(self):
context = op_fixture('mysql')
op.alter_column('t1', 'c1', nullable=True, existing_type=Integer)
context.assert_(
'ALTER TABLE t1 MODIFY c1 INTEGER NULL'
)
def test_col_multi_alter(self):
context = op_fixture('mysql')
op.alter_column(
't1', 'c1', nullable=False, server_default="q", type_=Integer)
context.assert_(
"ALTER TABLE t1 MODIFY c1 INTEGER NOT NULL DEFAULT 'q'"
)
def test_alter_column_multi_alter_w_drop_default(self):
context = op_fixture('mysql')
op.alter_column(
't1', 'c1', nullable=False, server_default=None, type_=Integer)
context.assert_(
"ALTER TABLE t1 MODIFY c1 INTEGER NOT NULL"
)
def test_col_alter_type_required(self):
op_fixture('mysql')
assert_raises_message(
util.CommandError,
"MySQL CHANGE/MODIFY COLUMN operations require the existing type.",
op.alter_column, 't1', 'c1', nullable=False, server_default="q"
)
def test_drop_fk(self):
context = op_fixture('mysql')
op.drop_constraint("f1", "t1", "foreignkey")
context.assert_(
"ALTER TABLE t1 DROP FOREIGN KEY f1"
)
def test_drop_constraint_primary(self):
context = op_fixture('mysql')
op.drop_constraint('primary', 't1', type_='primary')
context.assert_(
"ALTER TABLE t1 DROP PRIMARY KEY"
)
def test_drop_unique(self):
context = op_fixture('mysql')
op.drop_constraint("f1", "t1", "unique")
context.assert_(
"ALTER TABLE t1 DROP INDEX f1"
)
def test_drop_check(self):
op_fixture('mysql')
assert_raises_message(
NotImplementedError,
"MySQL does not support CHECK constraints.",
op.drop_constraint, "f1", "t1", "check"
)
def test_drop_unknown(self):
op_fixture('mysql')
assert_raises_message(
TypeError,
"'type' can be one of 'check', 'foreignkey', "
"'primary', 'unique', None",
op.drop_constraint, "f1", "t1", "typo"
)
def test_drop_generic_constraint(self):
op_fixture('mysql')
assert_raises_message(
NotImplementedError,
"No generic 'DROP CONSTRAINT' in MySQL - please "
"specify constraint type",
op.drop_constraint, "f1", "t1"
)
class MySQLDefaultCompareTest(TestBase):
__only_on__ = 'mysql'
@classmethod
def setup_class(cls):
cls.bind = config.db
staging_env()
context = MigrationContext.configure(
connection=cls.bind.connect(),
opts={
'compare_type': True,
'compare_server_default': True
}
)
connection = context.bind
cls.autogen_context = {
'imports': set(),
'connection': connection,
'dialect': connection.dialect,
'context': context
}
@classmethod
def teardown_class(cls):
clear_staging_env()
def setUp(self):
self.metadata = MetaData(self.bind)
def tearDown(self):
self.metadata.drop_all()
def _compare_default_roundtrip(self, type_, txt, alternate=None):
if alternate:
expected = True
else:
alternate = txt
expected = False
t = Table(
"test", self.metadata,
Column(
"somecol", type_,
server_default=text(txt) if txt else None
)
)
t2 = Table("test", MetaData(),
Column("somecol", type_, server_default=text(alternate))
)
assert self._compare_default(
t, t2, t2.c.somecol, alternate
) is expected
def _compare_default(
self,
t1, t2, col,
rendered
):
t1.create(self.bind)
insp = Inspector.from_engine(self.bind)
cols = insp.get_columns(t1.name)
ctx = self.autogen_context['context']
return ctx.impl.compare_server_default(
None,
col,
rendered,
cols[0]['default'])
def test_compare_timestamp_current_timestamp(self):
self._compare_default_roundtrip(
TIMESTAMP(),
"CURRENT_TIMESTAMP",
)
def test_compare_timestamp_current_timestamp_diff(self):
self._compare_default_roundtrip(
TIMESTAMP(),
None, "CURRENT_TIMESTAMP",
)
|
ImaginationForPeople/alembic
|
tests/test_mysql.py
|
Python
|
mit
| 9,352
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base.version import Version
from twilio.rest.lookups.v1.phone_number import PhoneNumberList
class V1(Version):
def __init__(self, domain):
"""
Initialize the V1 version of Lookups
:returns: V1 version of Lookups
:rtype: twilio.rest.lookups.v1.V1.V1
"""
super(V1, self).__init__(domain)
self.version = 'v1'
self._phone_numbers = None
@property
def phone_numbers(self):
"""
:rtype: twilio.rest.lookups.v1.phone_number.PhoneNumberList
"""
if self._phone_numbers is None:
self._phone_numbers = PhoneNumberList(self)
return self._phone_numbers
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Lookups.V1>'
|
tysonholub/twilio-python
|
twilio/rest/lookups/v1/__init__.py
|
Python
|
mit
| 1,004
|
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.basemap import Basemap
map = Basemap()
fig = plt.figure()
ax = Axes3D(fig)
'''
ax.azim = 270
ax.elev = 90
ax.dist = 5
'''
ax.add_collection3d(map.drawcoastlines(linewidth=0.25))
ax.add_collection3d(map.drawcountries(linewidth=0.35))
plt.show()
|
rveciana/BasemapTutorial
|
code_examples/cookbook/plotting_3d_basic.py
|
Python
|
cc0-1.0
| 338
|
#
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2007, 2009, 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
from pykickstart.base import KickstartCommand
from pykickstart.errors import KickstartValueError, formatErrorMsg
from pykickstart.options import KSOptionParser
from pykickstart.i18n import _
class FC3_HardDrive(KickstartCommand):
removedKeywords = KickstartCommand.removedKeywords
removedAttrs = KickstartCommand.removedAttrs
def __init__(self, writePriority=0, *args, **kwargs):
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
self.biospart = kwargs.get("biospart", None)
self.partition = kwargs.get("partition", None)
self.dir = kwargs.get("dir", None)
self.op = self._getParser()
def __eq__(self, other):
if not other:
return False
return self.biospart == other.biospart and self.partition == other.partition and self.dir == other.dir
def __ne__(self, other):
return not self == other
def __str__(self):
retval = KickstartCommand.__str__(self)
if not self.seen:
return retval
retval += "# Use hard drive installation media\n"
if self.biospart is not None:
retval += "harddrive --dir=%s --biospart=%s\n" % (self.dir, self.biospart)
else:
retval += "harddrive --dir=%s --partition=%s\n" % (self.dir, self.partition)
return retval
def _getParser(self):
op = KSOptionParser()
op.add_option("--biospart", dest="biospart")
op.add_option("--partition", dest="partition")
op.add_option("--dir", dest="dir", required=1)
return op
def parse(self, args):
(opts, _extra) = self.op.parse_args(args=args, lineno=self.lineno)
self._setToSelf(self.op, opts)
if self.biospart is None and self.partition is None or \
self.biospart is not None and self.partition is not None:
raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("One of biospart or partition options must be specified.")))
return self
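# Illustrative kickstart lines this command accepts (values are made up); per
# parse() above, --dir is required and exactly one of --biospart or
# --partition must be given:
#   harddrive --dir=/install-tree --partition=sdb2
#   harddrive --dir=/install-tree --biospart=80p1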
|
pbokoc/pykickstart
|
pykickstart/commands/harddrive.py
|
Python
|
gpl-2.0
| 2,974
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for the memory layer provider.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '2015-04-23'
__copyright__ = 'Copyright 2015, The QGIS Project'
from urllib.parse import parse_qs
from qgis.PyQt.QtCore import QVariant, QByteArray, QDate, QDateTime, QTime
from qgis.core import (
QgsField,
QgsFields,
QgsLayerDefinition,
QgsPointXY,
QgsReadWriteContext,
QgsVectorLayer,
QgsFeatureRequest,
QgsFeature,
QgsGeometry,
QgsWkbTypes,
NULL,
QgsMemoryProviderUtils,
QgsCoordinateReferenceSystem,
QgsRectangle,
QgsTestUtils,
QgsFeatureSource,
QgsFeatureSink,
)
from qgis.testing import (
start_app,
unittest
)
from providertestbase import ProviderTestCase
from utilities import (
unitTestDataPath,
compareWkt
)
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestPyQgsMemoryProvider(unittest.TestCase, ProviderTestCase):
@classmethod
def createLayer(cls):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&field=dt:datetime&field=date:date&field=time:time&key=pk',
'test', 'memory')
assert (vl.isValid())
f1 = QgsFeature()
f1.setAttributes(
[5, -200, NULL, 'NuLl', '5', QDateTime(QDate(2020, 5, 4), QTime(12, 13, 14)), QDate(2020, 5, 2),
QTime(12, 13, 1)])
f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
f2 = QgsFeature()
f2.setAttributes([3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL])
f3 = QgsFeature()
f3.setAttributes(
[1, 100, 'Orange', 'oranGe', '1', QDateTime(QDate(2020, 5, 3), QTime(12, 13, 14)), QDate(2020, 5, 3),
QTime(12, 13, 14)])
f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
f4 = QgsFeature()
f4.setAttributes(
[2, 200, 'Apple', 'Apple', '2', QDateTime(QDate(2020, 5, 4), QTime(12, 14, 14)), QDate(2020, 5, 4),
QTime(12, 14, 14)])
f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
f5 = QgsFeature()
f5.setAttributes(
[4, 400, 'Honey', 'Honey', '4', QDateTime(QDate(2021, 5, 4), QTime(13, 13, 14)), QDate(2021, 5, 4),
QTime(13, 13, 14)])
f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
return vl
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# Create test layer
cls.vl = cls.createLayer()
assert (cls.vl.isValid())
cls.source = cls.vl.dataProvider()
# poly layer
cls.poly_vl = QgsVectorLayer('Polygon?crs=epsg:4326&field=pk:integer&key=pk',
'test', 'memory')
assert (cls.poly_vl.isValid())
cls.poly_provider = cls.poly_vl.dataProvider()
f1 = QgsFeature()
f1.setAttributes([1])
f1.setGeometry(QgsGeometry.fromWkt(
'Polygon ((-69.03664108 81.35818902, -69.09237722 80.24346619, -73.718477 80.1319939, -73.718477 76.28620011, -74.88893598 76.34193625, -74.83319983 81.35818902, -69.03664108 81.35818902))'))
f2 = QgsFeature()
f2.setAttributes([2])
f2.setGeometry(QgsGeometry.fromWkt(
'Polygon ((-67.58750139 81.1909806, -66.30557012 81.24671674, -66.30557012 76.89929767, -67.58750139 76.89929767, -67.58750139 81.1909806))'))
f3 = QgsFeature()
f3.setAttributes([3])
f3.setGeometry(QgsGeometry.fromWkt(
'Polygon ((-68.36780737 75.78457483, -67.53176524 72.60761475, -68.64648808 73.66660144, -70.20710006 72.9420316, -68.36780737 75.78457483))'))
f4 = QgsFeature()
f4.setAttributes([4])
cls.poly_provider.addFeatures([f1, f2, f3, f4])
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
def getEditableLayer(self):
return self.createLayer()
def testGetFeaturesSubsetAttributes2(self):
""" Override and skip this test for memory provider, as it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testGetFeaturesNoGeometry(self):
""" Override and skip this test for memory provider, as it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testCtors(self):
testVectors = ["Point", "LineString", "Polygon", "MultiPoint", "MultiLineString", "MultiPolygon", "None"]
for v in testVectors:
layer = QgsVectorLayer(v, "test", "memory")
assert layer.isValid(), "Failed to create valid %s memory layer" % (v)
def testLayerGeometry(self):
testVectors = [("Point", QgsWkbTypes.PointGeometry, QgsWkbTypes.Point),
("LineString", QgsWkbTypes.LineGeometry, QgsWkbTypes.LineString),
("Polygon", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.Polygon),
("MultiPoint", QgsWkbTypes.PointGeometry, QgsWkbTypes.MultiPoint),
("MultiLineString", QgsWkbTypes.LineGeometry, QgsWkbTypes.MultiLineString),
("MultiPolygon", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.MultiPolygon),
("PointZ", QgsWkbTypes.PointGeometry, QgsWkbTypes.PointZ),
("LineStringZ", QgsWkbTypes.LineGeometry, QgsWkbTypes.LineStringZ),
("PolygonZ", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.PolygonZ),
("MultiPointZ", QgsWkbTypes.PointGeometry, QgsWkbTypes.MultiPointZ),
("MultiLineStringZ", QgsWkbTypes.LineGeometry, QgsWkbTypes.MultiLineStringZ),
("MultiPolygonZ", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.MultiPolygonZ),
("PointM", QgsWkbTypes.PointGeometry, QgsWkbTypes.PointM),
("LineStringM", QgsWkbTypes.LineGeometry, QgsWkbTypes.LineStringM),
("PolygonM", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.PolygonM),
("MultiPointM", QgsWkbTypes.PointGeometry, QgsWkbTypes.MultiPointM),
("MultiLineStringM", QgsWkbTypes.LineGeometry, QgsWkbTypes.MultiLineStringM),
("MultiPolygonM", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.MultiPolygonM),
("PointZM", QgsWkbTypes.PointGeometry, QgsWkbTypes.PointZM),
("LineStringZM", QgsWkbTypes.LineGeometry, QgsWkbTypes.LineStringZM),
("PolygonZM", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.PolygonZM),
("MultiPointZM", QgsWkbTypes.PointGeometry, QgsWkbTypes.MultiPointZM),
("MultiLineStringZM", QgsWkbTypes.LineGeometry, QgsWkbTypes.MultiLineStringZM),
("MultiPolygonZM", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.MultiPolygonZM),
("Point25D", QgsWkbTypes.PointGeometry, QgsWkbTypes.Point25D),
("LineString25D", QgsWkbTypes.LineGeometry, QgsWkbTypes.LineString25D),
("Polygon25D", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.Polygon25D),
("MultiPoint25D", QgsWkbTypes.PointGeometry, QgsWkbTypes.MultiPoint25D),
("MultiLineString25D", QgsWkbTypes.LineGeometry, QgsWkbTypes.MultiLineString25D),
("MultiPolygon25D", QgsWkbTypes.PolygonGeometry, QgsWkbTypes.MultiPolygon25D),
("None", QgsWkbTypes.NullGeometry, QgsWkbTypes.NoGeometry)]
for v in testVectors:
layer = QgsVectorLayer(v[0], "test", "memory")
myMessage = ('Expected: %s\nGot: %s\n' %
(v[1], layer.geometryType()))
assert layer.geometryType() == v[1], myMessage
myMessage = ('Expected: %s\nGot: %s\n' %
(v[2], layer.wkbType()))
assert layer.wkbType() == v[2], myMessage
def testAddFeatures(self):
layer = QgsVectorLayer("Point", "test", "memory")
provider = layer.dataProvider()
res = provider.addAttributes([QgsField("name", QVariant.String),
QgsField("age", QVariant.Int),
QgsField("size", QVariant.Double)])
assert res, "Failed to add attributes"
myMessage = ('Expected: %s\nGot: %s\n' %
(3, len(provider.fields())))
assert len(provider.fields()) == 3, myMessage
ft = QgsFeature()
ft.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(10, 10)))
ft.setAttributes(["Johny",
20,
0.3])
res, t = provider.addFeatures([ft])
assert res, "Failed to add feature"
myMessage = ('Expected: %s\nGot: %s\n' %
(1, provider.featureCount()))
assert provider.featureCount() == 1, myMessage
for f in provider.getFeatures(QgsFeatureRequest()):
myMessage = ('Expected: %s\nGot: %s\n' %
("Johny", f[0]))
assert f[0] == "Johny", myMessage
myMessage = ('Expected: %s\nGot: %s\n' %
(20, f[1]))
assert f[1] == 20, myMessage
myMessage = ('Expected: %s\nGot: %s\n' %
(0.3, f[2]))
assert (f[2] - 0.3) < 0.0000001, myMessage
geom = f.geometry()
myMessage = ('Expected: %s\nGot: %s\n' %
("Point (10 10)", str(geom.asWkt())))
assert compareWkt(str(geom.asWkt()), "Point (10 10)"), myMessage
def testClone(self):
"""
Test that cloning a memory layer also clones features
"""
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=f1:integer&field=f2:integer',
'test', 'memory')
self.assertTrue(vl.isValid())
f1 = QgsFeature()
f1.setAttributes([5, -200])
f2 = QgsFeature()
f2.setAttributes([3, 300])
f3 = QgsFeature()
f3.setAttributes([1, 100])
res, [f1, f2, f3] = vl.dataProvider().addFeatures([f1, f2, f3])
self.assertEqual(vl.featureCount(), 3)
vl2 = vl.clone()
self.assertEqual(vl2.featureCount(), 3)
features = [f for f in vl2.getFeatures()]
self.assertTrue([f for f in features if f['f1'] == 5])
self.assertTrue([f for f in features if f['f1'] == 3])
self.assertTrue([f for f in features if f['f1'] == 1])
def testGetFields(self):
layer = QgsVectorLayer("Point", "test", "memory")
provider = layer.dataProvider()
provider.addAttributes([QgsField("name", QVariant.String),
QgsField("age", QVariant.Int),
QgsField("size", QVariant.Double),
QgsField("vallist", QVariant.List, subType=QVariant.Int),
QgsField("stringlist", QVariant.List, subType=QVariant.String),
QgsField("reallist", QVariant.List, subType=QVariant.Double),
QgsField("longlist", QVariant.List, subType=QVariant.LongLong)])
self.assertEqual(len(provider.fields()), 7)
self.assertEqual(provider.fields()[0].name(), "name")
self.assertEqual(provider.fields()[0].type(), QVariant.String)
self.assertEqual(provider.fields()[0].subType(), QVariant.Invalid)
self.assertEqual(provider.fields()[1].name(), "age")
self.assertEqual(provider.fields()[1].type(), QVariant.Int)
self.assertEqual(provider.fields()[1].subType(), QVariant.Invalid)
self.assertEqual(provider.fields()[2].name(), "size")
self.assertEqual(provider.fields()[2].type(), QVariant.Double)
self.assertEqual(provider.fields()[2].subType(), QVariant.Invalid)
self.assertEqual(provider.fields()[3].name(), "vallist")
self.assertEqual(provider.fields()[3].type(), QVariant.List)
self.assertEqual(provider.fields()[3].subType(), QVariant.Int)
self.assertEqual(provider.fields()[4].name(), "stringlist")
self.assertEqual(provider.fields()[4].type(), QVariant.List)
self.assertEqual(provider.fields()[4].subType(), QVariant.String)
self.assertEqual(provider.fields()[5].name(), "reallist")
self.assertEqual(provider.fields()[5].type(), QVariant.List)
self.assertEqual(provider.fields()[5].subType(), QVariant.Double)
self.assertEqual(provider.fields()[6].name(), "longlist")
self.assertEqual(provider.fields()[6].type(), QVariant.List)
self.assertEqual(provider.fields()[6].subType(), QVariant.LongLong)
ft = QgsFeature()
ft.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(10, 10)))
ft.setAttributes(["Johny",
20,
0.3,
[1, 2, 3],
['a', 'b', 'c'],
[1.1, 2.2, 3.3],
[1, 2, 3]])
provider.addFeatures([ft])
for f in provider.getFeatures(QgsFeatureRequest()):
self.assertEqual(f.attributes(), ['Johny', 20, 0.3, [1, 2, 3], ['a', 'b', 'c'], [1.1, 2.2, 3.3], [1, 2, 3]])
def testFromUri(self):
"""Test we can construct the mem provider from a uri"""
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=name:string(20)&'
'field=age:integer&field=size:double&index=yes'),
'test',
'memory')
self.assertIsNotNone(myMemoryLayer)
myProvider = myMemoryLayer.dataProvider()
self.assertIsNotNone(myProvider)
def testLengthPrecisionFromUri(self):
"""Test we can assign length and precision from a uri"""
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=size:double(12,9)&index=yes'),
'test',
'memory')
self.assertEqual(myMemoryLayer.fields().field('size').length(), 12)
self.assertEqual(myMemoryLayer.fields().field('size').precision(), 9)
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=size:double(-1,-1)&index=yes'),
'test',
'memory')
self.assertEqual(myMemoryLayer.fields().field('size').length(), -1)
self.assertEqual(myMemoryLayer.fields().field('size').precision(), -1)
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=size:string(-1)&index=yes'),
'test',
'memory')
self.assertEqual(myMemoryLayer.fields().field('size').length(), -1)
def testListFromUri(self):
"""Test we can create list type fields from a uri"""
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=a:string(-1)[]&index=yes'),
'test',
'memory')
self.assertEqual(myMemoryLayer.fields().field('a').type(), QVariant.StringList)
self.assertEqual(myMemoryLayer.fields().field('a').subType(), QVariant.String)
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=a:double(-1,-1)[]&index=yes'),
'test',
'memory')
self.assertEqual(myMemoryLayer.fields().field('a').type(), QVariant.List)
self.assertEqual(myMemoryLayer.fields().field('a').subType(), QVariant.Double)
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=a:long(-1,-1)[]&index=yes'),
'test',
'memory')
self.assertEqual(myMemoryLayer.fields().field('a').type(), QVariant.List)
self.assertEqual(myMemoryLayer.fields().field('a').subType(), QVariant.LongLong)
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&field=a:int(-1,-1)[]&index=yes'),
'test',
'memory')
self.assertEqual(myMemoryLayer.fields().field('a').type(), QVariant.List)
self.assertEqual(myMemoryLayer.fields().field('a').subType(), QVariant.Int)
def testFromUriWithEncodedField(self):
"""Test we can construct the mem provider from a uri when a field name is encoded"""
layer = QgsVectorLayer(
('Point?crs=epsg:4326&field=name:string(20)&'
'field=test%2Ffield:integer'),
'test',
'memory')
self.assertTrue(layer.isValid())
self.assertEqual([f.name() for f in layer.fields()], ['name', 'test/field'])
def testSaveFields(self):
# Create a new memory layer with no fields
myMemoryLayer = QgsVectorLayer(
('Point?crs=epsg:4326&index=yes'),
'test',
'memory')
# Add some fields to the layer
myFields = [QgsField('TestInt', QVariant.Int, 'integer', 2, 0),
QgsField('TestLong', QVariant.LongLong, 'long', -1, 0),
QgsField('TestDbl', QVariant.Double, 'double', 8, 6),
QgsField('TestString', QVariant.String, 'string', 50, 0),
QgsField('TestDate', QVariant.Date, 'date'),
QgsField('TestTime', QVariant.Time, 'time'),
QgsField('TestDateTime', QVariant.DateTime, 'datetime'),
QgsField("vallist", QVariant.List, subType=QVariant.Int),
QgsField("stringlist", QVariant.StringList, subType=QVariant.String),
QgsField("stringlist2", QVariant.List, subType=QVariant.String),
QgsField("reallist", QVariant.List, subType=QVariant.Double),
QgsField("longlist", QVariant.List, subType=QVariant.LongLong)]
self.assertTrue(myMemoryLayer.startEditing())
for f in myFields:
assert myMemoryLayer.addAttribute(f)
self.assertTrue(myMemoryLayer.commitChanges())
myMemoryLayer.updateFields()
for f in myFields:
self.assertEqual(f, myMemoryLayer.fields().field(f.name()))
# Export the layer to a layer-definition-XML
qlr = QgsLayerDefinition.exportLayerDefinitionLayers([myMemoryLayer], QgsReadWriteContext())
self.assertIsNotNone(qlr)
# Import the layer from the layer-definition-XML
layers = QgsLayerDefinition.loadLayerDefinitionLayers(qlr, QgsReadWriteContext())
self.assertTrue(layers)
myImportedLayer = layers[0]
self.assertIsNotNone(myImportedLayer)
# Check for the presence of the fields
importedFields = myImportedLayer.fields()
for f in myFields:
self.assertEqual(f.name(), importedFields.field(f.name()).name())
if f.name() != 'stringlist2':
self.assertEqual(f.type(), importedFields.field(f.name()).type())
else:
# we automatically convert List with String subtype to StringList, to match other data providers
self.assertEqual(importedFields.field(f.name()).type(), QVariant.StringList)
self.assertEqual(f.subType(), importedFields.field(f.name()).subType())
self.assertEqual(f.precision(), importedFields.field(f.name()).precision())
self.assertEqual(f.length(), importedFields.field(f.name()).length())
def testRenameAttributes(self):
layer = QgsVectorLayer("Point", "test", "memory")
provider = layer.dataProvider()
res = provider.addAttributes([QgsField("name", QVariant.String),
QgsField("age", QVariant.Int),
QgsField("size", QVariant.Double)])
layer.updateFields()
assert res, "Failed to add attributes"
ft = QgsFeature()
ft.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(10, 10)))
ft.setAttributes(["Johny",
20,
0.3])
res, t = provider.addFeatures([ft])
# bad rename
self.assertFalse(provider.renameAttributes({-1: 'not_a_field'}))
self.assertFalse(provider.renameAttributes({100: 'not_a_field'}))
# already exists
self.assertFalse(provider.renameAttributes({1: 'name'}))
# rename one field
self.assertTrue(provider.renameAttributes({1: 'this_is_the_new_age'}))
self.assertEqual(provider.fields().at(1).name(), 'this_is_the_new_age')
layer.updateFields()
fet = next(layer.getFeatures())
self.assertEqual(fet.fields()[1].name(), 'this_is_the_new_age')
# rename two fields
self.assertTrue(provider.renameAttributes({1: 'mapinfo_is_the_stone_age', 2: 'super_size'}))
self.assertEqual(provider.fields().at(1).name(), 'mapinfo_is_the_stone_age')
self.assertEqual(provider.fields().at(2).name(), 'super_size')
layer.updateFields()
fet = next(layer.getFeatures())
self.assertEqual(fet.fields()[1].name(), 'mapinfo_is_the_stone_age')
self.assertEqual(fet.fields()[2].name(), 'super_size')
def testUniqueSource(self):
"""
Similar memory layers should have unique source - some code checks layer source to identify
matching layers
"""
layer = QgsVectorLayer("Point", "test", "memory")
layer2 = QgsVectorLayer("Point", "test2", "memory")
self.assertNotEqual(layer.source(), layer2.source())
def testCreateMemoryLayer(self):
"""
Test QgsMemoryProviderUtils.createMemoryLayer()
"""
# no fields
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', QgsFields())
self.assertTrue(layer.isValid())
self.assertEqual(layer.name(), 'my name')
self.assertTrue(layer.fields().isEmpty())
# similar layers should have unique sources
layer2 = QgsMemoryProviderUtils.createMemoryLayer('my name', QgsFields())
self.assertNotEqual(layer.source(), layer2.source())
# geometry type
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', QgsFields(), QgsWkbTypes.Point)
self.assertTrue(layer.isValid())
self.assertEqual(layer.wkbType(), QgsWkbTypes.Point)
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', QgsFields(), QgsWkbTypes.PolygonZM)
self.assertTrue(layer.isValid())
self.assertEqual(layer.wkbType(), QgsWkbTypes.PolygonZM)
# crs
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', QgsFields(), QgsWkbTypes.PolygonZM,
QgsCoordinateReferenceSystem.fromEpsgId(3111))
self.assertTrue(layer.isValid())
self.assertEqual(layer.wkbType(), QgsWkbTypes.PolygonZM)
self.assertTrue(layer.crs().isValid())
self.assertEqual(layer.crs().authid(), 'EPSG:3111')
# custom CRS
crs = QgsCoordinateReferenceSystem.fromProj(
'+proj=qsc +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +units=m +no_defs')
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', QgsFields(), QgsWkbTypes.PolygonZM, crs)
self.assertTrue(layer.isValid())
self.assertTrue(layer.crs().isValid())
self.assertEqual(layer.crs().toProj(),
'+proj=qsc +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +units=m +no_defs +type=crs')
# clone it, just to check
layer2 = layer.clone()
self.assertEqual(layer2.crs().toProj(),
'+proj=qsc +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +ellps=WGS84 +units=m +no_defs +type=crs')
# fields
fields = QgsFields()
fields.append(QgsField("string", QVariant.String))
fields.append(QgsField("long", QVariant.LongLong))
fields.append(QgsField("double", QVariant.Double))
fields.append(QgsField("integer", QVariant.Int))
fields.append(QgsField("date", QVariant.Date))
fields.append(QgsField("datetime", QVariant.DateTime))
fields.append(QgsField("time", QVariant.Time))
fields.append(QgsField("#complex_name", QVariant.String))
fields.append(QgsField("complex/name", QVariant.String))
fields.append(QgsField("binaryfield", QVariant.ByteArray))
fields.append(QgsField("boolfield", QVariant.Bool))
fields.append(QgsField("vallist", QVariant.List, subType=QVariant.Int))
fields.append(QgsField("stringlist", QVariant.StringList, subType=QVariant.String))
fields.append(QgsField("stringlist2", QVariant.List, subType=QVariant.String))
fields.append(QgsField("reallist", QVariant.List, subType=QVariant.Double))
fields.append(QgsField("longlist", QVariant.List, subType=QVariant.LongLong))
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', fields)
self.assertTrue(layer.isValid())
self.assertFalse(layer.fields().isEmpty())
self.assertEqual(len(layer.fields()), len(fields))
for i in range(len(fields)):
self.assertEqual(layer.fields()[i].name(), fields[i].name())
if layer.fields()[i].name() != 'stringlist2':
self.assertEqual(layer.fields()[i].type(), fields[i].type())
else:
# we automatically convert List with String subtype to StringList, to match other data providers
self.assertEqual(layer.fields()[i].type(), QVariant.StringList)
self.assertEqual(layer.fields()[i].length(), fields[i].length())
self.assertEqual(layer.fields()[i].precision(), fields[i].precision(), fields[i].name())
# unsupported field type
fields = QgsFields()
fields.append(QgsField("rect", QVariant.RectF))
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', fields)
self.assertTrue(layer.isValid())
self.assertFalse(layer.fields().isEmpty())
self.assertEqual(layer.fields()[0].name(), 'rect')
self.assertEqual(layer.fields()[0].type(), QVariant.String) # should be mapped to string
# field precision
fields = QgsFields()
fields.append(QgsField("string", QVariant.String, len=10))
fields.append(QgsField("long", QVariant.LongLong, len=6))
fields.append(QgsField("double", QVariant.Double, len=10, prec=7))
fields.append(QgsField("double2", QVariant.Double, len=-1, prec=-1))
layer = QgsMemoryProviderUtils.createMemoryLayer('my name', fields)
self.assertTrue(layer.isValid())
self.assertFalse(layer.fields().isEmpty())
self.assertEqual(len(layer.fields()), len(fields))
for i in range(len(fields)):
self.assertEqual(layer.fields()[i].name(), fields[i].name())
self.assertEqual(layer.fields()[i].type(), fields[i].type())
self.assertEqual(layer.fields()[i].length(), fields[i].length())
self.assertEqual(layer.fields()[i].precision(), fields[i].precision())
def testAddChangeFeatureConvertAttribute(self):
"""
Test add features with attribute values which require conversion
"""
layer = QgsVectorLayer(
'Point?crs=epsg:4326&index=yes&field=pk:integer&field=cnt:int8&field=dt:datetime', 'test', 'memory')
provider = layer.dataProvider()
f = QgsFeature()
# string value specified for datetime field -- must be converted when adding the feature
f.setAttributes([5, -200, '2021-02-10 00:00'])
self.assertTrue(provider.addFeatures([f]))
saved_feature = next(provider.getFeatures())
# saved feature must have a QDateTime value for field, not string
self.assertEqual(saved_feature.attributes(), [5, -200, QDateTime(2021, 2, 10, 0, 0)])
self.assertTrue(provider.changeAttributeValues({saved_feature.id(): {2: '2021-02-12 00:00'}}))
saved_feature = next(provider.getFeatures())
# saved feature must have a QDateTime value for field, not string
self.assertEqual(saved_feature.attributes(), [5, -200, QDateTime(2021, 2, 12, 0, 0)])
def testThreadSafetyWithIndex(self):
layer = QgsVectorLayer(
'Point?crs=epsg:4326&index=yes&field=pk:integer&field=cnt:int8&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
'test', 'memory')
provider = layer.dataProvider()
f = QgsFeature()
f.setAttributes([5, -200, NULL, 'NuLl', '5'])
f.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
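        # populate the indexed layer with a large number of identical point features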
for i in range(100000):
provider.addFeatures([f])
# filter rect request
extent = QgsRectangle(-73, 70, -63, 80)
request = QgsFeatureRequest().setFilterRect(extent)
self.assertTrue(QgsTestUtils.testProviderIteratorThreadSafety(self.source, request))
def testMinMaxCache(self):
"""
Test that min/max cache is appropriately cleared
:return:
"""
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=f1:integer&field=f2:integer',
'test', 'memory')
self.assertTrue(vl.isValid())
f1 = QgsFeature()
f1.setAttributes([5, -200])
f2 = QgsFeature()
f2.setAttributes([3, 300])
f3 = QgsFeature()
f3.setAttributes([1, 100])
f4 = QgsFeature()
f4.setAttributes([2, 200])
f5 = QgsFeature()
f5.setAttributes([4, 400])
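        # addFeatures() returns (success, list of added features); the returned features carry their assigned ids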
res, [f1, f2, f3, f4, f5] = vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
self.assertTrue(res)
self.assertEqual(vl.dataProvider().minimumValue(0), 1)
self.assertEqual(vl.dataProvider().minimumValue(1), -200)
self.assertEqual(vl.dataProvider().maximumValue(0), 5)
self.assertEqual(vl.dataProvider().maximumValue(1), 400)
# add feature
f6 = QgsFeature()
f6.setAttributes([15, 1400])
res, [f6] = vl.dataProvider().addFeatures([f6])
self.assertTrue(res)
self.assertEqual(vl.dataProvider().minimumValue(0), 1)
self.assertEqual(vl.dataProvider().minimumValue(1), -200)
self.assertEqual(vl.dataProvider().maximumValue(0), 15)
self.assertEqual(vl.dataProvider().maximumValue(1), 1400)
f7 = QgsFeature()
f7.setAttributes([-1, -1400])
res, [f7] = vl.dataProvider().addFeatures([f7])
self.assertTrue(res)
self.assertEqual(vl.dataProvider().minimumValue(0), -1)
self.assertEqual(vl.dataProvider().minimumValue(1), -1400)
self.assertEqual(vl.dataProvider().maximumValue(0), 15)
self.assertEqual(vl.dataProvider().maximumValue(1), 1400)
# change attribute values
self.assertTrue(vl.dataProvider().changeAttributeValues({f6.id(): {0: 3, 1: 150}, f7.id(): {0: 4, 1: -100}}))
self.assertEqual(vl.dataProvider().minimumValue(0), 1)
self.assertEqual(vl.dataProvider().minimumValue(1), -200)
self.assertEqual(vl.dataProvider().maximumValue(0), 5)
self.assertEqual(vl.dataProvider().maximumValue(1), 400)
# delete features
self.assertTrue(vl.dataProvider().deleteFeatures([f4.id(), f1.id()]))
self.assertEqual(vl.dataProvider().minimumValue(0), 1)
self.assertEqual(vl.dataProvider().minimumValue(1), -100)
self.assertEqual(vl.dataProvider().maximumValue(0), 4)
self.assertEqual(vl.dataProvider().maximumValue(1), 400)
# delete attributes
self.assertTrue(vl.dataProvider().deleteAttributes([0]))
self.assertEqual(vl.dataProvider().minimumValue(0), -100)
self.assertEqual(vl.dataProvider().maximumValue(0), 400)
def testBinary(self):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=f1:integer&field=f2:binary',
'test', 'memory')
self.assertTrue(vl.isValid())
dp = vl.dataProvider()
fields = dp.fields()
self.assertEqual([f.name() for f in fields], ['f1', 'f2'])
self.assertEqual([f.type() for f in fields], [QVariant.Int, QVariant.ByteArray])
self.assertEqual([f.typeName() for f in fields], ['integer', 'binary'])
f = QgsFeature(dp.fields())
bin_1 = b'xxx'
bin_val1 = QByteArray(bin_1)
f.setAttributes([1, bin_val1])
self.assertTrue(dp.addFeature(f))
f2 = [f for f in dp.getFeatures()][0]
self.assertEqual(f2.attributes(), [1, bin_val1])
# add binary field
self.assertTrue(dp.addAttributes([QgsField('binfield2', QVariant.ByteArray, 'Binary')]))
fields = dp.fields()
bin2_field = fields[fields.lookupField('binfield2')]
self.assertEqual(bin2_field.type(), QVariant.ByteArray)
self.assertEqual(bin2_field.typeName(), 'Binary')
f = QgsFeature(fields)
bin_2 = b'yyy'
bin_val2 = QByteArray(bin_2)
f.setAttributes([2, NULL, bin_val2])
self.assertTrue(dp.addFeature(f))
f1 = [f for f in dp.getFeatures()][0]
self.assertEqual(f1.attributes(), [1, bin_val1, NULL])
f2 = [f for f in dp.getFeatures()][1]
self.assertEqual(f2.attributes(), [2, NULL, bin_val2])
def testBool(self):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=f1:integer&field=f2:bool',
'test', 'memory')
self.assertTrue(vl.isValid())
dp = vl.dataProvider()
fields = dp.fields()
self.assertEqual([f.name() for f in fields], ['f1', 'f2'])
self.assertEqual([f.type() for f in fields], [QVariant.Int, QVariant.Bool])
self.assertEqual([f.typeName() for f in fields], ['integer', 'boolean'])
f = QgsFeature(dp.fields())
f.setAttributes([1, True])
f2 = QgsFeature(dp.fields())
f2.setAttributes([2, False])
f3 = QgsFeature(dp.fields())
f3.setAttributes([3, NULL])
self.assertTrue(dp.addFeatures([f, f2, f3]))
self.assertEqual([f.attributes() for f in dp.getFeatures()], [[1, True], [2, False], [3, NULL]])
# add boolean field
self.assertTrue(dp.addAttributes([QgsField('boolfield2', QVariant.Bool, 'Boolean')]))
fields = dp.fields()
bool2_field = fields[fields.lookupField('boolfield2')]
self.assertEqual(bool2_field.type(), QVariant.Bool)
self.assertEqual(bool2_field.typeName(), 'Boolean')
f = QgsFeature(fields)
f.setAttributes([2, NULL, True])
self.assertTrue(dp.addFeature(f))
self.assertEqual([f.attributes() for f in dp.getFeatures()],
[[1, True, NULL], [2, False, NULL], [3, NULL, NULL], [2, NULL, True]])
def testSpatialIndex(self):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=f1:integer&field=f2:bool',
'test', 'memory')
self.assertEqual(vl.hasSpatialIndex(), QgsFeatureSource.SpatialIndexNotPresent)
vl.dataProvider().createSpatialIndex()
self.assertEqual(vl.hasSpatialIndex(), QgsFeatureSource.SpatialIndexPresent)
def testClone(self):
"""Test that a cloned layer has a single new id and
the same fields as the source layer"""
vl = QgsVectorLayer(
'Point?crs=epsg:4326',
'test', 'memory')
        self.assertTrue(vl.isValid())
dp = vl.dataProvider()
self.assertTrue(dp.addAttributes([QgsField("name", QVariant.String),
QgsField("age", QVariant.Int),
QgsField("size", QVariant.Double)]))
vl2 = vl.clone()
self.assertTrue(
'memory?geometry=Point&crs=EPSG:4326&field=name:(0,0)&field=age:(0,0)&field=size:(0,0)' in vl2.publicSource())
self.assertEqual(len(parse_qs(vl.publicSource())['uid']), 1)
self.assertEqual(len(parse_qs(vl2.publicSource())['uid']), 1)
self.assertNotEqual(parse_qs(vl2.publicSource())['uid'][0], parse_qs(vl.publicSource())['uid'][0])
def testTypeValidation(self):
"""Test that incompatible types in attributes raise errors"""
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=int:integer',
'test', 'memory')
self.assertTrue(vl.isValid())
invalid = QgsFeature(vl.fields())
invalid.setAttribute('int', 'A string')
invalid.setGeometry(QgsGeometry.fromWkt('point(9 45)'))
self.assertTrue(vl.startEditing())
# Validation happens on commit
self.assertTrue(vl.addFeatures([invalid]))
self.assertFalse(vl.commitChanges())
self.assertTrue(vl.rollBack())
self.assertFalse(vl.hasFeatures())
# Add a valid feature
valid = QgsFeature(vl.fields())
valid.setAttribute('int', 123)
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([valid]))
self.assertTrue(vl.commitChanges())
self.assertEqual(vl.featureCount(), 1)
f = vl.getFeature(1)
self.assertEqual(f.attribute('int'), 123)
# Add both
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=int:integer',
'test', 'memory')
self.assertEqual(vl.featureCount(), 0)
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([valid, invalid]))
self.assertFalse(vl.commitChanges())
self.assertEqual(vl.featureCount(), 2)
self.assertTrue(vl.rollBack())
self.assertEqual(vl.featureCount(), 0)
# Add both swapped
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=int:integer',
'test', 'memory')
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([invalid, valid]))
self.assertFalse(vl.commitChanges())
self.assertEqual(vl.featureCount(), 2)
self.assertTrue(vl.rollBack())
self.assertEqual(vl.featureCount(), 0)
# Change attribute value
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=int:integer',
'test', 'memory')
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([valid]))
self.assertTrue(vl.commitChanges())
self.assertTrue(vl.startEditing())
self.assertTrue(vl.changeAttributeValue(1, 0, 'A string'))
self.assertFalse(vl.commitChanges())
f = vl.getFeature(1)
self.assertEqual(f.attribute('int'), 'A string')
self.assertTrue(vl.rollBack())
f = vl.getFeature(1)
self.assertEqual(f.attribute('int'), 123)
# Change attribute values
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=int:integer',
'test', 'memory')
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([valid]))
self.assertTrue(vl.commitChanges())
self.assertTrue(vl.startEditing())
self.assertTrue(vl.changeAttributeValues(1, {0: 'A string'}))
self.assertFalse(vl.commitChanges())
f = vl.getFeature(1)
self.assertEqual(f.attribute('int'), 'A string')
self.assertTrue(vl.rollBack())
f = vl.getFeature(1)
self.assertEqual(f.attribute('int'), 123)
##############################################
# Test direct data provider calls
# No rollback (old behavior)
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=int:integer',
'test', 'memory')
dp = vl.dataProvider()
self.assertFalse(dp.addFeatures([valid, invalid])[0])
self.assertEqual([f.attributes() for f in dp.getFeatures()], [[123]])
f = vl.getFeature(1)
self.assertEqual(f.attribute('int'), 123)
# Roll back
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=int:integer',
'test', 'memory')
dp = vl.dataProvider()
self.assertFalse(dp.addFeatures([valid, invalid], QgsFeatureSink.RollBackOnErrors)[0])
self.assertFalse(dp.hasFeatures())
# Expected behavior for changeAttributeValues is to always roll back
self.assertTrue(dp.addFeatures([valid])[0])
self.assertFalse(dp.changeAttributeValues({1: {0: 'A string'}}))
f = vl.getFeature(1)
self.assertEqual(f.attribute('int'), 123)
class TestPyQgsMemoryProviderIndexed(unittest.TestCase, ProviderTestCase):
"""Runs the provider test suite against an indexed memory layer"""
@classmethod
def setUpClass(cls):
"""Run before all tests"""
# Create test layer
cls.vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&field=dt:datetime&field=date:date&field=time:time&key=pk',
'test', 'memory')
assert (cls.vl.isValid())
cls.source = cls.vl.dataProvider()
f1 = QgsFeature()
f1.setAttributes(
[5, -200, NULL, 'NuLl', '5', QDateTime(QDate(2020, 5, 4), QTime(12, 13, 14)), QDate(2020, 5, 2),
QTime(12, 13, 1)])
f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
f2 = QgsFeature()
f2.setAttributes([3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL])
f3 = QgsFeature()
f3.setAttributes(
[1, 100, 'Orange', 'oranGe', '1', QDateTime(QDate(2020, 5, 3), QTime(12, 13, 14)), QDate(2020, 5, 3),
QTime(12, 13, 14)])
f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
f4 = QgsFeature()
f4.setAttributes(
[2, 200, 'Apple', 'Apple', '2', QDateTime(QDate(2020, 5, 4), QTime(12, 14, 14)), QDate(2020, 5, 4),
QTime(12, 14, 14)])
f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
f5 = QgsFeature()
f5.setAttributes(
[4, 400, 'Honey', 'Honey', '4', QDateTime(QDate(2021, 5, 4), QTime(13, 13, 14)), QDate(2021, 5, 4),
QTime(13, 13, 14)])
f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
cls.source.addFeatures([f1, f2, f3, f4, f5])
# poly layer
cls.poly_vl = QgsVectorLayer('Polygon?crs=epsg:4326&index=yes&field=pk:integer&key=pk',
'test', 'memory')
assert (cls.poly_vl.isValid())
cls.poly_provider = cls.poly_vl.dataProvider()
f1 = QgsFeature()
f1.setAttributes([1])
f1.setGeometry(QgsGeometry.fromWkt(
'Polygon ((-69.0 81.4, -69.0 80.2, -73.7 80.2, -73.7 76.3, -74.9 76.3, -74.9 81.4, -69.0 81.4))'))
f2 = QgsFeature()
f2.setAttributes([2])
f2.setGeometry(QgsGeometry.fromWkt('Polygon ((-67.6 81.2, -66.3 81.2, -66.3 76.9, -67.6 76.9, -67.6 81.2))'))
f3 = QgsFeature()
f3.setAttributes([3])
f3.setGeometry(QgsGeometry.fromWkt('Polygon ((-68.4 75.8, -67.5 72.6, -68.6 73.7, -70.2 72.9, -68.4 75.8))'))
f4 = QgsFeature()
f4.setAttributes([4])
cls.poly_provider.addFeatures([f1, f2, f3, f4])
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
def testGetFeaturesSubsetAttributes2(self):
""" Override and skip this test for memory provider, as it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
def testGetFeaturesNoGeometry(self):
""" Override and skip this test for memory provider, as it's actually more efficient for the memory provider to return
its features as direct copies (due to implicit sharing of QgsFeature)
"""
pass
if __name__ == '__main__':
unittest.main()
|
pblottiere/QGIS
|
tests/src/python/test_provider_memory.py
|
Python
|
gpl-2.0
| 44,152
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import io
import os
import shutil
import sys
import tarfile
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import failure
from twisted.python import runtime
from twisted.trial import unittest
from buildbot_worker.commands import transfer
from buildbot_worker.test.fake.remote import FakeRemote
from buildbot_worker.test.util.command import CommandTestMixin
class FakeMasterMethods(object):
# a fake to represent any of:
# - FileWriter
# - FileDirectoryWriter
# - FileReader
def __init__(self, add_update):
self.add_update = add_update
self.delay_write = False
self.count_writes = False
self.keep_data = False
self.write_out_of_space_at = None
self.delay_read = False
self.count_reads = False
self.unpack_fail = False
self.written = False
self.read = False
self.data = b''
def remote_write(self, data):
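        # optionally simulate the master running out of disk space once the configured byte budget is used up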
if self.write_out_of_space_at is not None:
self.write_out_of_space_at -= len(data)
if self.write_out_of_space_at <= 0:
f = failure.Failure(RuntimeError("out of space"))
return defer.fail(f)
if self.count_writes:
self.add_update('write %d' % len(data))
elif not self.written:
self.add_update('write(s)')
self.written = True
if self.keep_data:
self.data += data
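        # optionally complete the write asynchronously so interrupt tests can fire mid-transfer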
if self.delay_write:
d = defer.Deferred()
reactor.callLater(0.01, d.callback, None)
return d
def remote_read(self, length):
if self.count_reads:
self.add_update('read %d' % length)
elif not self.read:
self.add_update('read(s)')
self.read = True
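        # return an empty result once all of the fake data has been consumed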
if not self.data:
return ''
_slice, self.data = self.data[:length], self.data[length:]
if self.delay_read:
d = defer.Deferred()
reactor.callLater(0.01, d.callback, _slice)
return d
return _slice
def remote_unpack(self):
self.add_update('unpack')
if self.unpack_fail:
return defer.fail(failure.Failure(RuntimeError("out of space")))
def remote_utime(self, accessed_modified):
self.add_update('utime - %s' % accessed_modified[0])
def remote_close(self):
self.add_update('close')
class TestUploadFile(CommandTestMixin, unittest.TestCase):
def setUp(self):
self.setUpCommand()
self.fakemaster = FakeMasterMethods(self.add_update)
# write 180 bytes of data to upload
self.datadir = os.path.join(self.basedir, 'workdir')
if os.path.exists(self.datadir):
shutil.rmtree(self.datadir)
os.makedirs(self.datadir)
self.datafile = os.path.join(self.datadir, 'data')
# note: use of 'wb' here ensures newlines aren't translated on the
# upload
with open(self.datafile, mode="wb") as f:
f.write(b"this is some data\n" * 10)
def tearDown(self):
self.tearDownCommand()
if os.path.exists(self.datadir):
shutil.rmtree(self.datadir)
def test_simple(self):
self.fakemaster.count_writes = True # get actual byte counts
self.make_command(transfer.WorkerFileUploadCommand, dict(
workdir='workdir',
workersrc='data',
writer=FakeRemote(self.fakemaster),
maxsize=1000,
blocksize=64,
keepstamp=False,
))
d = self.run_command()
def check(_):
self.assertUpdates([
{'header': 'sending %s' % self.datafile},
'write 64', 'write 64', 'write 52', 'close',
{'rc': 0}
])
d.addCallback(check)
return d
def test_truncated(self):
self.fakemaster.count_writes = True # get actual byte counts
self.make_command(transfer.WorkerFileUploadCommand, dict(
workdir='workdir',
workersrc='data',
writer=FakeRemote(self.fakemaster),
maxsize=100,
blocksize=64,
keepstamp=False,
))
d = self.run_command()
def check(_):
self.assertUpdates([
{'header': 'sending %s' % self.datafile},
'write 64', 'write 36', 'close',
{'rc': 1,
'stderr': "Maximum filesize reached, truncating file '%s'" % self.datafile}
])
d.addCallback(check)
return d
def test_missing(self):
self.make_command(transfer.WorkerFileUploadCommand, dict(
workdir='workdir',
workersrc='data-nosuch',
writer=FakeRemote(self.fakemaster),
maxsize=100,
blocksize=64,
keepstamp=False,
))
d = self.run_command()
def check(_):
df = self.datafile + "-nosuch"
self.assertUpdates([
{'header': 'sending %s' % df},
'close',
{'rc': 1,
'stderr': "Cannot open file '%s' for upload" % df}
])
d.addCallback(check)
return d
def test_out_of_space(self):
self.fakemaster.write_out_of_space_at = 70
self.fakemaster.count_writes = True # get actual byte counts
self.make_command(transfer.WorkerFileUploadCommand, dict(
workdir='workdir',
workersrc='data',
writer=FakeRemote(self.fakemaster),
maxsize=1000,
blocksize=64,
keepstamp=False,
))
d = self.run_command()
self.assertFailure(d, RuntimeError)
def check(_):
self.assertUpdates([
{'header': 'sending %s' % self.datafile},
'write 64', 'close',
{'rc': 1}
])
d.addCallback(check)
return d
def test_interrupted(self):
self.fakemaster.delay_write = True # write very slowly
self.make_command(transfer.WorkerFileUploadCommand, dict(
workdir='workdir',
workersrc='data',
writer=FakeRemote(self.fakemaster),
maxsize=100,
blocksize=2,
keepstamp=False,
))
d = self.run_command()
# wait a jiffy..
interrupt_d = defer.Deferred()
reactor.callLater(0.01, interrupt_d.callback, None)
# and then interrupt the step
def do_interrupt(_):
return self.cmd.interrupt()
interrupt_d.addCallback(do_interrupt)
dl = defer.DeferredList([d, interrupt_d])
def check(_):
self.assertUpdates([
{'header': 'sending %s' % self.datafile},
'write(s)', 'close', {'rc': 1}
])
dl.addCallback(check)
return dl
def test_timestamp(self):
self.fakemaster.count_writes = True # get actual byte counts
timestamp = (os.path.getatime(self.datafile),
os.path.getmtime(self.datafile))
self.make_command(transfer.WorkerFileUploadCommand, dict(
workdir='workdir',
workersrc='data',
writer=FakeRemote(self.fakemaster),
maxsize=1000,
blocksize=64,
keepstamp=True,
))
d = self.run_command()
def check(_):
self.assertUpdates([
{'header': 'sending %s' % self.datafile},
'write 64', 'write 64', 'write 52',
'close', 'utime - %s' % timestamp[0],
{'rc': 0}
])
d.addCallback(check)
return d
class TestWorkerDirectoryUpload(CommandTestMixin, unittest.TestCase):
def setUp(self):
self.setUpCommand()
self.fakemaster = FakeMasterMethods(self.add_update)
# write a directory to upload
self.datadir = os.path.join(self.basedir, 'workdir', 'data')
if os.path.exists(self.datadir):
shutil.rmtree(self.datadir)
os.makedirs(self.datadir)
with open(os.path.join(self.datadir, "aa"), mode="wb") as f:
f.write(b"lots of a" * 100)
with open(os.path.join(self.datadir, "bb"), mode="wb") as f:
f.write(b"and a little b" * 17)
def tearDown(self):
self.tearDownCommand()
if os.path.exists(self.datadir):
shutil.rmtree(self.datadir)
def test_simple(self, compress=None):
self.fakemaster.keep_data = True
self.make_command(transfer.WorkerDirectoryUploadCommand, dict(
workdir='workdir',
workersrc='data',
writer=FakeRemote(self.fakemaster),
maxsize=None,
blocksize=512,
compress=compress,
))
d = self.run_command()
def check(_):
self.assertUpdates([
{'header': 'sending %s' % self.datadir},
                'write(s)', 'unpack',  # note no 'close'
{'rc': 0}
])
d.addCallback(check)
def check_tarfile(_):
f = io.BytesIO(self.fakemaster.data)
a = tarfile.open(fileobj=f, name='check.tar', mode="r")
exp_names = ['.', 'aa', 'bb']
got_names = [n.rstrip('/') for n in a.getnames()]
# py27 uses '' instead of '.'
got_names = sorted([n or '.' for n in got_names])
self.assertEqual(got_names, exp_names, "expected archive contents")
a.close()
f.close()
d.addCallback(check_tarfile)
return d
# try it again with bz2 and gzip
def test_simple_bz2(self):
return self.test_simple('bz2')
def test_simple_gz(self):
return self.test_simple('gz')
# except bz2 can't operate in stream mode on py24
if sys.version_info[:2] <= (2, 4):
test_simple_bz2.skip = "bz2 stream decompression not supported on Python-2.4"
def test_out_of_space_unpack(self):
self.fakemaster.keep_data = True
self.fakemaster.unpack_fail = True
self.make_command(transfer.WorkerDirectoryUploadCommand, dict(
workdir='workdir',
workersrc='data',
writer=FakeRemote(self.fakemaster),
maxsize=None,
blocksize=512,
compress=None
))
d = self.run_command()
self.assertFailure(d, RuntimeError)
def check(_):
self.assertUpdates([
{'header': 'sending %s' % self.datadir},
'write(s)', 'unpack',
{'rc': 1}
])
d.addCallback(check)
return d
class TestDownloadFile(CommandTestMixin, unittest.TestCase):
def setUp(self):
self.setUpCommand()
self.fakemaster = FakeMasterMethods(self.add_update)
# the command will write to the basedir, so make sure it exists
if os.path.exists(self.basedir):
shutil.rmtree(self.basedir)
os.makedirs(self.basedir)
def tearDown(self):
self.tearDownCommand()
if os.path.exists(self.basedir):
shutil.rmtree(self.basedir)
def test_simple(self):
self.fakemaster.count_reads = True # get actual byte counts
self.fakemaster.data = test_data = b'1234' * 13
assert(len(self.fakemaster.data) == 52)
self.make_command(transfer.WorkerFileDownloadCommand, dict(
workdir='.',
workerdest='data',
reader=FakeRemote(self.fakemaster),
maxsize=None,
blocksize=32,
mode=0o777,
))
d = self.run_command()
def check(_):
self.assertUpdates([
'read 32', 'read 32', 'read 32', 'close',
{'rc': 0}
])
datafile = os.path.join(self.basedir, 'data')
self.assertTrue(os.path.exists(datafile))
with open(datafile, mode="rb") as f:
datafileContent = f.read()
self.assertEqual(datafileContent, test_data)
if runtime.platformType != 'win32':
self.assertEqual(os.stat(datafile).st_mode & 0o777, 0o777)
d.addCallback(check)
return d
def test_mkdir(self):
self.fakemaster.data = test_data = b'hi'
self.make_command(transfer.WorkerFileDownloadCommand, dict(
workdir='workdir',
workerdest=os.path.join('subdir', 'data'),
reader=FakeRemote(self.fakemaster),
maxsize=None,
blocksize=32,
mode=0o777,
))
d = self.run_command()
def check(_):
self.assertUpdates([
'read(s)', 'close',
{'rc': 0}
])
datafile = os.path.join(self.basedir, 'workdir', 'subdir', 'data')
self.assertTrue(os.path.exists(datafile))
with open(datafile, mode="rb") as f:
datafileContent = f.read()
self.assertEqual(datafileContent, test_data)
d.addCallback(check)
return d
def test_failure(self):
self.fakemaster.data = 'hi'
os.makedirs(os.path.join(self.basedir, 'dir'))
self.make_command(transfer.WorkerFileDownloadCommand, dict(
workdir='.',
workerdest='dir', # but that's a directory!
reader=FakeRemote(self.fakemaster),
maxsize=None,
blocksize=32,
mode=0o777,
))
d = self.run_command()
def check(_):
self.assertUpdates([
'close',
{'rc': 1,
'stderr': "Cannot open file '%s' for download"
% os.path.join(self.basedir, '.', 'dir')}
])
d.addCallback(check)
return d
def test_truncated(self):
self.fakemaster.data = test_data = b'tenchars--' * 10
self.make_command(transfer.WorkerFileDownloadCommand, dict(
workdir='.',
workerdest='data',
reader=FakeRemote(self.fakemaster),
maxsize=50,
blocksize=32,
mode=0o777,
))
d = self.run_command()
def check(_):
self.assertUpdates([
'read(s)', 'close',
{'rc': 1,
'stderr': "Maximum filesize reached, truncating file '%s'"
% os.path.join(self.basedir, '.', 'data')}
])
datafile = os.path.join(self.basedir, 'data')
self.assertTrue(os.path.exists(datafile))
with open(datafile, mode="rb") as f:
data = f.read()
self.assertEqual(data, test_data[:50])
d.addCallback(check)
return d
def test_interrupted(self):
self.fakemaster.data = b'tenchars--' * 100 # 1k
self.fakemaster.delay_read = True # read very slowly
self.make_command(transfer.WorkerFileDownloadCommand, dict(
workdir='.',
workerdest='data',
reader=FakeRemote(self.fakemaster),
maxsize=100,
blocksize=2,
mode=0o777,
))
d = self.run_command()
# wait a jiffy..
interrupt_d = defer.Deferred()
reactor.callLater(0.01, interrupt_d.callback, None)
# and then interrupt the step
def do_interrupt(_):
return self.cmd.interrupt()
interrupt_d.addCallback(do_interrupt)
dl = defer.DeferredList([d, interrupt_d])
def check(_):
self.assertUpdates([
'read(s)', 'close', {'rc': 1}
])
dl.addCallback(check)
return dl
|
rkashapov/buildbot
|
worker/buildbot_worker/test/unit/test_commands_transfer.py
|
Python
|
gpl-2.0
| 16,620
|
language_list = [
'en', # English
'es', # Spanish
'ja', # Japanese
'cz', # Czech
'da', # Danish
'de', # German
'fi', # Finnish
'fr', # French
'hu', # Hungarian
'it', # Italian
'ko', # Korean
'nl', # Dutch
'no', # Norwegian
'pl', # Polish
'pt', # Portuguese
'ru', # Russian
'sk', # Slovak
'sv', # Swedish
'tr', # Turkish
'zh', # Chinese
]
|
chrisism/plugin.program.advanced.emulator.launcher
|
dev-scrapers/data/ScreenScraper_languages.py
|
Python
|
gpl-2.0
| 525
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
#
'''
UI namespace for the Flask application.
'''
import flask
import requests
from dateutil import parser
from math import ceil
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm.exc import NoResultFound
import pkgdb2.forms
import pkgdb2.lib as pkgdblib
from pkgdb2 import SESSION, APP, is_admin, is_pkgdb_admin, is_pkg_admin, \
packager_login_required, is_authenticated
from pkgdb2.ui import UI
## Some of the objects we use here have inherited methods which apparently
## pylint does not detect.
# pylint: disable=E1101
@UI.route('/packages/')
@UI.route('/packages/<motif>/')
def list_packages(motif=None, orphaned=None, status=None,
origin='list_packages', case_sensitive=False):
''' Display the list of packages corresponding to the motif. '''
pattern = flask.request.args.get('motif', motif) or '*'
branches = flask.request.args.get('branches', None)
owner = flask.request.args.get('owner', None)
orphaned = flask.request.args.get('orphaned', orphaned)
if str(orphaned) in ['False', '0']:
orphaned = False
status = flask.request.args.get('status', status)
limit = flask.request.args.get('limit', APP.config['ITEMS_PER_PAGE'])
page = flask.request.args.get('page', 1)
case_sensitive = flask.request.args.get('case_sensitive', False)
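    # sanitise pagination arguments, falling back to the defaults on invalid input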
try:
page = abs(int(page))
except ValueError:
page = 1
try:
limit = abs(int(limit))
except ValueError:
limit = APP.config['ITEMS_PER_PAGE']
flask.flash('Incorrect limit provided, using default', 'errors')
packages = pkgdblib.search_package(
SESSION,
pkg_name=pattern,
pkg_branch=branches,
pkg_poc=owner,
orphaned=orphaned,
status=status,
page=page,
limit=limit,
case_sensitive=case_sensitive,
)
packages_count = pkgdblib.search_package(
SESSION,
pkg_name=pattern,
pkg_branch=branches,
pkg_poc=owner,
orphaned=orphaned,
status=status,
page=page,
limit=limit,
count=True,
case_sensitive=case_sensitive,
)
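    # total number of result pages, rounded up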
total_page = int(ceil(packages_count / float(limit)))
select = origin.replace('list_', '')
if len(packages) == 1:
flask.flash('Only one package matching, redirecting you to it')
return flask.redirect(flask.url_for(
'.package_info', package=packages[0].name))
return flask.render_template(
'list_packages.html',
origin=origin,
select=select,
packages=packages,
motif=motif,
total_page=total_page,
packages_count=packages_count,
page=page,
status=status,
owner=owner,
branches=branches,
)
@UI.route('/orphaned/')
@UI.route('/orphaned/<motif>/')
def list_orphaned(motif=None):
''' Display the list of orphaned packages corresponding to the motif.'''
return list_packages(motif=motif, orphaned=True, status='Orphaned',
origin='list_orphaned')
@UI.route('/retired/')
@UI.route('/retired/<motif>/')
def list_retired(motif=None):
''' Display the list of retired packages corresponding to the motif.'''
return list_packages(motif=motif, status='Retired', origin='list_retired')
## Too many branches
# pylint: disable=R0912
## Too many variables
# pylint: disable=R0914
## Too many statements
# pylint: disable=R0915
@UI.route('/package/<package>/')
def package_info(package):
''' Display the information about the specified package. '''
packagename = package
package = None
try:
package_acl = pkgdblib.get_acl_package(SESSION, packagename)
package = pkgdblib.search_package(SESSION, packagename, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
planned_acls = set(
pkgdblib.get_status(SESSION, 'pkg_acl')['pkg_acl'])
branches = set()
commit_acls = {}
watch_acls = {}
admins = {}
pending_admins = {}
pocs = {}
committers = []
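    # walk the ACLs of every non-EOL branch to build the commit/watch/admin/PoC maps shown in the template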
for pkg in package_acl:
if pkg.collection.status == 'EOL': # pragma: no cover
continue
collection_name = '%s %s' % (
pkg.collection.name, pkg.collection.version)
branches.add(collection_name)
if pkg.point_of_contact not in pocs:
pocs[pkg.point_of_contact] = set()
pocs[pkg.point_of_contact].add(collection_name)
for acl in pkg.acls:
if acl.acl == 'approveacls' and acl.status == 'Approved':
if acl.fas_name not in admins:
admins[acl.fas_name] = set()
admins[acl.fas_name].add(collection_name)
elif acl.acl == 'approveacls' and acl.status == 'Awaiting Review':
if acl.fas_name not in pending_admins:
pending_admins[acl.fas_name] = set()
pending_admins[acl.fas_name].add(collection_name)
if acl.acl == 'commit':
dic = commit_acls
if acl.status == 'Approved':
committers.append(acl.fas_name)
elif acl.acl.startswith('watch') and acl.status == 'Approved':
dic = watch_acls
else: # pragma: no cover -- pass isn't `covered` by coverage
# We managed approveacls earlier
continue
if acl.fas_name not in dic:
dic[acl.fas_name] = {}
if collection_name not in dic[acl.fas_name]:
dic[acl.fas_name][collection_name] = {}
dic[acl.fas_name][collection_name][acl.acl] = \
acl.status
for aclname in planned_acls:
for user in commit_acls:
if collection_name in commit_acls[user] and \
aclname not in commit_acls[user][collection_name]:
commit_acls[user][collection_name][aclname] = None
for aclname in planned_acls:
for user in watch_acls:
if collection_name in watch_acls[user] and \
aclname not in watch_acls[user][collection_name]:
watch_acls[user][collection_name][aclname] = None
statuses = set([
listing.status
for listing in package.sorted_listings
if listing.collection.status != 'EOL'
])
collections = pkgdb2.lib.search_collection(
SESSION, '*', 'Under Development')
collections.extend(pkgdb2.lib.search_collection(SESSION, '*', 'Active'))
branches_possible = [
collec.branchname
for collec in collections
if '%s %s' % (collec.name, collec.version) not in branches]
requester = False
if is_authenticated():
for req in package.requests:
if req.user == flask.g.fas_user.username:
requester = True
break
return flask.render_template(
'package.html',
package=package,
commit_acls=commit_acls,
watch_acls=watch_acls,
pocs=pocs,
admins=admins,
statuses=statuses,
pending_admins=pending_admins,
branches=branches,
branches_possible=branches_possible,
committers=committers,
form=pkgdb2.forms.ConfirmationForm(),
requester=requester,
)
@UI.route('/package/<package>/timeline')
def package_timeline(package):
""" Return the timeline of a specified package.
"""
from_date = flask.request.args.get('from_date', None)
packager = flask.request.args.get('packager', None)
limit = flask.request.args.get('limit', APP.config['ITEMS_PER_PAGE'])
page = flask.request.args.get('page', 1)
try:
page = abs(int(page))
except ValueError:
page = 1
try:
limit = abs(int(limit))
except ValueError:
limit = APP.config['ITEMS_PER_PAGE']
flask.flash('Incorrect limit provided, using default', 'errors')
if from_date:
try:
from_date = parser.parse(from_date)
except (ValueError, TypeError):
flask.flash(
'Incorrect from_date provided, using default', 'errors')
from_date = None
## Could not infer the date() function
# pylint: disable=E1103
if from_date:
from_date = from_date.date()
logs = []
cnt_logs = 0
try:
logs = pkgdblib.search_logs(
SESSION,
package=package or None,
packager=packager or None,
from_date=from_date,
page=page,
limit=limit,
)
cnt_logs = pkgdblib.search_logs(
SESSION,
package=package or None,
packager=packager or None,
from_date=from_date,
count=True
)
except pkgdblib.PkgdbException, err:
flask.flash(err, 'errors')
total_page = int(ceil(cnt_logs / float(limit)))
return flask.render_template(
'package_timeline.html',
logs=logs,
cnt_logs=cnt_logs,
total_page=total_page,
page=page,
package=package,
from_date=from_date or '',
packager=packager or '',
)
@UI.route('/package/<package>/anitya')
@UI.route('/package/<package>/anitya/<full>')
def package_anitya(package, full=True):
""" Return information anitya integration about this package.
"""
if str(full).lower() in ['0', 'false']:
full = False
pkg = None
try:
pkg = pkgdblib.search_package(SESSION, package, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
url = '%s/api/project/%s/%s' % (
APP.config['PKGDB2_ANITYA_URL'],
APP.config['PKGDB2_ANITYA_DISTRO'],
package
)
data = {}
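    # query the configured anitya instance for upstream information about this package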
try:
req = requests.get(url)
if req.status_code != 200:
raise pkgdblib.PkgdbException(
'Querying anitya returned a status %s' % req.status_code)
else:
data = req.json()
except Exception, err:
flask.flash(err.message, 'error')
pass
return flask.render_template(
'package_anitya.html',
full=full,
package=package,
pkg=pkg,
data=data,
)
@UI.route('/package/requests/<action_id>', methods=['GET', 'POST'])
def package_request_edit(action_id):
""" Edit an Admin Action status
"""
admin_action = pkgdblib.get_admin_action(SESSION, action_id)
if not admin_action:
flask.flash('No action found with this identifier.', 'errors')
return flask.render_template('msg.html')
package = None
if admin_action.package:
package = admin_action.package.name
if admin_action.status in ['Accepted', 'Blocked', 'Denied']:
return flask.render_template(
'actions_update_ro.html',
admin_action=admin_action,
action_id=action_id,
)
    if not is_authenticated() or 'packager' not in flask.g.fas_user.groups:
return flask.render_template(
'actions_update_ro.html',
admin_action=admin_action,
action_id=action_id,
)
# Check user is the pkg/pkgdb admin
pkg_admin = pkgdblib.has_acls(
SESSION, flask.g.fas_user.username, package, 'approveacls')
if not is_pkgdb_admin(flask.g.fas_user) \
and not pkg_admin \
and not admin_action.user == flask.g.fas_user.username:
flask.flash(
            'Only package administrators (`approveacls`) and the requester '
'can review pending branch requests', 'errors')
if package:
return flask.redirect(
flask.url_for('.package_info', package=package)
)
else:
return flask.redirect(
flask.url_for(
'.packager_requests',
packager=flask.g.fas_user.username)
)
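    # the statuses offered in the edit form depend on who is reviewing the request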
action_status = ['Pending', 'Awaiting Review', 'Blocked']
if admin_action.user == flask.g.fas_user.username:
action_status = ['Pending', 'Obsolete']
if pkg_admin or admin_action.action == 'request.package':
action_status.append('Awaiting Review')
form = pkgdb2.forms.EditActionStatusForm(
status=action_status,
obj=admin_action
)
form.id.data = action_id
if form.validate_on_submit():
try:
message = pkgdblib.edit_action_status(
SESSION,
admin_action,
action_status=form.status.data,
user=flask.g.fas_user,
message=form.message.data,
)
SESSION.commit()
flask.flash(message)
except pkgdblib.PkgdbException, err: # pragma: no cover
# We can only reach here in two cases:
# 1) the user is not an admin, but that's taken care of
# by the decorator
# 2) we have a SQLAlchemy problem when storing the info
# in the DB which we cannot test
SESSION.rollback()
flask.flash(err, 'errors')
return flask.render_template('msg.html')
if package:
return flask.redirect(
flask.url_for('.package_info', package=package)
)
else:
return flask.redirect(
flask.url_for(
'.packager_requests',
packager=flask.g.fas_user.username)
)
return flask.render_template(
'actions_update.html',
admin_action=admin_action,
action_id=action_id,
form=form,
package=package,
tag='packages',
)
@UI.route('/new/package/', methods=('GET', 'POST'))
@is_admin
def package_new():
''' Page to create a new package. '''
collections = pkgdb2.lib.search_collection(
SESSION, '*', 'Under Development')
collections.extend(pkgdb2.lib.search_collection(SESSION, '*', 'Active'))
pkg_status = pkgdb2.lib.get_status(SESSION, 'pkg_status')['pkg_status']
form = pkgdb2.forms.AddPackageForm(
collections=collections,
pkg_status_list=pkg_status,
)
if form.validate_on_submit():
pkg_name = form.pkgname.data
pkg_summary = form.summary.data
pkg_description = form.description.data
pkg_review_url = form.review_url.data
pkg_status = form.status.data
pkg_critpath = form.critpath.data
pkg_collection = form.branches.data
pkg_poc = form.poc.data
pkg_upstream_url = form.upstream_url.data
try:
message = pkgdblib.add_package(
SESSION,
pkg_name=pkg_name,
pkg_summary=pkg_summary,
pkg_description=pkg_description,
pkg_review_url=pkg_review_url,
pkg_status=pkg_status,
pkg_critpath=pkg_critpath,
pkg_collection=pkg_collection,
pkg_poc=pkg_poc,
pkg_upstream_url=pkg_upstream_url,
user=flask.g.fas_user,
)
SESSION.commit()
flask.flash(message)
return flask.redirect(flask.url_for('.list_packages'))
# Keep it in, but normally we shouldn't hit this
except pkgdblib.PkgdbException, err: # pragma: no cover
SESSION.rollback()
flask.flash(str(err), 'error')
return flask.render_template(
'package_new.html',
form=form,
)
@UI.route('/package/<package>/give', methods=('GET', 'POST'))
@UI.route('/package/<package>/give/<full>', methods=('GET', 'POST'))
@packager_login_required
def package_give(package, full=True):
''' Gives the PoC of a package to someone else. '''
if not bool(full) or str(full) in ['0', 'False']:
full = False
packagename = package
package = None
try:
package_acl = pkgdblib.get_acl_package(SESSION, packagename)
package = pkgdblib.search_package(SESSION, packagename, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
# Restrict the branch to the one current user is PoC of (unless admin
# or group)
collect_name = []
for acl in package_acl:
if acl.point_of_contact != flask.g.fas_user.username and \
not is_pkgdb_admin(flask.g.fas_user) and \
not acl.point_of_contact.startswith('group::'):
pass
else:
if acl.point_of_contact.startswith('group::'):
group = acl.point_of_contact.split('group::')[0]
if group not in flask.g.fas_user.groups:
pass
elif acl.collection.status != 'EOL':
collect_name.append(acl.collection.branchname)
form = pkgdb2.forms.GivePoCForm(collections=collect_name)
acls = ['commit', 'watchbugzilla', 'watchcommits', 'approveacls']
if form.validate_on_submit():
collections = form.branches.data
pkg_poc = form.poc.data
if pkg_poc.startswith('group::'):
acls = ['commit', 'watchbugzilla', 'watchcommits']
try:
for pkg_collection in collections:
message = pkgdblib.update_pkg_poc(
SESSION,
pkg_name=packagename,
pkg_branch=pkg_collection,
pkg_poc=pkg_poc,
user=flask.g.fas_user,
)
flask.flash(message)
for acl in acls:
pkgdblib.set_acl_package(
SESSION,
pkg_name=packagename,
pkg_branch=pkg_collection,
pkg_user=pkg_poc,
acl=acl,
status='Approved',
user=flask.g.fas_user
)
SESSION.commit()
except pkgdblib.PkgdbBugzillaException, err: # pragma: no cover
APP.logger.exception(err)
flask.flash(str(err), 'error')
SESSION.rollback()
except pkgdblib.PkgdbException, err:
SESSION.rollback()
flask.flash(str(err), 'error')
return flask.redirect(
flask.url_for('.package_info', package=packagename)
)
return flask.render_template(
'package_give.html',
full=full,
form=form,
packagename=packagename,
)
@UI.route('/package/<package>/orphan', methods=('GET', 'POST'))
@UI.route('/package/<package>/orphan/<full>', methods=('GET', 'POST'))
@packager_login_required
def package_orphan(package, full=True):
''' Gives the possibility to orphan or take a package. '''
if not bool(full) or str(full) in ['0', 'False']:
full = False
try:
package_acl = pkgdblib.get_acl_package(SESSION, package)
package = pkgdblib.search_package(SESSION, package, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
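    # offer only the active / in-development branches the current user is allowed to orphan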
collections = [
acl.collection.branchname
for acl in package_acl
if acl.collection.status in ['Active', 'Under Development']
and acl.status == 'Approved'
and (
is_pkgdb_admin(flask.g.fas_user)
or acl.point_of_contact == flask.g.fas_user.username
or (
acl.point_of_contact.startswith('group::') and
is_pkg_admin(SESSION, flask.g.fas_user, package.name)
)
)
]
form = pkgdb2.forms.BranchForm(collections=collections)
if form.validate_on_submit():
for branch in form.branches.data:
try:
pkgdblib.update_pkg_poc(
session=SESSION,
pkg_name=package.name,
pkg_branch=branch,
pkg_poc='orphan',
user=flask.g.fas_user
)
flask.flash(
'You are no longer point of contact on branch: %s'
% branch)
except pkgdblib.PkgdbBugzillaException, err: # pragma: no cover
APP.logger.exception(err)
flask.flash(str(err), 'error')
SESSION.rollback()
except pkgdblib.PkgdbException, err: # pragma: no cover
flask.flash(str(err), 'error')
SESSION.rollback()
try:
SESSION.commit()
# Keep it in, but normally we shouldn't hit this
except pkgdblib.PkgdbException, err: # pragma: no cover
SESSION.rollback()
flask.flash(str(err), 'error')
return flask.redirect(
flask.url_for('.package_info', package=package.name))
return flask.render_template(
'branch_selection.html',
full=full,
package=package,
form=form,
action='orphan',
)
@UI.route('/package/<package>/retire', methods=('GET', 'POST'))
@UI.route('/package/<package>/retire/<full>', methods=('GET', 'POST'))
@packager_login_required
def package_retire(package, full=True):
    ''' Gives the possibility to retire an orphaned package. '''
if not bool(full) or str(full) in ['0', 'False']:
full = False
try:
package_acl = pkgdblib.get_acl_package(SESSION, package)
package = pkgdblib.search_package(SESSION, package, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
if not is_pkgdb_admin(flask.g.fas_user):
flask.flash(
            'Only admins are allowed to retire packages here, '
'you should use `fedpkg retire`.', 'errors')
return flask.redirect(
flask.url_for('.package_info', package=package.name))
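    # retiring is only offered on branches that are already orphaned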
collections = [
acl.collection.branchname
for acl in package_acl
if acl.collection.status in ['Active', 'Under Development']
and acl.status == 'Orphaned'
]
form = pkgdb2.forms.BranchForm(collections=collections)
if form.validate_on_submit():
for acl in package_acl:
if acl.collection.branchname in form.branches.data:
if acl.point_of_contact == 'orphan':
try:
pkgdblib.update_pkg_status(
session=SESSION,
pkg_name=package.name,
pkg_branch=acl.collection.branchname,
status='Retired',
user=flask.g.fas_user
)
flask.flash(
'This package has been retired on branch: %s'
% acl.collection.branchname)
except pkgdblib.PkgdbException, err: # pragma: no cover
# We should never hit this
flask.flash(str(err), 'error')
SESSION.rollback()
APP.logger.exception(err)
else: # pragma: no cover
flask.flash(
'This package has not been orphaned on '
'branch: %s' % acl.collection.branchname)
try:
SESSION.commit()
# Keep it in, but normally we shouldn't hit this
except pkgdblib.PkgdbException, err: # pragma: no cover
# We should never hit this
SESSION.rollback()
APP.logger.exception(err)
flask.flash(str(err), 'error')
return flask.redirect(
flask.url_for('.package_info', package=package.name))
return flask.render_template(
'branch_selection.html',
full=full,
package=package,
form=form,
action='retire',
)
@UI.route('/package/<package>/unretire', methods=('GET', 'POST'))
@UI.route('/package/<package>/unretire/<full>', methods=('GET', 'POST'))
@packager_login_required
def package_unretire(package, full=True):
''' Asks an admin to unretire the package. '''
if not bool(full) or str(full) in ['0', 'False']:
full = False
try:
package_acl = pkgdblib.get_acl_package(SESSION, package)
package = pkgdblib.search_package(SESSION, package, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
collections = [
acl.collection.branchname
for acl in package_acl
if acl.collection.status in ['Active', 'Under Development']
and acl.status == 'Retired'
]
form = pkgdb2.forms.BranchForm(collections=collections)
if form.validate_on_submit():
for acl in package_acl:
if acl.collection.branchname in form.branches.data:
if acl.point_of_contact == 'orphan':
try:
pkgdblib.add_unretire_request(
session=SESSION,
pkg_name=package.name,
pkg_branch=acl.collection.branchname,
user=flask.g.fas_user,
)
flask.flash(
'Admins have been asked to un-retire branch: %s'
% acl.collection.branchname)
except pkgdblib.PkgdbException, err: # pragma: no cover
# We should never hit this
flask.flash(str(err), 'error')
SESSION.rollback()
except SQLAlchemyError, err:
SESSION.rollback()
flask.flash(
'Could not save the request for branch: %s, has '
'it already been requested?'
% acl.collection.branchname, 'error')
else: # pragma: no cover
flask.flash(
'This package is not orphaned on branch: %s'
% acl.collection.branchname)
try:
SESSION.commit()
# Keep it in, but normally we shouldn't hit this
except pkgdblib.PkgdbException, err: # pragma: no cover
# We should never hit this
SESSION.rollback()
APP.logger.exception(err)
flask.flash(str(err), 'error')
return flask.redirect(
flask.url_for('.package_info', package=package.name))
return flask.render_template(
'branch_selection.html',
full=full,
package=package,
form=form,
action='unretire',
)
@UI.route('/package/<package>/take', methods=('GET', 'POST'))
@UI.route('/package/<package>/take/<full>', methods=('GET', 'POST'))
@packager_login_required
def package_take(package, full=True):
''' Make someone Point of contact of an orphaned package. '''
if not bool(full) or str(full) in ['0', 'False']:
full = False
try:
package_acl = pkgdblib.get_acl_package(SESSION, package)
package = pkgdblib.search_package(SESSION, package, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
collections = [
acl.collection.branchname
for acl in package_acl
if acl.collection.status in ['Active', 'Under Development']
and acl.status == 'Orphaned'
]
if not collections:
        flask.flash('No orphaned branches found', 'error')
return flask.redirect(
flask.url_for('.package_info', package=package.name))
form = pkgdb2.forms.BranchForm(collections=collections)
if form.validate_on_submit():
for branch in form.branches.data:
try:
pkgdblib.unorphan_package(
session=SESSION,
pkg_name=package.name,
pkg_branch=branch,
pkg_user=flask.g.fas_user.username,
user=flask.g.fas_user
)
SESSION.commit()
flask.flash('You have taken the package %s on branch %s' % (
package.name, branch))
except pkgdblib.PkgdbBugzillaException, err: # pragma: no cover
APP.logger.exception(err)
flask.flash(str(err), 'error')
SESSION.rollback()
except pkgdblib.PkgdbException, err: # pragma: no cover
flask.flash(str(err), 'error')
SESSION.rollback()
return flask.redirect(
flask.url_for('.package_info', package=package.name))
return flask.render_template(
'branch_selection.html',
full=full,
package=package,
form=form,
action='take',
)
@UI.route('/package/<package>/acl/<update_acl>/', methods=('GET', 'POST'))
@packager_login_required
def update_acl(package, update_acl):
''' Update the acls of a package. '''
packagename = package
package = None
try:
package_acl = pkgdblib.get_acl_package(SESSION, packagename)
package = pkgdblib.search_package(SESSION, packagename, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
    statuses = pkgdblib.get_status(SESSION)
    planned_acls = set(statuses['pkg_acl'])
    acl_status = list(set(statuses['acl_status']))
acl_status.insert(0, '')
if update_acl not in planned_acls:
flask.flash('Invalid ACL to update.', 'errors')
return flask.redirect(
flask.url_for('.package_info', package=package.name))
branches = {}
branches_inv = {}
commit_acls = {}
admins = {}
committers = []
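    # branches maps '<name> <version>' display names to branch names and
    # branches_inv is the reverse; commit_acls maps user -> collection -> acl
    # -> status; admins maps every approveacls holder to their collections.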
for pkg in package_acl:
if pkg.collection.status == 'EOL': # pragma: no cover
continue
collection_name = '%s %s' % (
pkg.collection.name, pkg.collection.version)
if collection_name not in branches:
branches[collection_name] = pkg.collection.branchname
if pkg.collection.branchname not in branches_inv:
branches_inv[pkg.collection.branchname] = collection_name
for acl in pkg.acls:
if acl.acl == 'approveacls' and acl.status == 'Approved':
if acl.fas_name not in admins:
admins[acl.fas_name] = set()
admins[acl.fas_name].add(collection_name)
if acl.acl != update_acl:
continue
committers.append(acl.fas_name)
if acl.fas_name not in commit_acls:
commit_acls[acl.fas_name] = {}
if collection_name not in commit_acls[acl.fas_name]:
commit_acls[acl.fas_name][collection_name] = {}
commit_acls[acl.fas_name][collection_name][acl.acl] = \
acl.status
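        # Fill in the missing ACL entries with None so every listed user has
        # a value for each planned ACL on this collection.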
for aclname in planned_acls:
for user in commit_acls:
if collection_name in commit_acls[user] and \
aclname not in commit_acls[user][collection_name]:
commit_acls[user][collection_name][aclname] = None
# If the user is not an admin, he/she can only access his/her ACLs
username = flask.g.fas_user.username
if username not in admins and not is_pkgdb_admin(flask.g.fas_user):
tmp = {username: []}
if username in commit_acls:
tmp = {username: commit_acls[username]}
commit_acls = tmp
form = pkgdb2.forms.ConfirmationForm()
if form.validate_on_submit():
sub_acls = flask.request.values.getlist('acls')
sub_users = flask.request.values.getlist('user')
sub_branches = flask.request.values.getlist('branch')
changed = False
if sub_acls and len(sub_acls) == (len(sub_users) * len(sub_branches)):
cnt = 0
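            # The submitted 'acls' values come in as a flat list covering
            # every (user, branch) pair; cnt walks that flat list in step
            # with the two nested loops below.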
for cnt_u in range(len(sub_users)):
for cnt_b in range(len(sub_branches)):
lcl_acl = sub_acls[cnt]
lcl_user = sub_users[cnt_u]
lcl_branch = sub_branches[cnt_b]
if lcl_acl not in acl_status:
flask.flash('Invalid ACL: %s' % lcl_acl, 'error')
cnt += 1
continue
if lcl_user not in commit_acls:
flask.flash('Invalid user: %s' % lcl_user, 'error')
cnt += 1
continue
if lcl_branch not in branches_inv or (
branches_inv[lcl_branch] in commit_acls[lcl_user]
and commit_acls[lcl_user][
branches_inv[lcl_branch]][
update_acl] == lcl_acl):
cnt += 1
continue
if not lcl_acl:
if branches_inv[lcl_branch] \
not in commit_acls[lcl_user]:
cnt += 1
continue
elif branches_inv[lcl_branch] \
in commit_acls[lcl_user] \
and username != lcl_user:
flask.flash(
'Only the user can remove his/her ACL',
'error')
cnt += 1
continue
try:
pkgdblib.set_acl_package(
SESSION,
pkg_name=package.name,
pkg_branch=lcl_branch,
pkg_user=lcl_user,
acl=update_acl,
status=lcl_acl,
user=flask.g.fas_user,
)
SESSION.commit()
flask.flash("%s's %s ACL updated on %s" % (
lcl_user, update_acl, lcl_branch))
changed = True
except pkgdblib.PkgdbException, err:
SESSION.rollback()
flask.flash(str(err), 'error')
cnt += 1
SESSION.commit()
if not changed:
flask.flash('Nothing to update')
return flask.redirect(
flask.url_for('.package_info', package=package.name))
else:
flask.flash('Invalid input submitted', 'error')
return flask.render_template(
'acl_update.html',
acl=update_acl,
acl_status=acl_status,
package=package,
form=form,
branches=branches,
commit_acls=commit_acls,
admins=admins,
committers=committers,
)
@UI.route('/package/<package>/delete', methods=['POST'])
@is_admin
def delete_package(package):
''' Delete the specified package.
'''
form = pkgdb2.forms.ConfirmationForm()
if not form.validate_on_submit():
flask.flash('Invalid input', 'error')
return flask.redirect(
flask.url_for('.package_info', package=package))
packagename = package
package = None
try:
package = pkgdblib.search_package(SESSION, packagename, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
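    # Remove the package's ACLs first, then each branch listing, and finally
    # the package itself, logging every removal.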
for pkglist in package.listings:
for acl in pkglist.acls:
pkgdb2.lib.utils.log(SESSION, None, 'acl.delete', dict(
agent=flask.g.fas_user.username,
acl=acl.to_json(),
))
SESSION.delete(acl)
pkgdb2.lib.utils.log(SESSION, None, 'package.branch.delete', dict(
agent=flask.g.fas_user.username,
package_listing=pkglist.to_json(),
))
SESSION.delete(pkglist)
pkgdb2.lib.utils.log(SESSION, None, 'package.delete', dict(
agent=flask.g.fas_user.username,
package=package.to_json(),
))
SESSION.delete(package)
try:
SESSION.commit()
flask.flash('Package %s deleted' % packagename)
except SQLAlchemyError, err: # pragma: no cover
SESSION.rollback()
flask.flash(
'An error occured while trying to delete the package %s'
% packagename, 'error')
APP.logger.debug('Could not delete package: %s', packagename)
APP.logger.exception(err)
return flask.redirect(
flask.url_for('.package_info', package=package.name))
return flask.redirect(
flask.url_for('.list_packages'))
@UI.route('/package/<package>/request_branch', methods=('GET', 'POST'))
@UI.route('/package/<package>/request_branch/<full>', methods=('GET', 'POST'))
@packager_login_required
def package_request_branch(package, full=True):
''' Gives the possibility to request a new branch for this package. '''
if not bool(full) or str(full) in ['0', 'False']:
full = False
try:
package_acl = pkgdblib.get_acl_package(SESSION, package)
package = pkgdblib.search_package(SESSION, package, limit=1)[0]
except (NoResultFound, IndexError):
SESSION.rollback()
flask.flash('No package of this name found.', 'errors')
return flask.render_template('msg.html')
branches = [
pkg.collection.branchname
for pkg in package_acl
if pkg.collection.status != 'EOL'
]
collections = pkgdb2.lib.search_collection(
SESSION, '*', 'Under Development')
collections.extend(pkgdb2.lib.search_collection(SESSION, '*', 'Active'))
branches_possible = [
collec.branchname
for collec in collections
if collec.branchname not in branches]
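    # Offer only the live branches that the package does not already have.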
form = pkgdb2.forms.BranchForm(collections=branches_possible)
if form.validate_on_submit():
for branch in form.branches.data:
try:
msg = pkgdblib.add_new_branch_request(
session=SESSION,
pkg_name=package.name,
clt_to=branch,
user=flask.g.fas_user)
SESSION.commit()
flask.flash(msg)
except pkgdblib.PkgdbException, err: # pragma: no cover
flask.flash(str(err), 'error')
SESSION.rollback()
except SQLAlchemyError, err: # pragma: no cover
APP.logger.exception(err)
flask.flash(
'Could not save the request to the database for '
'branch: %s' % branch, 'error')
SESSION.rollback()
return flask.redirect(
flask.url_for('.package_info', package=package.name))
return flask.render_template(
'request_branch.html',
full=full,
package=package,
form=form,
action='request_branch',
)
@UI.route('/request/package/', methods=('GET', 'POST'))
@packager_login_required
def package_request_new():
''' Page to request a new package. '''
collections = pkgdb2.lib.search_collection(SESSION, '*', 'Under Development')
collections.reverse()
active_collections = pkgdb2.lib.search_collection(SESSION, '*', 'Active')
active_collections.reverse()
    # We want all the `Under Development` branches as well as all the
    # `Active` ones, but at most 2 Fedora branches can be active at the
    # same time. In other words, once Fedora n+1 is released one can no
    # longer request a package to be added to Fedora n-1.
cnt = 0
for collection in active_collections:
if collection.name.lower() == 'fedora':
if cnt >= 2:
continue
cnt += 1
collections.append(collection)
form = pkgdb2.forms.RequestPackageForm(
collections=collections,
)
if form.validate_on_submit():
pkg_name = form.pkgname.data
pkg_summary = form.summary.data
pkg_description = form.description.data
pkg_review_url = form.review_url.data
pkg_status = 'Approved'
pkg_critpath = False
pkg_collection = form.branches.data
        if 'master' not in pkg_collection:
            flask.flash(
                'Adding a request for the `master` branch, as this branch '
                'is mandatory')
            pkg_collection.append('master')
pkg_poc = flask.g.fas_user.username
pkg_upstream_url = form.upstream_url.data
try:
messages = []
for clt in pkg_collection:
message = pkgdblib.add_new_package_request(
SESSION,
pkg_name=pkg_name,
pkg_summary=pkg_summary,
pkg_description=pkg_description,
pkg_review_url=pkg_review_url,
pkg_status=pkg_status,
pkg_critpath=pkg_critpath,
pkg_collection=clt,
pkg_poc=pkg_poc,
pkg_upstream_url=pkg_upstream_url,
user=flask.g.fas_user,
)
if message:
messages.append(message)
SESSION.commit()
for message in messages:
flask.flash(message)
return flask.redirect(flask.url_for('.index'))
# Keep it in, but normally we shouldn't hit this
except pkgdblib.PkgdbException, err: # pragma: no cover
SESSION.rollback()
flask.flash(str(err), 'error')
return flask.render_template(
'package_request.html',
form=form,
)
|
crobinso/pkgdb2
|
pkgdb2/ui/packages.py
|
Python
|
gpl-2.0
| 43,624
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the Shiboken Python Bindings Generator project.
#
# Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
#
# Contact: PySide team <contact@pyside.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# version 2.1 as published by the Free Software Foundation. Please
# review the following information to ensure the GNU Lesser General
# Public License version 2.1 requirements will be met:
# http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
import unittest
from minimal import ListUser, Val, Obj
from py3kcompat import IS_PY3K
if IS_PY3K:
import functools
reduce = functools.reduce
class ExtListUser(ListUser):
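    # Overrides ListUser's virtual methods; the callXxx() counterparts used
    # in the tests below call them back from the C++ side, so both the
    # direct Python path and the virtual dispatch path are exercised.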
def __init__(self):
ListUser.__init__(self)
def createIntList(self, num):
return list(range(0, num * 2, 2))
def sumIntList(self, intList):
return sum(intList) * 2
def createMinBoolList(self, mb1, mb2):
return [not mb1, not mb2]
def oredMinBoolList(self, minBoolList):
return not reduce(lambda a, b: a|b, minBoolList)
def createValList(self, num):
return [Val(i) for i in range(0, num * 2, 2)]
def sumValList(self, valList):
return sum([val.valId() for val in valList]) * 2
def createObjList(self, o1, o2):
o1.setObjId(o1.objId() * 2)
o2.setObjId(o2.objId() * 2)
return [o1, o2]
def sumObjList(self, objList):
return sum([obj.objId() for obj in objList]) * 2
def createListOfIntLists(self, num):
return [self.createIntList(num)] * 4
def sumListOfIntLists(self, intListList):
return sum([sum(line) for line in intListList]) * 2
class IntListConversionTest(unittest.TestCase):
def testCreateIntList(self):
num = 4
lu = ListUser()
lst = lu.createIntList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), int)
self.assertEqual(lst, list(range(num)))
lst = lu.callCreateIntList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), int)
self.assertEqual(lst, list(range(num)))
def testCreateIntListFromExtendedClass(self):
lu = ExtListUser()
num = 4
lst = lu.createIntList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), int)
self.assertEqual(lst, list(range(0, num * 2, 2)))
lst = lu.callCreateIntList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), int)
self.assertEqual(lst, list(range(0, num * 2, 2)))
def testSumIntList(self):
lu = ListUser()
lst = range(4)
self.assertEqual(lu.sumIntList(lst), sum(lst))
self.assertEqual(lu.callSumIntList(lst), sum(lst))
def testSumIntListFromExtendedClass(self):
lu = ExtListUser()
lst = range(4)
self.assertEqual(lu.sumIntList(lst), sum(lst) * 2)
self.assertEqual(lu.callSumIntList(lst), sum(lst) * 2)
class MinBoolListConversionTest(unittest.TestCase):
def testCreateMinBoolList(self):
lu = ListUser()
lst = lu.createMinBoolList(True, False)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), bool)
self.assertEqual(lst, [True, False])
lst = lu.callCreateMinBoolList(False, True)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), bool)
self.assertEqual(lst, [False, True])
def testCreateMinBoolListFromExtendedClass(self):
lu = ExtListUser()
lst = lu.createMinBoolList(True, False)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), bool)
self.assertEqual(lst, [False, True])
lst = lu.callCreateMinBoolList(False, True)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), bool)
self.assertEqual(lst, [True, False])
def testOredMinBoolList(self):
lu = ListUser()
lst = [False, False, True]
self.assertTrue(lu.oredMinBoolList(lst))
self.assertTrue(lu.callOredMinBoolList(lst))
lst = [False, False, False]
self.assertFalse(lu.oredMinBoolList(lst))
self.assertFalse(lu.callOredMinBoolList(lst))
def testOredMinBoolListFromExtendedClass(self):
lu = ExtListUser()
lst = [False, False, True]
self.assertFalse(lu.oredMinBoolList(lst))
self.assertFalse(lu.callOredMinBoolList(lst))
lst = [False, False, False]
self.assertTrue(lu.oredMinBoolList(lst))
self.assertTrue(lu.callOredMinBoolList(lst))
class ValListConversionTest(unittest.TestCase):
def testCreateValList(self):
num = 4
lu = ListUser()
lst = lu.createValList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), Val)
self.assertEqual([val.valId() for val in lst], list(range(num)))
lst = lu.callCreateValList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), Val)
self.assertEqual([val.valId() for val in lst], list(range(num)))
def testCreateValListFromExtendedClass(self):
lu = ExtListUser()
num = 4
lst = lu.createValList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), Val)
self.assertEqual([val.valId() for val in lst], list(range(0, num * 2, 2)))
lst = lu.callCreateValList(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), Val)
self.assertEqual([val.valId() for val in lst], list(range(0, num * 2, 2)))
def testSumValList(self):
lu = ListUser()
lst = [Val(i) for i in range(4)]
self.assertEqual(lu.sumValList(lst), sum([val.valId() for val in lst]))
self.assertEqual(lu.callSumValList(lst), sum([val.valId() for val in lst]))
def testSumValListFromExtendedClass(self):
lu = ExtListUser()
lst = [Val(i) for i in range(4)]
self.assertEqual(lu.sumValList(lst), sum([val.valId() for val in lst]) * 2)
self.assertEqual(lu.callSumValList(lst), sum([val.valId() for val in lst]) * 2)
class ObjListConversionTest(unittest.TestCase):
def testCreateObjList(self):
o1 = Obj(1)
o2 = Obj(2)
lu = ListUser()
lst = lu.createObjList(o1, o2)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), Obj)
self.assertEqual(lst, [o1, o2])
self.assertEqual([obj.objId() for obj in lst], [1, 2])
lst = lu.callCreateObjList(o1, o2)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), Obj)
self.assertEqual(lst, [o1, o2])
self.assertEqual([obj.objId() for obj in lst], [1, 2])
def testCreateObjListFromExtendedClass(self):
o1 = Obj(1)
o2 = Obj(2)
lu = ExtListUser()
lst = lu.createObjList(o1, o2)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), Obj)
self.assertEqual(lst, [o1, o2])
self.assertEqual([obj.objId() for obj in lst], [2, 4])
lst = lu.callCreateObjList(o1, o2)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), 2)
for i in lst:
self.assertEqual(type(i), Obj)
self.assertEqual(lst, [o1, o2])
self.assertEqual([obj.objId() for obj in lst], [4, 8])
def testSumObjList(self):
lu = ListUser()
lst = [Obj(i) for i in list(range(4))]
self.assertEqual(lu.sumObjList(lst), sum([obj.objId() for obj in lst]))
self.assertEqual(lu.callSumObjList(lst), sum([obj.objId() for obj in lst]))
def testSumObjListFromExtendedClass(self):
lu = ExtListUser()
lst = [Obj(i) for i in list(range(4))]
self.assertEqual(lu.sumObjList(lst), sum([obj.objId() for obj in lst]) * 2)
self.assertEqual(lu.callSumObjList(lst), sum([obj.objId() for obj in lst]) * 2)
class ListOfIntListConversionTest(unittest.TestCase):
def testCreateListOfIntLists(self):
num = 4
lu = ListUser()
lst = lu.createListOfIntLists(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), list)
self.assertEqual(i, list(range(num)))
for j in i:
self.assertEqual(type(j), int)
self.assertEqual(lst, [list(range(num))] * 4)
def testCreateListOfIntListsFromExtendedClass(self):
num = 4
lu = ExtListUser()
lst = lu.createListOfIntLists(num)
self.assertEqual(type(lst), list)
self.assertEqual(len(lst), num)
for i in lst:
self.assertEqual(type(i), list)
self.assertEqual(i, list(range(0, num * 2, 2)))
for j in i:
self.assertEqual(type(j), int)
self.assertEqual(lst, [list(range(0, num * 2, 2))] * 4)
def testSumListIntLists(self):
lu = ListUser()
lst = [range(4)] * 4
self.assertEqual(lu.sumListOfIntLists(lst), sum([sum(line) for line in [range(4)] * 4]))
self.assertEqual(lu.callSumListOfIntLists(lst), sum([sum(line) for line in [range(4)] * 4]))
def testSumListOfIntListsFromExtendedClass(self):
lu = ExtListUser()
lst = [range(4)] * 4
self.assertEqual(lu.sumListOfIntLists(lst), sum([sum(line) for line in [range(4)] * 4]) * 2)
self.assertEqual(lu.callSumListOfIntLists(lst), sum([sum(line) for line in [range(4)] * 4]) * 2)
if __name__ == '__main__':
unittest.main()
|
codewarrior0/Shiboken
|
tests/minimalbinding/listuser_test.py
|
Python
|
gpl-2.0
| 11,139
|
import pytest
def test_apot_pair_stress_weight_missing(potfit):
potfit.create_param_file()
potfit.run()
assert potfit.has_error()
assert 'stress_weight' in potfit.stderr
def test_apot_pair_stress_weight_empty(potfit):
potfit.create_param_file(stress_weight='')
potfit.call_makeapot('startpot', '-n 1 -i pair -f eopp_sc')
potfit.create_config_file()
potfit.run()
assert potfit.has_error()
assert 'Missing value in parameter file' in potfit.stderr
assert 'stress_weight is <undefined>' in potfit.stderr
def test_apot_pair_stress_weight_invalid(potfit):
potfit.create_param_file(stress_weight='foo')
potfit.call_makeapot('startpot', '-n 1 -i pair -f eopp_sc')
potfit.create_config_file()
potfit.run()
assert potfit.has_error()
assert 'Illegal value in parameter file' in potfit.stderr
assert 'stress_weight is not a double' in potfit.stderr
assert 'value = foo' in potfit.stderr
def test_apot_pair_stress_weight_out_of_bounds(potfit):
potfit.create_param_file(stress_weight=-1)
potfit.call_makeapot('startpot', '-n 1 -i pair -f eopp_sc')
potfit.create_config_file()
potfit.run()
assert potfit.has_error()
assert 'Illegal value in parameter file' in potfit.stderr
assert 'stress_weight is out of bounds' in potfit.stderr
def test_apot_pair_stress_basic(potfit):
potfit.create_param_file(stress_weight=1)
potfit.create_potential_file('''
#F 0 1
#T PAIR
#I 0
#E
type lj
cutoff 6.0
epsilon 0.1 0 1
sigma 2.5 1 4
''')
potfit.create_config_file(stress=1)
potfit.run()
assert potfit.has_no_error()
assert potfit.has_correct_atom_count()
assert 'analytic potentials' in potfit.stdout
assert '1 PAIR potential(s)' in potfit.stdout
assert 'Read 1 configurations (1 with forces, 1 with stresses)' in potfit.stdout
assert 'Global stress weight: 1.000000' in potfit.stdout
assert 'Optimization disabled' in potfit.stdout
assert 'Potential in format 0 written to file' in potfit.stdout
assert 'Energy data not written' in potfit.stdout
assert 'global stress weight w is 1.00' in potfit.stress
assert '386 contributions' in potfit.stdout
assert 'sum of stress-errors' in potfit.stdout
|
potfit/potfit
|
tests/apot/pair/stress/test_apot_pair_stress.py
|
Python
|
gpl-2.0
| 2,243
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'LegalPage.body_html'
db.delete_column('staticpages_legalpage', 'body_html')
# Changing field 'LegalPage.body'
db.alter_column('staticpages_legalpage', 'body', self.gf('pootle.core.markup.fields.MarkupField')())
def backwards(self, orm):
# Adding field 'LegalPage.body_html'
db.add_column('staticpages_legalpage', 'body_html',
self.gf('django.db.models.fields.TextField')(default='', blank=True),
keep_default=False)
# Changing field 'LegalPage.body'
db.alter_column('staticpages_legalpage', 'body', self.gf('django.db.models.fields.TextField')())
models = {
'staticpages.legalpage': {
'Meta': {'object_name': 'LegalPage'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'body': ('pootle.core.markup.fields.MarkupField', [], {'blank': 'True'}),
'display_on_register': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['staticpages']
|
ttreeagency/PootleTypo3Org
|
pootle/apps/staticpages/migrations/0003_auto__del_field_legalpage_body_html__chg_field_legalpage_body.py
|
Python
|
gpl-2.0
| 1,685
|
from esc import NUL
import esccmd
import escio
from esctypes import Point, Rect
from escutil import AssertEQ, AssertScreenCharsInRectEqual, GetCursorPosition, GetScreenSize, knownBug
class DCHTests(object):
def test_DCH_DefaultParam(self):
"""DCH with no parameter should delete one character at the cursor."""
escio.Write("abcd")
esccmd.CUP(Point(2, 1))
esccmd.DCH()
AssertScreenCharsInRectEqual(Rect(1, 1, 4, 1), [ "acd" + NUL ]);
def test_DCH_ExplicitParam(self):
"""DCH deletes the specified number of parameters."""
escio.Write("abcd")
esccmd.CUP(Point(2, 1))
esccmd.DCH(2)
AssertScreenCharsInRectEqual(Rect(1, 1, 4, 1), [ "ad" + NUL * 2 ]);
def test_DCH_RespectsMargins(self):
"""DCH respects left-right margins."""
escio.Write("abcde")
esccmd.DECSET(esccmd.DECLRMM)
esccmd.DECSLRM(2, 4)
esccmd.CUP(Point(3, 1))
esccmd.DCH()
esccmd.DECRESET(esccmd.DECLRMM)
AssertScreenCharsInRectEqual(Rect(1, 1, 5, 1), [ "abd" + NUL + "e" ]);
def test_DCH_DeleteAllWithMargins(self):
"""Delete all characters up to right margin."""
escio.Write("abcde")
esccmd.DECSET(esccmd.DECLRMM)
esccmd.DECSLRM(2, 4)
esccmd.CUP(Point(3, 1))
esccmd.DCH(99)
esccmd.DECRESET(esccmd.DECLRMM)
AssertScreenCharsInRectEqual(Rect(1, 1, 5, 1), [ "ab" + NUL * 2 + "e" ]);
def test_DCH_DoesNothingOutsideLeftRightMargin(self):
"""DCH should do nothing outside left-right margins."""
escio.Write("abcde")
esccmd.DECSET(esccmd.DECLRMM)
esccmd.DECSLRM(2, 4)
esccmd.CUP(Point(1, 1))
esccmd.DCH(99)
esccmd.DECRESET(esccmd.DECLRMM)
AssertScreenCharsInRectEqual(Rect(1, 1, 5, 1), [ "abcde" ])
@knownBug(terminal="xterm", reason="DCH operates on the current line when outside the scroll region in xterm.")
@knownBug(terminal="xterm", reason="Assertion fires", shouldTry=False)
@knownBug(terminal="iTerm2", reason="DCH operates on the current line when outside the scroll region in iTerm2.")
@knownBug(terminal="Terminal.app", reason="DCH operates on the current line when outside the scroll region in Terminal.app.")
def test_DCH_DoesNothingOutsideTopBottomMargin(self):
"""DCH should do nothing outside top-bottom margins."""
escio.Write("abcde")
esccmd.DECSTBM(2, 3)
esccmd.CUP(Point(1, 1))
esccmd.DCH(99)
esccmd.DECSTBM()
AssertScreenCharsInRectEqual(Rect(1, 1, 5, 1), [ "abcde" ])
|
margaritis/iTerm2
|
tests/esctest/tests/dch.py
|
Python
|
gpl-2.0
| 2,438
|
import unittest
import socket
from M2Crypto import SSL
from subscription_manager.gui import utils
from fixture import FakeException, FakeLogger
import rhsm.connection as connection
class FakeErrorWindow:
def __init__(self, msg, parent=None):
self.msg = msg
class HandleGuiExceptionTests(unittest.TestCase):
# we are going with "hge" for handle_gui_exception
def setUp(self):
self.msg = "some thing to log home about"
self.formatted_msg = "some thing else like: %s"
self.msg_with_url = "https://www.example.com"
self.msg_with_url_and_formatting = "https://www.example.com %s"
self.msg_with_markup = """<span foreground="blue" size="100">Blue text</span> is <i>cool</i>!"""
utils.log = FakeLogger()
utils.show_error_window = FakeErrorWindow
# set a mock logger
def test_hge(self):
e = FakeException()
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(e, self.msg, None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_log_msg_none(self):
e = FakeException()
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(e, self.msg, None, log_msg=None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_socket_error(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(socket.error(), self.msg, None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_ssl_error(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(SSL.SSLError(), self.msg, None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_network_exception(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(connection.NetworkException(1337),
self.msg, None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_remote_server_exception(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(connection.RemoteServerException(1984),
self.msg, None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_restlib_exception_unformatted_msg(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(connection.RestlibException(421, "whatever"),
self.msg, None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_restlib_exception_unformatted_msg_format_msg_false(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(connection.RestlibException(421, "whatever"),
self.msg, None,
format_msg=False)
def test_hge_restlib_exception_formated_msg(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(connection.RestlibException(409, "very clean"),
self.formatted_msg, None)
self.assertEqual(utils.log.expected_msg, self.msg)
def test_hge_restlib_exception_url_msg(self):
utils.log.set_expected_msg(self.msg)
utils.handle_gui_exception(connection.RestlibException(404, "page not found"),
self.msg_with_url, None)
self.assertEqual(utils.log.expected_msg, self.msg)
# if we handle this okay, we can probably remove the format_msg tests
def test_hge_restlib_exception_url_msg_with_formatting_format_msg_false(self):
utils.handle_gui_exception(connection.RestlibException(404, "page not found"),
self.msg_with_url_and_formatting, None,
format_msg=False)
def test_hge_restlib_exception_url_msg_500(self):
utils.handle_gui_exception(connection.RestlibException(500, "internal server error"),
self.msg_with_url, None, format_msg=True)
def test_hge_bad_certificate(self):
utils.handle_gui_exception(connection.BadCertificateException("/road/to/nowhere"),
self.msg, None)
def test_hge_fake_exception_url_msg(self):
        utils.handle_gui_exception(FakeException(msg="hey https://www.example.com"),
self.msg, None)
def test_hge_fake_exception_no_url_msg(self):
utils.handle_gui_exception(FakeException(msg="< what?>"),
self.msg, None)
def test_hge_fake_exception_formatted_msg(self):
utils.handle_gui_exception(FakeException(msg="something"),
self.formatted_msg, None)
def test_hge_fake_exception_formatted_msg_format_msg_false(self):
utils.handle_gui_exception(FakeException(msg="whatever"),
self.formatted_msg, None,
format_msg=False)
    def test_hge_fake_exception_formatted_log_msg(self):
utils.handle_gui_exception(FakeException(msg="bieber"),
self.formatted_msg, None,
log_msg=self.formatted_msg)
|
vritant/subscription-manager
|
test/test_handle_gui_exception.py
|
Python
|
gpl-2.0
| 5,281
|
"""Test OpenBabel executables from Python
Note: Python bindings not used
On Windows or Linux, you can run these tests at the commandline
in the build folder with:
"C:\Program Files\CMake 2.6\bin\ctest.exe" -C CTestTestfile.cmake
-R pytest -VV
You could also "chdir" into build/test and run the test file directly:
python ../../../test/testfastsearch.py
In both cases, the test file is run directly from the source folder,
and so you can quickly develop the tests and try them out.
"""
import unittest
from testbabel import run_exec, BaseTest
class TestSym(BaseTest):
"""A series of tests relating to fastsearch functionality"""
def setUp(self):
self.canFindExecutable("obabel")
def testSingleHit(self):
"""PR#2955101 - Difficulty reading from a fastsearch index"""
smiles = """C12(C(N(C(=O)C)c3c2cccc3)=O)Nc2c(ccc(c2N1)OCCCC)OCCCC
n1c([nH]c(cc1c1ccccc1)=O)c1ccc(cc1)Br
n1c(nc2c(c1N(C)C)cccc2)c1c(O)cccc1
C1(/[CH]2[CH]3\C(=C4/CC(C)(C)NC(C4)(C)C)C=C[CH]3[CH]1C=C2)=C1/CC(C)(C)NC(C1)(C)C
n1c(c2ccc(C(=O)O)cc2)ccc(c1)CCCCC
N1(C(CN(CC1=O)C(=O)C1CCCCC1)=O)CCc1ccccc1
S(N1[CH](c2ccccc2C=C1)C#N)(c1ccc(cc1)C)(=O)=O
c12c(c(OC)c3c(c1OC)occ3)ccc(o2)=O
c12c(O[CH](C1=O)C(C)C)cc1c(c2)ccc(=O)o1
c12[C]3([C@H]4([N@@](CCc1c1ccccc1[nH]2)C[C@H](C=C4CC)C3))C(=O)OC"""
outputfile = open("ten.smi", "w")
outputfile.write(smiles)
outputfile.close()
output, error = run_exec("obabel ten.smi -O ten.fs")
self.canFindFile("ten.fs")
self.assertConverted(error, 10)
query = "Nc2nc(c1ccccc1)nc3ccccc23"
output, error = run_exec("obabel ten.fs -ifs -s %s -osmi" % query)
self.assertConverted(error, 1)
output, error = run_exec("obabel ten.fs -ifs -s %s -at 0.5 -aa -osmi" % query)
self.assertConverted(error, 1)
if __name__ == "__main__":
unittest.main()
|
baoilleach/openbabel
|
test/testfastsearch.py
|
Python
|
gpl-2.0
| 1,914
|
# -*- coding: utf-8 -*-
import ekg
import time
def status_handler(session, uid, status, desc):
    # for sesja in ekg.sessions():
    #     if sesja.connected():
    #         ekg.echo("session '%s' is connected" % (name,))
    #         ekg.echo("status: "+sesja['status'])
    #     else:
    #         ekg.echo("session '%s' is not connected" % (name,))
    ekg.echo("Got a status!")
    ekg.echo("Session: "+session)
    ekg.echo("UID    : "+uid)
    ekg.echo("Status : "+status)
    if desc:
        ekg.echo("Descr  : "+desc)
    sesja = ekg.session_get(session)
    # ekg.echo('Session user list: '+", ".join(sesja.users()))
    user = sesja.user_get(uid)
    if user.last_status:
        ekg.echo(str(user.last_status))
        stat, des = user.last_status
        ekg.echo("Last status: "+stat)
        if user.last_status[1]:
            ekg.echo("Last descr : "+des)
    else:
        ekg.echo("There is no previous state - we are probably just connecting...")
    if user.ip:
        ekg.echo("IP: "+user.ip)
    if user.groups:
        ekg.echo("Groups: "+", ".join(user.groups()))
    if status == ekg.STATUS_AWAY:
        ekg.echo("Looks like they are away...")
    if status == ekg.STATUS_XA:
        ekg.echo("Looks like they are really gone, so what do I need such a status for? Swallowing it. *slurp*")
        return 0
    return 1
def message_handler(session, uid, type, text, sent_time, ignore_level):
    ekg.debug("[test script] some debug\n")
    ekg.echo("Got a message!")
    ekg.echo("Session: "+session)
    ekg.echo("UID    : "+uid)
    if type == ekg.MSGCLASS_MESSAGE:
        ekg.echo("Type   : msg")
    elif type == ekg.MSGCLASS_CHAT:
        ekg.echo("Type   : chat")
    ekg.echo("Time   : "+time.strftime("%a, %d %b %Y %H:%M:%S %Z", time.gmtime(sent_time)))
    ekg.echo("Ign    : "+str(ignore_level))
    ekg.echo("TxtLen : "+str(len(text)))
    if len(text) == 13:
        ekg.echo("Oh, what an unlucky number, I am not accepting this one")
        return 0
    return 1
def own_message_handler(session, target, text):
    ekg.debug("[test script] some debug\n")
    ekg.echo("Sending a message!")
    ekg.echo("Session: "+session)
    ekg.echo("Target : "+target)
    ekg.echo("TxtLen : "+str(len(text)))
    return 1
def connect_handler(session):
    ekg.echo("Connected! How great! We can talk now!")
    ekg.echo("Session: "+session)
    if session[:3] == 'irc':
        struct = time.gmtime()
        if struct[3] >= 8 and struct[3] < 17:
            ekg.echo('Is it really nice to be on IRC at work? ;)')
    sesja = ekg.session_get(session)
    if sesja.connected():
        ekg.echo('Connected!')
    else:
        ekg.echo('Not connected yet at this point')
    ekg.echo('Session user list: '+", ".join(sesja.users()))
def keypress(key):
    ekg.echo('you pressed #' + str(key))
def disconnect_handler(session):
    ekg.echo("Whoa, session %s just went down on us" % (session,))
    ekg.echo("Sending an SMS that something broke on our side...")
def foo_command(name, args):
    ekg.echo("The foo command was invoked!")
def varchange(name, newval):
    ekg.echo("Variable %s changed its value to %s" % (name, newval))
ekg.command_bind('foo', foo_command)
ekg.handler_bind('protocol-message-received', message_handler)
ekg.handler_bind('protocol-message-sent', own_message_handler)
ekg.handler_bind('protocol-status', status_handler)
ekg.handler_bind('protocol-connected', connect_handler)
ekg.handler_bind('protocol-disconnected', disconnect_handler)
ekg.handler_bind('ui-keypress', keypress)
ekg.variable_add('zmienna_testowa', 'value', varchange)
|
wcb2/wcb2
|
contrib/python/sample.py
|
Python
|
gpl-2.0
| 3,325
|
import unittest
from mock import call, patch, MagicMock, Mock
from pymongo.errors import AutoReconnect
from pulp.devel import mock_config
from pulp.server import config
from pulp.server.db import connection
from pulp.server.exceptions import PulpCodedException
class MongoEngineConnectionError(Exception):
pass
class TestDatabaseSeeds(unittest.TestCase):
def test_seeds_default(self):
self.assertEqual(config._default_values['database']['seeds'], 'localhost:27017')
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_seeds_invalid(self, mock_mongoengine):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize(seeds='localhost:27017:1234')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
database = config.config.get('database', 'name')
mock_mongoengine.connect.assert_called_once_with(database, max_pool_size=max_pool_size,
host='localhost:27017:1234')
@mock_config.patch({'database': {'seeds': ''}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_seeds_is_empty(self, mock_mongoengine):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
self.assertRaises(PulpCodedException, connection.initialize)
@mock_config.patch({'database': {'replica_set': 'fakeReplica'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
@patch('pulp.server.db.connection._connect_to_one_of_seeds')
def test_seeds_is_set_from_argument(self, mock_connect_seeds, mock_mongoengine):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
mock_connect_seeds.return_value.server_info.return_value = {'version': '2.6.0'}
replica_set = 'fakeReplica'
connection.initialize(seeds='firsthost:1234,secondhost:5678')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
database = config.config.get('database', 'name')
mock_connect_seeds.assert_called_with({'max_pool_size': max_pool_size,
'replicaSet': replica_set}, ['firsthost:1234',
'secondhost:5678'],
database)
@mock_config.patch({'database': {'seeds': 'firsthost:1234,secondhost:5678'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
@patch('pulp.server.db.connection._connect_to_one_of_seeds')
def test_seeds_from_config(self, mock_connect_seeds, mock_mongoengine):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
mock_connect_seeds.return_value.server_info.return_value = {'version': '2.6.0'}
seeds = "firsthost:1234,secondhost:5678"
replica_set = 'fakeReplica'
config.config.set('database', 'seeds', seeds)
config.config.set('database', 'replica_set', replica_set)
connection.initialize()
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
database = config.config.get('database', 'name')
mock_connect_seeds.assert_called_with({'max_pool_size': max_pool_size,
'replicaSet': replica_set}, ['firsthost:1234',
'secondhost:5678'],
database)
class TestDatabaseName(unittest.TestCase):
@mock_config.patch({'database': {'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test__DATABASE_uses_default_name(self, mock_mongoengine):
"""
Assert that the name from the database config is used if not provided as a parameter to
initialize().
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
connection.initialize()
name = config.config.get('database', 'name')
mock_mongoengine.connect.assert_called_once_with(name, host=host, max_pool_size=10)
@mock_config.patch({'database': {'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_name_is_set_from_argument(self, mock_mongoengine):
"""
Assert that passing a name to initialize() overrides the value from the config.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
name = 'name_set_from_argument'
host = 'champs.example.com:27018'
connection.initialize(name=name)
mock_mongoengine.connect.assert_called_once_with(name, host=host, max_pool_size=10)
class TestDatabaseReplicaSet(unittest.TestCase):
@mock_config.patch({'database': {'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_database_sets_replica_set(self, mock_mongoengine):
mock_replica_set = Mock()
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
connection.initialize(replica_set=mock_replica_set)
database = config.config.get('database', 'name')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
mock_mongoengine.connect.assert_called_once_with(
database, host=host, max_pool_size=max_pool_size,
replicaSet=mock_replica_set)
@mock_config.patch({'database': {'replica_set': 'real_replica_set', 'name': 'nbachamps',
'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_database_replica_set_from_config(self, mock_mongoengine):
"""
Assert that replica set configuration defaults to the configured value if not provided.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize()
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
mock_mongoengine.connect.assert_called_once_with(
'nbachamps', host='champs.example.com:27018', max_pool_size=max_pool_size,
replicaSet='real_replica_set')
class TestDatabaseMaxPoolSize(unittest.TestCase):
@mock_config.patch({'database': {'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_database_max_pool_size_default_is_10(self, mock_mongoengine):
"""
Assert that the max_pool_size parameter defaults to 10.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
connection.initialize()
database = config.config.get('database', 'name')
mock_mongoengine.connect.assert_called_once_with(database, host=host,
max_pool_size=10)
@mock_config.patch({'database': {'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_database_max_pool_size_uses_default(self, mock_mongoengine):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
connection.initialize()
database = config.config.get('database', 'name')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
mock_mongoengine.connect.assert_called_once_with(database, host=host,
max_pool_size=max_pool_size)
@mock_config.patch({'database': {'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_database_max_pool_size(self, mock_mongoengine):
"""
Assert that the max_pool_size parameter to initialize() is handled appropriately.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize(max_pool_size=5)
database = config.config.get('database', 'name')
host = config.config.get('database', 'seeds')
mock_mongoengine.connect.assert_called_once_with(database, host=host,
max_pool_size=5)
class TestDatabase(unittest.TestCase):
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_mongoengine_connect_is_called(self, mock_mongoengine):
"""
Assert that mongoengine.connect() is called.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize()
mock_mongoengine.connect.assert_called_once()
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test__DATABASE_is_returned_from_get_db_call(self, mock_mongoengine):
"""
This test asserts that pulp.server.db.connection._DATABASE is the result of calling get_db()
on the connection.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize()
expected_database = mock_mongoengine.connection.get_db.return_value
self.assertTrue(connection._DATABASE is expected_database)
mock_mongoengine.connection.get_db.assert_called_once_with()
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.NamespaceInjector')
@patch('pulp.server.db.connection.mongoengine')
def test__DATABASE_receives_namespace_injector(self, mock_mongoengine, mock_namespace_injector):
"""
This test asserts that the NamespaceInjector was added as a son manipulator to the
_DATABASE.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize()
mock_son_manipulator = connection._DATABASE.add_son_manipulator
mock_namespace_injector.assert_called_once_with()
mock_son_manipulator.assert_called_once_with(mock_namespace_injector.return_value)
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test__DATABASE_collection_names_is_called(self, mock_mongoengine):
"""
The initialize() method queries for all the collection names just to check that the
connection is up and authenticated (if necessary). This way it can raise an Exception if
there is a problem, rather than letting the first real query experience an Exception.
"""
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize()
connection._DATABASE.collection_names.assert_called_once_with()
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
@patch('pulp.server.db.connection._logger')
def test_unexpected_Exception_is_logged(self, mock__logger, mock_mongoengine):
"""
Assert that the logger gets called when an Exception is raised by mongoengine.connect().
"""
mock_mongoengine.connect.side_effect = IOError()
self.assertRaises(IOError, connection.initialize)
self.assertTrue(connection._CONNECTION is None)
self.assertTrue(connection._DATABASE is None)
mock__logger.critical.assert_called_once()
class TestDatabaseSSL(unittest.TestCase):
def test_ssl_off_by_default(self):
self.assertEqual(config.config.getboolean('database', 'ssl'), False)
@mock_config.patch({'database': {'ssl': 'false', 'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_ssl_is_skipped_if_off(self, mock_mongoengine):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
replica_set = ''
config.config.set('database', 'ssl', 'false')
config.config.set('database', 'seeds', host)
config.config.set('database', 'replica_set', replica_set)
connection.initialize()
database = config.config.get('database', 'name')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
mock_mongoengine.connect.assert_called_once_with(database, max_pool_size=max_pool_size,
host=host, replicaSet=replica_set)
@mock_config.patch({'database': {'verify_ssl': 'true',
'ssl': 'true', 'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.ssl')
@patch('pulp.server.db.connection.mongoengine')
def test_ssl_is_configured_with_verify_ssl_on(self, mock_mongoengine, mock_ssl):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
replica_set = ''
config.config.set('database', 'verify_ssl', 'true')
config.config.set('database', 'ssl', 'true')
config.config.set('database', 'seeds', host)
config.config.set('database', 'replica_set', replica_set)
connection.initialize()
database = config.config.get('database', 'name')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
ssl_cert_reqs = mock_ssl.CERT_REQUIRED
ssl_ca_certs = config.config.get('database', 'ca_path')
mock_mongoengine.connect.assert_called_once_with(
database, max_pool_size=max_pool_size, ssl=True, ssl_cert_reqs=ssl_cert_reqs,
ssl_ca_certs=ssl_ca_certs, host=host, replicaSet=replica_set)
@mock_config.patch({'database': {'verify_ssl': 'false',
'ssl': 'true', 'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.ssl')
@patch('pulp.server.db.connection.mongoengine')
def test_ssl_is_configured_with_verify_ssl_off(self, mock_mongoengine, mock_ssl):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
replica_set = ''
config.config.set('database', 'verify_ssl', 'false')
config.config.set('database', 'ssl', 'true')
config.config.set('database', 'seeds', host)
config.config.set('database', 'replica_set', replica_set)
connection.initialize()
database = config.config.get('database', 'name')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
ssl_cert_reqs = mock_ssl.CERT_NONE
ssl_ca_certs = config.config.get('database', 'ca_path')
mock_mongoengine.connect.assert_called_once_with(
database, max_pool_size=max_pool_size, ssl=True, ssl_cert_reqs=ssl_cert_reqs,
ssl_ca_certs=ssl_ca_certs, host=host, replicaSet=replica_set)
@mock_config.patch({'database': {'ssl_keyfile': 'keyfilepath', 'verify_ssl': 'false',
'ssl': 'true', 'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.ssl')
@patch('pulp.server.db.connection.mongoengine')
def test_ssl_is_configured_with_ssl_keyfile(self, mock_mongoengine, mock_ssl):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
replica_set = ''
config.config.set('database', 'ssl_keyfile', 'keyfilepath')
config.config.set('database', 'verify_ssl', 'false')
config.config.set('database', 'ssl', 'true')
config.config.set('database', 'seeds', host)
config.config.set('database', 'replica_set', replica_set)
connection.initialize()
database = config.config.get('database', 'name')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
ssl_cert_reqs = mock_ssl.CERT_NONE
ssl_ca_certs = config.config.get('database', 'ca_path')
mock_mongoengine.connect.assert_called_once_with(
database, max_pool_size=max_pool_size, ssl=True, ssl_cert_reqs=ssl_cert_reqs,
ssl_ca_certs=ssl_ca_certs, ssl_keyfile='keyfilepath', host=host,
replicaSet=replica_set)
@mock_config.patch({'database': {'ssl_certfile': 'certfilepath', 'verify_ssl': 'false',
'ssl': 'true', 'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.ssl')
@patch('pulp.server.db.connection.mongoengine')
def test_ssl_is_configured_with_ssl_certfile(self, mock_mongoengine, mock_ssl):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
host = 'champs.example.com:27018'
replica_set = ''
config.config.set('database', 'ssl_certfile', 'certfilepath')
config.config.set('database', 'verify_ssl', 'false')
config.config.set('database', 'ssl', 'true')
config.config.set('database', 'seeds', host)
config.config.set('database', 'replica_set', replica_set)
connection.initialize()
database = config.config.get('database', 'name')
max_pool_size = connection._DEFAULT_MAX_POOL_SIZE
ssl_cert_reqs = mock_ssl.CERT_NONE
ssl_ca_certs = config.config.get('database', 'ca_path')
mock_mongoengine.connect.assert_called_once_with(
database, max_pool_size=max_pool_size, ssl=True, ssl_cert_reqs=ssl_cert_reqs,
ssl_ca_certs=ssl_ca_certs, ssl_certfile='certfilepath', host=host,
replicaSet=replica_set)
class TestDatabaseVersion(unittest.TestCase):
"""
test DB version parsing. Info on expected versions is at
https://github.com/mongodb/mongo/blob/master/src/mongo/util/version.cpp#L39-45
"""
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def _test_initialize(self, version_str, mock_mongoengine):
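        # Make the mocked server report the given version string, then run
        # initialize() so callers can assert whether the version check passes.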
mock_mongoclient_connect = mock_mongoengine.connect.return_value
mock_mongoclient_connect.server_info.return_value = {'version': version_str}
connection.initialize()
def test_database_version_bad_version(self):
try:
self._test_initialize('1.2.3')
self.fail("RuntimeError not raised")
except RuntimeError:
pass # expected exception
def test_database_version_good_version(self):
# the version check succeeded if no exception was raised
self._test_initialize('2.6.0')
def test_database_version_good_equal_version(self):
# the version check succeeded if no exception was raised
self._test_initialize('2.4.0')
def test_database_version_good_rc_version(self):
# the version check succeeded if no exception was raised
self._test_initialize('2.8.0-rc1')
def test_database_version_bad_rc_version(self):
try:
self._test_initialize('2.3.0-rc1')
self.fail("RuntimeError not raised")
except RuntimeError:
pass # expected exception
class TestDatabaseAuthentication(unittest.TestCase):
@mock_config.patch(
{'database': {'name': 'nbachamps', 'username': 'larrybird',
'password': 'celtics1981', 'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_initialize_username_and_password(self, mock_mongoengine):
"""
Assert that the connection is made correctly when a username and password are provided in
the config.
"""
mock_mongoengine_instance = mock_mongoengine.connect.return_value
mock_mongoengine_instance.server_info.return_value = {"version":
connection.MONGO_MINIMUM_VERSION}
config.config.set('database', 'name', 'nbachamps')
config.config.set('database', 'username', 'larrybird')
config.config.set('database', 'password', 'celtics1981')
config.config.set('database', 'seeds', 'champs.example.com:27018')
config.config.set('database', 'replica_set', '')
connection.initialize()
mock_mongoengine.connect.assert_called_once_with(
'nbachamps', username='larrybird', host='champs.example.com:27018',
password='celtics1981', max_pool_size=10, replicaSet='')
@mock_config.patch(
{'database': {'name': 'nbachamps', 'username': 'larrybird',
'password': 'celtics1981', 'seeds': 'champs.example.com:27018'}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection._logger.debug')
@patch('pulp.server.db.connection.mongoengine')
def test_initialize_username_and_shadows_password(self, mock_mongoengine, mock_log):
"""
Assert that the password and password length are not logged.
"""
mock_mongoengine_instance = mock_mongoengine.connect.return_value
mock_mongoengine_instance.server_info.return_value = {"version":
connection.MONGO_MINIMUM_VERSION}
config.config.set('database', 'name', 'nbachamps')
config.config.set('database', 'username', 'larrybird')
config.config.set('database', 'password', 'celtics1981')
config.config.set('database', 'seeds', 'champs.example.com:27018')
config.config.set('database', 'replica_set', '')
connection.initialize()
mock_mongoengine.connect.assert_called_once_with(
'nbachamps', username='larrybird', host='champs.example.com:27018',
password='celtics1981', max_pool_size=10, replicaSet='')
expected_calls = [
call('Attempting username and password authentication.'),
call("Connection Arguments: {'username': 'larrybird', 'host': "
"'champs.example.com:27018', 'password': '*****', 'max_pool_size': 10, "
"'replicaSet': ''}"),
call('Querying the database to validate the connection.')]
mock_log.assert_has_calls(expected_calls)
@mock_config.patch({'database': {'username': '', 'password': ''}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_initialize_no_username_or_password(self, mock_mongoengine):
"""
Assert that no call is made to authenticate() when the username and password are the empty
string.
"""
mock_mongoengine_instance = mock_mongoengine.connect.return_value
mock_mongoengine_instance.server_info.return_value = {"version":
connection.MONGO_MINIMUM_VERSION}
connection.initialize()
self.assertFalse(connection._DATABASE.authenticate.called)
@mock_config.patch({'database': {'username': 'admin', 'password': ''}})
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_initialize_username_no_password(self, mock_mongoengine):
"""
Test that no Exception is raised if a DB username is provided without a password.
"""
mock_mongoengine_instance = mock_mongoengine.connect.return_value
mock_mongoengine_instance.server_info.return_value = {"version":
connection.MONGO_MINIMUM_VERSION}
# ensure no exception is raised (redmine #708)
connection.initialize()
@mock_config.patch({'database': {'username': '', 'password': 'foo'}})
@patch('pulp.server.db.connection.mongoengine')
def test_initialize_password_no_username(self, mock_mongoengine):
mock_mongoengine_instance = mock_mongoengine.connect.return_value
mock_mongoengine_instance.server_info.return_value = {"version":
connection.MONGO_MINIMUM_VERSION}
self.assertRaises(Exception, connection.initialize)
@patch('pulp.server.db.connection.OperationFailure', new=MongoEngineConnectionError)
@patch('pulp.server.db.connection.mongoengine')
def test_authentication_fails_with_RuntimeError(self, mock_mongoengine):
mock_mongoengine_instance = mock_mongoengine.connect.return_value
mock_mongoengine_instance.server_info.return_value = {"version":
connection.MONGO_MINIMUM_VERSION}
exc = MongoEngineConnectionError()
exc.code = 18
mock_mongoengine.connection.get_db.side_effect = exc
self.assertRaises(RuntimeError, connection.initialize)
class TestDatabaseRetryOnInitialConnectionSupport(unittest.TestCase):
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
@patch('pulp.server.db.connection.time.sleep', Mock())
def test_retry_waits_when_mongoengine_connection_error_is_raised(self, mock_mongoengine):
def break_out_on_second(*args, **kwargs):
mock_mongoengine.connect.side_effect = StopIteration()
raise MongoEngineConnectionError()
mock_mongoengine.connect.side_effect = break_out_on_second
mock_mongoengine.connection.ConnectionError = MongoEngineConnectionError
self.assertRaises(StopIteration, connection.initialize)
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.time.sleep')
@patch('pulp.server.db.connection.mongoengine')
def test_retry_sleeps_with_backoff(self, mock_mongoengine, mock_sleep):
def break_out_on_second(*args, **kwargs):
mock_sleep.side_effect = StopIteration()
mock_sleep.side_effect = break_out_on_second
mock_mongoengine.connect.side_effect = MongoEngineConnectionError()
mock_mongoengine.connection.ConnectionError = MongoEngineConnectionError
self.assertRaises(StopIteration, connection.initialize)
mock_sleep.assert_has_calls([call(1), call(2)])
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.time.sleep')
@patch('pulp.server.db.connection.mongoengine')
def test_retry_with_max_timeout(self, mock_mongoengine, mock_sleep):
def break_out_on_second(*args, **kwargs):
mock_sleep.side_effect = StopIteration()
mock_sleep.side_effect = break_out_on_second
mock_mongoengine.connect.side_effect = MongoEngineConnectionError()
mock_mongoengine.connection.ConnectionError = MongoEngineConnectionError
self.assertRaises(StopIteration, connection.initialize, max_timeout=1)
mock_sleep.assert_has_calls([call(1), call(1)])
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
@patch('pulp.server.db.connection.itertools')
def test_retry_uses_itertools_chain_and_repeat(self, mock_itertools, mock_mongoengine):
mock_mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
connection.initialize()
mock_itertools.repeat.assert_called_once_with(32)
mock_itertools.chain.assert_called_once_with([1, 2, 4, 8, 16],
mock_itertools.repeat.return_value)
class TestGetDatabaseFunction(unittest.TestCase):
@patch('pulp.server.db.connection._DATABASE')
def test_get_database(self, mock__DATABASE):
self.assertEqual(mock__DATABASE, connection.get_database())
class TestGetConnectionFunction(unittest.TestCase):
@patch('pulp.server.db.connection._CONNECTION')
def test_get_connection(self, mock__CONNECTION):
self.assertEqual(mock__CONNECTION, connection.get_connection())
class TestInitialize(unittest.TestCase):
"""
This class contains tests for the initialize() function.
"""
@patch('pulp.server.db.connection._CONNECTION', None)
@patch('pulp.server.db.connection._DATABASE', None)
@patch('pulp.server.db.connection.mongoengine')
def test_multiple_calls_errors(self, mongoengine):
"""
This test asserts that more than one call to initialize() raises a RuntimeError.
"""
mongoengine.connect.return_value.server_info.return_value = {'version': '2.6.0'}
# The first call to initialize should be fine
connection.initialize()
# A second call to initialize() should raise a RuntimeError
self.assertRaises(RuntimeError, connection.initialize)
# The connection should still be initialized
self.assertEqual(connection._CONNECTION, mongoengine.connect.return_value)
self.assertEqual(connection._DATABASE, mongoengine.connection.get_db.return_value)
# Connect should still have been called correctly
name = config.config.get('database', 'name')
host = config.config.get('database', 'seeds')
mongoengine.connect.assert_called_once_with(name, host=host, max_pool_size=10)
@patch('pulp.server.db.connection.UnsafeRetry._decorated_methods', new=('one', 'two'))
@patch('pulp.server.db.connection.config.config')
class TestUnsafeRetry(unittest.TestCase):
"""
Tests for the unsafe retry feature.
"""
@patch('pulp.server.db.connection.UnsafeRetry.retry_decorator')
def test_decorate_instance_retry_off(self, m_retry, m_config):
"""
Database calls should not be wrapped if the feature has not been turned on.
"""
m_config.getboolean.return_value = False
m_instance = MagicMock()
connection.UnsafeRetry.decorate_instance(m_instance, 'test_collection')
self.assertFalse(m_retry.called)
@patch('pulp.server.db.connection.UnsafeRetry.retry_decorator')
def test_decorate_instance_retry_on(self, m_retry, m_config):
"""
Database calls should be wrapped if the feature has been turned on.
"""
m_config.getboolean.return_value = True
m_instance = MagicMock()
connection.UnsafeRetry.decorate_instance(m_instance, 'test_collection')
self.assertTrue(m_instance.one is m_retry.return_value.return_value)
self.assertTrue(m_instance.two is m_retry.return_value.return_value)
@patch('pulp.server.db.connection.UnsafeRetry.retry_decorator')
def test_decorate_instance_retry_on_incomplete_attrs(self, m_retry, m_config):
"""
Instances without all decorated methods should still be wrapped.
"""
m_config.getboolean.return_value = True
m_instance = MagicMock()
del m_instance.one
connection.UnsafeRetry.decorate_instance(m_instance, 'test_collection')
self.assertTrue(m_instance.two is m_retry.return_value.return_value)
self.assertRaises(AttributeError, getattr, m_instance, 'one')
@patch('pulp.server.db.connection._logger')
def test_retry_decorator(self, m_logger, m_config):
"""
Raise AutoReconnect once (simulating no connection to the db); hijack the logger to
fix the mock so the retry does not loop forever.
"""
mock_r = MagicMock()
mock_r.side_effect = AutoReconnect
def restart_mongo(*args):
"""
Simulate turning Mongo back on.
"""
mock_r.side_effect = None
mock_r.return_value = 'final'
@connection.UnsafeRetry.retry_decorator(full_name='mock_coll')
def mock_func():
"""
Simple function to decorate.
"""
return mock_r()
m_config.getboolean.return_value = True
m_logger.error.side_effect = restart_mongo
final_answer = mock_func()
m_logger.error.assert_called_once_with('mock_func operation failed on mock_coll')
self.assertTrue(final_answer is 'final')
|
nthien/pulp
|
server/test/unit/server/db/test_connection.py
|
Python
|
gpl-2.0
| 34,833
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provide the capability for registered students to invite others.
Setup:
Include the text of the invitation email in course.yaml with the key:
course:
invitation_email:
sender_email: <email_address_in_from_field>
subject_template: <text_of_the_email>
body_template: <text_of_the_email>
The templates can use Jinja includes for the following variables:
sender_name: The name of the current student, as entered in the
registration form.
unsubscribe_url: A URL for the recipient to use to unsubscribe from
future emails.
The invitation_email settings in course.yaml can also be edited in the
Dashboard under Settings > Course Options.
"""
__author__ = 'John Orr (jorr@google.com)'
import logging
import os
import re
import jinja2
import appengine_config
from common import crypto
from common import safe_dom
from common import schema_fields
from common import tags
from controllers import utils
from models import courses
from models import custom_modules
from models import data_removal
from models import models
from models import transforms
from modules.courses import settings
from modules.invitation import messages
from modules.notifications import notifications
from modules.unsubscribe import unsubscribe
# The intent recorded for the emails sent by the notifications module
INVITATION_INTENT = 'course_invitation'
MODULE_NAME = 'invitation'
MODULE_TITLE = 'Invitations'
RESOURCES_PATH = '/modules/invitation/resources'
TEMPLATES_DIR = os.path.join(
appengine_config.BUNDLE_ROOT, 'modules', 'invitation', 'templates')
INVITATION_EMAIL_KEY = 'invitation_email'
SENDER_EMAIL_KEY = 'sender_email'
SUBJECT_TEMPLATE_KEY = 'subject_template'
BODY_TEMPLATE_KEY = 'body_template'
# In order to prevent spamming, the number of invitations which can be sent per
# user is limited.
MAX_EMAILS = 100
def is_email_valid(email):
# TODO(jorr): Use google.appengine.api.mail.is_email_valid when Issue 7471
# is resolved:
# https://code.google.com/p/googleappengine/issues/detail?id=7471
return re.match(
r'^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}$', email, flags=re.IGNORECASE)
class InvitationEmail(object):
@classmethod
def is_available(cls, handler):
env = handler.app_context.get_environ()
email_env = env['course'].get(INVITATION_EMAIL_KEY, {})
return (
email_env.get(SENDER_EMAIL_KEY)
and email_env.get(SUBJECT_TEMPLATE_KEY)
and email_env.get(BODY_TEMPLATE_KEY))
def __init__(self, handler, recipient_email, sender_name):
self.recipient_email = recipient_email
env = handler.app_context.get_environ()
email_env = env['course'].get(INVITATION_EMAIL_KEY)
self.sender_email = email_env[SENDER_EMAIL_KEY]
self.subject_template = email_env[SUBJECT_TEMPLATE_KEY]
self.body_template = email_env[BODY_TEMPLATE_KEY]
self.email_vars = {
'sender_name': sender_name,
'unsubscribe_url': unsubscribe.get_unsubscribe_url(
handler, recipient_email)
}
def _render(self, template, env):
# Coerce template to unicode in case it is a LazyTranslator.
template = unicode(template)
return jinja2.Template(template).render(env)
@property
def subject(self):
return self._render(self.subject_template, self.email_vars)
@property
def body(self):
return self._render(self.body_template, self.email_vars)
def send(self):
notifications.Manager.send_async(
self.recipient_email,
self.sender_email,
INVITATION_INTENT,
self.body,
self.subject
)
class InvitationStudentProperty(models.StudentPropertyEntity):
"""Entity to hold the list of people already invited."""
PROPERTY_NAME = 'invitation-student-property'
EMAIL_LIST_KEY = 'email_list'
@classmethod
def load_or_default(cls, student):
entity = cls.get(student, cls.PROPERTY_NAME)
if entity is None:
entity = cls.create(student, cls.PROPERTY_NAME)
entity.value = '{}'
return entity
def is_in_invited_list(self, email):
value_dict = transforms.loads(self.value)
return email in value_dict.get(self.EMAIL_LIST_KEY, [])
def append_to_invited_list(self, email_list):
value_dict = transforms.loads(self.value)
email_set = set(value_dict.get(self.EMAIL_LIST_KEY, []))
email_set.update(email_list)
value_dict[self.EMAIL_LIST_KEY] = list(email_set)
self.value = transforms.dumps(value_dict)
def invited_list_size(self):
return len(transforms.loads(self.value).get(self.EMAIL_LIST_KEY, []))
def for_export(self, transform_fn):
"""Return a sanitized version of the model, with anonymized data."""
# Anonymize the email addresses in the email list and drop any
# additional data in the data value field.
model = super(InvitationStudentProperty, self).for_export(transform_fn)
value_dict = transforms.loads(model.value)
email_list = value_dict.get(self.EMAIL_LIST_KEY, [])
clean_email_list = [transform_fn(email) for email in email_list]
model.value = transforms.dumps({self.EMAIL_LIST_KEY: clean_email_list})
return model
class InvitationHandler(utils.BaseHandler):
"""Renders the student invitation panel."""
URL = 'modules/invitation'
def __init__(self):
super(InvitationHandler, self).__init__()
self.email_vars = {}
def render_for_email(self, template):
return jinja2.Template(template).render(self.email_vars)
def get(self):
user = self.personalize_page_and_get_user()
if user is None:
self.redirect('/course')
return
student = models.Student.get_enrolled_student_by_user(user)
if student is None:
self.redirect('/course')
return
if not InvitationEmail.is_available(self):
self.redirect('/course')
return
invitation_email = InvitationEmail(self, user.email(), student.name)
self.template_value['navbar'] = {}
self.template_value['xsrf_token'] = (
crypto.XsrfTokenManager.create_xsrf_token(
InvitationRESTHandler.XSRF_SCOPE))
self.template_value['subject'] = invitation_email.subject
self.template_value['body'] = invitation_email.body
template = self.get_template('invitation.html', [TEMPLATES_DIR])
self.response.out.write(template.render(self.template_value))
class InvitationRESTHandler(utils.BaseRESTHandler):
"""Custom REST handler for the invitation panel."""
URL = 'rest/modules/invitation'
XSRF_SCOPE = 'invitation'
SCHEMA = {
'type': 'object',
'properties': {
'emailList': {'type': 'string', 'optional': 'true'}
}
}
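# Illustrative only (an assumption, not taken verbatim from this file): post()
# below reads a 'request' form parameter whose JSON carries an XSRF token for
# XSRF_SCOPE plus a 'payload' string that must validate against SCHEMA, e.g.
# a payload of:
#
#   {"emailList": "a@example.com, b@example.com"}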
def before_method(self, verb, path):
# Handler needs to be locale-aware because the messages must be
# localized.
self._old_locale = self.app_context.get_current_locale()
new_locale = self.get_locale_for(self.request, self.app_context)
self.app_context.set_current_locale(new_locale)
def after_method(self, verb, path):
# Handler needs to be locale-aware because the messages must be
# localized.
self.app_context.set_current_locale(self._old_locale)
def post(self):
"""Handle POST requests."""
request = transforms.loads(self.request.get('request'))
if not self.assert_xsrf_token_or_fail(request, self.XSRF_SCOPE, {}):
return
user = self.get_user()
if not user:
transforms.send_json_response(self, 401, 'Access denied.', {})
return
student = models.Student.get_enrolled_student_by_user(user)
if not student:
transforms.send_json_response(self, 401, 'Access denied.', {})
return
if not InvitationEmail.is_available(self):
transforms.send_json_response(self, 500, 'Unavailable.', {})
return
payload_json = request.get('payload')
payload_dict = transforms.json_to_dict(payload_json, self.SCHEMA)
email_set = {
email.strip() for email in payload_dict.get('emailList').split(',')
if email.strip()}
if not email_set:
transforms.send_json_response(
# I18N: Error indicating no email addresses were submitted.
self, 400, self.gettext('Error: Empty email list'))
return
invitation_data = InvitationStudentProperty.load_or_default(student)
# Limit the number of emails a user can send, to prevent spamming
if invitation_data.invited_list_size() + len(email_set) > MAX_EMAILS:
missing_count = MAX_EMAILS - invitation_data.invited_list_size()
# I18N: Error indicating that the user cannot add the desired
# list of additional email addresses to the list of invitations;
# the total size of the list with the additions would be more
# than any single user is allowed to send. No email addresses
# were added to the list to send, and no further email messages
# were sent.
transforms.send_json_response(self, 200, self.gettext(
'This exceeds your email cap. Number of remaining '
'invitations: %s. No messages sent.' % missing_count))
return
email_messages = []
for email in email_set:
if not is_email_valid(email):
# I18N: Error indicating an email addresses is not well-formed.
email_messages.append(self.gettext(
'Error: Invalid email "%s"' % email))
elif invitation_data.is_in_invited_list(email):
# I18N: Error indicating an email addresses is already known.
email_messages.append(self.gettext(
'Error: You have already sent an invitation email to "%s"'
% email))
elif unsubscribe.has_unsubscribed(email):
# No message to the user, for privacy reasons
logging.info('Declined to send email to unsubscribed user')
elif models.Student.is_email_in_use(email):
# No message to the user, for privacy reasons
logging.info('Declined to send email to registered user')
else:
InvitationEmail(self, email, student.name).send()
invitation_data.append_to_invited_list(email_set)
invitation_data.put()
if email_messages:
# I18N: Error indicating not all email messages were sent.
email_messages.insert(0, self.gettext(
'Not all messages were sent (%s / %s):') % (
len(email_set) - len(email_messages), len(email_set)))
transforms.send_json_response(self, 400, '\n'.join(email_messages))
else:
transforms.send_json_response(
self, 200,
# I18N: Success message indicating number of emails sent.
self.gettext('OK, %s messages sent' % len(email_set)))
def get_course_settings_fields():
enable = schema_fields.SchemaField(
'course:invitation_email:enabled',
'Allow Invitation', 'boolean',
description=messages.ALLOW_INVITATION, extra_schema_dict_values={
'className': 'invitation-enable inputEx-Field inputEx-CheckBox'},
optional=True)
sender_email = schema_fields.SchemaField(
'course:invitation_email:sender_email',
'Invitation Origin Email', 'string',
description='The email address shown as the sender for invitation '
'emails to this course.',
extra_schema_dict_values={'className': 'invitation-data inputEx-Field'},
optional=True, i18n=False)
subject_template = schema_fields.SchemaField(
'course:invitation_email:subject_template',
'Invitation Subject Line', 'string',
description='The subject line in invitation emails to this course. '
'Use the string {{sender_name}} to include the name of the student '
'issuing the invitation in the subject line.',
extra_schema_dict_values={'className': 'invitation-data inputEx-Field'},
optional=True)
body_template = schema_fields.SchemaField(
'course:invitation_email:body_template',
'Invitation Body', 'text',
description=messages.INVITATION_BODY,
extra_schema_dict_values={'className': 'invitation-data inputEx-Field'},
optional=True)
return (
lambda c: enable,
lambda c: sender_email,
lambda c: subject_template,
lambda c: body_template)
def get_student_profile_invitation_link(handler, unused_student, unused_course):
env = handler.app_context.get_environ()
email_env = env['course'].get(INVITATION_EMAIL_KEY, {})
if not email_env.get('enabled'):
return (None, None)
# I18N: Title encouraging user to invite friends to join a course
invitation_title = handler.gettext('Invite Friends')
if InvitationEmail.is_available(handler):
invitation_link = safe_dom.A(
InvitationHandler.URL
# I18N: Label on control asking user to invite friends to join.
).add_text(handler.gettext(
'Click to send invitations to family and friends'))
else:
# I18N: Inviting friends to join a course is not currently enabled.
invitation_link = safe_dom.Text(handler.gettext(
'Invitations not currently available'))
return (
invitation_title, invitation_link)
def get_student_profile_sub_unsub_link(handler, student, unused_course):
email = student.email
is_unsubscribed = unsubscribe.has_unsubscribed(email)
# I18N: Control allowing user to subscribe/unsubscribe from email invitation
sub_unsub_title = handler.gettext('Subscribe/Unsubscribe')
sub_unsub_message = safe_dom.NodeList()
if is_unsubscribed:
resubscribe_url = unsubscribe.get_resubscribe_url(handler, email)
sub_unsub_message.append(safe_dom.Text(
# I18N: Message - user has unsubscribed from email invitations.
handler.gettext(
'You are currently unsubscribed from course-related emails.')))
sub_unsub_message.append(safe_dom.A(resubscribe_url).add_text(
# I18N: Control allowing user to re-subscribe to email invitations.
handler.gettext('Click here to re-subscribe.')))
else:
unsubscribe_url = unsubscribe.get_unsubscribe_url(handler, email)
sub_unsub_message.append(safe_dom.Text(
# I18N: Text indicating user has opted in to email invitations.
handler.gettext(
'You are currently receiving course-related emails. ')))
sub_unsub_message.append(safe_dom.A(unsubscribe_url).add_text(
# I18N: Control allowing user to unsubscribe from email invitations.
handler.gettext('Click here to unsubscribe.')))
return (
sub_unsub_title, sub_unsub_message)
custom_module = None
def register_module():
"""Registers this module in the registry."""
course_settings_fields = get_course_settings_fields()
def on_module_enabled():
courses.Course.OPTIONS_SCHEMA_PROVIDERS[
MODULE_NAME] += course_settings_fields
courses.Course.OPTIONS_SCHEMA_PROVIDER_TITLES[
MODULE_NAME] = MODULE_TITLE
settings.CourseSettingsHandler.register_settings_section(MODULE_NAME)
utils.StudentProfileHandler.EXTRA_STUDENT_DATA_PROVIDERS += [
get_student_profile_invitation_link,
get_student_profile_sub_unsub_link]
settings.CourseSettingsHandler.ADDITIONAL_DIRS.append(
TEMPLATES_DIR)
settings.CourseSettingsHandler.EXTRA_JS_FILES.append(
'invitation_course_settings.js')
data_removal.Registry.register_indexed_by_user_id_remover(
InvitationStudentProperty.delete_by_user_id_prefix)
global_routes = [
(os.path.join(RESOURCES_PATH, '.*'), tags.ResourcesHandler)]
namespaced_routes = [
('/' + InvitationHandler.URL, InvitationHandler),
('/' + InvitationRESTHandler.URL, InvitationRESTHandler)]
global custom_module # pylint: disable=global-statement
custom_module = custom_modules.Module(
'Invitation Page',
'A page to invite others to register.',
global_routes, namespaced_routes,
notify_module_enabled=on_module_enabled)
return custom_module
|
GirlsCodePy/girlscode-coursebuilder
|
modules/invitation/invitation.py
|
Python
|
gpl-3.0
| 17,503
|
import re, os
import SCons.Builder
import SCons.Util
nsisFiles_re = re.compile(r'^\s*File "([^"]*)"', re.M)
"""
TODO:
- Extract the target from the nsis file
- When a target is provided use the output function
"""
def generate(env) :
"""Add Builders and construction variables for qt to an Environment."""
print "Loading nsis tool..."
Builder = SCons.Builder.Builder
env['NSIS_MAKENSIS'] = 'makensis'
env['NSIS_OPTIONS'] = ''
def winToLocalReformat(path) :
return os.path.join(*path.split("\\"))
def scanNsisContent(node, env, path, arg):
contents = node.get_contents()
includes = nsisFiles_re.findall(contents)
includes = [ winToLocalReformat(include) for include in includes ]
return filter(lambda x: x.rfind('*')==-1, includes)
nsisscanner = env.Scanner(name = 'nsisfile',
function = scanNsisContent,
argument = None,
skeys = ['.nsi'])
nsisbuilder = Builder(
action = '$NSIS_MAKENSIS $NSIS_OPTIONS $SOURCE',
source_scanner = nsisscanner,
single_source = True
)
env.Append( BUILDERS={'Nsis' : nsisbuilder} )
def exists(env) :
return True
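# Illustrative only (a sketch, not part of the original tool): loading this
# tool from an SConstruct and building an installer from a .nsi script,
# assuming makensis is on the PATH and this file is on the toolpath.
#
#   env = Environment(tools=['default', 'nsis'], toolpath=['scons/sconstools'])
#   env.Nsis('MyInstaller.exe', 'installer.nsi')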
|
txemagon/takeatease
|
vendorsrc/clam-1.4.0/scons/sconstools/nsis.py
|
Python
|
gpl-3.0
| 1,062
|
import sys
import os
import logging
import getpass
import platform
import time
#import pkg_resources
import json
import sassie.util.sasconfig as sasconfig
DEBUG = False
if sasconfig.__level__ == "DEBUG": DEBUG = True
class run_utils():
def __init__(self,app,txtOutput):
self.__application__ = app
self.txtOutput = txtOutput
def write_json_file(self):
self.logger.debug('in write_json_file')
self.logger.info('writing json files with input variables')
with open(self.parmfile,'w') as outfile:
json.dump([self.v],outfile)
self.logger.info('writing parameters to '+self.parmfile)
if(os.path.isfile('.last_sas.json')):
os.system('mv .last_sas.json .last_sas.json_backup')
self.logger.info('backing up existing .last_sas.json file to .last_sas.json_backup')
if(os.path.isfile('.last_sas_'+self.__application__+'.json')):
os.system('mv .last_sas_'+self.__application__+'.json .last_sas_'+self.__application__+'.json_backup')
self.logger.info('backing up existing .last_sas_'+self.__application__+'.json file to .last_sas_'+self.__application__+'.json_backup')
with open('.last_sas.json','w') as outfile:
json.dump([self.v],outfile)
self.logger.info('writing parameters to .last_sas.json file')
with open('.last_sas_'+self.__application__+'.json','w') as outfile:
json.dump([self.v],outfile)
self.logger.info('writing parameters to .last_sas_'+self.__application__+'.json file')
self.logger.info('input variables: '+json.dumps([self.v] ))
return
def general_setup(self,other_self):
''' method to write json file of input variables and setup logging '''
''' grab all of the variables in the variable class instance '''
log = other_self.log
mvars = other_self.mvars
log.debug('in general_setup')
input_vars = [attr for attr in dir(mvars) if not callable(getattr(mvars,attr)) and not attr.startswith("__")]
''' put the variables into a dictionary to send to json '''
input_variables = {}
for var in input_vars:
input_variables[var] = getattr(mvars, var)
self.v = input_variables
self.write_json_file()
runname = self.v['runname']
self.runpath = os.path.join(runname,self.__application__)
self.logger.info('setting runpath directory to : '+self.runpath)
try:
if(not os.path.exists(runname)):
os.makedirs(self.runpath)
elif(not os.path.exists(self.runpath)):
os.mkdir(self.runpath)
self.logger.info('creating directory : '+self.runpath)
except:
self.logger.critical('FAILED to create '+self.runpath+' directory')
other_self.runpath = self.runpath
other_self.parmfile = self.parmfile
return
def preamble(self):
user = getpass.getuser()
current_path = os.getcwd()
self.logger.debug('in preamble')
#self.logger.info('sassie version(sassie) : '+sassie.__version__)
#self.logger.info('sassie version : '+pkg_resources.get_distribution("sassie_2").version)
self.logger.info('module_application : '+self.__application__)
self.logger.info('executed by user : '+user+' on '+time.ctime())
self.logger.info('current directory : '+current_path)
self.logger.info('hostname : '+platform.node())
return
def setup_logging(self,other_self):
self.logger = logging.getLogger(self.__application__)
if (sasconfig.__level__ == 'DEBUG'):
self.logger.setLevel(logging.DEBUG)
else:
self.logger.setLevel(logging.INFO)
timestr = time.strftime("%Y%m%d-%H%M%S")
self.logfile = self.__application__+'_'+timestr+'.sassie_log'
self.parmfile = self.logfile[:-3]+'json'
outfile = open(self.logfile,"w")
st = 'Date\t\tTime\t\tFile\t\tMethod\t\tLine\tLevel\tMessage\n'
outfile.write(st) #; print(st)
outfile.close()
file_handler = logging.FileHandler(self.logfile)
formatter = logging.Formatter('%(asctime)s - %(filename)s - %(funcName)s - %(lineno)d - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
output_handler = logging.StreamHandler(sys.stdout)
output_handler.setLevel(logging.WARNING)
output_handler.setFormatter(formatter)
self.logger.addHandler(file_handler)
self.logger.addHandler(output_handler)
self.logger.debug('in setup_logging')
self.logger.info("Program started")
self.preamble()
other_self.log = self.logger
other_self.logfile = self.logfile
return
def print_gui(self,message):
'''
method to output message to logger and txtOutput (gui)
'''
self.logger.info(message)
self.txtOutput.put(message)
if DEBUG: print message
def clean_up(self,log):
'''
method to move files to runpath directory and finish tasks
'''
log.debug('in clean_up')
os.system('mv '+self.logfile+' '+self.runpath)
os.system('mv '+self.parmfile+' '+self.runpath)
def capture_exception(self,message):
''' this method is a placeholder for a possible future exception handler ... notes are below for usage in-line in code '''
''' This plain 'except:' catches all exceptions, not only system '''
# log = self.log
# try:
# something_that_may_fail()
# except:
# error = sys.exc_info()[0]
# log.error('ERROR: '+error)
''' this is an example of EAFP ... easier to ask for forgiveness than permission ... the preferred way ... also allows some cleanup etc '''
#def display_username(user_id):
# try:
# db_connection = get_db_connection()
# except DatabaseEatenByGrueError:
# print('Sorry! Database was eaten by a grue.')
# else:
# print(db_connection.get_username(user_id))
# db_connection.cleanup()
''' the following is an example of check first or look before you leap LBYL ... could still not work in all cases (race conditions possible) '''
#def print_object(some_object):
# # Check if the object is printable...
# try:
# printable = str(some_object)
# except TypeError:
# print("unprintable object")
# else:
# print(printable)
''' see http://www.jeffknupp.com/blog/2013/02/06/write-cleaner-python-use-exceptions/ '''
''' proper way to open a file to read to make sure that it closes correctly if there is an error '''
#with open("myfile.txt") as f:
# for line in f:
# print line,
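# Illustrative only (a sketch, not part of the original module): typical use of
# run_utils from a SASSIE module, based on the methods defined above.
#
#   run = run_utils('my_module', txtOutput)
#   run.setup_logging(module_self)    # attaches module_self.log and .logfile
#   run.general_setup(module_self)    # writes json files and sets the runpath
#   run.print_gui('starting calculation')
#   run.clean_up(module_self.log)     # moves the log and json into the runpath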
|
madscatt/zazzie_1.5
|
trunk/sassie/util/module_utilities.py
|
Python
|
gpl-3.0
| 7,017
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stored course enrollment statistics.
This module implements three counters that track per-course enrollment activity:
'total', 'adds', and 'drops'. For each course, a per-course instance of these
counters is stored in the default AppEngine namespace.
A course is identified by the corresponding namespace name. To obtain all of
the counters of a given type (e.g. 'total'):
1) Call sites.get_all_courses() to obtain a list of ApplicationContexts.
2) Call get_namespace_name() on each ApplicationContext in the resulting list.
3) Pass this list of namespace names to load_many() of the DAO of the desired
counter type, e.g. TotalEnrollmentDAO.load_many(list_of_namespace_names).
For example, consider the entity key names of the counters associated with a
course with namespace_name 'sample'. The entities storing the counters for
that course would be found in the default namespace at "ns_sample:total",
"ns_sample:adds", and "ns_sample:drops". Similarly, other courses would have
their own instances of these counters in the default namespace, with key names
similarly derived from their namespace names.
'total' is a simple single-value counter that represents the current total
enrollment of a single course. This counter is updated in real time by
registering StudentLifecycleObserver handlers that tally the equivalent of:
(number of EVENT_ADD + number of EVENT_REENROLL)
- (number of EVENT_UNENROLL + number of EVENT_UNENROLL_COMMANDED)
A MapReduce will also periodically determine the actual value by iterating over
the Datastore and overwrite the event tally.
'adds' is a collection of counters that bin the EVENT_ADD and EVENT_REENROLL
events that occur in a single day, via StudentLifecycleObserver handlers.
'drops' is the analogue of 'adds' that tracks EVENT_UNENROLL and
EVENT_UNENROLL_COMMANDED.
These counters are stored in the default namespace to reduce the implementation
complexity and to concentrate the data into a fixed number of rows for speed
in loading. The 'total', 'adds', and 'drops' counters are separate entities to
reduce contention between the StudentLifecycleObserver handlers updating them.
A _new_course_counts callback initializes these enrollments counters to zero
upon course creation. Student lifecycle events will update these counters in
near real-time soon after the course is created.
Courses that exist in the Datastore prior to updating an installation to a new
Course Builder version that implements enrollments counters will be missing
those counter entities in the Datastore. These missing counters will be
initialized for the first time by enrollments_mapreduce.ComputeCounts
MapReduceJobs. Student lifecycle events will trigger MapReduceJobs for these
missing counters and then update them in near real-time soon after they are
initialized by the background jobs.
"""
__author__ = 'Todd Larsen (tlarsen@google.com)'
import collections
import copy
import datetime
import logging
import appengine_config
from google.appengine.api import namespace_manager
from google.appengine.ext import db
from common import schema_fields
from common import utc
from common import utils as common_utils
from controllers import sites
from controllers import utils
from models import analytics
from models import data_sources
from models import entities
from models import jobs
from models import models
from models import transforms
from modules.admin import config
from modules.dashboard import dashboard
# The "display" rendition of an uninitialized 'Registered Students' value.
NONE_ENROLLED = u'\u2014' # em dash
class EnrollmentsEntity(entities.BaseEntity):
"""Portions of the Datastore model shared by all enrollment counters.
Enrollment counters implemented in this module are per-course singletons.
The key_name of each counter is derived from what the entity class is
counting (the cls.COUNTING string) and the name of the course, obtained,
for example, via sites.ApplicationContext.get_namespace_name().
Subclasses are expected to override the COUNTING string, e.g. 'total',
'adds', 'drops', etc.
"""
COUNTING = "UNDEFINED -- override in entity subclass"
# JSON-encoded DTO is stored here.
json = db.TextProperty(indexed=False)
class TotalEnrollmentEntity(EnrollmentsEntity):
COUNTING = 'total'
class EnrollmentsAddedEntity(EnrollmentsEntity):
COUNTING = 'adds'
class EnrollmentsDroppedEntity(EnrollmentsEntity):
COUNTING = 'drops'
class EnrollmentsDTO(object):
"""Features common to all DTO of enrollments counters."""
def __init__(self, the_id, the_dict):
self.id = the_id
self.dict = the_dict
@property
def last_modified(self):
return self.dict.get('_last_modified') or 0
def _force_last_modified(self, timestamp):
"""Sets last_modified to a POSIX timestamp, seconds since UTC epoch."""
self.dict['_last_modified'] = timestamp
def set_last_modified_to_now(self):
"""Sets the last_modified property to the current UTC time."""
self._force_last_modified(utc.now_as_timestamp())
def seconds_since_last_modified(self, now=None):
lm = self.last_modified # Copy to lessen races with other modifiers.
if not lm:
return 0 # Last modified not recorded, so no elapsed time since.
now = now if now is not None else utc.now_as_timestamp()
return now - lm
@property
def is_empty(self):
"""True if no count is present (but may still be pending)."""
return not self.binned
@property
def is_pending(self):
"""True if some background job is initializing the counter."""
return self.is_empty and self.last_modified
MAX_PENDING_SEC = 60 * 60 # 1 hour
@property
def is_stalled(self):
"""True if pending but since last modified exceeds MAX_PENDING_SEC."""
return self.seconds_since_last_modified() > self.MAX_PENDING_SEC
@property
def is_missing(self):
"""True for uninitialized counters (empty and not pending)."""
return self.is_empty and (not self.last_modified)
@property
def binned(self):
"""Returns an empty binned counters dict (subclasses should override).
This method exists to provide a uniform representation of what is
stored in an enrollments counter. Some counters are binned (e.g.,
EnrollmentsDroppedDTO), and their get() method requires an extra
timestamp parameter to select a particular bin. Other counters are
single, unbinned counts (e.g. TotalEnrollmentDTO), and their get()
method does not accept the meaningless timestamp parameter.
Some clients want to treat all of these enrollments counter DTOs the
same, rather than needing to distinguish between those with get() that
needs a timestamp and those that do not. So, all of the EnrollmentsDTO
subclasses implement the binned property. For binned counters, it
simply returns all of the bins. For single, unbinned counters, a
single "bin" of the current day is returned, containing the single
counter value.
Returns:
Subclasses should override this method to return a dict whose keys
are integer 00:00:00 UTC "start of day" dates in seconds since
epoch, and whose values are the counts corresponding to those keys.
"""
return {}
def marshal(self, the_dict):
return transforms.dumps(the_dict)
@classmethod
def unmarshal(cls, json):
return transforms.loads(json)
class TotalEnrollmentDTO(EnrollmentsDTO):
"""Data transfer object for a single, per-course enrollment count."""
def get(self):
"""Returns the value of an enrollment counter (or 0 if not yet set)."""
return self.dict.get('count', 0)
def set(self, count):
"""Overwrite the enrollment counter with a new count value."""
self.dict['count'] = count
self.set_last_modified_to_now()
def inc(self, offset=1):
"""Increment an enrollment counter by a signed offset; default is 1."""
self.set(self.get() + offset)
@property
def is_empty(self):
# Faster than base class version that could cause creation of the
# temporary binned dict.
return 'count' not in self.dict
@property
def binned(self):
"""Returns the binned counters dict (empty if uninitialized counter).
Returns:
If the counter is initialized (has been set() at least once), a
dict containing a single bin containing the total enrollments count
is constructed and returned. The single key in the returned dict
is the 00:00:00 UTC "start of day" time of last_modified, as
seconds since epoch. The single value is the get() count.
Otherwise, if the counter is uninitialized, an empty dict is
returned (just like the EnrollmentsDTO base class).
"""
if self.is_empty:
return super(TotalEnrollmentDTO, self).binned
return {
utc.day_start(self.last_modified): self.get(),
}
class BinnedEnrollmentsDTO(EnrollmentsDTO):
"""Data transfer object for per-course, binned enrollment event counts."""
@property
def binned(self):
"""Returns the binned counters dict (possibly empty).
Returns:
If the counter is initialized (at least one timestamped bin has
been set()), the dict containing a bin for each day with at least
one counted event is returned. The keys of the returned dict are
the 00:00:00 UTC "start of day" time of each non-zero daily bin,
as seconds since epoch. The values are the total number of
counted events in the day starting at that time key.
Otherwise, if the counter is uninitialized, an empty dict is
returned (just like the EnrollmentsDTO base class).
"""
return self.dict.setdefault(
'binned', super(BinnedEnrollmentsDTO, self).binned)
@classmethod
def bin(cls, timestamp):
"""Converts POSIX timestamp to daily counter bin in self.binned dict.
Args:
timestamp: UTC time, as a POSIX timestamp (seconds since epoch).
Returns:
The key of the counter bin (which may or may not actually exist)
in the self.binned dict associated with the supplied UTC time.
"""
return utc.day_start(timestamp)
def _get_bin(self, bin_key):
return self.binned.get(bin_key, 0)
def get(self, timestamp):
"""Returns a count of events in for a day (selected via a UTC time).
Args:
timestamp: UTC time, as a POSIX timestamp (seconds since epoch).
Returns:
The counter value of the daily bin, or 0 if the corresponding
self.bin() does not exist in the self.binned dict.
"""
return self._get_bin(self.bin(timestamp))
def _set_bin(self, bin_key, count):
self.binned[bin_key] = count
self.set_last_modified_to_now()
def set(self, timestamp, count):
"""Overwrites the count of events for a day (selected via a UTC time).
Args:
timestamp: UTC time, as a POSIX timestamp (seconds since epoch).
count: the new integer value of the selected binned counter
"""
self._set_bin(self.bin(timestamp), count)
def inc(self, timestamp, offset=1):
"""Increments the count of events for a day (selected via a UTC time).
Args:
timestamp: UTC time, as a POSIX timestamp (seconds since epoch).
offset: optional signed increment offset; defaults to 1.
Returns:
The incremented (by offset) existing count in the specified daily
bin -OR- if the selected bin does not exist, count resulting from
creating a new bin initialized to 0 and incremented by offset.
"""
bin_key = self.bin(timestamp)
self._set_bin(bin_key, self._get_bin(bin_key) + offset)
self.set_last_modified_to_now()
_BIN_FORMAT = '%Y%m%d'
def marshal(self, the_dict):
binned = the_dict.get('binned')
if binned:
# A copy (to avoid mutating the original) is only necessary if
# there are actually int seconds since epoch bin keys that need
# to be made compatible with JSON.
the_dict = copy.copy(the_dict)
the_dict['binned'] = dict(
[(utc.to_text(seconds=seconds, fmt=self._BIN_FORMAT), count)
for seconds, count in binned.iteritems()])
return super(BinnedEnrollmentsDTO, self).marshal(the_dict)
@classmethod
def unmarshal(cls, json):
the_dict = super(BinnedEnrollmentsDTO, cls).unmarshal(json)
binned = the_dict.get('binned')
if binned:
the_dict['binned'] = dict(
[(utc.text_to_timestamp(text, fmt=cls._BIN_FORMAT), count)
for text, count in binned.iteritems()])
return the_dict
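# Illustrative only (a sketch, not part of the original module): how the daily
# binning defined above behaves, using hypothetical values.
#
#   dto = BinnedEnrollmentsDTO('ns_example:adds', {})
#   noon = utc.datetime_to_timestamp(datetime.datetime(2016, 5, 1, 12, 0, 0))
#   dto.inc(noon)         # creates the 2016-05-01 00:00:00 UTC bin, count 1
#   dto.inc(noon + 3600)  # same UTC day, so the same bin, count 2
#   dto.get(noon)         # -> 2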
class EnrollmentsDAO(object):
"""Operations shared by the DAO of all enrollment counters.
The API is loosely based on models.BaseJsonDao, but with the memcache
complexity removed, and appengine_config.DEFAULT_NAMESPACE_NAME always
used as the namespace.
EnrollmentsDAO is not a generic, "full-featured" DAO. Only the operations
likely to be used by the admin Courses pages, StudentLifecycleObserver
event handlers, and the enrollment total MapReduce are provided.
"""
KEY_SEP = ':'
@classmethod
def key_name(cls, namespace_name):
"""Creates enrollment counter key_name strings for Datastore operations.
Enrollment counter keys are grouped by namespace_name and then by what
entity class is counting. The key name string is expected to be a
KEY_SEP-delimited list of substrings. The first substring must always
be the namespace_name.
Args:
namespace_name: the name of the course (e.g. "ns_my_new_course")
Returns:
namespace_name and then cls.ENTITY.COUNTING in a string.
Some examples:
"ns_example:totals", "ns_example:adds", "ns_example:drops"
"""
return "%s%s%s" % (namespace_name, cls.KEY_SEP, cls.ENTITY.COUNTING)
@classmethod
def namespace_name(cls, key_name):
"""Returns the namespace_name extracted from the supplied key_name."""
# string.split() always returns a list of at least length 1, even for
# an empty string or string that does not contain the separator, so
# this simple expression should not raise exceptions.
return key_name.split(cls.KEY_SEP, 1)[0]
@classmethod
def new_dto(cls, namespace_name, the_dict=None, entity=None):
"""Returns a DTO initialized from entity or namespace_name."""
if entity is not None:
# Prefer namespace_name derived from the entity key name if present.
name = entity.key().name()
if name is not None:
namespace_name = cls.namespace_name(name)
# Prefer the_dict JSON-decoded from the entity if present.
if entity.json:
the_dict = cls.DTO.unmarshal(entity.json)
if the_dict is None:
the_dict = {}
return cls.DTO(cls.key_name(namespace_name), the_dict)
@classmethod
def load_or_default(cls, namespace_name):
"""Returns DTO of the namespace_name entity, or a DTO.is_empty one."""
with common_utils.Namespace(appengine_config.DEFAULT_NAMESPACE_NAME):
entity = cls.ENTITY.get_by_key_name(cls.key_name(namespace_name))
return cls.new_dto(namespace_name, entity=entity)
@classmethod
def load_many(cls, namespace_names):
"""Loads multiple DTOs in the same order as supplied namespace names.
Args:
namespace_names: a list of namespace name strings
Returns:
A list of cls.DTOs created from entities fetched from the
Datastore, in the same order as the supplied namespace_names list.
When no corresponding entity exists in the Datastore for a given
namespace name, a DTO where DTO.is_empty is true is placed in that
slot in the returned list (not None like, say, get_by_key_name()).
"""
with common_utils.Namespace(appengine_config.DEFAULT_NAMESPACE_NAME):
many_entities = cls.ENTITY.get_by_key_name(
[cls.key_name(ns_name) for ns_name in namespace_names])
return [cls.new_dto(ns_name, entity=entity)
for ns_name, entity in zip(namespace_names, many_entities)]
@classmethod
def load_many_mapped(cls, namespace_names):
"""Returns a dict with namespace_name keys and DTO values."""
return dict([(cls.namespace_name(dto.id), dto)
for dto in cls.load_many(namespace_names)])
@classmethod
def load_all(cls):
"""Loads all DTOs that have valid entities, in no particular order.
Returns:
An iterator that produces cls.DTOs created from all the ENTITY
values in the Datastore, in no particular order.
"""
with common_utils.Namespace(appengine_config.DEFAULT_NAMESPACE_NAME):
for e in common_utils.iter_all(cls.ENTITY.all()):
if e.key().name():
yield cls.new_dto('', entity=e)
@classmethod
def delete(cls, namespace_name):
"""Deletes from the Datastore the namespace_name counter entity."""
with common_utils.Namespace(appengine_config.DEFAULT_NAMESPACE_NAME):
entity = cls.ENTITY.get_by_key_name(cls.key_name(namespace_name))
if entity is not None:
entity.delete()
@classmethod
def mark_pending(cls, dto=None, namespace_name=''):
"""Indicates that a background job is initializing the counter.
The last_modified property is set to the current time and stored without any
counter value; this state indicates to others that some background job is
currently computing the value of the counter.
Args:
dto: an existing DTO to be "marked" and stored in the Datastore;
dto.is_missing must be True.
namespace_name: used to create a new DTO if dto was not supplied.
Returns:
The original dto if one was provided; unchanged if the required
dto.is_missing precondition was not met.
Otherwise, the last_modified property will be set to the current
UTC time, but no counter value will be set, resulting in
dto.is_empty being True and now dto.is_pending also being True
(and dto.is_missing changing to False).
If an existing dto was not supplied, creates a new one using the
provided namespace_name, setting last_modified only, as above,
again resulting in dto.is_empty and dto.is_pending being True and
dto.is_missing being False.
"""
if not dto:
dto = cls.new_dto(namespace_name, the_dict={})
if dto.is_missing:
dto.set_last_modified_to_now()
cls._save(dto)
return dto
@classmethod
def _save(cls, dto):
# The "save" operation is not public because clients of the enrollments
# module should cause Datastore mutations only via set() and inc().
with common_utils.Namespace(appengine_config.DEFAULT_NAMESPACE_NAME):
entity = cls.ENTITY(key_name=dto.id)
entity.json = dto.marshal(dto.dict)
entity.put()
class TotalEnrollmentDAO(EnrollmentsDAO):
"""A single total enrollment counter for each course."""
DTO = TotalEnrollmentDTO
ENTITY = TotalEnrollmentEntity
@classmethod
def get(cls, namespace_name):
"""Returns value of a single enrollment total from the Datastore."""
return cls.load_or_default(namespace_name).get()
@classmethod
def set(cls, namespace_name, count):
"""Forces single enrollment total in the Datastore to a new count."""
dto = cls.new_dto(namespace_name, the_dict={})
dto.set(count)
cls._save(dto)
return dto
@classmethod
@db.transactional(xg=True)
def inc(cls, namespace_name, offset=1):
"""Loads an enrollment counter from the Datastore and increments it."""
dto = cls.load_or_default(namespace_name)
dto.inc(offset=offset)
cls._save(dto) # Save altered/new DTO as entity.
return dto
class BinnedEnrollmentsDAO(EnrollmentsDAO):
"""Operations common to all binned enrollments counters."""
DTO = BinnedEnrollmentsDTO
@classmethod
def get(cls, namespace_name, date_time):
"""Returns value of a binned enrollment counter from the Datastore."""
return cls.load_or_default(namespace_name).get(
utc.datetime_to_timestamp(date_time))
@classmethod
@db.transactional(xg=True)
def set(cls, namespace_name, date_time, count):
"""Sets the Datastore value of a counter in a specific bin."""
dto = cls.load_or_default(namespace_name)
dto.set(utc.datetime_to_timestamp(date_time), count)
cls._save(dto) # Save altered/new DTO as entity.
return dto
@classmethod
@db.transactional(xg=True)
def inc(cls, namespace_name, date_time, offset=1):
"""Increments the Datastore value of a counter in a specific bin."""
dto = cls.load_or_default(namespace_name)
dto.inc(utc.datetime_to_timestamp(date_time), offset=offset)
cls._save(dto) # Save altered/new DTO as entity.
return dto
class EnrollmentsAddedDAO(BinnedEnrollmentsDAO):
ENTITY = EnrollmentsAddedEntity
class EnrollmentsDroppedDAO(BinnedEnrollmentsDAO):
ENTITY = EnrollmentsDroppedEntity
class ComputeCounts(jobs.MapReduceJob):
"""MapReduce job to set the student total enrollment count for courses.
This MapReduce updates two of the course enrollments counters, the simple
'total' enrollment count and the daily-binned 'adds' counts.
The 'total' counter is updated by calling TotalEnrollmentDAO.set()
to force a known value on the 'total' counter for a specified course. The
purpose of this MapReduce is to "reset" the total enrollment count to an
absolute starting point, and then allow that count to be incremented and
decremented in real time, between runs of the MapReduce, by the registered
StudentLifecycleObserver handlers. Those handlers adjust the MapReduce-
computed starting point by the equivalent of:
(number of EVENT_ADD + number of EVENT_REENROLL)
- (number of EVENT_UNENROLL + number of EVENT_UNENROLL_COMMANDED)
Counters in the daily bins of the 'adds' counters are updated by calling
EnrollmentsAddedDAO.set() to overwrite the values for each daily bin. The
bin is determined from the Student.enrolled_on value of each student
enrolled in the specified course. To avoid race conditions between this
MapReduce and real time updates being made by the student lifecycle event
handlers, the bin corresponding to "today" when the MapReduce is run is
*not* overwritten.
"""
@classmethod
def get_description(cls):
return "Update the 'total' and 'adds' counters for a course."
@classmethod
def entity_class(cls):
return models.Student
@classmethod
def map(cls, student):
if student.is_enrolled:
yield (TotalEnrollmentEntity.COUNTING, 1)
bin_seconds_since_epoch = BinnedEnrollmentsDTO.bin(
utc.datetime_to_timestamp(student.enrolled_on))
yield (bin_seconds_since_epoch, 1)
@classmethod
def combine(cls, unused_key, values, previously_combined_outputs=None):
total = sum([int(value) for value in values])
if previously_combined_outputs is not None:
total += sum([int(value) for value in previously_combined_outputs])
yield total
@classmethod
def reduce(cls, key, values):
total = sum(int(value) for value in values)
ns_name = namespace_manager.get_namespace()
if key == TotalEnrollmentEntity.COUNTING:
TotalEnrollmentDAO.set(ns_name, total)
yield key, total
else:
# key is actually a daily 'adds' counter bin seconds since epoch.
bin_seconds_since_epoch = long(key)
today = utc.day_start(utc.now_as_timestamp())
# Avoid race conditions by not updating today's daily bin (which
# is being updated by student lifecycle events).
if bin_seconds_since_epoch != today:
date_time = utc.timestamp_to_datetime(bin_seconds_since_epoch)
EnrollmentsAddedDAO.set(ns_name, date_time, total)
@classmethod
def complete(cls, kwargs, results):
if not results:
ns_name = namespace_manager.get_namespace()
# Re-check that value actually is zero; there is a race between
# this M/R job running and student registration on the user
# lifecycle queue, so don't overwrite to zero unless it really
# is zero _now_.
if TotalEnrollmentDAO.get(ns_name) == 0:
TotalEnrollmentDAO.set(ns_name, 0)
MODULE_NAME = 'site_admin_enrollments'
class _BaseCronHandler(utils.AbstractAllCoursesCronHandler):
URL_FMT = '/cron/%s/%%s' % MODULE_NAME
@classmethod
def is_globally_enabled(cls):
return True
@classmethod
def is_enabled_for_course(cls, app_context):
return True
class StartComputeCounts(_BaseCronHandler):
"""Handle callback from cron by launching enrollments counts MapReduce."""
# /cron/site_admin_enrollments/total
URL = _BaseCronHandler.URL_FMT % TotalEnrollmentEntity.COUNTING
def cron_action(self, app_context, global_state):
job = ComputeCounts(app_context)
ns = app_context.get_namespace_name()
dto = TotalEnrollmentDAO.load_or_default(ns)
if job.is_active():
# Weekly re-computation of enrollments counters, so forcibly stop
# any already-running job and start over.
if not dto.is_empty:
logging.warning(
'CANCELING periodic "%s" enrollments total refresh found'
' unexpectedly still running.', dto.id)
elif dto.is_pending:
logging.warning(
'INTERRUPTING missing "%s" enrollments total'
' initialization started on %s.', dto.id,
utc.to_text(seconds=dto.last_modified))
job.cancel()
else:
when = dto.last_modified
if not dto.is_empty:
logging.info(
'REFRESHING existing "%s" enrollments total, %d as of %s.',
dto.id, dto.get(), utc.to_text(seconds=when))
elif dto.is_pending:
since = dto.seconds_since_last_modified()
logging.warning(
'COMPLETING "%s" enrollments total initialization started '
'on %s stalled for %s.', dto.id, utc.to_text(seconds=when),
datetime.timedelta(seconds=since))
job.submit()
def init_missing_total(enrolled_total_dto, app_context):
"""Returns True if a ComputeCounts MapReduceJob was submitted."""
name = enrolled_total_dto.id
when = enrolled_total_dto.last_modified
if not enrolled_total_dto.is_empty:
logging.warning(StartInitMissingCounts.LOG_SKIPPING_FMT,
name, enrolled_total_dto.get(), utc.to_text(seconds=when))
return False
delta = enrolled_total_dto.seconds_since_last_modified()
if enrolled_total_dto.is_pending:
if not enrolled_total_dto.is_stalled:
logging.info(
'PENDING "%s" enrollments total initialization in progress'
' for %s, since %s.', name, datetime.timedelta(seconds=delta),
utc.to_text(seconds=when))
return False
logging.warning(
'STALLED "%s" enrollments total initialization for %s, since %s.',
name, datetime.timedelta(seconds=delta), utc.to_text(seconds=when))
# enrolled_total_dto is either *completely* missing (not *just* lacking its
# counter property, and thus not indicating initialization of that count
# is in progress), or pending initialization has stalled (taken more than
# MAX_PENDING_SEC to complete), so store "now" as a last_modified value to
# indicate that a MapReduce update has been requested.
marked_dto = TotalEnrollmentDAO.mark_pending(dto=enrolled_total_dto,
namespace_name=app_context.get_namespace_name())
logging.info('SCHEDULING "%s" enrollments total update at %s.',
marked_dto.id, utc.to_text(seconds=marked_dto.last_modified))
ComputeCounts(app_context).submit()
return True
class StartInitMissingCounts(_BaseCronHandler):
"""Handle callback from cron by checking for missing enrollment totals."""
# /cron/site_admin_enrollments/missing
URL = _BaseCronHandler.URL_FMT % 'missing'
LOG_SKIPPING_FMT = (
'SKIPPING existing "%s" enrollments total recomputation, %d as of %s.')
def cron_action(self, app_context, global_state):
total_dto = TotalEnrollmentDAO.load_or_default(
app_context.get_namespace_name())
if total_dto.is_missing or total_dto.is_stalled:
init_missing_total(total_dto, app_context)
else:
# Debug-level log message only for tests examining logs.
logging.debug(self.LOG_SKIPPING_FMT,
total_dto.id, total_dto.get(),
utc.to_text(seconds=total_dto.last_modified))
def delete_counters(namespace_name):
"""Called by admin.config.DeleteCourseHandler.delete_course()."""
TotalEnrollmentDAO.delete(namespace_name)
EnrollmentsAddedDAO.delete(namespace_name)
EnrollmentsDroppedDAO.delete(namespace_name)
def _count_add(unused_id, utc_date_time):
"""Called back from student lifecycle queue when student (re-)enrolls.
This callback only increments 'total' and 'adds' counters that already
exist in the Datastore.
"""
namespace_name = namespace_manager.get_namespace()
total_dto = TotalEnrollmentDAO.load_or_default(namespace_name)
if not total_dto.is_empty:
TotalEnrollmentDAO.inc(namespace_name)
elif total_dto.is_missing:
init_missing_total(
total_dto, sites.get_app_context_for_namespace(namespace_name))
# Update today's 'adds' no matter what, because the ComputeCounts
# MapReduceJob avoids the current day bin, specifically to avoid races
# with this callback.
EnrollmentsAddedDAO.inc(namespace_name, utc_date_time)
def _count_drop(unused_id, utc_date_time):
"""Called back from StudentLifecycleObserver when user is unenrolled.
This callback only decrements 'total' and increments 'drops' counters that
already exist in the Datastore.
"""
namespace_name = namespace_manager.get_namespace()
total_dto = TotalEnrollmentDAO.load_or_default(namespace_name)
if not total_dto.is_empty:
TotalEnrollmentDAO.inc(namespace_name, offset=-1)
elif total_dto.is_missing:
init_missing_total(
total_dto, sites.get_app_context_for_namespace(namespace_name))
# Update today's 'drops' no matter what, because the ComputeCounts
# MapReduceJob avoids the current day bin, specifically to avoid races
# with this callback. (Also, the ComputeCounts MapReduceJob does
# not implement collecting drops at this time.)
EnrollmentsDroppedDAO.inc(namespace_name, utc_date_time)
def _new_course_counts(app_context, unused_errors):
"""Called back from CoursesItemRESTHandler when new course is created."""
namespace_name = app_context.get_namespace_name()
TotalEnrollmentDAO.set(namespace_name, 0)
EnrollmentsAddedDAO.set(namespace_name, utc.now_as_datetime(), 0)
EnrollmentsDroppedDAO.set(namespace_name, utc.now_as_datetime(), 0)
class EnrollmentsDataSource(data_sources.AbstractSmallRestDataSource,
data_sources.SynchronousQuery):
"""Merge adds/drops data to single source for display libraries."""
@classmethod
def get_name(cls):
return 'enrollments'
@classmethod
def get_title(cls):
return 'Enrollments'
@staticmethod
def required_generators():
return []
@classmethod
def get_schema(cls, app_context, log, source_context):
ret = schema_fields.FieldRegistry('enrollments')
ret.add_property(schema_fields.SchemaField(
'timestamp_millis', 'Milliseconds Since Epoch', 'integer'))
ret.add_property(schema_fields.SchemaField(
'add', 'Add', 'integer',
description='Number of students added in this time range'))
ret.add_property(schema_fields.SchemaField(
'drop', 'Drop', 'integer',
description='Number of students dropped in this time range'))
return ret.get_json_schema_dict()['properties']
@classmethod
def fetch_values(cls, app_context, source_ctx, schema, log, page_number):
# Get values as REST to permit simple integration to graph libraries.
add_counts = EnrollmentsAddedDAO.load_or_default(
app_context.get_namespace_name()).binned
drop_counts = EnrollmentsDroppedDAO.load_or_default(
app_context.get_namespace_name()).binned
bin_timestamps = set(add_counts.keys()) | set(drop_counts.keys())
return [
{'timestamp_millis': bin_timestamp * 1000,
'add': add_counts.get(bin_timestamp, 0),
'drop': drop_counts.get(bin_timestamp, 0)}
for bin_timestamp in bin_timestamps], 0
@classmethod
def fill_values(cls, app_context, template_values):
# Provide a boolean for do-we-have-any-data-at-all at static
# page-paint time; DC graphs look awful when empty; simpler to
# suppress than try to make nice.
dto = EnrollmentsAddedDAO.load_or_default(
app_context.get_namespace_name())
template_values['enrollment_data_available'] = not dto.is_empty
CourseEnrolled = collections.namedtuple('CourseEnrolled',
['count', 'display', 'most_recent_enroll'])
_NONE_RECENT_FMT = u'(registration activity is being computed for "{}")'
_MOST_RECENT_FMT = u'Most recent activity at {} for "{}".'
def get_course_enrolled(enrolled_dto, course_name):
if enrolled_dto.is_empty:
# 'count' property is not present, so exit early.
return CourseEnrolled(
0, NONE_ENROLLED, _NONE_RECENT_FMT.format(course_name))
count = enrolled_dto.get()
lm_dt = utc.timestamp_to_datetime(enrolled_dto.last_modified)
lm_text = utc.to_text(dt=lm_dt, fmt=utc.ISO_8601_UTC_HUMAN_FMT)
most_recent_enroll = _MOST_RECENT_FMT.format(lm_text, course_name)
return CourseEnrolled(count, count, most_recent_enroll)
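# --- Illustrative usage sketch (not part of the original module) ---
# A hedged example of how get_course_enrolled() might be consumed by a
# dashboard view. The namespace name and course title below are purely
# hypothetical; TotalEnrollmentDAO.load_or_default() is the same loader
# used by the cron handlers above.
def _example_get_course_enrolled():
    dto = TotalEnrollmentDAO.load_or_default('ns_sample_course')
    enrolled = get_course_enrolled(dto, 'Sample Course')
    # 'display' is NONE_ENROLLED while the counter is still being computed,
    # otherwise the integer count; 'most_recent_enroll' is a status string.
    return enrolled.display, enrolled.most_recent_enroll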
def register_callbacks():
# Update enrollments counters when a student enrolls, unenrolls.
models.StudentLifecycleObserver.EVENT_CALLBACKS[
models.StudentLifecycleObserver.EVENT_ADD][
MODULE_NAME] = _count_add
models.StudentLifecycleObserver.EVENT_CALLBACKS[
models.StudentLifecycleObserver.EVENT_REENROLL][
MODULE_NAME] = _count_add
models.StudentLifecycleObserver.EVENT_CALLBACKS[
models.StudentLifecycleObserver.EVENT_UNENROLL][
MODULE_NAME] = _count_drop
models.StudentLifecycleObserver.EVENT_CALLBACKS[
models.StudentLifecycleObserver.EVENT_UNENROLL_COMMANDED][
MODULE_NAME] = _count_drop
# Set counters for newly-created courses initially to zero (to avoid
# extraneous enrollments MapReduce runs).
config.CoursesItemRESTHandler.NEW_COURSE_ADDED_HOOKS[
MODULE_NAME] = _new_course_counts
# Delete the corresponding enrollments counters when a course is deleted.
config.CourseDeleteHandler.COURSE_DELETED_HOOKS[
MODULE_NAME] = delete_counters
# Register analytic to show nice zoomable graph of enroll/unenroll rates.
data_sources.Registry.register(EnrollmentsDataSource)
visualization = analytics.Visualization(
'enrollments', 'Enrollments', 'templates/enrollments.html',
data_source_classes=[EnrollmentsDataSource])
dashboard.DashboardHandler.add_sub_nav_mapping(
'analytics', 'enrollments', 'Enrollments',
action='analytics_enrollments',
contents=analytics.TabRenderer([visualization]))
|
GirlsCodePy/girlscode-coursebuilder
|
modules/admin/enrollments.py
|
Python
|
gpl-3.0
| 37,647
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com>, 2015
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The match_hostname function and supporting code is under the terms and
# conditions of the Python Software Foundation License. They were taken from
# the Python3 standard library and adapted for use in Python2. See comments in the
# source for which code precisely is under this License. PSF License text
# follows:
#
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
# --------------------------------------------
#
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
# otherwise using this software ("Python") in source or binary form and
# its associated documentation.
#
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
# analyze, test, perform and/or display publicly, prepare derivative works,
# distribute, and otherwise use Python alone or in any derivative version,
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
# retained in Python alone or in any derivative version prepared by Licensee.
#
# 3. In the event Licensee prepares a derivative work that is based on
# or incorporates Python or any part thereof, and wants to make
# the derivative work available to others as provided herein, then
# Licensee hereby agrees to include in any such work a brief summary of
# the changes made to Python.
#
# 4. PSF is making Python available to Licensee on an "AS IS"
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
# INFRINGE ANY THIRD PARTY RIGHTS.
#
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
#
# 6. This License Agreement will automatically terminate upon a material
# breach of its terms and conditions.
#
# 7. Nothing in this License Agreement shall be deemed to create any
# relationship of agency, partnership, or joint venture between PSF and
# Licensee. This License Agreement does not grant permission to use PSF
# trademarks or trade name in a trademark sense to endorse or promote
# products or services of Licensee, or any third party.
#
# 8. By copying, installing or otherwise using Python, Licensee
# agrees to be bound by the terms and conditions of this License
# Agreement.
try:
import urllib2
HAS_URLLIB2 = True
except:
HAS_URLLIB2 = False
try:
import urlparse
HAS_URLPARSE = True
except:
HAS_URLPARSE = False
try:
import ssl
HAS_SSL = True
except:
HAS_SSL = False
try:
# SNI Handling needs python2.7.9's SSLContext
from ssl import create_default_context, SSLContext
HAS_SSLCONTEXT = True
except ImportError:
HAS_SSLCONTEXT = False
# Select a protocol that includes all secure tls protocols
# Exclude insecure ssl protocols if possible
if HAS_SSL:
# If we can't find extra tls methods, ssl.PROTOCOL_TLSv1 is sufficient
PROTOCOL = ssl.PROTOCOL_TLSv1
if not HAS_SSLCONTEXT and HAS_SSL:
try:
import ctypes, ctypes.util
except ImportError:
# python 2.4 (likely rhel5 which doesn't have tls1.1 support in its openssl)
pass
else:
libssl_name = ctypes.util.find_library('ssl')
libssl = ctypes.CDLL(libssl_name)
for method in ('TLSv1_1_method', 'TLSv1_2_method'):
try:
libssl[method]
# Found something - we'll let openssl autonegotiate and hope
# the server has disabled sslv2 and 3. best we can do.
PROTOCOL = ssl.PROTOCOL_SSLv23
break
except AttributeError:
pass
del libssl
HAS_MATCH_HOSTNAME = True
try:
from ssl import match_hostname, CertificateError
except ImportError:
try:
from backports.ssl_match_hostname import match_hostname, CertificateError
except ImportError:
HAS_MATCH_HOSTNAME = False
if not HAS_MATCH_HOSTNAME:
###
### The following block of code is under the terms and conditions of the
### Python Software Foundation License
###
"""The match_hostname() function from Python 3.4, essential when using SSL."""
import re
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
###
### End of Python Software Foundation Licensed code
###
HAS_MATCH_HOSTNAME = True
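# --- Illustrative usage sketch (not part of the original file) ---
# A small, hedged example of how match_hostname() behaves with a decoded
# certificate dictionary; the certificate content below is made up purely
# for illustration and the function is not executed at import time.
def _example_match_hostname():
    cert = {'subjectAltName': (('DNS', '*.example.com'),)}
    match_hostname(cert, 'www.example.com')    # wildcard entry matches
    try:
        match_hostname(cert, 'example.org')    # no match: CertificateError
    except CertificateError:
        pass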
import httplib
import os
import re
import sys
import socket
import platform
import tempfile
import base64
# This is a dummy cacert provided for Mac OS since you need at least 1
# ca cert, regardless of validity, for Python on Mac OS to use the
# keychain functionality in OpenSSL for validating SSL certificates.
# See: http://mercurial.selenic.com/wiki/CACertificates#Mac_OS_X_10.6_and_higher
DUMMY_CA_CERT = """-----BEGIN CERTIFICATE-----
MIICvDCCAiWgAwIBAgIJAO8E12S7/qEpMA0GCSqGSIb3DQEBBQUAMEkxCzAJBgNV
BAYTAlVTMRcwFQYDVQQIEw5Ob3J0aCBDYXJvbGluYTEPMA0GA1UEBxMGRHVyaGFt
MRAwDgYDVQQKEwdBbnNpYmxlMB4XDTE0MDMxODIyMDAyMloXDTI0MDMxNTIyMDAy
MlowSTELMAkGA1UEBhMCVVMxFzAVBgNVBAgTDk5vcnRoIENhcm9saW5hMQ8wDQYD
VQQHEwZEdXJoYW0xEDAOBgNVBAoTB0Fuc2libGUwgZ8wDQYJKoZIhvcNAQEBBQAD
gY0AMIGJAoGBANtvpPq3IlNlRbCHhZAcP6WCzhc5RbsDqyh1zrkmLi0GwcQ3z/r9
gaWfQBYhHpobK2Tiq11TfraHeNB3/VfNImjZcGpN8Fl3MWwu7LfVkJy3gNNnxkA1
4Go0/LmIvRFHhbzgfuo9NFgjPmmab9eqXJceqZIlz2C8xA7EeG7ku0+vAgMBAAGj
gaswgagwHQYDVR0OBBYEFPnN1nPRqNDXGlCqCvdZchRNi/FaMHkGA1UdIwRyMHCA
FPnN1nPRqNDXGlCqCvdZchRNi/FaoU2kSzBJMQswCQYDVQQGEwJVUzEXMBUGA1UE
CBMOTm9ydGggQ2Fyb2xpbmExDzANBgNVBAcTBkR1cmhhbTEQMA4GA1UEChMHQW5z
aWJsZYIJAO8E12S7/qEpMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEA
MUB80IR6knq9K/tY+hvPsZer6eFMzO3JGkRFBh2kn6JdMDnhYGX7AXVHGflrwNQH
qFy+aenWXsC0ZvrikFxbQnX8GVtDADtVznxOi7XzFw7JOxdsVrpXgSN0eh0aMzvV
zKPZsZ2miVGclicJHzm5q080b1p/sZtuKIEZk6vZqEg=
-----END CERTIFICATE-----
"""
#
# Exceptions
#
class ConnectionError(Exception):
"""Failed to connect to the server"""
pass
class ProxyError(ConnectionError):
"""Failure to connect because of a proxy"""
pass
class SSLValidationError(ConnectionError):
"""Failure to connect due to SSL validation failing"""
pass
class NoSSLError(SSLValidationError):
"""Needed to connect to an HTTPS url but no ssl library available to verify the certificate"""
pass
class CustomHTTPSConnection(httplib.HTTPSConnection):
def __init__(self, *args, **kwargs):
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
if HAS_SSLCONTEXT:
self.context = create_default_context()
if self.cert_file:
self.context.load_cert_chain(self.cert_file, self.key_file)
def connect(self):
"Connect to a host on a given (SSL) port."
if hasattr(self, 'source_address'):
sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address)
else:
sock = socket.create_connection((self.host, self.port), self.timeout)
if self._tunnel_host:
self.sock = sock
self._tunnel()
if HAS_SSLCONTEXT:
self.sock = self.context.wrap_socket(sock, server_hostname=self.host)
else:
self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL)
class CustomHTTPSHandler(urllib2.HTTPSHandler):
def https_open(self, req):
return self.do_open(CustomHTTPSConnection, req)
https_request = urllib2.AbstractHTTPHandler.do_request_
def generic_urlparse(parts):
'''
Returns a dictionary of url parts as parsed by urlparse,
but accounts for the fact that older versions of that
library do not support named attributes (ie. .netloc)
'''
generic_parts = dict()
if hasattr(parts, 'netloc'):
# urlparse is newer, just read the fields straight
# from the parts object
generic_parts['scheme'] = parts.scheme
generic_parts['netloc'] = parts.netloc
generic_parts['path'] = parts.path
generic_parts['params'] = parts.params
generic_parts['query'] = parts.query
generic_parts['fragment'] = parts.fragment
generic_parts['username'] = parts.username
generic_parts['password'] = parts.password
generic_parts['hostname'] = parts.hostname
generic_parts['port'] = parts.port
else:
# we have to use indexes, and then parse out
# the other parts not supported by indexing
generic_parts['scheme'] = parts[0]
generic_parts['netloc'] = parts[1]
generic_parts['path'] = parts[2]
generic_parts['params'] = parts[3]
generic_parts['query'] = parts[4]
generic_parts['fragment'] = parts[5]
# get the username, password, etc.
try:
netloc_re = re.compile(r'^((?:\w)+(?::(?:\w)+)?@)?([A-Za-z0-9.-]+)(:\d+)?$')
            (auth, hostname, port) = netloc_re.match(parts[1]).groups()
if port:
# the capture group for the port will include the ':',
# so remove it and convert the port to an integer
port = int(port[1:])
if auth:
                # the capture group above includes the @, so remove it
# and then split it up based on the first ':' found
auth = auth[:-1]
username, password = auth.split(':', 1)
generic_parts['username'] = username
generic_parts['password'] = password
generic_parts['hostname'] = hostname
generic_parts['port'] = port
except:
generic_parts['username'] = None
generic_parts['password'] = None
generic_parts['hostname'] = None
generic_parts['port'] = None
return generic_parts
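# --- Illustrative usage sketch (not part of the original file) ---
# A minimal, hedged example of generic_urlparse(); the URL is hypothetical
# and the call assumes urlparse was imported successfully above.
def _example_generic_urlparse():
    parts = generic_urlparse(
        urlparse.urlparse('http://user:secret@example.com:8080/path?q=1'))
    # Named attributes are copied when available; otherwise the regex
    # fallback extracts username/password/hostname/port from the netloc.
    return parts['hostname'], parts['port'], parts['username']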
class RequestWithMethod(urllib2.Request):
'''
Workaround for using DELETE/PUT/etc with urllib2
Originally contained in library/net_infrastructure/dnsmadeeasy
'''
def __init__(self, url, method, data=None, headers={}):
self._method = method
urllib2.Request.__init__(self, url, data, headers)
def get_method(self):
if self._method:
return self._method
else:
return urllib2.Request.get_method(self)
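# --- Illustrative usage sketch (not part of the original file) ---
# A hedged example of issuing a DELETE request via RequestWithMethod,
# assuming urllib2 was imported successfully above; the URL is hypothetical
# and the request is only sent if this helper is explicitly called.
def _example_request_with_method():
    req = RequestWithMethod('http://example.com/api/items/1', 'DELETE')
    return urllib2.urlopen(req)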
class SSLValidationHandler(urllib2.BaseHandler):
'''
A custom handler class for SSL validation.
Based on:
http://stackoverflow.com/questions/1087227/validate-ssl-certificates-with-python
http://techknack.net/python-urllib2-handlers/
'''
CONNECT_COMMAND = "CONNECT %s:%s HTTP/1.0\r\nConnection: close\r\n"
def __init__(self, hostname, port):
self.hostname = hostname
self.port = port
def get_ca_certs(self):
# tries to find a valid CA cert in one of the
# standard locations for the current distribution
ca_certs = []
paths_checked = []
system = platform.system()
# build a list of paths to check for .crt/.pem files
# based on the platform type
paths_checked.append('/etc/ssl/certs')
if system == 'Linux':
paths_checked.append('/etc/pki/ca-trust/extracted/pem')
paths_checked.append('/etc/pki/tls/certs')
paths_checked.append('/usr/share/ca-certificates/cacert.org')
elif system == 'FreeBSD':
paths_checked.append('/usr/local/share/certs')
elif system == 'OpenBSD':
paths_checked.append('/etc/ssl')
elif system == 'NetBSD':
ca_certs.append('/etc/openssl/certs')
elif system == 'SunOS':
paths_checked.append('/opt/local/etc/openssl/certs')
# fall back to a user-deployed cert in a standard
# location if the OS platform one is not available
paths_checked.append('/etc/ansible')
tmp_fd, tmp_path = tempfile.mkstemp()
# Write the dummy ca cert if we are running on Mac OS X
if system == 'Darwin':
os.write(tmp_fd, DUMMY_CA_CERT)
# Default Homebrew path for OpenSSL certs
paths_checked.append('/usr/local/etc/openssl')
# for all of the paths, find any .crt or .pem files
# and compile them into single temp file for use
# in the ssl check to speed up the test
for path in paths_checked:
if os.path.exists(path) and os.path.isdir(path):
dir_contents = os.listdir(path)
for f in dir_contents:
full_path = os.path.join(path, f)
if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt','.pem'):
try:
cert_file = open(full_path, 'r')
os.write(tmp_fd, cert_file.read())
os.write(tmp_fd, '\n')
cert_file.close()
except:
pass
return (tmp_path, paths_checked)
def validate_proxy_response(self, response, valid_codes=[200]):
'''
make sure we get back a valid code from the proxy
'''
try:
(http_version, resp_code, msg) = re.match(r'(HTTP/\d\.\d) (\d\d\d) (.*)', response).groups()
if int(resp_code) not in valid_codes:
raise Exception
except:
raise ProxyError('Connection to proxy failed')
def detect_no_proxy(self, url):
'''
Detect if the 'no_proxy' environment variable is set and honor those locations.
'''
env_no_proxy = os.environ.get('no_proxy')
if env_no_proxy:
env_no_proxy = env_no_proxy.split(',')
netloc = urlparse.urlparse(url).netloc
for host in env_no_proxy:
if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
# Our requested URL matches something in no_proxy, so don't
# use the proxy for this
return False
return True
def _make_context(self, tmp_ca_cert_path):
context = create_default_context()
context.load_verify_locations(tmp_ca_cert_path)
return context
def http_request(self, req):
tmp_ca_cert_path, paths_checked = self.get_ca_certs()
https_proxy = os.environ.get('https_proxy')
context = None
if HAS_SSLCONTEXT:
context = self._make_context(tmp_ca_cert_path)
# Detect if 'no_proxy' environment variable is set and if our URL is included
use_proxy = self.detect_no_proxy(req.get_full_url())
if not use_proxy:
# ignore proxy settings for this host request
return req
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if https_proxy:
proxy_parts = generic_urlparse(urlparse.urlparse(https_proxy))
s.connect((proxy_parts.get('hostname'), proxy_parts.get('port')))
if proxy_parts.get('scheme') == 'http':
s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
if proxy_parts.get('username'):
credentials = "%s:%s" % (proxy_parts.get('username',''), proxy_parts.get('password',''))
s.sendall('Proxy-Authorization: Basic %s\r\n' % credentials.encode('base64').strip())
s.sendall('\r\n')
connect_result = s.recv(4096)
self.validate_proxy_response(connect_result)
if context:
ssl_s = context.wrap_socket(s, server_hostname=proxy_parts.get('hostname'))
else:
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
match_hostname(ssl_s.getpeercert(), self.hostname)
else:
raise ProxyError('Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
else:
s.connect((self.hostname, self.port))
if context:
ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
else:
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
match_hostname(ssl_s.getpeercert(), self.hostname)
# close the ssl connection
#ssl_s.unwrap()
s.close()
except (ssl.SSLError, socket.error), e:
# fail if we tried all of the certs but none worked
if 'connection refused' in str(e).lower():
raise ConnectionError('Failed to connect to %s:%s.' % (self.hostname, self.port))
else:
raise SSLValidationError('Failed to validate the SSL certificate for %s:%s.'
' Make sure your managed systems have a valid CA'
' certificate installed. If the website serving the url'
' uses SNI you need python >= 2.7.9 on your managed'
' machine. You can use validate_certs=False if you do'
                                         ' not need to confirm the server\'s identity, but this is'
                                         ' unsafe and not recommended.'
' Paths checked for this platform: %s' % (self.hostname, self.port, ", ".join(paths_checked))
)
except CertificateError:
raise SSLValidationError("SSL Certificate does not belong to %s. Make sure the url has a certificate that belongs to it or use validate_certs=False (insecure)" % self.hostname)
try:
# cleanup the temp file created, don't worry
# if it fails for some reason
os.remove(tmp_ca_cert_path)
except:
pass
return req
https_request = http_request
# Rewrite of fetch_url to not require the module environment
def open_url(url, data=None, headers=None, method=None, use_proxy=True,
force=False, last_mod_time=None, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None, force_basic_auth=False):
'''
Fetches a file from an HTTP/FTP server using urllib2
'''
handlers = []
# FIXME: change the following to use the generic_urlparse function
# to remove the indexed references for 'parsed'
parsed = urlparse.urlparse(url)
if parsed[0] == 'https' and validate_certs:
if not HAS_SSL:
raise NoSSLError('SSL validation is not available in your version of python. You can use validate_certs=False, however this is unsafe and not recommended')
# do the cert validation
netloc = parsed[1]
if '@' in netloc:
netloc = netloc.split('@', 1)[1]
if ':' in netloc:
hostname, port = netloc.split(':', 1)
port = int(port)
else:
hostname = netloc
port = 443
# create the SSL validation handler and
# add it to the list of handlers
ssl_handler = SSLValidationHandler(hostname, port)
handlers.append(ssl_handler)
if parsed[0] != 'ftp':
username = url_username
if username:
password = url_password
netloc = parsed[1]
elif '@' in parsed[1]:
credentials, netloc = parsed[1].split('@', 1)
if ':' in credentials:
username, password = credentials.split(':', 1)
else:
username = credentials
password = ''
parsed = list(parsed)
parsed[1] = netloc
# reconstruct url without credentials
url = urlparse.urlunparse(parsed)
if username and not force_basic_auth:
passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
# this creates a password manager
passman.add_password(None, netloc, username, password)
# because we have put None at the start it will always
# use this username/password combination for urls
# for which `theurl` is a super-url
authhandler = urllib2.HTTPBasicAuthHandler(passman)
# create the AuthHandler
handlers.append(authhandler)
elif username and force_basic_auth:
if headers is None:
headers = {}
headers["Authorization"] = "Basic {0}".format(base64.b64encode("{0}:{1}".format(username, password)))
if not use_proxy:
proxyhandler = urllib2.ProxyHandler({})
handlers.append(proxyhandler)
    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket module lacks the create_connection() method
if hasattr(socket, 'create_connection'):
handlers.append(CustomHTTPSHandler)
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
if method:
if method.upper() not in ('OPTIONS','GET','HEAD','POST','PUT','DELETE','TRACE','CONNECT'):
            raise ConnectionError('invalid HTTP request method: %s' % method.upper())
request = RequestWithMethod(url, method.upper(), data)
else:
request = urllib2.Request(url, data)
# add the custom agent header, to help prevent issues
# with sites that block the default urllib agent string
request.add_header('User-agent', http_agent)
# if we're ok with getting a 304, set the timestamp in the
# header, otherwise make sure we don't get a cached copy
if last_mod_time and not force:
tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
request.add_header('If-Modified-Since', tstamp)
else:
request.add_header('cache-control', 'no-cache')
# user defined headers now, which may override things we've set above
if headers:
if not isinstance(headers, dict):
raise ValueError("headers provided to fetch_url() must be a dict")
for header in headers:
request.add_header(header, headers[header])
urlopen_args = [request, None]
if sys.version_info >= (2,6,0):
# urlopen in python prior to 2.6.0 did not
# have a timeout parameter
urlopen_args.append(timeout)
if HAS_SSLCONTEXT and not validate_certs:
# In 2.7.9, the default context validates certificates
context = SSLContext(ssl.PROTOCOL_SSLv23)
context.options |= ssl.OP_NO_SSLv2
context.options |= ssl.OP_NO_SSLv3
context.verify_mode = ssl.CERT_NONE
context.check_hostname = False
urlopen_args += (None, None, None, context)
r = urllib2.urlopen(*urlopen_args)
return r
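# --- Illustrative usage sketch (not part of the original file) ---
# A minimal, hedged example of calling open_url() directly (outside the
# module environment); the URL and credentials below are hypothetical.
def _example_open_url():
    response = open_url('https://example.com/api/status', method='GET',
                        url_username='admin', url_password='secret',
                        timeout=5, http_agent='ansible-httpget')
    return response.read()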
#
# Module-related functions
#
def url_argument_spec():
'''
Creates an argument spec that can be used with any module
that will be requesting content via urllib/urllib2
'''
return dict(
url = dict(),
force = dict(default='no', aliases=['thirsty'], type='bool'),
http_agent = dict(default='ansible-httpget'),
use_proxy = dict(default='yes', type='bool'),
validate_certs = dict(default='yes', type='bool'),
url_username = dict(required=False),
url_password = dict(required=False),
force_basic_auth = dict(required=False, type='bool', default='no'),
)
def fetch_url(module, url, data=None, headers=None, method=None,
use_proxy=True, force=False, last_mod_time=None, timeout=10):
'''
Fetches a file from an HTTP/FTP server using urllib2. Requires the module environment
'''
if not HAS_URLLIB2:
module.fail_json(msg='urllib2 is not installed')
elif not HAS_URLPARSE:
module.fail_json(msg='urlparse is not installed')
# Get validate_certs from the module params
validate_certs = module.params.get('validate_certs', True)
username = module.params.get('url_username', '')
password = module.params.get('url_password', '')
http_agent = module.params.get('http_agent', None)
force_basic_auth = module.params.get('force_basic_auth', '')
r = None
info = dict(url=url)
try:
r = open_url(url, data=data, headers=headers, method=method,
use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout,
validate_certs=validate_certs, url_username=username,
url_password=password, http_agent=http_agent, force_basic_auth=force_basic_auth)
info.update(r.info())
info['url'] = r.geturl() # The URL goes in too, because of redirects.
info.update(dict(msg="OK (%s bytes)" % r.headers.get('Content-Length', 'unknown'), status=200))
except NoSSLError, e:
distribution = get_distribution()
if distribution.lower() == 'redhat':
module.fail_json(msg='%s. You can also install python-ssl from EPEL' % str(e))
except (ConnectionError, ValueError), e:
module.fail_json(msg=str(e))
except urllib2.HTTPError, e:
info.update(dict(msg=str(e), status=e.code))
except urllib2.URLError, e:
code = int(getattr(e, 'code', -1))
info.update(dict(msg="Request failed: %s" % str(e), status=code))
except socket.error, e:
info.update(dict(msg="Connection failure: %s" % str(e), status=-1))
except Exception, e:
info.update(dict(msg="An unknown error occurred: %s" % str(e), status=-1))
return r, info
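# --- Illustrative usage sketch (not part of the original file) ---
# A hedged example of wiring url_argument_spec() into a module's argument
# spec and then calling fetch_url(); AnsibleModule is assumed to be created
# by the caller, and the URL below is hypothetical.
def _example_fetch_url(module):
    r, info = fetch_url(module, 'https://example.com/api/status', method='GET')
    if info.get('status') != 200:
        module.fail_json(msg=info.get('msg', 'request failed'))
    return r.read()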
|
hnakamur/ansible
|
lib/ansible/module_utils/urls.py
|
Python
|
gpl-3.0
| 31,571
|
import taggit
from django import forms
from django.utils.translation import ugettext_lazy as _
from geonode.people.models import Profile
from geonode.documents.models import Document
class DocumentForm(forms.ModelForm):
date = forms.DateTimeField(widget=forms.SplitDateTimeWidget)
date.widget.widgets[0].attrs = {"class":"datepicker", 'data-date-format': "yyyy-mm-dd"}
date.widget.widgets[1].attrs = {"class":"time"}
temporal_extent_start = forms.DateField(required=False,widget=forms.DateInput(attrs={"class":"datepicker", 'data-date-format': "yyyy-mm-dd"}))
temporal_extent_end = forms.DateField(required=False,widget=forms.DateInput(attrs={"class":"datepicker", 'data-date-format': "yyyy-mm-dd"}))
poc = forms.ModelChoiceField(empty_label = "Person outside GeoNode (fill form)",
label = "Point Of Contact", required=False,
queryset = Profile.objects.exclude(user=None))
metadata_author = forms.ModelChoiceField(empty_label = "Person outside GeoNode (fill form)",
label = "Metadata Author", required=False,
queryset = Profile.objects.exclude(user=None))
keywords = taggit.forms.TagField(required=False,
help_text=_("A space or comma-separated list of keywords"))
class Meta:
model = Document
exclude = ('contacts','workspace', 'store', 'name', 'uuid', 'storeType', 'typename',
'bbox_x0', 'bbox_x1', 'bbox_y0', 'bbox_y1', 'srid',
'csw_typename', 'csw_schema', 'csw_mdsource', 'csw_type',
'csw_wkt_geometry', 'metadata_uploaded', 'metadata_xml', 'csw_anytext',
'content_type', 'object_id', 'doc_file', 'extension', 'popular_count', 'share_count', 'thumbnail')
class DocumentDescriptionForm(forms.Form):
title = forms.CharField(300)
abstract = forms.CharField(1000, widget=forms.Textarea, required=False)
keywords = forms.CharField(500, required=False)
|
sopac/pacgeo
|
geonode/documents/forms.py
|
Python
|
gpl-3.0
| 2,091
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations for embeddings."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
def embedding_lookup(params, ids, partition_strategy="mod", name=None):
"""Looks up `ids` in a list of embedding tensors.
This function is used to perform parallel lookups on the list of
tensors in `params`. It is a generalization of
[`tf.gather()`](../../api_docs/python/array_ops.md#gather), where `params` is
interpreted as a partition of a larger embedding tensor.
If `len(params) > 1`, each element `id` of `ids` is partitioned between
the elements of `params` according to the `partition_strategy`.
In all strategies, if the id space does not evenly divide the number of
partitions, each of the first `(max_id + 1) % len(params)` partitions will
be assigned one more id.
If `partition_strategy` is `"mod"`, we assign each id to partition
`p = id % len(params)`. For instance,
13 ids are split across 5 partitions as:
`[[0, 5, 10], [1, 6, 11], [2, 7, 12], [3, 8], [4, 9]]`
If `partition_strategy` is `"div"`, we assign ids to partitions in a
contiguous manner. In this case, 13 ids are split across 5 partitions as:
`[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10], [11, 12]]`
The results of the lookup are concatenated into a dense
tensor. The returned tensor has shape `shape(ids) + shape(params)[1:]`.
Args:
params: A list of tensors with the same type and which can be concatenated
along dimension 0. Each `Tensor` must be appropriately sized for the given
`partition_strategy`.
ids: A `Tensor` with type `int32` or `int64` containing the ids to be looked
up in `params`.
partition_strategy: A string specifying the partitioning strategy, relevant
if `len(params) > 1`. Currently `"div"` and `"mod"` are supported. Default
is `"mod"`.
name: A name for the operation (optional).
Returns:
A `Tensor` with the same type as the tensors in `params`.
Raises:
ValueError: If `params` is empty.
"""
if not isinstance(params, list):
params = [params]
with ops.op_scope(params + [ids], name, "embedding_lookup") as name:
if not params:
raise ValueError("Need at least one param")
np = len(params) # Number of partitions
params = ops.convert_n_to_tensor_or_indexed_slices(params, name="params")
if np == 1:
with ops.device(params[0].device):
return array_ops.gather(params[0], ids, name=name)
else:
ids = ops.convert_to_tensor(ids, name="ids")
flat_ids = array_ops.reshape(ids, [-1])
original_indices = math_ops.range(array_ops.size(flat_ids))
# Create p_assignments and set new_ids depending on the strategy.
if partition_strategy == "mod":
p_assignments = flat_ids % np
new_ids = flat_ids // np
elif partition_strategy == "div":
# Compute num_total_ids as the sum of dim-0 of params, then assign to
# partitions based on a constant number of ids per partition. Optimize
# if we already know the full shape statically.
dim_0_size = params[0].get_shape()[0]
for p in xrange(1, np):
dim_0_size += params[p].get_shape()[0]
if dim_0_size.value:
num_total_ids = constant_op.constant(dim_0_size.value, flat_ids.dtype)
else:
dim_0_sizes = []
for p in xrange(np):
with ops.device(params[p].device):
dim_0_sizes.append(array_ops.shape(params[p])[0])
num_total_ids = math_ops.reduce_sum(
math_ops.cast(array_ops.pack(dim_0_sizes), flat_ids.dtype))
ids_per_partition = num_total_ids // np
extras = num_total_ids % np
p_assignments = math_ops.maximum(
flat_ids // (ids_per_partition + 1),
(flat_ids - extras) // ids_per_partition)
# Emulate a conditional using a boolean indicator tensor
is_in_first_extras_partitions = math_ops.cast(
p_assignments < extras, flat_ids.dtype)
new_ids = (
is_in_first_extras_partitions * (
flat_ids % (ids_per_partition + 1)) +
(1 - is_in_first_extras_partitions) * (
(flat_ids - extras) % ids_per_partition))
else:
raise ValueError("Unrecognized partition strategy: " +
partition_strategy)
# Cast partition assignments to int32 for use in dynamic_partition.
# There really should not be more than 2^32 partitions.
p_assignments = math_ops.cast(p_assignments, dtypes.int32)
# Partition list of ids based on assignments into np separate lists
gather_ids = data_flow_ops.dynamic_partition(new_ids, p_assignments, np)
# Similarly, partition the original indices.
pindices = data_flow_ops.dynamic_partition(original_indices,
p_assignments, np)
# Do np separate lookups, finding embeddings for plist[p] in params[p]
partitioned_result = []
for p in xrange(np):
with ops.device(params[p].device):
partitioned_result.append(array_ops.gather(params[p], gather_ids[p]))
# Stitch these back together
ret = data_flow_ops.dynamic_stitch(pindices, partitioned_result,
name=name)
# Reshape to reverse the flattening of ids.
# It's important that we compute params[0].shape on the right device
# to avoid data motion.
with ops.device(params[0].device):
params_shape = array_ops.shape(params[0])
ret = array_ops.reshape(ret, array_ops.concat(0, [
array_ops.shape(ids), array_ops.slice(params_shape, [1], [-1])]))
# output shape = ids.shape + params[*].shape[1:]
# Normally the reshape is sufficient, but setting shape explicitly
# teaches shape inference that params[1:].get_shape() matters.
element_shape = params[0].get_shape()[1:]
for p in params[1:]:
element_shape = element_shape.merge_with(p.get_shape()[1:])
ret.set_shape(ids.get_shape().concatenate(element_shape))
return ret
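# --- Illustrative usage sketch (not part of the original file) ---
# A minimal, hedged example of a partitioned lookup with the "mod" strategy:
# two shards hold ids {0, 2} and {1, 3} respectively, so looking up
# [2, 1, 3] should return rows [[2, 2], [1, 1], [3, 3]]. Values are made up.
def _example_embedding_lookup():
  shards = [constant_op.constant([[0., 0.], [2., 2.]]),  # rows for ids 0, 2
            constant_op.constant([[1., 1.], [3., 3.]])]  # rows for ids 1, 3
  ids = constant_op.constant([2, 1, 3])
  return embedding_lookup(shards, ids, partition_strategy="mod")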
# TODO(lif): Add support for higher-rank SparseTensors
def embedding_lookup_sparse(params, sp_ids, sp_weights,
partition_strategy="mod",
name=None,
combiner="mean"):
"""Computes embeddings for the given ids and weights.
This op assumes that there is at least one id for each row in the dense tensor
represented by sp_ids (i.e. there are no rows with empty features), and that
all the indices of sp_ids are in canonical row-major order.
It also assumes that all id values lie in the range [0, p0), where p0
is the sum of the size of params along dimension 0.
Args:
params: A single tensor representing the complete embedding tensor,
or a list of P tensors all of same shape except for the first dimension,
representing sharded embedding tensors.
sp_ids: N x M SparseTensor of int64 ids (typically from FeatureValueToId),
where N is typically batch size and M is arbitrary.
sp_weights: either a SparseTensor of float / double weights, or None to
indicate all weights should be taken to be 1. If specified, sp_weights
must have exactly the same shape and indices as sp_ids.
partition_strategy: A string specifying the partitioning strategy, relevant
if `len(params) > 1`. Currently `"div"` and `"mod"` are supported. Default
is `"mod"`. See `tf.nn.embedding_lookup` for more details.
name: Optional name for the op.
combiner: A string specifying the reduction op. Currently "mean" and "sum"
are supported.
"sum" computes the weighted sum of the embedding results for each row.
"mean" is the weighted sum divided by the total weight.
Returns:
A dense tensor representing the combined embeddings for the
sparse ids. For each row in the dense tensor represented by sp_ids, the op
looks up the embeddings for all ids in that row, multiplies them by the
corresponding weight, and combines these embeddings as specified.
In other words, if
shape(combined params) = [p0, p1, ..., pm]
and
shape(sp_ids) = shape(sp_weights) = [d0, d1, ..., dn]
then
shape(output) = [d0, d1, ..., dn-1, p1, ..., pm].
For instance, if params is a 10x20 matrix, and sp_ids / sp_weights are
[0, 0]: id 1, weight 2.0
[0, 1]: id 3, weight 0.5
[1, 0]: id 0, weight 1.0
[2, 3]: id 1, weight 3.0
with combiner="mean", then the output will be a 3x20 matrix where
output[0, :] = (params[1, :] * 2.0 + params[3, :] * 0.5) / (2.0 + 0.5)
output[1, :] = params[0, :] * 1.0
output[2, :] = params[1, :] * 3.0
Raises:
TypeError: If sp_ids is not a SparseTensor, or if sp_weights is neither
None nor SparseTensor.
ValueError: If combiner is not one of {"mean", "sum"}.
"""
if combiner not in ("mean", "sum"):
raise ValueError("combiner must be one of 'mean' or 'sum'")
if not isinstance(params, list):
params = [params]
if not isinstance(sp_ids, ops.SparseTensor):
raise TypeError("sp_ids must be SparseTensor")
ignore_weights = sp_weights is None
if not ignore_weights and not isinstance(sp_weights, ops.SparseTensor):
raise TypeError("sp_weights must be either None or SparseTensor")
with ops.op_scope(params + [sp_ids], name, "embedding_lookup_sparse") as name:
segment_ids = sp_ids.indices[:, 0]
if segment_ids.dtype != dtypes.int32:
segment_ids = math_ops.cast(segment_ids, dtypes.int32)
ids = sp_ids.values
if ignore_weights:
ids, idx = array_ops.unique(ids)
else:
idx = None
embeddings = embedding_lookup(
params, ids, partition_strategy=partition_strategy)
if not ignore_weights:
weights = sp_weights.values
if weights.dtype != embeddings.dtype:
weights = math_ops.cast(weights, embeddings.dtype)
# Reshape weights to allow broadcast
ones = array_ops.fill(
array_ops.expand_dims(array_ops.rank(embeddings) - 1, 0), 1)
bcast_weights_shape = array_ops.concat(0, [
array_ops.shape(weights), ones])
weights = array_ops.reshape(weights, bcast_weights_shape)
embeddings *= weights
if combiner == "sum":
embeddings = math_ops.segment_sum(embeddings, segment_ids, name=name)
elif combiner == "mean":
embeddings = math_ops.segment_sum(embeddings, segment_ids)
weight_sum = math_ops.segment_sum(weights, segment_ids)
embeddings = math_ops.div(embeddings, weight_sum, name=name)
else:
assert False, "Unrecognized combiner"
else:
assert idx is not None
if combiner == "sum":
embeddings = math_ops.sparse_segment_sum(embeddings, idx, segment_ids,
name=name)
elif combiner == "mean":
embeddings = math_ops.sparse_segment_mean(embeddings, idx, segment_ids,
name=name)
else:
assert False, "Unrecognized combiner"
return embeddings
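# --- Illustrative usage sketch (not part of the original file) ---
# A hedged sketch of a "mean"-combined sparse lookup: row 0 averages the
# embeddings for ids 1 and 3, row 1 takes the embedding for id 0. Shapes
# and values are made up; sp_weights=None gives every id a weight of 1.
def _example_embedding_lookup_sparse():
  params = constant_op.constant([[1., 1.], [2., 2.], [3., 3.], [4., 4.]])
  sp_ids = ops.SparseTensor(
      indices=constant_op.constant([[0, 0], [0, 1], [1, 0]], dtypes.int64),
      values=constant_op.constant([1, 3, 0], dtypes.int64),
      shape=constant_op.constant([2, 2], dtypes.int64))
  return embedding_lookup_sparse(params, sp_ids, None, combiner="mean")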
|
Eric-Gaudiello/tensorflow_dev
|
tensorflow_home/tensorflow_venv/lib/python3.4/site-packages/tensorflow/python/ops/embedding_ops.py
|
Python
|
gpl-3.0
| 12,213
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# Copyright (c) 2013 Matt Hite <mhite@hotmail.com>
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_pool_member
short_description: Manages F5 BIG-IP LTM pool members
description:
- Manages F5 BIG-IP LTM pool members via iControl SOAP API.
version_added: 1.4
author:
- Matt Hite (@mhite)
- Tim Rupp (@caphrim007)
notes:
- Requires BIG-IP software version >= 11
- F5 developed module 'bigsuds' required (see http://devcentral.f5.com)
- Best run as a local_action in your playbook
- Supersedes bigip_pool for managing pool members
requirements:
- bigsuds
options:
state:
description:
- Pool member state.
required: True
default: present
choices:
- present
- absent
session_state:
description:
- Set new session availability status for pool member.
version_added: 2.0
choices:
- enabled
- disabled
monitor_state:
description:
- Set monitor availability status for pool member.
version_added: 2.0
choices:
- enabled
- disabled
pool:
description:
- Pool name. This pool must exist.
required: True
partition:
description:
- Partition
default: Common
host:
description:
- Pool member IP.
required: True
aliases:
- address
- name
port:
description:
- Pool member port.
required: True
connection_limit:
description:
- Pool member connection limit. Setting this to 0 disables the limit.
description:
description:
- Pool member description.
rate_limit:
description:
- Pool member rate limit (connections-per-second). Setting this to 0
disables the limit.
ratio:
description:
- Pool member ratio weight. Valid values range from 1 through 100.
New pool members -- unless overridden with this value -- default
to 1.
preserve_node:
description:
- When state is absent and the pool member is no longer referenced
in other pools, the default behavior removes the unused node
        object. Setting this to 'yes' disables this behavior.
default: no
choices:
- yes
- no
version_added: 2.1
extends_documentation_fragment: f5
'''
EXAMPLES = '''
- name: Add pool member
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: present
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
description: web server
connection_limit: 100
rate_limit: 50
ratio: 2
delegate_to: localhost
- name: Modify pool member ratio and description
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: present
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
ratio: 1
description: nginx server
delegate_to: localhost
- name: Remove pool member from pool
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: absent
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
delegate_to: localhost
# The BIG-IP GUI doesn't map directly to the API calls for "Pool ->
# Members -> State". The following states map to API monitor
# and session states.
#
# Enabled (all traffic allowed):
# monitor_state=enabled, session_state=enabled
# Disabled (only persistent or active connections allowed):
# monitor_state=enabled, session_state=disabled
# Forced offline (only active connections allowed):
# monitor_state=disabled, session_state=disabled
#
# See https://devcentral.f5.com/questions/icontrol-equivalent-call-for-b-node-down
- name: Force pool member offline
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: present
session_state: disabled
monitor_state: disabled
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
delegate_to: localhost
'''
try:
import bigsuds
HAS_BIGSUDS = True
except ImportError:
pass # Handled by f5_utils.bigsuds_found
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.f5_utils import bigip_api, bigsuds_found
HAS_DEVEL_IMPORTS = False
try:
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import fqdn_name
HAS_DEVEL_IMPORTS = True
except ImportError:
from ansible.module_utils.network.f5.common import fqdn_name
from ansible.module_utils.network.f5.common import f5_argument_spec
def pool_exists(api, pool):
# hack to determine if pool exists
result = False
try:
api.LocalLB.Pool.get_object_status(pool_names=[pool])
result = True
except bigsuds.OperationFailed as e:
if "was not found" in str(e):
result = False
else:
# genuine exception
raise
return result
def member_exists(api, pool, address, port):
# hack to determine if member exists
result = False
try:
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.get_member_object_status(pool_names=[pool],
members=[members])
result = True
except bigsuds.OperationFailed as e:
if "was not found" in str(e):
result = False
else:
# genuine exception
raise
return result
def delete_node_address(api, address):
result = False
try:
api.LocalLB.NodeAddressV2.delete_node_address(nodes=[address])
result = True
except bigsuds.OperationFailed as e:
if "is referenced by a member of pool" in str(e):
result = False
else:
# genuine exception
raise
return result
def remove_pool_member(api, pool, address, port):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.remove_member_v2(
pool_names=[pool],
members=[members]
)
def add_pool_member(api, pool, address, port):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.add_member_v2(
pool_names=[pool],
members=[members]
)
def get_connection_limit(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_connection_limit(
pool_names=[pool],
members=[members]
)[0][0]
return result
def set_connection_limit(api, pool, address, port, limit):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_connection_limit(
pool_names=[pool],
members=[members],
limits=[[limit]]
)
def get_description(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_description(
pool_names=[pool],
members=[members]
)[0][0]
return result
def set_description(api, pool, address, port, description):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_description(
pool_names=[pool],
members=[members],
descriptions=[[description]]
)
def get_rate_limit(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_rate_limit(
pool_names=[pool],
members=[members]
)[0][0]
return result
def set_rate_limit(api, pool, address, port, limit):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_rate_limit(
pool_names=[pool],
members=[members],
limits=[[limit]]
)
def get_ratio(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_ratio(
pool_names=[pool],
members=[members]
)[0][0]
return result
def set_ratio(api, pool, address, port, ratio):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_ratio(
pool_names=[pool],
members=[members],
ratios=[[ratio]]
)
def get_priority_group(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_priority(
pool_names=[pool],
members=[members]
)[0][0]
return result
def set_priority_group(api, pool, address, port, priority_group):
members = [{'address': address, 'port': port}]
api.LocalLB.Pool.set_member_priority(
pool_names=[pool],
members=[members],
priorities=[[priority_group]]
)
def set_member_session_enabled_state(api, pool, address, port, session_state):
members = [{'address': address, 'port': port}]
session_state = ["STATE_%s" % session_state.strip().upper()]
api.LocalLB.Pool.set_member_session_enabled_state(
pool_names=[pool],
members=[members],
session_states=[session_state]
)
def get_member_session_status(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_session_status(
pool_names=[pool],
members=[members]
)[0][0]
result = result.split("SESSION_STATUS_")[-1].lower()
return result
def set_member_monitor_state(api, pool, address, port, monitor_state):
members = [{'address': address, 'port': port}]
monitor_state = ["STATE_%s" % monitor_state.strip().upper()]
api.LocalLB.Pool.set_member_monitor_state(
pool_names=[pool],
members=[members],
monitor_states=[monitor_state]
)
def get_member_monitor_status(api, pool, address, port):
members = [{'address': address, 'port': port}]
result = api.LocalLB.Pool.get_member_monitor_status(
pool_names=[pool],
members=[members]
)[0][0]
result = result.split("MONITOR_STATUS_")[-1].lower()
return result
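# --- Illustrative usage sketch (not part of the original module) ---
# A hedged sketch of the GUI-state mapping described in the EXAMPLES block:
# "Forced offline" corresponds to disabling both the session state and the
# monitor state of a member. api, pool, address and port are assumed to
# come from bigip_api() and the module parameters, as in main() below.
def _example_force_member_offline(api, pool, address, port):
    set_member_session_enabled_state(api, pool, address, port, 'disabled')
    set_member_monitor_state(api, pool, address, port, 'disabled')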
def main():
result = {}
argument_spec = f5_argument_spec
meta_args = dict(
session_state=dict(type='str', choices=['enabled', 'disabled']),
monitor_state=dict(type='str', choices=['enabled', 'disabled']),
pool=dict(type='str', required=True),
host=dict(type='str', required=True, aliases=['address', 'name']),
port=dict(type='int', required=True),
connection_limit=dict(type='int'),
description=dict(type='str'),
rate_limit=dict(type='int'),
ratio=dict(type='int'),
preserve_node=dict(type='bool', default=False),
priority_group=dict(type='int')
)
argument_spec.update(meta_args)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True
)
if not bigsuds_found:
module.fail_json(msg="the python bigsuds module is required")
if module.params['validate_certs']:
import ssl
if not hasattr(ssl, 'SSLContext'):
module.fail_json(
msg='bigsuds does not support verifying certificates with python < 2.7.9. '
'Either update python or set validate_certs=False on the task')
server = module.params['server']
server_port = module.params['server_port']
user = module.params['user']
password = module.params['password']
state = module.params['state']
partition = module.params['partition']
validate_certs = module.params['validate_certs']
session_state = module.params['session_state']
monitor_state = module.params['monitor_state']
pool = fqdn_name(partition, module.params['pool'])
connection_limit = module.params['connection_limit']
description = module.params['description']
rate_limit = module.params['rate_limit']
ratio = module.params['ratio']
priority_group = module.params['priority_group']
host = module.params['host']
address = fqdn_name(partition, host)
port = module.params['port']
preserve_node = module.params['preserve_node']
if (host and port is None) or (port is not None and not host):
module.fail_json(msg="both host and port must be supplied")
if 0 > port or port > 65535:
module.fail_json(msg="valid ports must be in range 0 - 65535")
try:
api = bigip_api(server, user, password, validate_certs, port=server_port)
if not pool_exists(api, pool):
module.fail_json(msg="pool %s does not exist" % pool)
result = {'changed': False} # default
if state == 'absent':
if member_exists(api, pool, address, port):
if not module.check_mode:
remove_pool_member(api, pool, address, port)
if preserve_node:
result = {'changed': True}
else:
deleted = delete_node_address(api, address)
result = {'changed': True, 'deleted': deleted}
else:
result = {'changed': True}
elif state == 'present':
if not member_exists(api, pool, address, port):
if not module.check_mode:
add_pool_member(api, pool, address, port)
if connection_limit is not None:
set_connection_limit(api, pool, address, port, connection_limit)
if description is not None:
set_description(api, pool, address, port, description)
if rate_limit is not None:
set_rate_limit(api, pool, address, port, rate_limit)
if ratio is not None:
set_ratio(api, pool, address, port, ratio)
if session_state is not None:
set_member_session_enabled_state(api, pool, address, port, session_state)
if monitor_state is not None:
set_member_monitor_state(api, pool, address, port, monitor_state)
if priority_group is not None:
set_priority_group(api, pool, address, port, priority_group)
result = {'changed': True}
else:
# pool member exists -- potentially modify attributes
if connection_limit is not None and connection_limit != get_connection_limit(api, pool, address, port):
if not module.check_mode:
set_connection_limit(api, pool, address, port, connection_limit)
result = {'changed': True}
if description is not None and description != get_description(api, pool, address, port):
if not module.check_mode:
set_description(api, pool, address, port, description)
result = {'changed': True}
if rate_limit is not None and rate_limit != get_rate_limit(api, pool, address, port):
if not module.check_mode:
set_rate_limit(api, pool, address, port, rate_limit)
result = {'changed': True}
if ratio is not None and ratio != get_ratio(api, pool, address, port):
if not module.check_mode:
set_ratio(api, pool, address, port, ratio)
result = {'changed': True}
if session_state is not None:
session_status = get_member_session_status(api, pool, address, port)
if session_state == 'enabled' and session_status == 'forced_disabled':
if not module.check_mode:
set_member_session_enabled_state(api, pool, address, port, session_state)
result = {'changed': True}
elif session_state == 'disabled' and session_status != 'forced_disabled':
if not module.check_mode:
set_member_session_enabled_state(api, pool, address, port, session_state)
result = {'changed': True}
if monitor_state is not None:
monitor_status = get_member_monitor_status(api, pool, address, port)
if monitor_state == 'enabled' and monitor_status == 'forced_down':
if not module.check_mode:
set_member_monitor_state(api, pool, address, port, monitor_state)
result = {'changed': True}
elif monitor_state == 'disabled' and monitor_status != 'forced_down':
if not module.check_mode:
set_member_monitor_state(api, pool, address, port, monitor_state)
result = {'changed': True}
if priority_group is not None and priority_group != get_priority_group(api, pool, address, port):
if not module.check_mode:
set_priority_group(api, pool, address, port, priority_group)
result = {'changed': True}
except Exception as e:
module.fail_json(msg="received exception: %s" % e)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
photoninger/ansible
|
lib/ansible/modules/network/f5/bigip_pool_member.py
|
Python
|
gpl-3.0
| 17,653
|
#pylint: disable=invalid-name,no-init
from __future__ import (absolute_import, division, print_function)
import stresstesting
from mantid.simpleapi import *
######################################################################
# Common configuration
# Main data file /SNS/SEQ/IPTS-4783/data
DATA_FILE = "SEQ_11499_event.nxs"
# Vanadium file
VAN_FILE = "SEQ_van.nxs"
# Initial energy guess
E_GUESS = 50
# Energy bins: Emin, Estep, Emax
E_RANGE = "-10.0,0.2,45.0"
#######################################################################
def makeOutputName(ws_name, dohist, doproj):
md_ws_name = ws_name + '_md'
tag=""
if dohist:
tag += "h"
else:
tag += "e"
if doproj:
tag += "wp"
else:
tag += "np"
md_ws_name += "_" + tag
return md_ws_name
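# For example, makeOutputName('reduced', False, True) gives 'reduced_md_ewp', matching the
# naming of the SEQ_11499_md_*.nxs reference files used by the tests below.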
def execReduction(dohist, doproj):
# Set the facility
config['default.facility'] = "SNS"
# SPE workspace name
workspace_name = "reduced"
# Run the reduction
DgsReduction(SampleInputFile=DATA_FILE,
IncidentBeamNormalisation="ByCurrent",
OutputWorkspace=workspace_name,
IncidentEnergyGuess=E_GUESS,
EnergyTransferRange=E_RANGE,
SofPhiEIsDistribution=dohist,
DetectorVanadiumInputFile=VAN_FILE,
UseProcessedDetVan=True)
# Set the goniometer. Add a rotation angle fix as well.
SetGoniometer(Workspace=workspace_name, Axis0="CCR13VRot,0,1,0,1",
Axis1="49.73,0,1,0,1")
# Set the information for the UB matrix
SetUB(Workspace=workspace_name,
a=3.643, b=3.643, c=5.781, alpha=90, beta=90, gamma=120,
u='1,1,0', v='0,0,1')
# Create the MDEventWorkspace
md_output_ws = makeOutputName(workspace_name, dohist, doproj)
if not doproj:
ConvertToMD(InputWorkspace=workspace_name,
OutputWorkspace=md_output_ws,
QDimensions='Q3D', MinValues='-5,-5,-5,-10',
QConversionScales='HKL',
MaxValues='5,5,5,45', MaxRecursionDepth='1')
else:
ConvertToMD(InputWorkspace=workspace_name,
OutputWorkspace=md_output_ws,
QDimensions='Q3D', MinValues='-5,-5,-5,-10',
QConversionScales='HKL',
MaxValues='5,5,5,45', MaxRecursionDepth='1',
Uproj='1,1,0', Vproj='1,-1,0', Wproj='0,0,1')
# Remove SPE workspace
DeleteWorkspace(Workspace=workspace_name)
return md_output_ws
def validateMD(result,reference,tol=1.e-5,class_name='dummy',mismatchName=None):
"""Returns the name of the workspace & file to compare"""
    #self.disableChecking.append('SpectraMap')
    #self.disableChecking.append('Instrument')
valNames = [result,reference]
if reference not in mtd:
Load(Filename=reference,OutputWorkspace=valNames[1])
checker = AlgorithmManager.create("CompareMDWorkspaces")
checker.setLogging(True)
checker.setPropertyValue("Workspace1",result)
checker.setPropertyValue("Workspace2",valNames[1])
checker.setPropertyValue("Tolerance", str(tol))
checker.setPropertyValue("IgnoreBoxID", "1")
checker.setPropertyValue("CheckEvents", "1")
checker.execute()
if checker.getPropertyValue("Equals") != "1":
print(" Workspaces do not match, result: ",checker.getPropertyValue("Result"))
print(" Test {0} fails".format(class_name))
if mismatchName:
targetFilename = class_name+mismatchName+'-mismatch.nxs'
else:
targetFilename = class_name+'-mismatch.nxs'
SaveMD(InputWorkspace=valNames[0],Filename=targetFilename )
return False
else:
return True
class SNSConvertToMDNoHistNoProjTest(stresstesting.MantidStressTest):
truth_file = "SEQ_11499_md_enp.nxs"
output_ws=None
tolerance=0.0
gold_ws_name =''
def requiredMemoryMB(self):
""" Require about 2.5GB free """
return 2500
def requiredFiles(self):
files = [self.truth_file, DATA_FILE]
return files
def runTest(self):
self.output_ws = execReduction(False, False)
self.gold_ws_name = self.truth_file.split('.')[0] + "_golden"
LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name)
def validate(self):
self.tolerance = 1.0e-1
return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__)
class SNSConvertToMDHistNoProjTest(stresstesting.MantidStressTest):
truth_file = "SEQ_11499_md_hnp.nxs"
output_ws=None
tolerance=0.0
gold_ws_name =''
def requiredMemoryMB(self):
""" Require about 2.5GB free """
return 2500
def requiredFiles(self):
config.appendDataSearchDir("/home/builder/data/SystemTests/AnalysisTests/ReferenceResults/")
files = [self.truth_file, DATA_FILE]
return files
def runTest(self):
self.output_ws = execReduction(True, False)
self.gold_ws_name = self.truth_file.split('.')[0] + "_golden"
LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name)
def validate(self):
self.tolerance = 1.0e-1
return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__,self.gold_ws_name)
class SNSConvertToMDNoHistProjTest(stresstesting.MantidStressTest):
truth_file = "SEQ_11499_md_ewp.nxs"
output_ws=None
tolerance=0.0
gold_ws_name =''
def requiredMemoryMB(self):
""" Require about 2.5GB free """
return 2500
def requiredFiles(self):
files = [self.truth_file, DATA_FILE]
return files
def runTest(self):
self.output_ws = execReduction(False, True)
self.gold_ws_name = self.truth_file.split('.')[0] + "_golden"
LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name)
def validate(self):
self.tolerance = 1.0e-3
return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__,self.gold_ws_name)
#return (self.output_ws, self.gold_ws_name)
class SNSConvertToMDHistProjTest(stresstesting.MantidStressTest):
truth_file = "SEQ_11499_md_hwp.nxs"
output_ws=None
tolerance=0.0
gold_ws_name =''
def requiredMemoryMB(self):
""" Require about 2.5GB free """
return 2500
def requiredFiles(self):
files = [self.truth_file, DATA_FILE]
return files
def runTest(self):
self.output_ws = execReduction(True, True)
self.gold_ws_name = self.truth_file.split('.')[0] + "_golden"
LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name)
def validate(self):
self.tolerance = 1.0e-3
return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__,self.gold_ws_name)
#return (self.output_ws, self.gold_ws_name)
|
dymkowsk/mantid
|
Testing/SystemTests/tests/analysis/SNSConvertToMDTest.py
|
Python
|
gpl-3.0
| 6,953
|
"""
This module provides GOCDB2CSAgent code.
The agent is used to synchronize information between GOCDB and the DIRAC Configuration System (CS)
"""
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.LCG.GOCDBClient import GOCDBClient
from DIRAC.Core.Utilities.SitesDIRACGOCDBmapping import getDIRACGOCDictionary
from DIRAC.ConfigurationSystem.Client.Helpers.Path import cfgPath
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.ConfigurationSystem.Client.Config import gConfig
__RCSID__ = "$Id: $"
class GOCDB2CSAgent ( AgentModule ):
""" Class to retrieve information about service endpoints
from GOCDB and update configuration stored by CS
"""
def __init__( self, *args, **kwargs ):
""" c'tor
"""
super(GOCDB2CSAgent, self).__init__( *args, **kwargs )
self.GOCDBClient = None
self.csAPI = None
self.dryRun = False
def initialize( self ):
""" Run at the agent initialization (normally every 500 cycles)
"""
# client to connect to GOCDB
self.GOCDBClient = GOCDBClient()
self.dryRun = self.am_getOption( 'DryRun', self.dryRun )
# API needed to update configuration stored by CS
self.csAPI = CSAPI()
return self.csAPI.initialize()
def execute( self ):
"""
Execute GOCDB queries according to the function map
and user request (options in configuration).
"""
# __functionMap is at the end of the class definition
for option, functionCall in GOCDB2CSAgent.__functionMap.iteritems():
optionValue = self.am_getOption( option, True )
if optionValue:
result = functionCall( self )
if not result['OK']:
self.log.error( "%s() failed with message: %s" % ( functionCall.__name__, result['Message'] ) )
else:
self.log.info( "Successfully executed %s" % functionCall.__name__ )
return S_OK()
def updatePerfSONARConfiguration( self ):
"""
Get current status of perfSONAR endpoints from GOCDB
and update CS configuration accordingly.
"""
log = self.log.getSubLogger( 'updatePerfSONAREndpoints' )
log.debug( 'Begin function ...' )
# get endpoints
result = self.__getPerfSONAREndpoints()
if not result['OK']:
log.error( "__getPerfSONAREndpoints() failed with message: %s" % result['Message'] )
return S_ERROR( 'Unable to fetch perfSONAR endpoints from GOCDB.' )
endpointList = result['Value']
# add DIRAC site name
result = self.__addDIRACSiteName( endpointList )
if not result['OK']:
log.error( "__addDIRACSiteName() failed with message: %s" % result['Message'] )
return S_ERROR( 'Unable to extend the list with DIRAC site names.' )
extendedEndpointList = result['Value']
# prepare dictionary with new configuration
result = self.__preparePerfSONARConfiguration( extendedEndpointList )
if not result['OK']:
log.error( "__preparePerfSONARConfiguration() failed with message: %s" % result['Message'] )
return S_ERROR( 'Unable to prepare a new perfSONAR configuration.' )
finalConfiguration = result['Value']
# update configuration according to the final status of endpoints
self.__updateConfiguration( finalConfiguration )
log.debug( "Configuration updated succesfully" )
log.debug( 'End function.' )
return S_OK()
def __getPerfSONAREndpoints( self ):
"""
    Retrieve perfSONAR endpoint information directly from GOCDB.
:return: List of perfSONAR endpoints (dictionaries) as stored by GOCDB.
"""
log = self.log.getSubLogger( '__getPerfSONAREndpoints' )
log.debug( 'Begin function ...' )
    # get perfSONAR endpoints (latency and bandwidth) from GOCDB
endpointList = []
for endpointType in ['Latency', 'Bandwidth']:
result = self.GOCDBClient.getServiceEndpointInfo( 'service_type', 'net.perfSONAR.%s' % endpointType )
if not result['OK']:
log.error( "getServiceEndpointInfo() failed with message: %s" % result['Message'] )
return S_ERROR( 'Could not fetch %s endpoints from GOCDB' % endpointType.lower() )
log.debug( 'Number of %s endpoints: %s' % ( endpointType.lower(), len( result['Value'] ) ) )
endpointList.extend( result['Value'] )
log.debug( 'Number of perfSONAR endpoints: %s' % len( endpointList ) )
log.debug( 'End function.' )
return S_OK( endpointList )
def __preparePerfSONARConfiguration( self, endpointList ):
"""
Prepare a dictionary with a new CS configuration of perfSONAR endpoints.
:return: Dictionary where keys are configuration paths (options and sections)
and values are values of corresponding options
or None in case of a path pointing to a section.
"""
log = self.log.getSubLogger( '__preparePerfSONARConfiguration' )
log.debug( 'Begin function ...' )
# static elements of a path
rootPath = '/Resources/Sites'
extPath = 'Network'
baseOptionName = 'Enabled'
options = {baseOptionName: 'True', 'ServiceType': 'perfSONAR'}
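    # Illustration (assuming DIRAC site names such as 'LCG.Example.org'): the loop below
    # produces option paths of the form
    # /Resources/Sites/LCG/LCG.Example.org/Network/<hostname>/Enabled = 'True'
    # and .../ServiceType = 'perfSONAR'.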
# enable GOCDB endpoints in configuration
newConfiguration = {}
for endpoint in endpointList:
if endpoint['DIRACSITENAME'] is None:
continue
split = endpoint['DIRACSITENAME'].split( '.' )
path = cfgPath( rootPath, split[0], endpoint['DIRACSITENAME'], extPath, endpoint['HOSTNAME'] )
for name, defaultValue in options.iteritems():
newConfiguration[cfgPath(path, name)] = defaultValue
# get current configuration
currentConfiguration = {}
for option in options.iterkeys():
result = gConfig.getConfigurationTree( rootPath, extPath + '/', '/' + option )
if not result['OK']:
log.error( "getConfigurationTree() failed with message: %s" % result['Message'] )
return S_ERROR( 'Unable to fetch perfSONAR endpoints from CS.' )
currentConfiguration.update(result['Value'])
# disable endpoints that disappeared in GOCDB
removedElements = set( currentConfiguration ) - set( newConfiguration )
newElements = set( newConfiguration ) - set( currentConfiguration )
addedEndpoints = len( newElements )/len( options )
disabledEndpoints = 0
for path in removedElements:
if baseOptionName in path:
newConfiguration[path] = 'False'
if currentConfiguration[path] != 'False':
disabledEndpoints = disabledEndpoints + 1
# inform what will be changed
if addedEndpoints > 0:
self.log.info( "%s new perfSONAR endpoints will be added to the configuration" % addedEndpoints )
if disabledEndpoints > 0:
self.log.info( "%s old perfSONAR endpoints will be disable in the configuration" % disabledEndpoints )
if addedEndpoints == 0 and disabledEndpoints == 0:
self.log.info( "perfSONAR configuration is up-to-date" )
log.debug( 'End function.' )
return S_OK( newConfiguration )
def __addDIRACSiteName( self, inputList ):
"""
Extend given list of GOCDB endpoints with DIRAC site name, i.e.
add an entry "DIRACSITENAME" in dictionaries that describe endpoints.
    If a given site name cannot be found, "DIRACSITENAME" is set to 'None'.
:return: List of perfSONAR endpoints (dictionaries).
"""
log = self.log.getSubLogger( '__addDIRACSiteName' )
log.debug( 'Begin function ...' )
# get site name dictionary
result = getDIRACGOCDictionary()
if not result['OK']:
log.error( "getDIRACGOCDictionary() failed with message: %s" % result['Message'] )
return S_ERROR( 'Could not get site name dictionary' )
# reverse the dictionary (assume 1 to 1 relation)
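    # e.g. a hypothetical {'LCG.Example.org': 'EXAMPLE-SITE'} would become {'EXAMPLE-SITE': 'LCG.Example.org'}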
DIRACGOCDict = result['Value']
GOCDIRACDict = dict( zip( DIRACGOCDict.values(), DIRACGOCDict.keys() ) )
# add DIRAC site names
outputList = []
for entry in inputList:
try:
entry['DIRACSITENAME'] = GOCDIRACDict[entry['SITENAME']]
except KeyError:
self.log.warn( "No dictionary entry for %s. " % entry['SITENAME'] )
entry['DIRACSITENAME'] = None
outputList.append( entry )
log.debug( 'End function.' )
return S_OK( outputList )
def __updateConfiguration( self, setElements = None, delElements = None ):
"""
Update configuration stored by CS.
"""
if setElements is None:
setElements = {}
if delElements is None:
delElements = []
log = self.log.getSubLogger( '__updateConfiguration' )
log.debug( 'Begin function ...' )
# assure existence and proper value of a section or an option
for path, value in setElements.iteritems():
if value is None:
section = path
else:
split = path.rsplit( '/', 1 )
section = split[0]
try:
result = self.csAPI.createSection( section )
if not result['OK']:
log.error( "createSection() failed with message: %s" % result['Message'] )
except Exception as e:
log.error( "Exception in createSection(): %s" % repr( e ).replace( ',)', ')' ) )
if value is not None:
try:
result = self.csAPI.setOption( path, value )
if not result['OK']:
log.error( "setOption() failed with message: %s" % result['Message'] )
except Exception as e:
log.error( "Exception in setOption(): %s" % repr( e ).replace( ',)', ')' ) )
# delete elements in the configuration
for path in delElements:
result = self.csAPI.delOption( path )
if not result['OK']:
log.warn( "delOption() failed with message: %s" % result['Message'] )
result = self.csAPI.delSection( path )
if not result['OK']:
log.warn( "delSection() failed with message: %s" % result['Message'] )
if self.dryRun:
log.info( "Dry Run: CS won't be updated" )
self.csAPI.showDiff()
else:
# update configuration stored by CS
result = self.csAPI.commit()
if not result['OK']:
log.error( "commit() failed with message: %s" % result['Message'] )
return S_ERROR( "Could not commit changes to CS." )
else:
log.info("Committed changes to CS")
log.debug( 'End function.' )
return S_OK()
# define mapping between an agent option in the configuration and a function call
__functionMap = { 'UpdatePerfSONARS': updatePerfSONARConfiguration,
}
|
arrabito/DIRAC
|
ConfigurationSystem/Agent/GOCDB2CSAgent.py
|
Python
|
gpl-3.0
| 10,349
|
# Copyright (C) 2013 Matthew C. Zwier and Lillian T. Chong
#
# This file is part of WESTPA.
#
# WESTPA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WESTPA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WESTPA. If not, see <http://www.gnu.org/licenses/>.
__metaclass__ = type
import westpa
import itertools
def blocked_iter(blocksize, iterable, fillvalue = None):
# From the Python "itertools recipes" (grouper)
args = [iter(iterable)] * blocksize
return itertools.izip_longest(fillvalue=fillvalue, *args)
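# Illustration (not in the original source): blocked_iter(3, range(7)) yields
# (0, 1, 2), (3, 4, 5), (6, None, None) -- the classic "grouper" behaviour.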
class WESTPropagator:
def __init__(self, rc=None):
# For maximum flexibility, the basis states and initial states valid
        # at the point in the simulation when the propagator is used must be
# available in several routines, and it is inconvenient to pass them
# to every routine that needs them. A currently-reasonable-seeming solution
# is to store at least the basis states and initial states necessary for
# the current operation (propagation, etc). The set_basis_initial_states() function
# accomplishes this. They are stored as dictionaries of state_id -> state,
# so they can be looked up by ID without needing to store them all (and
# thus potentially send them all over the wire when only one of them is needed, e.g.)
self.basis_states = {}
self.initial_states = {}
self.rc = rc or westpa.rc
def prepare_iteration(self, n_iter, segments):
"""Perform any necessary per-iteration preparation. This is run by the work manager."""
pass
def finalize_iteration(self, n_iter, segments):
"""Perform any necessary post-iteration cleanup. This is run by the work manager."""
pass
# Specific functions required by the WEST framework
def get_pcoord(self, state):
'''Get the progress coordinate of the given basis or initial state.'''
raise NotImplementedError
def gen_istate(self, basis_state, initial_state):
'''Generate a new initial state from the given basis state.'''
raise NotImplementedError
def propagate(self, segments):
"""Propagate one or more segments, including any necessary per-iteration setup and teardown for this propagator."""
raise NotImplementedError
def clear_basis_initial_states(self):
self.basis_states = {}
self.initial_states = {}
def update_basis_initial_states(self, basis_states, initial_states):
self.basis_states.update({state.state_id: state for state in basis_states})
self.initial_states.update({state.state_id: state for state in initial_states})
|
nrego/westpa
|
src/west/propagators/__init__.py
|
Python
|
gpl-3.0
| 3,193
|
import os
import pynsive
import importlib
from operator import itemgetter
from .utilities.dict2List import dict2List
from .utilities.logger import logger
class PluginSet(object):
def __init__(self, plugin_location, enabled_plugins=None):
self.plugin_location = plugin_location
self.enabled_plugins = self.identify_plugins(enabled_plugins)
def identify_plugins(self, enabled_plugins):
if not os.path.exists(self.plugin_location):
return []
module_name = os.path.basename(self.plugin_location)
root_plugin_directory = os.path.join(self.plugin_location, '..')
plugin_manager = pynsive.PluginManager()
plugin_manager.plug_into(root_plugin_directory)
plugins = []
found_modules = pynsive.list_modules(module_name)
for found_module in found_modules:
module_filename, module_name = found_module.split('.')
if enabled_plugins is not None and module_name not in enabled_plugins:
                # Skip this plugin since it is not listed in the enabled plugins,
                # as long as some enabled plugins were specified; passing no
                # enabled-plugins list loads all discovered plugins.
continue
try:
module_obj = pynsive.import_module(found_module)
importlib.reload(module_obj)
plugin_class_obj = module_obj.message()
if 'priority' in dir(plugin_class_obj):
priority = plugin_class_obj.priority
else:
priority = 100
logger.info('[*] plugin {0} registered to receive messages with {1}'.format(module_name, plugin_class_obj.registration))
plugins.append(
{
'plugin_class': plugin_class_obj,
'registration': plugin_class_obj.registration,
'priority': priority
}
)
except Exception as e:
logger.exception('Received exception when loading {0} plugins\n{1}'.format(module_name, e))
plugin_manager.destroy()
return plugins
@property
def ordered_enabled_plugins(self):
return sorted(self.enabled_plugins, key=itemgetter('priority'), reverse=False)
def run_plugins(self, message, metadata=None):
'''compare the message to the plugin registrations.
        plugins register with a list of keys or values
        they want to match on.
this function compares that registration list
to the current message and sends the message to plugins
in order
'''
if not isinstance(message, dict):
raise TypeError('event is type {0}, should be a dict'.format(type(message)))
for plugin in self.ordered_enabled_plugins:
send = False
message_fields = [e for e in dict2List(message)]
# this is to make it so we can match on all fields
message_fields.append('*')
if isinstance(plugin['registration'], list):
if set(plugin['registration']).intersection(message_fields):
send = True
elif isinstance(plugin['registration'], str):
if plugin['registration'] in message_fields:
send = True
if send:
try:
(message, metadata) = self.send_message_to_plugin(plugin_class=plugin['plugin_class'], message=message, metadata=metadata)
except Exception as e:
logger.exception('Received exception in {0}: message: {1}\n{2}'.format(plugin['plugin_class'], message, e))
if message is None:
return (message, metadata)
return (message, metadata)
def send_message_to_plugin(self, plugin_class, message, metadata=None):
'''moving this logic to a separate function allows
different types of plugin_sets, such as alerts that might not care
about receiving metadata in its plugins
'''
return plugin_class.onMessage(message, metadata)
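# Illustrative plugin layout (a sketch based on the code above, not shipped with this
# module): a plugin module exposes a class named `message` (see identify_plugins) with a
# `registration` list of keys/values to match ('*' matches every event), an optional
# `priority` (lower runs earlier), and an onMessage(message, metadata) method that
# returns both back, possibly modified. The hypothetical class below shows the shape:
class ExampleTagPlugin(object):
    def __init__(self):
        self.registration = ['*']   # match all events
        self.priority = 50          # ordering among enabled plugins
    def onMessage(self, message, metadata):
        # hypothetical transformation: tag every matching event
        message.setdefault('tags', []).append('example')
        return (message, metadata)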
|
jeffbryner/MozDef
|
mozdef_util/mozdef_util/plugin_set.py
|
Python
|
mpl-2.0
| 4,225
|
# Copyright (C) 2015 Mozilla Contributors
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# As a special exception, the copyright holders of this code give you
# permission to combine this code with the software known as 'mozbuild',
# and to distribute those combinations without any restriction
# coming from the use of this file. (The General Public License
# restrictions do apply in other respects; for example, they cover
# modification of the file, and distribution when not combined with
# mozbuild.)
#
# If you modify this code, you may extend this exception to your
# version of the code, but you are not obliged to do so. If you
# do not wish to do so, delete this exception statement from your
# version.
from __future__ import absolute_import, print_function, unicode_literals
import mercurial.error as error
import mercurial.hg as hg
import mercurial.ui as hgui
import six
from .files import (
BaseFinder,
MercurialFile,
)
import mozpack.path as mozpath
# This isn't a complete implementation of BaseFile. But it is complete
# enough for moz.build reading.
class MercurialNativeFile(MercurialFile):
def __init__(self, data):
self.data = data
def read(self):
return self.data
class MercurialNativeRevisionFinder(BaseFinder):
def __init__(self, repo, rev='.', recognize_repo_paths=False):
"""Create a finder attached to a specific changeset.
Accepts a Mercurial localrepo and changectx instance.
"""
if isinstance(repo, six.string_types):
path = repo
repo = hg.repository(hgui.ui(), repo)
else:
path = repo.root
super(MercurialNativeRevisionFinder, self).__init__(base=repo.root)
self._repo = repo
self._rev = rev
self._root = mozpath.normpath(path)
self._recognize_repo_paths = recognize_repo_paths
def _find(self, pattern):
if self._recognize_repo_paths:
raise NotImplementedError('cannot use find with recognize_repo_path')
return self._find_helper(pattern, self._repo[self._rev], self._get)
def get(self, path):
if self._recognize_repo_paths:
if not path.startswith(self._root):
raise ValueError('lookups in recognize_repo_paths mode must be '
'prefixed with repo path: %s' % path)
path = path[len(self._root) + 1:]
return self._get(path)
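    # Illustration of get() above (hypothetical paths): with recognize_repo_paths=True and a
    # repository rooted at '/repo', get('/repo/mozpack/files.py') strips the root and looks up
    # 'mozpack/files.py' in the selected revision; without the flag, the repo-relative path is
    # passed straight through to _get().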
def _get(self, path):
if isinstance(path, six.text_type):
path = path.encode('utf-8', 'replace')
try:
fctx = self._repo.filectx(path, self._rev)
return MercurialNativeFile(fctx.data())
except error.LookupError:
return None
|
escapewindow/signingscript
|
src/signingscript/vendored/mozbuild/mozpack/hg.py
|
Python
|
mpl-2.0
| 3,409
|
import random
import math
def generate(data):
height_canvas = 400
data["params"]["height_canvas"] = height_canvas
base_triangle = 180
height_triangle = 240
xA = 40
yA = height_canvas - 80
xB = xA + base_triangle
yB = yA
xC = xB
yC = yB - height_triangle
xO = xA + (2/3)*base_triangle
yO = yB - (1/3)*height_triangle
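    # (xO, yO) is the centroid of triangle ABC, ((xA + xB + xC)/3, (yA + yB + yC)/3),
    # which for this right triangle reduces to xA + (2/3)*base and yB - (1/3)*height.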
data["params"]["xA"] = xA
data["params"]["yA"] = yA
data["params"]["xB"] = xB
data["params"]["yB"] = yB
data["params"]["xC"] = xC
data["params"]["yC"] = yC
data["params"]["xO"] = xO
data["params"]["yO"] = yO
base_rectangle = 80
height_rectangle = 280
xD = 300
yD = 200
data["params"]["xD"] = xD
data["params"]["yD"] = yD
data["params"]["base_rectangle"] = base_rectangle
data["params"]["height_rectangle"] = height_rectangle
return data
|
rbessick5/PrairieLearn
|
exampleCourse/questions/examplePLDrawingCentroid/server.py
|
Python
|
agpl-3.0
| 998
|
# DWC Network Server Emulator
# Copyright (C) 2014 polaris-
# Copyright (C) 2014 ToadKing
# Copyright (C) 2014 AdmiralCurtiss
# Copyright (C) 2014 msoucy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
import hashlib
import time
import other.utils as utils
def generate_secret_keys(filename="gslist.cfg"):
secret_key_list = {}
with open(filename) as key_file:
for line in key_file.readlines():
#name = line[:54].strip() # Probably won't do anything with the name for now.
id = line[54:54+19].strip()
key = line[54+19:].strip()
secret_key_list[id] = key
return secret_key_list
# GameSpy uses a slightly modified version of base64 which replaces +/= with []_
def base64_encode(input):
output = base64.b64encode(input).replace('+', '[').replace('/', ']').replace('=', '_')
return output
def base64_decode(input):
output = base64.b64decode(input.replace('[', '+').replace(']', '/').replace('_', '='))
return output
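# Round-trip illustration (assumption, Python 2 byte strings): base64_encode('abc') gives
# 'YWJj', and base64_decode(base64_encode(s)) == s, with '+', '/', '=' transposed to
# '[', ']', '_' in the encoded form.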
# Tetris DS overlay 10 @ 0216E9B8
def rc4_encrypt(_key, _data):
key = bytearray(_key)
data = bytearray(_data)
if len(key) == 0:
# This shouldn't happen but it apparently can on a rare occasion. key should always be set.
return
# Key-scheduling algorithm
S = range(0x100)
j = 0
for i in range(0x100):
# Get index to swap with
j = (j + S[i] + key[i % len(key)]) & 0xff
# Perform swap
S[i], S[j] = S[j], S[i]
# Pseudo-random generation algorithm + encryption
i = 0
j = 0
for x in range(len(data)):
i = (i + 1 + data[x]) & 0xff # Modified RC4? What's this data[x] doing here?
j = (j + S[i]) & 0xff
S[i], S[j] = S[j], S[i]
data[x] ^= S[(S[i] + S[j]) & 0xff]
return data
# Tetris DS overlay 10 @ 0216E9B8
# Used by the master server to send some data between the client and server.
# This seems to be what Luigi Auriemma calls "Gsmsalg".
def prepare_rc4_base64(_key, _data):
data = rc4_encrypt(_key, _data)
    if data is None:
data = bytearray()
data.append(0)
return base64.b64encode(buffer(data))
# get the login data from nas.nintendowifi.net/ac from an authtoken
def parse_authtoken(authtoken, db):
return db.get_nas_login(authtoken)
def login_profile_via_parsed_authtoken(authtoken_parsed, db):
console = 0
userid = authtoken_parsed['userid']
csnum = authtoken_parsed.get('csnum', '') # Wii: Serial number
cfc = authtoken_parsed.get('cfc', '') # Wii: Friend code
bssid = authtoken_parsed.get('bssid', '') # NDS: Wifi network's BSSID
devname = authtoken_parsed.get('devname', '') # NDS: Device name
birth = authtoken_parsed.get('birth', '') # NDS: User's birthday
# The Wii does not use passwd, so take another uniquely generated string as the password.
# if "passwd" in authtoken_parsed:
# password = authtoken_parsed['passwd']
# else:
# password = authtoken_parsed['gsbrcd']
# console = 1
if not "passwd" in authtoken_parsed:
console = 1
password = authtoken_parsed['gsbrcd']
gsbrcd = authtoken_parsed['gsbrcd']
gameid = gsbrcd[:4]
macadr = authtoken_parsed['macadr']
uniquenick = utils.base32_encode(int(userid)) + gsbrcd
email = uniquenick + "@nds" # The Wii also seems to use @nds.
if "csnum" in authtoken_parsed:
console = 1
if "cfc" in authtoken_parsed:
console = 1
valid_user = db.check_user_exists(userid, gsbrcd)
if valid_user == False:
profileid = db.create_user(userid, password, email, uniquenick, gsbrcd, console, csnum, cfc, bssid, devname, birth, gameid, macadr)
else:
profileid = db.perform_login(userid, password, gsbrcd)
return userid, profileid, gsbrcd, uniquenick
def generate_response(challenge, ac_challenge, secretkey, authtoken):
md5 = hashlib.md5()
md5.update(ac_challenge)
output = md5.hexdigest()
output += ' ' * 0x30
output += authtoken
output += secretkey
output += challenge
output += md5.hexdigest()
md5_2 = hashlib.md5()
md5_2.update(output)
return md5_2.hexdigest()
# The proof is practically the same thing as the response, except it has the challenge and the secret key swapped.
# Maybe combine the two functions later?
def generate_proof(challenge, ac_challenge, secretkey, authtoken):
md5 = hashlib.md5()
md5.update(ac_challenge)
output = md5.hexdigest()
output += ' ' * 0x30
output += authtoken
output += challenge
output += secretkey
output += md5.hexdigest()
md5_2 = hashlib.md5()
md5_2.update(output)
return md5_2.hexdigest()
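# A possible shared helper, as the comment above suggests (illustrative sketch only; the
# name _generate_login_digest is hypothetical and not part of the original code):
def _generate_login_digest(ac_challenge, authtoken, first, second):
    md5 = hashlib.md5()
    md5.update(ac_challenge)
    output = md5.hexdigest()
    output += ' ' * 0x30
    output += authtoken
    output += first
    output += second
    output += md5.hexdigest()
    md5_2 = hashlib.md5()
    md5_2.update(output)
    return md5_2.hexdigest()
# generate_response(c, ac, key, tok) == _generate_login_digest(ac, tok, key, c)
# generate_proof(c, ac, key, tok)    == _generate_login_digest(ac, tok, c, key)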
# Code: Tetris DS @ 02057A14
def get_friendcode_from_profileid(profileid, gameid):
friendcode = 0
# Combine the profileid and gameid into one buffer
buffer = [(profileid >> (8 * i)) & 0xff for i in range(4)]
buffer += [ord(c) for c in gameid]
crc = utils.calculate_crc8(buffer)
    # The bits above the low 32 hold the crc8 (masked to 7 bits) of the combined buffer.
    # The lower 32 bits of the friend code are the profileid.
friendcode = ((crc & 0x7f) << 32) | profileid
return friendcode
def get_profileid_from_friendcode(friendcode):
# Get the lower 32 bits as the profile id
profileid = friendcode & 0xffffffff
return profileid
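# Illustration (hypothetical values): since the profile id occupies the lower 32 bits,
# get_profileid_from_friendcode(get_friendcode_from_profileid(123456789, 'ADAE')) == 123456789.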
# Code from Luigi Auriemma's enctypex_decoder.c
# It's kind of sloppy in parts, but it works. Unless issues turn up, it probably won't change any further.
class EncTypeX:
def __init__(self):
return
def decrypt(self, key, validate, data):
if not key or not validate or not data:
return None
encxkey = bytearray([0] * 261)
data = self.init(encxkey, key, validate, data)
self.func6(encxkey, data, len(data))
return data
def encrypt(self, key, validate, data):
if not key or not validate or not data:
return None
# Convert data from strings to byte arrays before use or else it'll raise an error
key = bytearray(key)
validate = bytearray(validate)
# Add room for the header
tmp_len = 20
data = bytearray(tmp_len) + data
keylen = len(key)
vallen = len(validate)
rnd = ~int(time.time())
for i in range(tmp_len):
rnd = (rnd * 0x343FD) + 0x269EC3
data[i] = (rnd ^ key[i % keylen] ^ validate[i % vallen]) & 0xff
header_len = 7
data[0] = (header_len - 2) ^ 0xec
data[1] = 0x00
data[2] = 0x00
data[header_len - 1] = (tmp_len - header_len) ^ 0xea
header = data[:tmp_len] # The header of the data gets chopped off in init(), so save it
encxkey = bytearray([0] * 261)
data = self.init(encxkey, key, validate, data)
self.func6e(encxkey, data, len(data))
# Reappend header that we saved earlier before returning to make the complete buffer
return header + data
def init(self, encxkey, key, validate, data):
data_len = len(data)
if data_len < 1:
return None
header_len = (data[0] ^ 0xec) + 2
if data_len < header_len:
return None
data_start = (data[header_len - 1] ^ 0xea)
if data_len < (header_len + data_start):
return None
data = self.enctypex_funcx(encxkey, bytearray(key), bytearray(validate), data[header_len:], data_start)
return data[data_start:]
def enctypex_funcx(self, encxkey, key, validate, data, datalen):
keylen = len(key)
for i in range(datalen):
validate[(key[i % keylen] * i) & 7] ^= validate[i & 7] ^ data[i]
self.func4(encxkey, validate, 8)
return data
def func4(self, encxkey, id, idlen):
if idlen < 1:
return
for i in range(256):
encxkey[i] = i
n1 = 0
n2 = 0
for i in range(255,-1,-1):
t1, n1, n2 = self.func5(encxkey, i, id, idlen, n1, n2)
t2 = encxkey[i]
encxkey[i] = encxkey[t1]
encxkey[t1] = t2
encxkey[256] = encxkey[1]
encxkey[257] = encxkey[3]
encxkey[258] = encxkey[5]
encxkey[259] = encxkey[7]
encxkey[260] = encxkey[n1 & 0xff]
def func5(self, encxkey, cnt, id, idlen, n1, n2):
if cnt == 0:
return 0, n1, n2
mask = 1
doLoop = True
if cnt > 1:
while doLoop:
mask = (mask << 1) + 1
doLoop = mask < cnt
i = 0
tmp = 0
doLoop = True
while doLoop:
n1 = encxkey[n1 & 0xff] + id[n2]
n2 += 1
if n2 >= idlen:
n2 = 0
n1 += idlen
tmp = n1 & mask
i += 1
if i > 11:
tmp %= cnt
doLoop = tmp > cnt
return tmp, n1, n2
def func6(self, encxkey, data, data_len):
for i in range(data_len):
data[i] = self.func7(encxkey, data[i])
return len(data)
def func7(self, encxkey, d):
a = encxkey[256]
b = encxkey[257]
c = encxkey[a]
encxkey[256] = (a + 1) & 0xff
encxkey[257] = (b + c) & 0xff
a = encxkey[260]
b = encxkey[257]
b = encxkey[b]
c = encxkey[a]
encxkey[a] = b
a = encxkey[259]
b = encxkey[257]
a = encxkey[a]
encxkey[b] = a
a = encxkey[256]
b = encxkey[259]
a = encxkey[a]
encxkey[b] = a
a = encxkey[256]
encxkey[a] = c
b = encxkey[258]
a = encxkey[c]
c = encxkey[259]
b = (a + b) & 0xff
encxkey[258] = b
a = b
c = encxkey[c]
b = encxkey[257]
b = encxkey[b]
a = encxkey[a]
c = (b + c) & 0xff
b = encxkey[260]
b = encxkey[b]
c = (b + c) & 0xff
b = encxkey[c]
c = encxkey[256]
c = encxkey[c]
a = (a + c) & 0xff
c = encxkey[b]
b = encxkey[a]
encxkey[260] = d
c ^= b ^ d
encxkey[259] = c
return c
def func6e(self, encxkey, data, data_len):
for i in range(data_len):
data[i] = self.func7e(encxkey, data[i])
return len(data)
def func7e(self, encxkey, d):
a = encxkey[256]
b = encxkey[257]
c = encxkey[a]
encxkey[256] = (a + 1) & 0xff
encxkey[257] = (b + c) & 0xff
a = encxkey[260]
b = encxkey[257]
b = encxkey[b]
c = encxkey[a]
encxkey[a] = b
a = encxkey[259]
b = encxkey[257]
a = encxkey[a]
encxkey[b] = a
a = encxkey[256]
b = encxkey[259]
a = encxkey[a]
encxkey[b] = a
a = encxkey[256]
encxkey[a] = c
b = encxkey[258]
a = encxkey[c]
c = encxkey[259]
b = (a + b) & 0xff
encxkey[258] = b
a = b
c = encxkey[c]
b = encxkey[257]
b = encxkey[b]
a = encxkey[a]
c = (b + c) & 0xff
b = encxkey[260]
b = encxkey[b]
c = (b + c) & 0xff
b = encxkey[c]
c = encxkey[256]
c = encxkey[c]
a = (a + c) & 0xff
c = encxkey[b]
b = encxkey[a]
c ^= b ^ d
encxkey[260] = c
encxkey[259] = d
return c
|
bmezali/dwc_network_server_emulator
|
gamespy/gs_utility.py
|
Python
|
agpl-3.0
| 12,197
|
from django.db import models
from geodata.models import Country
class UnescoIndicator(models.Model):
id = models.CharField(max_length=50, primary_key=True)
description = models.TextField(null=True, blank=True)
friendly_label = models.CharField(max_length=255, null=True, blank=True)
def __unicode__(self):
return self.id
class UnescoIndicatorData(models.Model):
unesco_indicator = models.ForeignKey(UnescoIndicator)
country = models.ForeignKey(Country, null=True)
value = models.FloatField(null=True, blank=True)
type_value = models.IntegerField(blank=True, null=True)
website = models.CharField(max_length=255, null=True, blank=True)
created = models.DateTimeField(auto_now=True)
updated = models.DateTimeField(auto_now_add=True)
class Meta:
verbose_name_plural = "Unesco indicator data"
def __unicode__(self):
return self.unesco_indicator.id
|
tokatikato/OIPA
|
OIPA/indicator_unesco/models.py
|
Python
|
agpl-3.0
| 926
|
{
'name': 'Multiuser To-Do',
'description': 'Extend To-Do Tasks for multiuser',
'author': 'Daniel Reis',
'depends': ['todo_app'],
'data': ['todo_view.xml', 'security/todo_access_rules.xml'],
'demo': ['todo.task.csv', 'todo_data.xml'],
}
|
raycarnes/todo_app
|
todo_user/__openerp__.py
|
Python
|
agpl-3.0
| 261
|
# -*- coding: utf-8 -*-
# Copyright 2017 Lara Baggio - Link IT srl
# (<http://www.linkgroup.it/>)
# Copyright 2014-2017 Lorenzo Battistini - Agile Business Group
# (<http://www.agilebg.com>)
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
from . import models
from . import wizard
|
linkitspa/l10n-italy
|
l10n_it_vat_registries_cash_basis/__init__.py
|
Python
|
agpl-3.0
| 298
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform
class PyScipy(PythonPackage):
"""SciPy (pronounced "Sigh Pie") is a Scientific Library for Python.
It provides many user-friendly and efficient numerical routines such
as routines for numerical integration and optimization."""
homepage = "https://www.scipy.org/"
pypi = "scipy/scipy-1.5.4.tar.gz"
git = "https://github.com/scipy/scipy.git"
maintainers = ['adamjstewart']
version('master', branch='master')
version('1.7.3', sha256='ab5875facfdef77e0a47d5fd39ea178b58e60e454a4c85aa1e52fcb80db7babf')
version('1.7.2', sha256='fa2dbabaaecdb502641b0b3c00dec05fb475ae48655c66da16c9ed24eda1e711')
version('1.7.1', sha256='6b47d5fa7ea651054362561a28b1ccc8da9368a39514c1bbf6c0977a1c376764')
version('1.7.0', sha256='998c5e6ea649489302de2c0bc026ed34284f531df89d2bdc8df3a0d44d165739')
version('1.6.3', sha256='a75b014d3294fce26852a9d04ea27b5671d86736beb34acdfc05859246260707')
version('1.6.2', sha256='e9da33e21c9bc1b92c20b5328adb13e5f193b924c9b969cd700c8908f315aa59')
version('1.6.1', sha256='c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11')
version('1.6.0', sha256='cb6dc9f82dfd95f6b9032a8d7ea70efeeb15d5b5fd6ed4e8537bb3c673580566')
version('1.5.4', sha256='4a453d5e5689de62e5d38edf40af3f17560bfd63c9c5bd228c18c1f99afa155b')
version('1.5.3', sha256='ddae76784574cc4c172f3d5edd7308be16078dd3b977e8746860c76c195fa707')
version('1.5.2', sha256='066c513d90eb3fd7567a9e150828d39111ebd88d3e924cdfc9f8ce19ab6f90c9')
version('1.5.1', sha256='039572f0ca9578a466683558c5bf1e65d442860ec6e13307d528749cfe6d07b8')
version('1.5.0', sha256='4ff72877d19b295ee7f7727615ea8238f2d59159df0bdd98f91754be4a2767f0')
version('1.4.1', sha256='dee1bbf3a6c8f73b6b218cb28eed8dd13347ea2f87d572ce19b289d6fd3fbc59')
version('1.4.0', sha256='31f7cfa93b01507c935c12b535e24812594002a02a56803d7cd063e9920d25e8')
version('1.3.3', sha256='64bf4e8ae0db2d42b58477817f648d81e77f0b381d0ea4427385bba3f959380a')
version('1.3.2', sha256='a03939b431994289f39373c57bbe452974a7da724ae7f9620a1beee575434da4')
version('1.3.1', sha256='2643cfb46d97b7797d1dbdb6f3c23fe3402904e3c90e6facfe6a9b98d808c1b5')
version('1.3.0', sha256='c3bb4bd2aca82fb498247deeac12265921fe231502a6bc6edea3ee7fe6c40a7a')
version('1.2.3', sha256='ecbe6413ca90b8e19f8475bfa303ac001e81b04ec600d17fa7f816271f7cca57')
version('1.2.2', sha256='a4331e0b8dab1ff75d2c67b5158a8bb9a83c799d7140094dda936d876c7cfbb1')
version('1.2.1', sha256='e085d1babcb419bbe58e2e805ac61924dac4ca45a07c9fa081144739e500aa3c')
version('1.1.0', sha256='878352408424dffaa695ffedf2f9f92844e116686923ed9aa8626fc30d32cfd1')
version('1.0.0', sha256='87ea1f11a0e9ec08c264dc64551d501fa307289460705f6fccd84cbfc7926d10')
version('0.19.1', sha256='a19a2ca7a7336495ec180adeaa0dfdcf41e96dbbee90d51c3ed828ba570884e6')
version('0.18.1', sha256='8ab6e9c808bf2fb3e8576cd8cf07226d9cdc18b012c06d9708429a821ac6634e')
version('0.17.0', sha256='f600b755fb69437d0f70361f9e560ab4d304b1b66987ed5a28bdd9dd7793e089')
version('0.15.1', sha256='a212cbc3b79e9a563aa45fc5c517b3499198bd7eb7e7be1e047568a5f48c259a')
version('0.15.0', sha256='0c74e31e08acc8bf9b6ceb9bced73df2ae0cc76003e0366350bc7b26292bf8b1')
depends_on('python@2.6:2.8,3.2:', when='@:0.17', type=('build', 'link', 'run'))
depends_on('python@2.7:2.8,3.4:', when='@0.18:1.2', type=('build', 'link', 'run'))
depends_on('python@3.5:', when='@1.3:1.4', type=('build', 'link', 'run'))
depends_on('python@3.6:', when='@1.5.0:1.5', type=('build', 'link', 'run'))
depends_on('python@3.7:', when='@1.6:1.6.1', type=('build', 'link', 'run'))
depends_on('python@3.7:3.9', when='@1.6.2:1.7.1', type=('build', 'link', 'run'))
depends_on('python@3.7:3.10', when='@1.7.2:', type=('build', 'link', 'run'))
depends_on('py-setuptools', when='@:1.5', type='build')
depends_on('py-setuptools@:51.0.0', when='@1.6', type='build')
depends_on('py-setuptools@:57', when='@1.7:', type='build')
depends_on('py-pybind11@2.2.4:', when='@1.4.0', type=('build', 'link'))
depends_on('py-pybind11@2.4.0:', when='@1.4.1:1.4', type=('build', 'link'))
depends_on('py-pybind11@2.4.3:', when='@1.5:1.6.1', type=('build', 'link'))
depends_on('py-pybind11@2.4.3:2.6', when='@1.6.2:1.7.1', type=('build', 'link'))
depends_on('py-pybind11@2.4.3:2.7', when='@1.7.2:', type=('build', 'link'))
depends_on('py-numpy@1.5.1:+blas+lapack', when='@:0.15', type=('build', 'link', 'run'))
depends_on('py-numpy@1.6.2:+blas+lapack', when='@0.16:0.17', type=('build', 'link', 'run'))
depends_on('py-numpy@1.7.1:+blas+lapack', when='@0.18.0:0.18', type=('build', 'link', 'run'))
depends_on('py-numpy@1.8.2:+blas+lapack', when='@0.19:1.2', type=('build', 'link', 'run'))
depends_on('py-numpy@1.13.3:+blas+lapack', when='@1.3:1.4', type=('build', 'link', 'run'))
depends_on('py-numpy@1.14.5:+blas+lapack', when='@1.5.0:1.5', type=('build', 'link', 'run'))
depends_on('py-numpy@1.16.5:+blas+lapack', when='@1.6:1.6.1', type=('build', 'link', 'run'))
depends_on('py-numpy@1.16.5:1.22+blas+lapack', when='@1.6.2:', type=('build', 'link', 'run'))
depends_on('py-cython@0.29.18:2', when='@1.7:', type='build')
depends_on('py-pythran@0.9.11', when='@1.7.0:1.7.1', type=('build', 'link'))
depends_on('py-pythran@0.9.12:0.9', when='@1.7.2:', type=('build', 'link'))
depends_on('py-pytest', type='test')
# NOTE: scipy picks up Blas/Lapack from numpy, see
# http://www.scipy.org/scipylib/building/linux.html#step-4-build-numpy-1-5-0
depends_on('blas')
depends_on('lapack')
# https://github.com/scipy/scipy/issues/12860
patch('https://git.sagemath.org/sage.git/plain/build/pkgs/scipy/patches/extern_decls.patch?id=711fe05025795e44b84233e065d240859ccae5bd',
sha256='5433f60831cb554101520a8f8871ac5a32c95f7a971ccd68b69049535b106780', when='@1.2:1.5.3')
patch('scipy-clang.patch', when='@1.5.0:1.6.3 %clang')
def setup_build_environment(self, env):
# https://github.com/scipy/scipy/issues/9080
env.set('F90', spack_fc)
# https://github.com/scipy/scipy/issues/11611
if self.spec.satisfies('@:1.4 %gcc@10:'):
env.set('FFLAGS', '-fallow-argument-mismatch')
# Kluge to get the gfortran linker to work correctly on Big
# Sur, at least until a gcc release > 10.2 is out with a fix.
# (There is a fix in their development tree.)
if platform.mac_ver()[0][0:2] == '11':
env.set('MACOSX_DEPLOYMENT_TARGET', '10.15')
def build_args(self, spec, prefix):
args = []
if spec.satisfies('%fj'):
args.extend(['config_fc', '--fcompiler=fujitsu'])
# Build in parallel
# Known problems with Python 3.5+
# https://github.com/spack/spack/issues/7927
# https://github.com/scipy/scipy/issues/7112
if not spec.satisfies('^python@3.5:'):
args.extend(['-j', str(make_jobs)])
return args
@run_after('install')
@on_package_attributes(run_tests=True)
def install_test(self):
with working_dir('spack-test', create=True):
python('-c', 'import scipy; scipy.test("full", verbose=2)')
|
LLNL/spack
|
var/spack/repos/builtin/packages/py-scipy/package.py
|
Python
|
lgpl-2.1
| 7,478
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RMagrittr(RPackage):
"""Provides a mechanism for chaining commands with a new forward-pipe
operator, %>%. This operator will forward a value, or the result of an
expression, into the next function call/expression. There is flexible
support for the type of right-hand side expressions. For more information,
see package vignette."""
homepage = "https://cran.r-project.org/web/packages/magrittr/index.html"
url = "https://cran.r-project.org/src/contrib/magrittr_1.5.tar.gz"
version('1.5', 'e74ab7329f2b9833f0c3c1216f86d65a')
|
TheTimmy/spack
|
var/spack/repos/builtin/packages/r-magrittr/package.py
|
Python
|
lgpl-2.1
| 1,824
|
# -*- coding: utf-8 -*-
#
# Detective.io documentation build configuration file, created by
# sphinx-quickstart on Wed Jul 2 12:38:10 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('../app'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.httpdomain'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Detective.io'
copyright = u'2014, Journalism++ SAS • Software under LGPLv3'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.12'
# The full version, including alpha/beta/rc tags.
release = '1.12.13 Gorilla'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# At the top.
#
# Activate the theme.
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
# Bootswatch (http://bootswatch.com/) theme.
#
# Options are nothing with "" (default) or the name of a valid theme
# such as "amelia" or "cosmo".
'bootswatch_theme': "lumen",
# Render the next and previous page links in navbar. (Default: true)
'navbar_sidebarrel': False,
# Render the current pages TOC in the navbar. (Default: true)
'navbar_pagenav': False,
# A list of tuples containing pages or urls to link to.
# Valid tuples should be in the following forms:
# (name, page) # a link to a page
# (name, "/aa/bb", 1) # a link to an arbitrary relative url
# (name, "http://example.com", True) # arbitrary absolute url
# Note the "1" or "True" value above as the third argument to indicate
# an arbitrary url.
'navbar_links': [
("Source", "http://github.com/jplusplus/detective.io", True),
],
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': False,
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 2,
# Choose Bootstrap version.
# Values: "3" (default) or "2" (in quotes)
'bootstrap_version': "3",
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Detectiveiodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Detectiveio.tex', u'Detective.io Documentation',
u'Journalism++ SAS', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'detectiveio', u'Detective.io Documentation',
[u'Journalism++ SAS'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Detectiveio', u'Detective.io Documentation',
u'Journalism++ SAS', 'Detectiveio', 'Detective.io is a tool for collaborative network analysis. It lets you make powerful queries to mine your data. Simply describe your field of study and detective.io builds the input interface as well as a state-of-the-art front-end.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
carlvlewis/detective.io
|
docs/conf.py
|
Python
|
lgpl-3.0
| 9,968
|
"""Test Home Assistant pressure utility functions."""
import pytest
from homeassistant.const import (
PRESSURE_CBAR,
PRESSURE_HPA,
PRESSURE_INHG,
PRESSURE_KPA,
PRESSURE_MBAR,
PRESSURE_PA,
PRESSURE_PSI,
)
import homeassistant.util.pressure as pressure_util
INVALID_SYMBOL = "bob"
VALID_SYMBOL = PRESSURE_PA
def test_convert_same_unit():
"""Test conversion from any unit to same unit."""
assert pressure_util.convert(2, PRESSURE_PA, PRESSURE_PA) == 2
assert pressure_util.convert(3, PRESSURE_HPA, PRESSURE_HPA) == 3
assert pressure_util.convert(4, PRESSURE_MBAR, PRESSURE_MBAR) == 4
assert pressure_util.convert(5, PRESSURE_INHG, PRESSURE_INHG) == 5
assert pressure_util.convert(6, PRESSURE_KPA, PRESSURE_KPA) == 6
assert pressure_util.convert(7, PRESSURE_CBAR, PRESSURE_CBAR) == 7
def test_convert_invalid_unit():
"""Test exception is thrown for invalid units."""
with pytest.raises(ValueError):
pressure_util.convert(5, INVALID_SYMBOL, VALID_SYMBOL)
with pytest.raises(ValueError):
pressure_util.convert(5, VALID_SYMBOL, INVALID_SYMBOL)
def test_convert_nonnumeric_value():
"""Test exception is thrown for nonnumeric type."""
with pytest.raises(TypeError):
pressure_util.convert("a", PRESSURE_HPA, PRESSURE_INHG)
def test_convert_from_hpascals():
"""Test conversion from hPA to other units."""
hpascals = 1000
assert pressure_util.convert(hpascals, PRESSURE_HPA, PRESSURE_PSI) == pytest.approx(
14.5037743897
)
assert pressure_util.convert(
hpascals, PRESSURE_HPA, PRESSURE_INHG
) == pytest.approx(29.5299801647)
assert pressure_util.convert(hpascals, PRESSURE_HPA, PRESSURE_PA) == pytest.approx(
100000
)
assert pressure_util.convert(hpascals, PRESSURE_HPA, PRESSURE_KPA) == pytest.approx(
100
)
assert pressure_util.convert(
hpascals, PRESSURE_HPA, PRESSURE_MBAR
) == pytest.approx(1000)
assert pressure_util.convert(
hpascals, PRESSURE_HPA, PRESSURE_CBAR
) == pytest.approx(100)
def test_convert_from_kpascals():
"""Test conversion from hPA to other units."""
kpascals = 100
assert pressure_util.convert(kpascals, PRESSURE_KPA, PRESSURE_PSI) == pytest.approx(
14.5037743897
)
assert pressure_util.convert(
kpascals, PRESSURE_KPA, PRESSURE_INHG
) == pytest.approx(29.5299801647)
assert pressure_util.convert(kpascals, PRESSURE_KPA, PRESSURE_PA) == pytest.approx(
100000
)
assert pressure_util.convert(kpascals, PRESSURE_KPA, PRESSURE_HPA) == pytest.approx(
1000
)
assert pressure_util.convert(
kpascals, PRESSURE_KPA, PRESSURE_MBAR
) == pytest.approx(1000)
assert pressure_util.convert(
kpascals, PRESSURE_KPA, PRESSURE_CBAR
) == pytest.approx(100)
def test_convert_from_inhg():
"""Test conversion from inHg to other units."""
inhg = 30
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_PSI) == pytest.approx(
14.7346266155
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_KPA) == pytest.approx(
101.59167
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_HPA) == pytest.approx(
1015.9167
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_PA) == pytest.approx(
101591.67
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_MBAR) == pytest.approx(
1015.9167
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_CBAR) == pytest.approx(
101.59167
)
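# ---------------------------------------------------------------------------
# Illustrative sketch, not part of homeassistant.util.pressure: one plausible
# way a convert() helper like the one under test could be built, assuming a
# lookup table of to-pascal factors. The names below are hypothetical; only
# the unit factors themselves are standard definitions.
_TO_PA_SKETCH = {
    "Pa": 1.0,
    "hPa": 100.0,
    "kPa": 1000.0,
    "mbar": 100.0,
    "cbar": 1000.0,
    "inHg": 3386.389,
    "psi": 6894.757,
}


def _convert_pressure_sketch(value, from_unit, to_unit):
    """Convert a numeric pressure value between units by going through Pa."""
    if from_unit not in _TO_PA_SKETCH or to_unit not in _TO_PA_SKETCH:
        raise ValueError("unknown pressure unit")
    if not isinstance(value, (int, float)):
        raise TypeError("pressure value must be numeric")
    return value * _TO_PA_SKETCH[from_unit] / _TO_PA_SKETCH[to_unit]


def test_convert_sketch_matches_reference_values():
    """The sketch reproduces the reference values used in the tests above."""
    assert _convert_pressure_sketch(1000, "hPa", "psi") == pytest.approx(14.5037743897)
    assert _convert_pressure_sketch(30, "inHg", "kPa") == pytest.approx(101.59167)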
|
jawilson/home-assistant
|
tests/util/test_pressure.py
|
Python
|
apache-2.0
| 3,635
|
# coding: utf-8
from __future__ import unicode_literals
from contextlib import contextmanager
from cryptography.hazmat.backends import default_backend
from datetime import datetime, timedelta
import json
from mock import Mock, mock_open, patch, sentinel
import pytest
import random
import string
from boxsdk.auth.jwt_auth import JWTAuth
from boxsdk.config import API
from boxsdk.object.user import User
from boxsdk.util.compat import total_seconds
@pytest.fixture(params=[16, 32, 128])
def jti_length(request):
return request.param
@pytest.fixture(params=('RS256', 'RS512'))
def jwt_algorithm(request):
return request.param
@pytest.fixture(params=(None, b'strong_password'))
def rsa_passphrase(request):
return request.param
@pytest.fixture(scope='module')
def successful_token_response(successful_token_mock, successful_token_json_response):
# pylint:disable=redefined-outer-name
response = successful_token_json_response.copy()
del response['refresh_token']
successful_token_mock.json = Mock(return_value=response)
successful_token_mock.ok = True
successful_token_mock.content = json.dumps(response)
successful_token_mock.status_code = 200
return successful_token_mock
@contextmanager
def jwt_auth_init_mocks(
mock_network_layer,
successful_token_response,
jwt_algorithm,
rsa_passphrase,
enterprise_token=None,
):
# pylint:disable=redefined-outer-name
fake_client_id = 'fake_client_id'
fake_client_secret = 'fake_client_secret'
assertion = Mock()
data = {
'grant_type': JWTAuth._GRANT_TYPE, # pylint:disable=protected-access
'client_id': fake_client_id,
'client_secret': fake_client_secret,
'assertion': assertion,
'box_device_id': '0',
'box_device_name': 'my_awesome_device',
}
mock_network_layer.request.return_value = successful_token_response
key_file = mock_open()
with patch('boxsdk.auth.jwt_auth.open', key_file, create=True) as jwt_auth_open:
with patch('cryptography.hazmat.primitives.serialization.load_pem_private_key') as load_pem_private_key:
oauth = JWTAuth(
client_id=fake_client_id,
client_secret=fake_client_secret,
enterprise_id=enterprise_token,
rsa_private_key_file_sys_path=sentinel.rsa_path,
rsa_private_key_passphrase=rsa_passphrase,
network_layer=mock_network_layer,
box_device_name='my_awesome_device',
jwt_algorithm=jwt_algorithm,
)
jwt_auth_open.assert_called_once_with(sentinel.rsa_path)
key_file.return_value.read.assert_called_once_with()
load_pem_private_key.assert_called_once_with(
key_file.return_value.read.return_value,
password=rsa_passphrase,
backend=default_backend(),
)
yield oauth, assertion, fake_client_id, load_pem_private_key.return_value
mock_network_layer.request.assert_called_once_with(
'POST',
'{0}/token'.format(API.OAUTH2_API_URL),
data=data,
headers={'content-type': 'application/x-www-form-urlencoded'},
access_token=None,
)
assert oauth.access_token == successful_token_response.json()['access_token']
@contextmanager
def jwt_auth_auth_mocks(jti_length, jwt_algorithm, sub, sub_type, oauth, assertion, client_id, secret):
# pylint:disable=redefined-outer-name
with patch('jwt.encode') as jwt_encode:
with patch('boxsdk.auth.jwt_auth.datetime') as mock_datetime:
with patch('boxsdk.auth.jwt_auth.random.SystemRandom') as mock_system_random:
jwt_encode.return_value = assertion
mock_datetime.utcnow.return_value = datetime(2015, 7, 6, 12, 1, 2)
mock_datetime.return_value = datetime(1970, 1, 1)
now_plus_30 = mock_datetime.utcnow.return_value + timedelta(seconds=30)
exp = int(total_seconds(now_plus_30 - datetime(1970, 1, 1)))
system_random = mock_system_random.return_value
system_random.randint.return_value = jti_length
random_choices = [random.random() for _ in range(jti_length)]
system_random.random.side_effect = random_choices
ascii_alphabet = string.ascii_letters + string.digits
ascii_len = len(ascii_alphabet)
jti = ''.join(ascii_alphabet[int(r * ascii_len)] for r in random_choices)
yield oauth
system_random.randint.assert_called_once_with(16, 128)
assert len(system_random.random.mock_calls) == jti_length
jwt_encode.assert_called_once_with({
'iss': client_id,
'sub': sub,
'box_sub_type': sub_type,
'aud': 'https://api.box.com/oauth2/token',
'jti': jti,
'exp': exp,
}, secret, algorithm=jwt_algorithm)
def test_authenticate_app_user_sends_post_request_with_correct_params(
mock_network_layer,
successful_token_response,
jti_length,
jwt_algorithm,
rsa_passphrase,
):
# pylint:disable=redefined-outer-name
fake_user_id = 'fake_user_id'
with jwt_auth_init_mocks(mock_network_layer, successful_token_response, jwt_algorithm, rsa_passphrase) as params:
with jwt_auth_auth_mocks(jti_length, jwt_algorithm, fake_user_id, 'user', *params) as oauth:
oauth.authenticate_app_user(User(None, fake_user_id))
def test_authenticate_instance_sends_post_request_with_correct_params(
mock_network_layer,
successful_token_response,
jti_length,
jwt_algorithm,
rsa_passphrase,
):
# pylint:disable=redefined-outer-name
enterprise_token = 'fake_enterprise_token'
with jwt_auth_init_mocks(
mock_network_layer,
successful_token_response,
jwt_algorithm,
rsa_passphrase,
enterprise_token,
) as params:
with jwt_auth_auth_mocks(jti_length, jwt_algorithm, enterprise_token, 'enterprise', *params) as oauth:
oauth.authenticate_instance()
def test_refresh_app_user_sends_post_request_with_correct_params(
mock_network_layer,
successful_token_response,
jti_length,
jwt_algorithm,
rsa_passphrase,
):
# pylint:disable=redefined-outer-name
fake_user_id = 'fake_user_id'
with jwt_auth_init_mocks(mock_network_layer, successful_token_response, jwt_algorithm, rsa_passphrase) as params:
with jwt_auth_auth_mocks(jti_length, jwt_algorithm, fake_user_id, 'user', *params) as oauth:
oauth._user_id = fake_user_id # pylint:disable=protected-access
oauth.refresh(None)
def test_refresh_instance_sends_post_request_with_correct_params(
mock_network_layer,
successful_token_response,
jti_length,
jwt_algorithm,
rsa_passphrase,
):
# pylint:disable=redefined-outer-name
enterprise_token = 'fake_enterprise_token'
with jwt_auth_init_mocks(
mock_network_layer,
successful_token_response,
jwt_algorithm,
rsa_passphrase,
enterprise_token,
) as params:
with jwt_auth_auth_mocks(jti_length, jwt_algorithm, enterprise_token, 'enterprise', *params) as oauth:
oauth.refresh(None)
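# ---------------------------------------------------------------------------
# Illustrative sketch, not part of boxsdk: roughly the unsigned claim set that
# the mocks above expect JWTAuth to hand to jwt.encode(). The helper name is
# hypothetical and the signing step is intentionally left out.
import random as _random
import string as _string
from datetime import datetime as _datetime, timedelta as _timedelta


def _build_jwt_claims_sketch(client_id, sub, sub_type):
    """Build the claim dict whose keys the assertions above check for."""
    system_random = _random.SystemRandom()
    alphabet = _string.ascii_letters + _string.digits
    jti_length = system_random.randint(16, 128)
    jti = ''.join(system_random.choice(alphabet) for _ in range(jti_length))
    expires_at = _datetime.utcnow() + _timedelta(seconds=30)
    return {
        'iss': client_id,
        'sub': sub,
        'box_sub_type': sub_type,  # 'user' or 'enterprise'
        'aud': 'https://api.box.com/oauth2/token',
        'jti': jti,
        'exp': int((expires_at - _datetime(1970, 1, 1)).total_seconds()),
    }

# Example: _build_jwt_claims_sketch('fake_client_id', 'fake_user_id', 'user')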
|
sanketdjain/box-python-sdk
|
test/unit/auth/test_jwt_auth.py
|
Python
|
apache-2.0
| 7,519
|
#!/usr/bin/env python
import os
import sys
import argparse
import pprint
from collections import defaultdict, OrderedDict
from collections import namedtuple
from enum import Enum
try:
from elftools.elf.elffile import ELFFile
from elftools.common.py3compat import itervalues
from elftools.dwarf.descriptions import (describe_DWARF_expr, set_global_machine_arch, describe_CFI_instructions)
from elftools.dwarf.descriptions import describe_attr_value, describe_reg_name
from elftools.dwarf.locationlists import LocationEntry
from elftools.common.py3compat import maxint, bytes2str, byte2int, int2byte
from elftools.dwarf.callframe import instruction_name, CIE, FDE, ZERO
except ImportError:
print "Install pyelf tools"
import CFG_pb2
DWARF_OPERATIONS = defaultdict(lambda: (lambda *args: None))
SYMBOL_BLACKLIST = defaultdict(lambda: (lambda *args: None))
BINARY_FILE = ""
Type = namedtuple('Type', ['name', 'size', 'type_offset', 'tag'])
GLOBAL_VARIABLES = OrderedDict()
TYPES_MAP = OrderedDict()
EH_FRAMES = OrderedDict()
BASE_TYPES = [
'DW_TAG_base_type',
'DW_TAG_structure_type',
'DW_TAG_union_type',
'DW_TAG_enumeration_type',
]
INDIRECT_TYPES = [
'DW_TAG_typedef',
'DW_TAG_const_type',
'DW_TAG_volatile_type',
'DW_TAG_restrict_type',
'DW_TAG_subroutine_type',
]
POINTER_TYPES = {
'DW_TAG_pointer_type' : '*',
}
ARRAY_TYPES = {
'DW_TAG_array_type',
}
TYPE_ENUM = {
'DW_TAG_unknown_type': 0,
'DW_TAG_base_type': 1,
'DW_TAG_structure_type' : 2,
'DW_TAG_union_type': 3,
'DW_TAG_pointer_type': 4,
'DW_TAG_array_type': 5,
}
_DEBUG = False
_DEBUG_FILE = None
_DEBUG_PREFIX = ""
def DEBUG_INIT(file, flag):
global _DEBUG
global _DEBUG_FILE
_DEBUG = flag
_DEBUG_FILE = file
def DEBUG_PUSH():
global _DEBUG_PREFIX
_DEBUG_PREFIX += " "
def DEBUG_POP():
global _DEBUG_PREFIX
_DEBUG_PREFIX = _DEBUG_PREFIX[:-2]
def DEBUG(s):
if _DEBUG:
_DEBUG_FILE.write("{}{}\n".format(_DEBUG_PREFIX, str(s)))
'''
DIE attributes utilities
'''
def get_name(die):
if 'DW_AT_name' in die.attributes:
return die.attributes['DW_AT_name'].value
else:
return 'UNKNOWN'
def get_size(die):
if 'DW_AT_byte_size' in die.attributes:
return die.attributes['DW_AT_byte_size'].value
else:
return -1
def get_location(die):
if 'DW_AT_location' in die.attributes:
return die.attributes['DW_AT_location'].value
else:
return None
def get_types(die):
if 'DW_AT_type' in die.attributes:
offset = die.attributes['DW_AT_type'].value + die.cu.cu_offset
if offset in TYPES_MAP:
return (TYPES_MAP[offset], TYPES_MAP[offset].size, TYPES_MAP[offset].type_offset)
return (Type(None, None, None, None), -1, -1)
def _create_variable_entry(name, offset):
return dict(name=name, offset=offset, type=Type(None, None, None, None), size=0, addr=0, is_global=False)
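# Illustrative sketch (hypothetical helper, not used by the recovery code
# below): walking every DIE of every compile unit with the utilities above and
# collecting the DW_TAG_variable entries as (offset, name, size) tuples.
def _collect_variable_dies_sketch(dwarf_info):
  variables = []
  for cu in dwarf_info.iter_CUs():
    pending = [cu.get_top_DIE()]
    while pending:
      die = pending.pop()
      if die.tag == 'DW_TAG_variable':
        variables.append((die.offset, get_name(die), get_size(die)))
      pending.extend(die.iter_children())
  return variables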
def process_types(dwarf, typemap):
def process_direct_types(die):
if die.tag in BASE_TYPES:
name = get_name(die)
size = get_size(die)
if die.offset not in typemap :
typemap[die.offset] = Type(name=name, size=size, type_offset=die.offset, tag=TYPE_ENUM.get(die.tag))
DEBUG("<{0:x}> {1}".format(die.offset, typemap.get(die.offset)))
def process_pointer_types(die):
if die.tag in POINTER_TYPES:
if 'DW_AT_type' in die.attributes:
offset = die.attributes['DW_AT_type'].value + die.cu.cu_offset
indirect = POINTER_TYPES[die.tag]
name = (typemap[offset].name if offset in typemap else 'UNKNOWN') + indirect
type_offset = typemap[offset].type_offset if offset in typemap else -1
else:
name = 'void*'
type_offset = 0
if die.offset not in typemap:
typemap[die.offset] = Type(name=name, size=die.cu['address_size'], type_offset=type_offset, tag=TYPE_ENUM.get(die.tag))
DEBUG("<{0:x}> {1}".format(die.offset, typemap.get(die.offset)))
def process_indirect_types(die):
if die.tag in INDIRECT_TYPES:
if 'DW_AT_type' in die.attributes:
offset = die.attributes['DW_AT_type'].value + die.cu.cu_offset
if offset in typemap:
size = typemap[offset].size
name = typemap[offset].name
type_offset = typemap[offset].type_offset
tag = typemap[offset].tag if offset in typemap else 0
if die.offset not in typemap:
typemap[die.offset] = Type(name=name, size=size, type_offset=type_offset, tag=tag)
else:
tag = 0
type_offset = 0
name = get_name(die)
if die.offset not in typemap:
typemap[die.offset] = Type(name=name, size=-1, type_offset=type_offset, tag=tag)
DEBUG("<{0:x}> {1}".format(die.offset, typemap.get(die.offset)))
def process_array_types(die):
if die.tag in ARRAY_TYPES:
if 'DW_AT_type' in die.attributes:
offset = die.attributes['DW_AT_type'].value + die.cu.cu_offset
if offset in typemap:
name = typemap[offset].name if offset in typemap else 'UNKNOWN'
type_offset = typemap[offset].type_offset if offset in typemap else 0
size = typemap[offset].size if offset in typemap else 0
# get sub range to get the array size
for child_die in die.iter_children():
if child_die.tag == 'DW_TAG_subrange_type':
if 'DW_AT_upper_bound' in child_die.attributes:
index = child_die.attributes['DW_AT_upper_bound'].value
if type(index) is int:
index = index +1
size = size*index
break
typemap[die.offset] = Type(name=name, size=size, type_offset=type_offset, tag=TYPE_ENUM.get(die.tag))
DEBUG("<{0:x}> {1}".format(die.offset, typemap.get(die.offset)))
build_typemap(dwarf, process_direct_types)
build_typemap(dwarf, process_indirect_types)
build_typemap(dwarf, process_pointer_types)
build_typemap(dwarf, process_array_types)
build_typemap(dwarf, process_indirect_types)
build_typemap(dwarf, process_array_types)
def _process_dies(die, fn):
fn(die)
for child in die.iter_children():
_process_dies(child, fn)
def build_typemap(dwarf, fn):
for CU in dwarf.iter_CUs():
top = CU.get_top_DIE()
_process_dies(top, fn)
def _process_frames_info(dwarf, cfi_entries, eh_frames):
for entry in cfi_entries:
if isinstance(entry, CIE):
pass
elif isinstance(entry, FDE):
pc = entry['initial_location']
if pc not in eh_frames:
eh_frames[pc] = entry
else:
continue
def process_frames(dwarf, eh_frames):
if dwarf.has_EH_CFI():
_process_frames_info(dwarf, dwarf.EH_CFI_entries(), eh_frames)
def _create_global_var_entry(memory_ref, var_name):
return dict(addrs=set(), size=-1, name=var_name, type=None, safe=True)
VARIABLE_STAT = {"type1": 0, "type2": 0}
def address_lookup(g_ref, global_var_array):
for value, gvar in GLOBAL_VARIABLES.iteritems():
if ((gvar['type'].tag == 1) or (gvar['type'].tag == 4)):
if gvar['addr'] == g_ref.address:
address = gvar['addr']
size = gvar['size']
if address not in global_var_array:
global_var_array[address] = _create_global_var_entry(address, g_ref.var.name)
global_var_array[address]['size'] = size
global_var_array[address]['type'] = g_ref.var.ida_type
for ref in g_ref.var.ref_eas:
global_var_array[address]['addrs'].add((ref.inst_addr, ref.offset))
VARIABLE_STAT["type1"] = VARIABLE_STAT["type1"] + 1
return None
elif (gvar['type'].tag == 5) or (gvar['type'].tag == 2):
base_address = gvar['addr']
size = gvar['size']
name = "recovered_global_{:0x}".format(base_address)
if g_ref.address in xrange(base_address, base_address + size):
if base_address not in global_var_array:
global_var_array[base_address] = _create_global_var_entry(base_address, name)
global_var_array[base_address]['size'] = size
global_var_array[base_address]['type'] = g_ref.var.ida_type
offset = g_ref.address - base_address
for ref in g_ref.var.ref_eas:
global_var_array[base_address]['addrs'].add((ref.inst_addr, offset))
VARIABLE_STAT["type2"] = VARIABLE_STAT["type2"] + 1
return None
return None
def _print_die(die, section_offset):
DEBUG("Processing DIE: {}".format(str(die)))
for attr in itervalues(die.attributes):
if attr.name == 'DW_AT_name' :
variable_name = attr.value
name = attr.name
if isinstance(name, int):
name = 'Unknown AT value: %x' % name
DEBUG(' <%x> %-18s: %s' % (attr.offset, name, describe_attr_value(attr, die, section_offset)))
def _process_variable_tag(die, section_offset, M, global_var_data):
if die.tag != 'DW_TAG_variable':
return
name = get_name(die)
if 'DW_AT_location' in die.attributes:
attr = die.attributes['DW_AT_location']
if attr.form not in ('DW_FORM_data4', 'DW_FORM_data8', 'DW_FORM_sec_offset'):
loc_expr = "{}".format(describe_DWARF_expr(attr.value, die.cu.structs)).split(':')
if loc_expr[0][1:] == 'DW_OP_addr':
memory_ref = int(loc_expr[1][:-1][1:], 16)
if memory_ref not in global_var_data:
global_var_data[memory_ref] = _create_variable_entry(name, die.offset)
global_var_data[memory_ref]['is_global'] = True
global_var_data[memory_ref]['addr'] = memory_ref
(type, size, offset) = get_types(die)
global_var_data[memory_ref]['type'] = type
global_var_data[memory_ref]['size'] = size
DEBUG("{}".format(pprint.pformat(global_var_data[memory_ref]))) # DEBUG_ENABLE
def _full_reg_name(regnum):
regname = describe_reg_name(regnum, None, False)
if regname:
return 'r%s (%s)' % (regnum, regname)
else:
return 'r%s' % regnum
"""
Process subprogram tag and recover the local variables
"""
def _process_subprogram_tag(die, section_offset, M, global_var_data):
if die.tag != 'DW_TAG_subprogram':
return
F = M.funcs.add()
F.ea = 0
F.name = get_name(die)
F.is_entrypoint = 0
has_frame = False
frame_regname = ""
if 'DW_AT_frame_base' in die.attributes:
frame_attr = die.attributes['DW_AT_frame_base']
has_frame = True
loc_expr = "{}".format(describe_DWARF_expr(frame_attr.value, die.cu.structs)).split(' ')
if loc_expr[0][1:][:-1] == "DW_OP_call_frame_cfa":
lowpc_attr = die.attributes['DW_AT_low_pc']
#DEBUG("loc_expr {0} {1:x}".format(loc_expr, lowpc_attr.value))
frame = EH_FRAMES[lowpc_attr.value] if lowpc_attr.value in EH_FRAMES else None
if frame:
DEBUG("{0:x}, {1}".format(frame['initial_location'], frame))
for instr in frame.instructions:
name = instruction_name(instr.opcode)
if name == 'DW_CFA_def_cfa_register':
frame_regname = describe_reg_name(instr.args[0], None, False)
for child in die.iter_children():
if child.tag != 'DW_TAG_variable':
continue
stackvar = F.stack_vars.add()
stackvar.name = get_name(child)
stackvar.sp_offset = 0
stackvar.has_frame = has_frame
stackvar.reg_name = frame_regname
(type, size, offset) = get_types(child)
stackvar.size = size if size > 0 else 0
if 'DW_AT_location' in child.attributes:
attr = child.attributes['DW_AT_location']
if attr.form not in ('DW_FORM_data4', 'DW_FORM_data8', 'DW_FORM_sec_offset'):
loc_expr = "{}".format(describe_DWARF_expr(attr.value, child.cu.structs)).split(' ')
if loc_expr[0][1:][:-1] == 'DW_OP_fbreg':
offset = int(loc_expr[1][:-1])
stackvar.sp_offset = offset
DWARF_OPERATIONS = {
#'DW_TAG_compile_unit': _process_compile_unit_tag,
'DW_TAG_variable' : _process_variable_tag,
'DW_TAG_subprogram' : _process_subprogram_tag,
}
class CUnit(object):
def __init__(self, die, cu_len, cu_offset, global_offset = 0):
self._die = die
self._length = cu_len
self._offset = cu_offset
self._section_offset = global_offset
self._global_variable = dict()
def _process_child(self, child_die, M, global_var_data):
for child in child_die.iter_children():
func_ = DWARF_OPERATIONS.get(child.tag)
if func_:
func_(child, self._section_offset, M, global_var_data)
continue
self._process_child(child, M, global_var_data)
def decode_control_unit(self, M, global_var_data):
for child in self._die.iter_children():
func_ = DWARF_OPERATIONS.get(child.tag)
if func_:
func_(child, self._section_offset, M, global_var_data)
continue
self._process_child(child, M, global_var_data)
def process_dwarf_info(in_file, out_file):
'''
  Main function processing the DWARF information from the debug sections
'''
DEBUG('Processing file: {0}'.format(in_file))
with open(in_file, 'rb') as f:
f_elf = ELFFile(f)
if not f_elf.has_dwarf_info():
DEBUG("{0} has no debug informations!".format(file))
return False
M = CFG_pb2.Module()
M.name = "GlobalVariable".format('utf-8')
set_global_machine_arch(f_elf.get_machine_arch())
dwarf_info = f_elf.get_dwarf_info()
process_types(dwarf_info, TYPES_MAP)
process_frames(dwarf_info, EH_FRAMES)
section_offset = dwarf_info.debug_info_sec.global_offset
# Iterate through all the compile units
for CU in dwarf_info.iter_CUs():
DEBUG('Found a compile unit at offset {0}, length {1}'.format(CU.cu_offset, CU['unit_length']))
top_DIE = CU.get_top_DIE()
c_unit = CUnit(top_DIE, CU['unit_length'], CU.cu_offset, section_offset)
c_unit.decode_control_unit(M, GLOBAL_VARIABLES)
for key, value in GLOBAL_VARIABLES.iteritems():
if value["size"] > 0:
gvar = M.global_vars.add()
gvar.name = value["name"]
gvar.ea = value["addr"]
gvar.size = value["size"]
else:
DEBUG("Look for {}".format(pprint.pformat(value)))
#for func in M.funcs:
# DEBUG("Function name {}".format(func.name))
# for sv in func.stackvars:
# DEBUG_PUSH()
# DEBUG("{} : {}, ".format(sv.name, sv.sp_offset))
# DEBUG_POP()
with open(out_file, "w") as outf:
outf.write(M.SerializeToString())
DEBUG("Global Vars\n")
DEBUG('Number of Global Vars: {0}'.format(len(GLOBAL_VARIABLES)))
DEBUG("{}".format(pprint.pformat(GLOBAL_VARIABLES)))
DEBUG("End Global Vars\n")
def is_global_variable_reference(global_var, address):
for key in sorted(global_var.iterkeys()):
entry = global_var[key]
start = key
end = start + entry['size']
if (start <= address) and (end > address):
return True
return False
def add_global_variable_entry(M, ds):
for g in M.global_vars:
start = g.address
end = start + g.var.size
if (ds.base_address >= start) and (ds.base_address < end):
symbol = g.symbols.add()
symbol.base_address = ds.base_address
symbol.symbol_name = ds.symbol_name
symbol.symbol_size = ds.symbol_size
def updateCFG(in_file, out_file):
global_var_array = dict()
M = CFG_pb2.Module()
with open(in_file, 'rb') as inf:
M.ParseFromString(inf.read())
GV = list(M.global_vars)
DEBUG('Number of Global Variables recovered from dwarf: {0}'.format(len(GLOBAL_VARIABLES)))
for g in GV:
gvar = address_lookup(g, global_var_array)
if gvar is None:
DEBUG("Global Vars {} {}".format(str(g.var.name), hex(g.address)))
M.global_vars.remove(g)
for key in sorted(global_var_array.iterkeys()):
entry = global_var_array[key]
var = M.global_vars.add()
var.address = key
var.var.name = entry['name']
var.var.size = entry['size']
var.var.ida_type = entry['type']
for i in entry["addrs"]:
r = var.var.ref_eas.add()
r.inst_addr = i[0]
r.offset = i[1]
for data in M.internal_data:
for ds in data.symbols:
symbol = ds.symbol_name.split("_")
if (symbol[0] == 'data') and (is_global_variable_reference(global_var_array, long(symbol[1], 16)) is True):
ds.symbol_name = "recovered_global_{0:x}".format(long(symbol[1], 16))
add_global_variable_entry(M, ds)
with open(out_file, "w") as outf:
outf.write(M.SerializeToString())
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--log_file", type=argparse.FileType('w'),
default=sys.stderr,
help='Name of the log file. Default is stderr.')
parser.add_argument('--out',
help='Name of the output proto buffer file.',
required=True)
parser.add_argument('--binary',
help='Name of the binary image.',
required=True)
args = parser.parse_args(sys.argv[1:])
if args.log_file:
DEBUG_INIT(args.log_file, True)
DEBUG("Debugging is enabled.")
BINARY_FILE = args.binary
process_dwarf_info(args.binary, args.out)
|
trailofbits/mcsema
|
tools/mcsema_disass/ida7/var_recovery.py
|
Python
|
apache-2.0
| 17,326
|
# Copyright(c) 2007-2008 by Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
# 2009 by Yaco S.L. <lgs@yaco.es>
#
# This file is part of PyCha.
#
# PyCha is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyCha is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with PyCha. If not, see <http://www.gnu.org/licenses/>.
import unittest
import pycha.color
class SimpleColorScheme(pycha.color.ColorScheme):
pass
class ColorTests(unittest.TestCase):
def test_clamp(self):
self.assertEqual(pycha.color.clamp(0, 1, 2), 1)
self.assertEqual(pycha.color.clamp(0, 1, -1), 0)
self.assertEqual(pycha.color.clamp(0, 1, 0.5), 0.5)
self.assertEqual(pycha.color.clamp(0, 1, 1), 1)
self.assertEqual(pycha.color.clamp(0, 1, 0), 0)
def test_hex2rgb(self):
color = pycha.color.hex2rgb('#ff0000')
self.assert_(isinstance(color, tuple))
self.assertAlmostEqual(1, color[0])
self.assertAlmostEqual(0, color[1])
self.assertAlmostEqual(0, color[2])
color2 = pycha.color.hex2rgb(color)
self.assertEqual(color, color2)
color = pycha.color.hex2rgb('#000fff000', digits=3)
self.assert_(isinstance(color, tuple))
self.assertEqual(0, color[0])
self.assertEqual(1, color[1])
self.assertEqual(0, color[2])
color = pycha.color.hex2rgb('#00000000ffff', digits=4)
self.assert_(isinstance(color, tuple))
self.assertEqual(0, color[0])
self.assertEqual(0, color[1])
self.assertEqual(1, color[2])
def test_rgb2hsv_and_hsv2rgb(self):
for rgb, hsv in (((1.0, 0.0, 0.0), (0.0, 1.0, 1.0)),
((1.0, 0.5, 0.0), (30.0, 1.0, 1.0)),
((1.0, 1.0, 0.0), (60.0, 1.0, 1.0)),
((0.5, 1.0, 0.0), (90.0, 1.0, 1.0)),
((0.0, 1.0, 0.0), (120.0, 1.0, 1.0)),
((0.0, 1.0, 0.5), (150.0, 1.0, 1.0)),
((0.0, 1.0, 1.0), (180.0, 1.0, 1.0)),
((0.0, 0.5, 1.0), (210.0, 1.0, 1.0)),
((0.0, 0.0, 1.0), (240.0, 1.0, 1.0)),
((0.5, 0.0, 1.0), (270.0, 1.0, 1.0)),
((1.0, 0.0, 1.0), (300.0, 1.0, 1.0)),
((1.0, 0.0, 0.5), (330.0, 1.0, 1.0)),
((0.375, 0.5, 0.25), (90.0, 0.5, 0.5)),
((0.21875, 0.25, 0.1875), (90.0, 0.25, 0.25))):
self._assertColors(pycha.color.rgb2hsv(*rgb), hsv, 5)
self._assertColors(pycha.color.hsv2rgb(*hsv), rgb, 5)
def test_lighten(self):
r, g, b = (1.0, 1.0, 0.0)
r2, g2, b2 = pycha.color.lighten(r, g, b, 0.1)
self.assertEqual((r2, g2, b2), (1.0, 1.0, 0.1))
r3, g3, b3 = pycha.color.lighten(r2, g2, b2, 0.5)
self.assertEqual((r3, g3, b3), (1.0, 1.0, 0.6))
    def _assertColors(self, c1, c2, precision):
        for i in range(3):
            self.assertAlmostEqual(c1[i], c2[i], precision)
def test_basicColors(self):
colors = ('red', 'green', 'blue', 'grey', 'black', 'darkcyan')
for color in colors:
self.assert_(color in pycha.color.basicColors)
def test_ColorSchemeRegistry(self):
self.assertEquals(SimpleColorScheme,
pycha.color.ColorScheme.getColorScheme('simple'))
self.assertEquals(None,
pycha.color.ColorScheme.getColorScheme('foo'))
def test_FixedColorScheme(self):
keys = range(3)
colors = ((1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0))
scheme = pycha.color.FixedColorScheme(keys, colors)
self._assertColors(scheme[0], (1.0, 0.0, 0.0), 1)
self._assertColors(scheme[1], (0.0, 1.0, 0.0), 3)
self._assertColors(scheme[2], (0.0, 0.0, 1.0), 3)
def test_GradientColorScheme(self):
keys = range(5)
scheme = pycha.color.GradientColorScheme(keys, "#000000")
self._assertColors(scheme[0], (0.0, 0.0, 0.0), 3)
self._assertColors(scheme[1], (0.1, 0.1, 0.1), 3)
self._assertColors(scheme[2], (0.2, 0.2, 0.2), 3)
self._assertColors(scheme[3], (0.3, 0.3, 0.3), 3)
self._assertColors(scheme[4], (0.4, 0.4, 0.4), 3)
def test_autoLighting(self):
"""This test ensures that the colors don't get to white too fast.
See bug #8.
"""
# we have a lot of keys
n = 50
keys = range(n)
color = '#ff0000'
scheme = pycha.color.GradientColorScheme(keys, color)
# ensure that the last color is not completely white
color = scheme[n - 1]
# the red component was already 1
self.assertAlmostEqual(color[0], 1.0, 4)
self.assertNotAlmostEqual(color[1], 1.0, 4)
self.assertNotAlmostEqual(color[2], 1.0, 4)
def test_RainbowColorScheme(self):
keys = range(5)
scheme = pycha.color.GradientColorScheme(keys, "#ff0000")
self._assertColors(scheme[0], (1.0, 0.0, 0.0), 3)
self._assertColors(scheme[1], (1.0, 0.1, 0.1), 3)
self._assertColors(scheme[2], (1.0, 0.2, 0.2), 3)
self._assertColors(scheme[3], (1.0, 0.3, 0.3), 3)
self._assertColors(scheme[4], (1.0, 0.4, 0.4), 3)
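# ---------------------------------------------------------------------------
# Illustrative sketch, separate from the tests above: the rgb2hsv/hsv2rgb
# table uses hue in degrees, whereas the standard library's colorsys module
# scales hue to [0, 1]. These hypothetical helpers show the correspondence.
import colorsys


def _rgb2hsv_degrees_sketch(r, g, b):
    """RGB components in [0, 1] -> (hue in degrees, saturation, value)."""
    h, s, v = colorsys.rgb_to_hsv(r, g, b)
    return h * 360.0, s, v


def _hsv_degrees2rgb_sketch(h, s, v):
    """(hue in degrees, saturation, value) -> RGB components in [0, 1]."""
    return colorsys.hsv_to_rgb(h / 360.0, s, v)

# Example: _rgb2hsv_degrees_sketch(1.0, 0.5, 0.0) is approximately (30.0, 1.0, 1.0).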
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(ColorTests),
))
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
|
jmouriz/sanaviron
|
sanaviron/src/3rd/pycha/tests/color.py
|
Python
|
apache-2.0
| 5,920
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""File logging handler for tasks."""
import logging
import os
from datetime import datetime
from pathlib import Path
from typing import TYPE_CHECKING, Optional, Tuple
from itsdangerous import TimedJSONWebSignatureSerializer
from airflow.configuration import AirflowConfigException, conf
from airflow.utils.context import Context
from airflow.utils.helpers import parse_template_string, render_template_to_string
from airflow.utils.log.non_caching_file_handler import NonCachingFileHandler
if TYPE_CHECKING:
from airflow.models import TaskInstance
class FileTaskHandler(logging.Handler):
"""
FileTaskHandler is a python log handler that handles and reads
task instance logs. It creates and delegates log handling
to `logging.FileHandler` after receiving task instance context.
It reads logs from task instance's host machine.
:param base_log_folder: Base log folder to place logs.
:param filename_template: template filename string
"""
def __init__(self, base_log_folder: str, filename_template: str):
super().__init__()
self.handler: Optional[logging.FileHandler] = None
self.local_base = base_log_folder
self.filename_template, self.filename_jinja_template = parse_template_string(filename_template)
def set_context(self, ti: "TaskInstance"):
"""
Provide task_instance context to airflow task handler.
:param ti: task instance object
"""
local_loc = self._init_file(ti)
self.handler = NonCachingFileHandler(local_loc, encoding='utf-8')
if self.formatter:
self.handler.setFormatter(self.formatter)
self.handler.setLevel(self.level)
def emit(self, record):
if self.handler:
self.handler.emit(record)
def flush(self):
if self.handler:
self.handler.flush()
def close(self):
if self.handler:
self.handler.close()
def _render_filename(self, ti: "TaskInstance", try_number: int) -> str:
if self.filename_jinja_template:
if hasattr(ti, "task"):
context = ti.get_template_context()
else:
context = Context(ti=ti, ts=ti.get_dagrun().logical_date.isoformat())
context["try_number"] = try_number
return render_template_to_string(self.filename_jinja_template, context)
elif self.filename_template:
dag_run = ti.get_dagrun()
dag = ti.task.dag
assert dag is not None # For Mypy.
try:
data_interval: Tuple[datetime, datetime] = dag.get_run_data_interval(dag_run)
except AttributeError: # ti.task is not always set.
data_interval = (dag_run.data_interval_start, dag_run.data_interval_end)
if data_interval[0]:
data_interval_start = data_interval[0].isoformat()
else:
data_interval_start = ""
if data_interval[1]:
data_interval_end = data_interval[1].isoformat()
else:
data_interval_end = ""
return self.filename_template.format(
dag_id=ti.dag_id,
task_id=ti.task_id,
run_id=ti.run_id,
data_interval_start=data_interval_start,
data_interval_end=data_interval_end,
execution_date=ti.get_dagrun().logical_date.isoformat(),
try_number=try_number,
)
else:
raise RuntimeError(f"Unable to render log filename for {ti}. This should never happen")
def _read_grouped_logs(self):
return False
def _read(self, ti, try_number, metadata=None):
"""
Template method that contains custom logic of reading
logs given the try_number.
:param ti: task instance record
:param try_number: current try_number to read log from
:param metadata: log metadata,
            can be used for streaming log reading and auto-tailing.
:return: log message as a string and metadata.
"""
# Task instance here might be different from task instance when
# initializing the handler. Thus explicitly getting log location
# is needed to get correct log path.
log_relative_path = self._render_filename(ti, try_number)
location = os.path.join(self.local_base, log_relative_path)
log = ""
if os.path.exists(location):
try:
with open(location, encoding="utf-8", errors="surrogateescape") as file:
log += f"*** Reading local file: {location}\n"
log += "".join(file.readlines())
except Exception as e:
log = f"*** Failed to load local log file: {location}\n"
log += f"*** {str(e)}\n"
elif conf.get('core', 'executor') == 'KubernetesExecutor':
try:
from airflow.kubernetes.kube_client import get_kube_client
kube_client = get_kube_client()
if len(ti.hostname) >= 63:
# Kubernetes takes the pod name and truncates it for the hostname. This truncated hostname
# is returned for the fqdn to comply with the 63 character limit imposed by DNS standards
# on any label of a FQDN.
pod_list = kube_client.list_namespaced_pod(conf.get('kubernetes', 'namespace'))
matches = [
pod.metadata.name
for pod in pod_list.items
if pod.metadata.name.startswith(ti.hostname)
]
if len(matches) == 1:
if len(matches[0]) > len(ti.hostname):
ti.hostname = matches[0]
log += f'*** Trying to get logs (last 100 lines) from worker pod {ti.hostname} ***\n\n'
res = kube_client.read_namespaced_pod_log(
name=ti.hostname,
namespace=conf.get('kubernetes', 'namespace'),
container='base',
follow=False,
tail_lines=100,
_preload_content=False,
)
for line in res:
log += line.decode()
except Exception as f:
log += f'*** Unable to fetch logs from worker pod {ti.hostname} ***\n{str(f)}\n\n'
else:
import httpx
url = os.path.join("http://{ti.hostname}:{worker_log_server_port}/log", log_relative_path).format(
ti=ti, worker_log_server_port=conf.get('logging', 'WORKER_LOG_SERVER_PORT')
)
log += f"*** Log file does not exist: {location}\n"
log += f"*** Fetching from: {url}\n"
try:
timeout = None # No timeout
try:
timeout = conf.getint('webserver', 'log_fetch_timeout_sec')
except (AirflowConfigException, ValueError):
pass
signer = TimedJSONWebSignatureSerializer(
secret_key=conf.get('webserver', 'secret_key'),
algorithm_name='HS512',
expires_in=conf.getint('webserver', 'log_request_clock_grace', fallback=30),
# This isn't really a "salt", more of a signing context
salt='task-instance-logs',
)
response = httpx.get(
url, timeout=timeout, headers={'Authorization': signer.dumps(log_relative_path)}
)
response.encoding = "utf-8"
if response.status_code == 403:
log += (
"*** !!!! Please make sure that all your Airflow components (e.g. "
"schedulers, webservers and workers) have "
"the same 'secret_key' configured in 'webserver' section and "
"time is synchronized on all your machines (for example with ntpd) !!!!!\n***"
)
log += (
"*** See more at https://airflow.apache.org/docs/apache-airflow/"
"stable/configurations-ref.html#secret-key\n***"
)
# Check if the resource was properly fetched
response.raise_for_status()
log += '\n' + response.text
except Exception as e:
log += f"*** Failed to fetch log file from worker. {str(e)}\n"
return log, {'end_of_log': True}
def read(self, task_instance, try_number=None, metadata=None):
"""
Read logs of given task instance from local machine.
:param task_instance: task instance object
:param try_number: task instance try_number to read logs from. If None
it returns all logs separated by try_number
:param metadata: log metadata,
            can be used for streaming log reading and auto-tailing.
:return: a list of listed tuples which order log string by host
"""
# Task instance increments its try number when it starts to run.
# So the log for a particular task try will only show up when
# try number gets incremented in DB, i.e logs produced the time
# after cli run and before try_number + 1 in DB will not be displayed.
if try_number is None:
next_try = task_instance.next_try_number
try_numbers = list(range(1, next_try))
elif try_number < 1:
logs = [
[('default_host', f'Error fetching the logs. Try number {try_number} is invalid.')],
]
return logs, [{'end_of_log': True}]
else:
try_numbers = [try_number]
logs = [''] * len(try_numbers)
metadata_array = [{}] * len(try_numbers)
for i, try_number_element in enumerate(try_numbers):
log, metadata = self._read(task_instance, try_number_element, metadata)
# es_task_handler return logs grouped by host. wrap other handler returning log string
# with default/ empty host so that UI can render the response in the same way
logs[i] = log if self._read_grouped_logs() else [(task_instance.hostname, log)]
metadata_array[i] = metadata
return logs, metadata_array
def _init_file(self, ti):
"""
Create log directory and give it correct permissions.
:param ti: task instance object
:return: relative log path of the given task instance
"""
# To handle log writing when tasks are impersonated, the log files need to
# be writable by the user that runs the Airflow command and the user
# that is impersonated. This is mainly to handle corner cases with the
# SubDagOperator. When the SubDagOperator is run, all of the operators
# run under the impersonated user and create appropriate log files
# as the impersonated user. However, if the user manually runs tasks
# of the SubDagOperator through the UI, then the log files are created
# by the user that runs the Airflow command. For example, the Airflow
# run command may be run by the `airflow_sudoable` user, but the Airflow
# tasks may be run by the `airflow` user. If the log files are not
# writable by both users, then it's possible that re-running a task
# via the UI (or vice versa) results in a permission error as the task
# tries to write to a log file created by the other user.
relative_path = self._render_filename(ti, ti.try_number)
full_path = os.path.join(self.local_base, relative_path)
directory = os.path.dirname(full_path)
# Create the log file and give it group writable permissions
# TODO(aoen): Make log dirs and logs globally readable for now since the SubDag
# operator is not compatible with impersonation (e.g. if a Celery executor is used
# for a SubDag operator and the SubDag operator has a different owner than the
# parent DAG)
Path(directory).mkdir(mode=0o777, parents=True, exist_ok=True)
if not os.path.exists(full_path):
open(full_path, "a").close()
# TODO: Investigate using 444 instead of 666.
try:
os.chmod(full_path, 0o666)
except OSError:
logging.warning("OSError while change ownership of the log file")
return full_path
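# ---------------------------------------------------------------------------
# Illustrative sketch, not used by FileTaskHandler above: how a str.format
# style template of the kind handled by _render_filename() turns task
# metadata into a relative log path. The template and values are examples.
def _render_filename_sketch(template, **fields):
    """Render a log path template such as '{dag_id}/{task_id}/{run_id}/{try_number}.log'."""
    return template.format(**fields)


# Example:
#   _render_filename_sketch(
#       '{dag_id}/{task_id}/{run_id}/{try_number}.log',
#       dag_id='example_dag',
#       task_id='example_task',
#       run_id='manual__2022-01-01T00:00:00+00:00',
#       try_number=1,
#   )
# returns 'example_dag/example_task/manual__2022-01-01T00:00:00+00:00/1.log'.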
|
bolkedebruin/airflow
|
airflow/utils/log/file_task_handler.py
|
Python
|
apache-2.0
| 13,517
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from unittest import mock
from airflow.providers.amazon.aws.hooks.athena import AWSAthenaHook
MOCK_DATA = {
'query': 'SELECT * FROM TEST_TABLE',
'database': 'TEST_DATABASE',
'outputLocation': 's3://test_s3_bucket/',
'client_request_token': 'eac427d0-1c6d-4dfb-96aa-2835d3ac6595',
'workgroup': 'primary',
'query_execution_id': 'eac427d0-1c6d-4dfb-96aa-2835d3ac6595',
'next_token_id': 'eac427d0-1c6d-4dfb-96aa-2835d3ac6595',
'max_items': 1000,
}
mock_query_context = {'Database': MOCK_DATA['database']}
mock_result_configuration = {'OutputLocation': MOCK_DATA['outputLocation']}
MOCK_RUNNING_QUERY_EXECUTION = {'QueryExecution': {'Status': {'State': 'RUNNING'}}}
MOCK_SUCCEEDED_QUERY_EXECUTION = {'QueryExecution': {'Status': {'State': 'SUCCEEDED'}}}
MOCK_QUERY_EXECUTION = {'QueryExecutionId': MOCK_DATA['query_execution_id']}
class TestAWSAthenaHook(unittest.TestCase):
def setUp(self):
self.athena = AWSAthenaHook(sleep_time=0)
def test_init(self):
self.assertEqual(self.athena.aws_conn_id, 'aws_default')
self.assertEqual(self.athena.sleep_time, 0)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_query_without_token(self, mock_conn):
mock_conn.return_value.start_query_execution.return_value = MOCK_QUERY_EXECUTION
result = self.athena.run_query(
query=MOCK_DATA['query'],
query_context=mock_query_context,
result_configuration=mock_result_configuration,
)
expected_call_params = {
'QueryString': MOCK_DATA['query'],
'QueryExecutionContext': mock_query_context,
'ResultConfiguration': mock_result_configuration,
'WorkGroup': MOCK_DATA['workgroup'],
}
mock_conn.return_value.start_query_execution.assert_called_with(**expected_call_params)
self.assertEqual(result, MOCK_DATA['query_execution_id'])
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_query_with_token(self, mock_conn):
mock_conn.return_value.start_query_execution.return_value = MOCK_QUERY_EXECUTION
result = self.athena.run_query(
query=MOCK_DATA['query'],
query_context=mock_query_context,
result_configuration=mock_result_configuration,
client_request_token=MOCK_DATA['client_request_token'],
)
expected_call_params = {
'QueryString': MOCK_DATA['query'],
'QueryExecutionContext': mock_query_context,
'ResultConfiguration': mock_result_configuration,
'ClientRequestToken': MOCK_DATA['client_request_token'],
'WorkGroup': MOCK_DATA['workgroup'],
}
mock_conn.return_value.start_query_execution.assert_called_with(**expected_call_params)
self.assertEqual(result, MOCK_DATA['query_execution_id'])
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_get_query_results_with_non_succeeded_query(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_RUNNING_QUERY_EXECUTION
result = self.athena.get_query_results(query_execution_id=MOCK_DATA['query_execution_id'])
self.assertIsNone(result)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_get_query_results_with_default_params(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_SUCCEEDED_QUERY_EXECUTION
self.athena.get_query_results(query_execution_id=MOCK_DATA['query_execution_id'])
expected_call_params = {'QueryExecutionId': MOCK_DATA['query_execution_id'], 'MaxResults': 1000}
mock_conn.return_value.get_query_results.assert_called_with(**expected_call_params)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_get_query_results_with_next_token(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_SUCCEEDED_QUERY_EXECUTION
self.athena.get_query_results(
query_execution_id=MOCK_DATA['query_execution_id'], next_token_id=MOCK_DATA['next_token_id']
)
expected_call_params = {
'QueryExecutionId': MOCK_DATA['query_execution_id'],
'NextToken': MOCK_DATA['next_token_id'],
'MaxResults': 1000,
}
mock_conn.return_value.get_query_results.assert_called_with(**expected_call_params)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_get_paginator_with_non_succeeded_query(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_RUNNING_QUERY_EXECUTION
result = self.athena.get_query_results_paginator(query_execution_id=MOCK_DATA['query_execution_id'])
self.assertIsNone(result)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_get_paginator_with_default_params(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_SUCCEEDED_QUERY_EXECUTION
self.athena.get_query_results_paginator(query_execution_id=MOCK_DATA['query_execution_id'])
expected_call_params = {
'QueryExecutionId': MOCK_DATA['query_execution_id'],
'PaginationConfig': {'MaxItems': None, 'PageSize': None, 'StartingToken': None},
}
mock_conn.return_value.get_paginator.return_value.paginate.assert_called_with(**expected_call_params)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_get_paginator_with_pagination_config(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_SUCCEEDED_QUERY_EXECUTION
self.athena.get_query_results_paginator(
query_execution_id=MOCK_DATA['query_execution_id'],
max_items=MOCK_DATA['max_items'],
page_size=MOCK_DATA['max_items'],
starting_token=MOCK_DATA['next_token_id'],
)
expected_call_params = {
'QueryExecutionId': MOCK_DATA['query_execution_id'],
'PaginationConfig': {
'MaxItems': MOCK_DATA['max_items'],
'PageSize': MOCK_DATA['max_items'],
'StartingToken': MOCK_DATA['next_token_id'],
},
}
mock_conn.return_value.get_paginator.return_value.paginate.assert_called_with(**expected_call_params)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_poll_query_when_final(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_SUCCEEDED_QUERY_EXECUTION
result = self.athena.poll_query_status(query_execution_id=MOCK_DATA['query_execution_id'])
mock_conn.return_value.get_query_execution.assert_called_once()
self.assertEqual(result, 'SUCCEEDED')
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_poll_query_with_timeout(self, mock_conn):
mock_conn.return_value.get_query_execution.return_value = MOCK_RUNNING_QUERY_EXECUTION
result = self.athena.poll_query_status(
query_execution_id=MOCK_DATA['query_execution_id'], max_tries=1
)
mock_conn.return_value.get_query_execution.assert_called_once()
self.assertEqual(result, 'RUNNING')
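# ---------------------------------------------------------------------------
# Illustrative sketch, not the hook's actual implementation: the generic
# poll-until-terminal-state pattern that the two poll_query_status tests above
# exercise (this module uses sleep_time=0 and max_tries=1). Names are
# hypothetical.
import time


def _poll_until_terminal_sketch(fetch_state, terminal_states=('SUCCEEDED', 'FAILED', 'CANCELLED'),
                                sleep_time=0, max_tries=None):
    """Call fetch_state() until a terminal state is returned or max_tries is reached."""
    tries = 0
    while True:
        tries += 1
        state = fetch_state()
        if state in terminal_states:
            return state
        if max_tries is not None and tries >= max_tries:
            return state
        time.sleep(sleep_time)


# Example: _poll_until_terminal_sketch(lambda: 'RUNNING', max_tries=1) returns 'RUNNING'.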
if __name__ == '__main__':
unittest.main()
|
airbnb/airflow
|
tests/providers/amazon/aws/hooks/test_athena.py
|
Python
|
apache-2.0
| 8,056
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
'''Predicting the labels for new images using the pre-trained alexnet model'''
from __future__ import print_function
from builtins import range
try:
import pickle
except ImportError:
import cPickle as pickle
import numpy as np
from singa import device
from singa import tensor
import alexnet
def predict(net, images, dev, topk=5):
'''Predict the label of each image.
Args:
net, a pretrained neural net
images, a batch of images [batch_size, 3, 32, 32], which have been
pre-processed
dev, the training device
topk, return the topk labels for each image.
'''
x = tensor.from_numpy(images.astype(np.float32))
x.to_device(dev)
y = net.predict(x)
y.to_host()
prob = tensor.to_numpy(y)
# prob = np.average(prob, 0)
    labels = np.argsort(prob, axis=1)[:, ::-1]  # sort each image's probabilities in descending order
return labels[:, 0:topk]
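# ---------------------------------------------------------------------------
# Illustrative example, separate from predict(): per-image top-k label
# selection on a tiny made-up probability matrix.
def _topk_example():
    probs = np.array([[0.1, 0.7, 0.2],
                      [0.5, 0.4, 0.1]], dtype=np.float32)
    ranked = np.argsort(probs, axis=1)[:, ::-1]  # descending order per row
    return ranked[:, 0:2]  # -> array([[1, 2], [0, 1]])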
def load_dataset(filepath):
print('Loading data file %s' % filepath)
with open(filepath, 'rb') as fd:
cifar10 = pickle.load(fd, encoding='latin1')
image = cifar10['data'].astype(dtype=np.uint8)
image = image.reshape((-1, 3, 32, 32))
label = np.asarray(cifar10['labels'], dtype=np.uint8)
label = label.reshape(label.size, 1)
return image, label
def load_train_data(dir_path, num_batches=5):
labels = []
batchsize = 10000
images = np.empty((num_batches * batchsize, 3, 32, 32), dtype=np.uint8)
for did in range(1, num_batches + 1):
fname_train_data = dir_path + "/data_batch_{}".format(did)
image, label = load_dataset(fname_train_data)
images[(did - 1) * batchsize:did * batchsize] = image
labels.extend(label)
images = np.array(images, dtype=np.float32)
labels = np.array(labels, dtype=np.int32)
return images, labels
def load_test_data(dir_path):
images, labels = load_dataset(dir_path + "/test_batch")
return np.array(images, dtype=np.float32), np.array(labels, dtype=np.int32)
def compute_image_mean(train_dir):
images, _ = load_train_data(train_dir)
return np.average(images, 0)
if __name__ == '__main__':
model = alexnet.create_net(True)
model.load('model', 20) # the checkpoint from train.py
dev = device.get_default_device()
model.to_device(dev)
mean = compute_image_mean('cifar-10-batches-py')
test_images, _ = load_test_data('cifar-10-batches-py')
# predict for two images
print(predict(model, test_images[0:2] - mean, dev))
|
kaiping/incubator-singa
|
examples/cifar10/predict.py
|
Python
|
apache-2.0
| 3,356
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import uuid
import mock
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslotest import mockpatch
from pycadf import cadftaxonomy
from pycadf import cadftype
from pycadf import eventfactory
from pycadf import resource as cadfresource
from keystone import notifications
from keystone.tests import unit
from keystone.tests.unit import test_v3
CONF = cfg.CONF
EXP_RESOURCE_TYPE = uuid.uuid4().hex
CREATED_OPERATION = notifications.ACTIONS.created
UPDATED_OPERATION = notifications.ACTIONS.updated
DELETED_OPERATION = notifications.ACTIONS.deleted
DISABLED_OPERATION = notifications.ACTIONS.disabled
class ArbitraryException(Exception):
pass
def register_callback(operation, resource_type=EXP_RESOURCE_TYPE):
"""Helper for creating and registering a mock callback.
"""
callback = mock.Mock(__name__='callback',
im_class=mock.Mock(__name__='class'))
notifications.register_event_callback(operation, resource_type, callback)
return callback
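# ---------------------------------------------------------------------------
# Illustrative sketch, not Keystone's implementation: the minimal
# register/notify pattern these tests exercise. Callbacks registered for an
# (operation, resource_type) pair are invoked with
# (service, resource_type, operation, payload). Names are hypothetical.
class _CallbackRegistrySketch(object):
    def __init__(self):
        self._subscribers = {}

    def register(self, operation, resource_type, callback):
        self._subscribers.setdefault((operation, resource_type), []).append(callback)

    def notify(self, operation, resource_type, payload, service='identity'):
        for callback in self._subscribers.get((operation, resource_type), []):
            callback(service, resource_type, operation, payload)


# Example:
#   registry = _CallbackRegistrySketch()
#   registry.register(CREATED_OPERATION, EXP_RESOURCE_TYPE, mock.Mock())
#   registry.notify(CREATED_OPERATION, EXP_RESOURCE_TYPE, {'resource_info': 'abc'})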
class AuditNotificationsTestCase(unit.BaseTestCase):
def setUp(self):
super(AuditNotificationsTestCase, self).setUp()
self.config_fixture = self.useFixture(config_fixture.Config(CONF))
self.addCleanup(notifications.clear_subscribers)
def _test_notification_operation(self, notify_function, operation):
exp_resource_id = uuid.uuid4().hex
callback = register_callback(operation)
notify_function(EXP_RESOURCE_TYPE, exp_resource_id)
callback.assert_called_once_with('identity', EXP_RESOURCE_TYPE,
operation,
{'resource_info': exp_resource_id})
self.config_fixture.config(notification_format='cadf')
with mock.patch(
'keystone.notifications._create_cadf_payload') as cadf_notify:
notify_function(EXP_RESOURCE_TYPE, exp_resource_id)
initiator = None
cadf_notify.assert_called_once_with(
operation, EXP_RESOURCE_TYPE, exp_resource_id,
notifications.taxonomy.OUTCOME_SUCCESS, initiator)
notify_function(EXP_RESOURCE_TYPE, exp_resource_id, public=False)
cadf_notify.assert_called_once_with(
operation, EXP_RESOURCE_TYPE, exp_resource_id,
notifications.taxonomy.OUTCOME_SUCCESS, initiator)
def test_resource_created_notification(self):
self._test_notification_operation(notifications.Audit.created,
CREATED_OPERATION)
def test_resource_updated_notification(self):
self._test_notification_operation(notifications.Audit.updated,
UPDATED_OPERATION)
def test_resource_deleted_notification(self):
self._test_notification_operation(notifications.Audit.deleted,
DELETED_OPERATION)
def test_resource_disabled_notification(self):
self._test_notification_operation(notifications.Audit.disabled,
DISABLED_OPERATION)
class NotificationsWrapperTestCase(unit.BaseTestCase):
def create_fake_ref(self):
resource_id = uuid.uuid4().hex
return resource_id, {
'id': resource_id,
'key': uuid.uuid4().hex
}
@notifications.created(EXP_RESOURCE_TYPE)
def create_resource(self, resource_id, data):
return data
def test_resource_created_notification(self):
exp_resource_id, data = self.create_fake_ref()
callback = register_callback(CREATED_OPERATION)
self.create_resource(exp_resource_id, data)
callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
CREATED_OPERATION,
{'resource_info': exp_resource_id})
@notifications.updated(EXP_RESOURCE_TYPE)
def update_resource(self, resource_id, data):
return data
def test_resource_updated_notification(self):
exp_resource_id, data = self.create_fake_ref()
callback = register_callback(UPDATED_OPERATION)
self.update_resource(exp_resource_id, data)
callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
UPDATED_OPERATION,
{'resource_info': exp_resource_id})
@notifications.deleted(EXP_RESOURCE_TYPE)
def delete_resource(self, resource_id):
pass
def test_resource_deleted_notification(self):
exp_resource_id = uuid.uuid4().hex
callback = register_callback(DELETED_OPERATION)
self.delete_resource(exp_resource_id)
callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
DELETED_OPERATION,
{'resource_info': exp_resource_id})
@notifications.created(EXP_RESOURCE_TYPE)
def create_exception(self, resource_id):
raise ArbitraryException()
def test_create_exception_without_notification(self):
callback = register_callback(CREATED_OPERATION)
self.assertRaises(
ArbitraryException, self.create_exception, uuid.uuid4().hex)
self.assertFalse(callback.called)
@notifications.created(EXP_RESOURCE_TYPE)
def update_exception(self, resource_id):
raise ArbitraryException()
def test_update_exception_without_notification(self):
callback = register_callback(UPDATED_OPERATION)
self.assertRaises(
ArbitraryException, self.update_exception, uuid.uuid4().hex)
self.assertFalse(callback.called)
@notifications.deleted(EXP_RESOURCE_TYPE)
def delete_exception(self, resource_id):
raise ArbitraryException()
def test_delete_exception_without_notification(self):
callback = register_callback(DELETED_OPERATION)
self.assertRaises(
ArbitraryException, self.delete_exception, uuid.uuid4().hex)
self.assertFalse(callback.called)
class NotificationsTestCase(unit.BaseTestCase):
def test_send_notification(self):
"""Test the private method _send_notification to ensure event_type,
payload, and context are built and passed properly.
"""
resource = uuid.uuid4().hex
resource_type = EXP_RESOURCE_TYPE
operation = CREATED_OPERATION
# NOTE(ldbragst): Even though notifications._send_notification doesn't
# contain logic that creates cases, this is supposed to test that
# context is always empty and that we ensure the resource ID of the
# resource in the notification is contained in the payload. It was
# agreed that context should be empty in Keystone's case, which is
# also noted in the /keystone/notifications.py module. This test
# ensures and maintains these conditions.
expected_args = [
{}, # empty context
'identity.%s.created' % resource_type, # event_type
{'resource_info': resource}, # payload
'INFO', # priority is always INFO...
]
with mock.patch.object(notifications._get_notifier(),
'_notify') as mocked:
notifications._send_notification(operation, resource_type,
resource)
mocked.assert_called_once_with(*expected_args)
class BaseNotificationTest(test_v3.RestfulTestCase):
def setUp(self):
super(BaseNotificationTest, self).setUp()
self._notifications = []
self._audits = []
def fake_notify(operation, resource_type, resource_id,
public=True):
note = {
'resource_id': resource_id,
'operation': operation,
'resource_type': resource_type,
'send_notification_called': True,
'public': public}
self._notifications.append(note)
self.useFixture(mockpatch.PatchObject(
notifications, '_send_notification', fake_notify))
def fake_audit(action, initiator, outcome, target,
event_type, **kwargs):
service_security = cadftaxonomy.SERVICE_SECURITY
event = eventfactory.EventFactory().new_event(
eventType=cadftype.EVENTTYPE_ACTIVITY,
outcome=outcome,
action=action,
initiator=initiator,
target=target,
observer=cadfresource.Resource(typeURI=service_security))
for key, value in kwargs.items():
setattr(event, key, value)
audit = {
'payload': event.as_dict(),
'event_type': event_type,
'send_notification_called': True}
self._audits.append(audit)
self.useFixture(mockpatch.PatchObject(
notifications, '_send_audit_notification', fake_audit))
def _assert_last_note(self, resource_id, operation, resource_type):
# NOTE(stevemar): If 'basic' format is not used, then simply
# return since this assertion is not valid.
if CONF.notification_format != 'basic':
return
self.assertTrue(len(self._notifications) > 0)
note = self._notifications[-1]
self.assertEqual(note['operation'], operation)
self.assertEqual(note['resource_id'], resource_id)
self.assertEqual(note['resource_type'], resource_type)
self.assertTrue(note['send_notification_called'])
def _assert_last_audit(self, resource_id, operation, resource_type,
target_uri):
# NOTE(stevemar): If 'cadf' format is not used, then simply
# return since this assertion is not valid.
if CONF.notification_format != 'cadf':
return
self.assertTrue(len(self._audits) > 0)
audit = self._audits[-1]
payload = audit['payload']
self.assertEqual(resource_id, payload['resource_info'])
action = '%s.%s' % (operation, resource_type)
self.assertEqual(action, payload['action'])
self.assertEqual(target_uri, payload['target']['typeURI'])
self.assertEqual(resource_id, payload['target']['id'])
event_type = '%s.%s.%s' % ('identity', resource_type, operation)
self.assertEqual(event_type, audit['event_type'])
self.assertTrue(audit['send_notification_called'])
def _assert_notify_not_sent(self, resource_id, operation, resource_type,
public=True):
unexpected = {
'resource_id': resource_id,
'operation': operation,
'resource_type': resource_type,
'send_notification_called': True,
'public': public}
for note in self._notifications:
self.assertNotEqual(unexpected, note)
def _assert_notify_sent(self, resource_id, operation, resource_type,
public=True):
expected = {
'resource_id': resource_id,
'operation': operation,
'resource_type': resource_type,
'send_notification_called': True,
'public': public}
for note in self._notifications:
if expected == note:
break
else:
self.fail("Notification not sent.")
class NotificationsForEntities(BaseNotificationTest):
def test_create_group(self):
group_ref = self.new_group_ref(domain_id=self.domain_id)
group_ref = self.identity_api.create_group(group_ref)
self._assert_last_note(group_ref['id'], CREATED_OPERATION, 'group')
self._assert_last_audit(group_ref['id'], CREATED_OPERATION, 'group',
cadftaxonomy.SECURITY_GROUP)
def test_create_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self._assert_last_note(
project_ref['id'], CREATED_OPERATION, 'project')
self._assert_last_audit(project_ref['id'], CREATED_OPERATION,
'project', cadftaxonomy.SECURITY_PROJECT)
def test_create_role(self):
role_ref = self.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
self._assert_last_audit(role_ref['id'], CREATED_OPERATION, 'role',
cadftaxonomy.SECURITY_ROLE)
def test_create_user(self):
user_ref = self.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(user_ref)
self._assert_last_note(user_ref['id'], CREATED_OPERATION, 'user')
self._assert_last_audit(user_ref['id'], CREATED_OPERATION, 'user',
cadftaxonomy.SECURITY_ACCOUNT_USER)
def test_create_trust(self):
trustor = self.new_user_ref(domain_id=self.domain_id)
trustor = self.identity_api.create_user(trustor)
trustee = self.new_user_ref(domain_id=self.domain_id)
trustee = self.identity_api.create_user(trustee)
role_ref = self.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
trust_ref = self.new_trust_ref(trustor['id'],
trustee['id'])
self.trust_api.create_trust(trust_ref['id'],
trust_ref,
[role_ref])
self._assert_last_note(
trust_ref['id'], CREATED_OPERATION, 'OS-TRUST:trust')
self._assert_last_audit(trust_ref['id'], CREATED_OPERATION,
'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
def test_delete_group(self):
group_ref = self.new_group_ref(domain_id=self.domain_id)
group_ref = self.identity_api.create_group(group_ref)
self.identity_api.delete_group(group_ref['id'])
self._assert_last_note(group_ref['id'], DELETED_OPERATION, 'group')
self._assert_last_audit(group_ref['id'], DELETED_OPERATION, 'group',
cadftaxonomy.SECURITY_GROUP)
def test_delete_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.resource_api.delete_project(project_ref['id'])
self._assert_last_note(
project_ref['id'], DELETED_OPERATION, 'project')
self._assert_last_audit(project_ref['id'], DELETED_OPERATION,
'project', cadftaxonomy.SECURITY_PROJECT)
def test_delete_role(self):
role_ref = self.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
self.role_api.delete_role(role_ref['id'])
self._assert_last_note(role_ref['id'], DELETED_OPERATION, 'role')
self._assert_last_audit(role_ref['id'], DELETED_OPERATION, 'role',
cadftaxonomy.SECURITY_ROLE)
def test_delete_user(self):
user_ref = self.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(user_ref)
self.identity_api.delete_user(user_ref['id'])
self._assert_last_note(user_ref['id'], DELETED_OPERATION, 'user')
self._assert_last_audit(user_ref['id'], DELETED_OPERATION, 'user',
cadftaxonomy.SECURITY_ACCOUNT_USER)
def test_create_domain(self):
domain_ref = self.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
self._assert_last_note(domain_ref['id'], CREATED_OPERATION, 'domain')
self._assert_last_audit(domain_ref['id'], CREATED_OPERATION, 'domain',
cadftaxonomy.SECURITY_DOMAIN)
def test_update_domain(self):
domain_ref = self.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['description'] = uuid.uuid4().hex
self.resource_api.update_domain(domain_ref['id'], domain_ref)
self._assert_last_note(domain_ref['id'], UPDATED_OPERATION, 'domain')
self._assert_last_audit(domain_ref['id'], UPDATED_OPERATION, 'domain',
cadftaxonomy.SECURITY_DOMAIN)
def test_delete_domain(self):
domain_ref = self.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['enabled'] = False
self.resource_api.update_domain(domain_ref['id'], domain_ref)
self.resource_api.delete_domain(domain_ref['id'])
self._assert_last_note(domain_ref['id'], DELETED_OPERATION, 'domain')
self._assert_last_audit(domain_ref['id'], DELETED_OPERATION, 'domain',
cadftaxonomy.SECURITY_DOMAIN)
def test_delete_trust(self):
trustor = self.new_user_ref(domain_id=self.domain_id)
trustor = self.identity_api.create_user(trustor)
trustee = self.new_user_ref(domain_id=self.domain_id)
trustee = self.identity_api.create_user(trustee)
role_ref = self.new_role_ref()
trust_ref = self.new_trust_ref(trustor['id'], trustee['id'])
self.trust_api.create_trust(trust_ref['id'],
trust_ref,
[role_ref])
self.trust_api.delete_trust(trust_ref['id'])
self._assert_last_note(
trust_ref['id'], DELETED_OPERATION, 'OS-TRUST:trust')
self._assert_last_audit(trust_ref['id'], DELETED_OPERATION,
'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
def test_create_endpoint(self):
endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
self._assert_notify_sent(endpoint_ref['id'], CREATED_OPERATION,
'endpoint')
self._assert_last_audit(endpoint_ref['id'], CREATED_OPERATION,
'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
def test_update_endpoint(self):
endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
self.catalog_api.update_endpoint(endpoint_ref['id'], endpoint_ref)
self._assert_notify_sent(endpoint_ref['id'], UPDATED_OPERATION,
'endpoint')
self._assert_last_audit(endpoint_ref['id'], UPDATED_OPERATION,
'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
def test_delete_endpoint(self):
endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
self.catalog_api.delete_endpoint(endpoint_ref['id'])
self._assert_notify_sent(endpoint_ref['id'], DELETED_OPERATION,
'endpoint')
self._assert_last_audit(endpoint_ref['id'], DELETED_OPERATION,
'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
def test_create_service(self):
service_ref = self.new_service_ref()
self.catalog_api.create_service(service_ref['id'], service_ref)
self._assert_notify_sent(service_ref['id'], CREATED_OPERATION,
'service')
self._assert_last_audit(service_ref['id'], CREATED_OPERATION,
'service', cadftaxonomy.SECURITY_SERVICE)
def test_update_service(self):
service_ref = self.new_service_ref()
self.catalog_api.create_service(service_ref['id'], service_ref)
self.catalog_api.update_service(service_ref['id'], service_ref)
self._assert_notify_sent(service_ref['id'], UPDATED_OPERATION,
'service')
self._assert_last_audit(service_ref['id'], UPDATED_OPERATION,
'service', cadftaxonomy.SECURITY_SERVICE)
def test_delete_service(self):
service_ref = self.new_service_ref()
self.catalog_api.create_service(service_ref['id'], service_ref)
self.catalog_api.delete_service(service_ref['id'])
self._assert_notify_sent(service_ref['id'], DELETED_OPERATION,
'service')
self._assert_last_audit(service_ref['id'], DELETED_OPERATION,
'service', cadftaxonomy.SECURITY_SERVICE)
def test_create_region(self):
region_ref = self.new_region_ref()
self.catalog_api.create_region(region_ref)
self._assert_notify_sent(region_ref['id'], CREATED_OPERATION,
'region')
self._assert_last_audit(region_ref['id'], CREATED_OPERATION,
'region', cadftaxonomy.SECURITY_REGION)
def test_update_region(self):
region_ref = self.new_region_ref()
self.catalog_api.create_region(region_ref)
self.catalog_api.update_region(region_ref['id'], region_ref)
self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION,
'region')
self._assert_last_audit(region_ref['id'], UPDATED_OPERATION,
'region', cadftaxonomy.SECURITY_REGION)
def test_delete_region(self):
region_ref = self.new_region_ref()
self.catalog_api.create_region(region_ref)
self.catalog_api.delete_region(region_ref['id'])
self._assert_notify_sent(region_ref['id'], DELETED_OPERATION,
'region')
self._assert_last_audit(region_ref['id'], DELETED_OPERATION,
'region', cadftaxonomy.SECURITY_REGION)
def test_create_policy(self):
policy_ref = self.new_policy_ref()
self.policy_api.create_policy(policy_ref['id'], policy_ref)
self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION,
'policy')
self._assert_last_audit(policy_ref['id'], CREATED_OPERATION,
'policy', cadftaxonomy.SECURITY_POLICY)
def test_update_policy(self):
policy_ref = self.new_policy_ref()
self.policy_api.create_policy(policy_ref['id'], policy_ref)
self.policy_api.update_policy(policy_ref['id'], policy_ref)
self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION,
'policy')
self._assert_last_audit(policy_ref['id'], UPDATED_OPERATION,
'policy', cadftaxonomy.SECURITY_POLICY)
def test_delete_policy(self):
policy_ref = self.new_policy_ref()
self.policy_api.create_policy(policy_ref['id'], policy_ref)
self.policy_api.delete_policy(policy_ref['id'])
self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION,
'policy')
self._assert_last_audit(policy_ref['id'], DELETED_OPERATION,
'policy', cadftaxonomy.SECURITY_POLICY)
def test_disable_domain(self):
domain_ref = self.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['enabled'] = False
self.resource_api.update_domain(domain_ref['id'], domain_ref)
self._assert_notify_sent(domain_ref['id'], 'disabled', 'domain',
public=False)
def test_disable_of_disabled_domain_does_not_notify(self):
domain_ref = self.new_domain_ref()
domain_ref['enabled'] = False
self.resource_api.create_domain(domain_ref['id'], domain_ref)
# The domain_ref above is not changed during the create process. We
# can use the same ref to perform the update.
self.resource_api.update_domain(domain_ref['id'], domain_ref)
self._assert_notify_not_sent(domain_ref['id'], 'disabled', 'domain',
public=False)
def test_update_group(self):
group_ref = self.new_group_ref(domain_id=self.domain_id)
group_ref = self.identity_api.create_group(group_ref)
self.identity_api.update_group(group_ref['id'], group_ref)
self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group')
self._assert_last_audit(group_ref['id'], UPDATED_OPERATION, 'group',
cadftaxonomy.SECURITY_GROUP)
def test_update_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_notify_sent(
project_ref['id'], UPDATED_OPERATION, 'project', public=True)
self._assert_last_audit(project_ref['id'], UPDATED_OPERATION,
'project', cadftaxonomy.SECURITY_PROJECT)
def test_disable_project(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
project_ref['enabled'] = False
self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_notify_sent(project_ref['id'], 'disabled', 'project',
public=False)
def test_disable_of_disabled_project_does_not_notify(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
project_ref['enabled'] = False
self.resource_api.create_project(project_ref['id'], project_ref)
# The project_ref above is not changed during the create process. We
# can use the same ref to perform the update.
self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project',
public=False)
def test_update_project_does_not_send_disable(self):
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
project_ref['enabled'] = True
self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_last_note(
project_ref['id'], UPDATED_OPERATION, 'project')
self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project')
def test_update_role(self):
role_ref = self.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
self.role_api.update_role(role_ref['id'], role_ref)
self._assert_last_note(role_ref['id'], UPDATED_OPERATION, 'role')
self._assert_last_audit(role_ref['id'], UPDATED_OPERATION, 'role',
cadftaxonomy.SECURITY_ROLE)
def test_update_user(self):
user_ref = self.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(user_ref)
self.identity_api.update_user(user_ref['id'], user_ref)
self._assert_last_note(user_ref['id'], UPDATED_OPERATION, 'user')
self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user',
cadftaxonomy.SECURITY_ACCOUNT_USER)
def test_config_option_no_events(self):
self.config_fixture.config(notification_format='basic')
role_ref = self.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
# The regular notifications will still be emitted, since they are
# used for callback handling.
self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
# No audit event should have occurred
self.assertEqual(0, len(self._audits))
class CADFNotificationsForEntities(NotificationsForEntities):
def setUp(self):
super(CADFNotificationsForEntities, self).setUp()
self.config_fixture.config(notification_format='cadf')
def test_initiator_data_is_set(self):
ref = self.new_domain_ref()
resp = self.post('/domains', body={'domain': ref})
resource_id = resp.result.get('domain').get('id')
self._assert_last_audit(resource_id, CREATED_OPERATION, 'domain',
cadftaxonomy.SECURITY_DOMAIN)
self.assertTrue(len(self._audits) > 0)
audit = self._audits[-1]
payload = audit['payload']
self.assertEqual(self.user_id, payload['initiator']['id'])
self.assertEqual(self.project_id, payload['initiator']['project_id'])
class TestEventCallbacks(test_v3.RestfulTestCase):
def setUp(self):
super(TestEventCallbacks, self).setUp()
self.has_been_called = False
def _project_deleted_callback(self, service, resource_type, operation,
payload):
self.has_been_called = True
def _project_created_callback(self, service, resource_type, operation,
payload):
self.has_been_called = True
def test_notification_received(self):
callback = register_callback(CREATED_OPERATION, 'project')
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.assertTrue(callback.called)
def test_notification_method_not_callable(self):
fake_method = None
self.assertRaises(TypeError,
notifications.register_event_callback,
UPDATED_OPERATION,
'project',
[fake_method])
def test_notification_event_not_valid(self):
self.assertRaises(ValueError,
notifications.register_event_callback,
uuid.uuid4().hex,
'project',
self._project_deleted_callback)
def test_event_registration_for_unknown_resource_type(self):
# Registration for unknown resource types should succeed. If no event
        # is issued for that resource type, the callback won't be triggered.
notifications.register_event_callback(DELETED_OPERATION,
uuid.uuid4().hex,
self._project_deleted_callback)
resource_type = uuid.uuid4().hex
notifications.register_event_callback(DELETED_OPERATION,
resource_type,
self._project_deleted_callback)
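    # The event_callbacks mapping consumed by @notifications.listener takes
    # the shape {operation: {resource_type: callback_or_list_of_callbacks}};
    # the next two tests exercise the single-callback and list forms.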
def test_provider_event_callback_subscription(self):
callback_called = []
@notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = {
CREATED_OPERATION: {'project': self.foo_callback}}
def foo_callback(self, service, resource_type, operation,
payload):
# uses callback_called from the closure
callback_called.append(True)
Foo()
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.assertEqual([True], callback_called)
def test_provider_event_callbacks_subscription(self):
callback_called = []
@notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = {
CREATED_OPERATION: {
'project': [self.callback_0, self.callback_1]}}
def callback_0(self, service, resource_type, operation, payload):
# uses callback_called from the closure
callback_called.append('cb0')
def callback_1(self, service, resource_type, operation, payload):
# uses callback_called from the closure
callback_called.append('cb1')
Foo()
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.assertItemsEqual(['cb1', 'cb0'], callback_called)
def test_invalid_event_callbacks(self):
@notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = 'bogus'
self.assertRaises(AttributeError, Foo)
def test_invalid_event_callbacks_event(self):
@notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = {CREATED_OPERATION: 'bogus'}
self.assertRaises(AttributeError, Foo)
def test_using_an_unbound_method_as_a_callback_fails(self):
# NOTE(dstanek): An unbound method is when you reference a method
# from a class object. You'll get a method that isn't bound to a
# particular instance so there is no magic 'self'. You can call it,
# but you have to pass in the instance manually like: C.m(C()).
# If you reference the method from an instance then you get a method
# that effectively curries the self argument for you
        # (think functools.partial). Obviously if we don't have an
# instance then we can't call the method.
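        # A minimal illustration (not executed by this test): given
        #     class C(object):
        #         def m(self):
        #             pass
        # C.m is unbound and must be called as C.m(C()), whereas C().m is
        # bound and can be called with no arguments.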
@notifications.listener
class Foo(object):
def __init__(self):
self.event_callbacks = {CREATED_OPERATION:
{'project': Foo.callback}}
def callback(self, *args):
pass
# TODO(dstanek): it would probably be nice to fail early using
# something like:
# self.assertRaises(TypeError, Foo)
Foo()
project_ref = self.new_project_ref(domain_id=self.domain_id)
self.assertRaises(TypeError, self.resource_api.create_project,
project_ref['id'], project_ref)
class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
LOCAL_HOST = 'localhost'
ACTION = 'authenticate'
ROLE_ASSIGNMENT = 'role_assignment'
def setUp(self):
super(CadfNotificationsWrapperTestCase, self).setUp()
self._notifications = []
def fake_notify(action, initiator, outcome, target,
event_type, **kwargs):
service_security = cadftaxonomy.SERVICE_SECURITY
event = eventfactory.EventFactory().new_event(
eventType=cadftype.EVENTTYPE_ACTIVITY,
outcome=outcome,
action=action,
initiator=initiator,
target=target,
observer=cadfresource.Resource(typeURI=service_security))
for key, value in kwargs.items():
setattr(event, key, value)
note = {
'action': action,
'initiator': initiator,
'event': event,
'event_type': event_type,
'send_notification_called': True}
self._notifications.append(note)
self.useFixture(mockpatch.PatchObject(
notifications, '_send_audit_notification', fake_notify))
def _assert_last_note(self, action, user_id, event_type=None):
self.assertTrue(self._notifications)
note = self._notifications[-1]
self.assertEqual(note['action'], action)
initiator = note['initiator']
self.assertEqual(initiator.id, user_id)
self.assertEqual(initiator.host.address, self.LOCAL_HOST)
self.assertTrue(note['send_notification_called'])
if event_type:
self.assertEqual(note['event_type'], event_type)
def _assert_event(self, role_id, project=None, domain=None,
user=None, group=None, inherit=False):
"""Assert that the CADF event is valid.
In the case of role assignments, the event will have extra data,
specifically, the role, target, actor, and if the role is inherited.
An example event, as a dictionary is seen below:
{
'typeURI': 'http://schemas.dmtf.org/cloud/audit/1.0/event',
'initiator': {
'typeURI': 'service/security/account/user',
'host': {'address': 'localhost'},
'id': 'openstack:0a90d95d-582c-4efb-9cbc-e2ca7ca9c341',
'name': u'bccc2d9bfc2a46fd9e33bcf82f0b5c21'
},
'target': {
'typeURI': 'service/security/account/user',
'id': 'openstack:d48ea485-ef70-4f65-8d2b-01aa9d7ec12d'
},
'observer': {
'typeURI': 'service/security',
'id': 'openstack:d51dd870-d929-4aba-8d75-dcd7555a0c95'
},
'eventType': 'activity',
'eventTime': '2014-08-21T21:04:56.204536+0000',
'role': u'0e6b990380154a2599ce6b6e91548a68',
'domain': u'24bdcff1aab8474895dbaac509793de1',
'inherited_to_projects': False,
'group': u'c1e22dc67cbd469ea0e33bf428fe597a',
'action': 'created.role_assignment',
'outcome': 'success',
'id': 'openstack:782689dd-f428-4f13-99c7-5c70f94a5ac1'
}
"""
note = self._notifications[-1]
event = note['event']
if project:
self.assertEqual(project, event.project)
if domain:
self.assertEqual(domain, event.domain)
if group:
self.assertEqual(group, event.group)
elif user:
self.assertEqual(user, event.user)
self.assertEqual(role_id, event.role)
self.assertEqual(inherit, event.inherited_to_projects)
def test_v3_authenticate_user_name_and_domain_id(self):
user_id = self.user_id
user_name = self.user['name']
password = self.user['password']
domain_id = self.domain_id
data = self.build_authentication_request(username=user_name,
user_domain_id=domain_id,
password=password)
self.post('/auth/tokens', body=data)
self._assert_last_note(self.ACTION, user_id)
def test_v3_authenticate_user_id(self):
user_id = self.user_id
password = self.user['password']
data = self.build_authentication_request(user_id=user_id,
password=password)
self.post('/auth/tokens', body=data)
self._assert_last_note(self.ACTION, user_id)
def test_v3_authenticate_user_name_and_domain_name(self):
user_id = self.user_id
user_name = self.user['name']
password = self.user['password']
domain_name = self.domain['name']
data = self.build_authentication_request(username=user_name,
user_domain_name=domain_name,
password=password)
self.post('/auth/tokens', body=data)
self._assert_last_note(self.ACTION, user_id)
def _test_role_assignment(self, url, role, project=None, domain=None,
user=None, group=None):
self.put(url)
action = "%s.%s" % (CREATED_OPERATION, self.ROLE_ASSIGNMENT)
event_type = '%s.%s.%s' % (notifications.SERVICE,
self.ROLE_ASSIGNMENT, CREATED_OPERATION)
self._assert_last_note(action, self.user_id, event_type)
self._assert_event(role, project, domain, user, group)
self.delete(url)
action = "%s.%s" % (DELETED_OPERATION, self.ROLE_ASSIGNMENT)
event_type = '%s.%s.%s' % (notifications.SERVICE,
self.ROLE_ASSIGNMENT, DELETED_OPERATION)
self._assert_last_note(action, self.user_id, event_type)
self._assert_event(role, project, domain, user, None)
def test_user_project_grant(self):
url = ('/projects/%s/users/%s/roles/%s' %
(self.project_id, self.user_id, self.role_id))
self._test_role_assignment(url, self.role_id,
project=self.project_id,
user=self.user_id)
def test_group_domain_grant(self):
group_ref = self.new_group_ref(domain_id=self.domain_id)
group = self.identity_api.create_group(group_ref)
self.identity_api.add_user_to_group(self.user_id, group['id'])
url = ('/domains/%s/groups/%s/roles/%s' %
(self.domain_id, group['id'], self.role_id))
self._test_role_assignment(url, self.role_id,
domain=self.domain_id,
user=self.user_id,
group=group['id'])
def test_add_role_to_user_and_project(self):
# A notification is sent when add_role_to_user_and_project is called on
# the assignment manager.
project_ref = self.new_project_ref(self.domain_id)
project = self.resource_api.create_project(
project_ref['id'], project_ref)
tenant_id = project['id']
self.assignment_api.add_role_to_user_and_project(
self.user_id, tenant_id, self.role_id)
self.assertTrue(self._notifications)
note = self._notifications[-1]
self.assertEqual(note['action'], 'created.role_assignment')
self.assertTrue(note['send_notification_called'])
self._assert_event(self.role_id, project=tenant_id, user=self.user_id)
def test_remove_role_from_user_and_project(self):
# A notification is sent when remove_role_from_user_and_project is
# called on the assignment manager.
self.assignment_api.remove_role_from_user_and_project(
self.user_id, self.project_id, self.role_id)
self.assertTrue(self._notifications)
note = self._notifications[-1]
self.assertEqual(note['action'], 'deleted.role_assignment')
self.assertTrue(note['send_notification_called'])
self._assert_event(self.role_id, project=self.project_id,
user=self.user_id)
class TestCallbackRegistration(unit.BaseTestCase):
def setUp(self):
super(TestCallbackRegistration, self).setUp()
self.mock_log = mock.Mock()
# Force the callback logging to occur
self.mock_log.logger.getEffectiveLevel.return_value = logging.DEBUG
def verify_log_message(self, data):
"""Tests that use this are a little brittle because adding more
logging can break them.
TODO(dstanek): remove the need for this in a future refactoring
"""
log_fn = self.mock_log.debug
self.assertEqual(len(data), log_fn.call_count)
for datum in data:
log_fn.assert_any_call(mock.ANY, datum)
def test_a_function_callback(self):
def callback(*args, **kwargs):
pass
resource_type = 'thing'
with mock.patch('keystone.notifications.LOG', self.mock_log):
notifications.register_event_callback(
CREATED_OPERATION, resource_type, callback)
callback = 'keystone.tests.unit.common.test_notifications.callback'
expected_log_data = {
'callback': callback,
'event': 'identity.%s.created' % resource_type
}
self.verify_log_message([expected_log_data])
def test_a_method_callback(self):
class C(object):
def callback(self, *args, **kwargs):
pass
with mock.patch('keystone.notifications.LOG', self.mock_log):
notifications.register_event_callback(
CREATED_OPERATION, 'thing', C.callback)
callback = 'keystone.tests.unit.common.test_notifications.C.callback'
expected_log_data = {
'callback': callback,
'event': 'identity.thing.created'
}
self.verify_log_message([expected_log_data])
def test_a_list_of_callbacks(self):
def callback(*args, **kwargs):
pass
class C(object):
def callback(self, *args, **kwargs):
pass
with mock.patch('keystone.notifications.LOG', self.mock_log):
notifications.register_event_callback(
CREATED_OPERATION, 'thing', [callback, C.callback])
callback_1 = 'keystone.tests.unit.common.test_notifications.callback'
callback_2 = 'keystone.tests.unit.common.test_notifications.C.callback'
expected_log_data = [
{
'callback': callback_1,
'event': 'identity.thing.created'
},
{
'callback': callback_2,
'event': 'identity.thing.created'
},
]
self.verify_log_message(expected_log_data)
def test_an_invalid_callback(self):
        self.assertRaises(TypeError,
                          notifications.register_event_callback,
                          CREATED_OPERATION, 'thing', object())
def test_an_invalid_event(self):
def callback(*args, **kwargs):
pass
self.assertRaises(ValueError,
notifications.register_event_callback,
uuid.uuid4().hex,
'thing',
callback)
| takeshineshiro/keystone | keystone/tests/unit/common/test_notifications.py | Python | apache-2.0 | 46,330 |
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
iLO Driver for managing HP Proliant Gen8 and above servers.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules import agent
from ironic.drivers.modules.ilo import deploy
from ironic.drivers.modules.ilo import management
from ironic.drivers.modules.ilo import power
class IloVirtualMediaIscsiDriver(base.BaseDriver):
"""IloDriver using IloClient interface.
This driver implements the `core` functionality using
    :class:ironic.drivers.modules.ilo.power.IloPower for power management
and
:class:ironic.drivers.modules.ilo.deploy.IloVirtualMediaIscsiDeploy for
deploy.
"""
def __init__(self):
if not importutils.try_import('proliantutils'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_("Unable to import proliantutils library"))
self.power = power.IloPower()
self.deploy = deploy.IloVirtualMediaIscsiDeploy()
self.console = deploy.IloConsoleInterface()
self.management = management.IloManagement()
self.vendor = deploy.VendorPassthru()
class IloVirtualMediaAgentDriver(base.BaseDriver):
"""IloDriver using IloClient interface.
This driver implements the `core` functionality using
:class:ironic.drivers.modules.ilo.power.IloPower for power management
and
:class:ironic.drivers.modules.ilo.deploy.IloVirtualMediaAgentDriver for
deploy.
"""
def __init__(self):
if not importutils.try_import('proliantutils'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_("Unable to import proliantutils library"))
self.power = power.IloPower()
self.deploy = deploy.IloVirtualMediaAgentDeploy()
self.console = deploy.IloConsoleInterface()
self.management = management.IloManagement()
self.vendor = agent.AgentVendorInterface()
| ramineni/myironic | ironic/drivers/ilo.py | Python | apache-2.0 | 2,664 |
#!/usr/bin/python
# Copyright 2014 Google.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for VP8 MPEG mode encoder module."""
import encoder
import optimizer
import unittest
import test_tools
import vp8_mpeg
class TestVp8Mpeg(test_tools.FileUsingCodecTest):
def test_OneBlackFrame(self):
codec = vp8_mpeg.Vp8CodecMpegMode()
my_optimizer = optimizer.Optimizer(codec)
videofile = test_tools.MakeYuvFileWithOneBlankFrame(
'one_black_frame_1024_768_30.yuv')
encoding = my_optimizer.BestEncoding(1000, videofile)
encoding.Execute()
# Most codecs should be good at this.
self.assertLess(50.0, my_optimizer.Score(encoding))
def test_ConfigurationFixups(self):
codec = vp8_mpeg.Vp8CodecMpegMode()
fixups = codec.ConfigurationFixups(
encoder.OptionValueSet(codec.option_set,
'--fixed-q=6 --gold-q=27 --key-q=8'))
self.assertEqual('--fixed-q=6 --gold-q=6 --key-q=6', fixups.ToString())
def test_SuggestedTweakRefersToSameContext(self):
codec = vp8_mpeg.Vp8CodecMpegMode()
my_optimizer = optimizer.Optimizer(codec)
videofile = test_tools.MakeYuvFileWithOneBlankFrame(
'one_black_frame_1024_768_30.yuv')
encoding = my_optimizer.BestEncoding(1000, videofile)
# Fake result.
encoding.result = {'psnr': 42.0, 'bitrate':1000}
next_encoding = codec.SuggestTweak(encoding)
self.assertEqual(encoding.context, next_encoding.context)
if __name__ == '__main__':
unittest.main()
| google/compare-codecs | lib/vp8_mpeg_unittest.py | Python | apache-2.0 | 2,004 |
# Dummy test
import pytest
def test_file1_method1():
print("Hello")
| Humbedooh/ponymail | test_one.py | Python | apache-2.0 | 72 |
# Copyright (c) 2007-2019 UShareSoft, All rights reserved
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__all__ = ['os']
| usharesoft/hammr | hammr/commands/os/__init__.py | Python | apache-2.0 | 651 |
"""Remote control support for Panasonic Viera TV."""
from __future__ import annotations
from homeassistant.components.remote import RemoteEntity
from homeassistant.const import CONF_NAME, STATE_ON
from homeassistant.helpers.entity import DeviceInfo
from .const import (
ATTR_DEVICE_INFO,
ATTR_MANUFACTURER,
ATTR_MODEL_NUMBER,
ATTR_REMOTE,
ATTR_UDN,
DEFAULT_MANUFACTURER,
DEFAULT_MODEL_NUMBER,
DOMAIN,
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Panasonic Viera TV Remote from a config entry."""
config = config_entry.data
remote = hass.data[DOMAIN][config_entry.entry_id][ATTR_REMOTE]
name = config[CONF_NAME]
device_info = config[ATTR_DEVICE_INFO]
async_add_entities([PanasonicVieraRemoteEntity(remote, name, device_info)])
class PanasonicVieraRemoteEntity(RemoteEntity):
"""Representation of a Panasonic Viera TV Remote."""
def __init__(self, remote, name, device_info):
"""Initialize the entity."""
# Save a reference to the imported class
self._remote = remote
self._name = name
self._device_info = device_info
@property
def unique_id(self):
"""Return the unique ID of the device."""
if self._device_info is None:
return None
return self._device_info[ATTR_UDN]
@property
def device_info(self) -> DeviceInfo | None:
"""Return device specific attributes."""
if self._device_info is None:
return None
return DeviceInfo(
identifiers={(DOMAIN, self._device_info[ATTR_UDN])},
manufacturer=self._device_info.get(ATTR_MANUFACTURER, DEFAULT_MANUFACTURER),
model=self._device_info.get(ATTR_MODEL_NUMBER, DEFAULT_MODEL_NUMBER),
name=self._name,
)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def available(self):
"""Return True if the device is available."""
return self._remote.available
@property
def is_on(self):
"""Return true if device is on."""
return self._remote.state == STATE_ON
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._remote.async_turn_on(context=self._context)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._remote.async_turn_off()
async def async_send_command(self, command, **kwargs):
"""Send a command to one device."""
for cmd in command:
await self._remote.async_send_key(cmd)
| jawilson/home-assistant | homeassistant/components/panasonic_viera/remote.py | Python | apache-2.0 | 2,653 |
#!/usr/bin/python2.5
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for use with the Google App Engine Pipeline API."""
__all__ = ["for_name",
"JsonEncoder",
"JsonDecoder",
"JSON_DEFAULTS"]
#pylint: disable=g-bad-name
import datetime
import inspect
import logging
import os
# Relative imports
import simplejson
# pylint: disable=protected-access
def _get_task_target():
"""Get the default target for a pipeline task.
Current version id format is: user_defined_version.minor_version_number
Current module id is just the module's name. It could be "default"
Returns:
A complete target name is of format version.module. If module is the
default module, just version. None if target can not be determined.
"""
# Break circular dependency.
# pylint: disable=g-import-not-at-top
import pipeline
if pipeline._TEST_MODE:
return None
  # Further protect against test cases that don't set env vars
  # properly.
if ("CURRENT_VERSION_ID" not in os.environ or
"CURRENT_MODULE_ID" not in os.environ):
logging.warning("Running Pipeline in non TEST_MODE but important "
"env vars are not set.")
return None
version = os.environ["CURRENT_VERSION_ID"].split(".")[0]
module = os.environ["CURRENT_MODULE_ID"]
if module == "default":
return version
return "%s.%s" % (version, module)
def for_name(fq_name, recursive=False):
"""Find class/function/method specified by its fully qualified name.
Fully qualified can be specified as:
* <module_name>.<class_name>
* <module_name>.<function_name>
* <module_name>.<class_name>.<method_name> (an unbound method will be
returned in this case).
for_name works by doing __import__ for <module_name>, and looks for
<class_name>/<function_name> in module's __dict__/attrs. If fully qualified
name doesn't contain '.', the current module will be used.
Args:
fq_name: fully qualified name of something to find
Returns:
class object.
Raises:
ImportError: when specified module could not be loaded or the class
was not found in the module.
"""
fq_name = str(fq_name)
module_name = __name__
short_name = fq_name
if fq_name.rfind(".") >= 0:
(module_name, short_name) = (fq_name[:fq_name.rfind(".")],
fq_name[fq_name.rfind(".") + 1:])
try:
result = __import__(module_name, None, None, [short_name])
return result.__dict__[short_name]
except KeyError:
# If we're recursively inside a for_name() chain, then we want to raise
# this error as a key error so we can report the actual source of the
# problem. If we're *not* recursively being called, that means the
# module was found and the specific item could not be loaded, and thus
# we want to raise an ImportError directly.
if recursive:
raise
else:
raise ImportError("Could not find '%s' on path '%s'" % (
short_name, module_name))
except ImportError, e:
# module_name is not actually a module. Try for_name for it to figure
# out what's this.
try:
module = for_name(module_name, recursive=True)
if hasattr(module, short_name):
return getattr(module, short_name)
else:
# The module was found, but the function component is missing.
raise KeyError()
except KeyError:
raise ImportError("Could not find '%s' on path '%s'" % (
short_name, module_name))
except ImportError:
# This means recursive import attempts failed, thus we will raise the
# first ImportError we encountered, since it's likely the most accurate.
pass
# Raise the original import error that caused all of this, since it is
# likely the real cause of the overall problem.
raise
def is_generator_function(obj):
"""Return true if the object is a user-defined generator function.
  Generator function objects provide the same attributes as functions.
See isfunction.__doc__ for attributes listing.
Adapted from Python 2.6.
Args:
obj: an object to test.
Returns:
    True if the object is a generator function.
"""
CO_GENERATOR = 0x20
return bool(((inspect.isfunction(obj) or inspect.ismethod(obj)) and
obj.func_code.co_flags & CO_GENERATOR))
class JsonEncoder(simplejson.JSONEncoder):
"""Pipeline customized json encoder."""
TYPE_ID = "__pipeline_json_type"
def default(self, o):
"""Inherit docs."""
if type(o) in JSON_DEFAULTS:
encoder = JSON_DEFAULTS[type(o)][0]
json_struct = encoder(o)
json_struct[self.TYPE_ID] = type(o).__name__
return json_struct
return super(JsonEncoder, self).default(o)
class JsonDecoder(simplejson.JSONDecoder):
"""Pipeline customized json decoder."""
def __init__(self, **kwargs):
if "object_hook" not in kwargs:
kwargs["object_hook"] = self._dict_to_obj
super(JsonDecoder, self).__init__(**kwargs)
def _dict_to_obj(self, d):
"""Converts a dictionary of json object to a Python object."""
if JsonEncoder.TYPE_ID not in d:
return d
obj_type = d.pop(JsonEncoder.TYPE_ID)
if obj_type in _TYPE_IDS:
decoder = JSON_DEFAULTS[_TYPE_IDS[obj_type]][1]
return decoder(d)
else:
      raise TypeError("Invalid type %s." % obj_type)
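# Illustrative round trip using the two classes above (values arbitrary):
#   blob = simplejson.dumps({"when": datetime.datetime(2014, 1, 1)},
#                           cls=JsonEncoder)
#   simplejson.loads(blob, cls=JsonDecoder)
#   # -> {u'when': datetime.datetime(2014, 1, 1, 0, 0)}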
_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
def _json_encode_datetime(o):
"""Json encode a datetime object.
Args:
o: a datetime object.
Returns:
A dict of json primitives.
"""
return {"isostr": o.strftime(_DATETIME_FORMAT)}
def _json_decode_datetime(d):
"""Converts a dict of json primitives to a datetime object."""
return datetime.datetime.strptime(d["isostr"], _DATETIME_FORMAT)
# To extend what Pipeline can json serialize, add to this where
# key is the type and value is a tuple of encoder and decoder function.
JSON_DEFAULTS = {
datetime.datetime: (_json_encode_datetime, _json_decode_datetime),
}
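# For example (hypothetical extension), datetime.date support could be added
# with an entry such as:
#   JSON_DEFAULTS[datetime.date] = (
#       lambda o: {"isostr": o.strftime("%Y-%m-%d")},
#       lambda d: datetime.datetime.strptime(d["isostr"], "%Y-%m-%d").date())
# (_TYPE_IDS below would also need to include the new type.)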
_TYPE_IDS = dict(zip([cls.__name__ for cls in JSON_DEFAULTS],
JSON_DEFAULTS.keys()))
| svn2github/appengine-pipeline | src/pipeline/util.py | Python | apache-2.0 | 6,644 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class crvserver_filterpolicy_binding(base_resource) :
""" Binding class showing the filterpolicy that can be bound to crvserver.
"""
def __init__(self) :
self._policyname = ""
self._priority = 0
self._inherited = ""
self._name = ""
self._targetvserver = ""
self.___count = 0
@property
def priority(self) :
ur"""The priority for the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
ur"""The priority for the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policyname(self) :
ur"""Policies bound to this vserver.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
ur"""Policies bound to this vserver.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def name(self) :
ur"""Name of the cache redirection virtual server to which to bind the cache redirection policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the cache redirection virtual server to which to bind the cache redirection policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def targetvserver(self) :
ur"""Name of the virtual server to which content is forwarded. Applicable only if the policy is a map policy and the cache redirection virtual server is of type REVERSE.
"""
try :
return self._targetvserver
except Exception as e:
raise e
@targetvserver.setter
def targetvserver(self, targetvserver) :
ur"""Name of the virtual server to which content is forwarded. Applicable only if the policy is a map policy and the cache redirection virtual server is of type REVERSE.
"""
try :
self._targetvserver = targetvserver
except Exception as e:
raise e
@property
def inherited(self) :
ur"""On State describes that policy bound is inherited from global binding.<br/>Possible values = ON, OFF.
"""
try :
return self._inherited
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(crvserver_filterpolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.crvserver_filterpolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = crvserver_filterpolicy_binding()
updateresource.name = resource.name
updateresource.policyname = resource.policyname
updateresource.targetvserver = resource.targetvserver
updateresource.priority = resource.priority
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [crvserver_filterpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].policyname = resource[i].policyname
updateresources[i].targetvserver = resource[i].targetvserver
updateresources[i].priority = resource[i].priority
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = crvserver_filterpolicy_binding()
deleteresource.name = resource.name
deleteresource.policyname = resource.policyname
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [crvserver_filterpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
deleteresources[i].policyname = resource[i].policyname
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch crvserver_filterpolicy_binding resources.
"""
try :
obj = crvserver_filterpolicy_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of crvserver_filterpolicy_binding resources.
		Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = crvserver_filterpolicy_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count crvserver_filterpolicy_binding resources configued on NetScaler.
"""
try :
obj = crvserver_filterpolicy_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of crvserver_filterpolicy_binding resources.
		Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = crvserver_filterpolicy_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Inherited:
ON = "ON"
OFF = "OFF"
class crvserver_filterpolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.crvserver_filterpolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.crvserver_filterpolicy_binding = [crvserver_filterpolicy_binding() for _ in range(length)]
| benfinke/ns_python | nssrc/com/citrix/netscaler/nitro/resource/config/cr/crvserver_filterpolicy_binding.py | Python | apache-2.0 | 7,861 |
import os
import logging
import pkg_resources
log = logging.getLogger(__name__)
def register_ew_resources(manager):
manager.register_directory(
'js', pkg_resources.resource_filename('allura', 'lib/widgets/resources/js'))
manager.register_directory(
'css', pkg_resources.resource_filename('allura', 'lib/widgets/resources/css'))
manager.register_directory(
'allura', pkg_resources.resource_filename('allura', 'public/nf'))
for ep in pkg_resources.iter_entry_points('allura'):
try:
manager.register_directory(
'tool/%s' % ep.name.lower(),
pkg_resources.resource_filename(
ep.module_name,
os.path.join('nf', ep.name.lower())))
except ImportError:
log.warning('Cannot import entry point %s', ep)
raise
for ep in pkg_resources.iter_entry_points('allura.theme'):
try:
theme = ep.load()
theme.register_ew_resources(manager, ep.name)
except ImportError:
log.warning('Cannot import entry point %s', ep)
raise
| Bitergia/allura | Allura/allura/config/resources.py | Python | apache-2.0 | 1,135 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class appfwtransactionrecords(base_resource) :
""" Configuration for Application firewall transaction record resource. """
#------- Read only Parameter ---------
def __init__(self) :
self._httptransactionid = 0
self._packetengineid = 0
self._appfwsessionid = ""
self._profilename = ""
self._url = ""
self._clientip = ""
self._destip = ""
self._starttime = ""
self._endtime = ""
self._requestcontentlength = 0
self._requestyields = 0
self._requestmaxprocessingtime = 0
self._responsecontentlength = 0
self._responseyields = 0
self._responsemaxprocessingtime = 0
self.___count = 0
@property
def httptransactionid(self) :
ur"""The http transaction identifier.
"""
try :
return self._httptransactionid
except Exception as e:
raise e
@property
def packetengineid(self) :
ur"""The packet engine identifier.
"""
try :
return self._packetengineid
except Exception as e:
raise e
@property
def appfwsessionid(self) :
ur"""The session identifier set by the Application Firewall to track the user session.
"""
try :
return self._appfwsessionid
except Exception as e:
raise e
@property
def profilename(self) :
ur"""Application Firewall profile name.
"""
try :
return self._profilename
except Exception as e:
raise e
@property
def url(self) :
ur"""Request URL.
"""
try :
return self._url
except Exception as e:
raise e
@property
def clientip(self) :
ur"""The IP address of client.
"""
try :
return self._clientip
except Exception as e:
raise e
@property
def destip(self) :
ur"""The IP address of destination.
"""
try :
return self._destip
except Exception as e:
raise e
@property
def starttime(self) :
ur"""Conveys time at which request processing started.
"""
try :
return self._starttime
except Exception as e:
raise e
@property
def endtime(self) :
ur"""Conveys time at which request processing end.
"""
try :
return self._endtime
except Exception as e:
raise e
@property
def requestcontentlength(self) :
ur"""The content length of request.
"""
try :
return self._requestcontentlength
except Exception as e:
raise e
@property
def requestyields(self) :
ur"""The number of times yielded during request processing to send heart beat packets.
"""
try :
return self._requestyields
except Exception as e:
raise e
@property
def requestmaxprocessingtime(self) :
ur"""The maximum processing time across yields during request processing.
"""
try :
return self._requestmaxprocessingtime
except Exception as e:
raise e
@property
def responsecontentlength(self) :
ur"""The content length of response.
"""
try :
return self._responsecontentlength
except Exception as e:
raise e
@property
def responseyields(self) :
ur"""The number of times yielded during response processing to send heart beat packets.
"""
try :
return self._responseyields
except Exception as e:
raise e
@property
def responsemaxprocessingtime(self) :
ur"""The maximum processing time across yields during response processing.
"""
try :
return self._responsemaxprocessingtime
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(appfwtransactionrecords_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.appfwtransactionrecords
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the appfwtransactionrecords resources that are configured on netscaler.
"""
try :
if not name :
obj = appfwtransactionrecords()
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
ur""" Use this API to fetch filtered set of appfwtransactionrecords resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = appfwtransactionrecords()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
ur""" Use this API to count the appfwtransactionrecords resources configured on NetScaler.
"""
try :
obj = appfwtransactionrecords()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
ur""" Use this API to count filtered the set of appfwtransactionrecords resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = appfwtransactionrecords()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class appfwtransactionrecords_response(base_response) :
def __init__(self, length=1) :
self.appfwtransactionrecords = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.appfwtransactionrecords = [appfwtransactionrecords() for _ in range(length)]
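# A hypothetical usage sketch (not from the original SDK file); the NetScaler
# address, credentials and filter value below are assumptions.
#
#   from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
#
#   client = nitro_service("192.0.2.10", "http")
#   client.login("nsroot", "nsroot")
#   records = appfwtransactionrecords.get(client)       # fetch all records
#   total = appfwtransactionrecords.count(client)       # count them
#   subset = appfwtransactionrecords.get_filtered(client, "profilename:my_profile")
#   client.logout()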
|
benfinke/ns_python
|
nssrc/com/citrix/netscaler/nitro/resource/config/appfw/appfwtransactionrecords.py
|
Python
|
apache-2.0
| 7,068
|
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import time
from fuel_agent_ci.tests import base
REGULAR_PARTED_INFO = {
'sda': """BYT;
/dev/sda:10240MiB:scsi:512:512:gpt:QEMU QEMU HARDDISK;
1:0.02MiB:1.00MiB:0.98MiB:free;
1:1.00MiB:25.0MiB:24.0MiB::primary:bios_grub;
2:25.0MiB:225MiB:200MiB::primary:;
3:225MiB:425MiB:200MiB::primary:;
4:425MiB:625MiB:200MiB:ext2:primary:;
5:625MiB:3958MiB:3333MiB::primary:;
6:3958MiB:4758MiB:800MiB::primary:;
7:4758MiB:4778MiB:20.0MiB::primary:;
1:4778MiB:10240MiB:5462MiB:free;""",
'sdb': """BYT;
/dev/sdb:10240MiB:scsi:512:512:gpt:QEMU QEMU HARDDISK;
1:0.02MiB:1.00MiB:0.98MiB:free;
1:1.00MiB:25.0MiB:24.0MiB::primary:bios_grub;
2:25.0MiB:225MiB:200MiB::primary:;
3:225MiB:425MiB:200MiB::primary:;
4:425MiB:4869MiB:4444MiB::primary:;
1:4869MiB:10240MiB:5371MiB:free;""",
'sdc': """BYT;
/dev/sdc:10240MiB:scsi:512:512:gpt:QEMU QEMU HARDDISK;
1:0.02MiB:1.00MiB:0.98MiB:free;
1:1.00MiB:25.0MiB:24.0MiB::primary:bios_grub;
2:25.0MiB:225MiB:200MiB::primary:;
3:225MiB:425MiB:200MiB::primary:;
4:425MiB:2396MiB:1971MiB::primary:;
1:2396MiB:10240MiB:7844MiB:free;"""
}
CEPH_PARTED_INFO = {
'sda': """BYT;
/dev/sda:10240MiB:scsi:512:512:gpt:QEMU QEMU HARDDISK;
1:0.02MiB:1.00MiB:0.98MiB:free;
1:1.00MiB:25.0MiB:24.0MiB::primary:bios_grub;
2:25.0MiB:225MiB:200MiB::primary:;
3:225MiB:425MiB:200MiB::primary:;
4:425MiB:625MiB:200MiB:ext2:primary:;
5:625MiB:3958MiB:3333MiB::primary:;
6:3958MiB:4758MiB:800MiB::primary:;
7:4758MiB:8091MiB:3333MiB::primary:;
8:8091MiB:8111MiB:20.0MiB::primary:;
1:8111MiB:10240MiB:2129MiB:free;""",
'sdb': """BYT;
/dev/sdb:10240MiB:scsi:512:512:gpt:QEMU QEMU HARDDISK;
1:0.02MiB:1.00MiB:0.98MiB:free;
1:1.00MiB:25.0MiB:24.0MiB::primary:bios_grub;
2:25.0MiB:225MiB:200MiB::primary:;
3:225MiB:425MiB:200MiB::primary:;
4:425MiB:4869MiB:4444MiB::primary:;
5:4869MiB:8202MiB:3333MiB::primary:;
1:8202MiB:10240MiB:2038MiB:free;""",
'sdc': """BYT;
/dev/sdc:10240MiB:scsi:512:512:gpt:QEMU QEMU HARDDISK;
1:0.02MiB:1.00MiB:0.98MiB:free;
1:1.00MiB:25.0MiB:24.0MiB::primary:bios_grub;
2:25.0MiB:225MiB:200MiB::primary:;
3:225MiB:425MiB:200MiB::primary:;
4:425MiB:2396MiB:1971MiB::primary:;
5:2396MiB:5729MiB:3333MiB::primary:;
1:5729MiB:10240MiB:4511MiB:free;"""
}
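# NOTE (not from the original test): the strings above mimic `parted -m`
# machine-readable output. After the "BYT;" header and the disk summary line,
# each partition row is "number:start:end:size:filesystem:name:flags;", while
# free space is reported as "number:start:end:size:free;".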
class TestPartition(base.BaseFuelAgentCITest):
def compare_output(self, expected, actual):
def _split_strip_to_lines(data):
return [s.strip() for s in data.split('\n')]
return self.assertEqual(_split_strip_to_lines(expected),
_split_strip_to_lines(actual))
def _test_partitioning(self, canned_parted_info):
self.ssh.run('partition')
#FIXME(agordeev): mdadm resyncing time
time.sleep(10)
for disk_name, expected_parted_info in canned_parted_info.items():
actual_parted_info = self.ssh.run(
'parted -s /dev/%s -m unit MiB print free' % disk_name)
self.compare_output(expected_parted_info, actual_parted_info)
actual_guid = self.ssh.run(
'sgdisk -i 4 /dev/sda').split('\n')[0].split()[3]
self.assertEqual("0FC63DAF-8483-4772-8E79-3D69D8477DE4", actual_guid)
actual_md_output = self.ssh.run(
'mdadm --detail %s' % '/dev/md0')
#NOTE(agordeev): filter out lines with time stamps and UUID
def _filter_mdadm_output(output):
return "\n".join([s for s in output.split('\n')
if not any(('Time' in s, 'UUID' in s))])
expected_md = """/dev/md0:
Version : 1.2
Raid Level : raid1
Array Size : 204608 (199.85 MiB 209.52 MB)
Used Dev Size : 204608 (199.85 MiB 209.52 MB)
Raid Devices : 3
Total Devices : 3
Persistence : Superblock is persistent
State : active
Active Devices : 3
Working Devices : 3
Failed Devices : 0
Spare Devices : 0
Name : bootstrap:0 (local to host bootstrap)
Events : 18
Number Major Minor RaidDevice State
0 8 3 0 active sync /dev/sda3
1 8 19 1 active sync /dev/sdb3
2 8 35 2 active sync /dev/sdc3"""
self.compare_output(expected_md,
_filter_mdadm_output(actual_md_output))
pvdisplay_expected_output = """/dev/sda5;os;3204.00m;3333.00m
/dev/sda6;image;668.00m;800.00m
/dev/sdb4;image;4312.00m;4444.00m
/dev/sdc4;image;1840.00m;1971.00m"""
pvdisplay_actual_output = self.ssh.run(
'pvdisplay -C --noheading --units m --options '
'pv_name,vg_name,pv_size,dev_size --separator ";"')
self.compare_output(pvdisplay_expected_output, pvdisplay_actual_output)
vgdisplay_expected_output = """image;6820.00m;5060.00m
os;3204.00m;1260.00m"""
vgdisplay_actual_output = self.ssh.run(
'vgdisplay -C --noheading --units m --options '
'vg_name,vg_size,vg_free --separator ";"')
self.compare_output(vgdisplay_expected_output, vgdisplay_actual_output)
lvdisplay_expected_output = """glance;1760.00m;image
root;1900.00m;os
swap;44.00m;os"""
lvdisplay_actual_output = self.ssh.run(
'lvdisplay -C --noheading --units m --options '
'lv_name,lv_size,vg_name --separator ";"')
self.compare_output(lvdisplay_expected_output, lvdisplay_actual_output)
expected_fs_data = [('/dev/md0', 'ext2', ''),
('/dev/sda4', 'ext2', ''),
('/dev/mapper/os-root', 'ext4', ''),
('/dev/mapper/os-swap', 'swap', ''),
('/dev/mapper/image-glance', 'xfs', '')]
for device, fs_type, label in expected_fs_data:
fs_type_output = self.ssh.run(
'blkid -o value -s TYPE %s' % device)
self.assertEqual(fs_type, fs_type_output)
label_output = self.ssh.run(
'blkid -o value -s LABEL %s' % device)
self.assertEqual(label, label_output)
#TODO(agordeev): check fs options and mount point
def test_do_partitioning_gpt(self):
provision_data = self.render_template(
template_data={
'IP': self.dhcp_hosts[0]['ip'],
'MAC': self.dhcp_hosts[0]['mac'],
'MASTER_IP': self.net.ip,
'MASTER_HTTP_PORT': self.http.port,
'PROFILE': 'ubuntu_1404_x86_64'
},
template_name='provision.json'
)
self.ssh.put_content(provision_data, '/tmp/provision.json')
self._test_partitioning(REGULAR_PARTED_INFO)
def test_do_ceph_partitioning(self):
provision_data = self.render_template(
template_data={
'IP': self.dhcp_hosts[0]['ip'],
'MAC': self.dhcp_hosts[0]['mac'],
'MASTER_IP': self.net.ip,
'MASTER_HTTP_PORT': self.http.port,
'PROFILE': 'ubuntu_1404_x86_64'
},
template_name='provision_ceph.json'
)
self.ssh.put_content(provision_data, '/tmp/provision.json')
self._test_partitioning(CEPH_PARTED_INFO)
# NOTE(agordeev): checking if GUIDs are correct for ceph partitions
ceph_partitions = {'sda': 7, 'sdb': 5, 'sdc': 5}
for disk_name, partition_num in ceph_partitions.items():
actual_guid = self.ssh.run(
'sgdisk -i %s /dev/%s' % (partition_num, disk_name)).\
split('\n')[0].split()[3]
self.assertEqual('4fbd7e29-9d25-41b8-afd0-062c0ceff05d'.upper(),
actual_guid)
# FIXME(kozhukalov): check if ceph journals are created and their GUIDs
|
zhaochao/fuel-web
|
fuel_agent_ci/fuel_agent_ci/tests/test_partition.py
|
Python
|
apache-2.0
| 9,086
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import timedelta
import pytest
from sqlalchemy.orm import eagerload
from airflow import models
from airflow.api.common.mark_tasks import (
_create_dagruns,
_DagRunInfo,
set_dag_run_state_to_failed,
set_dag_run_state_to_running,
set_dag_run_state_to_success,
set_state,
)
from airflow.models import DagRun
from airflow.utils import timezone
from airflow.utils.dates import days_ago
from airflow.utils.session import create_session, provide_session
from airflow.utils.state import State
from airflow.utils.types import DagRunType
from tests.test_utils.db import clear_db_runs
DEV_NULL = "/dev/null"
@pytest.fixture(scope="module")
def dagbag():
from airflow.models.dagbag import DagBag
# Ensure the DAGs we are looking at from the DB are up-to-date
non_serialized_dagbag = DagBag(read_dags_from_db=False, include_examples=False)
non_serialized_dagbag.sync_to_db()
return DagBag(read_dags_from_db=True)
class TestMarkTasks:
@pytest.fixture(scope="class", autouse=True, name="create_dags")
@classmethod
def create_dags(cls, dagbag):
cls.dag1 = dagbag.get_dag('miscellaneous_test_dag')
cls.dag2 = dagbag.get_dag('example_subdag_operator')
cls.dag3 = dagbag.get_dag('example_trigger_target_dag')
cls.execution_dates = [days_ago(2), days_ago(1)]
start_date3 = cls.dag3.start_date
cls.dag3_execution_dates = [
start_date3,
start_date3 + timedelta(days=1),
start_date3 + timedelta(days=2),
]
@pytest.fixture(autouse=True)
def setup(self):
clear_db_runs()
drs = _create_dagruns(
self.dag1,
[_DagRunInfo(d, (d, d + timedelta(days=1))) for d in self.execution_dates],
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
for dr in drs:
dr.dag = self.dag1
drs = _create_dagruns(
self.dag2,
[
_DagRunInfo(
self.dag2.start_date,
(self.dag2.start_date, self.dag2.start_date + timedelta(days=1)),
),
],
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
for dr in drs:
dr.dag = self.dag2
drs = _create_dagruns(
self.dag3,
[_DagRunInfo(d, (d, d)) for d in self.dag3_execution_dates],
state=State.SUCCESS,
run_type=DagRunType.MANUAL,
)
for dr in drs:
dr.dag = self.dag3
yield
clear_db_runs()
@staticmethod
def snapshot_state(dag, execution_dates):
TI = models.TaskInstance
DR = models.DagRun
with create_session() as session:
return (
session.query(TI)
.join(TI.dag_run)
.options(eagerload(TI.dag_run))
.filter(TI.dag_id == dag.dag_id, DR.execution_date.in_(execution_dates))
.all()
)
@provide_session
def verify_state(self, dag, task_ids, execution_dates, state, old_tis, session=None):
TI = models.TaskInstance
DR = models.DagRun
tis = (
session.query(TI)
.join(TI.dag_run)
.options(eagerload(TI.dag_run))
.filter(TI.dag_id == dag.dag_id, DR.execution_date.in_(execution_dates))
.all()
)
assert len(tis) > 0
for ti in tis:
assert ti.operator == dag.get_task(ti.task_id).task_type
if ti.task_id in task_ids and ti.execution_date in execution_dates:
assert ti.state == state
if state in State.finished:
assert ti.end_date is not None
else:
for old_ti in old_tis:
if old_ti.task_id == ti.task_id and old_ti.execution_date == ti.execution_date:
assert ti.state == old_ti.state
def test_mark_tasks_now(self):
# set one task to success but do not commit
snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
dr = DagRun.find(dag_id=self.dag1.dag_id, execution_date=self.execution_dates[0])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=False,
state=State.SUCCESS,
commit=False,
)
assert len(altered) == 1
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]], None, snapshot)
# set one and only one task to success
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 1
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]], State.SUCCESS, snapshot)
# set no tasks
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 0
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]], State.SUCCESS, snapshot)
# set task to other than success
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=False,
state=State.FAILED,
commit=True,
)
assert len(altered) == 1
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]], State.FAILED, snapshot)
# don't alter other tasks
snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_0")
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 1
self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]], State.SUCCESS, snapshot)
# set one task as FAILED. dag3 has schedule_interval None
snapshot = TestMarkTasks.snapshot_state(self.dag3, self.dag3_execution_dates)
task = self.dag3.get_task("run_this")
dr = DagRun.find(dag_id=self.dag3.dag_id, execution_date=self.dag3_execution_dates[1])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=False,
state=State.FAILED,
commit=True,
)
# exactly one TaskInstance should have been altered
assert len(altered) == 1
# task should have been marked as failed
self.verify_state(self.dag3, [task.task_id], [self.dag3_execution_dates[1]], State.FAILED, snapshot)
# tasks on other days should be unchanged
self.verify_state(self.dag3, [task.task_id], [self.dag3_execution_dates[0]], None, snapshot)
self.verify_state(self.dag3, [task.task_id], [self.dag3_execution_dates[2]], None, snapshot)
def test_mark_downstream(self):
# test downstream
snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
relatives = task.get_flat_relatives(upstream=False)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
dr = DagRun.find(dag_id=self.dag1.dag_id, execution_date=self.execution_dates[0])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=True,
future=False,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 3
self.verify_state(self.dag1, task_ids, [self.execution_dates[0]], State.SUCCESS, snapshot)
def test_mark_upstream(self):
# test upstream
snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("run_after_loop")
dr = DagRun.find(dag_id=self.dag1.dag_id, execution_date=self.execution_dates[0])[0]
relatives = task.get_flat_relatives(upstream=True)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=True,
downstream=False,
future=False,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 4
self.verify_state(self.dag1, task_ids, [self.execution_dates[0]], State.SUCCESS, snapshot)
def test_mark_tasks_future(self):
# set one task to success towards end of scheduled dag runs
snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
dr = DagRun.find(dag_id=self.dag1.dag_id, execution_date=self.execution_dates[0])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=True,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 2
self.verify_state(self.dag1, [task.task_id], self.execution_dates, State.SUCCESS, snapshot)
snapshot = TestMarkTasks.snapshot_state(self.dag3, self.dag3_execution_dates)
task = self.dag3.get_task("run_this")
dr = DagRun.find(dag_id=self.dag3.dag_id, execution_date=self.dag3_execution_dates[1])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=True,
past=False,
state=State.FAILED,
commit=True,
)
assert len(altered) == 2
self.verify_state(self.dag3, [task.task_id], [self.dag3_execution_dates[0]], None, snapshot)
self.verify_state(self.dag3, [task.task_id], self.dag3_execution_dates[1:], State.FAILED, snapshot)
def test_mark_tasks_past(self):
# set one task to success towards end of scheduled dag runs
snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
task = self.dag1.get_task("runme_1")
dr = DagRun.find(dag_id=self.dag1.dag_id, execution_date=self.execution_dates[1])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=True,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 2
self.verify_state(self.dag1, [task.task_id], self.execution_dates, State.SUCCESS, snapshot)
snapshot = TestMarkTasks.snapshot_state(self.dag3, self.dag3_execution_dates)
task = self.dag3.get_task("run_this")
dr = DagRun.find(dag_id=self.dag3.dag_id, execution_date=self.dag3_execution_dates[1])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=True,
state=State.FAILED,
commit=True,
)
assert len(altered) == 2
self.verify_state(self.dag3, [task.task_id], self.dag3_execution_dates[:2], State.FAILED, snapshot)
self.verify_state(self.dag3, [task.task_id], [self.dag3_execution_dates[2]], None, snapshot)
def test_mark_tasks_multiple(self):
# set multiple tasks to success
snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
tasks = [self.dag1.get_task("runme_1"), self.dag1.get_task("runme_2")]
dr = DagRun.find(dag_id=self.dag1.dag_id, execution_date=self.execution_dates[0])[0]
altered = set_state(
tasks=tasks,
dag_run_id=dr.run_id,
upstream=False,
downstream=False,
future=False,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 2
self.verify_state(
self.dag1, [task.task_id for task in tasks], [self.execution_dates[0]], State.SUCCESS, snapshot
)
    # TODO: this backend restriction should be removed once a fix is found.
    # We restrict it here because this test case works with Postgres & SQLite
    # but not with MySQL.
@pytest.mark.backend("sqlite", "postgres")
def test_mark_tasks_subdag(self):
# set one task to success towards end of scheduled dag runs
task = self.dag2.get_task("section-1")
relatives = task.get_flat_relatives(upstream=False)
task_ids = [t.task_id for t in relatives]
task_ids.append(task.task_id)
dr = DagRun.find(dag_id=self.dag2.dag_id, execution_date=self.execution_dates[0])[0]
altered = set_state(
tasks=[task],
dag_run_id=dr.run_id,
upstream=False,
downstream=True,
future=False,
past=False,
state=State.SUCCESS,
commit=True,
)
assert len(altered) == 14
# cannot use snapshot here as that will require drilling down the
# sub dag tree essentially recreating the same code as in the
# tested logic.
self.verify_state(self.dag2, task_ids, [self.execution_dates[0]], State.SUCCESS, [])
class TestMarkDAGRun:
INITIAL_TASK_STATES = {
'runme_0': State.SUCCESS,
'runme_1': State.SKIPPED,
'runme_2': State.UP_FOR_RETRY,
'also_run_this': State.QUEUED,
'run_after_loop': State.RUNNING,
'run_this_last': State.FAILED,
}
@classmethod
def setup_class(cls):
dagbag = models.DagBag(include_examples=True, read_dags_from_db=False)
cls.dag1 = dagbag.dags['miscellaneous_test_dag']
cls.dag1.sync_to_db()
cls.dag2 = dagbag.dags['example_subdag_operator']
cls.dag2.sync_to_db()
cls.execution_dates = [days_ago(2), days_ago(1), days_ago(0)]
def setup_method(self):
clear_db_runs()
def teardown_method(self) -> None:
clear_db_runs()
def _get_num_tasks_with_starting_state(self, state: State, inclusion: bool):
"""
        If ``inclusion=True``, get the number of tasks with initial state ``state``.
        Otherwise, get the number of tasks with initial state not equal to ``state``.
:param state: State to compare against
:param inclusion: whether to look for inclusion or exclusion
:return: number of tasks meeting criteria
"""
states = self.INITIAL_TASK_STATES.values()
def compare(x, y):
return x == y if inclusion else x != y
return len([s for s in states if compare(s, state)])
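    # A small worked example of the helper above (not from the original test),
    # using the INITIAL_TASK_STATES defined for this class, which contain one
    # SUCCESS task out of six:
    #   self._get_num_tasks_with_starting_state(State.SUCCESS, inclusion=True)   # -> 1
    #   self._get_num_tasks_with_starting_state(State.SUCCESS, inclusion=False)  # -> 5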
def _set_default_task_instance_states(self, dr):
for task_id, state in self.INITIAL_TASK_STATES.items():
dr.get_task_instance(task_id).set_state(state)
def _verify_task_instance_states_remain_default(self, dr):
for task_id, state in self.INITIAL_TASK_STATES.items():
assert dr.get_task_instance(task_id).state == state
@provide_session
def _verify_task_instance_states(self, dag, date, state, session=None):
TI = models.TaskInstance
tis = session.query(TI).filter(TI.dag_id == dag.dag_id, TI.execution_date == date)
for ti in tis:
assert ti.state == state
def _create_test_dag_run(self, state, date):
return self.dag1.create_dagrun(
run_type=DagRunType.MANUAL, state=state, start_date=date, execution_date=date
)
def _verify_dag_run_state(self, dag, date, state):
drs = models.DagRun.find(dag_id=dag.dag_id, execution_date=date)
dr = drs[0]
assert dr.get_state() == state
@provide_session
def _verify_dag_run_dates(self, dag, date, state, middle_time, session=None):
# When target state is RUNNING, we should set start_date,
# otherwise we should set end_date.
DR = DagRun
dr = session.query(DR).filter(DR.dag_id == dag.dag_id, DR.execution_date == date).one()
if state == State.RUNNING:
# Since the DAG is running, the start_date must be updated after creation
assert dr.start_date > middle_time
# If the dag is still running, we don't have an end date
assert dr.end_date is None
else:
# If the dag is not running, there must be an end time
assert dr.start_date < middle_time
assert dr.end_date > middle_time
def test_set_running_dag_run_to_success(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.RUNNING, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_success(dag=self.dag1, run_id=dr.run_id, commit=True)
# All except the SUCCESS task should be altered.
expected = self._get_num_tasks_with_starting_state(State.SUCCESS, inclusion=False)
assert len(altered) == expected
self._verify_dag_run_state(self.dag1, date, State.SUCCESS)
self._verify_task_instance_states(self.dag1, date, State.SUCCESS)
self._verify_dag_run_dates(self.dag1, date, State.SUCCESS, middle_time)
def test_set_running_dag_run_to_failed(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.RUNNING, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_failed(dag=self.dag1, run_id=dr.run_id, commit=True)
# Only running task should be altered.
expected = self._get_num_tasks_with_starting_state(State.RUNNING, inclusion=True)
assert len(altered) == expected
self._verify_dag_run_state(self.dag1, date, State.FAILED)
assert dr.get_task_instance('run_after_loop').state == State.FAILED
self._verify_dag_run_dates(self.dag1, date, State.FAILED, middle_time)
def test_set_running_dag_run_to_running(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.RUNNING, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_running(dag=self.dag1, run_id=dr.run_id, commit=True)
# None of the tasks should be altered, only the dag itself
assert len(altered) == 0
self._verify_dag_run_state(self.dag1, date, State.RUNNING)
self._verify_task_instance_states_remain_default(dr)
self._verify_dag_run_dates(self.dag1, date, State.RUNNING, middle_time)
def test_set_success_dag_run_to_success(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.SUCCESS, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_success(dag=self.dag1, run_id=dr.run_id, commit=True)
# All except the SUCCESS task should be altered.
expected = self._get_num_tasks_with_starting_state(State.SUCCESS, inclusion=False)
assert len(altered) == expected
self._verify_dag_run_state(self.dag1, date, State.SUCCESS)
self._verify_task_instance_states(self.dag1, date, State.SUCCESS)
self._verify_dag_run_dates(self.dag1, date, State.SUCCESS, middle_time)
def test_set_success_dag_run_to_failed(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.SUCCESS, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_failed(dag=self.dag1, run_id=dr.run_id, commit=True)
# Only running task should be altered.
expected = self._get_num_tasks_with_starting_state(State.RUNNING, inclusion=True)
assert len(altered) == expected
self._verify_dag_run_state(self.dag1, date, State.FAILED)
assert dr.get_task_instance('run_after_loop').state == State.FAILED
self._verify_dag_run_dates(self.dag1, date, State.FAILED, middle_time)
def test_set_success_dag_run_to_running(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.SUCCESS, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_running(dag=self.dag1, run_id=dr.run_id, commit=True)
# None of the tasks should be altered, but only the dag object should be changed
assert len(altered) == 0
self._verify_dag_run_state(self.dag1, date, State.RUNNING)
self._verify_task_instance_states_remain_default(dr)
self._verify_dag_run_dates(self.dag1, date, State.RUNNING, middle_time)
def test_set_failed_dag_run_to_success(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.SUCCESS, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_success(dag=self.dag1, run_id=dr.run_id, commit=True)
# All except the SUCCESS task should be altered.
expected = self._get_num_tasks_with_starting_state(State.SUCCESS, inclusion=False)
assert len(altered) == expected
self._verify_dag_run_state(self.dag1, date, State.SUCCESS)
self._verify_task_instance_states(self.dag1, date, State.SUCCESS)
self._verify_dag_run_dates(self.dag1, date, State.SUCCESS, middle_time)
def test_set_failed_dag_run_to_failed(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.SUCCESS, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_failed(dag=self.dag1, run_id=dr.run_id, commit=True)
# Only running task should be altered.
expected = self._get_num_tasks_with_starting_state(State.RUNNING, inclusion=True)
assert len(altered) == expected
self._verify_dag_run_state(self.dag1, date, State.FAILED)
assert dr.get_task_instance('run_after_loop').state == State.FAILED
self._verify_dag_run_dates(self.dag1, date, State.FAILED, middle_time)
def test_set_failed_dag_run_to_running(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.SUCCESS, date)
middle_time = timezone.utcnow()
self._set_default_task_instance_states(dr)
altered = set_dag_run_state_to_running(dag=self.dag1, run_id=dr.run_id, commit=True)
# None of the tasks should be altered, since we've only altered the DAG itself
assert len(altered) == 0
self._verify_dag_run_state(self.dag1, date, State.RUNNING)
self._verify_task_instance_states_remain_default(dr)
self._verify_dag_run_dates(self.dag1, date, State.RUNNING, middle_time)
def test_set_state_without_commit(self):
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.RUNNING, date)
self._set_default_task_instance_states(dr)
will_be_altered = set_dag_run_state_to_running(dag=self.dag1, run_id=dr.run_id, commit=False)
# None of the tasks will be altered.
assert len(will_be_altered) == 0
self._verify_dag_run_state(self.dag1, date, State.RUNNING)
self._verify_task_instance_states_remain_default(dr)
will_be_altered = set_dag_run_state_to_failed(dag=self.dag1, run_id=dr.run_id, commit=False)
# Only the running task should be altered.
expected = self._get_num_tasks_with_starting_state(State.RUNNING, inclusion=True)
assert len(will_be_altered) == expected
self._verify_dag_run_state(self.dag1, date, State.RUNNING)
self._verify_task_instance_states_remain_default(dr)
will_be_altered = set_dag_run_state_to_success(dag=self.dag1, run_id=dr.run_id, commit=False)
# All except the SUCCESS task should be altered.
expected = self._get_num_tasks_with_starting_state(State.SUCCESS, inclusion=False)
assert len(will_be_altered) == expected
self._verify_dag_run_state(self.dag1, date, State.RUNNING)
self._verify_task_instance_states_remain_default(dr)
@provide_session
def test_set_state_with_multiple_dagruns(self, session=None):
self.dag2.create_dagrun(
run_type=DagRunType.MANUAL,
state=State.FAILED,
execution_date=self.execution_dates[0],
session=session,
)
dr2 = self.dag2.create_dagrun(
run_type=DagRunType.MANUAL,
state=State.FAILED,
execution_date=self.execution_dates[1],
session=session,
)
self.dag2.create_dagrun(
run_type=DagRunType.MANUAL,
state=State.RUNNING,
execution_date=self.execution_dates[2],
session=session,
)
altered = set_dag_run_state_to_success(dag=self.dag2, run_id=dr2.run_id, commit=True)
# Recursively count number of tasks in the dag
def count_dag_tasks(dag):
count = len(dag.tasks)
subdag_counts = [count_dag_tasks(subdag) for subdag in dag.subdags]
count += sum(subdag_counts)
return count
assert len(altered) == count_dag_tasks(self.dag2)
self._verify_dag_run_state(self.dag2, self.execution_dates[1], State.SUCCESS)
# Make sure other dag status are not changed
models.DagRun.find(dag_id=self.dag2.dag_id, execution_date=self.execution_dates[0])
self._verify_dag_run_state(self.dag2, self.execution_dates[0], State.FAILED)
models.DagRun.find(dag_id=self.dag2.dag_id, execution_date=self.execution_dates[2])
self._verify_dag_run_state(self.dag2, self.execution_dates[2], State.RUNNING)
def test_set_dag_run_state_edge_cases(self):
# Dag does not exist
altered = set_dag_run_state_to_success(dag=None, execution_date=self.execution_dates[0])
assert len(altered) == 0
altered = set_dag_run_state_to_failed(dag=None, execution_date=self.execution_dates[0])
assert len(altered) == 0
altered = set_dag_run_state_to_running(dag=None, execution_date=self.execution_dates[0])
assert len(altered) == 0
# No dag_run_id
altered = set_dag_run_state_to_success(dag=self.dag1, run_id=None)
assert len(altered) == 0
altered = set_dag_run_state_to_failed(dag=self.dag1, run_id=None)
assert len(altered) == 0
altered = set_dag_run_state_to_running(dag=self.dag1, run_id=None)
assert len(altered) == 0
        # This will throw ValueError since the given dag_run_id does not
        # exist for this DAG.
with pytest.raises(ValueError):
set_dag_run_state_to_success(dag=self.dag2, run_id='dag_run_id_that_does_not_exist')
# DagRun does not exist
# This will throw ValueError since dag.last_dagrun does not exist
with pytest.raises(ValueError):
set_dag_run_state_to_success(dag=self.dag2, run_id='dag_run_id_that_does_not_exist')
def test_set_dag_run_state_to_failed_no_running_tasks(self):
"""
set_dag_run_state_to_failed when there are no running tasks to update
"""
date = self.execution_dates[0]
dr = self._create_test_dag_run(State.SUCCESS, date)
for task in self.dag1.tasks:
dr.get_task_instance(task.task_id).set_state(State.SUCCESS)
set_dag_run_state_to_failed(dag=self.dag1, run_id=dr.run_id)
def tearDown(self):
self.dag1.clear()
self.dag2.clear()
with create_session() as session:
session.query(models.DagRun).delete()
session.query(models.TaskInstance).delete()
|
Acehaidrey/incubator-airflow
|
tests/api/common/test_mark_tasks.py
|
Python
|
apache-2.0
| 29,540
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A word-counting workflow using the DataFrame API."""
# pytype: skip-file
import argparse
import logging
import apache_beam as beam
from apache_beam.dataframe.convert import to_dataframe
from apache_beam.dataframe.convert import to_pcollection
from apache_beam.io import ReadFromText
from apache_beam.options.pipeline_options import PipelineOptions
def run(argv=None):
"""Main entry point; defines and runs the wordcount pipeline."""
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'--input',
dest='input',
default='gs://dataflow-samples/shakespeare/kinglear.txt',
help='Input file to process.')
parser.add_argument(
'--output',
dest='output',
required=True,
help='Output file to write results to.')
known_args, pipeline_args = parser.parse_known_args(argv)
# Import this here to avoid pickling the main session.
import re
# The pipeline will be run on exiting the with block.
with beam.Pipeline(options=PipelineOptions(pipeline_args)) as p:
# Read the text file[pattern] into a PCollection.
lines = p | 'Read' >> ReadFromText(known_args.input)
words = (
lines
| 'Split' >> beam.FlatMap(
lambda line: re.findall(r'[\w]+', line)).with_output_types(str)
# Map to Row objects to generate a schema suitable for conversion
# to a dataframe.
| 'ToRows' >> beam.Map(lambda word: beam.Row(word=word)))
df = to_dataframe(words)
df['count'] = 1
counted = df.groupby('word').sum()
counted.to_csv(known_args.output)
# Deferred DataFrames can also be converted back to schema'd PCollections
counted_pc = to_pcollection(counted, include_indexes=True)
# Print out every word that occurred >50 times
_ = (
counted_pc
| beam.Filter(lambda row: row.count > 50)
| beam.Map(lambda row: f'{row.word}: {row.count}')
| beam.Map(print))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
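# A hypothetical invocation (the output path is an assumption); --input defaults
# to the public King Lear text on GCS, and the result is written as CSV:
#
#   python wordcount.py --output /tmp/wordcount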
|
lukecwik/incubator-beam
|
sdks/python/apache_beam/examples/dataframe/wordcount.py
|
Python
|
apache-2.0
| 2,857
|
"""
This sample shows how to loop through all users
and delete all their content and groups
Python 2.x
ArcREST 3.5
"""
from __future__ import print_function
import arcrest
from arcresthelper import resettools
from arcresthelper import common
def trace():
"""
trace finds the line, the filename
and error message and returns it
to the user
"""
import traceback, inspect, sys
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
filename = inspect.getfile(inspect.currentframe())
# script name + line number
line = tbinfo.split(", ")[1]
# Get Python syntax error
#
synerror = traceback.format_exc().splitlines()[-1]
return line, filename, synerror
def main():
proxy_port = None
proxy_url = None
securityinfo = {}
securityinfo['security_type'] = 'Portal'#LDAP, NTLM, OAuth, Portal, PKI
securityinfo['username'] = ""#Username
securityinfo['password'] = ""#Password
securityinfo['org_url'] = "http://www.arcgis.com"
securityinfo['proxy_url'] = proxy_url
securityinfo['proxy_port'] = proxy_port
securityinfo['referer_url'] = None
securityinfo['token_url'] = None
securityinfo['certificatefile'] = None
securityinfo['keyfile'] = None
securityinfo['client_id'] = None
securityinfo['secret_id'] = None
try:
rst = resettools.resetTools(securityinfo=securityinfo)
if rst.valid:
users = rst.securityhandler.username# comma delimited list of users ex: 'User1, User2'
rst.removeUserData(users=users)
rst.removeUserGroups(users=users)
else:
print (rst.message)
except (common.ArcRestHelperError) as e:
print ("error in function: %s" % e[0]['function'])
print ("error on line: %s" % e[0]['line'])
print ("error in file name: %s" % e[0]['filename'])
print ("with error message: %s" % e[0]['synerror'])
if 'arcpyError' in e[0]:
print ("with arcpy message: %s" % e[0]['arcpyError'])
except:
line, filename, synerror = trace()
print ("error on line: %s" % line)
print ("error in file name: %s" % filename)
print ("with error message: %s" % synerror)
if __name__ == "__main__":
main()
|
pLeBlanc93/ArcREST
|
samples/remove_all_content_groups_allusers.py
|
Python
|
apache-2.0
| 2,295
|
"""
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import storage
import expiration
# Route to requested handler.
def app(environ, start_response):
if environ["PATH_INFO"] == "/":
return redirect(environ, start_response)
if environ["PATH_INFO"] == "/storage":
return storage.app(environ, start_response)
if environ["PATH_INFO"] == "/expiration":
return expiration.app(environ, start_response)
start_response("404 Not Found", [])
return [b"Page not found."]
# Redirect for root directory.
def redirect(environ, start_response):
headers = [
("Location", "static/demos/index.html")
]
start_response("301 Found", headers)
return []
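# A minimal local test-harness sketch (the host and port below are assumptions):
#
#   from wsgiref.simple_server import make_server
#   make_server("localhost", 8080, app).serve_forever()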
|
rachel-fenichel/blockly
|
appengine/main.py
|
Python
|
apache-2.0
| 1,173
|
"""Test converted models
"""
import os
import argparse
import sys
import logging
import mxnet as mx
from convert_caffe_modelzoo import convert_caffe_model, get_model_meta_info
curr_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.join(curr_path, "../../example/image-classification"))
from test_score import download_data # pylint: disable=wrong-import-position
from score import score # pylint: disable=wrong-import-position
logging.basicConfig(level=logging.DEBUG)
def test_imagenet_model(model_name, val_data, gpus, batch_size):
"""test model on imagenet """
logging.info('test %s', model_name)
meta_info = get_model_meta_info(model_name)
[model_name, mean] = convert_caffe_model(model_name, meta_info)
sym, arg_params, aux_params = mx.model.load_checkpoint(model_name, 0)
acc = [mx.metric.create('acc'), mx.metric.create('top_k_accuracy', top_k=5)]
if isinstance(mean, str):
mean_args = {'mean_img':mean}
else:
mean_args = {'rgb_mean':','.join([str(i) for i in mean])}
print(val_data)
(speed,) = score(model=(sym, arg_params, aux_params),
data_val=val_data,
label_name='prob_label',
metrics=acc,
gpus=gpus,
batch_size=batch_size,
max_num_examples=500,
**mean_args)
logging.info('speed : %f image/sec', speed)
for a in acc:
logging.info(a.get())
assert acc[0].get()[1] > meta_info['top-1-acc'] - 0.03
assert acc[1].get()[1] > meta_info['top-5-acc'] - 0.03
def main():
"""Entrypoint for test_converter"""
parser = argparse.ArgumentParser(description='Test Caffe converter')
parser.add_argument('--cpu', action='store_true', help='use cpu?')
args = parser.parse_args()
if args.cpu:
gpus = ''
batch_size = 32
else:
gpus = mx.test_utils.list_gpus()
assert gpus, 'At least one GPU is needed to run test_converter in GPU mode'
batch_size = 32 * len(gpus)
models = ['bvlc_googlenet', 'vgg-16', 'resnet-50']
val = download_data()
for m in models:
test_imagenet_model(m, val, ','.join([str(i) for i in gpus]), batch_size)
if __name__ == '__main__':
main()
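# Example invocations (assuming the Caffe model zoo files can be downloaded):
#
#   python test_converter.py          # score converted models on all detected GPUs
#   python test_converter.py --cpu    # CPU-only run with a fixed batch size of 32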
|
danithaca/mxnet
|
tools/caffe_converter/test_converter.py
|
Python
|
apache-2.0
| 2,297
|
from CoreFoundation import *
from Foundation import NSDictionary, NSMutableDictionary, NSCFDictionary
from PyObjCTools.TestSupport import *
try:
long
except NameError:
long = int
class TestCFDictionary (TestCase):
def testCreation(self):
dictionary = CFDictionaryCreate(None,
('aap', 'noot', 'mies', 'wim'),
('monkey', 'nut', 'missy', 'john'),
4, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assert_(isinstance(dictionary, CFDictionaryRef))
self.assertEqual(dictionary, {
'aap': 'monkey',
'noot': 'nut',
'mies': 'missy',
'wim': 'john'
})
dictionary = CFDictionaryCreateMutable(None, 0, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assert_(isinstance(dictionary, CFMutableDictionaryRef))
CFDictionarySetValue(dictionary, 'hello', 'world')
self.assertEqual(dictionary, {'hello': 'world'})
def testApplyFunction(self):
dictionary = CFDictionaryCreate(None,
('aap', 'noot', 'mies', 'wim'),
('monkey', 'nut', 'missy', 'john'), 4, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
context = []
def function(key, value, context):
context.append((key, value))
self.assertArgIsFunction(CFDictionaryApplyFunction, 1, b'v@@@', False)
self.assertArgHasType(CFDictionaryApplyFunction, 2, b'@')
CFDictionaryApplyFunction(dictionary, function, context)
context.sort()
self.assertEqual(len(context) , 4)
self.assertEqual(context,
[
(b'aap'.decode('ascii'), b'monkey'.decode('ascii')),
(b'mies'.decode('ascii'), b'missy'.decode('ascii')),
(b'noot'.decode('ascii'), b'nut'.decode('ascii')),
(b'wim'.decode('ascii'), b'john'.decode('ascii'))
])
def testTypeID(self):
self.assertIsInstance(CFDictionaryGetTypeID(), (int, long))
    # Exercises the CFDictionaryCreate* constructor and copy variants.
    def testCreationVariants(self):
dct = CFDictionaryCreate(None, [b"key1".decode('ascii'), b"key2".decode('ascii')], [42, 43], 2, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertIsInstance(dct, CFDictionaryRef)
dct = CFDictionaryCreateCopy(None, dct)
self.assertIsInstance(dct, CFDictionaryRef)
dct = CFDictionaryCreateMutable(None, 0, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertIsInstance(dct, CFDictionaryRef)
dct = CFDictionaryCreateMutableCopy(None, 0, dct)
self.assertIsInstance(dct, CFDictionaryRef)
def testInspection(self):
dct = CFDictionaryCreate(None, [b"key1".decode('ascii'), b"key2".decode('ascii')], [42, 42], 2, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertIsInstance(dct, CFDictionaryRef)
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertEqual(CFDictionaryGetCountOfKey(dct, b"key1".decode('ascii')) , 1)
self.assertEqual(CFDictionaryGetCountOfKey(dct, b"key3".decode('ascii')) , 0)
self.assertEqual(CFDictionaryGetCountOfValue(dct, 42) , 2)
self.assertEqual(CFDictionaryGetCountOfValue(dct, 44) , 0)
self.assertResultHasType(CFDictionaryContainsKey, objc._C_NSBOOL)
self.assertTrue(CFDictionaryContainsKey(dct, b"key1".decode('ascii')))
self.assertFalse(CFDictionaryContainsKey(dct, b"key3".decode('ascii')))
self.assertResultHasType(CFDictionaryContainsValue, objc._C_NSBOOL)
self.assertTrue(CFDictionaryContainsValue(dct, 42))
self.assertFalse(CFDictionaryContainsValue(dct, b"key3".decode('ascii')))
self.assertEqual(CFDictionaryGetValue(dct, "key2") , 42)
self.assertIs(CFDictionaryGetValue(dct, "key3"), None)
self.assertResultHasType(CFDictionaryGetValueIfPresent, objc._C_NSBOOL)
self.assertArgIsOut(CFDictionaryGetValueIfPresent, 2)
ok, value = CFDictionaryGetValueIfPresent(dct, "key2", None)
self.assertTrue(ok)
self.assertEqual(value , 42)
ok, value = CFDictionaryGetValueIfPresent(dct, "key3", None)
self.assertFalse(ok)
self.assertIs(value, None)
keys, values = CFDictionaryGetKeysAndValues(dct, None, None)
self.assertEqual(values , (42, 42))
keys = list(keys)
keys.sort()
self.assertEqual(keys , ['key1', 'key2'])
def testMutation(self):
dct = CFDictionaryCreateMutable(None, 0, kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks)
self.assertEqual(CFDictionaryGetCount(dct) , 0)
CFDictionaryAddValue(dct, b"key1".decode('ascii'), b"value1".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 1)
self.assertTrue(CFDictionaryContainsKey(dct, b"key1".decode('ascii')))
CFDictionarySetValue(dct, b"key2".decode('ascii'), b"value2".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertTrue(CFDictionaryContainsKey(dct, b"key2".decode('ascii')))
CFDictionaryReplaceValue(dct, b"key2".decode('ascii'), b"value2b".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertTrue(CFDictionaryContainsKey(dct, b"key2".decode('ascii')))
self.assertEqual(CFDictionaryGetValue(dct, "key2") , b"value2b".decode('ascii'))
CFDictionaryReplaceValue(dct, b"key3".decode('ascii'), b"value2b".decode('ascii'))
self.assertEqual(CFDictionaryGetCount(dct) , 2)
self.assertFalse(CFDictionaryContainsKey(dct, b"key3".decode('ascii')))
CFDictionaryRemoveValue(dct, b"key1".decode('ascii'))
self.assertFalse(CFDictionaryContainsKey(dct, b"key1".decode('ascii')))
CFDictionaryRemoveAllValues(dct)
self.assertFalse(CFDictionaryContainsKey(dct, b"key2".decode('ascii')))
self.assertEqual(CFDictionaryGetCount(dct) , 0)
if __name__ == "__main__":
main()
|
albertz/music-player
|
mac/pyobjc-framework-Cocoa/PyObjCTest/test_cfdictionary.py
|
Python
|
bsd-2-clause
| 6,093
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import Field
from django.forms.util import ValidationError as FormValidationError
from django.template import TemplateSyntaxError
from django.template.loader import get_template
import re
def ContextNameValidator(value):
r = re.findall(r'[\w_][\d\w_]+', value)
    if not(bool(r) and len(r) == 1):
raise FormValidationError(_('A-z 1-9 _ only. (with first char not 1-9)'))
return value
class TemplateFormField(Field):
description = "Field to store valide template path"
def __init__(self,*args,**kwargs):
self.max_length = kwargs.pop('max_length') or 255
super(TemplateFormField,self).__init__(*args,**kwargs)
def clean(self,value):
if not value and not self.required:
return None
value = super(TemplateFormField,self).clean(value)
if isinstance(value, basestring):
try :
get_template(value)
except TemplateSyntaxError,e:
raise FormValidationError('%s' % e)
except:
                raise FormValidationError(_('Template %s does not exist' % value))
else:
            raise FormValidationError(_('Not a string instance: %s' % value))
return value
class TemplateField(models.CharField):
""" Field to store valid template path"""
__metaclass__ = models.SubfieldBase
def db_type(self, connection):
return 'char(%s)' % self.max_length
def get_internal_type(self):
return "CharField"
#def to_python(self,value):
# return value
def get_db_prep_value(self, value, connection=None, prepared=False):
return value
def value_to_string(self, obj):
value = self._get_val_from_obj(obj)
return self.get_db_prep_value(value)
def value_from_object(self, obj):
value= super(TemplateField, self).value_from_object(obj)
return value
def formfield(self, **kwargs):
if "form_class" not in kwargs:
kwargs["form_class"] = TemplateFormField
field = super(TemplateField, self).formfield(**kwargs)
if not field.help_text:
field.help_text = "Enter valide template path"
return field
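# A hypothetical model using the field above (the model name is an assumption):
#
#   class Page(models.Model):
#       template = TemplateField(max_length=255)
#
# The accompanying form field validates that the stored value can be loaded
# with django.template.loader.get_template() before it is accepted.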
|
Krozark/Kraggne
|
Kraggne/fields.py
|
Python
|
bsd-2-clause
| 2,331
|
##########################################################################
#
# Copyright (c) 2011-2013, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
class About :
@staticmethod
def name() :
return "Gaffer"
@staticmethod
def majorVersion() :
return !GAFFER_MAJOR_VERSION!
@staticmethod
def minorVersion() :
return !GAFFER_MINOR_VERSION!
@staticmethod
def patchVersion() :
return !GAFFER_PATCH_VERSION!
@staticmethod
def versionString() :
return "%d.%d.%d" % ( About.majorVersion(), About.minorVersion(), About.patchVersion() )
@staticmethod
def copyright() :
return "Copyright (c) 2011-2012 John Haddon"
@staticmethod
def license() :
return "$GAFFER_ROOT/LICENSE"
@staticmethod
def url() :
return "http://imageengine.github.io/gaffer/"
@staticmethod
def dependenciesPreamble() :
return ( About.name() + " includes code from several open source projects. "
"Specific licensing information, credits, source downloads and "
"URLs are provided for each project below."
)
@staticmethod
def dependencies() :
result = [
{
"name" : "boost",
"url" : "http://www.boost.org/",
"license" : "$GAFFER_ROOT/doc/licenses/boost",
},
{
"name" : "cortex",
"url" : "http://code.google.com/p/cortex-vfx",
"license" : "$GAFFER_ROOT/doc/licenses/cortex",
},
{
"name" : "freetype",
"url" : "http://www.freetype.org/",
"license" : "$GAFFER_ROOT/doc/licenses/freetype",
"credit" : "Portions of this software are copyright (c) 2009 The FreeType Project (www.freetype.org). All rights reserved."
},
{
"name" : "glew",
"url" : "http://glew.sourceforge.net/",
"license" : "$GAFFER_ROOT/doc/licenses/glew",
},
{
"name" : "ilmbase",
"url" : "http://www.openexr.com/",
"license" : "$GAFFER_ROOT/doc/licenses/ilmbase",
},
{
"name" : "libjpeg",
"url" : "http://www.ijg.org/",
"license" : "$GAFFER_ROOT/doc/licenses/libjpeg",
"credit" : "This software is based in part on the work of the Independent JPEG Group.",
},
{
"name" : "libpng",
"url" : "http://www.libpng.org/",
"license" : "$GAFFER_ROOT/doc/licenses/libpng",
},
{
"name" : "openexr",
"url" : "http://www.openexr.com/",
"license" : "$GAFFER_ROOT/doc/licenses/openexr",
},
{
"name" : "python",
"url" : "http://python.org/",
"license" : "$GAFFER_ROOT/doc/licenses/python",
},
{
"name" : "pyopengl",
"url" : "http://pyopengl.sourceforge.net/",
},
{
"name" : "libtiff",
"url" : "http://www.libtiff.org/",
"license" : "$GAFFER_ROOT/doc/licenses/libtiff",
},
{
"name" : "tbb",
"url" : "http://threadingbuildingblocks.org/",
"license" : "$GAFFER_ROOT/doc/licenses/tbb",
},
{
"name" : "OpenColorIO",
"url" : "http://opencolorio.org/",
"license" : "$GAFFER_ROOT/doc/licenses/openColorIO",
},
{
"name" : "OpenImageIO",
"url" : "http://www.openimageio.org/",
"license" : "$GAFFER_ROOT/doc/licenses/openImageIO",
},
{
"name" : "HDF5",
"url" : "http://www.hdfgroup.org/",
"license" : "$GAFFER_ROOT/doc/licenses/hdf5",
},
{
"name" : "Alembic",
"url" : "http://www.alembic.io/",
"license" : "$GAFFER_ROOT/doc/licenses/alembic",
},
{
"name" : "Qt",
"url" : "http://qt.nokia.com/",
"license" : "$GAFFER_ROOT/doc/licenses/qt",
},
]
if os.path.exists( os.environ["GAFFER_ROOT"] + "/python/PyQt4" ) :
result.append( {
"name" : "PyQt",
"url" : "http://www.riverbankcomputing.co.uk/",
"license" : "$GAFFER_ROOT/doc/licenses/pyQt",
} )
if os.path.exists( os.environ["GAFFER_ROOT"] + "/python/PySide" ) :
result.append( {
"name" : "PySide",
"url" : "http://www.pyside.org/",
"license" : "$GAFFER_ROOT/doc/licenses/pySide",
} )
return result
|
DoubleNegativeVisualEffects/gaffer
|
python/Gaffer/About.py
|
Python
|
bsd-3-clause
| 5,658
|
##########################################################################
#
# Copyright (c) 2011-2012, Image Engine Design Inc. All rights reserved.
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import IECore
import Gaffer
import GafferUI
class CompoundVectorParameterValueWidget( GafferUI.CompoundParameterValueWidget ) :
def __init__( self, parameterHandler, collapsible=None, **kw ) :
GafferUI.CompoundParameterValueWidget.__init__( self, parameterHandler, collapsible, _plugValueWidgetClass=_PlugValueWidget, **kw )
class _PlugValueWidget( GafferUI.CompoundParameterValueWidget._PlugValueWidget ) :
def __init__( self, parameterHandler, collapsed ) :
GafferUI.CompoundParameterValueWidget._PlugValueWidget.__init__( self, parameterHandler, collapsed )
self.__vectorDataWidget = None
def _headerWidget( self ) :
if self.__vectorDataWidget is not None :
return self.__vectorDataWidget
header = [ IECore.CamelCase.toSpaced( x ) for x in self._parameter().keys() ]
columnToolTips = [ self._parameterToolTip( self._parameterHandler().childParameterHandler( x ) ) for x in self._parameter().values() ]
self.__vectorDataWidget = GafferUI.VectorDataWidget( header = header, columnToolTips = columnToolTips )
self.__dataChangedConnection = self.__vectorDataWidget.dataChangedSignal().connect( Gaffer.WeakMethod( self.__dataChanged ) )
self._updateFromPlug()
return self.__vectorDataWidget
def _childPlugs( self ) :
# because we represent everything in the header we don't
# need any plug widgets made by the base class.
return []
def _updateFromPlug( self ) :
GafferUI.CompoundParameterValueWidget._PlugValueWidget._updateFromPlug( self )
if self.__vectorDataWidget is None:
return
data = []
for plug in self._parameterHandler().plug().children() :
plugData = plug.getValue()
if len( data ) and len( plugData ) != len( data[0] ) :
# in __dataChanged we have to update the child plug values
# one at a time. when adding or removing rows, this means that the
# columns will have differing lengths until the last plug
# has been set. in this case we shortcut ourselves, and wait
# for the final plug to be set before updating the VectorDataWidget.
return
data.append( plugData )
self.__vectorDataWidget.setData( data )
self.__vectorDataWidget.setEditable( self._editable() )
def __dataChanged( self, vectorDataWidget ) :
data = vectorDataWidget.getData()
with Gaffer.UndoContext( self.getPlug().ancestor( Gaffer.ScriptNode.staticTypeId() ) ) :
for d, p in zip( data, self._parameterHandler().plug().children() ) :
p.setValue( d )
GafferUI.ParameterValueWidget.registerType( IECore.CompoundVectorParameter.staticTypeId(), CompoundVectorParameterValueWidget )
|
DoubleNegativeVisualEffects/gaffer
|
python/GafferUI/CompoundVectorParameterValueWidget.py
|
Python
|
bsd-3-clause
| 4,546
|
from __future__ import absolute_import
import unittest
import bokeh.core.query as query
from bokeh.models import (
Axis, BoxZoomTool, ColumnDataSource, DatetimeAxis, GlyphRenderer, Grid, LinearAxis,
LogAxis, PanTool, Plot, PreviewSaveTool, Range1d, ResetTool, ResizeTool, Tool, WheelZoomTool,
)
from bokeh.models.glyphs import Glyph, Circle, Line, Rect
def large_plot():
source = ColumnDataSource(data=dict(x=[0, 1], y=[0, 1]))
xdr = Range1d(start=0, end=1)
xdr.tags.append("foo")
xdr.tags.append("bar")
ydr = Range1d(start=10, end=20)
ydr.tags.append("foo")
plot = Plot(x_range=xdr, y_range=ydr)
ydr2 = Range1d(start=0, end=100)
plot.extra_y_ranges = {"liny": ydr2}
circle = Circle(x="x", y="y", fill_color="red", size=5, line_color="black")
plot.add_glyph(source, circle, name="mycircle")
line = Line(x="x", y="y")
plot.add_glyph(source, line, name="myline")
rect = Rect(x="x", y="y", width=1, height=1, fill_color="green")
plot.add_glyph(source, rect, name="myrect")
plot.add_layout(DatetimeAxis(), 'below')
plot.add_layout(LogAxis(), 'left')
plot.add_layout(LinearAxis(y_range_name="liny"), 'left')
plot.add_layout(Grid(dimension=0), 'left')
plot.add_layout(Grid(dimension=1), 'left')
plot.add_tools(
BoxZoomTool(), PanTool(), PreviewSaveTool(), ResetTool(), ResizeTool(), WheelZoomTool(),
)
return plot
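# Note: the plot constructed above contains 3 Range1d ranges, 3 glyphs,
# 3 axes (DatetimeAxis, LogAxis, LinearAxis), 2 grids and 6 tools; the
# typcases table below relies on exactly these counts.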
class TestMatch(unittest.TestCase):
def test_type(self):
pass
typcases = {
Range1d: 3,
Plot: 1,
Glyph: 3,
Axis: 3,
DatetimeAxis: 1,
LinearAxis: 2, # DatetimeAxis is subclass of LinearAxis
LogAxis: 1,
Grid: 2,
Tool: 6,
BoxZoomTool: 1,
PanTool: 1,
PreviewSaveTool: 1,
ResetTool: 1,
ResizeTool: 1,
WheelZoomTool: 1,
}
class TestFind(unittest.TestCase):
def setUp(self):
self.plot = large_plot()
def test_type(self):
for typ, count in typcases.items():
res = list(query.find(self.plot.references(), dict(type=typ)))
self.assertEqual(len(res), count)
self.assertTrue(all(isinstance(x, typ) for x in res))
def test_tags_with_string(self):
cases = {
"foo": 2,
"bar": 1,
}
for tag, count in cases.items():
res = list(query.find(self.plot.references(), dict(tags=tag)))
self.assertEqual(len(res), count)
def test_tags_with_seq(self):
cases = {
"foo": 2,
"bar": 1,
}
for tag, count in cases.items():
res = list(query.find(self.plot.references(), dict(tags=[tag])))
self.assertEqual(len(res), count)
res = list(query.find(self.plot.references(), dict(tags=list(cases.keys()))))
self.assertEqual(len(res), 2)
def test_name(self):
cases = {
"myline": Line,
"mycircle": Circle,
"myrect": Rect,
}
for name, typ in cases.items():
res = list(query.find(self.plot.references(), dict(name=name)))
self.assertEqual(len(res), 1)
self.assertTrue(all(isinstance(x.glyph, typ) for x in res))
def test_in(self):
from bokeh.core.query import IN
res = list(query.find(self.plot.references(), dict(name={IN: ['a', 'b']})))
self.assertEqual(len(res), 0)
res = list(query.find(self.plot.references(), dict(name={IN: ['a', 'mycircle']})))
self.assertEqual(len(res), 1)
res = list(query.find(self.plot.references(), dict(name={IN: ['a', 'mycircle', 'myline']})))
self.assertEqual(len(res), 2)
res = list(query.find(self.plot.references(), dict(name={IN: ['a', 'mycircle', 'myline', 'myrect']})))
self.assertEqual(len(res), 3)
for typ, count in typcases.items():
res = list(query.find(self.plot.references(), dict(type={IN: [typ]})))
self.assertEqual(len(res), count)
self.assertTrue(all(isinstance(x, typ) for x in res))
res = list(query.find(self.plot.references(), dict(type={IN: [typ, dict]})))
self.assertEqual(len(res), count)
self.assertTrue(all(isinstance(x, typ) for x in res))
res = list(query.find(self.plot.references(), dict(type={IN: [dict]})))
self.assertEqual(len(res), 0)
# count adjusted by hand to account for duplicates/subclasses
res = list(query.find(self.plot.references(), dict(type={IN: list(typcases.keys())})))
self.assertEqual(len(res), 18)
def test_disjunction(self):
from bokeh.core.query import OR
res = list(
query.find(self.plot.references(),
{OR: [dict(type=Axis), dict(type=Grid)]})
)
self.assertEqual(len(res), 5)
res = list(
query.find(self.plot.references(),
{OR: [dict(type=Axis), dict(name="mycircle")]})
)
self.assertEqual(len(res), 4)
res = list(
query.find(self.plot.references(),
{OR: [dict(type=Axis), dict(tags="foo"), dict(name="mycircle")]})
)
self.assertEqual(len(res), 6)
res = list(
query.find(self.plot.references(),
{OR: [dict(type=Axis), dict(tags="foo"), dict(name="mycircle"), dict(name="bad")]})
)
self.assertEqual(len(res), 6)
def test_conjunction(self):
res = list(
query.find(self.plot.references(), dict(type=Axis, tags="foo"))
)
self.assertEqual(len(res), 0)
res = list(
query.find(self.plot.references(), dict(type=Range1d, tags="foo"))
)
self.assertEqual(len(res), 2)
res = list(
query.find(self.plot.references(), dict(type=GlyphRenderer, name="mycircle"))
)
self.assertEqual(len(res), 1)
def test_ops(self):
from bokeh.core.query import EQ, LEQ, GEQ, LT, GT, NEQ
res = list(
query.find(self.plot.references(), {'size': {EQ: 5}})
)
self.assertEqual(len(res), 1)
res = list(
query.find(self.plot.references(), {'size': {NEQ: 5}})
)
self.assertEqual(len(res), 0)
res = list(
query.find(self.plot.references(), {'size': {GEQ: 5}})
)
self.assertEqual(len(res), 1)
res = list(
query.find(self.plot.references(), {'size': {LEQ: 5}})
)
self.assertEqual(len(res), 1)
res = list(
query.find(self.plot.references(), {'size': {GT: 5}})
)
self.assertEqual(len(res), 0)
res = list(
query.find(self.plot.references(), {'size': {LT: 5}})
)
self.assertEqual(len(res), 0)
if __name__ == "__main__":
unittest.main()
|
pombredanne/bokeh
|
bokeh/core/tests/test_query.py
|
Python
|
bsd-3-clause
| 6,856
|
def ignore(line):
return line.startswith(("m4_include", "#", "l:", "h:")) or not len(line)
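# For example, ignore("# comment"), ignore("m4_include(foo)") and ignore("") are
# all True, while an ordinary translation line such as "upd:Some text" is kept
# (the sample strings here are purely illustrative).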
def print_summary(summary):
for language, data in summary.items():
with open(language.split()[1] + ".txt", "w", encoding='utf-8-sig') as outFile:
keyLen = max(map(len, data))
valueLen = max(map(len, data.values()))
for item, value in data.items():
print(item.ljust(keyLen), "|", value.ljust(valueLen), file = outFile)
def generate():
with open(r"..\farlang.templ.m4", "r", encoding='utf-8-sig') as feedFile:
lines = [l.rstrip() for l in feedFile.readlines() if not ignore(l.rstrip())]
count = int(lines[1])
languages = lines[2 : 2 + count]
summary = {}
for i, line in enumerate(lines):
if line.startswith("upd:"):
labelIndex = i - 1
while not lines[labelIndex].startswith("M"):
labelIndex = labelIndex - 1
lngIndex = i - labelIndex - 1
if languages[lngIndex] not in summary:
summary[languages[lngIndex]] = {}
summary[languages[lngIndex]][lines[labelIndex]] = lines[i][4 :]
print_summary(summary)
if __name__=="__main__":
generate()
|
FarGroup/FarManager
|
far/tools/translation.summary.py
|
Python
|
bsd-3-clause
| 1,144
|
from distutils.core import setup
from distutils.command.install import INSTALL_SCHEMES
import os
import sys
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
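# For instance, on a POSIX-style path fullsplit('django/contrib/admin') returns
# ['django', 'contrib', 'admin'] (the path here is purely illustrative).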
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
django_dir = 'django'
for dirpath, dirnames, filenames in os.walk(django_dir):
# Ignore dirnames that start with '.' by pruning the list in place,
# so os.walk does not descend into them.
dirnames[:] = [d for d in dirnames if not d.startswith('.')]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
# Dynamically calculate the version based on django.VERSION.
version_tuple = __import__('django').VERSION
if version_tuple[2] is not None:
version = "%d.%d_%s" % version_tuple
else:
version = "%d.%d" % version_tuple[:2]
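# For illustration: a hypothetical VERSION of (1, 0, 'alpha') would yield
# "1.0_alpha", while (1, 0, None) would yield "1.0".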
setup(
name = "Django",
version = version,
url = 'http://www.djangoproject.com/',
author = 'Django Software Foundation',
author_email = 'foundation@djangoproject.com',
description = 'A high-level Python Web framework that encourages rapid development and clean, pragmatic design.',
packages = packages,
data_files = data_files,
scripts = ['django/bin/django-admin.py'],
)
|
paulsmith/geodjango
|
setup.py
|
Python
|
bsd-3-clause
| 2,032
|
from tests.base_test_case import BaseTestCase
import radiotherm
from radiotherm.thermostat import CT50v194
class TestGetThermostatClass(BaseTestCase):
def test_class_exists(self):
ret = radiotherm.get_thermostat_class('CT50 V1.94')
self.assertEqual(ret, CT50v194)
def test_class_does_not_exist(self):
ret = radiotherm.get_thermostat_class('CT51 V3.17')
self.assertIsNone(ret)
|
trisk/radiotherm
|
tests/radiotherm/test_get_thermostat_class.py
|
Python
|
bsd-3-clause
| 418
|
from __future__ import absolute_import, unicode_literals
from django.test import SimpleTestCase
from localflavor.ar.forms import (ARProvinceSelect, ARPostalCodeField,
ARDNIField, ARCUITField)
class ARLocalFlavorTests(SimpleTestCase):
def test_ARProvinceSelect(self):
f = ARProvinceSelect()
out = '''<select name="provincias">
<option value="B">Buenos Aires</option>
<option value="K">Catamarca</option>
<option value="H">Chaco</option>
<option value="U">Chubut</option>
<option value="C">Ciudad Aut\xf3noma de Buenos Aires</option>
<option value="X">C\xf3rdoba</option>
<option value="W">Corrientes</option>
<option value="E">Entre R\xedos</option>
<option value="P">Formosa</option>
<option value="Y">Jujuy</option>
<option value="L">La Pampa</option>
<option value="F">La Rioja</option>
<option value="M">Mendoza</option>
<option value="N">Misiones</option>
<option value="Q">Neuqu\xe9n</option>
<option value="R">R\xedo Negro</option>
<option value="A" selected="selected">Salta</option>
<option value="J">San Juan</option>
<option value="D">San Luis</option>
<option value="Z">Santa Cruz</option>
<option value="S">Santa Fe</option>
<option value="G">Santiago del Estero</option>
<option value="V">Tierra del Fuego, Ant\xe1rtida e Islas del Atl\xe1ntico Sur</option>
<option value="T">Tucum\xe1n</option>
</select>'''
self.assertHTMLEqual(f.render('provincias', 'A'), out)
def test_ARPostalCodeField(self):
error_format = ['Enter a postal code in the format NNNN or ANNNNAAA.']
error_atmost = ['Ensure this value has at most 8 characters (it has 9).']
error_atleast = ['Ensure this value has at least 4 characters (it has 3).']
valid = {
'5000': '5000',
'C1064AAB': 'C1064AAB',
'c1064AAB': 'C1064AAB',
'C1064aab': 'C1064AAB',
'4400': '4400',
}
invalid = {
'C1064AABB': error_atmost + error_format,
'C1064AA': error_format,
'C1064AB': error_format,
'106AAB': error_format,
'500': error_atleast + error_format,
'5PPP': error_format,
}
self.assertFieldOutput(ARPostalCodeField, valid, invalid)
def test_ARDNIField(self):
error_length = ['This field requires 7 or 8 digits.']
error_digitsonly = ['This field requires only numbers.']
valid = {
'20123456': '20123456',
'20.123.456': '20123456',
'20.123456': '20123456',
'9123456': '9123456',
'9.123.456': '9123456',
}
invalid = {
'101234566': error_length,
'W0123456': error_digitsonly,
'10,123,456': error_digitsonly,
}
self.assertFieldOutput(ARDNIField, valid, invalid)
def test_ARCUITField(self):
error_format = ['Enter a valid CUIT in XX-XXXXXXXX-X or XXXXXXXXXXXX format.']
error_invalid = ['Invalid CUIT.']
error_legal_type = ['Invalid legal type. Type must be 27, 20, 30, 23, 24 or 33.']
valid = {
'20-10123456-9': '20-10123456-9',
'27-10345678-4': '27-10345678-4',
'20101234569': '20-10123456-9',
'27103456784': '27-10345678-4',
'30011111110': '30-01111111-0',
'24117166062': '24-11716606-2',
'33500001599': '33-50000159-9',
'23000052264': '23-00005226-4',
}
invalid = {
'2-10123456-9': error_format,
'210123456-9': error_format,
'20-10123456': error_format,
'20-10123456-': error_format,
'20-10123456-5': error_invalid,
'27-10345678-1': error_invalid,
'11211111110': error_legal_type,
}
self.assertFieldOutput(ARCUITField, valid, invalid)
|
M157q/django-localflavor
|
tests/test_ar.py
|
Python
|
bsd-3-clause
| 4,069
|
"""
Reference for line-styles included with Matplotlib.
"""
import numpy as np
import matplotlib.pyplot as plt
color = 'cornflowerblue'
points = np.ones(5) # Draw 5 points for each line
text_style = dict(horizontalalignment='right', verticalalignment='center',
fontsize=12, fontdict={'family': 'monospace'})
def format_axes(ax):
ax.margins(0.2)
ax.set_axis_off()
def nice_repr(text):
return repr(text).lstrip('u')
# Plot all line styles.
f, ax = plt.subplots()
linestyles = ['-', '--', '-.', ':']
for y, linestyle in enumerate(linestyles):
ax.text(-0.5, y, nice_repr(linestyle), **text_style)
ax.plot(y * points, linestyle=linestyle, color=color, linewidth=3)
format_axes(ax)
ax.set_title('line styles')
plt.show()
|
bundgus/python-playground
|
matplotlib-playground/examples/lines_bars_and_markers/line_styles_reference.py
|
Python
|
mit
| 772
|
def read_data(filename):
# Read filename where filename starts with comment lines
# defined by symbol '#' then contains data in 2 columns
# of identical length
inp = open(filename, 'r')
myline = inp.readline()
while myline[0] == '#':
myline = inp.readline()
t = []; h = []
while myline:
mls = myline.split()
t.append(float(mls[0]))
h.append(float(mls[1]))
myline = inp.readline()
inp.close()
return t, h
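# Usage sketch (file name and contents are hypothetical): a file containing
# # time height
# 0.0 1.5
# 0.1 1.7
# read via t, h = read_data('heights.dat') gives t == [0.0, 0.1] and h == [1.5, 1.7].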
|
nicholasmalaya/arcanus
|
uq/ps3/read_data.py
|
Python
|
mit
| 429
|
""" Python 'hex_bytes' Codec - 2-digit hex codec with spaces between bytes.
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
"""
import codecs, binascii
from string import hexdigits
### Codec APIs
def hex_encode(input, errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
temp = binascii.b2a_hex(input)
output = " ".join(temp[i:i + 2] for i in xrange(0, len(temp), 2))
return (output, len(input))
def hex_decode(input, errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.a2b_hex("".join(char for char in input if char in hexdigits))
return (output, len(input))
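# Round-trip sketch for the two helpers above:
# hex_encode('abc') -> ('61 62 63', 3)
# hex_decode('61 62 63') -> ('abc', 8) # non-hex characters, including spaces, are ignored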
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return hex_encode(input, errors)
def decode(self, input, errors='strict'):
return hex_decode(input, errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
assert self.errors == 'strict'
return binascii.b2a_hex(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
assert self.errors == 'strict'
return binascii.a2b_hex(input)
class StreamWriter(Codec, codecs.StreamWriter):
pass
class StreamReader(Codec, codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='hex-bytes',
encode=hex_encode,
decode=hex_decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
|
chubbymaggie/Sark
|
sark/encodings/hex_bytes.py
|
Python
|
mit
| 2,424
|
# encoding: utf-8
"""
Test suite for pptx.coreprops module
"""
from __future__ import absolute_import
import pytest
from datetime import datetime, timedelta
from pptx.opc.constants import CONTENT_TYPE as CT
from pptx.oxml.parts.coreprops import CT_CoreProperties
from pptx.parts.coreprops import CoreProperties
class DescribeCoreProperties(object):
def it_knows_the_string_property_values(self, str_prop_get_fixture):
core_properties, prop_name, expected_value = str_prop_get_fixture
actual_value = getattr(core_properties, prop_name)
assert actual_value == expected_value
def it_can_change_the_string_property_values(self, str_prop_set_fixture):
core_properties, prop_name, value, expected_xml = str_prop_set_fixture
setattr(core_properties, prop_name, value)
assert core_properties._element.xml == expected_xml
def it_knows_the_date_property_values(self, date_prop_get_fixture):
core_properties, prop_name, expected_datetime = date_prop_get_fixture
actual_datetime = getattr(core_properties, prop_name)
assert actual_datetime == expected_datetime
def it_can_change_the_date_property_values(self, date_prop_set_fixture):
core_properties, prop_name, value, expected_xml = (
date_prop_set_fixture
)
setattr(core_properties, prop_name, value)
assert core_properties._element.xml == expected_xml
def it_knows_the_revision_number(self, revision_get_fixture):
core_properties, expected_revision = revision_get_fixture
assert core_properties.revision == expected_revision
def it_can_change_the_revision_number(self, revision_set_fixture):
core_properties, revision, expected_xml = revision_set_fixture
core_properties.revision = revision
assert core_properties._element.xml == expected_xml
def it_can_construct_a_default_core_props(self):
core_props = CoreProperties.default()
# verify -----------------------
assert isinstance(core_props, CoreProperties)
assert core_props.content_type is CT.OPC_CORE_PROPERTIES
assert core_props.partname == '/docProps/core.xml'
assert isinstance(core_props._element, CT_CoreProperties)
assert core_props.title == 'PowerPoint Presentation'
assert core_props.last_modified_by == 'python-pptx'
assert core_props.revision == 1
# core_props.modified only stores time with seconds resolution, so
# comparison needs to be a little loose (within two seconds)
modified_timedelta = datetime.utcnow() - core_props.modified
max_expected_timedelta = timedelta(seconds=2)
assert modified_timedelta < max_expected_timedelta
# fixtures -------------------------------------------------------
@pytest.fixture(params=[
('created', datetime(2012, 11, 17, 16, 37, 40)),
('last_printed', datetime(2014, 6, 4, 4, 28)),
('modified', None),
])
def date_prop_get_fixture(self, request, core_properties):
prop_name, expected_datetime = request.param
return core_properties, prop_name, expected_datetime
@pytest.fixture(params=[
('created', 'dcterms:created', datetime(2001, 2, 3, 4, 5),
'2001-02-03T04:05:00Z', ' xsi:type="dcterms:W3CDTF"'),
('last_printed', 'cp:lastPrinted', datetime(2014, 6, 4, 4),
'2014-06-04T04:00:00Z', ''),
('modified', 'dcterms:modified', datetime(2005, 4, 3, 2, 1),
'2005-04-03T02:01:00Z', ' xsi:type="dcterms:W3CDTF"'),
])
def date_prop_set_fixture(self, request):
prop_name, tagname, value, str_val, attrs = request.param
coreProperties = self.coreProperties(None, None)
core_properties = CoreProperties.load(
None, None, coreProperties, None
)
expected_xml = self.coreProperties(tagname, str_val, attrs)
return core_properties, prop_name, value, expected_xml
@pytest.fixture(params=[
('author', 'python-pptx'),
('category', ''),
('comments', ''),
('content_status', 'DRAFT'),
('identifier', 'GXS 10.2.1ab'),
('keywords', 'foo bar baz'),
('language', 'US-EN'),
('last_modified_by', 'Steve Canny'),
('subject', 'Spam'),
('title', 'Presentation'),
('version', '1.2.88'),
])
def str_prop_get_fixture(self, request, core_properties):
prop_name, expected_value = request.param
return core_properties, prop_name, expected_value
@pytest.fixture(params=[
('author', 'dc:creator', 'scanny'),
('category', 'cp:category', 'silly stories'),
('comments', 'dc:description', 'Bar foo to you'),
('content_status', 'cp:contentStatus', 'FINAL'),
('identifier', 'dc:identifier', 'GT 5.2.xab'),
('keywords', 'cp:keywords', 'dog cat moo'),
('language', 'dc:language', 'GB-EN'),
('last_modified_by', 'cp:lastModifiedBy', 'Billy Bob'),
('subject', 'dc:subject', 'Eggs'),
('title', 'dc:title', 'Dissertation'),
('version', 'cp:version', '81.2.8'),
])
def str_prop_set_fixture(self, request):
prop_name, tagname, value = request.param
coreProperties = self.coreProperties(None, None)
core_properties = CoreProperties.load(
None, None, coreProperties, None
)
expected_xml = self.coreProperties(tagname, value)
return core_properties, prop_name, value, expected_xml
@pytest.fixture(params=[
('42', 42), (None, 0), ('foobar', 0), ('-17', 0), ('32.7', 0)
])
def revision_get_fixture(self, request):
str_val, expected_revision = request.param
tagname = '' if str_val is None else 'cp:revision'
coreProperties = self.coreProperties(tagname, str_val)
core_properties = CoreProperties.load(
None, None, coreProperties, None
)
return core_properties, expected_revision
@pytest.fixture(params=[
(42, '42'),
])
def revision_set_fixture(self, request):
value, str_val = request.param
coreProperties = self.coreProperties(None, None)
core_properties = CoreProperties.load(
None, None, coreProperties, None
)
expected_xml = self.coreProperties('cp:revision', str_val)
return core_properties, value, expected_xml
# fixture components ---------------------------------------------
def coreProperties(self, tagname, str_val, attrs=''):
tmpl = (
'<cp:coreProperties xmlns:cp="http://schemas.openxmlformats.org/'
'package/2006/metadata/core-properties" xmlns:dc="http://purl.or'
'g/dc/elements/1.1/" xmlns:dcmitype="http://purl.org/dc/dcmitype'
'/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:xsi="http://'
'www.w3.org/2001/XMLSchema-instance">%s</cp:coreProperties>\n'
)
if not tagname:
child_element = ''
elif not str_val:
child_element = '\n <%s%s/>\n' % (tagname, attrs)
else:
child_element = (
'\n <%s%s>%s</%s>\n' % (tagname, attrs, str_val, tagname)
)
return tmpl % child_element
@pytest.fixture
def core_properties(self):
xml = (
b'<?xml version=\'1.0\' encoding=\'UTF-8\' standalone=\'yes\'?>'
b'\n<cp:coreProperties xmlns:cp="http://schemas.openxmlformats.o'
b'rg/package/2006/metadata/core-properties" xmlns:dc="http://pur'
b'l.org/dc/elements/1.1/" xmlns:dcmitype="http://purl.org/dc/dcm'
b'itype/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:xsi="h'
b'ttp://www.w3.org/2001/XMLSchema-instance">\n'
b' <cp:contentStatus>DRAFT</cp:contentStatus>\n'
b' <dc:creator>python-pptx</dc:creator>\n'
b' <dcterms:created xsi:type="dcterms:W3CDTF">2012-11-17T11:07:'
b'40-05:30</dcterms:created>\n'
b' <dc:description/>\n'
b' <dc:identifier>GXS 10.2.1ab</dc:identifier>\n'
b' <dc:language>US-EN</dc:language>\n'
b' <cp:lastPrinted>2014-06-04T04:28:00Z</cp:lastPrinted>\n'
b' <cp:keywords>foo bar baz</cp:keywords>\n'
b' <cp:lastModifiedBy>Steve Canny</cp:lastModifiedBy>\n'
b' <cp:revision>4</cp:revision>\n'
b' <dc:subject>Spam</dc:subject>\n'
b' <dc:title>Presentation</dc:title>\n'
b' <cp:version>1.2.88</cp:version>\n'
b'</cp:coreProperties>\n'
)
return CoreProperties.load(None, None, xml, None)
|
cchanrhiza/python-pptx
|
tests/parts/test_coreprops.py
|
Python
|
mit
| 8,938
|