id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
13,900
|
test_RedirectHandlerFactory.py
|
ansible_ansible/test/units/module_utils/urls/test_RedirectHandlerFactory.py
|
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import io
import urllib.request
import urllib.error
from ansible.module_utils.urls import HTTPRedirectHandler
import pytest
@pytest.fixture
def urllib_req():
    """Return a plain GET urllib Request for https://ansible.com/ used by the redirect tests."""
    req = urllib.request.Request(
        'https://ansible.com/'
    )
    return req
@pytest.fixture
def request_body():
    """Return a file-like request body passed through to redirect_request in each test."""
    return io.StringIO('TESTS')
def test_no_redirs(urllib_req, request_body):
    """With follow_redirects='none', any redirect is refused by raising HTTPError."""
    handler = HTTPRedirectHandler('none')
    inst = handler()
    with pytest.raises(urllib.error.HTTPError):
        inst.redirect_request(urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
def test_urllib2_redir(urllib_req, request_body, mocker):
    """With follow_redirects='urllib2', the stock urllib redirect handler is delegated to."""
    redir_request_mock = mocker.patch('ansible.module_utils.urls.urllib.request.HTTPRedirectHandler.redirect_request')
    handler = HTTPRedirectHandler('urllib2')
    inst = handler()
    inst.redirect_request(urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
    # The parent-class method must receive the call unchanged.
    redir_request_mock.assert_called_once_with(inst, urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
def test_all_redir(urllib_req, request_body, mocker):
    """With follow_redirects='all', a GET redirect builds a fresh GET Request to the new URL."""
    req_mock = mocker.patch('ansible.module_utils.urls.urllib.request.Request')
    handler = HTTPRedirectHandler('all')
    inst = handler()
    inst.redirect_request(urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
    # Body is dropped (data=None) and the original host becomes origin_req_host.
    req_mock.assert_called_once_with('https://docs.ansible.com/', data=None, headers={}, method='GET', origin_req_host='ansible.com', unverifiable=True)
def test_all_redir_post(request_body, mocker):
    """With follow_redirects='all', a 301 on a POST is followed as a body-less GET."""
    handler = HTTPRedirectHandler('all')
    inst = handler()
    # The original passed 'POST' as the second positional argument of Request,
    # which is ``data`` — the request only behaved as a POST because non-None
    # data implies POST. State the method explicitly instead.
    req = urllib.request.Request(
        'https://ansible.com/',
        method='POST'
    )
    req_mock = mocker.patch('ansible.module_utils.urls.urllib.request.Request')
    inst.redirect_request(req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
    req_mock.assert_called_once_with('https://docs.ansible.com/', data=None, headers={}, method='GET', origin_req_host='ansible.com', unverifiable=True)
def test_redir_headers_removal(urllib_req, request_body, mocker):
    """Body-specific headers (Content-Type/Content-Length) are stripped when redirecting."""
    req_mock = mocker.patch('ansible.module_utils.urls.urllib.request.Request')
    handler = HTTPRedirectHandler('all')
    inst = handler()
    urllib_req.headers = {
        'Content-Type': 'application/json',
        'Content-Length': 100,
        'Foo': 'bar',
    }
    inst.redirect_request(urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
    # Only the unrelated 'Foo' header survives the redirect.
    req_mock.assert_called_once_with('https://docs.ansible.com/', data=None, headers={'Foo': 'bar'}, method='GET', origin_req_host='ansible.com',
                                     unverifiable=True)
def test_redir_url_spaces(urllib_req, request_body, mocker):
    """Spaces in the redirect target URL are percent-encoded before the new request is built."""
    req_mock = mocker.patch('ansible.module_utils.urls.urllib.request.Request')
    handler = HTTPRedirectHandler('all')
    inst = handler()
    inst.redirect_request(urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/foo bar')
    req_mock.assert_called_once_with('https://docs.ansible.com/foo%20bar', data=None, headers={}, method='GET', origin_req_host='ansible.com',
                                     unverifiable=True)
def test_redir_safe(urllib_req, request_body, mocker):
    """With follow_redirects='safe', a redirected GET is followed."""
    req_mock = mocker.patch('ansible.module_utils.urls.urllib.request.Request')
    handler = HTTPRedirectHandler('safe')
    inst = handler()
    inst.redirect_request(urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
    req_mock.assert_called_once_with('https://docs.ansible.com/', data=None, headers={}, method='GET', origin_req_host='ansible.com', unverifiable=True)
def test_redir_safe_not_safe(request_body):
    """With follow_redirects='safe', redirecting a non-safe method (POST) raises HTTPError."""
    handler = HTTPRedirectHandler('safe')
    inst = handler()
    # The original passed 'POST' as the ``data`` positional of Request; being
    # a POST was only an artifact of data being non-None. Say it explicitly.
    req = urllib.request.Request(
        'https://ansible.com/',
        method='POST'
    )
    with pytest.raises(urllib.error.HTTPError):
        inst.redirect_request(req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
def test_redir_no_error_on_invalid(urllib_req, request_body):
    """An unrecognised follow_redirects value refuses the redirect with HTTPError.

    NOTE(review): despite the 'no_error' name, this test asserts that HTTPError
    IS raised — presumably meaning 'no unexpected error, just the standard
    redirect refusal'; confirm the intent before renaming.
    """
    handler = HTTPRedirectHandler('invalid')
    inst = handler()
    with pytest.raises(urllib.error.HTTPError):
        inst.redirect_request(urllib_req, request_body, 301, '301 Moved Permanently', {}, 'https://docs.ansible.com/')
def test_redir_http_error_308_urllib2(urllib_req, request_body, mocker):
    """A 308 Permanent Redirect under 'urllib2' is handed to the stock urllib handler."""
    redir_mock = mocker.patch.object(urllib.request.HTTPRedirectHandler, 'redirect_request')
    handler = HTTPRedirectHandler('urllib2')
    inst = handler()
    inst.redirect_request(urllib_req, request_body, 308, '308 Permanent Redirect', {}, 'https://docs.ansible.com/')
    assert redir_mock.call_count == 1
| 5,003
|
Python
|
.py
| 90
| 49.911111
| 152
| 0.701232
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,901
|
test_prepare_multipart.py
|
ansible_ansible/test/units/module_utils/urls/test_prepare_multipart.py
|
# -*- coding: utf-8 -*-
# (c) 2020 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
from email.message import Message
import pytest
from ansible.module_utils.urls import prepare_multipart
def test_prepare_multipart():
    """prepare_multipart renders form fields and files into the expected multipart body.

    Compares the generated body against the fixtures/multipart.txt golden file,
    substituting the fixture's hard-coded boundary with the one actually generated.
    """
    fixture_boundary = b'===============3996062709511591449=='
    here = os.path.dirname(__file__)
    multipart = os.path.join(here, 'fixtures/multipart.txt')
    client_cert = os.path.join(here, 'fixtures/client.pem')
    client_key = os.path.join(here, 'fixtures/client.key')
    client_txt = os.path.join(here, 'fixtures/client.txt')
    # Exercise every supported field shape: bare string, content dict,
    # explicit mime_type, inline file content, and on-disk files.
    fields = {
        'form_field_1': 'form_value_1',
        'form_field_2': {
            'content': 'form_value_2',
        },
        'form_field_3': {
            'content': '<html></html>',
            'mime_type': 'text/html',
        },
        'form_field_4': {
            'content': '{"foo": "bar"}',
            'mime_type': 'application/json',
        },
        'file1': {
            'content': 'file_content_1',
            'filename': 'fake_file1.txt',
        },
        'file2': {
            'content': '<html></html>',
            'mime_type': 'text/html',
            'filename': 'fake_file2.html',
        },
        'file3': {
            'content': '{"foo": "bar"}',
            'mime_type': 'application/json',
            'filename': 'fake_file3.json',
        },
        'file4': {
            'filename': client_cert,
            'mime_type': 'text/plain',
        },
        'file5': {
            'filename': client_key,
            'mime_type': 'application/octet-stream'
        },
        'file6': {
            'filename': client_txt,
        },
    }
    content_type, b_data = prepare_multipart(fields)
    # Parse the returned Content-Type header to extract the boundary.
    headers = Message()
    headers['Content-Type'] = content_type
    assert headers.get_content_type() == 'multipart/form-data'
    boundary = headers.get_boundary()
    assert boundary is not None
    with open(multipart, 'rb') as f:
        b_expected = f.read().replace(fixture_boundary, boundary.encode())
    # Depending on Python version, there may or may not be a trailing newline
    assert b_data.rstrip(b'\r\n') == b_expected.rstrip(b'\r\n')
def test_wrong_type():
    """prepare_multipart rejects a non-dict argument and None-valued entries."""
    with pytest.raises(TypeError):
        prepare_multipart('foo')
    with pytest.raises(TypeError):
        prepare_multipart({'foo': None})
def test_empty():
    """prepare_multipart rejects a field dict with no content or filename."""
    with pytest.raises(ValueError):
        prepare_multipart({'foo': {}})
def test_unknown_mime(mocker):
    """When mimetypes cannot guess a type, application/octet-stream is used."""
    fields = {'foo': {'filename': 'foo.boom', 'content': 'foo'}}
    mocker.patch('mimetypes.guess_type', return_value=(None, None))
    content_type, b_data = prepare_multipart(fields)
    assert b'Content-Type: application/octet-stream' in b_data
def test_bad_mime(mocker):
    """If mimetypes.guess_type raises, the fallback application/octet-stream is used."""
    fields = {'foo': {'filename': 'foo.boom', 'content': 'foo'}}
    mocker.patch('mimetypes.guess_type', side_effect=TypeError)
    content_type, b_data = prepare_multipart(fields)
    assert b'Content-Type: application/octet-stream' in b_data
| 3,063
|
Python
|
.py
| 79
| 31.329114
| 92
| 0.59433
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,902
|
test_gzip.py
|
ansible_ansible/test/units/module_utils/urls/test_gzip.py
|
# -*- coding: utf-8 -*-
# (c) 2021 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import gzip
import io
import sys
import http.client
from ansible.module_utils.urls import GzipDecodedReader, Request
import pytest
def compress(data):
    """Gzip-compress *data* (bytes) and return the compressed payload.

    Uses ``with`` so the gzip trailer is always flushed; the original
    ``try/finally`` referenced ``f`` in ``finally`` and would raise
    NameError if the GzipFile constructor itself failed.
    """
    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb') as f:
        f.write(data)
    return buf.getvalue()
class Sock(io.BytesIO):
    """In-memory socket stand-in for http.client.HTTPResponse."""

    def makefile(self, *args, **kwds):
        # HTTPResponse calls sock.makefile() to get a readable file object;
        # returning self serves the BytesIO contents directly.
        return self
@pytest.fixture
def urlopen_mock(mocker):
    """Patch urllib.request.urlopen as used inside ansible.module_utils.urls."""
    return mocker.patch('ansible.module_utils.urls.urllib.request.urlopen')
# Canned JSON body served by the fake responses; its length (108 bytes) must
# match the Content-Length headers below or HTTPResponse.read() would truncate.
JSON_DATA = b'{"foo": "bar", "baz": "qux", "sandwich": "ham", "tech_level": "pickle", "pop": "corn", "ansible": "awesome"}'

# Raw uncompressed HTTP/1.1 response. The blank line separating the header
# block from the body is required for http.client to parse it (it was lost
# in a whitespace-stripping copy of this file).
RESP = b"""HTTP/1.1 200 OK
Content-Type: application/json; charset=utf-8
Set-Cookie: foo
Set-Cookie: bar
Content-Length: 108

%s""" % JSON_DATA
# Gzip-encoded variant of the same response. As with RESP, the blank line
# between headers and body is required for http.client parsing. The
# Content-Length of 100 is the size of the compressed payload — presumably
# chosen to match compress(JSON_DATA) on the supported Pythons; verify if
# the payload or gzip defaults change.
GZIP_RESP = b"""HTTP/1.1 200 OK
Content-Type: application/json; charset=utf-8
Set-Cookie: foo
Set-Cookie: bar
Content-Encoding: gzip
Content-Length: 100

%s""" % compress(JSON_DATA)
def test_Request_open_gzip(urlopen_mock):
    """A gzip-encoded response body is transparently wrapped in GzipDecodedReader."""
    h = http.client.HTTPResponse(
        Sock(GZIP_RESP),
        method='GET',
    )
    h.begin()
    urlopen_mock.return_value = h
    r = Request().open('GET', 'https://ansible.com/')
    assert isinstance(r.fp, GzipDecodedReader)
    # read() returns the decompressed payload.
    assert r.read() == JSON_DATA
def test_Request_open_not_gzip(urlopen_mock):
    """A response without Content-Encoding: gzip is not wrapped in GzipDecodedReader."""
    h = http.client.HTTPResponse(
        Sock(RESP),
        method='GET',
    )
    h.begin()
    urlopen_mock.return_value = h
    r = Request().open('GET', 'https://ansible.com/')
    assert not isinstance(r.fp, GzipDecodedReader)
    assert r.read() == JSON_DATA
def test_Request_open_decompress_false(urlopen_mock):
    """decompress=False disables the automatic gzip wrapping entirely."""
    h = http.client.HTTPResponse(
        Sock(RESP),
        method='GET',
    )
    h.begin()
    urlopen_mock.return_value = h
    r = Request().open('GET', 'https://ansible.com/', decompress=False)
    assert not isinstance(r.fp, GzipDecodedReader)
    assert r.read() == JSON_DATA
def test_GzipDecodedReader_no_gzip(monkeypatch, mocker):
    """If the gzip module is unavailable, GzipDecodedReader raises MissingModuleError.

    Simulates a missing gzip by evicting cached modules and intercepting
    __import__, then re-imports ansible.module_utils.urls so its import-time
    HAS_GZIP detection runs again. The statement order here is load-bearing.
    """
    # Force both modules to be re-imported from scratch.
    monkeypatch.delitem(sys.modules, 'gzip')
    monkeypatch.delitem(sys.modules, 'ansible.module_utils.urls')
    orig_import = __import__

    def _import(*args):
        # Fail only the gzip import; everything else imports normally.
        if args[0] == 'gzip':
            raise ImportError
        return orig_import(*args)

    mocker.patch('builtins.__import__', _import)
    # __import__ of a dotted path returns the top-level package; walk down.
    mod = __import__('ansible.module_utils.urls').module_utils.urls
    assert mod.HAS_GZIP is False
    pytest.raises(mod.MissingModuleError, mod.GzipDecodedReader, None)
| 2,688
|
Python
|
.py
| 80
| 29.0625
| 123
| 0.679751
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,903
|
test_channel_binding.py
|
ansible_ansible/test/units/module_utils/urls/test_channel_binding.py
|
# -*- coding: utf-8 -*-
# (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import base64
import os.path
import pytest
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils import urls
# cryptography < 41.0.0 does not associate the algorithm with the cert,
# so module_utils falls back to cryptography.hazmat.primitives.hashes.SHA256
# Expected channel-binding hash for the rsa-pss_sha512 fixture certificate.
# cryptography < 41.0.0 does not associate the signature hash algorithm with
# the cert, so module_utils falls back to SHA256 — yielding this value.
rsa_pss_sha512 = (
    b'\x85\x85\x19\xB9\xE1\x0F\x23\xE2'
    b'\x1D\x2C\xE9\xD5\x47\x2A\xAB\xCE'
    b'\x42\x0F\xD1\x00\x75\x9C\x53\xA1'
    b'\x7B\xB9\x79\x86\xB2\x59\x61\x27'
)
if urls.HAS_CRYPTOGRAPHY:
    import cryptography

    # From 41.0.0 the real SHA-512-based hash is produced instead.
    if LooseVersion(cryptography.__version__) >= LooseVersion('41.0.0'):
        rsa_pss_sha512 = (
            b"K\x8c\xa5\xf5y\x89A\xa0\xaf'\xeb"
            b"\x00\xeb\xccUz6z\xe0l\x035\xa3h"
            b"\xfc\xa6 k\xda]\xba\x88\xf8m\xf3"
            b"\x98\xd2\xd2wW\x87w\xa4\x0e\x14"
            b"\t\xd4]\xb9\xa29\xe2h\x1b\x9f"
            b"\xe6\x04\x00\xec\x7fc\x83\xd7b"
        )
@pytest.mark.skipif(not urls.HAS_CRYPTOGRAPHY, reason='Requires cryptography to be installed')
@pytest.mark.parametrize('certificate, expected', [
('rsa_md5.pem', b'\x23\x34\xB8\x47\x6C\xBF\x4E\x6D'
b'\xFC\x76\x6A\x5D\x5A\x30\xD6\x64'
b'\x9C\x01\xBA\xE1\x66\x2A\x5C\x3A'
b'\x13\x02\xA9\x68\xD7\xC6\xB0\xF6'),
('rsa_sha1.pem', b'\x14\xCF\xE8\xE4\xB3\x32\xB2\x0A'
b'\x34\x3F\xC8\x40\xB1\x8F\x9F\x6F'
b'\x78\x92\x6A\xFE\x7E\xC3\xE7\xB8'
b'\xE2\x89\x69\x61\x9B\x1E\x8F\x3E'),
('rsa_sha256.pem', b'\x99\x6F\x3E\xEA\x81\x2C\x18\x70'
b'\xE3\x05\x49\xFF\x9B\x86\xCD\x87'
b'\xA8\x90\xB6\xD8\xDF\xDF\x4A\x81'
b'\xBE\xF9\x67\x59\x70\xDA\xDB\x26'),
('rsa_sha384.pem', b'\x34\xF3\x03\xC9\x95\x28\x6F\x4B'
b'\x21\x4A\x9B\xA6\x43\x5B\x69\xB5'
b'\x1E\xCF\x37\x58\xEA\xBC\x2A\x14'
b'\xD7\xA4\x3F\xD2\x37\xDC\x2B\x1A'
b'\x1A\xD9\x11\x1C\x5C\x96\x5E\x10'
b'\x75\x07\xCB\x41\x98\xC0\x9F\xEC'),
('rsa_sha512.pem', b'\x55\x6E\x1C\x17\x84\xE3\xB9\x57'
b'\x37\x0B\x7F\x54\x4F\x62\xC5\x33'
b'\xCB\x2C\xA5\xC1\xDA\xE0\x70\x6F'
b'\xAE\xF0\x05\x44\xE1\xAD\x2B\x76'
b'\xFF\x25\xCF\xBE\x69\xB1\xC4\xE6'
b'\x30\xC3\xBB\x02\x07\xDF\x11\x31'
b'\x4C\x67\x38\xBC\xAE\xD7\xE0\x71'
b'\xD7\xBF\xBF\x2C\x9D\xFA\xB8\x5D'),
('rsa-pss_sha256.pem', b'\xF2\x31\xE6\xFF\x3F\x9E\x16\x1B'
b'\xC2\xDC\xBB\x89\x8D\x84\x47\x4E'
b'\x58\x9C\xD7\xC2\x7A\xDB\xEF\x8B'
b'\xD9\xC0\xC0\x68\xAF\x9C\x36\x6D'),
('rsa-pss_sha512.pem', rsa_pss_sha512),
('ecdsa_sha256.pem', b'\xFE\xCF\x1B\x25\x85\x44\x99\x90'
b'\xD9\xE3\xB2\xC9\x2D\x3F\x59\x7E'
b'\xC8\x35\x4E\x12\x4E\xDA\x75\x1D'
b'\x94\x83\x7C\x2C\x89\xA2\xC1\x55'),
('ecdsa_sha512.pem', b'\xE5\xCB\x68\xB2\xF8\x43\xD6\x3B'
b'\xF4\x0B\xCB\x20\x07\x60\x8F\x81'
b'\x97\x61\x83\x92\x78\x3F\x23\x30'
b'\xE5\xEF\x19\xA5\xBD\x8F\x0B\x2F'
b'\xAA\xC8\x61\x85\x5F\xBB\x63\xA2'
b'\x21\xCC\x46\xFC\x1E\x22\x6A\x07'
b'\x24\x11\xAF\x17\x5D\xDE\x47\x92'
b'\x81\xE0\x06\x87\x8B\x34\x80\x59'),
])
def test_cbt_with_cert(certificate, expected):
    """Channel-binding hash of each PEM fixture certificate matches the known value."""
    with open(os.path.join(os.path.dirname(__file__), 'fixtures', 'cbt', certificate)) as fd:
        # Drop the PEM BEGIN/END lines and decode the base64 body to DER.
        # (Renamed the ambiguous loop variable ``l`` — flake8 E741.)
        cert_der = base64.b64decode("".join(line.strip() for line in fd.readlines()[1:-1]))
    actual = urls.get_channel_binding_cert_hash(cert_der)
    assert actual == expected
def test_cbt_no_cryptography(monkeypatch):
    """Without cryptography available, get_channel_binding_cert_hash returns None."""
    monkeypatch.setattr(urls, 'HAS_CRYPTOGRAPHY', False)
    assert urls.get_channel_binding_cert_hash(None) is None
| 4,289
|
Python
|
.py
| 82
| 39.268293
| 94
| 0.559476
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,904
|
test_urls.py
|
ansible_ansible/test/units/module_utils/urls/test_urls.py
|
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils import urls
def test_basic_auth_header():
    """basic_auth_header base64-encodes 'user:passwd' into a Basic auth value."""
    header = urls.basic_auth_header('user', 'passwd')
    assert header == b'Basic dXNlcjpwYXNzd2Q='
def test_ParseResultDottedDict():
    """ParseResultDottedDict mirrors urlparse results with attribute access and as_list()."""
    url = 'https://ansible.com/blog'
    parts = urls.urlparse(url)
    dotted_parts = urls.ParseResultDottedDict(parts._asdict())
    assert parts[0] == dotted_parts.scheme
    assert dotted_parts.as_list() == list(parts)
def test_unix_socket_patch_httpconnection_connect(mocker):
    """Inside the patch context, HTTPConnection.connect routes through UnixHTTPConnection."""
    unix_conn = mocker.patch.object(urls.UnixHTTPConnection, 'connect')
    conn = urls.http.client.HTTPConnection('ansible.com')
    with urls.unix_socket_patch_httpconnection_connect():
        conn.connect()
    assert unix_conn.call_count == 1
| 933
|
Python
|
.py
| 20
| 42.6
| 92
| 0.724558
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,905
|
test_split.py
|
ansible_ansible/test/units/module_utils/urls/test_split.py
|
# -*- coding: utf-8 -*-
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.module_utils.urls import _split_multiext
@pytest.mark.parametrize(
    'name, expected',
    (
        ('', ('', '')),
        ('a', ('a', '')),
        ('file.tar', ('file', '.tar')),
        ('file.tar.', ('file.tar.', '')),
        ('file.hidden', ('file.hidden', '')),
        ('file.tar.gz', ('file', '.tar.gz')),
        ('yaml-0.2.5.tar.gz', ('yaml-0.2.5', '.tar.gz')),
        ('yaml-0.2.5.zip', ('yaml-0.2.5', '.zip')),
        ('yaml-0.2.5.zip.hidden', ('yaml-0.2.5.zip.hidden', '')),
        ('geckodriver-v0.26.0-linux64.tar', ('geckodriver-v0.26.0-linux64', '.tar')),
        ('/var/lib/geckodriver-v0.26.0-linux64.tar', ('/var/lib/geckodriver-v0.26.0-linux64', '.tar')),
        ('https://acme.com/drivers/geckodriver-v0.26.0-linux64.tar', ('https://acme.com/drivers/geckodriver-v0.26.0-linux64', '.tar')),
        ('https://acme.com/drivers/geckodriver-v0.26.0-linux64.tar.bz', ('https://acme.com/drivers/geckodriver-v0.26.0-linux64', '.tar.bz')),
    )
)
def test__split_multiext(name, expected):
    """Each name splits into (base, multi-part extension) with default limits."""
    assert expected == _split_multiext(name)
@pytest.mark.parametrize(
    'args, expected',
    (
        # Positional args after the name are the min/max extension lengths.
        (('base-v0.26.0-linux64.tar.gz', 4, 4), ('base-v0.26.0-linux64.tar.gz', '')),
        (('base-v0.26.0.hidden', 1, 7), ('base-v0.26', '.0.hidden')),
        (('base-v0.26.0.hidden', 3, 4), ('base-v0.26.0.hidden', '')),
        (('base-v0.26.0.hidden.tar', 1, 7), ('base-v0.26.0', '.hidden.tar')),
        (('base-v0.26.0.hidden.tar.gz', 1, 7), ('base-v0.26.0.hidden', '.tar.gz')),
        (('base-v0.26.0.hidden.tar.gz', 4, 7), ('base-v0.26.0.hidden.tar.gz', '')),
    )
)
def test__split_multiext_min_max(args, expected):
    """Extension-length bounds control which trailing segments count as the extension."""
    assert expected == _split_multiext(*args)
@pytest.mark.parametrize(
    'kwargs, expected', (
        # ``count`` caps how many dot-segments may form the extension.
        (({'name': 'base-v0.25.0.tar.gz', 'count': 1}), ('base-v0.25.0.tar', '.gz')),
        (({'name': 'base-v0.25.0.tar.gz', 'count': 2}), ('base-v0.25.0', '.tar.gz')),
        (({'name': 'base-v0.25.0.tar.gz', 'count': 3}), ('base-v0.25.0', '.tar.gz')),
        (({'name': 'base-v0.25.0.tar.gz', 'count': 4}), ('base-v0.25.0', '.tar.gz')),
        (({'name': 'base-v0.25.foo.tar.gz', 'count': 3}), ('base-v0.25', '.foo.tar.gz')),
        (({'name': 'base-v0.25.foo.tar.gz', 'count': 4}), ('base-v0', '.25.foo.tar.gz')),
    )
)
def test__split_multiext_count(kwargs, expected):
    """The count keyword limits (or extends) the number of extension segments taken."""
    assert expected == _split_multiext(**kwargs)
@pytest.mark.parametrize(
    'name',
    (
        list(),
        tuple(),
        dict(),
        set(),
        1.729879,
        247,
    )
)
def test__split_multiext_invalid(name):
    """Non-string inputs raise TypeError or AttributeError rather than returning junk."""
    with pytest.raises((TypeError, AttributeError)):
        _split_multiext(name)
| 2,880
|
Python
|
.py
| 65
| 38.276923
| 141
| 0.554565
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,906
|
test_fetch_url.py
|
ansible_ansible/test/units/module_utils/urls/test_fetch_url.py
|
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import io
import socket
import sys
import http.client
import urllib.error
from http.cookiejar import Cookie
from ansible.module_utils.urls import fetch_url, ConnectionError
import pytest
from unittest.mock import MagicMock
BASE_URL = 'https://ansible.com/'
class AnsibleModuleExit(Exception):
    """Raised by the fake module's exit paths, capturing the call's arguments."""

    def __init__(self, *args, **kwargs):
        # Exception.__init__ stores ``args`` on self.args; keep kwargs alongside
        # so tests can assert on exactly what exit_json/fail_json received.
        super().__init__(*args)
        self.kwargs = kwargs
class ExitJson(AnsibleModuleExit):
    """Raised in place of AnsibleModule.exit_json()."""
    pass
class FailJson(AnsibleModuleExit):
    """Raised in place of AnsibleModule.fail_json()."""
    pass
@pytest.fixture
def open_url_mock(mocker):
    """Patch open_url as referenced by fetch_url inside ansible.module_utils.urls."""
    return mocker.patch('ansible.module_utils.urls.open_url')
@pytest.fixture
def fake_ansible_module():
    """Provide a fresh FakeAnsibleModule for each test."""
    return FakeAnsibleModule()
class FakeAnsibleModule:
    """Minimal AnsibleModule stand-in whose exit paths raise instead of exiting."""

    def __init__(self):
        # Only the attributes fetch_url reads.
        self.params = {}
        self.tmpdir = None

    def exit_json(self, *args, **kwargs):
        # Surface the exit to the test as an assertable exception.
        raise ExitJson(*args, **kwargs)

    def fail_json(self, *args, **kwargs):
        raise FailJson(*args, **kwargs)
def test_fetch_url(open_url_mock, fake_ansible_module):
    """With no module params, fetch_url calls open_url with its documented defaults."""
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    # The cookie jar is created inside fetch_url; pull it from the actual call
    # so the full-kwargs assertion below can include it.
    dummy, kwargs = open_url_mock.call_args
    open_url_mock.assert_called_once_with(BASE_URL, client_cert=None, client_key=None, cookies=kwargs['cookies'], data=None,
                                          follow_redirects='urllib2', force=False, force_basic_auth='', headers=None,
                                          http_agent='ansible-httpget', last_mod_time=None, method=None, timeout=10, url_password='', url_username='',
                                          use_proxy=True, validate_certs=True, use_gssapi=False, unix_socket=None, ca_path=None, unredirected_headers=None,
                                          decompress=True, ciphers=None, use_netrc=True)
def test_fetch_url_params(open_url_mock, fake_ansible_module):
    """Values set in module.params are forwarded to open_url in place of the defaults."""
    fake_ansible_module.params = {
        'validate_certs': False,
        'url_username': 'user',
        'url_password': 'passwd',
        'http_agent': 'ansible-test',
        'force_basic_auth': True,
        'follow_redirects': 'all',
        'client_cert': 'client.pem',
        'client_key': 'client.key',
    }
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    dummy, kwargs = open_url_mock.call_args
    open_url_mock.assert_called_once_with(BASE_URL, client_cert='client.pem', client_key='client.key', cookies=kwargs['cookies'], data=None,
                                          follow_redirects='all', force=False, force_basic_auth=True, headers=None,
                                          http_agent='ansible-test', last_mod_time=None, method=None, timeout=10, url_password='passwd', url_username='user',
                                          use_proxy=True, validate_certs=False, use_gssapi=False, unix_socket=None, ca_path=None, unredirected_headers=None,
                                          decompress=True, ciphers=None, use_netrc=True)
def test_fetch_url_cookies(mocker, fake_ansible_module):
    """Response cookies surface in info as a dict, a cookies_string and 'set-cookie'."""
    def make_cookies(*args, **kwargs):
        # Fake open_url: plant two cookies in the jar fetch_url passes in and
        # mirror them as Set-Cookie headers on the mocked response.
        cookies = kwargs['cookies']
        r = MagicMock()
        r.headers = http.client.HTTPMessage()
        add_header = r.headers.add_header
        r.info.return_value = r.headers
        for name, value in (('Foo', 'bar'), ('Baz', 'qux')):
            cookie = Cookie(
                version=0,
                name=name,
                value=value,
                port=None,
                port_specified=False,
                domain="ansible.com",
                domain_specified=True,
                domain_initial_dot=False,
                path="/",
                path_specified=True,
                secure=False,
                expires=None,
                discard=False,
                comment=None,
                comment_url=None,
                rest=None
            )
            cookies.set_cookie(cookie)
            add_header('Set-Cookie', '%s=%s' % (name, value))
        return r

    # The original rebound the ``mocker`` fixture to the patch's return value,
    # shadowing the fixture for no benefit; just install the patch.
    mocker.patch('ansible.module_utils.urls.open_url', new=make_cookies)
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    assert info['cookies'] == {'Baz': 'qux', 'Foo': 'bar'}
    if sys.version_info < (3, 11):
        # Python sorts cookies in order of most specific (ie. longest) path first
        # items with the same path are reversed from response order
        assert info['cookies_string'] == 'Baz=qux; Foo=bar'
    else:
        # Python 3.11 and later preserve the Set-Cookie order.
        # See: https://github.com/python/cpython/pull/22745/
        assert info['cookies_string'] == 'Foo=bar; Baz=qux'
    # The key here has a `-` as opposed to what we see in the `uri` module that converts to `_`
    # Note: this is response order, which differs from cookies_string
    assert info['set-cookie'] == 'Foo=bar, Baz=qux'
def test_fetch_url_connectionerror(open_url_mock, fake_ansible_module):
    """ConnectionError and ValueError from open_url both fail the module with status -1."""
    open_url_mock.side_effect = ConnectionError('TESTS')
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, BASE_URL)
    assert excinfo.value.kwargs['msg'] == 'TESTS'
    assert BASE_URL == excinfo.value.kwargs['url']
    assert excinfo.value.kwargs['status'] == -1
    # Same handling path for ValueError.
    open_url_mock.side_effect = ValueError('TESTS')
    with pytest.raises(FailJson) as excinfo:
        fetch_url(fake_ansible_module, BASE_URL)
    assert excinfo.value.kwargs['msg'] == 'TESTS'
    assert BASE_URL == excinfo.value.kwargs['url']
    assert excinfo.value.kwargs['status'] == -1
def test_fetch_url_httperror(open_url_mock, fake_ansible_module):
    """An HTTPError becomes an info dict carrying status, body and content-type."""
    open_url_mock.side_effect = urllib.error.HTTPError(
        BASE_URL,
        500,
        'Internal Server Error',
        {'Content-Type': 'application/json'},
        io.StringIO('TESTS')
    )
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    assert info == {'msg': 'HTTP Error 500: Internal Server Error', 'body': 'TESTS',
                    'status': 500, 'url': BASE_URL, 'content-type': 'application/json'}
def test_fetch_url_urlerror(open_url_mock, fake_ansible_module):
    """A URLError is reported as a failed request with status -1."""
    open_url_mock.side_effect = urllib.error.URLError('TESTS')
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    assert info == {'msg': 'Request failed: <urlopen error TESTS>', 'status': -1, 'url': BASE_URL}
def test_fetch_url_socketerror(open_url_mock, fake_ansible_module):
    """A socket error is reported as a connection failure with status -1."""
    open_url_mock.side_effect = socket.error('TESTS')
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    assert info == {'msg': 'Connection failure: TESTS', 'status': -1, 'url': BASE_URL}
def test_fetch_url_exception(open_url_mock, fake_ansible_module):
    """An unexpected exception yields status -1 plus a traceback in info['exception']."""
    open_url_mock.side_effect = Exception('TESTS')
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    exception = info.pop('exception')
    assert info == {'msg': 'An unknown error occurred: TESTS', 'status': -1, 'url': BASE_URL}
    assert "Exception: TESTS" in exception
def test_fetch_url_badstatusline(open_url_mock, fake_ansible_module):
    """A BadStatusLine (server closed early) is reported as a connection failure."""
    open_url_mock.side_effect = http.client.BadStatusLine('TESTS')
    r, info = fetch_url(fake_ansible_module, BASE_URL)
    assert info == {'msg': 'Connection failure: connection was closed before a valid response was received: TESTS', 'status': -1, 'url': BASE_URL}
| 7,365
|
Python
|
.py
| 147
| 40.959184
| 157
| 0.63238
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,907
|
test_Request.py
|
ansible_ansible/test/units/module_utils/urls/test_Request.py
|
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import datetime
import os
import urllib.request
import http.client
from ansible.module_utils.urls import (Request, open_url, cookiejar,
UnixHTTPHandler, UnixHTTPSConnection)
from ansible.module_utils.urls import HTTPRedirectHandler
import pytest
from unittest.mock import call
import ssl
@pytest.fixture
def urlopen_mock(mocker):
    """Patch urllib.request.urlopen as used inside ansible.module_utils.urls."""
    return mocker.patch('ansible.module_utils.urls.urllib.request.urlopen')
@pytest.fixture
def install_opener_mock(mocker):
    """Patch install_opener so tests can inspect the opener Request builds."""
    return mocker.patch('ansible.module_utils.urls.urllib.request.install_opener')
def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
    """Every open() argument except headers falls back to the constructor default via _fallback."""
    here = os.path.dirname(__file__)
    pem = os.path.join(here, 'fixtures/client.pem')
    client_key = os.path.join(here, 'fixtures/client.key')
    cookies = cookiejar.CookieJar()
    # Construct a Request with a non-default value for every supported option.
    request = Request(
        headers={'foo': 'bar'},
        use_proxy=False,
        force=True,
        timeout=100,
        validate_certs=False,
        url_username='user',
        url_password='passwd',
        http_agent='ansible-tests',
        force_basic_auth=True,
        follow_redirects='all',
        client_cert=pem,
        client_key=client_key,
        cookies=cookies,
        unix_socket='/foo/bar/baz.sock',
        ca_path=pem,
        ciphers=['ECDHE-RSA-AES128-SHA256'],
        use_netrc=True,
    )
    fallback_mock = mocker.spy(request, '_fallback')
    r = request.open('GET', 'https://ansible.com')
    # _fallback(value, default) is expected once per option, in this order.
    calls = [
        call(None, False),  # use_proxy
        call(None, True),  # force
        call(None, 100),  # timeout
        call(None, False),  # validate_certs
        call(None, 'user'),  # url_username
        call(None, 'passwd'),  # url_password
        call(None, 'ansible-tests'),  # http_agent
        call(None, True),  # force_basic_auth
        call(None, 'all'),  # follow_redirects
        call(None, pem),  # client_cert
        call(None, client_key),  # client_key
        call(None, cookies),  # cookies
        call(None, '/foo/bar/baz.sock'),  # unix_socket
        call(None, pem),  # ca_path
        call(None, None),  # unredirected_headers
        call(None, True),  # auto_decompress
        call(None, ['ECDHE-RSA-AES128-SHA256']),  # ciphers
        call(None, True),  # use_netrc
        call(None, None),  # context
    ]
    fallback_mock.assert_has_calls(calls)
    assert fallback_mock.call_count == 19  # All but headers use fallback
    args = urlopen_mock.call_args[0]
    assert args[1] is None  # data, this is handled in the Request not urlopen
    assert args[2] == 100  # timeout
    req = args[0]
    # Headers reflect basic auth, force (no-cache) and the custom agent/header.
    assert req.headers == {
        'Authorization': b'Basic dXNlcjpwYXNzd2Q=',
        'Cache-control': 'no-cache',
        'Foo': 'bar',
        'User-agent': 'ansible-tests'
    }
    assert req.data is None
    assert req.get_method() == 'GET'
def test_Request_open(urlopen_mock, install_opener_mock):
    """A default open() builds a bare GET request and installs one HTTPRedirectHandler."""
    r = Request().open('GET', 'https://ansible.com/')
    args = urlopen_mock.call_args[0]
    assert args[1] is None  # data, this is handled in the Request not urlopen
    assert args[2] == 10  # timeout
    req = args[0]
    assert req.headers == {}
    assert req.data is None
    assert req.get_method() == 'GET'
    opener = install_opener_mock.call_args[0][0]
    handlers = opener.handlers
    expected_handlers = (
        HTTPRedirectHandler(),
    )
    found_handlers = []
    # Match by class name: the installed handler is an instance of a class
    # created by the factory, not this module's HTTPRedirectHandler itself.
    for handler in handlers:
        if handler.__class__.__name__ == 'HTTPRedirectHandler':
            found_handlers.append(handler)
    assert len(found_handlers) == len(expected_handlers)
def test_Request_open_unix_socket(urlopen_mock, install_opener_mock):
    """Passing unix_socket for an http URL installs a UnixHTTPHandler."""
    r = Request().open('GET', 'http://ansible.com/', unix_socket='/foo/bar/baz.sock')
    args = urlopen_mock.call_args[0]
    opener = install_opener_mock.call_args[0][0]
    handlers = opener.handlers
    found_handlers = []
    for handler in handlers:
        if isinstance(handler, UnixHTTPHandler):
            found_handlers.append(handler)
    assert len(found_handlers) == 1
def test_Request_open_https_unix_socket(urlopen_mock, install_opener_mock, mocker):
    """Passing unix_socket for an https URL routes HTTPS through UnixHTTPSConnection."""
    do_open = mocker.patch.object(urllib.request.HTTPSHandler, 'do_open')
    r = Request().open('GET', 'https://ansible.com/', unix_socket='/foo/bar/baz.sock')
    args = urlopen_mock.call_args[0]
    opener = install_opener_mock.call_args[0][0]
    handlers = opener.handlers
    found_handlers = []
    for handler in handlers:
        if isinstance(handler, urllib.request.HTTPSHandler):
            found_handlers.append(handler)
    assert len(found_handlers) == 1
    # Trigger the handler so do_open records the connection class it was given.
    found_handlers[0].https_open(None)
    args = do_open.call_args[0]
    cls = args[0]
    assert isinstance(cls, UnixHTTPSConnection)
def test_Request_open_ftp(urlopen_mock, install_opener_mock, mocker):
    """ftp URLs bypass the HTTP-specific URL processing."""
    mocker.patch('ansible.module_utils.urls.ParseResultDottedDict.as_list', side_effect=AssertionError)
    # Using ftp scheme should prevent the AssertionError side effect to fire
    r = Request().open('GET', 'ftp://foo@ansible.com/')
def test_Request_open_headers(urlopen_mock, install_opener_mock):
    """Headers passed to open() land on the outgoing urllib request."""
    r = Request().open('GET', 'http://ansible.com/', headers={'Foo': 'bar'})
    args = urlopen_mock.call_args[0]
    req = args[0]
    assert req.headers == {'Foo': 'bar'}
def test_Request_open_username(urlopen_mock, install_opener_mock):
    """url_username alone installs basic and digest auth handlers with that user."""
    r = Request().open('GET', 'http://ansible.com/', url_username='user')
    opener = install_opener_mock.call_args[0][0]
    handlers = opener.handlers
    expected_handlers = (
        urllib.request.HTTPBasicAuthHandler,
        urllib.request.HTTPDigestAuthHandler,
    )
    found_handlers = []
    for handler in handlers:
        if isinstance(handler, expected_handlers):
            found_handlers.append(handler)
    assert len(found_handlers) == 2
    # Password manager registered the host with the username and no password.
    assert found_handlers[0].passwd.passwd[None] == {(('ansible.com', '/'),): ('user', None)}
@pytest.mark.parametrize('url, expected', (
    # Percent-encoded '@' and ':' in userinfo must be decoded into credentials.
    ('user2@ansible.com', ('user2', '')),
    ('user2%40@ansible.com', ('user2@', '')),
    ('user2%40:%40@ansible.com', ('user2@', '@')),
))
def test_Request_open_username_in_url(url, expected, urlopen_mock, install_opener_mock):
    """Credentials embedded in the URL netloc are extracted into the auth handlers."""
    r = Request().open('GET', f'http://{url}/')
    opener = install_opener_mock.call_args[0][0]
    handlers = opener.handlers
    expected_handlers = (
        urllib.request.HTTPBasicAuthHandler,
        urllib.request.HTTPDigestAuthHandler,
    )
    found_handlers = []
    for handler in handlers:
        if isinstance(handler, expected_handlers):
            found_handlers.append(handler)
    assert found_handlers[0].passwd.passwd[None] == {(('ansible.com', '/'),): expected}
def test_Request_open_username_force_basic(urlopen_mock, install_opener_mock):
    """force_basic_auth sends the Authorization header directly instead of auth handlers."""
    r = Request().open('GET', 'http://ansible.com/', url_username='user', url_password='passwd', force_basic_auth=True)
    opener = install_opener_mock.call_args[0][0]
    handlers = opener.handlers
    expected_handlers = (
        urllib.request.HTTPBasicAuthHandler,
        urllib.request.HTTPDigestAuthHandler,
    )
    found_handlers = []
    for handler in handlers:
        if isinstance(handler, expected_handlers):
            found_handlers.append(handler)
    # No challenge/response handlers: the header is preemptive.
    assert len(found_handlers) == 0
    args = urlopen_mock.call_args[0]
    req = args[0]
    assert req.headers.get('Authorization') == b'Basic dXNlcjpwYXNzd2Q='
def test_Request_open_auth_in_netloc(urlopen_mock, install_opener_mock):
    """user:pass in the netloc is stripped from the URL and moved into auth handlers."""
    r = Request().open('GET', 'http://user:passwd@ansible.com/')
    args = urlopen_mock.call_args[0]
    req = args[0]
    # Credentials no longer appear in the request URL itself.
    assert req.get_full_url() == 'http://ansible.com/'
    opener = install_opener_mock.call_args[0][0]
    handlers = opener.handlers
    expected_handlers = (
        urllib.request.HTTPBasicAuthHandler,
        urllib.request.HTTPDigestAuthHandler,
    )
    found_handlers = []
    for handler in handlers:
        if isinstance(handler, expected_handlers):
            found_handlers.append(handler)
    assert len(found_handlers) == 2
def test_Request_open_netrc(urlopen_mock, install_opener_mock, monkeypatch):
    """Credentials are looked up in the file named by $NETRC and applied per matching host."""
    here = os.path.dirname(__file__)

    # Host present in the netrc fixture -> Basic auth header is attached.
    monkeypatch.setenv('NETRC', os.path.join(here, 'fixtures/netrc'))
    Request().open('GET', 'http://ansible.com/')
    req = urlopen_mock.call_args[0][0]
    assert req.headers.get('Authorization') == b'Basic dXNlcjpwYXNzd2Q='

    # Host absent from the netrc fixture -> no auth header.
    Request().open('GET', 'http://foo.ansible.com/')
    req = urlopen_mock.call_args[0][0]
    assert 'Authorization' not in req.headers

    # A missing netrc file is tolerated and simply results in no auth header.
    monkeypatch.setenv('NETRC', os.path.join(here, 'fixtures/netrc.nonexistant'))
    Request().open('GET', 'http://ansible.com/')
    req = urlopen_mock.call_args[0][0]
    assert 'Authorization' not in req.headers
def test_Request_open_no_proxy(urlopen_mock, install_opener_mock, mocker):
    """use_proxy=False installs a ProxyHandler (configured with no proxies) into the opener."""
    build_opener_mock = mocker.patch('ansible.module_utils.urls.urllib.request.build_opener')

    Request().open('GET', 'http://ansible.com/', use_proxy=False)

    handlers = build_opener_mock.call_args[0]
    found_handlers = [h for h in handlers if isinstance(h, urllib.request.ProxyHandler)]
    assert len(found_handlers) == 1
def test_Request_open_no_validate_certs(urlopen_mock, install_opener_mock, mocker):
    """validate_certs=False builds an SSL context with hostname/certificate verification disabled."""
    do_open = mocker.patch.object(urllib.request.HTTPSHandler, 'do_open')

    Request().open('GET', 'https://ansible.com/', validate_certs=False)

    opener = install_opener_mock.call_args[0][0]
    ssl_handler = next(
        (h for h in opener.handlers if isinstance(h, urllib.request.HTTPSHandler)),
        None,
    )
    assert ssl_handler is not None

    # Trigger the handler so do_open records the connection class it was given.
    ssl_handler.https_open(None)
    cls = do_open.call_args[0][0]
    assert cls is http.client.HTTPSConnection

    context = ssl_handler._context
    # The exact protocol constant differs by Python version, so it is not asserted:
    # assert context.protocol == ssl.PROTOCOL_SSLv23
    if ssl.OP_NO_SSLv2:
        assert context.options & ssl.OP_NO_SSLv2
    assert context.options & ssl.OP_NO_SSLv3
    assert context.verify_mode == ssl.CERT_NONE
    assert context.check_hostname is False
def test_Request_open_client_cert(urlopen_mock, install_opener_mock, mocker):
    """client_cert/client_key are loaded into the SSL context used by the HTTPS handler."""
    load_cert_chain = mocker.patch.object(ssl.SSLContext, 'load_cert_chain')
    here = os.path.dirname(__file__)
    client_cert = os.path.join(here, 'fixtures/client.pem')
    client_key = os.path.join(here, 'fixtures/client.key')

    Request().open('GET', 'https://ansible.com/', client_cert=client_cert, client_key=client_key)

    opener = install_opener_mock.call_args[0][0]
    ssl_handler = next(
        (h for h in opener.handlers if isinstance(h, urllib.request.HTTPSHandler)),
        None,
    )
    assert ssl_handler is not None
    load_cert_chain.assert_called_once_with(client_cert, keyfile=client_key)
def test_Request_open_cookies(urlopen_mock, install_opener_mock):
    """Passing a CookieJar installs an HTTPCookieProcessor into the opener."""
    Request().open('GET', 'https://ansible.com/', cookies=cookiejar.CookieJar())

    opener = install_opener_mock.call_args[0][0]
    cookies_handler = next(
        (h for h in opener.handlers if isinstance(h, urllib.request.HTTPCookieProcessor)),
        None,
    )
    assert cookies_handler is not None
def test_Request_open_invalid_method(urlopen_mock, install_opener_mock):
    """Unknown HTTP verbs are passed through to urllib verbatim rather than rejected."""
    Request().open('UNKNOWN', 'https://ansible.com/')

    req = urlopen_mock.call_args[0][0]
    assert req.data is None
    assert req.get_method() == 'UNKNOWN'
def test_Request_open_user_agent(urlopen_mock, install_opener_mock):
    """http_agent sets the User-Agent header on the outgoing request."""
    Request().open('GET', 'https://ansible.com/', http_agent='ansible-tests')

    req = urlopen_mock.call_args[0][0]
    assert req.headers.get('User-agent') == 'ansible-tests'
def test_Request_open_force(urlopen_mock, install_opener_mock):
    """force=True requests a fresh copy: Cache-control: no-cache, and no If-modified-since."""
    Request().open('GET', 'https://ansible.com/', force=True, last_mod_time=datetime.datetime.now())

    req = urlopen_mock.call_args[0][0]
    assert req.headers.get('Cache-control') == 'no-cache'
    # force takes precedence over last_mod_time.
    assert 'If-modified-since' not in req.headers
def test_Request_open_last_mod(urlopen_mock, install_opener_mock):
    """last_mod_time is sent as an RFC 1123-style If-modified-since header."""
    now = datetime.datetime.now()
    Request().open('GET', 'https://ansible.com/', last_mod_time=now)

    req = urlopen_mock.call_args[0][0]
    assert req.headers.get('If-modified-since') == now.strftime('%a, %d %b %Y %H:%M:%S GMT')
def test_Request_open_headers_not_dict(urlopen_mock, install_opener_mock):
    """open() rejects a non-dict headers argument with ValueError."""
    with pytest.raises(ValueError):
        Request().open('GET', 'https://ansible.com/', headers=['bob'])
def test_Request_init_headers_not_dict(urlopen_mock, install_opener_mock):
    """The Request constructor rejects a non-dict headers argument with ValueError."""
    with pytest.raises(ValueError):
        Request(headers=['bob'])
@pytest.mark.parametrize('method,kwargs', [
    ('get', {}),
    ('options', {}),
    ('head', {}),
    ('post', {'data': None}),
    ('put', {'data': None}),
    ('patch', {'data': None}),
    ('delete', {}),
])
def test_methods(method, kwargs, mocker):
    """Each convenience method delegates to Request.open() with the upper-cased HTTP verb."""
    open_mock = mocker.patch('ansible.module_utils.urls.Request.open')
    bound_method = getattr(Request(), method)
    bound_method('https://ansible.com')
    open_mock.assert_called_once_with(method.upper(), 'https://ansible.com', **kwargs)
def test_open_url(urlopen_mock, install_opener_mock, mocker):
    """The module-level open_url() helper forwards its defaults to Request.open()."""
    req_mock = mocker.patch('ansible.module_utils.urls.Request.open')
    open_url('https://ansible.com/')
    expected_kwargs = dict(
        data=None, headers=None, use_proxy=True, force=False, last_mod_time=None,
        timeout=10, validate_certs=True, url_username=None, url_password=None,
        http_agent=None, force_basic_auth=False, follow_redirects='urllib2',
        client_cert=None, client_key=None, cookies=None, use_gssapi=False,
        unix_socket=None, ca_path=None, unredirected_headers=None, decompress=True,
        ciphers=None, use_netrc=True,
    )
    req_mock.assert_called_once_with('GET', 'https://ansible.com/', **expected_kwargs)
| 14,644
|
Python
|
.py
| 326
| 38.257669
| 119
| 0.662746
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,908
|
test_generic_urlparse.py
|
ansible_ansible/test/units/module_utils/urls/test_generic_urlparse.py
|
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.urls import generic_urlparse
from urllib.parse import urlparse, urlunparse
def test_generic_urlparse():
    """generic_urlparse mirrors the stdlib result and round-trips through urlunparse."""
    url = 'https://ansible.com/blog'
    std_parts = urlparse(url)
    wrapped = generic_urlparse(std_parts)
    assert wrapped.as_list() == list(std_parts)
    assert urlunparse(wrapped.as_list()) == url
def test_generic_urlparse_netloc():
    """hostname and port are exposed just like the stdlib ParseResult exposes them."""
    url = 'https://ansible.com:443/blog'
    std_parts = urlparse(url)
    wrapped = generic_urlparse(std_parts)
    assert wrapped.hostname == std_parts.hostname
    assert wrapped.hostname == 'ansible.com'
    assert wrapped.port == 443
    assert urlunparse(wrapped.as_list()) == url
| 880
|
Python
|
.py
| 20
| 40.25
| 92
| 0.723329
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,909
|
test_fetch_file.py
|
ansible_ansible/test/units/module_utils/urls/test_fetch_file.py
|
# -*- coding: utf-8 -*-
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
from ansible.module_utils.urls import fetch_file
import pytest
class FakeTemporaryFile:
    """Minimal stand-in for tempfile.NamedTemporaryFile exposing only the .name attribute."""

    def __init__(self, name):
        # Path this fake "temporary file" pretends to live at.
        self.name = name
@pytest.mark.parametrize(
    'url, prefix, suffix, expected', (
        ('http://ansible.com/foo.tar.gz?foo=%s' % ('bar' * 100), 'foo', '.tar.gz', 'foo.tar.gz'),
        ('https://www.gnu.org/licenses/gpl-3.0.txt', 'gpl-3.0', '.txt', 'gpl-3.0.txt'),
        ('http://pyyaml.org/download/libyaml/yaml-0.2.5.tar.gz', 'yaml-0.2.5', '.tar.gz', 'yaml-0.2.5.tar.gz'),
        (
            'https://github.com/mozilla/geckodriver/releases/download/v0.26.0/geckodriver-v0.26.0-linux64.tar.gz',
            'geckodriver-v0.26.0-linux64',
            '.tar.gz',
            'geckodriver-v0.26.0-linux64.tar.gz'
        ),
    )
)
def test_file_multiple_extensions(mocker, url, prefix, suffix, expected):
    """fetch_file() splits multi-part extensions (e.g. .tar.gz) into tempfile prefix/suffix correctly."""
    module = mocker.Mock()
    module.tmpdir = '/tmp'
    # Abort fetch_file() right after the temp file is created; only the
    # NamedTemporaryFile arguments matter here, not the actual download.
    module.add_cleanup_file = mocker.Mock(side_effect=AttributeError('raised intentionally'))

    mock_NamedTemporaryFile = mocker.patch('ansible.module_utils.urls.tempfile.NamedTemporaryFile',
                                           return_value=FakeTemporaryFile(os.path.join(module.tmpdir, expected)))

    with pytest.raises(AttributeError, match='raised intentionally'):
        fetch_file(module, url)

    mock_NamedTemporaryFile.assert_called_with(dir=module.tmpdir, prefix=prefix, suffix=suffix, delete=False)
| 1,635
|
Python
|
.py
| 32
| 44.03125
| 114
| 0.662688
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,910
|
test_daemon_threading.py
|
ansible_ansible/test/units/module_utils/_internal/_concurrent/test_daemon_threading.py
|
from __future__ import annotations
import threading
from ansible.module_utils._internal._concurrent import _daemon_threading
def test_daemon_thread_getattr() -> None:
    """Ensure that the threading module proxy delegates properly to the real module."""
    # Attribute access on the proxy must return the very same object as the stdlib module.
    assert _daemon_threading.current_thread is threading.current_thread
def test_daemon_threading_thread_override() -> None:
    """Ensure that the proxy module's Thread attribute is different from the real module's."""
    # The proxy substitutes its own Thread class, so identity must differ.
    assert _daemon_threading.Thread is not threading.Thread
| 541
|
Python
|
.py
| 9
| 56.666667
| 94
| 0.78327
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,911
|
test_futures.py
|
ansible_ansible/test/units/module_utils/_internal/_concurrent/test_futures.py
|
from __future__ import annotations
import concurrent.futures as _cf
import subprocess
import sys
import time
import pytest
from ansible.module_utils._internal._concurrent import _futures
def test_daemon_thread_pool_nonblocking_cm_exit() -> None:
    """Ensure that the ThreadPoolExecutor context manager exit is not blocked by in-flight tasks."""
    with _futures.DaemonThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(time.sleep, 5)

        with pytest.raises(_cf.TimeoutError):  # deprecated: description='aliased to stdlib TimeoutError in 3.11' python_version='3.10'
            future.result(timeout=1)

    # Exiting the context manager must return immediately even though the task still sleeps.
    assert future.running()  # ensure the future is still going (ie, we didn't have to wait for it to return)
# Markers and timing shared with the subprocess-based shutdown tests below.
_task_success_msg = "work completed"   # printed by the fast task once it has run
_process_success_msg = "exit success"  # printed by the child process after executor shutdown
_timeout_sec = 3                       # how long the parent waits for the child process
_sleep_time_sec = _timeout_sec * 2     # sleepers outlive the timeout so workers stay busy
def test_blocking_shutdown() -> None:
    """Run with the DaemonThreadPoolExecutor patch disabled to verify that shutdown is blocked by in-flight tasks."""
    # The child re-runs this file with the stdlib executor; its shutdown waits on
    # the sleeping workers, so the parent's subprocess timeout must fire.
    with pytest.raises(subprocess.TimeoutExpired):
        subprocess.run(args=[sys.executable, __file__, 'block'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True, timeout=_timeout_sec)
def test_non_blocking_shutdown() -> None:
    """Run with the DaemonThreadPoolExecutor patch enabled to verify that shutdown is not blocked by in-flight tasks."""
    # With daemon worker threads the child exits before its sleepers finish, well inside the timeout.
    cp = subprocess.run(args=[sys.executable, __file__, ''], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True, timeout=_timeout_sec)

    assert _task_success_msg in cp.stdout
    assert _process_success_msg in cp.stdout
def _run_blocking_exit_test(use_patched: bool) -> None:  # pragma: nocover
    """Helper for external process integration test."""
    tpe_type = _futures.DaemonThreadPoolExecutor if use_patched else _cf.ThreadPoolExecutor

    with tpe_type(max_workers=2) as tp:
        fs_non_blocking = tp.submit(lambda: print(_task_success_msg))
        # Saturate the pool with long sleepers so shutdown has in-flight work to wait on.
        assert [tp.submit(time.sleep, _sleep_time_sec) for _idx in range(4)]  # not a pointless statement

        fs_non_blocking.result(timeout=1)

    print(_process_success_msg)
def main() -> None:  # pragma: nocover
    """Used by test_(non)blocking_shutdown as a script-style run."""
    # argv[1] == 'block' selects the stdlib executor; anything else selects the daemon version.
    _run_blocking_exit_test(sys.argv[1] != 'block')


if __name__ == '__main__':  # pragma: nocover
    main()
| 2,397
|
Python
|
.py
| 40
| 55.275
| 157
| 0.728051
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,912
|
test_facts.py
|
ansible_ansible/test/units/module_utils/facts/test_facts.py
|
# This file is part of Ansible
# -*- coding: utf-8 -*-
#
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
import os
import pytest
# for testing
import unittest
from unittest.mock import Mock, patch
from ansible.module_utils import facts
from ansible.module_utils.facts import hardware
from ansible.module_utils.facts import network
from ansible.module_utils.facts import virtual
class BaseTestFactsPlatform(unittest.TestCase):
    """Verify that the automagic in Hardware.__new__ selects the right subclass."""
    # NOTE: this docstring previously sat after the class attributes, making it a
    # no-op string expression instead of the class __doc__; it now leads the body.

    platform_id = 'Generic'
    fact_class = hardware.base.Hardware
    collector_class = None

    @patch('platform.system')
    def test_new(self, mock_platform):
        if not self.fact_class:
            pytest.skip('This platform (%s) does not have a fact_class.' % self.platform_id)
        mock_platform.return_value = self.platform_id
        inst = self.fact_class(module=Mock(), load_on_init=False)
        self.assertIsInstance(inst, self.fact_class)
        self.assertEqual(inst.platform, self.platform_id)

    def test_subclass(self):
        if not self.fact_class:
            pytest.skip('This platform (%s) does not have a fact_class.' % self.platform_id)
        # 'Generic' will try to map to platform.system() that we are not mocking here
        if self.platform_id == 'Generic':
            return
        inst = self.fact_class(module=Mock(), load_on_init=False)
        self.assertIsInstance(inst, self.fact_class)
        self.assertEqual(inst.platform, self.platform_id)

    def test_collector(self):
        if not self.collector_class:
            pytest.skip('This test class needs to be updated to specify collector_class')
        inst = self.collector_class()
        self.assertIsInstance(inst, self.collector_class)
        self.assertEqual(inst._platform, self.platform_id)
# Per-platform parameterizations of BaseTestFactsPlatform: each subclass only
# overrides platform_id / fact_class / collector_class and inherits all tests.

# -- Hardware fact classes --------------------------------------------------

class TestLinuxFactsPlatform(BaseTestFactsPlatform):
    platform_id = 'Linux'
    fact_class = hardware.linux.LinuxHardware
    collector_class = hardware.linux.LinuxHardwareCollector


class TestHurdFactsPlatform(BaseTestFactsPlatform):
    platform_id = 'GNU'
    fact_class = hardware.hurd.HurdHardware
    collector_class = hardware.hurd.HurdHardwareCollector


class TestSunOSHardware(BaseTestFactsPlatform):
    platform_id = 'SunOS'
    fact_class = hardware.sunos.SunOSHardware
    collector_class = hardware.sunos.SunOSHardwareCollector


class TestOpenBSDHardware(BaseTestFactsPlatform):
    platform_id = 'OpenBSD'
    fact_class = hardware.openbsd.OpenBSDHardware
    collector_class = hardware.openbsd.OpenBSDHardwareCollector


class TestFreeBSDHardware(BaseTestFactsPlatform):
    platform_id = 'FreeBSD'
    fact_class = hardware.freebsd.FreeBSDHardware
    collector_class = hardware.freebsd.FreeBSDHardwareCollector


class TestDragonFlyHardware(BaseTestFactsPlatform):
    platform_id = 'DragonFly'
    # DragonFly reuses the FreeBSD hardware facts; only the collector differs.
    fact_class = None
    collector_class = hardware.dragonfly.DragonFlyHardwareCollector


class TestNetBSDHardware(BaseTestFactsPlatform):
    platform_id = 'NetBSD'
    fact_class = hardware.netbsd.NetBSDHardware
    collector_class = hardware.netbsd.NetBSDHardwareCollector


class TestAIXHardware(BaseTestFactsPlatform):
    platform_id = 'AIX'
    fact_class = hardware.aix.AIXHardware
    collector_class = hardware.aix.AIXHardwareCollector


class TestHPUXHardware(BaseTestFactsPlatform):
    platform_id = 'HP-UX'
    fact_class = hardware.hpux.HPUXHardware
    collector_class = hardware.hpux.HPUXHardwareCollector


class TestDarwinHardware(BaseTestFactsPlatform):
    platform_id = 'Darwin'
    fact_class = hardware.darwin.DarwinHardware
    collector_class = hardware.darwin.DarwinHardwareCollector


# -- Network fact classes ---------------------------------------------------

class TestGenericNetwork(BaseTestFactsPlatform):
    platform_id = 'Generic'
    fact_class = network.base.Network


class TestHurdPfinetNetwork(BaseTestFactsPlatform):
    platform_id = 'GNU'
    fact_class = network.hurd.HurdPfinetNetwork
    collector_class = network.hurd.HurdNetworkCollector


class TestLinuxNetwork(BaseTestFactsPlatform):
    platform_id = 'Linux'
    fact_class = network.linux.LinuxNetwork
    collector_class = network.linux.LinuxNetworkCollector


class TestGenericBsdIfconfigNetwork(BaseTestFactsPlatform):
    platform_id = 'Generic_BSD_Ifconfig'
    fact_class = network.generic_bsd.GenericBsdIfconfigNetwork
    collector_class = None


class TestHPUXNetwork(BaseTestFactsPlatform):
    platform_id = 'HP-UX'
    fact_class = network.hpux.HPUXNetwork
    collector_class = network.hpux.HPUXNetworkCollector


class TestDarwinNetwork(BaseTestFactsPlatform):
    platform_id = 'Darwin'
    fact_class = network.darwin.DarwinNetwork
    collector_class = network.darwin.DarwinNetworkCollector


class TestFreeBSDNetwork(BaseTestFactsPlatform):
    platform_id = 'FreeBSD'
    fact_class = network.freebsd.FreeBSDNetwork
    collector_class = network.freebsd.FreeBSDNetworkCollector


class TestDragonFlyNetwork(BaseTestFactsPlatform):
    platform_id = 'DragonFly'
    fact_class = network.dragonfly.DragonFlyNetwork
    collector_class = network.dragonfly.DragonFlyNetworkCollector


class TestAIXNetwork(BaseTestFactsPlatform):
    platform_id = 'AIX'
    fact_class = network.aix.AIXNetwork
    collector_class = network.aix.AIXNetworkCollector


class TestNetBSDNetwork(BaseTestFactsPlatform):
    platform_id = 'NetBSD'
    fact_class = network.netbsd.NetBSDNetwork
    collector_class = network.netbsd.NetBSDNetworkCollector


class TestOpenBSDNetwork(BaseTestFactsPlatform):
    platform_id = 'OpenBSD'
    fact_class = network.openbsd.OpenBSDNetwork
    collector_class = network.openbsd.OpenBSDNetworkCollector


class TestSunOSNetwork(BaseTestFactsPlatform):
    platform_id = 'SunOS'
    fact_class = network.sunos.SunOSNetwork
    collector_class = network.sunos.SunOSNetworkCollector


# -- Virtualization fact classes --------------------------------------------

class TestLinuxVirtual(BaseTestFactsPlatform):
    platform_id = 'Linux'
    fact_class = virtual.linux.LinuxVirtual
    collector_class = virtual.linux.LinuxVirtualCollector


class TestFreeBSDVirtual(BaseTestFactsPlatform):
    platform_id = 'FreeBSD'
    fact_class = virtual.freebsd.FreeBSDVirtual
    collector_class = virtual.freebsd.FreeBSDVirtualCollector


class TestNetBSDVirtual(BaseTestFactsPlatform):
    platform_id = 'NetBSD'
    fact_class = virtual.netbsd.NetBSDVirtual
    collector_class = virtual.netbsd.NetBSDVirtualCollector


class TestOpenBSDVirtual(BaseTestFactsPlatform):
    platform_id = 'OpenBSD'
    fact_class = virtual.openbsd.OpenBSDVirtual
    collector_class = virtual.openbsd.OpenBSDVirtualCollector


class TestHPUXVirtual(BaseTestFactsPlatform):
    platform_id = 'HP-UX'
    fact_class = virtual.hpux.HPUXVirtual
    collector_class = virtual.hpux.HPUXVirtualCollector


class TestSunOSVirtual(BaseTestFactsPlatform):
    platform_id = 'SunOS'
    fact_class = virtual.sunos.SunOSVirtual
    collector_class = virtual.sunos.SunOSVirtualCollector
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop1 7c1b0f30-cf34-459f-9a70-2612f82b870a
/dev/loop9 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop9 7c1b4444-cf34-459f-9a70-2612f82b870a
/dev/mapper/docker-253:1-1050967-pool
/dev/loop2
/dev/mapper/docker-253:1-1050967-pool
"""
LSBLK_OUTPUT_2 = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/mapper/an-example-mapper with a space in the name 84639acb-013f-4d2f-9392-526a572b4373
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
"""
LSBLK_UUIDS = {'/dev/sda1': '66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK'}
UDEVADM_UUID = 'N/A'
MTAB = r"""
sysfs /sys sysfs rw,seclabel,nosuid,nodev,noexec,relatime 0 0
proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
devtmpfs /dev devtmpfs rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755 0 0
securityfs /sys/kernel/security securityfs rw,nosuid,nodev,noexec,relatime 0 0
tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0
devpts /dev/pts devpts rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
tmpfs /run tmpfs rw,seclabel,nosuid,nodev,mode=755 0 0
tmpfs /sys/fs/cgroup tmpfs ro,seclabel,nosuid,nodev,noexec,mode=755 0 0
cgroup /sys/fs/cgroup/systemd cgroup rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd 0 0
pstore /sys/fs/pstore pstore rw,seclabel,nosuid,nodev,noexec,relatime 0 0
cgroup /sys/fs/cgroup/devices cgroup rw,nosuid,nodev,noexec,relatime,devices 0 0
cgroup /sys/fs/cgroup/freezer cgroup rw,nosuid,nodev,noexec,relatime,freezer 0 0
cgroup /sys/fs/cgroup/memory cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0
cgroup /sys/fs/cgroup/pids cgroup rw,nosuid,nodev,noexec,relatime,pids 0 0
cgroup /sys/fs/cgroup/blkio cgroup rw,nosuid,nodev,noexec,relatime,blkio 0 0
cgroup /sys/fs/cgroup/cpuset cgroup rw,nosuid,nodev,noexec,relatime,cpuset 0 0
cgroup /sys/fs/cgroup/cpu,cpuacct cgroup rw,nosuid,nodev,noexec,relatime,cpu,cpuacct 0 0
cgroup /sys/fs/cgroup/hugetlb cgroup rw,nosuid,nodev,noexec,relatime,hugetlb 0 0
cgroup /sys/fs/cgroup/perf_event cgroup rw,nosuid,nodev,noexec,relatime,perf_event 0 0
cgroup /sys/fs/cgroup/net_cls,net_prio cgroup rw,nosuid,nodev,noexec,relatime,net_cls,net_prio 0 0
configfs /sys/kernel/config configfs rw,relatime 0 0
/dev/mapper/fedora_dhcp129--186-root / ext4 rw,seclabel,relatime,data=ordered 0 0
selinuxfs /sys/fs/selinux selinuxfs rw,relatime 0 0
systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
/dev/loop0 /var/lib/machines btrfs rw,seclabel,relatime,space_cache,subvolid=5,subvol=/ 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
/dev/mapper/fedora_dhcp129--186-home /home ext4 rw,seclabel,relatime,data=ordered 0 0
tmpfs /run/user/1000 tmpfs rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000 0 0
gvfsd-fuse /run/user/1000/gvfs fuse.gvfsd-fuse rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
fusectl /sys/fs/fuse/connections fusectl rw,relatime 0 0
grimlock.g.a: /home/adrian/sshfs-grimlock fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:test_path/path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote-2 fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:/mnt/data/foto's /home/adrian/fotos fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
\\Windows\share /data/ cifs credentials=/root/.creds 0 0
"""
MTAB_ENTRIES = [
[
'sysfs',
'/sys',
'sysfs',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
[
'devtmpfs',
'/dev',
'devtmpfs',
'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755',
'0',
'0'
],
[
'securityfs',
'/sys/kernel/security',
'securityfs',
'rw,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
[
'devpts',
'/dev/pts',
'devpts',
'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000',
'0',
'0'
],
['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
[
'tmpfs',
'/sys/fs/cgroup',
'tmpfs',
'ro,seclabel,nosuid,nodev,noexec,mode=755',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/systemd',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd',
'0',
'0'
],
[
'pstore',
'/sys/fs/pstore',
'pstore',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/devices',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,devices',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/freezer',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,freezer',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/memory',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,memory',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/pids',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,pids',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/blkio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,blkio',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpuset',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpuset',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpu,cpuacct',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpu,cpuacct',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/hugetlb',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,hugetlb',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/perf_event',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,perf_event',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/net_cls,net_prio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,net_cls,net_prio',
'0',
'0'
],
['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-root',
'/',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
[
'systemd-1',
'/proc/sys/fs/binfmt_misc',
'autofs',
'rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct',
'0',
'0'
],
['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
[
'hugetlbfs',
'/dev/hugepages',
'hugetlbfs',
'rw,seclabel,relatime',
'0',
'0'
],
['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
[
'/dev/loop0',
'/var/lib/machines',
'btrfs',
'rw,seclabel,relatime,space_cache,subvolid=5,subvol=/',
'0',
'0'
],
['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# A 'none' fstype
['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# lets assume this is a bindmount
['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-home',
'/home',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
[
'tmpfs',
'/run/user/1000',
'tmpfs',
'rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000',
'0',
'0'
],
[
'gvfsd-fuse',
'/run/user/1000/gvfs',
'fuse.gvfsd-fuse',
'rw,nosuid,nodev,relatime,user_id=1000,group_id=1000',
'0',
'0'
],
['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0'],
# Mount path with space in the name
# The space is encoded as \040 since the fields in /etc/mtab are space-delimited
['/dev/sdz9', r'/mnt/foo\040bar', 'ext4', 'rw,relatime', '0', '0'],
['\\\\Windows\\share', '/data/', 'cifs', 'credentials=/root/.creds', '0', '0'],
]
BIND_MOUNTS = ['/not/a/real/bind_mount']
with open(os.path.join(os.path.dirname(__file__), 'fixtures/findmount_output.txt')) as f:
FINDMNT_OUTPUT = f.read()
class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
    """Exercise LinuxHardware mount/uuid fact gathering against canned mtab/lsblk/findmnt data."""

    # FIXME: mock.patch instead
    def setUp(self):
        # The @timeout tracebacks if there isn't a GATHER_TIMEOUT is None (the default until get_all_facts sets it via global)
        facts.GATHER_TIMEOUT = 10

    def tearDown(self):
        facts.GATHER_TIMEOUT = None

    # The Hardware subclasses freakout if instaniated directly, so
    # mock platform.system and inst Hardware() so we get a LinuxHardware()
    # we can test.
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._mtab_entries', return_value=MTAB_ENTRIES)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._find_bind_mounts', return_value=BIND_MOUNTS)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._lsblk_uuid', return_value=LSBLK_UUIDS)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._udevadm_uuid', return_value=UDEVADM_UUID)
    def test_get_mount_facts(self,
                             mock_lsblk_uuid,
                             mock_find_bind_mounts,
                             mock_mtab_entries,
                             mock_udevadm_uuid):
        module = Mock()
        # Returns a LinuxHardware-ish
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)

        # Nothing returned, just self.facts modified as a side effect
        mount_facts = lh.get_mount_facts()
        self.assertIsInstance(mount_facts, dict)
        self.assertIn('mounts', mount_facts)
        self.assertIsInstance(mount_facts['mounts'], list)
        self.assertIsInstance(mount_facts['mounts'][0], dict)

        # Find mounts with space in the mountpoint path
        mounts_with_space = [x for x in mount_facts['mounts'] if ' ' in x['mount']]
        self.assertEqual(len(mounts_with_space), 1)
        self.assertEqual(mounts_with_space[0]['mount'], '/mnt/foo bar')

    @patch('ansible.module_utils.facts.hardware.linux.get_file_content', return_value=MTAB)
    def test_get_mtab_entries(self, mock_get_file_content):
        # Parses the MTAB fixture defined above; 39 usable entries are expected.
        module = Mock()
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        mtab_entries = lh._mtab_entries()
        self.assertIsInstance(mtab_entries, list)
        self.assertIsInstance(mtab_entries[0], list)
        self.assertEqual(len(mtab_entries), 39)

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(0, FINDMNT_OUTPUT, ''))
    def test_find_bind_mounts(self, mock_run_findmnt):
        module = Mock()
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()

        # If bind_mounts becomes another seq type, feel free to change
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 1)
        self.assertIn('/not/a/real/bind_mount', bind_mounts)

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(37, '', ''))
    def test_find_bind_mounts_non_zero(self, mock_run_findmnt):
        # A failing findmnt (non-zero rc) yields an empty set rather than an error.
        module = Mock()
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 0)

    def test_find_bind_mounts_no_findmnts(self):
        # findmnt binary missing entirely -> empty set.
        module = Mock()
        module.get_bin_path = Mock(return_value=None)
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 0)

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT, ''))
    def test_lsblk_uuid(self, mock_run_lsblk):
        module = Mock()
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertIn(b'/dev/loop9', lsblk_uuids)
        self.assertIn(b'/dev/sda1', lsblk_uuids)
        self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(37, LSBLK_OUTPUT, ''))
    def test_lsblk_uuid_non_zero(self, mock_run_lsblk):
        # A failing lsblk (non-zero rc) yields an empty dict.
        module = Mock()
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertEqual(len(lsblk_uuids), 0)

    def test_lsblk_uuid_no_lsblk(self):
        # lsblk binary missing entirely -> empty dict.
        module = Mock()
        module.get_bin_path = Mock(return_value=None)
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertEqual(len(lsblk_uuids), 0)

    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT_2, ''))
    def test_lsblk_uuid_dev_with_space_in_name(self, mock_run_lsblk):
        module = Mock()
        lh = hardware.linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertIn(b'/dev/loop0', lsblk_uuids)
        self.assertIn(b'/dev/sda1', lsblk_uuids)
        self.assertEqual(lsblk_uuids[b'/dev/mapper/an-example-mapper with a space in the name'], b'84639acb-013f-4d2f-9392-526a572b4373')
        self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')
| 23,266
|
Python
|
.py
| 547
| 36.427788
| 154
| 0.683582
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,913
|
test_ansible_collector.py
|
ansible_ansible/test/units/module_utils/facts/test_ansible_collector.py
|
# -*- coding: utf-8 -*-
#
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
# for testing
import unittest
from unittest.mock import Mock, patch
from ansible.module_utils.facts import collector
from ansible.module_utils.facts import ansible_collector
from ansible.module_utils.facts import namespace
from ansible.module_utils.facts.other.facter import FacterFactCollector
from ansible.module_utils.facts.other.ohai import OhaiFactCollector
from ansible.module_utils.facts.system.apparmor import ApparmorFactCollector
from ansible.module_utils.facts.system.caps import SystemCapabilitiesFactCollector
from ansible.module_utils.facts.system.date_time import DateTimeFactCollector
from ansible.module_utils.facts.system.env import EnvFactCollector
from ansible.module_utils.facts.system.distribution import DistributionFactCollector
from ansible.module_utils.facts.system.dns import DnsFactCollector
from ansible.module_utils.facts.system.fips import FipsFactCollector
from ansible.module_utils.facts.system.local import LocalFactCollector
from ansible.module_utils.facts.system.lsb import LSBFactCollector
from ansible.module_utils.facts.system.pkg_mgr import PkgMgrFactCollector, OpenBSDPkgMgrFactCollector
from ansible.module_utils.facts.system.platform import PlatformFactCollector
from ansible.module_utils.facts.system.python import PythonFactCollector
from ansible.module_utils.facts.system.selinux import SelinuxFactCollector
from ansible.module_utils.facts.system.service_mgr import ServiceMgrFactCollector
from ansible.module_utils.facts.system.user import UserFactCollector
# from ansible.module_utils.facts.hardware.base import HardwareCollector
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.virtual.base import VirtualCollector
# Full set of collector classes these tests select from; mirrors (a subset of)
# the collectors ansible itself registers in default_collectors.
ALL_COLLECTOR_CLASSES = \
    [PlatformFactCollector,
     DistributionFactCollector,
     SelinuxFactCollector,
     ApparmorFactCollector,
     SystemCapabilitiesFactCollector,
     FipsFactCollector,
     PkgMgrFactCollector,
     OpenBSDPkgMgrFactCollector,
     ServiceMgrFactCollector,
     LSBFactCollector,
     DateTimeFactCollector,
     UserFactCollector,
     LocalFactCollector,
     EnvFactCollector,
     DnsFactCollector,
     PythonFactCollector,
     # FIXME: re-enable HardwareCollector once Hardware() no longer munges self.facts
     # HardwareCollector
     NetworkCollector,
     VirtualCollector,
     OhaiFactCollector,
     FacterFactCollector]
def mock_module(gather_subset=None, filter=None):
    """Build a Mock standing in for an AnsibleModule carrying fact-gathering params.

    Defaults to gathering everything except the facter/ohai external collectors
    and a wildcard filter; get_bin_path always reports the binary as missing.
    """
    params = {
        'gather_subset': ['all', '!facter', '!ohai'] if gather_subset is None else gather_subset,
        'gather_timeout': 5,
        'filter': '*' if filter is None else filter,
    }
    fake = Mock()
    fake.params = params
    fake.get_bin_path = Mock(return_value=None)
    return fake
def _collectors(module, all_collector_classes=None, minimal_gather_subset=None):
    """Instantiate the collectors selected by the module's gather_subset.

    Appends a CollectorMetaDataCollector so the gather_subset used (and
    module_setup=True) show up as facts themselves.
    """
    gather_subset = module.params.get('gather_subset')
    if all_collector_classes is None:
        all_collector_classes = ALL_COLLECTOR_CLASSES
    if minimal_gather_subset is None:
        minimal_gather_subset = frozenset([])

    selected_classes = collector.collector_classes_from_gather_subset(
        all_collector_classes=all_collector_classes,
        minimal_gather_subset=minimal_gather_subset,
        gather_subset=gather_subset)

    instances = [cls() for cls in selected_classes]
    # Record which gather_subset was used so it can be provided as a fact.
    instances.append(
        ansible_collector.CollectorMetaDataCollector(gather_subset=gather_subset,
                                                     module_setup=True))
    return instances
# Namespace shared by every fact collector in these tests: facts live under
# 'ansible_facts' and individual keys get the 'ansible_' prefix.
ns = namespace.PrefixFactNamespace('ansible_facts', 'ansible_')
# FIXME: this is brute force, but hopefully enough to get some refactoring to make facts testable
class TestInPlace(unittest.TestCase):
    """End-to-end runs of AnsibleFactCollector over real collector classes."""

    def _mock_module(self, gather_subset=None):
        return mock_module(gather_subset=gather_subset)

    def _collectors(self, module,
                    all_collector_classes=None,
                    minimal_gather_subset=None):
        return _collectors(module=module,
                           all_collector_classes=all_collector_classes,
                           minimal_gather_subset=minimal_gather_subset)

    def _fact_collector(self, collectors):
        # Small helper so each test builds the collector the same way.
        return ansible_collector.AnsibleFactCollector(collectors=collectors,
                                                      namespace=ns)

    def test(self):
        """With only EnvFactCollector selected, env plus metadata facts appear."""
        module = self._mock_module(gather_subset=['all'])
        collectors = self._collectors(module, all_collector_classes=[EnvFactCollector])
        res = self._fact_collector(collectors).collect(module=module)
        self.assertIsInstance(res, dict)
        self.assertIn('env', res)
        self.assertIn('gather_subset', res)
        self.assertEqual(['all'], res['gather_subset'])

    def test1(self):
        """With the full collector list, a substantial number of facts is produced."""
        module = self._mock_module(gather_subset=['all'])
        res = self._fact_collector(self._collectors(module)).collect(module=module)
        self.assertIsInstance(res, dict)
        # just assert it's not almost empty
        # with run_command and get_file_content mock, many facts are empty, like network
        self.assertGreater(len(res), 20)

    def test_empty_all_collector_classes(self):
        """No collector classes at all still yields a (tiny) fact dict."""
        module = self._mock_module()
        collectors = self._collectors(module, all_collector_classes=[])
        res = self._fact_collector(collectors).collect()
        self.assertIsInstance(res, dict)
        # just assert it's not almost empty
        self.assertLess(len(res), 3)
# def test_facts_class(self):
# mock_module = self._mock_module()
# Facts(mock_module)
# def test_facts_class_load_on_init_false(self):
# mock_module = self._mock_module()
# Facts(mock_module, load_on_init=False)
# # FIXME: assert something
class TestCollectedFacts(unittest.TestCase):
    """Collect facts once in setUp and assert on the resulting dict.

    Subclasses override the class attributes below to vary the gather_subset
    and the expected/forbidden fact names.
    """

    # gather_subset requested from the collector machinery
    gather_subset = ['all', '!facter', '!ohai']
    # sanity bounds on how many facts the run should produce
    min_fact_count = 30
    max_fact_count = 1000

    # TODO: add ansible_cmdline, ansible_*_pubkey* back when TempFactCollector goes away
    expected_facts = ['date_time',
                      'user_id', 'distribution',
                      'gather_subset', 'module_setup',
                      'env']
    not_expected_facts = ['facter', 'ohai']

    # NOTE(review): mutable class attribute shared across this class and its
    # subclasses (TestPkgMgrOSTreeFacts mutates its override in place) — verify
    # no cross-test leakage if tests are reordered.
    collected_facts: dict[str, str] = {}

    def _mock_module(self, gather_subset=None):
        # The gather_subset argument is ignored; the class attribute always wins.
        return mock_module(gather_subset=self.gather_subset)

    # Decorators apply bottom-up, so mock_gfc is the get_file_content patch and
    # mock_ps is the platform.system patch (standard unittest.mock ordering).
    @patch('platform.system', return_value='Linux')
    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value='systemd')
    def setUp(self, mock_gfc, mock_ps):
        mock_module = self._mock_module()
        collectors = self._collectors(mock_module)
        fact_collector = \
            ansible_collector.AnsibleFactCollector(collectors=collectors,
                                                   namespace=ns)
        self.facts = fact_collector.collect(module=mock_module,
                                            collected_facts=self.collected_facts)

    def _collectors(self, module,
                    all_collector_classes=None,
                    minimal_gather_subset=None):
        return _collectors(module=module,
                           all_collector_classes=all_collector_classes,
                           minimal_gather_subset=minimal_gather_subset)

    def test_basics(self):
        self._assert_basics(self.facts)

    def test_expected_facts(self):
        self._assert_expected_facts(self.facts)

    def test_not_expected_facts(self):
        self._assert_not_expected_facts(self.facts)

    def _assert_basics(self, facts):
        self.assertIsInstance(facts, dict)
        # just assert it's not almost empty
        self.assertGreaterEqual(len(facts), self.min_fact_count)
        # and that is not huge number of keys
        self.assertLess(len(facts), self.max_fact_count)

    # everything starts with ansible_ namespace
    def _assert_ansible_namespace(self, facts):
        # FIXME: kluge for non-namespace fact
        facts.pop('module_setup', None)
        facts.pop('gather_subset', None)

        for fact_key in facts:
            self.assertTrue(fact_key.startswith('ansible_'),
                            'The fact name "%s" does not startwith "ansible_"' % fact_key)

    def _assert_expected_facts(self, facts):
        facts_keys = sorted(facts.keys())
        for expected_fact in self.expected_facts:
            self.assertIn(expected_fact, facts_keys)

    def _assert_not_expected_facts(self, facts):
        facts_keys = sorted(facts.keys())
        for not_expected_fact in self.not_expected_facts:
            self.assertNotIn(not_expected_fact, facts_keys)
class ProvidesOtherFactCollector(collector.BaseFactCollector):
    """Test collector advertising 'needed_fact' for dependency-resolution tests."""
    name = 'provides_something'
    _fact_ids = set(['needed_fact'])

    def collect(self, module=None, collected_facts=None):
        return {'needed_fact': 'THE_NEEDED_FACT_VALUE'}
class RequiresOtherFactCollector(collector.BaseFactCollector):
    """Test collector that depends on 'needed_fact' having been collected earlier."""
    name = 'requires_something'

    def collect(self, module=None, collected_facts=None):
        facts = collected_facts or {}
        # KeyError here (by design) if the providing collector did not run first.
        needed = facts['needed_fact']
        return {'needed_fact': needed,
                'compound_fact': "compound-%s" % needed}
class ConCatFactCollector(collector.BaseFactCollector):
    """Test collector joining every previously collected value into one fact."""
    name = 'concat_collected'

    def collect(self, module=None, collected_facts=None):
        facts = collected_facts or {}
        # dict preserves insertion order, so this matches iterating items().
        return {'concat_fact': '-'.join(facts.values())}
class TestCollectorDepsWithFilter(unittest.TestCase):
    """Exercise inter-collector dependencies combined with the 'filter' option."""

    gather_subset = ['all', '!facter', '!ohai']

    def _mock_module(self, gather_subset=None, filter=None):
        # gather_subset argument is intentionally ignored; the class attribute wins.
        return mock_module(gather_subset=self.gather_subset,
                           filter=filter)

    def setUp(self):
        self.mock_module = self._mock_module()
        # BUG FIX: previously this passed the module-level mock_module *function*
        # instead of the mocked module built above. Harmless only because
        # _collectors() below ignores its argument; pass the right object anyway.
        self.collectors = self._collectors(self.mock_module)

    def _collectors(self, module,
                    all_collector_classes=None,
                    minimal_gather_subset=None):
        # A fixed provider/consumer pair so dependency ordering is deterministic.
        return [ProvidesOtherFactCollector(),
                RequiresOtherFactCollector()]

    def test_no_filter(self):
        """Without a filter both the needed and compound facts are returned."""
        _mock_module = mock_module(gather_subset=['all', '!facter', '!ohai'])
        facts_dict = self._collect(_mock_module)
        expected = {'needed_fact': 'THE_NEEDED_FACT_VALUE',
                    'compound_fact': 'compound-THE_NEEDED_FACT_VALUE'}
        self.assertEqual(expected, facts_dict)

    def test_with_filter_on_compound_fact(self):
        """Filtering on the dependent fact still triggers collection of its dep."""
        _mock_module = mock_module(gather_subset=['all', '!facter', '!ohai'],
                                   filter='compound_fact')
        facts_dict = self._collect(_mock_module)
        expected = {'compound_fact': 'compound-THE_NEEDED_FACT_VALUE'}
        self.assertEqual(expected, facts_dict)

    def test_with_filter_on_needed_fact(self):
        _mock_module = mock_module(gather_subset=['all', '!facter', '!ohai'],
                                   filter='needed_fact')
        facts_dict = self._collect(_mock_module)
        expected = {'needed_fact': 'THE_NEEDED_FACT_VALUE'}
        self.assertEqual(expected, facts_dict)

    def test_with_filter_on_compound_gather_compound(self):
        _mock_module = mock_module(gather_subset=['!all', '!any', 'compound_fact'],
                                   filter='compound_fact')
        facts_dict = self._collect(_mock_module)
        expected = {'compound_fact': 'compound-THE_NEEDED_FACT_VALUE'}
        self.assertEqual(expected, facts_dict)

    def test_with_filter_no_match(self):
        """A filter that matches nothing yields an empty fact dict."""
        _mock_module = mock_module(gather_subset=['all', '!facter', '!ohai'],
                                   filter='ansible_this_doesnt_exist')
        facts_dict = self._collect(_mock_module)
        expected = {}
        self.assertEqual(expected, facts_dict)

    def test_concat_collector(self):
        _mock_module = mock_module(gather_subset=['all', '!facter', '!ohai'])
        _collectors = self._collectors(_mock_module)
        _collectors.append(ConCatFactCollector())
        fact_collector = \
            ansible_collector.AnsibleFactCollector(collectors=_collectors,
                                                   namespace=ns,
                                                   filter_spec=_mock_module.params['filter'])
        collected_facts = {}
        facts_dict = fact_collector.collect(module=_mock_module,
                                            collected_facts=collected_facts)
        self.assertIn('concat_fact', facts_dict)
        self.assertIn('THE_NEEDED_FACT_VALUE', facts_dict['concat_fact'])

    def test_concat_collector_with_filter_on_concat(self):
        _mock_module = mock_module(gather_subset=['all', '!facter', '!ohai'],
                                   filter='concat_fact')
        _collectors = self._collectors(_mock_module)
        _collectors.append(ConCatFactCollector())
        fact_collector = \
            ansible_collector.AnsibleFactCollector(collectors=_collectors,
                                                   namespace=ns,
                                                   filter_spec=_mock_module.params['filter'])
        collected_facts = {}
        facts_dict = fact_collector.collect(module=_mock_module,
                                            collected_facts=collected_facts)
        self.assertIn('concat_fact', facts_dict)
        self.assertIn('THE_NEEDED_FACT_VALUE', facts_dict['concat_fact'])
        self.assertIn('compound', facts_dict['concat_fact'])

    def _collect(self, _mock_module, collected_facts=None):
        # Shared driver: build collectors, run the fact collector, return facts.
        _collectors = self._collectors(_mock_module)
        fact_collector = \
            ansible_collector.AnsibleFactCollector(collectors=_collectors,
                                                   namespace=ns,
                                                   filter_spec=_mock_module.params['filter'])
        facts_dict = fact_collector.collect(module=_mock_module,
                                            collected_facts=collected_facts)
        return facts_dict
class ExceptionThrowingCollector(collector.BaseFactCollector):
    """Test collector whose collect() always fails, to exercise error handling."""
    def collect(self, module=None, collected_facts=None):
        raise Exception('A collector failed')
class TestExceptionCollectedFacts(TestCollectedFacts):
    """Same assertions as TestCollectedFacts, with a failing collector prepended.

    Verifies one collector raising does not abort the whole fact run.
    """

    def _collectors(self, module,
                    all_collector_classes=None,
                    minimal_gather_subset=None):
        base = _collectors(module=module,
                           all_collector_classes=all_collector_classes,
                           minimal_gather_subset=minimal_gather_subset)
        return [ExceptionThrowingCollector()] + base
class TestOnlyExceptionCollector(TestCollectedFacts):
    """With only a failing collector, no facts (beyond none) are expected."""
    expected_facts = []
    min_fact_count = 0

    def _collectors(self, module,
                    all_collector_classes=None,
                    minimal_gather_subset=None):
        return [ExceptionThrowingCollector()]
class NoneReturningCollector(collector.BaseFactCollector):
    """Test collector returning None instead of a dict from collect()."""
    def collect(self, module=None, collected_facts=None):
        return None
class TestOnlyNoneCollector(TestCollectedFacts):
    """A collector returning None must not break collection; no facts expected."""
    expected_facts = []
    min_fact_count = 0

    def _collectors(self, module,
                    all_collector_classes=None,
                    minimal_gather_subset=None):
        # namespace= is passed to exercise the BaseFactCollector constructor path.
        return [NoneReturningCollector(namespace='ansible')]
class TestMinimalCollectedFacts(TestCollectedFacts):
    """gather_subset=['!all'] should yield only the minimal metadata facts."""
    gather_subset = ['!all']
    min_fact_count = 1
    max_fact_count = 10
    expected_facts = ['gather_subset',
                      'module_setup']
    not_expected_facts = ['lsb']
class TestFacterCollectedFacts(TestCollectedFacts):
    """Explicitly requesting only 'facter': with the binary mocked away,
    only the metadata facts should appear."""
    gather_subset = ['!all', 'facter']
    min_fact_count = 1
    max_fact_count = 10
    expected_facts = ['gather_subset',
                      'module_setup']
    not_expected_facts = ['lsb']
class TestOhaiCollectedFacts(TestCollectedFacts):
    """Explicitly requesting only 'ohai': with the binary mocked away,
    only the metadata facts should appear."""
    gather_subset = ['!all', 'ohai']
    min_fact_count = 1
    max_fact_count = 10
    expected_facts = ['gather_subset',
                      'module_setup']
    not_expected_facts = ['lsb']
class TestPkgMgrFacts(TestCollectedFacts):
    """Gather only pkg_mgr, seeding distribution facts it depends on."""
    gather_subset = ['pkg_mgr']
    min_fact_count = 1
    max_fact_count = 20
    expected_facts = ['gather_subset',
                      'module_setup',
                      'pkg_mgr']
    # Pre-collected facts the pkg_mgr collector consults to pick a manager.
    collected_facts = {
        "ansible_distribution": "Fedora",
        "ansible_distribution_major_version": "28",
        "ansible_os_family": "RedHat"
    }
class TestPkgMgrOSTreeFacts(TestPkgMgrFacts):
    """On rpm-ostree systems (marker file /run/ostree-booted present),
    pkg_mgr must resolve to 'atomic_container'."""

    # side_effect makes os.path.exists() true only for the ostree marker file.
    @patch(
        'ansible.module_utils.facts.system.pkg_mgr.os.path.exists',
        side_effect=lambda x: x == '/run/ostree-booted')
    def _recollect_facts(self, distribution, version, mock_exists):
        # NOTE(review): mutates the collected_facts class attribute inherited
        # from TestPkgMgrFacts in place — state leaks into later tests of this
        # class; verify ordering assumptions if adding tests.
        self.collected_facts['ansible_distribution'] = distribution
        self.collected_facts['ansible_distribution_major_version'] = \
            str(version)
        # Recollect facts
        self.setUp()
        self.assertIn('pkg_mgr', self.facts)
        self.assertEqual(self.facts['pkg_mgr'], 'atomic_container')

    def test_is_rhel_edge_ostree(self):
        self._recollect_facts('RedHat', 8)

    def test_is_fedora_ostree(self):
        self._recollect_facts('Fedora', 33)
class TestOpenBSDPkgMgrFacts(TestPkgMgrFacts):
    """pkg_mgr must resolve to 'openbsd_pkg' when platform.system() says OpenBSD."""

    def test_is_openbsd_pkg(self):
        self.assertIn('pkg_mgr', self.facts)
        self.assertEqual(self.facts['pkg_mgr'], 'openbsd_pkg')

    def setUp(self):
        self.patcher = patch('platform.system')
        mock_platform = self.patcher.start()
        # BUG FIX: register the stop via addCleanup instead of tearDown. If the
        # rest of setUp raises, unittest skips tearDown but still runs cleanups,
        # so the global platform.system patch can no longer leak into other tests.
        self.addCleanup(self.patcher.stop)
        mock_platform.return_value = 'OpenBSD'

        mock_module = self._mock_module()
        collectors = self._collectors(mock_module)
        fact_collector = \
            ansible_collector.AnsibleFactCollector(collectors=collectors,
                                                   namespace=ns)
        self.facts = fact_collector.collect(module=mock_module)
| 19,996
|
Python
|
.py
| 410
| 38.307317
| 101
| 0.645203
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,914
|
test_utils.py
|
ansible_ansible/test/units/module_utils/facts/test_utils.py
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import patch
from ansible.module_utils.facts import utils
class TestGetMountSize(unittest.TestCase):
    """Tests for utils.get_mount_size()."""

    def test(self):
        """A bogus mountpoint still yields a dict."""
        info = utils.get_mount_size('/dev/null/not/a/real/mountpoint')
        self.assertIsInstance(info, dict)

    def test_proc(self):
        """A real (virtual) mountpoint yields a dict."""
        info = utils.get_mount_size('/proc')
        self.assertIsInstance(info, dict)

    @patch('ansible.module_utils.facts.utils.os.statvfs', side_effect=OSError('intentionally induced os error'))
    def test_oserror_on_statvfs(self, mock_statvfs):
        """OSError from statvfs is swallowed and an empty dict is returned."""
        info = utils.get_mount_size('/dev/null/doesnt/matter')
        self.assertIsInstance(info, dict)
        self.assertDictEqual({}, info)
| 1,427
|
Python
|
.py
| 30
| 43.933333
| 112
| 0.75036
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,915
|
test_collector.py
|
ansible_ansible/test/units/module_utils/facts/test_collector.py
|
# This file is part of Ansible
# -*- coding: utf-8 -*-
#
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from collections import defaultdict
import pprint
# for testing
import unittest
from ansible.module_utils.facts import collector
from ansible.module_utils.facts import default_collectors
class TestFindCollectorsForPlatform(unittest.TestCase):
    """Tests for collector.find_collectors_for_platform() platform matching."""

    def test(self):
        compat_platforms = [{'system': 'Generic'}]
        res = collector.find_collectors_for_platform(default_collectors.collectors,
                                                     compat_platforms)
        for coll_class in res:
            # BUG FIX: ('Generic') was a plain string, so assertIn performed a
            # substring check; a one-element tuple makes it a membership check.
            self.assertIn(coll_class._platform, ('Generic',))

    def test_linux(self):
        compat_platforms = [{'system': 'Linux'}]
        res = collector.find_collectors_for_platform(default_collectors.collectors,
                                                     compat_platforms)
        for coll_class in res:
            # BUG FIX: ('Linux',) — see above; previously a substring check.
            self.assertIn(coll_class._platform, ('Linux',))

    def test_linux_or_generic(self):
        """Either platform may match when both are listed as compatible."""
        compat_platforms = [{'system': 'Generic'}, {'system': 'Linux'}]
        res = collector.find_collectors_for_platform(default_collectors.collectors,
                                                     compat_platforms)
        for coll_class in res:
            self.assertIn(coll_class._platform, ('Generic', 'Linux'))
class TestSelectCollectorNames(unittest.TestCase):
    """Tests for collector.select_collector_classes() and its ordering."""

    def _assert_equal_detail(self, obj1, obj2):
        # Pretty-print both objects on mismatch for readable failures.
        detail = 'objects are not equal\n%s\n\n!=\n\n%s' % (pprint.pformat(obj1), pprint.pformat(obj2))
        return self.assertEqual(obj1, obj2, detail)

    def test(self):
        """Only names with registered collector classes yield classes, in order."""
        names = ['distribution', 'all_ipv4_addresses', 'local', 'pkg_mgr']
        res = collector.select_collector_classes(names, self._all_fact_subsets())
        self._assert_equal_detail(res,
                                  [default_collectors.DistributionFactCollector,
                                   default_collectors.PkgMgrFactCollector])

    def test_default_collectors(self):
        """Full pipeline: platform filter, dep solve, tsort, then selection."""
        platform_info = {'system': 'Generic'}
        candidates = collector.find_collectors_for_platform(default_collectors.collectors,
                                                            [platform_info])
        all_fact_subsets, aliases_map = collector.build_fact_id_to_collector_map(candidates)
        names = collector.get_collector_names(valid_subsets=frozenset(all_fact_subsets),
                                              aliases_map=aliases_map,
                                              platform_info=platform_info)
        complete_names = collector._solve_deps(names, all_fact_subsets)
        dep_map = collector.build_dep_data(complete_names, all_fact_subsets)
        ordered = [entry[0] for entry in collector.tsort(dep_map)]
        res = collector.select_collector_classes(ordered, all_fact_subsets)

        # service_mgr depends on distribution and platform facts, so its
        # collector must come after both of theirs.
        assert res.index(default_collectors.ServiceMgrFactCollector) > res.index(default_collectors.DistributionFactCollector)
        assert res.index(default_collectors.ServiceMgrFactCollector) > res.index(default_collectors.PlatformFactCollector)

    def _all_fact_subsets(self, data=None):
        defaults = {'pkg_mgr': [default_collectors.PkgMgrFactCollector],
                    'distribution': [default_collectors.DistributionFactCollector],
                    'network': [default_collectors.LinuxNetworkCollector]}
        subsets = defaultdict(list)
        subsets.update(data or defaults)
        return subsets
class TestGetCollectorNames(unittest.TestCase):
    """Tests for collector.get_collector_names() subset include/exclude logic."""

    def test_none(self):
        # No arguments at all -> empty selection.
        res = collector.get_collector_names()
        self.assertIsInstance(res, set)
        self.assertEqual(res, set([]))

    def test_empty_sets(self):
        res = collector.get_collector_names(valid_subsets=frozenset([]),
                                            minimal_gather_subset=frozenset([]),
                                            gather_subset=[])
        self.assertIsInstance(res, set)
        self.assertEqual(res, set([]))

    def test_empty_valid_and_min_with_all_gather_subset(self):
        # 'all' of nothing is still nothing.
        res = collector.get_collector_names(valid_subsets=frozenset([]),
                                            minimal_gather_subset=frozenset([]),
                                            gather_subset=['all'])
        self.assertIsInstance(res, set)
        self.assertEqual(res, set([]))

    def test_one_valid_with_all_gather_subset(self):
        valid_subsets = frozenset(['my_fact'])
        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=frozenset([]),
                                            gather_subset=['all'])
        self.assertIsInstance(res, set)
        self.assertEqual(res, set(['my_fact']))

    def _compare_res(self, gather_subset1, gather_subset2,
                     valid_subsets=None, min_subset=None):
        # Helper: run get_collector_names twice with the gather_subset entries
        # in different orders, so callers can assert order-independence.
        valid_subsets = valid_subsets or frozenset()
        minimal_gather_subset = min_subset or frozenset()

        res1 = collector.get_collector_names(valid_subsets=valid_subsets,
                                             minimal_gather_subset=minimal_gather_subset,
                                             gather_subset=gather_subset1)

        res2 = collector.get_collector_names(valid_subsets=valid_subsets,
                                             minimal_gather_subset=minimal_gather_subset,
                                             gather_subset=gather_subset2)

        return res1, res2

    def test_not_all_other_order(self):
        valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['min_fact'])

        res1, res2 = self._compare_res(['!all', 'whatever'],
                                       ['whatever', '!all'],
                                       valid_subsets=valid_subsets,
                                       min_subset=minimal_gather_subset)
        self.assertEqual(res1, res2)
        self.assertEqual(res1, set(['min_fact', 'whatever']))

    def test_not_all_other_order_min(self):
        valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['min_fact'])

        res1, res2 = self._compare_res(['!min_fact', 'whatever'],
                                       ['whatever', '!min_fact'],
                                       valid_subsets=valid_subsets,
                                       min_subset=minimal_gather_subset)
        self.assertEqual(res1, res2)
        self.assertEqual(res1, set(['whatever']))

    def test_one_minimal_with_all_gather_subset(self):
        my_fact = 'my_fact'
        valid_subsets = frozenset([my_fact])
        minimal_gather_subset = valid_subsets

        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=minimal_gather_subset,
                                            gather_subset=['all'])
        self.assertIsInstance(res, set)
        self.assertEqual(res, set(['my_fact']))

    def test_with_all_gather_subset(self):
        valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['my_fact'])

        # with 'all', every valid subset is selected
        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=minimal_gather_subset,
                                            gather_subset=['all'])
        self.assertIsInstance(res, set)
        self.assertEqual(res, set(['my_fact', 'something_else', 'whatever']))

    def test_one_minimal_with_not_all_gather_subset(self):
        valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['my_fact'])

        # even with '!all', the minimal_gather_subset should be returned
        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=minimal_gather_subset,
                                            gather_subset=['!all'])
        self.assertIsInstance(res, set)
        self.assertEqual(res, set(['my_fact']))

    def test_gather_subset_excludes(self):
        valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['min_fact', 'min_another'])

        # excluding one minimal fact removes it; the other minimal fact stays
        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=minimal_gather_subset,
                                            gather_subset=['!min_fact', '!whatever'])
        self.assertIsInstance(res, set)
        # min_another is in minimal_gather_subset, so always returned
        self.assertEqual(res, set(['min_another']))

    def test_gather_subset_excludes_ordering(self):
        valid_subsets = frozenset(['my_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['my_fact'])

        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=minimal_gather_subset,
                                            gather_subset=['!all', 'whatever'])
        self.assertIsInstance(res, set)
        # '!all' clears the selection, then the explicit 'whatever' include and
        # the always-on minimal_gather_subset are added back.
        self.assertEqual(res, set(['my_fact', 'whatever']))

    def test_gather_subset_excludes_min(self):
        valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['min_fact'])

        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=minimal_gather_subset,
                                            gather_subset=['whatever', '!min'])
        self.assertIsInstance(res, set)
        # '!min' suppresses even the minimal_gather_subset facts, leaving only
        # the explicit include.
        self.assertEqual(res, set(['whatever']))

    def test_gather_subset_excludes_min_and_all(self):
        valid_subsets = frozenset(['min_fact', 'something_else', 'whatever'])
        minimal_gather_subset = frozenset(['min_fact'])

        res = collector.get_collector_names(valid_subsets=valid_subsets,
                                            minimal_gather_subset=minimal_gather_subset,
                                            gather_subset=['whatever', '!all', '!min'])
        self.assertIsInstance(res, set)
        # '!all' plus '!min' excludes everything implicit; only the explicit
        # 'whatever' include survives.
        self.assertEqual(res, set(['whatever']))

    def test_invalid_gather_subset(self):
        valid_subsets = frozenset(['my_fact', 'something_else'])
        minimal_gather_subset = frozenset(['my_fact'])

        # Unknown subset names raise TypeError listing the allowed names.
        self.assertRaisesRegex(TypeError,
                               r'Bad subset .* given to Ansible.*allowed\:.*all,.*my_fact.*',
                               collector.get_collector_names,
                               valid_subsets=valid_subsets,
                               minimal_gather_subset=minimal_gather_subset,
                               gather_subset=['my_fact', 'not_a_valid_gather_subset'])
class TestFindUnresolvedRequires(unittest.TestCase):
    """Tests for collector.find_unresolved_requires()."""

    def test(self):
        """network requires platform/distribution facts no listed collector provides."""
        subsets = {'env': [default_collectors.EnvFactCollector],
                   'network': [default_collectors.LinuxNetworkCollector],
                   'virtual': [default_collectors.LinuxVirtualCollector]}
        unresolved = collector.find_unresolved_requires(['network', 'virtual', 'env'],
                                                        subsets)
        self.assertIsInstance(unresolved, set)
        self.assertEqual(unresolved, {'platform', 'distribution'})

    def test_resolved(self):
        """Once providers for platform/distribution are listed, nothing is unresolved."""
        subsets = {'env': [default_collectors.EnvFactCollector],
                   'network': [default_collectors.LinuxNetworkCollector],
                   'distribution': [default_collectors.DistributionFactCollector],
                   'platform': [default_collectors.PlatformFactCollector],
                   'virtual': [default_collectors.LinuxVirtualCollector]}
        unresolved = collector.find_unresolved_requires(
            ['network', 'virtual', 'env', 'platform', 'distribution'], subsets)
        self.assertIsInstance(unresolved, set)
        self.assertEqual(unresolved, set())
class TestBuildDepData(unittest.TestCase):
    """Tests for collector.build_dep_data()."""

    def test(self):
        subsets = {'env': [default_collectors.EnvFactCollector],
                   'network': [default_collectors.LinuxNetworkCollector],
                   'virtual': [default_collectors.LinuxVirtualCollector]}
        dep_map = collector.build_dep_data(['network', 'virtual', 'env'], subsets)
        self.assertIsInstance(dep_map, defaultdict)
        # Only the network collector declares requirements.
        self.assertEqual(dict(dep_map),
                         {'network': {'platform', 'distribution'},
                          'virtual': set(),
                          'env': set()})
class TestSolveDeps(unittest.TestCase):
    """Tests for collector._solve_deps()."""

    def test_no_solution(self):
        """Goals with no providing collector raise CollectorNotFoundError."""
        goals = {'required_thing1', 'required_thing2'}
        subsets = {'env': [default_collectors.EnvFactCollector],
                   'network': [default_collectors.LinuxNetworkCollector],
                   'virtual': [default_collectors.LinuxVirtualCollector]}
        with self.assertRaises(collector.CollectorNotFoundError):
            collector._solve_deps(goals, subsets)

    def test(self):
        """Solvable goals come back as a set containing every requested goal."""
        goals = {'env', 'network'}
        subsets = {'env': [default_collectors.EnvFactCollector],
                   'network': [default_collectors.LinuxNetworkCollector],
                   'virtual': [default_collectors.LinuxVirtualCollector],
                   'platform': [default_collectors.PlatformFactCollector],
                   'distribution': [default_collectors.DistributionFactCollector]}
        collector.resolve_requires(goals, subsets)
        solved = collector._solve_deps(goals, subsets)
        self.assertIsInstance(solved, set)
        for goal in goals:
            self.assertIn(goal, solved)
class TestResolveRequires(unittest.TestCase):
    """Tests for collector.resolve_requires()."""

    def test_no_resolution(self):
        unresolved = ['required_thing1', 'required_thing2']
        all_fact_subsets = {'env': [default_collectors.EnvFactCollector],
                            'network': [default_collectors.LinuxNetworkCollector],
                            'virtual': [default_collectors.LinuxVirtualCollector]}
        self.assertRaisesRegex(collector.UnresolvedFactDep,
                               'unresolved fact dep.*required_thing2',
                               collector.resolve_requires,
                               unresolved, all_fact_subsets)

    def test(self):
        unresolved = ['env', 'network']
        all_fact_subsets = {'env': [default_collectors.EnvFactCollector],
                            'network': [default_collectors.LinuxNetworkCollector],
                            'virtual': [default_collectors.LinuxVirtualCollector]}
        res = collector.resolve_requires(unresolved, all_fact_subsets)
        for goal in unresolved:
            self.assertIn(goal, res)

    def test_exception(self):
        """The raised UnresolvedFactDep must name the missing requirement."""
        unresolved = ['required_thing1']
        all_fact_subsets = {}
        # BUG FIX: the old try/except form passed silently when no exception was
        # raised; assertRaises makes the missing exception a test failure.
        with self.assertRaises(collector.UnresolvedFactDep) as cm:
            collector.resolve_requires(unresolved, all_fact_subsets)
        self.assertIn(unresolved[0], '%s' % cm.exception)
class TestTsort(unittest.TestCase):
    """Tests for collector.tsort() topological ordering of fact dependencies."""

    def test(self):
        # Dependencies must appear before their dependents in the result.
        dep_map = {'network': set(['distribution', 'platform']),
                   'virtual': set(),
                   'platform': set(['what_platform_wants']),
                   'what_platform_wants': set(),
                   'network_stuff': set(['network'])}
        res = collector.tsort(dep_map)
        self.assertIsInstance(res, list)
        names = [x[0] for x in res]
        assert names.index('network_stuff') > names.index('network')
        assert names.index('platform') > names.index('what_platform_wants')
        assert names.index('network') > names.index('platform')

    def test_cycles(self):
        # node1 -> node2 -> node3 -> node1 forms a cycle.
        dep_map = {'leaf1': set(),
                   'leaf2': set(),
                   'node1': set(['node2']),
                   'node2': set(['node3']),
                   'node3': set(['node1'])}
        self.assertRaises(collector.CycleFoundInFactDeps,
                          collector.tsort,
                          dep_map)

    def test_just_nodes(self):
        dep_map = {'leaf1': set(),
                   'leaf4': set(),
                   'leaf3': set(),
                   'leaf2': set()}
        res = collector.tsort(dep_map)
        self.assertIsInstance(res, list)
        names = [x[0] for x in res]
        # not a lot to assert here, any order of the
        # results is valid
        self.assertEqual(set(names), set(dep_map.keys()))

    def test_self_deps(self):
        # A node depending on itself is a one-node cycle.
        dep_map = {'node1': set(['node1']),
                   'node2': set(['node2'])}
        self.assertRaises(collector.CycleFoundInFactDeps,
                          collector.tsort,
                          dep_map)

    def test_unsolvable(self):
        # node2 depends on 'leaf2', which is not a node in the map; per this
        # assertion tsort still returns every mapped node rather than raising.
        dep_map = {'leaf1': set(),
                   'node2': set(['leaf2'])}
        res = collector.tsort(dep_map)
        self.assertIsInstance(res, list)
        names = [x[0] for x in res]
        self.assertEqual(set(names), set(dep_map.keys()))

    def test_chain(self):
        # Linear dependency chain leaf5 -> leaf1 -> leaf2 -> leaf3 -> leaf4.
        dep_map = {'leaf1': set(['leaf2']),
                   'leaf2': set(['leaf3']),
                   'leaf3': set(['leaf4']),
                   'leaf4': set(),
                   'leaf5': set(['leaf1'])}
        res = collector.tsort(dep_map)
        self.assertIsInstance(res, list)
        names = [x[0] for x in res]
        self.assertEqual(set(names), set(dep_map.keys()))

    def test_multi_pass(self):
        # Several nodes all (transitively) depend on leaf1, requiring more
        # than one resolution pass; leaf1 must sort before all of them.
        dep_map = {'leaf1': set(),
                   'leaf2': set(['leaf3', 'leaf1', 'leaf4', 'leaf5']),
                   'leaf3': set(['leaf4', 'leaf1']),
                   'leaf4': set(['leaf1']),
                   'leaf5': set(['leaf1'])}
        res = collector.tsort(dep_map)
        self.assertIsInstance(res, list)
        names = [x[0] for x in res]
        self.assertEqual(set(names), set(dep_map.keys()))
        assert names.index('leaf1') < names.index('leaf2')
        for leaf in ('leaf2', 'leaf3', 'leaf4', 'leaf5'):
            assert names.index('leaf1') < names.index(leaf)
class TestCollectorClassesFromGatherSubset(unittest.TestCase):
    """Tests for collector.collector_classes_from_gather_subset():
    resolving a gather_subset spec to an ordered list of collector classes."""
    maxDiff = None
    def _classes(self,
                 all_collector_classes=None,
                 valid_subsets=None,
                 minimal_gather_subset=None,
                 gather_subset=None,
                 gather_timeout=None,
                 platform_info=None):
        """Invoke the resolver, defaulting platform_info to a Linux system."""
        platform_info = platform_info or {'system': 'Linux'}
        return collector.collector_classes_from_gather_subset(all_collector_classes=all_collector_classes,
                                                              valid_subsets=valid_subsets,
                                                              minimal_gather_subset=minimal_gather_subset,
                                                              gather_subset=gather_subset,
                                                              gather_timeout=gather_timeout,
                                                              platform_info=platform_info)
    def test_no_args(self):
        """With no collectors and no subset requested, nothing is selected."""
        res = self._classes()
        self.assertIsInstance(res, list)
        self.assertEqual(res, [])
    def test_not_all(self):
        """'!all' removes every collector."""
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=['!all'])
        self.assertIsInstance(res, list)
        self.assertEqual(res, [])
    def test_all(self):
        """'all' resolves without error."""
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=['all'])
        self.assertIsInstance(res, list)
    def test_hardware(self):
        """'hardware' pulls in its platform dependency, ordered before it."""
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=['hardware'])
        self.assertIsInstance(res, list)
        self.assertIn(default_collectors.PlatformFactCollector, res)
        self.assertIn(default_collectors.LinuxHardwareCollector, res)
        # dependency must be collected before the dependent collector
        assert res.index(default_collectors.LinuxHardwareCollector) > res.index(default_collectors.PlatformFactCollector)
    def test_network(self):
        """'network' pulls in distribution and platform, both ordered before it."""
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=['network'])
        self.assertIsInstance(res, list)
        self.assertIn(default_collectors.DistributionFactCollector, res)
        self.assertIn(default_collectors.PlatformFactCollector, res)
        self.assertIn(default_collectors.LinuxNetworkCollector, res)
        assert res.index(default_collectors.LinuxNetworkCollector) > res.index(default_collectors.PlatformFactCollector)
        assert res.index(default_collectors.LinuxNetworkCollector) > res.index(default_collectors.DistributionFactCollector)
    def test_env(self):
        """'env' has no dependencies; exactly one collector is selected."""
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=['env'])
        self.assertIsInstance(res, list)
        self.assertEqual(res, [default_collectors.EnvFactCollector])
    def test_facter(self):
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=set(['env', 'facter']))
        self.assertIsInstance(res, list)
        self.assertEqual(set(res),
                         set([default_collectors.EnvFactCollector,
                              default_collectors.FacterFactCollector]))
    def test_facter_ohai(self):
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=set(['env', 'facter', 'ohai']))
        self.assertIsInstance(res, list)
        self.assertEqual(set(res),
                         set([default_collectors.EnvFactCollector,
                              default_collectors.FacterFactCollector,
                              default_collectors.OhaiFactCollector]))
    def test_just_facter(self):
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=set(['facter']))
        self.assertIsInstance(res, list)
        self.assertEqual(set(res),
                         set([default_collectors.FacterFactCollector]))
    def test_collector_specified_multiple_times(self):
        """Overlapping subset names ('platform' plus 'all') are de-duplicated."""
        res = self._classes(all_collector_classes=default_collectors.collectors,
                            gather_subset=['platform', 'all', 'machine'])
        self.assertIsInstance(res, list)
        self.assertIn(default_collectors.PlatformFactCollector,
                      res)
    def test_unknown_collector(self):
        # something claims 'unknown_collector' is a valid gather_subset, but there is
        # no FactCollector mapped to 'unknown_collector'
        self.assertRaisesRegex(TypeError,
                               r'Bad subset.*unknown_collector.*given to Ansible.*allowed\:.*all,.*env.*',
                               self._classes,
                               all_collector_classes=default_collectors.collectors,
                               gather_subset=['env', 'unknown_collector'])
| 25,491
|
Python
|
.py
| 446
| 41.612108
| 126
| 0.581055
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,916
|
test_collectors.py
|
ansible_ansible/test/units/module_utils/facts/test_collectors.py
|
# unit tests for ansible fact collectors
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from unittest.mock import Mock, patch
from . base import BaseFactsTest
from ansible.module_utils.facts import collector
from ansible.module_utils.facts.system.apparmor import ApparmorFactCollector
from ansible.module_utils.facts.system.caps import SystemCapabilitiesFactCollector
from ansible.module_utils.facts.system.cmdline import CmdLineFactCollector
from ansible.module_utils.facts.system.distribution import DistributionFactCollector
from ansible.module_utils.facts.system.dns import DnsFactCollector
from ansible.module_utils.facts.system.env import EnvFactCollector
from ansible.module_utils.facts.system.fips import FipsFactCollector
from ansible.module_utils.facts.system.pkg_mgr import PkgMgrFactCollector, OpenBSDPkgMgrFactCollector
from ansible.module_utils.facts.system.platform import PlatformFactCollector
from ansible.module_utils.facts.system.python import PythonFactCollector
from ansible.module_utils.facts.system.selinux import SelinuxFactCollector
from ansible.module_utils.facts.system.service_mgr import ServiceMgrFactCollector
from ansible.module_utils.facts.system.ssh_pub_keys import SshPubKeyFactCollector
from ansible.module_utils.facts.system.user import UserFactCollector
from ansible.module_utils.facts.virtual.base import VirtualCollector
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.hardware.base import HardwareCollector
class CollectorException(Exception):
    """Raised by ExceptionThrowingCollector to simulate a collection failure."""
class ExceptionThrowingCollector(collector.BaseFactCollector):
    """A fact collector whose collect() always raises, for error-path tests."""
    name = 'exc_throwing'

    def __init__(self, collectors=None, namespace=None, exception=None):
        super(ExceptionThrowingCollector, self).__init__(collectors, namespace)
        if not exception:
            exception = CollectorException('collection failed')
        self._exception = exception

    def collect(self, module=None, collected_facts=None):
        # unconditionally fail so callers can exercise their error handling
        raise self._exception
class TestExceptionThrowingCollector(BaseFactsTest):
    """Collector exceptions must propagate out of collect() and
    collect_with_namespace() rather than being swallowed."""
    __test__ = True
    gather_subset = ['exc_throwing']
    valid_subsets = ['exc_throwing']
    collector_class = ExceptionThrowingCollector
    def test_collect(self):
        """collect() re-raises the collector's exception."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        self.assertRaises(CollectorException,
                          fact_collector.collect,
                          module=module,
                          collected_facts=self.collected_facts)
    def test_collect_with_namespace(self):
        """The namespaced variant propagates the exception the same way."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        self.assertRaises(CollectorException,
                          fact_collector.collect_with_namespace,
                          module=module,
                          collected_facts=self.collected_facts)
class TestApparmorFacts(BaseFactsTest):
    """AppArmor facts collection."""
    __test__ = True
    gather_subset = ['!all', 'apparmor']
    valid_subsets = ['apparmor']
    fact_namespace = 'ansible_apparmor'
    collector_class = ApparmorFactCollector
    def test_collect(self):
        """The apparmor fact always reports a 'status' key."""
        facts_dict = super(TestApparmorFacts, self)._test_collect()
        self.assertIn('status', facts_dict['apparmor'])
class TestCapsFacts(BaseFactsTest):
    """POSIX capabilities facts, collected via capsh."""
    __test__ = True
    gather_subset = ['!all', 'caps']
    valid_subsets = ['caps']
    fact_namespace = 'ansible_system_capabilities'
    collector_class = SystemCapabilitiesFactCollector
    def _mock_module(self):
        """Override the base mock so capsh appears installed and returns a
        minimal 'Current: =ep' capability set."""
        mock_module = Mock()
        mock_module.params = {'gather_subset': self.gather_subset,
                              'gather_timeout': 10,
                              'filter': '*'}
        mock_module.get_bin_path = Mock(return_value='/usr/sbin/capsh')
        mock_module.run_command = Mock(return_value=(0, 'Current: =ep', ''))
        return mock_module
class TestCmdLineFacts(BaseFactsTest):
    """Kernel command line (/proc/cmdline) parsing facts."""
    __test__ = True
    gather_subset = ['!all', 'cmdline']
    valid_subsets = ['cmdline']
    fact_namespace = 'ansible_cmdline'
    collector_class = CmdLineFactCollector
    def test_parse_proc_cmdline_uefi(self):
        """Backslash-heavy UEFI initrd paths are preserved verbatim."""
        uefi_cmdline = r'initrd=\70ef65e1a04a47aea04f7b5145ea3537\4.10.0-19-generic\initrd root=UUID=50973b75-4a66-4bf0-9764-2b7614489e64 ro quiet'
        expected = {'initrd': r'\70ef65e1a04a47aea04f7b5145ea3537\4.10.0-19-generic\initrd',
                    'root': 'UUID=50973b75-4a66-4bf0-9764-2b7614489e64',
                    'quiet': True,
                    'ro': True}
        fact_collector = self.collector_class()
        facts_dict = fact_collector._parse_proc_cmdline(uefi_cmdline)
        self.assertDictEqual(facts_dict, expected)
    def test_parse_proc_cmdline_fedora(self):
        """Valueless flags become True; key=value pairs keep their values."""
        cmdline_fedora = r'BOOT_IMAGE=/vmlinuz-4.10.16-200.fc25.x86_64 root=/dev/mapper/fedora-root ro rd.lvm.lv=fedora/root rd.luks.uuid=luks-c80b7537-358b-4a07-b88c-c59ef187479b rd.lvm.lv=fedora/swap rhgb quiet LANG=en_US.UTF-8'  # noqa
        expected = {'BOOT_IMAGE': '/vmlinuz-4.10.16-200.fc25.x86_64',
                    'LANG': 'en_US.UTF-8',
                    'quiet': True,
                    'rd.luks.uuid': 'luks-c80b7537-358b-4a07-b88c-c59ef187479b',
                    'rd.lvm.lv': 'fedora/swap',
                    'rhgb': True,
                    'ro': True,
                    'root': '/dev/mapper/fedora-root'}
        fact_collector = self.collector_class()
        facts_dict = fact_collector._parse_proc_cmdline(cmdline_fedora)
        self.assertDictEqual(facts_dict, expected)
    def test_parse_proc_cmdline_dup_console(self):
        """Duplicate keys: the last occurrence wins (documented limitation)."""
        example = r'BOOT_IMAGE=/boot/vmlinuz-4.4.0-72-generic root=UUID=e12e46d9-06c9-4a64-a7b3-60e24b062d90 ro console=tty1 console=ttyS0'
        # FIXME: Two 'console' keywords? Using a dict for the fact value here loses info. Currently the 'last' one wins
        expected = {'BOOT_IMAGE': '/boot/vmlinuz-4.4.0-72-generic',
                    'root': 'UUID=e12e46d9-06c9-4a64-a7b3-60e24b062d90',
                    'ro': True,
                    'console': 'ttyS0'}
        fact_collector = self.collector_class()
        facts_dict = fact_collector._parse_proc_cmdline(example)
        # TODO: fails because we lose a 'console'
        self.assertDictEqual(facts_dict, expected)
class TestDistributionFacts(BaseFactsTest):
    """Distribution facts (name, version, family)."""
    __test__ = True
    gather_subset = ['!all', 'distribution']
    valid_subsets = ['distribution']
    fact_namespace = 'ansible_distribution'
    collector_class = DistributionFactCollector
class TestDnsFacts(BaseFactsTest):
    """DNS resolver facts."""
    __test__ = True
    gather_subset = ['!all', 'dns']
    valid_subsets = ['dns']
    fact_namespace = 'ansible_dns'
    collector_class = DnsFactCollector
class TestEnvFacts(BaseFactsTest):
    """Environment variable facts."""
    __test__ = True
    gather_subset = ['!all', 'env']
    valid_subsets = ['env']
    fact_namespace = 'ansible_env'
    collector_class = EnvFactCollector
    def test_collect(self):
        """HOME should always appear in the collected environment."""
        facts_dict = super(TestEnvFacts, self)._test_collect()
        self.assertIn('HOME', facts_dict['env'])
class TestFipsFacts(BaseFactsTest):
    """FIPS-mode facts."""
    __test__ = True
    gather_subset = ['!all', 'fips']
    valid_subsets = ['fips']
    fact_namespace = 'ansible_fips'
    collector_class = FipsFactCollector
class TestHardwareCollector(BaseFactsTest):
    """Hardware facts; the collector consumes ansible_architecture."""
    __test__ = True
    gather_subset = ['!all', 'hardware']
    valid_subsets = ['hardware']
    fact_namespace = 'ansible_hardware'
    collector_class = HardwareCollector
    # architecture fact the hardware collector reads from collected_facts
    collected_facts = {'ansible_architecture': 'x86_64'}
class TestNetworkCollector(BaseFactsTest):
    """Network interface facts."""
    __test__ = True
    gather_subset = ['!all', 'network']
    valid_subsets = ['network']
    fact_namespace = 'ansible_network'
    collector_class = NetworkCollector
class TestPkgMgrFacts(BaseFactsTest):
    """Package manager detection on a Fedora/RedHat-family host."""
    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = PkgMgrFactCollector
    collected_facts = {
        "ansible_distribution": "Fedora",
        "ansible_distribution_major_version": "28",
        "ansible_os_family": "RedHat"
    }
    def test_collect(self):
        """A pkg_mgr fact is produced for the given distribution facts."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
class TestMacOSXPkgMgrFacts(BaseFactsTest):
    """Package manager detection on macOS: homebrew prefixes and macports."""
    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = PkgMgrFactCollector
    collected_facts = {
        "ansible_distribution": "MacOSX",
        "ansible_distribution_major_version": "11",
        "ansible_os_family": "Darwin"
    }
    @patch('ansible.module_utils.facts.system.pkg_mgr.os.path.exists', side_effect=lambda x: x == '/opt/homebrew/bin/brew')
    def test_collect_opt_homebrew(self, p_exists):
        """brew under the Apple-silicon prefix (/opt/homebrew) -> homebrew."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
        self.assertEqual(facts_dict['pkg_mgr'], 'homebrew')
    @patch('ansible.module_utils.facts.system.pkg_mgr.os.path.exists', side_effect=lambda x: x == '/usr/local/bin/brew')
    def test_collect_usr_homebrew(self, p_exists):
        """brew under the Intel prefix (/usr/local) -> homebrew."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
        self.assertEqual(facts_dict['pkg_mgr'], 'homebrew')
    @patch('ansible.module_utils.facts.system.pkg_mgr.os.path.exists', side_effect=lambda x: x == '/opt/local/bin/port')
    def test_collect_macports(self, p_exists):
        """MacPorts' port binary present -> macports."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
        self.assertEqual(facts_dict['pkg_mgr'], 'macports')
def _sanitize_os_path_apt_get(path):
if path == '/usr/bin/apt-get':
return True
else:
return False
class TestPkgMgrFactsAptFedora(BaseFactsTest):
    """apt present on a RedHat-family host (ansible_pkg_mgr pre-set to apt)."""
    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = PkgMgrFactCollector
    collected_facts = {
        "ansible_distribution": "Fedora",
        "ansible_distribution_major_version": "28",
        "ansible_os_family": "RedHat",
        "ansible_pkg_mgr": "apt"
    }
    @patch('ansible.module_utils.facts.system.pkg_mgr.os.path.exists', side_effect=_sanitize_os_path_apt_get)
    def test_collect(self, mock_os_path_exists):
        """A pkg_mgr fact is still produced when only apt-get exists."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
class TestOpenBSDPkgMgrFacts(BaseFactsTest):
    """OpenBSD unconditionally reports the openbsd_pkg manager."""
    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = OpenBSDPkgMgrFactCollector
    def test_collect(self):
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
        self.assertEqual(facts_dict['pkg_mgr'], 'openbsd_pkg')
class TestPlatformFactCollector(BaseFactsTest):
    """Platform (machine/arch/kernel) facts."""
    __test__ = True
    gather_subset = ['!all', 'platform']
    valid_subsets = ['platform']
    fact_namespace = 'ansible_platform'
    collector_class = PlatformFactCollector
class TestPythonFactCollector(BaseFactsTest):
    """Python interpreter facts."""
    __test__ = True
    gather_subset = ['!all', 'python']
    valid_subsets = ['python']
    fact_namespace = 'ansible_python'
    collector_class = PythonFactCollector
class TestSelinuxFacts(BaseFactsTest):
    """SELinux facts."""
    __test__ = True
    gather_subset = ['!all', 'selinux']
    valid_subsets = ['selinux']
    fact_namespace = 'ansible_selinux'
    collector_class = SelinuxFactCollector
    def test_no_selinux(self):
        """Without the selinux Python library, status explains why."""
        with patch('ansible.module_utils.facts.system.selinux.HAVE_SELINUX', False):
            module = self._mock_module()
            fact_collector = self.collector_class()
            facts_dict = fact_collector.collect(module=module)
            self.assertIsInstance(facts_dict, dict)
            self.assertEqual(facts_dict['selinux']['status'], 'Missing selinux Python library')
class TestServiceMgrFacts(BaseFactsTest):
    """Tests for ServiceMgrFactCollector's init-system detection."""
    __test__ = True
    gather_subset = ['!all', 'service_mgr']
    valid_subsets = ['service_mgr']
    fact_namespace = 'ansible_service_mgr'
    collector_class = ServiceMgrFactCollector

    # TODO: dedupe some of this test code

    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
    def test_no_proc1_ps_random_init(self, mock_gfc):
        """No /proc/1/comm plus an unknown ps path falls back to the basename."""
        # no /proc/1/comm, ps returns '/sbin/sys11' which we dont know
        # should end up return 'sys11'
        module = self._mock_module()
        module.run_command = Mock(return_value=(0, '/sbin/sys11', ''))
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict['service_mgr'], 'sys11')

    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value='runit-init')
    def test_service_mgr_runit(self, mock_gfc):
        """/proc/1/comm naming runit-init detects the runit service manager."""
        # /proc/1/comm contains 'runit-init', ps fails, service manager is runit
        # should end up return 'runit'
        module = self._mock_module()
        module.run_command = Mock(return_value=(1, '', ''))
        collected_facts = {'ansible_system': 'Linux'}
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module,
                                            collected_facts=collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict['service_mgr'], 'runit')

    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
    @patch('ansible.module_utils.facts.system.service_mgr.os.path.islink', side_effect=lambda x: x == '/sbin/init')
    @patch('ansible.module_utils.facts.system.service_mgr.os.readlink', side_effect=lambda x: '/sbin/runit-init' if x == '/sbin/init' else '/bin/false')
    def test_service_mgr_runit_no_comm(self, mock_orl, mock_opl, mock_gfc):
        """A /sbin/init symlink to runit-init detects runit without /proc/1/comm.

        Fix: stacked @patch decorators inject mocks bottom-up, so the first
        mock argument is the decorator nearest the function (os.readlink),
        then os.path.islink, then get_file_content. The parameter names
        previously listed them in the reverse (wrong) order.
        """
        # no /proc/1/comm, ps returns 'COMMAND\n', service manager is runit
        # should end up return 'runit'
        module = self._mock_module()
        module.run_command = Mock(return_value=(1, 'COMMAND\n', ''))
        collected_facts = {'ansible_system': 'Linux'}
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module,
                                            collected_facts=collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict['service_mgr'], 'runit')
class TestSshPubKeyFactCollector(BaseFactsTest):
    """SSH host public key facts."""
    __test__ = True
    gather_subset = ['!all', 'ssh_pub_keys']
    valid_subsets = ['ssh_pub_keys']
    # NOTE(review): 'ansible_ssh_pub_leys' looks like a typo for
    # 'ansible_ssh_pub_keys' -- confirm whether fact_namespace is actually
    # consumed by BaseFactsTest before correcting, since fixing the string
    # could change what the base-class assertions compare against.
    fact_namespace = 'ansible_ssh_pub_leys'
    collector_class = SshPubKeyFactCollector
class TestUserFactCollector(BaseFactsTest):
    """Current-user facts."""
    __test__ = True
    gather_subset = ['!all', 'user']
    valid_subsets = ['user']
    fact_namespace = 'ansible_user'
    collector_class = UserFactCollector
class TestVirtualFacts(BaseFactsTest):
    """Virtualization facts."""
    __test__ = True
    gather_subset = ['!all', 'virtual']
    valid_subsets = ['virtual']
    fact_namespace = 'ansible_virtual'
    collector_class = VirtualCollector
| 16,819
|
Python
|
.py
| 342
| 41.885965
| 239
| 0.684133
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,917
|
test_timeout.py
|
ansible_ansible/test/units/module_utils/facts/test_timeout.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import sys
import time
import pytest
from ansible.module_utils.facts import timeout
@pytest.fixture
def set_gather_timeout_higher():
    """Temporarily raise module-level GATHER_TIMEOUT to 5 seconds."""
    default_timeout = timeout.GATHER_TIMEOUT
    timeout.GATHER_TIMEOUT = 5
    yield
    timeout.GATHER_TIMEOUT = default_timeout
@pytest.fixture
def set_gather_timeout_lower():
    """Temporarily lower module-level GATHER_TIMEOUT to 2 seconds."""
    default_timeout = timeout.GATHER_TIMEOUT
    timeout.GATHER_TIMEOUT = 2
    yield
    timeout.GATHER_TIMEOUT = default_timeout
@timeout.timeout
def sleep_amount_implicit(amount):
    """Sleep `amount` seconds under the module-default timeout."""
    # implicit refers to the lack of argument to the decorator
    time.sleep(amount)
    return 'Succeeded after {0} sec'.format(amount)
@timeout.timeout(timeout.DEFAULT_GATHER_TIMEOUT + 5)
def sleep_amount_explicit_higher(amount):
    """Sleep under an explicit timeout 5s above the default."""
    # explicit refers to the argument to the decorator
    time.sleep(amount)
    return 'Succeeded after {0} sec'.format(amount)
@timeout.timeout(2)
def sleep_amount_explicit_lower(amount):
    """Sleep under an explicit 2-second timeout."""
    # explicit refers to the argument to the decorator
    time.sleep(amount)
    return 'Succeeded after {0} sec'.format(amount)
#
# Tests for how the timeout decorator is specified
#
def test_defaults_still_within_bounds():
    """Guard: the other tests assume DEFAULT_GATHER_TIMEOUT is at least 4."""
    # If the default changes outside of these bounds, some of the tests will
    # no longer test the right thing. Need to review and update the timeouts
    # in the other tests if this fails
    assert timeout.DEFAULT_GATHER_TIMEOUT >= 4
def test_implicit_file_default_succeeds():
    """A sleep shorter than the default timeout completes normally."""
    # amount checked must be less than DEFAULT_GATHER_TIMEOUT
    assert sleep_amount_implicit(1) == 'Succeeded after 1 sec'
def test_implicit_file_default_timesout(monkeypatch):
    """Sleeping past the (patched to 1s) default raises TimeoutError."""
    monkeypatch.setattr(timeout, 'DEFAULT_GATHER_TIMEOUT', 1)
    # sleep_time is greater than the default
    sleep_time = timeout.DEFAULT_GATHER_TIMEOUT + 1
    with pytest.raises(timeout.TimeoutError, match=r"^Timer expired after"):
        sleep_amount_implicit(sleep_time)
def test_implicit_file_overridden_succeeds(set_gather_timeout_higher):
    """A 5s GATHER_TIMEOUT override lets a 3s sleep succeed."""
    # Set sleep_time greater than the default timeout and less than our new timeout
    sleep_time = 3
    assert sleep_amount_implicit(sleep_time) == 'Succeeded after {0} sec'.format(sleep_time)
def test_implicit_file_overridden_timesout(set_gather_timeout_lower):
    """A 2s GATHER_TIMEOUT override makes a 3s sleep time out."""
    # Set sleep_time greater than our new timeout but less than the default
    sleep_time = 3
    with pytest.raises(timeout.TimeoutError, match=r"^Timer expired after"):
        sleep_amount_implicit(sleep_time)
def test_explicit_succeeds(monkeypatch):
    """An explicit decorator timeout takes precedence over the patched default."""
    monkeypatch.setattr(timeout, 'DEFAULT_GATHER_TIMEOUT', 1)
    # Set sleep_time greater than the default timeout and less than our new timeout
    sleep_time = 2
    assert sleep_amount_explicit_higher(sleep_time) == 'Succeeded after {0} sec'.format(sleep_time)
def test_explicit_timeout():
    """An explicit 2s decorator timeout fails a 3s sleep."""
    # Set sleep_time greater than our new timeout but less than the default
    sleep_time = 3
    with pytest.raises(timeout.TimeoutError, match=r"^Timer expired after"):
        sleep_amount_explicit_lower(sleep_time)
#
# Test that exception handling works
#
@timeout.timeout(1)
def function_times_out():
    """Always exceeds its 1-second timeout."""
    time.sleep(2)
# This is just about the same test as function_times_out but uses a separate process which is where
# we normally have our timeouts. It's more of an integration test than a unit test.
@timeout.timeout(1)
def function_times_out_in_run_command(am):
    """Exceeds its 1-second timeout while blocked in a child process."""
    am.run_command([sys.executable, '-c', 'import time ; time.sleep(2)'])
@timeout.timeout(1)
def function_other_timeout():
    """Raises a plain (builtin-style) TimeoutError, not the module's."""
    # NOTE(review): not referenced by any test visible in this file --
    # confirm it is still needed before removing.
    raise TimeoutError('Vanilla Timeout')
@timeout.timeout(1)
def function_raises():
    """Raises ZeroDivisionError before the timeout can fire."""
    return 1 / 0
@timeout.timeout(1)
def function_catches_all_exceptions():
    """Catches BaseException around the sleep; used to show the timeout is
    not delivered as an exception inside the wrapped code."""
    try:
        time.sleep(10)
    except BaseException:
        raise RuntimeError('We should not have gotten here')
def test_timeout_raises_timeout():
    """The module's TimeoutError surfaces to the caller."""
    with pytest.raises(timeout.TimeoutError, match=r"^Timer expired after"):
        function_times_out()
@pytest.mark.parametrize('stdin', ({},), indirect=['stdin'])
def test_timeout_raises_timeout_integration_test(am):
    """Same as above, with the sleep happening in a child process."""
    with pytest.raises(timeout.TimeoutError, match=r"^Timer expired after"):
        function_times_out_in_run_command(am)
def test_timeout_raises_other_exception():
    """Exceptions raised before the timeout propagate unchanged."""
    with pytest.raises(ZeroDivisionError, match=r"^division by"):
        function_raises()
def test_exception_not_caught_by_called_code():
    """The timeout fires even when wrapped code catches BaseException."""
    with pytest.raises(timeout.TimeoutError, match=r"^Timer expired after"):
        function_catches_all_exceptions()
| 4,742
|
Python
|
.py
| 108
| 39.944444
| 99
| 0.750763
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,918
|
test_sysctl.py
|
ansible_ansible/test/units/module_utils/facts/test_sysctl.py
|
# This file is part of Ansible
# -*- coding: utf-8 -*-
#
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
# for testing
import unittest
from unittest.mock import MagicMock
from ansible.module_utils.facts.sysctl import get_sysctl
# `sysctl hw` on an openbsd machine
OPENBSD_SYSCTL_HW = """
hw.machine=amd64
hw.model=AMD EPYC Processor (with IBPB)
hw.ncpu=1
hw.byteorder=1234
hw.pagesize=4096
hw.disknames=cd0:,sd0:9e1bd96cb20ab429,fd0:
hw.diskcount=3
hw.sensors.viomb0.raw0=0 (desired)
hw.sensors.viomb0.raw1=0 (current)
hw.cpuspeed=3394
hw.vendor=QEMU
hw.product=Standard PC (i440FX + PIIX, 1996)
hw.version=pc-i440fx-5.1
hw.uuid=5833415a-eefc-964f-a306-fa434d44d117
hw.physmem=1056804864
hw.usermem=1056792576
hw.ncpufound=1
hw.allowpowerdown=1
hw.smt=0
hw.ncpuonline=1
"""
# partial output of `sysctl kern` on an openbsd machine
# for testing multiline parsing
OPENBSD_SYSCTL_KERN_PARTIAL = """
kern.ostype=OpenBSD
kern.osrelease=6.7
kern.osrevision=202005
kern.version=OpenBSD 6.7 (GENERIC) #179: Thu May 7 11:02:37 MDT 2020
deraadt@amd64.openbsd.org:/usr/src/sys/arch/amd64/compile/GENERIC
kern.maxvnodes=12447
kern.maxproc=1310
kern.maxfiles=7030
kern.argmax=524288
kern.securelevel=1
kern.hostname=openbsd67.vm.home.elrod.me
kern.hostid=0
kern.clockrate=tick = 10000, tickadj = 40, hz = 100, profhz = 100, stathz = 100
kern.posix1version=200809
"""
# partial output of `sysctl vm` on Linux. The output has tabs in it and Linux
# sysctl has spaces around the =
LINUX_SYSCTL_VM_PARTIAL = """
vm.dirty_background_ratio = 10
vm.dirty_bytes = 0
vm.dirty_expire_centisecs = 3000
vm.dirty_ratio = 20
vm.dirty_writeback_centisecs = 500
vm.dirtytime_expire_seconds = 43200
vm.extfrag_threshold = 500
vm.hugetlb_shm_group = 0
vm.laptop_mode = 0
vm.legacy_va_layout = 0
vm.lowmem_reserve_ratio = 256 256 32 0
vm.max_map_count = 65530
vm.min_free_kbytes = 22914
vm.min_slab_ratio = 5
"""
# partial output of `sysctl vm` on macOS. The output is colon-separated.
MACOS_SYSCTL_VM_PARTIAL = """
vm.loadavg: { 1.28 1.18 1.13 }
vm.swapusage: total = 2048.00M used = 1017.50M free = 1030.50M (encrypted)
vm.cs_force_kill: 0
vm.cs_force_hard: 0
vm.cs_debug: 0
vm.cs_debug_fail_on_unsigned_code: 0
vm.cs_debug_unsigned_exec_failures: 0
vm.cs_debug_unsigned_mmap_failures: 0
vm.cs_all_vnodes: 0
vm.cs_system_enforcement: 1
vm.cs_process_enforcement: 0
vm.cs_enforcement_panic: 0
vm.cs_library_validation: 0
vm.global_user_wire_limit: 3006477107
"""
# Invalid/bad output
BAD_SYSCTL = """
this.output.is.invalid
it.has.no.equals.sign.or.colon
so.it.should.fail.to.parse
"""
# Mixed good/bad output
GOOD_BAD_SYSCTL = """
bad.output.here
hw.smt=0
and.bad.output.here
"""
class TestSysctlParsingInFacts(unittest.TestCase):
def test_get_sysctl_missing_binary(self):
module = MagicMock()
module.get_bin_path.return_value = '/usr/sbin/sysctl'
module.run_command.side_effect = ValueError
self.assertRaises(ValueError, get_sysctl, module, ['vm'])
def test_get_sysctl_nonzero_rc(self):
module = MagicMock()
module.get_bin_path.return_value = '/usr/sbin/sysctl'
module.run_command.return_value = (1, '', '')
sysctl = get_sysctl(module, ['hw'])
self.assertEqual(sysctl, {})
def test_get_sysctl_command_error(self):
module = MagicMock()
module.get_bin_path.return_value = '/usr/sbin/sysctl'
for err in (IOError, OSError):
module.reset_mock()
module.run_command.side_effect = err('foo')
sysctl = get_sysctl(module, ['hw'])
module.warn.assert_called_once_with('Unable to read sysctl: foo')
self.assertEqual(sysctl, {})
def test_get_sysctl_all_invalid_output(self):
module = MagicMock()
module.get_bin_path.return_value = '/sbin/sysctl'
module.run_command.return_value = (0, BAD_SYSCTL, '')
sysctl = get_sysctl(module, ['hw'])
module.run_command.assert_called_once_with(['/sbin/sysctl', 'hw'])
lines = [l for l in BAD_SYSCTL.splitlines() if l]
for call in module.warn.call_args_list:
self.assertIn('Unable to split sysctl line', call[0][0])
self.assertEqual(module.warn.call_count, len(lines))
self.assertEqual(sysctl, {})
def test_get_sysctl_mixed_invalid_output(self):
module = MagicMock()
module.get_bin_path.return_value = '/sbin/sysctl'
module.run_command.return_value = (0, GOOD_BAD_SYSCTL, '')
sysctl = get_sysctl(module, ['hw'])
module.run_command.assert_called_once_with(['/sbin/sysctl', 'hw'])
bad_lines = ['bad.output.here', 'and.bad.output.here']
for call in module.warn.call_args_list:
self.assertIn('Unable to split sysctl line', call[0][0])
self.assertEqual(module.warn.call_count, 2)
self.assertEqual(sysctl, {'hw.smt': '0'})
def test_get_sysctl_openbsd_hw(self):
expected_lines = [l for l in OPENBSD_SYSCTL_HW.splitlines() if l]
module = MagicMock()
module.get_bin_path.return_value = '/sbin/sysctl'
module.run_command.return_value = (0, OPENBSD_SYSCTL_HW, '')
sysctl = get_sysctl(module, ['hw'])
module.run_command.assert_called_once_with(['/sbin/sysctl', 'hw'])
self.assertEqual(len(sysctl), len(expected_lines))
self.assertEqual(sysctl['hw.machine'], 'amd64') # first line
self.assertEqual(sysctl['hw.smt'], '0') # random line
self.assertEqual(sysctl['hw.ncpuonline'], '1') # last line
# weird chars in value
self.assertEqual(
sysctl['hw.disknames'],
'cd0:,sd0:9e1bd96cb20ab429,fd0:')
# more symbols/spaces in value
self.assertEqual(
sysctl['hw.product'],
'Standard PC (i440FX + PIIX, 1996)')
def test_get_sysctl_openbsd_kern(self):
module = MagicMock()
module.get_bin_path.return_value = '/sbin/sysctl'
module.run_command.return_value = (0, OPENBSD_SYSCTL_KERN_PARTIAL, '')
sysctl = get_sysctl(module, ['kern'])
module.run_command.assert_called_once_with(['/sbin/sysctl', 'kern'])
self.assertEqual(
len(sysctl),
len(
[l for l
in OPENBSD_SYSCTL_KERN_PARTIAL.splitlines()
if l.startswith('kern')]))
self.assertEqual(sysctl['kern.ostype'], 'OpenBSD') # first line
self.assertEqual(sysctl['kern.maxproc'], '1310') # random line
self.assertEqual(sysctl['kern.posix1version'], '200809') # last line
# multiline
self.assertEqual(
sysctl['kern.version'],
'OpenBSD 6.7 (GENERIC) #179: Thu May 7 11:02:37 MDT 2020\n '
'deraadt@amd64.openbsd.org:/usr/src/sys/arch/amd64/compile/GENERIC')
# more symbols/spaces in value
self.assertEqual(
sysctl['kern.clockrate'],
'tick = 10000, tickadj = 40, hz = 100, profhz = 100, stathz = 100')
def test_get_sysctl_linux_vm(self):
    """get_sysctl() parses Linux `sysctl vm` output, including tab-separated values."""
    module = MagicMock()
    module.get_bin_path.return_value = '/usr/sbin/sysctl'
    module.run_command.return_value = (0, LINUX_SYSCTL_VM_PARTIAL, '')

    facts = get_sysctl(module, ['vm'])

    module.run_command.assert_called_once_with(['/usr/sbin/sysctl', 'vm'])
    non_empty = [line for line in LINUX_SYSCTL_VM_PARTIAL.splitlines() if line]
    self.assertEqual(len(facts), len(non_empty))
    self.assertEqual(facts['vm.dirty_background_ratio'], '10')
    self.assertEqual(facts['vm.laptop_mode'], '0')
    self.assertEqual(facts['vm.min_slab_ratio'], '5')
    # Embedded tabs in a value must be preserved, not split on.
    self.assertEqual(facts['vm.lowmem_reserve_ratio'], '256\t256\t32\t0')
def test_get_sysctl_macos_vm(self):
    """get_sysctl() parses macOS `sysctl vm` output with braces and '=' in values."""
    module = MagicMock()
    module.get_bin_path.return_value = '/usr/sbin/sysctl'
    module.run_command.return_value = (0, MACOS_SYSCTL_VM_PARTIAL, '')

    facts = get_sysctl(module, ['vm'])

    module.run_command.assert_called_once_with(['/usr/sbin/sysctl', 'vm'])
    non_empty = [line for line in MACOS_SYSCTL_VM_PARTIAL.splitlines() if line]
    self.assertEqual(len(facts), len(non_empty))
    # Brace-wrapped values stay intact.
    self.assertEqual(facts['vm.loadavg'], '{ 1.28 1.18 1.13 }')
    # As do values containing their own '=' signs.
    self.assertEqual(
        facts['vm.swapusage'],
        'total = 2048.00M used = 1017.50M free = 1030.50M (encrypted)')
| 9,000
|
Python
|
.py
| 223
| 34.838565
| 80
| 0.671616
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,919
|
test_date_time.py
|
ansible_ansible/test/units/module_utils/facts/test_date_time.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
import datetime
import string
import time
from ansible.module_utils.compat.datetime import UTC
from ansible.module_utils.facts.system import date_time
EPOCH_TS = 1594449296.123456
DT = datetime.datetime(2020, 7, 11, 12, 34, 56, 124356)
UTC_DT = datetime.datetime(2020, 7, 11, 2, 34, 56, 124356)
@pytest.fixture
def fake_now(monkeypatch):
    """
    Patch `datetime.datetime.fromtimestamp()` and `time.time()` so the
    date_time collector sees deterministic clock values.
    """
    class FrozenDatetime:
        # Stand-in for the datetime.datetime class with a canned fromtimestamp().
        @classmethod
        def fromtimestamp(cls, timestamp, tz=None):
            # The collector asks for UTC explicitly; serve the UTC fixture then.
            fixed = UTC_DT if tz == UTC else DT
            return fixed.replace(tzinfo=tz)

    monkeypatch.setattr(date_time.datetime, 'datetime', FrozenDatetime)
    monkeypatch.setattr(time, 'time', lambda: EPOCH_TS)
@pytest.fixture
def fake_date_facts(fake_now):
    """Return a predictable instance of collected date_time facts."""
    return date_time.DateTimeFactCollector().collect()
@pytest.mark.parametrize(
    ('fact_name', 'fact_value'),
    (
        ('year', '2020'),
        ('month', '07'),
        ('weekday', 'Saturday'),
        ('weekday_number', '6'),
        ('weeknumber', '27'),
        ('day', '11'),
        ('hour', '12'),
        ('minute', '34'),
        ('second', '56'),
        ('date', '2020-07-11'),
        ('time', '12:34:56'),
        ('iso8601_basic', '20200711T123456124356'),
        ('iso8601_basic_short', '20200711T123456'),
        # NOTE: the iso8601_micro/iso8601 facts carry 02:34 while the plain
        # time facts carry 12:34 -- they come from the UTC_DT fixture vs DT
        # (see the fake_now fixture above).
        ('iso8601_micro', '2020-07-11T02:34:56.124356Z'),
        ('iso8601', '2020-07-11T02:34:56Z'),
    ),
)
def test_date_time_facts(fake_date_facts, fact_name, fact_value):
    """Each collected date_time fact matches its expected deterministic value."""
    assert fake_date_facts['date_time'][fact_name] == fact_value
def test_date_time_epoch(fake_date_facts):
    """Test that format of returned epoch value is correct"""
    for key in ('epoch', 'epoch_int'):
        value = fake_date_facts['date_time'][key]
        assert value.isdigit()
        assert len(value) == 10  # This length will not change any time soon
@pytest.mark.parametrize('fact_name', ('tz', 'tz_dst'))
def test_date_time_tz(fake_date_facts, fact_name):
    """
    Test the returned value for timezone consists of only uppercase
    letters and is the expected length.
    """
    tz_value = fake_date_facts['date_time'][fact_name]
    assert tz_value.isupper()
    assert 2 <= len(tz_value) <= 5
    # Abbreviations such as CET/CEST contain nothing but A-Z.
    assert set(tz_value) <= set(string.ascii_uppercase)
def test_date_time_tz_offset(fake_date_facts):
    """
    Test that the timezone offset begins with a `+` or `-` and ends with a
    series of integers.
    """
    offset = fake_date_facts['date_time']['tz_offset']
    assert len(offset) == 5
    sign, digits = offset[0], offset[1:]
    assert sign in ('-', '+')
    assert digits.isdigit()
| 3,236
|
Python
|
.py
| 80
| 35.1
| 108
| 0.651341
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,920
|
base.py
|
ansible_ansible/test/units/module_utils/facts/base.py
|
# base unit test classes for ansible/module_utils/facts/ tests
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
import unittest
from unittest.mock import Mock, patch
class BaseFactsTest(unittest.TestCase):
    """Shared scaffolding for fact-collector tests; subclasses fill in the class attrs."""

    # Not a test case itself -- only concrete subclasses are collected.
    __test__ = False

    gather_subset = ['all']
    valid_subsets = None
    fact_namespace = None
    collector_class = None

    # A dict of ansible_facts. Some fact collectors depend on facts gathered
    # by other collectors (like 'ansible_architecture' or 'ansible_system'),
    # which can be passed via the collected_facts arg to collect().
    collected_facts = None

    def _mock_module(self):
        """Build a minimal AnsibleModule stand-in for collect() calls."""
        module = Mock()
        module.params = {
            'gather_subset': self.gather_subset,
            'gather_timeout': 5,
            'filter': '*',
        }
        module.get_bin_path = Mock(return_value=None)
        return module

    @patch('platform.system', return_value='Linux')
    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value='systemd')
    def test_collect(self, mock_gfc, mock_ps):
        self._test_collect()

    def _test_collect(self):
        """Run the collector under test and assert it yields a dict of facts."""
        collector = self.collector_class()
        facts = collector.collect(module=self._mock_module(),
                                  collected_facts=self.collected_facts)
        self.assertIsInstance(facts, dict)
        return facts

    @patch('platform.system', return_value='Linux')
    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value='systemd')
    def test_collect_with_namespace(self, mock_gfc, mock_ps):
        collector = self.collector_class()
        facts = collector.collect_with_namespace(module=self._mock_module(),
                                                 collected_facts=self.collected_facts)
        self.assertIsInstance(facts, dict)
| 2,603
|
Python
|
.py
| 55
| 40.636364
| 100
| 0.686761
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,921
|
test_fc_wwn.py
|
ansible_ansible/test/units/module_utils/facts/network/test_fc_wwn.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.facts.network import fc_wwn
import pytest
# AIX lsdev
LSDEV_OUTPUT = """
fcs0 Defined 00-00 8Gb PCI Express Dual Port FC Adapter (df1000f114108a03)
fcs1 Available 04-00 8Gb PCI Express Dual Port FC Adapter (df1000f114108a03)
"""
# slightly cut output of lscfg (from Z0 to ZC)
LSCFG_OUTPUT = """
fcs1 U78CB.001.WZS00ZS-P1-C9-T1 8Gb PCI Express Dual Port FC Adapter (df1000f114108a03)
Part Number.................00E0806
Serial Number...............1C4090830F
Manufacturer................001C
EC Level.................... D77161
Customer Card ID Number.....577D
FRU Number..................00E0806
Device Specific.(ZM)........3
Network Address.............10000090FA551508
ROS Level and ID............027820B7
Device Specific.(Z0)........31004549
Device Specific.(ZC)........00000000
Hardware Location Code......U78CB.001.WZS00ZS-P1-C9-T1
"""
# Solaris
FCINFO_OUTPUT = """
HBA Port WWN: 10000090fa1658de
Port Mode: Initiator
Port ID: 30100
OS Device Name: /dev/cfg/c13
Manufacturer: Emulex
Model: LPe12002-S
Firmware Version: LPe12002-S 2.01a12
FCode/BIOS Version: Boot:5.03a0 Fcode:3.01a1
Serial Number: 4925381+13090001ER
Driver Name: emlxs
Driver Version: 3.3.00.1 (2018.01.05.16.30)
Type: N-port
State: online
Supported Speeds: 2Gb 4Gb 8Gb
Current Speed: 8Gb
Node WWN: 20000090fa1658de
NPIV Not Supported
"""
IOSCAN_OUT = """
Class I H/W Path Driver S/W State H/W Type Description
==================================================================
fc 0 2/0/10/1/0 fcd CLAIMED INTERFACE HP AB379-60101 4Gb Dual Port PCI/PCI-X Fibre Channel Adapter (FC Port 1)
/dev/fcd0
"""
FCMSUTIL_OUT = """
Vendor ID is = 0x1077
Device ID is = 0x2422
PCI Sub-system Vendor ID is = 0x103C
PCI Sub-system ID is = 0x12D7
PCI Mode = PCI-X 133 MHz
ISP Code version = 5.4.0
ISP Chip version = 3
Topology = PTTOPT_FABRIC
Link Speed = 4Gb
Local N_Port_id is = 0x010300
Previous N_Port_id is = None
N_Port Node World Wide Name = 0x50060b00006975ed
N_Port Port World Wide Name = 0x50060b00006975ec
Switch Port World Wide Name = 0x200300051e046c0f
Switch Node World Wide Name = 0x100000051e046c0f
N_Port Symbolic Port Name = server1_fcd0
N_Port Symbolic Node Name = server1_HP-UX_B.11.31
Driver state = ONLINE
Hardware Path is = 2/0/10/1/0
Maximum Frame Size = 2048
Driver-Firmware Dump Available = NO
Driver-Firmware Dump Timestamp = N/A
TYPE = PFC
NPIV Supported = YES
Driver Version = @(#) fcd B.11.31.1103 Dec 6 2010
"""
def mock_get_bin_path(cmd, required=False, opt_dirs=None):
    """Map known FC tool names to canned absolute paths; None for unknown tools."""
    known_tools = {
        'lsdev': '/usr/sbin/lsdev',
        'lscfg': '/usr/sbin/lscfg',
        'fcinfo': '/usr/sbin/fcinfo',
        'ioscan': '/usr/bin/ioscan',
        'fcmsutil': '/opt/fcms/bin/fcmsutil',
    }
    return known_tools.get(cmd)
@pytest.mark.parametrize(
    ("test_input", "expected"),
    [
        pytest.param(
            {
                "platform": "aix6",
                # Two commands run in order: lsdev output first, then lscfg.
                "mock_run_command": [(0, LSDEV_OUTPUT, ""), (0, LSCFG_OUTPUT, "")],
            },
            ["10000090FA551508"],
            id="aix6",
        ),
        pytest.param(
            {
                "platform": "sunos5",
                # Solaris needs only the single fcinfo invocation.
                "mock_run_command": [
                    (0, FCINFO_OUTPUT, ""),
                ],
            },
            ["10000090fa1658de"],
            id="sunos5",
        ),
        pytest.param(
            {
                "platform": "hp-ux11",
                # ioscan output first, then fcmsutil for the found device.
                "mock_run_command": [(0, IOSCAN_OUT, ""), (0, FCMSUTIL_OUT, "")],
            },
            ["0x50060b00006975ec"],
            id="hp-ux11",
        ),
    ],
)
def test_get_fc_wwn_info(mocker, test_input, expected):
    """Collector extracts the FC initiator WWN from per-platform tool output."""
    module = mocker.MagicMock()
    inst = fc_wwn.FcWwnInitiatorFactCollector()
    # The collector branches on sys.platform, so fake it per test case.
    mocker.patch("sys.platform", test_input["platform"])
    mocker.patch.object(module, "get_bin_path", side_effect=mock_get_bin_path)
    mocker.patch.object(
        module, "run_command", side_effect=test_input["mock_run_command"]
    )
    wwn_expected = {"fibre_channel_wwn": expected}
    assert wwn_expected == inst.collect(module=module)
| 5,041
|
Python
|
.py
| 130
| 28.523077
| 128
| 0.53543
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,922
|
test_iscsi_get_initiator.py
|
ansible_ansible/test/units/module_utils/facts/network/test_iscsi_get_initiator.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.facts.network import iscsi
import pytest
# AIX # lsattr -E -l iscsi0
LSATTR_OUTPUT = """
disc_filename /etc/iscsi/targets Configuration file False
disc_policy file Discovery Policy True
initiator_name iqn.localhost.hostid.7f000002 iSCSI Initiator Name True
isns_srvnames auto iSNS Servers IP Addresses True
isns_srvports iSNS Servers Port Numbers True
max_targets 16 Maximum Targets Allowed True
num_cmd_elems 200 Maximum number of commands to queue to driver True
"""
# HP-UX # iscsiutil -l
ISCSIUTIL_OUTPUT = """
Initiator Name : iqn.2001-04.com.hp.stor:svcio
Initiator Alias :
Authentication Method : None
CHAP Method : CHAP_UNI
Initiator CHAP Name :
CHAP Secret :
NAS Hostname :
NAS Secret :
Radius Server Hostname :
Header Digest : None,CRC32C (default)
Data Digest : None,CRC32C (default)
SLP Scope list for iSLPD :
"""
@pytest.mark.parametrize(
    ("test_input", "expected"),
    [
        pytest.param(
            {
                "platform": "aix6",
                "iscsi_path": "/usr/sbin/lsattr",
                "return_command": LSATTR_OUTPUT
            },
            {"iscsi_iqn": "iqn.localhost.hostid.7f000002"},
            id="aix",
        ),
        pytest.param(
            {
                "platform": "hp-ux",
                "iscsi_path": "/opt/iscsi/bin/iscsiutil",
                "return_command": ISCSIUTIL_OUTPUT
            },
            # NOTE(review): the leading space in the expected IQN mirrors how the
            # collector currently splits the 'Initiator Name :' line -- presumably
            # intentional; confirm against IscsiInitiatorNetworkCollector.
            {"iscsi_iqn": " iqn.2001-04.com.hp.stor:svcio"},
            id="hpux",
        )
    ]
)
def test_get_iscsi_info(mocker, test_input, expected):
    """Collector extracts the iSCSI initiator IQN from per-platform tool output."""
    module = mocker.MagicMock()
    inst = iscsi.IscsiInitiatorNetworkCollector()
    # The collector branches on sys.platform, so fake it per test case.
    mocker.patch('sys.platform', test_input['platform'])
    mocker.patch.object(module, 'get_bin_path', return_value=test_input['iscsi_path'])
    mocker.patch.object(module, 'run_command', return_value=(0, test_input['return_command'], ''))
    assert expected == inst.collect(module=module)
| 2,547
|
Python
|
.py
| 61
| 36.360656
| 98
| 0.552462
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,923
|
test_generic_bsd.py
|
ansible_ansible/test/units/module_utils/facts/network/test_generic_bsd.py
|
# -*- coding: utf-8 -*-
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.module_utils.facts.network import generic_bsd
def mock_get_bin_path(command):
    """Return a fake path for ifconfig/route; None for anything else."""
    if command == 'ifconfig':
        return 'fake/ifconfig'
    if command == 'route':
        return 'fake/route'
    return None
NETBSD_IFCONFIG_A_OUT_7_1 = r"""
lo0: flags=8049<UP,LOOPBACK,RUNNING,MULTICAST> mtu 33624
inet 127.0.0.1 netmask 0xff000000
inet6 ::1 prefixlen 128
inet6 fe80::1%lo0 prefixlen 64 scopeid 0x1
re0: flags=8843<UP,BROADCAST,RUNNING,SIMPLEX,MULTICAST> mtu 1500
capabilities=3f80<TSO4,IP4CSUM_Rx,IP4CSUM_Tx,TCP4CSUM_Rx,TCP4CSUM_Tx>
capabilities=3f80<UDP4CSUM_Rx,UDP4CSUM_Tx>
enabled=0
ec_capabilities=3<VLAN_MTU,VLAN_HWTAGGING>
ec_enabled=0
address: 52:54:00:63:55:af
media: Ethernet autoselect (100baseTX full-duplex)
status: active
inet 192.168.122.205 netmask 0xffffff00 broadcast 192.168.122.255
inet6 fe80::5054:ff:fe63:55af%re0 prefixlen 64 scopeid 0x2
"""
NETBSD_IFCONFIG_A_OUT_POST_7_1 = r"""
lo0: flags=0x8049<UP,LOOPBACK,RUNNING,MULTICAST> mtu 33624
inet 127.0.0.1/8 flags 0x0
inet6 ::1/128 flags 0x20<NODAD>
inet6 fe80::1%lo0/64 flags 0x0 scopeid 0x1
re0: flags=0x8843<UP,BROADCAST,RUNNING,SIMPLEX,MULTICAST> mtu 1500
capabilities=3f80<TSO4,IP4CSUM_Rx,IP4CSUM_Tx,TCP4CSUM_Rx,TCP4CSUM_Tx>
capabilities=3f80<UDP4CSUM_Rx,UDP4CSUM_Tx>
enabled=0
ec_capabilities=3<VLAN_MTU,VLAN_HWTAGGING>
ec_enabled=0
address: 52:54:00:63:55:af
media: Ethernet autoselect (100baseTX full-duplex)
status: active
inet 192.168.122.205/24 broadcast 192.168.122.255 flags 0x0
inet6 fe80::5054:ff:fe63:55af%re0/64 flags 0x0 scopeid 0x2
"""
NETBSD_EXPECTED = {'all_ipv4_addresses': ['192.168.122.205'],
'all_ipv6_addresses': ['fe80::5054:ff:fe63:55af%re0'],
'default_ipv4': {},
'default_ipv6': {},
'interfaces': ['lo0', 're0'],
'lo0': {'device': 'lo0',
'flags': ['UP', 'LOOPBACK', 'RUNNING', 'MULTICAST'],
'ipv4': [{'address': '127.0.0.1',
'broadcast': '127.255.255.255',
'netmask': '255.0.0.0',
'network': '127.0.0.0'}],
'ipv6': [{'address': '::1', 'prefix': '128'},
{'address': 'fe80::1%lo0', 'prefix': '64', 'scope': '0x1'}],
'macaddress': 'unknown',
'mtu': '33624',
'type': 'loopback'},
're0': {'device': 're0',
'flags': ['UP', 'BROADCAST', 'RUNNING', 'SIMPLEX', 'MULTICAST'],
'ipv4': [{'address': '192.168.122.205',
'broadcast': '192.168.122.255',
'netmask': '255.255.255.0',
'network': '192.168.122.0'}],
'ipv6': [{'address': 'fe80::5054:ff:fe63:55af%re0',
'prefix': '64',
'scope': '0x2'}],
'macaddress': 'unknown',
'media': 'Ethernet',
'media_options': [],
'media_select': 'autoselect',
'media_type': '100baseTX',
'mtu': '1500',
'status': 'active',
'type': 'ether'}}
@pytest.mark.parametrize(
    ("test_input", "expected"),
    [
        pytest.param(
            {
                # Three run_command results in call order; the first two are
                # placeholder output (presumably the route lookups -- confirm
                # against GenericBsdIfconfigNetwork.populate), the third is
                # the `ifconfig` output actually under test.
                "mock_run_command": [
                    (0, "Foo", ""),
                    (0, "Foo", ""),
                    (0, NETBSD_IFCONFIG_A_OUT_7_1, ""),
                ],
            },
            NETBSD_EXPECTED,
            id="old-ifconfig",
        ),
        pytest.param(
            {
                "mock_run_command": [
                    (0, "Foo", ""),
                    (0, "Foo", ""),
                    (0, NETBSD_IFCONFIG_A_OUT_POST_7_1, ""),
                ],
            },
            NETBSD_EXPECTED,
            id="post-7-1-ifconfig",
        ),
    ],
)
def test_generic_bsd_ifconfig(mocker, test_input, expected):
    """Both pre- and post-7.1 NetBSD ifconfig formats populate identical facts."""
    module = mocker.MagicMock()
    mocker.patch.object(module, "get_bin_path", side_effect=mock_get_bin_path)
    mocker.patch.object(
        module, "run_command", side_effect=test_input["mock_run_command"]
    )
    bsd_net = generic_bsd.GenericBsdIfconfigNetwork(module)
    res = bsd_net.populate()
    assert res == expected
def test_compare_old_new_ifconfig(mocker):
    """The pre-7.1 and post-7.1 NetBSD ifconfig formats must parse identically.

    The original test duplicated the whole module/mock setup for each format;
    the shared setup now lives in one local helper so the two runs cannot
    drift apart.
    """
    def populate(ifconfig_output):
        # Build a fresh mocked module whose third run_command result is the
        # given ifconfig output (the first two feed earlier lookups), then
        # run populate() on it.
        module = mocker.MagicMock()
        mocker.patch.object(module, "get_bin_path", side_effect=mock_get_bin_path)
        mocker.patch.object(
            module,
            "run_command",
            side_effect=[
                (0, "Foo", ""),
                (0, "Foo", ""),
                (0, ifconfig_output, ""),
            ],
        )
        return generic_bsd.GenericBsdIfconfigNetwork(module).populate()

    old_res = populate(NETBSD_IFCONFIG_A_OUT_7_1)
    new_res = populate(NETBSD_IFCONFIG_A_OUT_POST_7_1)
    assert old_res == new_res
@pytest.mark.parametrize(
    ("test_input", "expected"),
    [
        pytest.param(
            # Plain interface line: hex netmask plus explicit broadcast.
            "inet 192.168.7.113 netmask 0xffffff00 broadcast 192.168.7.255",
            (
                {
                    'ipv4': [
                        {
                            'address': '192.168.7.113',
                            'netmask': '255.255.255.0',
                            'network': '192.168.7.0',
                            'broadcast': '192.168.7.255',
                        }
                    ]
                },
                {'all_ipv4_addresses': ['192.168.7.113']},
            ),
            id="ifconfig-output-1",
        ),
        pytest.param(
            # Point-to-point ('-->') line without a broadcast: network and
            # broadcast must be derived from the netmask.
            "inet 10.109.188.206 --> 10.109.188.206 netmask 0xffffe000",
            (
                {
                    'ipv4': [
                        {
                            'address': '10.109.188.206',
                            'netmask': '255.255.224.0',
                            'network': '10.109.160.0',
                            'broadcast': '10.109.191.255',
                        }
                    ]
                },
                {'all_ipv4_addresses': ['10.109.188.206']},
            ),
            id="ifconfig-output-2",
        ),
    ],
)
def test_ensure_correct_netmask_parsing(test_input, expected):
    """parse_inet_line() converts hex netmasks and fills network/broadcast.

    `expected` is a (current_if, ips) pair of the mutated output dicts.
    """
    # parse_inet_line() is exercised without a module; passing None here works
    # today -- TODO confirm the constructor never grows module access.
    n = generic_bsd.GenericBsdIfconfigNetwork(None)
    words = test_input.split()
    current_if = {"ipv4": []}
    ips = {"all_ipv4_addresses": []}
    n.parse_inet_line(words, current_if, ips)
    assert current_if == expected[0]
    assert ips == expected[1]
| 7,616
|
Python
|
.py
| 190
| 26.610526
| 96
| 0.485896
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,924
|
test_locally_reachable_ips.py
|
ansible_ansible/test/units/module_utils/facts/network/test_locally_reachable_ips.py
|
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.facts.network import linux
# ip -4 route show table local
IP4_ROUTE_SHOW_LOCAL = """
broadcast 127.0.0.0 dev lo proto kernel scope link src 127.0.0.1
local 127.0.0.0/8 dev lo proto kernel scope host src 127.0.0.1
local 127.0.0.1 dev lo proto kernel scope host src 127.0.0.1
broadcast 127.255.255.255 dev lo proto kernel scope link src 127.0.0.1
local 192.168.1.0/24 dev lo scope host
"""
# ip -6 route show table local
IP6_ROUTE_SHOW_LOCAL = """
local ::1 dev lo proto kernel metric 0 pref medium
local 2a02:123:3:1::e dev enp94s0f0np0 proto kernel metric 0 pref medium
local 2a02:123:15::/48 dev lo metric 1024 pref medium
local 2a02:123:16::/48 dev lo metric 1024 pref medium
local fe80::2eea:7fff:feca:fe68 dev enp94s0f0np0 proto kernel metric 0 pref medium
multicast ff00::/8 dev enp94s0f0np0 proto kernel metric 256 pref medium
"""
# Hash returned by get_locally_reachable_ips()
IP_ROUTE_SHOW_LOCAL_EXPECTED = {
'ipv4': [
'127.0.0.0/8',
'127.0.0.1',
'192.168.1.0/24'
],
'ipv6': [
'::1',
'2a02:123:3:1::e',
'2a02:123:15::/48',
'2a02:123:16::/48',
'fe80::2eea:7fff:feca:fe68'
]
}
def mock_get_bin_path(command):
    """Pretend only the `ip` tool is installed."""
    return 'fake/ip' if command == 'ip' else None
def test_linux_local_routes(mocker):
    """get_locally_reachable_ips() merges the v4 and v6 local route tables."""
    module = mocker.MagicMock()
    mocker.patch.object(module, "get_bin_path", side_effect=mock_get_bin_path)
    # First call returns the IPv4 local table, second the IPv6 one.
    mocker.patch.object(
        module,
        "run_command",
        side_effect=[(0, IP4_ROUTE_SHOW_LOCAL, ""), (0, IP6_ROUTE_SHOW_LOCAL, "")],
    )
    ip_path = mock_get_bin_path("ip")
    reachable = linux.LinuxNetwork(module).get_locally_reachable_ips(ip_path)
    assert reachable == IP_ROUTE_SHOW_LOCAL_EXPECTED
| 1,923
|
Python
|
.py
| 50
| 34.46
| 92
| 0.687601
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,925
|
test_pkg_mgr.py
|
ansible_ansible/test/units/module_utils/facts/system/test_pkg_mgr.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2023, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.facts.system.pkg_mgr import PkgMgrFactCollector
_FACTS = {"ansible_os_family": "RedHat"}
# NOTE pkg_mgr == "dnf" means the dnf module for the dnf 4 or below
def test_default_dnf_version_detection_fedora_dnf4(mocker):
    """/usr/bin/dnf resolving to dnf-3 is reported as classic 'dnf'."""
    mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3"))
    mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
    assert PkgMgrFactCollector().collect(collected_facts=_FACTS).get("pkg_mgr") == "dnf"


def test_default_dnf_version_detection_fedora_dnf5(mocker):
    """/usr/bin/dnf resolving to dnf5 is reported as 'dnf5'."""
    mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf5"))
    mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf5"}.get(p, p))
    assert PkgMgrFactCollector().collect(collected_facts=_FACTS).get("pkg_mgr") == "dnf5"


def test_default_dnf_version_detection_fedora_dnf4_both_installed(mocker):
    """With dnf-3 and dnf5 both present, the /usr/bin/dnf symlink target wins."""
    mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf", "/usr/bin/dnf-3", "/usr/bin/dnf5"))
    mocker.patch("os.path.realpath", lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3"}.get(p, p))
    assert PkgMgrFactCollector().collect(collected_facts=_FACTS).get("pkg_mgr") == "dnf"


def test_default_dnf_version_detection_fedora_dnf4_microdnf5_installed(mocker):
    """A dnf5-backed microdnf does not override /usr/bin/dnf pointing at dnf-3."""
    mocker.patch(
        "os.path.exists",
        lambda p: p in ("/usr/bin/dnf", "/usr/bin/microdnf", "/usr/bin/dnf-3", "/usr/bin/dnf5")
    )
    mocker.patch(
        "os.path.realpath",
        lambda p: {"/usr/bin/dnf": "/usr/bin/dnf-3", "/usr/bin/microdnf": "/usr/bin/dnf5"}.get(p, p)
    )
    assert PkgMgrFactCollector().collect(collected_facts=_FACTS).get("pkg_mgr") == "dnf"


def test_default_dnf_version_detection_fedora_dnf4_microdnf(mocker):
    """Only microdnf installed (no realpath redirection) falls back to 'dnf'."""
    mocker.patch("os.path.exists", lambda p: p == "/usr/bin/microdnf")
    assert PkgMgrFactCollector().collect(collected_facts=_FACTS).get("pkg_mgr") == "dnf"


def test_default_dnf_version_detection_fedora_dnf5_microdnf(mocker):
    """microdnf resolving to dnf5 is reported as 'dnf5'."""
    mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/microdnf", "/usr/bin/dnf5"))
    mocker.patch("os.path.realpath", lambda p: {"/usr/bin/microdnf": "/usr/bin/dnf5"}.get(p, p))
    assert PkgMgrFactCollector().collect(collected_facts=_FACTS).get("pkg_mgr") == "dnf5"


def test_default_dnf_version_detection_fedora_no_default(mocker):
    """Versioned binaries without a /usr/bin/dnf entry point yield 'unknown'."""
    mocker.patch("os.path.exists", lambda p: p in ("/usr/bin/dnf-3", "/usr/bin/dnf5"))
    assert PkgMgrFactCollector().collect(collected_facts=_FACTS).get("pkg_mgr") == "unknown"
| 2,698
|
Python
|
.py
| 39
| 64.692308
| 102
| 0.688897
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,926
|
test_user.py
|
ansible_ansible/test/units/module_utils/facts/system/test_user.py
|
# unit tests for ansible system lsb fact collectors
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from ansible.module_utils.facts.system.user import UserFactCollector
import os
def test_logname():
    """ Test if ``UserFactCollector`` still works with LOGNAME set """
    collector = UserFactCollector()

    unmodified_facts = collector.collect()
    # Point LOGNAME at a user that cannot exist so the collector has to fall
    # back to the real pw entry. The original test never restored the
    # variable, leaking state into every test that runs afterwards; save and
    # restore it so the process environment is left untouched.
    original_logname = os.environ.get("LOGNAME")
    os.environ["LOGNAME"] = "NONEXISTINGUSERDONTEXISTPLEASE"
    try:
        modified_facts = collector.collect()
    finally:
        if original_logname is None:
            os.environ.pop("LOGNAME", None)
        else:
            os.environ["LOGNAME"] = original_logname

    # Set logname should be different to the real name.
    assert unmodified_facts['user_id'] != modified_facts['user_id']
    # Actual UID is the same.
    assert unmodified_facts['user_uid'] == modified_facts['user_uid']
| 1,404
|
Python
|
.py
| 31
| 42.645161
| 70
| 0.75183
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,927
|
test_lsb.py
|
ansible_ansible/test/units/module_utils/facts/system/test_lsb.py
|
# unit tests for ansible system lsb fact collectors
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from unittest.mock import Mock, patch
from .. base import BaseFactsTest
from ansible.module_utils.facts.system.lsb import LSBFactCollector
lsb_release_a_fedora_output = """
LSB Version: :core-4.1-amd64:core-4.1-noarch:cxx-4.1-amd64:cxx-4.1-noarch:desktop-4.1-amd64:desktop-4.1-noarch:languages-4.1-amd64:languages-4.1-noarch:printing-4.1-amd64:printing-4.1-noarch
Distributor ID: Fedora
Description: Fedora release 25 (Twenty Five)
Release: 25
Codename: TwentyFive
""" # noqa
# FIXME: a
etc_lsb_release_ubuntu14 = """DISTRIB_ID=Ubuntu
DISTRIB_RELEASE=14.04
DISTRIB_CODENAME=trusty
DISTRIB_DESCRIPTION="Ubuntu 14.04.3 LTS"
"""
etc_lsb_release_no_decimal = """DISTRIB_ID=AwesomeOS
DISTRIB_RELEASE=11
DISTRIB_CODENAME=stonehenge
DISTRIB_DESCRIPTION="AwesomeÖS 11"
"""
class TestLSBFacts(BaseFactsTest):
    """Exercise LSBFactCollector via lsb_release output and /etc/lsb-release files."""
    __test__ = True
    gather_subset = ['!all', 'lsb']
    valid_subsets = ['lsb']
    fact_namespace = 'ansible_lsb'
    collector_class = LSBFactCollector

    def _mock_module(self):
        # Module stub whose lsb_release binary "exists" and emits Fedora output.
        mock_module = Mock()
        mock_module.params = {'gather_subset': self.gather_subset,
                              'gather_timeout': 10,
                              'filter': '*'}
        mock_module.get_bin_path = Mock(return_value='/usr/bin/lsb_release')
        mock_module.run_command = Mock(return_value=(0, lsb_release_a_fedora_output, ''))
        return mock_module

    def test_lsb_release_bin(self):
        """Facts are parsed from `lsb_release -a` output when the binary exists."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict['lsb']['release'], '25')
        self.assertEqual(facts_dict['lsb']['id'], 'Fedora')
        self.assertEqual(facts_dict['lsb']['description'], 'Fedora release 25 (Twenty Five)')
        self.assertEqual(facts_dict['lsb']['codename'], 'TwentyFive')
        self.assertEqual(facts_dict['lsb']['major_release'], '25')

    def test_etc_lsb_release(self):
        """Without the binary, facts come from parsing /etc/lsb-release lines."""
        module = self._mock_module()
        module.get_bin_path = Mock(return_value=None)
        with patch('ansible.module_utils.facts.system.lsb.os.path.exists',
                   return_value=True):
            with patch('ansible.module_utils.facts.system.lsb.get_file_lines',
                       return_value=etc_lsb_release_ubuntu14.splitlines()):
                fact_collector = self.collector_class()
                facts_dict = fact_collector.collect(module=module)
                self.assertIsInstance(facts_dict, dict)
                self.assertEqual(facts_dict['lsb']['release'], '14.04')
                self.assertEqual(facts_dict['lsb']['id'], 'Ubuntu')
                self.assertEqual(facts_dict['lsb']['description'], 'Ubuntu 14.04.3 LTS')
                self.assertEqual(facts_dict['lsb']['codename'], 'trusty')

    def test_etc_lsb_release_no_decimal_release(self):
        """A release value without a decimal point is handled (major == release)."""
        module = self._mock_module()
        module.get_bin_path = Mock(return_value=None)
        with patch('ansible.module_utils.facts.system.lsb.os.path.exists',
                   return_value=True):
            with patch('ansible.module_utils.facts.system.lsb.get_file_lines',
                       return_value=etc_lsb_release_no_decimal.splitlines()):
                fact_collector = self.collector_class()
                facts_dict = fact_collector.collect(module=module)
                self.assertIsInstance(facts_dict, dict)
                self.assertEqual(facts_dict['lsb']['release'], '11')
                self.assertEqual(facts_dict['lsb']['id'], 'AwesomeOS')
                self.assertEqual(facts_dict['lsb']['description'], 'AwesomeÖS 11')
                self.assertEqual(facts_dict['lsb']['codename'], 'stonehenge')
| 4,401
|
Python
|
.py
| 90
| 42.055556
| 190
| 0.677382
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,928
|
test_fips.py
|
ansible_ansible/test/units/module_utils/facts/system/test_fips.py
|
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.module_utils.facts.system.fips import FipsFactCollector
@pytest.mark.parametrize(("return_value", "expected"), [('1', True), ('0', False)])
def test_fips(mocker, return_value, expected):
    """The FIPS flag file content ('1'/'0') maps to a boolean `fips` fact."""
    mocker.patch('ansible.module_utils.facts.system.fips.get_file_content', return_value=return_value)
    fips_mgr = FipsFactCollector().collect()
    # `is` also pins the type: the fact must be a real bool, not truthy str.
    assert fips_mgr['fips'] is expected
| 584
|
Python
|
.py
| 10
| 55.7
| 102
| 0.759227
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,929
|
test_cmdline.py
|
ansible_ansible/test/units/module_utils/facts/system/test_cmdline.py
|
# unit tests for ansible system cmdline fact collectors
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.module_utils.facts.system.cmdline import CmdLineFactCollector
test_data = [
(
"crashkernel=auto rd.lvm.lv=fedora_test-elementary-os/root rd.lvm.lv=fedora_test-elementary-os/swap rhgb quiet",
{
'crashkernel': 'auto',
'quiet': True,
'rd.lvm.lv': [
'fedora_test-elementary-os/root',
'fedora_test-elementary-os/swap',
],
'rhgb': True
}
),
(
"root=/dev/mapper/vg_ssd-root ro rd.lvm.lv=fedora_xenon/root rd.lvm.lv=fedora_xenon/swap rhgb quiet "
"resume=/dev/mapper/fedora_xenon-swap crashkernel=128M zswap.enabled=1",
{
'crashkernel': '128M',
'quiet': True,
'rd.lvm.lv': [
'fedora_xenon/root',
'fedora_xenon/swap'
],
'resume': '/dev/mapper/fedora_xenon-swap',
'rhgb': True,
'ro': True,
'root': '/dev/mapper/vg_ssd-root',
'zswap.enabled': '1'
}
),
(
"rhgb",
{
"rhgb": True
}
),
(
"root=/dev/mapper/vg_ssd-root",
{
'root': '/dev/mapper/vg_ssd-root',
}
),
(
"",
{},
)
]
test_ids = ['lvm_1', 'lvm_2', 'single_without_equal_sign', 'single_with_equal_sign', 'blank_cmdline']
@pytest.mark.parametrize("cmdline, cmdline_dict", test_data, ids=test_ids)
def test_cmd_line_factor(cmdline, cmdline_dict):
    """_parse_proc_cmdline_facts() turns a kernel cmdline string into a dict.

    Bare flags become True, repeated keys collect into a list (see test_data).
    """
    cmdline_facter = CmdLineFactCollector()
    parsed_cmdline = cmdline_facter._parse_proc_cmdline_facts(data=cmdline)
    assert parsed_cmdline == cmdline_dict
| 1,945
|
Python
|
.py
| 60
| 24.183333
| 120
| 0.564662
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,930
|
conftest.py
|
ansible_ansible/test/units/module_utils/facts/system/distribution/conftest.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from unittest.mock import Mock
@pytest.fixture
def mock_module():
    """Return a Mock standing in for AnsibleModule with default gather params
    and a get_bin_path() that finds nothing."""
    module = Mock()
    module.params = {
        'gather_subset': ['all'],
        'gather_timeout': 5,
        'filter': '*',
    }
    module.get_bin_path = Mock(return_value=None)
    return module
| 518
|
Python
|
.py
| 14
| 30.714286
| 92
| 0.636546
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,931
|
test_parse_distribution_file_ClearLinux.py
|
ansible_ansible/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_ClearLinux.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import pytest
from ansible.module_utils.facts.system.distribution import DistributionFiles
@pytest.fixture
def test_input():
    """Base keyword arguments for parse_distribution_file_ClearLinux();
    the 'data' key is filled in by each test."""
    return {
        'name': 'Clearlinux',
        'path': '/usr/lib/os-release',
        'collected_facts': None,
    }
def test_parse_distribution_file_clear_linux(mock_module, test_input):
    """The ClearLinux fixture file must be recognized and fully parsed."""
    fixture = os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files/ClearLinux')
    with open(fixture) as file:
        test_input['data'] = file.read()

    expected = (
        True,
        {
            'distribution': 'Clear Linux OS',
            'distribution_major_version': '28120',
            'distribution_release': 'clear-linux-os',
            'distribution_version': '28120',
        },
    )
    distribution = DistributionFiles(module=mock_module())
    assert distribution.parse_distribution_file_ClearLinux(**test_input) == expected
@pytest.mark.parametrize('distro_file', ('CoreOS', 'LinuxMint'))
def test_parse_distribution_file_clear_linux_no_match(mock_module, distro_file, test_input):
    """
    Test against data from Linux Mint and CoreOS to ensure we do not get a reported
    match from parse_distribution_file_ClearLinux()
    """
    path = os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)
    with open(path) as file:
        test_input['data'] = file.read()

    distribution = DistributionFiles(module=mock_module())
    assert distribution.parse_distribution_file_ClearLinux(**test_input) == (False, {})
| 1,706
|
Python
|
.py
| 39
| 38
| 113
| 0.680169
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,932
|
test_distribution_files.py
|
ansible_ansible/test/units/module_utils/facts/system/distribution/test_distribution_files.py
|
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import tempfile
from ansible.module_utils.facts.system.distribution import DistributionFiles
def test_distribution_files(mock_module):
    """_get_dist_file_content() on an empty directory yields a falsy file and None content."""
    collector = DistributionFiles(mock_module)
    with tempfile.TemporaryDirectory() as empty_dir:
        dist_file, dist_file_content = collector._get_dist_file_content(empty_dir)
    assert not dist_file
    assert dist_file_content is None
| 539
|
Python
|
.py
| 11
| 45.636364
| 92
| 0.783525
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,933
|
test_parse_distribution_file_Slackware.py
|
ansible_ansible/test/units/module_utils/facts/system/distribution/test_parse_distribution_file_Slackware.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import pytest
from ansible.module_utils.facts.system.distribution import DistributionFiles
@pytest.mark.parametrize(
    ('distro_file', 'expected_version'),
    (
        ('Slackware', '14.1'),
        ('SlackwareCurrent', '14.2+'),
    )
)
def test_parse_distribution_file_slackware(mock_module, distro_file, expected_version):
    """Both the stable and -current Slackware fixture files must parse to the right version."""
    path = os.path.join(os.path.dirname(__file__), '../../fixtures/distribution_files', distro_file)
    with open(path) as file:
        data = file.read()

    kwargs = {
        'name': 'Slackware',
        'data': data,
        'path': '/etc/os-release',
        'collected_facts': None,
    }
    expected = (
        True,
        {
            'distribution': 'Slackware',
            'distribution_version': expected_version,
        },
    )
    distribution = DistributionFiles(module=mock_module())
    assert distribution.parse_distribution_file_Slackware(**kwargs) == expected
| 1,111
|
Python
|
.py
| 32
| 29
| 113
| 0.648321
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,934
|
test_distribution_sles4sap.py
|
ansible_ansible/test/units/module_utils/facts/system/distribution/test_distribution_sles4sap.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.module_utils.facts.system.distribution import DistributionFiles
@pytest.mark.parametrize('realpath', ('SUSE_SLES_SAP.prod', 'SLES_SAP.prod'))
def test_distribution_sles4sap_suse_sles_sap(mock_module, mocker, realpath):
    """A products.d symlink resolving to a *SLES_SAP.prod file must report SLES_SAP."""
    mocker.patch('os.path.islink', return_value=True)
    mocker.patch('os.path.realpath', return_value='/etc/products.d/' + realpath)

    kwargs = {
        'name': 'SUSE',
        'path': '',
        'data': 'suse',
        'collected_facts': None,
    }
    distribution = DistributionFiles(module=mock_module())
    assert distribution.parse_distribution_file_SUSE(**kwargs) == (True, {'distribution': 'SLES_SAP'})
| 946
|
Python
|
.py
| 24
| 33.916667
| 92
| 0.66849
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,935
|
test_distribution_version.py
|
ansible_ansible/test/units/module_utils/facts/system/distribution/test_distribution_version.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import glob
import json
import os
import pytest
from itertools import product
import builtins
# the module we are actually testing (sort of)
from ansible.module_utils.facts.system.distribution import DistributionFactCollector
# to generate the testcase data, you can use the script gen_distribution_version_testcase.py in hacking/tests
# Load every distribution fixture once at import time; each JSON file holds one
# complete testcase (generated by hacking/tests/gen_distribution_version_testcase.py).
TESTSETS = []
for _datafile in glob.glob(os.path.join(os.path.dirname(__file__), 'fixtures/*.json')):
    with open(os.path.join(os.path.dirname(__file__), '%s' % _datafile)) as _f:
        TESTSETS.append(json.loads(_f.read()))
@pytest.mark.parametrize("stdin, testcase", product([{}], TESTSETS), ids=lambda x: x.get('name'), indirect=['stdin'])
def test_distribution_version(am, mocker, testcase):
"""tests the distribution parsing code of the Facts class
testsets have
* a name (for output/debugging only)
* input files that are faked
* those should be complete and also include "irrelevant" files that might be mistaken as coming from other distributions
* all files that are not listed here are assumed to not exist at all
* the output of ansible.module_utils.distro.linux_distribution() [called platform.dist() for historical reasons]
* results for the ansible variables distribution* and os_family
"""
# prepare some mock functions to get the testdata in
def mock_get_file_content(fname, default=None, strip=True):
"""give fake content if it exists, otherwise pretend the file is empty"""
data = default
if fname in testcase['input']:
# for debugging
print('faked %s for %s' % (fname, testcase['name']))
data = testcase['input'][fname].strip()
if strip and data is not None:
data = data.strip()
return data
def mock_get_file_lines(fname, strip=True):
"""give fake lines if file exists, otherwise return empty list"""
data = mock_get_file_content(fname=fname, strip=strip)
if data:
return [data]
return []
def mock_get_uname(am, flags):
if '-v' in flags:
return testcase.get('uname_v', None)
elif '-r' in flags:
return testcase.get('uname_r', None)
else:
return None
def mock_file_exists(fname, allow_empty=False):
if fname not in testcase['input']:
return False
if allow_empty:
return True
return bool(len(testcase['input'][fname]))
def mock_platform_system():
return testcase.get('platform.system', 'Linux')
def mock_platform_release():
return testcase.get('platform.release', '')
def mock_platform_version():
return testcase.get('platform.version', '')
def mock_distro_name():
return testcase['distro']['name']
def mock_distro_id():
return testcase['distro']['id']
def mock_distro_version(best=False):
if best:
return testcase['distro']['version_best']
return testcase['distro']['version']
def mock_distro_codename():
return testcase['distro']['codename']
def mock_distro_os_release_info():
return testcase['distro']['os_release_info']
def mock_distro_lsb_release_info():
return testcase['distro']['lsb_release_info']
def mock_open(filename, mode='r'):
if filename in testcase['input']:
file_object = mocker.mock_open(read_data=testcase['input'][filename]).return_value
file_object.__iter__.return_value = testcase['input'][filename].splitlines(True)
else:
file_object = real_open(filename, mode)
return file_object
def mock_os_path_is_file(filename):
if filename in testcase['input']:
return True
return False
def mock_run_command_output(v, command):
ret = (0, '', '')
if 'command_output' in testcase:
ret = (0, testcase['command_output'].get(command, ''), '')
return ret
mocker.patch('ansible.module_utils.facts.system.distribution.get_file_content', mock_get_file_content)
mocker.patch('ansible.module_utils.facts.system.distribution.get_file_lines', mock_get_file_lines)
mocker.patch('ansible.module_utils.facts.system.distribution.get_uname', mock_get_uname)
mocker.patch('ansible.module_utils.facts.system.distribution._file_exists', mock_file_exists)
mocker.patch('ansible.module_utils.distro.name', mock_distro_name)
mocker.patch('ansible.module_utils.distro.id', mock_distro_id)
mocker.patch('ansible.module_utils.distro.version', mock_distro_version)
mocker.patch('ansible.module_utils.distro.codename', mock_distro_codename)
mocker.patch(
'ansible.module_utils.common.sys_info.distro.os_release_info',
mock_distro_os_release_info)
mocker.patch(
'ansible.module_utils.common.sys_info.distro.lsb_release_info',
mock_distro_lsb_release_info)
mocker.patch('os.path.isfile', mock_os_path_is_file)
mocker.patch('platform.system', mock_platform_system)
mocker.patch('platform.release', mock_platform_release)
mocker.patch('platform.version', mock_platform_version)
mocker.patch('ansible.module_utils.basic.AnsibleModule.run_command', mock_run_command_output)
real_open = builtins.open
mocker.patch.object(builtins, 'open', new=mock_open)
# run Facts()
distro_collector = DistributionFactCollector()
generated_facts = distro_collector.collect(am)
# compare with the expected output
# testcase['result'] has a list of variables and values it expects Facts() to set
for key, val in testcase['result'].items():
assert key in generated_facts
msg = 'Comparing value of %s on %s, should: %s, is: %s' %\
(key, testcase['name'], val, generated_facts[key])
assert generated_facts[key] == val, msg
| 6,078
|
Python
|
.py
| 125
| 41.48
| 126
| 0.67573
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,936
|
test_linux.py
|
ansible_ansible/test/units/module_utils/facts/virtual/test_linux.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.facts.virtual import linux
def mock_os_path_is_file_docker(filename):
    """Fake os.path.exists: only the Docker marker files are present."""
    return filename in ('/.dockerenv', '/.dockerinit')
def test_get_virtual_facts_docker(mocker):
    """Presence of /.dockerenv or /.dockerinit must be reported as a docker guest."""
    mocker.patch('os.path.exists', mock_os_path_is_file_docker)

    module = mocker.Mock()
    module.run_command.return_value = (0, '', '')
    facts = linux.LinuxVirtual(module).get_virtual_facts()

    assert facts == {
        'virtualization_role': 'guest',
        'virtualization_tech_host': set(),
        'virtualization_type': 'docker',
        'virtualization_tech_guest': {'docker', 'container'},
    }
def test_get_virtual_facts_bhyve(mocker):
    """With no virt markers on disk, run_command output 'BHYVE' must yield a bhyve guest."""
    mocker.patch('os.path.exists', return_value=False)
    mocker.patch('ansible.module_utils.facts.virtual.linux.get_file_content', return_value='')
    mocker.patch('ansible.module_utils.facts.virtual.linux.get_file_lines', return_value=[])

    module = mocker.Mock()
    module.run_command.return_value = (0, 'BHYVE\n', '')
    facts = linux.LinuxVirtual(module).get_virtual_facts()

    assert facts == {
        'virtualization_role': 'guest',
        'virtualization_tech_host': set(),
        'virtualization_type': 'bhyve',
        'virtualization_tech_guest': {'bhyve'},
    }
| 1,562
|
Python
|
.py
| 37
| 36.72973
| 94
| 0.674388
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,937
|
test_facter.py
|
ansible_ansible/test/units/module_utils/facts/other/test_facter.py
|
# unit tests for ansible other facter fact collector
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from unittest.mock import Mock, patch
from .. base import BaseFactsTest
from ansible.module_utils.facts.other.facter import FacterFactCollector
facter_json_output = """
{
"operatingsystemmajrelease": "25",
"hardwareisa": "x86_64",
"kernel": "Linux",
"path": "/home/testuser/src/ansible/bin:/home/testuser/perl5/bin:/home/testuser/perl5/bin:/home/testuser/bin:/home/testuser/.local/bin:/home/testuser/pythons/bin:/usr/lib64/qt-3.3/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/testuser/.cabal/bin:/home/testuser/gopath/bin:/home/testuser/.rvm/bin",
"memorysize": "15.36 GB",
"memoryfree": "4.88 GB",
"swapsize": "7.70 GB",
"swapfree": "6.75 GB",
"swapsize_mb": "7880.00",
"swapfree_mb": "6911.41",
"memorysize_mb": "15732.95",
"memoryfree_mb": "4997.68",
"lsbmajdistrelease": "25",
"macaddress": "02:42:ea:15:d8:84",
"id": "testuser",
"domain": "example.com",
"augeasversion": "1.7.0",
"os": {
"name": "Fedora",
"family": "RedHat",
"release": {
"major": "25",
"full": "25"
},
"lsb": {
"distcodename": "TwentyFive",
"distid": "Fedora",
"distdescription": "Fedora release 25 (Twenty Five)",
"release": ":core-4.1-amd64:core-4.1-noarch:cxx-4.1-amd64:cxx-4.1-noarch:desktop-4.1-amd64:desktop-4.1-noarch:languages-4.1-amd64:languages-4.1-noarch:printing-4.1-amd64:printing-4.1-noarch",
"distrelease": "25",
"majdistrelease": "25"
}
},
"processors": {
"models": [
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz"
],
"count": 8,
"physicalcount": 1
},
"architecture": "x86_64",
"hardwaremodel": "x86_64",
"operatingsystem": "Fedora",
"processor0": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processor1": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processor2": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processor3": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processor4": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processor5": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processor6": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processor7": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"processorcount": 8,
"uptime_seconds": 1558090,
"fqdn": "myhostname.example.com",
"rubyversion": "2.3.3",
"gid": "testuser",
"physicalprocessorcount": 1,
"netmask": "255.255.0.0",
"uniqueid": "a8c01301",
"uptime_days": 18,
"interfaces": "docker0,em1,lo,vethf20ff12,virbr0,virbr1,virbr0_nic,virbr1_nic,wlp4s0",
"ipaddress_docker0": "172.17.0.1",
"macaddress_docker0": "02:42:ea:15:d8:84",
"netmask_docker0": "255.255.0.0",
"mtu_docker0": 1500,
"macaddress_em1": "3c:97:0e:e9:28:8e",
"mtu_em1": 1500,
"ipaddress_lo": "127.0.0.1",
"netmask_lo": "255.0.0.0",
"mtu_lo": 65536,
"macaddress_vethf20ff12": "ae:6e:2b:1e:a1:31",
"mtu_vethf20ff12": 1500,
"ipaddress_virbr0": "192.168.137.1",
"macaddress_virbr0": "52:54:00:ce:82:5e",
"netmask_virbr0": "255.255.255.0",
"mtu_virbr0": 1500,
"ipaddress_virbr1": "192.168.121.1",
"macaddress_virbr1": "52:54:00:b4:68:a9",
"netmask_virbr1": "255.255.255.0",
"mtu_virbr1": 1500,
"macaddress_virbr0_nic": "52:54:00:ce:82:5e",
"mtu_virbr0_nic": 1500,
"macaddress_virbr1_nic": "52:54:00:b4:68:a9",
"mtu_virbr1_nic": 1500,
"ipaddress_wlp4s0": "192.168.1.19",
"macaddress_wlp4s0": "5c:51:4f:e6:a8:e3",
"netmask_wlp4s0": "255.255.255.0",
"mtu_wlp4s0": 1500,
"virtual": "physical",
"is_virtual": false,
"partitions": {
"sda2": {
"size": "499091456"
},
"sda1": {
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0",
"size": "1024000",
"mount": "/boot"
}
},
"lsbdistcodename": "TwentyFive",
"lsbrelease": ":core-4.1-amd64:core-4.1-noarch:cxx-4.1-amd64:cxx-4.1-noarch:desktop-4.1-amd64:desktop-4.1-noarch:languages-4.1-amd64:languages-4.1-noarch:printing-4.1-amd64:printing-4.1-noarch", # noqa
"filesystems": "btrfs,ext2,ext3,ext4,xfs",
"system_uptime": {
"seconds": 1558090,
"hours": 432,
"days": 18,
"uptime": "18 days"
},
"ipaddress": "172.17.0.1",
"timezone": "EDT",
"ps": "ps -ef",
"rubyplatform": "x86_64-linux",
"rubysitedir": "/usr/local/share/ruby/site_ruby",
"uptime": "18 days",
"lsbdistrelease": "25",
"operatingsystemrelease": "25",
"facterversion": "2.4.3",
"kernelrelease": "4.9.14-200.fc25.x86_64",
"lsbdistdescription": "Fedora release 25 (Twenty Five)",
"network_docker0": "172.17.0.0",
"network_lo": "127.0.0.0",
"network_virbr0": "192.168.137.0",
"network_virbr1": "192.168.121.0",
"network_wlp4s0": "192.168.1.0",
"lsbdistid": "Fedora",
"selinux": true,
"selinux_enforced": false,
"selinux_policyversion": "30",
"selinux_current_mode": "permissive",
"selinux_config_mode": "permissive",
"selinux_config_policy": "targeted",
"hostname": "myhostname",
"osfamily": "RedHat",
"kernelmajversion": "4.9",
"blockdevice_sr0_size": 1073741312,
"blockdevice_sr0_vendor": "MATSHITA",
"blockdevice_sr0_model": "DVD-RAM UJ8E2",
"blockdevice_sda_size": 256060514304,
"blockdevice_sda_vendor": "ATA",
"blockdevice_sda_model": "SAMSUNG MZ7TD256",
"blockdevices": "sda,sr0",
"uptime_hours": 432,
"kernelversion": "4.9.14"
}
"""
class TestFacterCollector(BaseFactsTest):
    """Exercise FacterFactCollector against canned facter JSON and failure modes."""

    __test__ = True
    gather_subset = ['!all', 'facter']
    valid_subsets = ['facter']
    fact_namespace = 'ansible_facter'
    collector_class = FacterFactCollector

    def _mock_module(self):
        """Build a module mock whose run_command returns the canned facter JSON."""
        module = Mock()
        module.params = {
            'gather_subset': self.gather_subset,
            'gather_timeout': 10,
            'filter': '*',
        }
        module.get_bin_path = Mock(return_value='/not/actually/facter')
        module.run_command = Mock(return_value=(0, facter_json_output, ''))
        return module

    @patch('ansible.module_utils.facts.other.facter.FacterFactCollector.get_facter_output')
    def test_bogus_json(self, mock_get_facter_output):
        """Unparseable facter output must yield an empty fact dict."""
        module = self._mock_module()
        mock_get_facter_output.return_value = '{'  # bogus json
        facts_dict = self.collector_class().collect(module=module)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict, {})

    @patch('ansible.module_utils.facts.other.facter.FacterFactCollector.run_facter')
    def test_facter_non_zero_return_code(self, mock_run_facter):
        """A facter run that exits non-zero must contribute no facts at all."""
        module = self._mock_module()
        mock_run_facter.return_value = (1, '{}', '')
        facts_dict = self.collector_class().collect(module=module)
        self.assertIsInstance(facts_dict, dict)
        # This assumes no 'facter' entry at all is correct
        self.assertNotIn('facter', facts_dict)
        self.assertEqual(facts_dict, {})
| 7,974
|
Python
|
.py
| 209
| 33.789474
| 313
| 0.654362
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,938
|
test_ohai.py
|
ansible_ansible/test/units/module_utils/facts/other/test_ohai.py
|
# unit tests for ansible ohai fact collector
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from unittest.mock import Mock, patch
from .. base import BaseFactsTest
from ansible.module_utils.facts.other.ohai import OhaiFactCollector
ohai_json_output = r"""
{
"kernel": {
"name": "Linux",
"release": "4.9.14-200.fc25.x86_64",
"version": "#1 SMP Mon Mar 13 19:26:40 UTC 2017",
"machine": "x86_64",
"processor": "x86_64",
"os": "GNU/Linux",
"modules": {
"binfmt_misc": {
"size": "20480",
"refcount": "1"
},
"veth": {
"size": "16384",
"refcount": "0"
},
"xfs": {
"size": "1200128",
"refcount": "1"
},
"xt_addrtype": {
"size": "16384",
"refcount": "2"
},
"br_netfilter": {
"size": "24576",
"refcount": "0"
},
"dm_thin_pool": {
"size": "65536",
"refcount": "2"
},
"dm_persistent_data": {
"size": "69632",
"refcount": "1"
},
"dm_bio_prison": {
"size": "16384",
"refcount": "1"
},
"libcrc32c": {
"size": "16384",
"refcount": "2"
},
"rfcomm": {
"size": "77824",
"refcount": "14",
"version": "1.11"
},
"fuse": {
"size": "102400",
"refcount": "3"
},
"ccm": {
"size": "20480",
"refcount": "2"
},
"xt_CHECKSUM": {
"size": "16384",
"refcount": "2"
},
"iptable_mangle": {
"size": "16384",
"refcount": "1"
},
"ipt_MASQUERADE": {
"size": "16384",
"refcount": "7"
},
"nf_nat_masquerade_ipv4": {
"size": "16384",
"refcount": "1"
},
"iptable_nat": {
"size": "16384",
"refcount": "1"
},
"nf_nat_ipv4": {
"size": "16384",
"refcount": "1"
},
"nf_nat": {
"size": "28672",
"refcount": "2"
},
"nf_conntrack_ipv4": {
"size": "16384",
"refcount": "4"
},
"nf_defrag_ipv4": {
"size": "16384",
"refcount": "1"
},
"xt_conntrack": {
"size": "16384",
"refcount": "3"
},
"nf_conntrack": {
"size": "106496",
"refcount": "5"
},
"ip6t_REJECT": {
"size": "16384",
"refcount": "2"
},
"nf_reject_ipv6": {
"size": "16384",
"refcount": "1"
},
"tun": {
"size": "28672",
"refcount": "4"
},
"bridge": {
"size": "135168",
"refcount": "1",
"version": "2.3"
},
"stp": {
"size": "16384",
"refcount": "1"
},
"llc": {
"size": "16384",
"refcount": "2"
},
"ebtable_filter": {
"size": "16384",
"refcount": "0"
},
"ebtables": {
"size": "36864",
"refcount": "1"
},
"ip6table_filter": {
"size": "16384",
"refcount": "1"
},
"ip6_tables": {
"size": "28672",
"refcount": "1"
},
"cmac": {
"size": "16384",
"refcount": "3"
},
"uhid": {
"size": "20480",
"refcount": "2"
},
"bnep": {
"size": "20480",
"refcount": "2",
"version": "1.3"
},
"btrfs": {
"size": "1056768",
"refcount": "1"
},
"xor": {
"size": "24576",
"refcount": "1"
},
"raid6_pq": {
"size": "106496",
"refcount": "1"
},
"loop": {
"size": "28672",
"refcount": "6"
},
"arc4": {
"size": "16384",
"refcount": "2"
},
"snd_hda_codec_hdmi": {
"size": "45056",
"refcount": "1"
},
"intel_rapl": {
"size": "20480",
"refcount": "0"
},
"x86_pkg_temp_thermal": {
"size": "16384",
"refcount": "0"
},
"intel_powerclamp": {
"size": "16384",
"refcount": "0"
},
"coretemp": {
"size": "16384",
"refcount": "0"
},
"kvm_intel": {
"size": "192512",
"refcount": "0"
},
"kvm": {
"size": "585728",
"refcount": "1"
},
"irqbypass": {
"size": "16384",
"refcount": "1"
},
"crct10dif_pclmul": {
"size": "16384",
"refcount": "0"
},
"crc32_pclmul": {
"size": "16384",
"refcount": "0"
},
"iTCO_wdt": {
"size": "16384",
"refcount": "0",
"version": "1.11"
},
"ghash_clmulni_intel": {
"size": "16384",
"refcount": "0"
},
"mei_wdt": {
"size": "16384",
"refcount": "0"
},
"iTCO_vendor_support": {
"size": "16384",
"refcount": "1",
"version": "1.04"
},
"iwlmvm": {
"size": "364544",
"refcount": "0"
},
"intel_cstate": {
"size": "16384",
"refcount": "0"
},
"uvcvideo": {
"size": "90112",
"refcount": "0",
"version": "1.1.1"
},
"videobuf2_vmalloc": {
"size": "16384",
"refcount": "1"
},
"intel_uncore": {
"size": "118784",
"refcount": "0"
},
"videobuf2_memops": {
"size": "16384",
"refcount": "1"
},
"videobuf2_v4l2": {
"size": "24576",
"refcount": "1"
},
"videobuf2_core": {
"size": "40960",
"refcount": "2"
},
"intel_rapl_perf": {
"size": "16384",
"refcount": "0"
},
"mac80211": {
"size": "749568",
"refcount": "1"
},
"videodev": {
"size": "172032",
"refcount": "3"
},
"snd_usb_audio": {
"size": "180224",
"refcount": "3"
},
"e1000e": {
"size": "249856",
"refcount": "0",
"version": "3.2.6-k"
}
}
},
"os": "linux",
"os_version": "4.9.14-200.fc25.x86_64",
"lsb": {
"id": "Fedora",
"description": "Fedora release 25 (Twenty Five)",
"release": "25",
"codename": "TwentyFive"
},
"platform": "fedora",
"platform_version": "25",
"platform_family": "fedora",
"packages": {
"ansible": {
"epoch": "0",
"version": "2.2.1.0",
"release": "1.fc25",
"installdate": "1486050042",
"arch": "noarch"
},
"python3": {
"epoch": "0",
"version": "3.5.3",
"release": "3.fc25",
"installdate": "1490025957",
"arch": "x86_64"
},
"kernel": {
"epoch": "0",
"version": "4.9.6",
"release": "200.fc25",
"installdate": "1486047522",
"arch": "x86_64"
},
"glibc": {
"epoch": "0",
"version": "2.24",
"release": "4.fc25",
"installdate": "1483402427",
"arch": "x86_64"
}
},
"chef_packages": {
ohai": {
"version": "13.0.0",
"ohai_root": "/home/some_user/.gem/ruby/gems/ohai-13.0.0/lib/ohai"
}
},
"dmi": {
"dmidecode_version": "3.0"
},
"uptime_seconds": 2509008,
"uptime": "29 days 00 hours 56 minutes 48 seconds",
"idletime_seconds": 19455087,
"idletime": "225 days 04 hours 11 minutes 27 seconds",
"memory": {
"swap": {
"cached": "262436kB",
"total": "8069116kB",
"free": "5154396kB"
},
"hugepages": {
"total": "0",
"free": "0",
"reserved": "0",
"surplus": "0"
},
"total": "16110540kB",
"free": "3825844kB",
"buffers": "377240kB",
"cached": "3710084kB",
"active": "8104320kB",
"inactive": "3192920kB",
"dirty": "812kB",
"writeback": "0kB",
"anon_pages": "7124992kB",
"mapped": "580700kB",
"slab": "622848kB",
"slab_reclaimable": "307300kB",
"slab_unreclaim": "315548kB",
"page_tables": "157572kB",
"nfs_unstable": "0kB",
"bounce": "0kB",
"commit_limit": "16124384kB",
"committed_as": "31345068kB",
"vmalloc_total": "34359738367kB",
"vmalloc_used": "0kB",
"vmalloc_chunk": "0kB",
"hugepage_size": "2048kB"
},
"filesystem": {
"by_device": {
"devtmpfs": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"mounts": [
"/dev"
]
},
"tmpfs": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"mounts": [
"/dev/shm",
"/run",
"/sys/fs/cgroup",
"/tmp",
"/run/user/0",
"/run/user/1000"
]
},
"/dev/mapper/fedora_host--186-root": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "12312331-3449-4a6c-8179-a1feb2bca6ce",
"mounts": [
"/",
"/var/lib/docker/devicemapper"
]
},
"/dev/sda1": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "12312311-ef40-4691-a3b6-438c3f9bc1c0",
"mounts": [
"/boot"
]
},
"/dev/mapper/fedora_host--186-home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"mounts": [
"/home"
]
},
"/dev/loop0": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"mounts": [
"/var/lib/machines"
]
},
"sysfs": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys"
]
},
"proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/proc"
]
},
"securityfs": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/sys/kernel/security"
]
},
"devpts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"mounts": [
"/dev/pts"
]
},
"cgroup": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"mounts": [
"/sys/fs/cgroup/systemd",
"/sys/fs/cgroup/devices",
"/sys/fs/cgroup/cpuset",
"/sys/fs/cgroup/perf_event",
"/sys/fs/cgroup/hugetlb",
"/sys/fs/cgroup/cpu,cpuacct",
"/sys/fs/cgroup/blkio",
"/sys/fs/cgroup/freezer",
"/sys/fs/cgroup/memory",
"/sys/fs/cgroup/pids",
"/sys/fs/cgroup/net_cls,net_prio"
]
},
"pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys/fs/pstore"
]
},
"configfs": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/config"
]
},
"selinuxfs": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/selinux"
]
},
"debugfs": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/sys/kernel/debug"
]
},
"hugetlbfs": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/hugepages"
]
},
"mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/mqueue"
]
},
"systemd-1": {
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/var/lib/machines.raw": {
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
],
"mounts": [
"/var/lib/machines"
]
},
"fusectl": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/fuse/connections"
]
},
"gvfsd-fuse": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"mounts": [
"/run/user/1000/gvfs"
]
},
"binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
"/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8"
]
},
"shm": {
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
],
"mounts": [
"/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm"
]
},
"nsfs": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"mounts": [
"/run/docker/netns/1ce89fd79f3d"
]
},
"tracefs": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/debug/tracing"
]
},
"/dev/loop1": {
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
]
},
"/dev/mapper/docker-253:1-1180487-pool": {
"mounts": [
]
},
"/dev/sr0": {
"mounts": [
]
},
"/dev/loop2": {
"mounts": [
]
},
"/dev/sda": {
"mounts": [
]
},
"/dev/sda2": {
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK",
"mounts": [
]
},
"/dev/mapper/fedora_host--186-swap": {
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d",
"mounts": [
]
}
},
"by_mountpoint": {
"/dev": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"devices": [
"devtmpfs"
]
},
"/dev/shm": {
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/run": {
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/sys/fs/cgroup": {
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
},
"/tmp": {
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/boot": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0",
"devices": [
"/dev/sda1"
]
},
"/home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"devices": [
"/dev/mapper/fedora_host--186-home"
]
},
"/var/lib/machines": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"devices": [
"/dev/loop0",
"/var/lib/machines.raw"
],
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"/run/user/0": {
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
],
"devices": [
"tmpfs"
]
},
"/run/user/1000": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"devices": [
"tmpfs"
]
},
"/sys": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"sysfs"
]
},
"/proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"proc"
]
},
"/sys/kernel/security": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"securityfs"
]
},
"/dev/pts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"devices": [
"devpts"
]
},
"/sys/fs/cgroup/systemd": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
],
"devices": [
"cgroup"
]
},
"/sys/fs/pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"pstore"
]
},
"/sys/fs/cgroup/devices": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpuset": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/perf_event": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/hugetlb": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpu,cpuacct": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/blkio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/freezer": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/memory": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/pids": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/net_cls,net_prio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"devices": [
"cgroup"
]
},
"/sys/kernel/config": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"configfs"
]
},
"/sys/fs/selinux": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"selinuxfs"
]
},
"/sys/kernel/debug": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"debugfs"
]
},
"/dev/hugepages": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"hugetlbfs"
]
},
"/dev/mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"mqueue"
]
},
"/proc/sys/fs/binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"systemd-1",
"binfmt_misc"
]
},
"/sys/fs/fuse/connections": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"fusectl"
]
},
"/run/user/1000/gvfs": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"devices": [
"gvfsd-fuse"
]
},
"/var/lib/docker/devicemapper": {
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
},
"/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"devices": [
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8"
]
},
"/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm": {
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
],
"devices": [
"shm"
]
},
"/run/docker/netns/1ce89fd79f3d": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"devices": [
"nsfs"
]
},
"/sys/kernel/debug/tracing": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"tracefs"
]
}
},
"by_pair": {
"devtmpfs,/dev": {
"device": "devtmpfs",
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"mount": "/dev",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
]
},
"tmpfs,/dev/shm": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"mount": "/dev/shm",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"tmpfs,/run": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"mount": "/run",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
]
},
"tmpfs,/sys/fs/cgroup": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"mount": "/sys/fs/cgroup",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
]
},
"/dev/mapper/fedora_host--186-root,/": {
"device": "/dev/mapper/fedora_host--186-root",
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"mount": "/",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"tmpfs,/tmp": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"mount": "/tmp",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"/dev/sda1,/boot": {
"device": "/dev/sda1",
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"mount": "/boot",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0"
},
"/dev/mapper/fedora_host--186-home,/home": {
"device": "/dev/mapper/fedora_host--186-home",
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"mount": "/home",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d"
},
"/dev/loop0,/var/lib/machines": {
"device": "/dev/loop0",
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390"
},
"tmpfs,/run/user/0": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"mount": "/run/user/0",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
]
},
"tmpfs,/run/user/1000": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"mount": "/run/user/1000",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
]
},
"sysfs,/sys": {
"device": "sysfs",
"mount": "/sys",
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"proc,/proc": {
"device": "proc",
"mount": "/proc",
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"securityfs,/sys/kernel/security": {
"device": "securityfs",
"mount": "/sys/kernel/security",
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"devpts,/dev/pts": {
"device": "devpts",
"mount": "/dev/pts",
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
]
},
"cgroup,/sys/fs/cgroup/systemd": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/systemd",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
]
},
"pstore,/sys/fs/pstore": {
"device": "pstore",
"mount": "/sys/fs/pstore",
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"cgroup,/sys/fs/cgroup/devices": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/devices",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
]
},
"cgroup,/sys/fs/cgroup/cpuset": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpuset",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
]
},
"cgroup,/sys/fs/cgroup/perf_event": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/perf_event",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
]
},
"cgroup,/sys/fs/cgroup/hugetlb": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/hugetlb",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
]
},
"cgroup,/sys/fs/cgroup/cpu,cpuacct": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpu,cpuacct",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
]
},
"cgroup,/sys/fs/cgroup/blkio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/blkio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
]
},
"cgroup,/sys/fs/cgroup/freezer": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/freezer",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
]
},
"cgroup,/sys/fs/cgroup/memory": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/memory",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
]
},
"cgroup,/sys/fs/cgroup/pids": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/pids",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
]
},
"cgroup,/sys/fs/cgroup/net_cls,net_prio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/net_cls,net_prio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
]
},
"configfs,/sys/kernel/config": {
"device": "configfs",
"mount": "/sys/kernel/config",
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
]
},
"selinuxfs,/sys/fs/selinux": {
"device": "selinuxfs",
"mount": "/sys/fs/selinux",
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
]
},
"debugfs,/sys/kernel/debug": {
"device": "debugfs",
"mount": "/sys/kernel/debug",
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"hugetlbfs,/dev/hugepages": {
"device": "hugetlbfs",
"mount": "/dev/hugepages",
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"mqueue,/dev/mqueue": {
"device": "mqueue",
"mount": "/dev/mqueue",
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"systemd-1,/proc/sys/fs/binfmt_misc": {
"device": "systemd-1",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
]
},
"/var/lib/machines.raw,/var/lib/machines": {
"device": "/var/lib/machines.raw",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"fusectl,/sys/fs/fuse/connections": {
"device": "fusectl",
"mount": "/sys/fs/fuse/connections",
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
]
},
"gvfsd-fuse,/run/user/1000/gvfs": {
"device": "gvfsd-fuse",
"mount": "/run/user/1000/gvfs",
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
]
},
"/dev/mapper/fedora_host--186-root,/var/lib/docker/devicemapper": {
"device": "/dev/mapper/fedora_host--186-root",
"mount": "/var/lib/docker/devicemapper",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"binfmt_misc,/proc/sys/fs/binfmt_misc": {
"device": "binfmt_misc",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8,/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"device": "/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"mount": "/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"shm,/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm": {
"device": "shm",
"mount": "/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
]
},
"nsfs,/run/docker/netns/1ce89fd79f3d": {
"device": "nsfs",
"mount": "/run/docker/netns/1ce89fd79f3d",
"fs_type": "nsfs",
"mount_options": [
"rw"
]
},
"tracefs,/sys/kernel/debug/tracing": {
"device": "tracefs",
"mount": "/sys/kernel/debug/tracing",
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/loop1,": {
"device": "/dev/loop1",
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"/dev/mapper/docker-253:1-1180487-pool,": {
"device": "/dev/mapper/docker-253:1-1180487-pool"
},
"/dev/sr0,": {
"device": "/dev/sr0"
},
"/dev/loop2,": {
"device": "/dev/loop2"
},
"/dev/sda,": {
"device": "/dev/sda"
},
"/dev/sda2,": {
"device": "/dev/sda2",
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK"
},
"/dev/mapper/fedora_host--186-swap,": {
"device": "/dev/mapper/fedora_host--186-swap",
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d"
}
}
},
"filesystem2": {
"by_device": {
"devtmpfs": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"mounts": [
"/dev"
]
},
"tmpfs": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"mounts": [
"/dev/shm",
"/run",
"/sys/fs/cgroup",
"/tmp",
"/run/user/0",
"/run/user/1000"
]
},
"/dev/mapper/fedora_host--186-root": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"mounts": [
"/",
"/var/lib/docker/devicemapper"
]
},
"/dev/sda1": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0",
"mounts": [
"/boot"
]
},
"/dev/mapper/fedora_host--186-home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"mounts": [
"/home"
]
},
"/dev/loop0": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"mounts": [
"/var/lib/machines"
]
},
"sysfs": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys"
]
},
"proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/proc"
]
},
"securityfs": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/sys/kernel/security"
]
},
"devpts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"mounts": [
"/dev/pts"
]
},
"cgroup": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"mounts": [
"/sys/fs/cgroup/systemd",
"/sys/fs/cgroup/devices",
"/sys/fs/cgroup/cpuset",
"/sys/fs/cgroup/perf_event",
"/sys/fs/cgroup/hugetlb",
"/sys/fs/cgroup/cpu,cpuacct",
"/sys/fs/cgroup/blkio",
"/sys/fs/cgroup/freezer",
"/sys/fs/cgroup/memory",
"/sys/fs/cgroup/pids",
"/sys/fs/cgroup/net_cls,net_prio"
]
},
"pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys/fs/pstore"
]
},
"configfs": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/config"
]
},
"selinuxfs": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/selinux"
]
},
"debugfs": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/sys/kernel/debug"
]
},
"hugetlbfs": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/hugepages"
]
},
"mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/mqueue"
]
},
"systemd-1": {
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/var/lib/machines.raw": {
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
],
"mounts": [
"/var/lib/machines"
]
},
"fusectl": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/fuse/connections"
]
},
"gvfsd-fuse": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"mounts": [
"/run/user/1000/gvfs"
]
},
"binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
"/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8"
]
},
"shm": {
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
],
"mounts": [
"/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm"
]
},
"nsfs": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"mounts": [
"/run/docker/netns/1ce89fd79f3d"
]
},
"tracefs": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/debug/tracing"
]
},
"/dev/loop1": {
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
]
},
"/dev/mapper/docker-253:1-1180487-pool": {
"mounts": [
]
},
"/dev/sr0": {
"mounts": [
]
},
"/dev/loop2": {
"mounts": [
]
},
"/dev/sda": {
"mounts": [
]
},
"/dev/sda2": {
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK",
"mounts": [
]
},
"/dev/mapper/fedora_host--186-swap": {
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d",
"mounts": [
]
}
},
"by_mountpoint": {
"/dev": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"devices": [
"devtmpfs"
]
},
"/dev/shm": {
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/run": {
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/sys/fs/cgroup": {
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
},
"/tmp": {
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/boot": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0",
"devices": [
"/dev/sda1"
]
},
"/home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"devices": [
"/dev/mapper/fedora_host--186-home"
]
},
"/var/lib/machines": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"devices": [
"/dev/loop0",
"/var/lib/machines.raw"
],
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"/run/user/0": {
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
],
"devices": [
"tmpfs"
]
},
"/run/user/1000": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"devices": [
"tmpfs"
]
},
"/sys": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"sysfs"
]
},
"/proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"proc"
]
},
"/sys/kernel/security": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"securityfs"
]
},
"/dev/pts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"devices": [
"devpts"
]
},
"/sys/fs/cgroup/systemd": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
],
"devices": [
"cgroup"
]
},
"/sys/fs/pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"pstore"
]
},
"/sys/fs/cgroup/devices": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpuset": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/perf_event": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/hugetlb": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpu,cpuacct": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/blkio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/freezer": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/memory": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/pids": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/net_cls,net_prio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"devices": [
"cgroup"
]
},
"/sys/kernel/config": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"configfs"
]
},
"/sys/fs/selinux": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"selinuxfs"
]
},
"/sys/kernel/debug": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"debugfs"
]
},
"/dev/hugepages": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"hugetlbfs"
]
},
"/dev/mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"mqueue"
]
},
"/proc/sys/fs/binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"systemd-1",
"binfmt_misc"
]
},
"/sys/fs/fuse/connections": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"fusectl"
]
},
"/run/user/1000/gvfs": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"devices": [
"gvfsd-fuse"
]
},
"/var/lib/docker/devicemapper": {
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
    },
    "/run/docker/netns/1ce89fd79f3d": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"devices": [
"nsfs"
]
},
"/sys/kernel/debug/tracing": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"tracefs"
]
}
},
"by_pair": {
"devtmpfs,/dev": {
"device": "devtmpfs",
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"mount": "/dev",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
]
},
"tmpfs,/dev/shm": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"mount": "/dev/shm",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"tmpfs,/run": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"mount": "/run",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
]
},
"tmpfs,/sys/fs/cgroup": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"mount": "/sys/fs/cgroup",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
]
},
"/dev/mapper/fedora_host--186-root,/": {
"device": "/dev/mapper/fedora_host--186-root",
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"mount": "/",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"tmpfs,/tmp": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"mount": "/tmp",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"/dev/sda1,/boot": {
"device": "/dev/sda1",
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"mount": "/boot",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0"
},
"/dev/mapper/fedora_host--186-home,/home": {
"device": "/dev/mapper/fedora_host--186-home",
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"mount": "/home",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d"
},
"/dev/loop0,/var/lib/machines": {
"device": "/dev/loop0",
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390"
},
"tmpfs,/run/user/0": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"mount": "/run/user/0",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
]
},
"tmpfs,/run/user/1000": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"mount": "/run/user/1000",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
]
},
"sysfs,/sys": {
"device": "sysfs",
"mount": "/sys",
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"proc,/proc": {
"device": "proc",
"mount": "/proc",
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"securityfs,/sys/kernel/security": {
"device": "securityfs",
"mount": "/sys/kernel/security",
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"devpts,/dev/pts": {
"device": "devpts",
"mount": "/dev/pts",
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
]
},
"cgroup,/sys/fs/cgroup/systemd": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/systemd",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
]
},
"pstore,/sys/fs/pstore": {
"device": "pstore",
"mount": "/sys/fs/pstore",
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"cgroup,/sys/fs/cgroup/devices": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/devices",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
]
},
"cgroup,/sys/fs/cgroup/cpuset": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpuset",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
]
},
"cgroup,/sys/fs/cgroup/perf_event": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/perf_event",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
]
},
"cgroup,/sys/fs/cgroup/hugetlb": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/hugetlb",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
]
},
"cgroup,/sys/fs/cgroup/cpu,cpuacct": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpu,cpuacct",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
]
},
"cgroup,/sys/fs/cgroup/blkio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/blkio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
]
},
"cgroup,/sys/fs/cgroup/freezer": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/freezer",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
]
},
"cgroup,/sys/fs/cgroup/memory": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/memory",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
]
},
"cgroup,/sys/fs/cgroup/pids": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/pids",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
]
},
"cgroup,/sys/fs/cgroup/net_cls,net_prio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/net_cls,net_prio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
]
},
"configfs,/sys/kernel/config": {
"device": "configfs",
"mount": "/sys/kernel/config",
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
]
},
"selinuxfs,/sys/fs/selinux": {
"device": "selinuxfs",
"mount": "/sys/fs/selinux",
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
]
},
"debugfs,/sys/kernel/debug": {
"device": "debugfs",
"mount": "/sys/kernel/debug",
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"hugetlbfs,/dev/hugepages": {
"device": "hugetlbfs",
"mount": "/dev/hugepages",
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"mqueue,/dev/mqueue": {
"device": "mqueue",
"mount": "/dev/mqueue",
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"systemd-1,/proc/sys/fs/binfmt_misc": {
"device": "systemd-1",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
]
},
"/var/lib/machines.raw,/var/lib/machines": {
"device": "/var/lib/machines.raw",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"fusectl,/sys/fs/fuse/connections": {
"device": "fusectl",
"mount": "/sys/fs/fuse/connections",
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
]
},
"gvfsd-fuse,/run/user/1000/gvfs": {
"device": "gvfsd-fuse",
"mount": "/run/user/1000/gvfs",
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
]
},
"/dev/mapper/fedora_host--186-root,/var/lib/docker/devicemapper": {
"device": "/dev/mapper/fedora_host--186-root",
"mount": "/var/lib/docker/devicemapper",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"binfmt_misc,/proc/sys/fs/binfmt_misc": {
"device": "binfmt_misc",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8,/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"device": "/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"mount": "/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"shm,/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm": {
"device": "shm",
"mount": "/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
]
},
"nsfs,/run/docker/netns/1ce89fd79f3d": {
"device": "nsfs",
"mount": "/run/docker/netns/1ce89fd79f3d",
"fs_type": "nsfs",
"mount_options": [
"rw"
]
},
"tracefs,/sys/kernel/debug/tracing": {
"device": "tracefs",
"mount": "/sys/kernel/debug/tracing",
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/loop1,": {
"device": "/dev/loop1",
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"/dev/mapper/docker-253:1-1180487-pool,": {
"device": "/dev/mapper/docker-253:1-1180487-pool"
},
"/dev/sr0,": {
"device": "/dev/sr0"
},
"/dev/loop2,": {
"device": "/dev/loop2"
},
"/dev/sda,": {
"device": "/dev/sda"
},
"/dev/sda2,": {
"device": "/dev/sda2",
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK"
},
"/dev/mapper/fedora_host--186-swap,": {
"device": "/dev/mapper/fedora_host--186-swap",
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d"
}
}
},
"virtualization": {
"systems": {
"kvm": "host"
},
"system": "kvm",
"role": "host",
"libvirt_version": "2.2.0",
"uri": "qemu:///system",
"capabilities": {
},
"nodeinfo": {
"cores": 4,
"cpus": 8,
"memory": 16110540,
"mhz": 2832,
"model": "x86_64",
"nodes": 1,
"sockets": 1,
"threads": 2
},
"domains": {
},
"networks": {
"vagrant-libvirt": {
"bridge_name": "virbr1",
"uuid": "877ddb27-b39c-427e-a7bf-1aa829389eeb"
},
"default": {
"bridge_name": "virbr0",
"uuid": "750d2567-23a8-470d-8a2b-71cd651e30d1"
}
},
"storage": {
"virt-images": {
"autostart": true,
"uuid": "d8a189fa-f98c-462f-9ea4-204eb77a96a1",
"allocation": 106412863488,
"available": 83998015488,
"capacity": 190410878976,
"state": 2,
"volumes": {
"rhel-atomic-host-standard-2014-7-1.qcow2": {
"key": "/home/some_user/virt-images/rhel-atomic-host-standard-2014-7-1.qcow2",
"name": "rhel-atomic-host-standard-2014-7-1.qcow2",
"path": "/home/some_user/virt-images/rhel-atomic-host-standard-2014-7-1.qcow2",
"allocation": 1087115264,
"capacity": 8589934592,
"type": 0
},
"atomic-beta-instance-7.qcow2": {
"key": "/home/some_user/virt-images/atomic-beta-instance-7.qcow2",
"name": "atomic-beta-instance-7.qcow2",
"path": "/home/some_user/virt-images/atomic-beta-instance-7.qcow2",
"allocation": 200704,
"capacity": 8589934592,
"type": 0
},
"os1-atomic-meta-data": {
"key": "/home/some_user/virt-images/os1-atomic-meta-data",
"name": "os1-atomic-meta-data",
"path": "/home/some_user/virt-images/os1-atomic-meta-data",
"allocation": 4096,
"capacity": 49,
"type": 0
},
"atomic-user-data": {
"key": "/home/some_user/virt-images/atomic-user-data",
"name": "atomic-user-data",
"path": "/home/some_user/virt-images/atomic-user-data",
"allocation": 4096,
"capacity": 512,
"type": 0
},
"qemu-snap.txt": {
"key": "/home/some_user/virt-images/qemu-snap.txt",
"name": "qemu-snap.txt",
"path": "/home/some_user/virt-images/qemu-snap.txt",
"allocation": 4096,
"capacity": 111,
"type": 0
},
"atomic-beta-instance-5.qcow2": {
"key": "/home/some_user/virt-images/atomic-beta-instance-5.qcow2",
"name": "atomic-beta-instance-5.qcow2",
"path": "/home/some_user/virt-images/atomic-beta-instance-5.qcow2",
"allocation": 339091456,
"capacity": 8589934592,
"type": 0
},
"meta-data": {
"key": "/home/some_user/virt-images/meta-data",
"name": "meta-data",
"path": "/home/some_user/virt-images/meta-data",
"allocation": 4096,
"capacity": 49,
"type": 0
},
"atomic-beta-instance-8.qcow2": {
"key": "/home/some_user/virt-images/atomic-beta-instance-8.qcow2",
"name": "atomic-beta-instance-8.qcow2",
"path": "/home/some_user/virt-images/atomic-beta-instance-8.qcow2",
"allocation": 322576384,
"capacity": 8589934592,
"type": 0
},
"user-data": {
"key": "/home/some_user/virt-images/user-data",
"name": "user-data",
"path": "/home/some_user/virt-images/user-data",
"allocation": 4096,
"capacity": 512,
"type": 0
},
"rhel-6-2015-10-16.qcow2": {
"key": "/home/some_user/virt-images/rhel-6-2015-10-16.qcow2",
"name": "rhel-6-2015-10-16.qcow2",
"path": "/home/some_user/virt-images/rhel-6-2015-10-16.qcow2",
"allocation": 7209422848,
"capacity": 17179869184,
"type": 0
},
"atomic_demo_notes.txt": {
"key": "/home/some_user/virt-images/atomic_demo_notes.txt",
"name": "atomic_demo_notes.txt",
"path": "/home/some_user/virt-images/atomic_demo_notes.txt",
"allocation": 4096,
"capacity": 354,
"type": 0
},
"packer-windows-2012-R2-standard": {
"key": "/home/some_user/virt-images/packer-windows-2012-R2-standard",
"name": "packer-windows-2012-R2-standard",
"path": "/home/some_user/virt-images/packer-windows-2012-R2-standard",
"allocation": 16761495552,
"capacity": 64424509440,
"type": 0
},
"atomic3-cidata.iso": {
"key": "/home/some_user/virt-images/atomic3-cidata.iso",
"name": "atomic3-cidata.iso",
"path": "/home/some_user/virt-images/atomic3-cidata.iso",
"allocation": 376832,
"capacity": 374784,
"type": 0
},
".atomic_demo_notes.txt.swp": {
"key": "/home/some_user/virt-images/.atomic_demo_notes.txt.swp",
"name": ".atomic_demo_notes.txt.swp",
"path": "/home/some_user/virt-images/.atomic_demo_notes.txt.swp",
"allocation": 12288,
"capacity": 12288,
"type": 0
},
"rhel7-2015-10-13.qcow2": {
"key": "/home/some_user/virt-images/rhel7-2015-10-13.qcow2",
"name": "rhel7-2015-10-13.qcow2",
"path": "/home/some_user/virt-images/rhel7-2015-10-13.qcow2",
"allocation": 4679413760,
"capacity": 12884901888,
"type": 0
}
}
},
"default": {
"autostart": true,
"uuid": "c8d9d160-efc0-4207-81c2-e79d6628f7e1",
"allocation": 43745488896,
"available": 8964980736,
"capacity": 52710469632,
"state": 2,
"volumes": {
"s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img": {
"key": "/var/lib/libvirt/images/s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img",
"name": "s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img",
"path": "/var/lib/libvirt/images/s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img",
"allocation": 1258622976,
"capacity": 42949672960,
"type": 0
},
"centos-7.0_vagrant_box_image.img": {
"key": "/var/lib/libvirt/images/centos-7.0_vagrant_box_image.img",
"name": "centos-7.0_vagrant_box_image.img",
"path": "/var/lib/libvirt/images/centos-7.0_vagrant_box_image.img",
"allocation": 1649414144,
"capacity": 42949672960,
"type": 0
},
"baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img": {
"key": "/var/lib/libvirt/images/baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img",
"name": "baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img",
"path": "/var/lib/libvirt/images/baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img",
"allocation": 810422272,
"capacity": 42949672960,
"type": 0
},
"centos-6_vagrant_box_image.img": {
"key": "/var/lib/libvirt/images/centos-6_vagrant_box_image.img",
"name": "centos-6_vagrant_box_image.img",
"path": "/var/lib/libvirt/images/centos-6_vagrant_box_image.img",
"allocation": 1423642624,
"capacity": 42949672960,
"type": 0
},
"centos5-ansible_default.img": {
"key": "/var/lib/libvirt/images/centos5-ansible_default.img",
"name": "centos5-ansible_default.img",
"path": "/var/lib/libvirt/images/centos5-ansible_default.img",
"allocation": 8986624,
"capacity": 42949672960,
"type": 0
},
"ubuntu_default.img": {
"key": "/var/lib/libvirt/images/ubuntu_default.img",
"name": "ubuntu_default.img",
"path": "/var/lib/libvirt/images/ubuntu_default.img",
"allocation": 3446833152,
"capacity": 42949672960,
"type": 0
}
}
},
"boot-scratch": {
"autostart": true,
"uuid": "e5ef4360-b889-4843-84fb-366e8fb30f20",
"allocation": 43745488896,
"available": 8964980736,
"capacity": 52710469632,
"state": 2,
"volumes": {
}
}
}
},
"network": {
"interfaces": {
"lo": {
"mtu": "65536",
"flags": [
"LOOPBACK",
"UP",
"LOWER_UP"
],
"encapsulation": "Loopback",
"addresses": {
"127.0.0.1": {
"family": "inet",
"prefixlen": "8",
"netmask": "255.0.0.0",
"scope": "Node",
"ip_scope": "LOOPBACK"
},
"::1": {
"family": "inet6",
"prefixlen": "128",
"scope": "Node",
"tags": [
],
"ip_scope": "LINK LOCAL LOOPBACK"
}
},
"state": "unknown"
},
"em1": {
"type": "em",
"number": "1",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP"
],
"encapsulation": "Ethernet",
"addresses": {
"3C:97:0E:E9:28:8E": {
"family": "lladdr"
}
},
"state": "down",
"link_speed": 0,
"duplex": "Unknown! (255)",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "on",
"mdi_x": "Unknown (auto)",
"ring_params": {
"max_rx": 4096,
"max_rx_mini": 0,
"max_rx_jumbo": 0,
"max_tx": 4096,
"current_rx": 256,
"current_rx_mini": 0,
"current_rx_jumbo": 0,
"current_tx": 256
}
},
"wlp4s0": {
"type": "wlp4s",
"number": "0",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP",
"LOWER_UP"
],
"encapsulation": "Ethernet",
"addresses": {
"5C:51:4F:E6:A8:E3": {
"family": "lladdr"
},
"192.168.1.19": {
"family": "inet",
"prefixlen": "24",
"netmask": "255.255.255.0",
"broadcast": "192.168.1.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
},
"fe80::5e51:4fff:fee6:a8e3": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "up",
"arp": {
"192.168.1.33": "00:11:d9:39:3e:e0",
"192.168.1.20": "ac:3a:7a:a7:49:e8",
"192.168.1.17": "00:09:b0:d0:64:19",
"192.168.1.22": "ac:bc:32:82:30:bb",
"192.168.1.15": "00:11:32:2e:10:d5",
"192.168.1.1": "84:1b:5e:03:50:b2",
"192.168.1.34": "00:11:d9:5f:e8:e6",
"192.168.1.16": "dc:a5:f4:ac:22:3a",
"192.168.1.21": "74:c2:46:73:28:d8",
"192.168.1.27": "00:17:88:09:3c:bb",
"192.168.1.24": "08:62:66:90:a2:b8"
},
"routes": [
{
"destination": "default",
"family": "inet",
"via": "192.168.1.1",
"metric": "600",
"proto": "static"
},
{
"destination": "66.187.232.64",
"family": "inet",
"via": "192.168.1.1",
"metric": "600",
"proto": "static"
},
{
"destination": "192.168.1.0/24",
"family": "inet",
"scope": "link",
"metric": "600",
"proto": "kernel",
"src": "192.168.1.19"
},
{
"destination": "192.168.1.1",
"family": "inet",
"scope": "link",
"metric": "600",
"proto": "static"
},
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
],
"ring_params": {
"max_rx": 0,
"max_rx_mini": 0,
"max_rx_jumbo": 0,
"max_tx": 0,
"current_rx": 0,
"current_rx_mini": 0,
"current_rx_jumbo": 0,
"current_tx": 0
}
},
"virbr1": {
"type": "virbr",
"number": "1",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:B4:68:A9": {
"family": "lladdr"
},
"192.168.121.1": {
"family": "inet",
"prefixlen": "24",
"netmask": "255.255.255.0",
"broadcast": "192.168.121.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
}
},
"state": "1",
"routes": [
{
"destination": "192.168.121.0/24",
"family": "inet",
"scope": "link",
"proto": "kernel",
"src": "192.168.121.1"
}
],
"ring_params": {
}
},
"virbr1-nic": {
"type": "virbr",
"number": "1-nic",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:B4:68:A9": {
"family": "lladdr"
}
},
"state": "disabled",
"link_speed": 10,
"duplex": "Full",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "off",
"mdi_x": "Unknown",
"ring_params": {
}
},
"virbr0": {
"type": "virbr",
"number": "0",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:CE:82:5E": {
"family": "lladdr"
},
"192.168.137.1": {
"family": "inet",
"prefixlen": "24",
"netmask": "255.255.255.0",
"broadcast": "192.168.137.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
}
},
"state": "1",
"routes": [
{
"destination": "192.168.137.0/24",
"family": "inet",
"scope": "link",
"proto": "kernel",
"src": "192.168.137.1"
}
],
"ring_params": {
}
},
"virbr0-nic": {
"type": "virbr",
"number": "0-nic",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:CE:82:5E": {
"family": "lladdr"
}
},
"state": "disabled",
"link_speed": 10,
"duplex": "Full",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "off",
"mdi_x": "Unknown",
"ring_params": {
}
},
"docker0": {
"type": "docker",
"number": "0",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP",
"LOWER_UP"
],
"encapsulation": "Ethernet",
"addresses": {
"02:42:EA:15:D8:84": {
"family": "lladdr"
},
"172.17.0.1": {
"family": "inet",
"prefixlen": "16",
"netmask": "255.255.0.0",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
},
"fe80::42:eaff:fe15:d884": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "0",
"arp": {
"172.17.0.2": "02:42:ac:11:00:02",
"172.17.0.4": "02:42:ac:11:00:04",
"172.17.0.3": "02:42:ac:11:00:03"
},
"routes": [
{
"destination": "172.17.0.0/16",
"family": "inet",
"scope": "link",
"proto": "kernel",
"src": "172.17.0.1"
},
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
],
"ring_params": {
}
},
"vethf20ff12": {
"type": "vethf20ff1",
"number": "2",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP",
"LOWER_UP"
],
"encapsulation": "Ethernet",
"addresses": {
"AE:6E:2B:1E:A1:31": {
"family": "lladdr"
},
"fe80::ac6e:2bff:fe1e:a131": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "forwarding",
"routes": [
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
],
"link_speed": 10000,
"duplex": "Full",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "off",
"mdi_x": "Unknown",
"ring_params": {
}
},
"tun0": {
"type": "tun",
"number": "0",
"mtu": "1360",
"flags": [
"MULTICAST",
"NOARP",
"UP",
"LOWER_UP"
],
"addresses": {
"10.10.120.68": {
"family": "inet",
"prefixlen": "21",
"netmask": "255.255.248.0",
"broadcast": "10.10.127.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
},
"fe80::365e:885c:31ca:7670": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
"flags",
"800"
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "unknown",
"routes": [
{
"destination": "10.0.0.0/8",
"family": "inet",
"via": "10.10.120.1",
"metric": "50",
"proto": "static"
},
{
"destination": "10.10.120.0/21",
"family": "inet",
"scope": "link",
"metric": "50",
"proto": "kernel",
"src": "10.10.120.68"
},
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
]
}
},
"default_interface": "wlp4s0",
"default_gateway": "192.168.1.1"
},
"counters": {
"network": {
"interfaces": {
"lo": {
"tx": {
"queuelen": "1",
"bytes": "202568405",
"packets": "1845473",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "202568405",
"packets": "1845473",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"em1": {
"tx": {
"queuelen": "1000",
"bytes": "673898037",
"packets": "1631282",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "1536186718",
"packets": "1994394",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"wlp4s0": {
"tx": {
"queuelen": "1000",
"bytes": "3927670539",
"packets": "15146886",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "12367173401",
"packets": "23981258",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr1": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr1-nic": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr0": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr0-nic": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"docker0": {
"rx": {
"bytes": "2471313",
"packets": "36915",
"errors": "0",
"drop": "0",
"overrun": "0"
},
"tx": {
"bytes": "413371670",
"packets": "127713",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
}
},
"vethf20ff12": {
"rx": {
"bytes": "34391",
"packets": "450",
"errors": "0",
"drop": "0",
"overrun": "0"
},
"tx": {
"bytes": "17919115",
"packets": "108069",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
}
},
"tun0": {
"tx": {
"queuelen": "100",
"bytes": "22343462",
"packets": "253442",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "115160002",
"packets": "197529",
"errors": "0",
"drop": "0",
"overrun": "0"
}
}
}
}
},
"ipaddress": "192.168.1.19",
"macaddress": "5C:51:4F:E6:A8:E3",
"ip6address": "fe80::42:eaff:fe15:d884",
"cpu": {
"0": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3238.714",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "0",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"1": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3137.200",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "0",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"2": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3077.050",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "1",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"3": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2759.655",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "1",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"4": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3419.000",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "2",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"5": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2752.569",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "2",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"6": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2953.619",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "3",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"7": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2927.087",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "3",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"total": 8,
"real": 1,
"cores": 4
},
"etc": {
"passwd": {
"root": {
"dir": "/root",
"gid": 0,
"uid": 0,
"shell": "/bin/bash",
"gecos": "root"
},
"bin": {
"dir": "/bin",
"gid": 1,
"uid": 1,
"shell": "/sbin/nologin",
"gecos": "bin"
},
"daemon": {
"dir": "/sbin",
"gid": 2,
"uid": 2,
"shell": "/sbin/nologin",
"gecos": "daemon"
},
"adm": {
"dir": "/var/adm",
"gid": 4,
"uid": 3,
"shell": "/sbin/nologin",
"gecos": "adm"
},
"lp": {
"dir": "/var/spool/lpd",
"gid": 7,
"uid": 4,
"shell": "/sbin/nologin",
"gecos": "lp"
},
"sync": {
"dir": "/sbin",
"gid": 0,
"uid": 5,
"shell": "/bin/sync",
"gecos": "sync"
},
"shutdown": {
"dir": "/sbin",
"gid": 0,
"uid": 6,
"shell": "/sbin/shutdown",
"gecos": "shutdown"
},
"halt": {
"dir": "/sbin",
"gid": 0,
"uid": 7,
"shell": "/sbin/halt",
"gecos": "halt"
},
"mail": {
"dir": "/var/spool/mail",
"gid": 12,
"uid": 8,
"shell": "/sbin/nologin",
"gecos": "mail"
},
"operator": {
"dir": "/root",
"gid": 0,
"uid": 11,
"shell": "/sbin/nologin",
"gecos": "operator"
},
"games": {
"dir": "/usr/games",
"gid": 100,
"uid": 12,
"shell": "/sbin/nologin",
"gecos": "games"
},
"ftp": {
"dir": "/var/ftp",
"gid": 50,
"uid": 14,
"shell": "/sbin/nologin",
"gecos": "FTP User"
},
"nobody": {
"dir": "/",
"gid": 99,
"uid": 99,
"shell": "/sbin/nologin",
"gecos": "Nobody"
},
"avahi-autoipd": {
"dir": "/var/lib/avahi-autoipd",
"gid": 170,
"uid": 170,
"shell": "/sbin/nologin",
"gecos": "Avahi IPv4LL Stack"
},
"dbus": {
"dir": "/",
"gid": 81,
"uid": 81,
"shell": "/sbin/nologin",
"gecos": "System message bus"
},
"polkitd": {
"dir": "/",
"gid": 999,
"uid": 999,
"shell": "/sbin/nologin",
"gecos": "User for polkitd"
},
"abrt": {
"dir": "/etc/abrt",
"gid": 173,
"uid": 173,
"shell": "/sbin/nologin",
"gecos": ""
},
"usbmuxd": {
"dir": "/",
"gid": 113,
"uid": 113,
"shell": "/sbin/nologin",
"gecos": "usbmuxd user"
},
"colord": {
"dir": "/var/lib/colord",
"gid": 998,
"uid": 998,
"shell": "/sbin/nologin",
"gecos": "User for colord"
},
"geoclue": {
"dir": "/var/lib/geoclue",
"gid": 997,
"uid": 997,
"shell": "/sbin/nologin",
"gecos": "User for geoclue"
},
"rpc": {
"dir": "/var/lib/rpcbind",
"gid": 32,
"uid": 32,
"shell": "/sbin/nologin",
"gecos": "Rpcbind Daemon"
},
"rpcuser": {
"dir": "/var/lib/nfs",
"gid": 29,
"uid": 29,
"shell": "/sbin/nologin",
"gecos": "RPC Service User"
},
"nfsnobody": {
"dir": "/var/lib/nfs",
"gid": 65534,
"uid": 65534,
"shell": "/sbin/nologin",
"gecos": "Anonymous NFS User"
},
"qemu": {
"dir": "/",
"gid": 107,
"uid": 107,
"shell": "/sbin/nologin",
"gecos": "qemu user"
},
"rtkit": {
"dir": "/proc",
"gid": 172,
"uid": 172,
"shell": "/sbin/nologin",
"gecos": "RealtimeKit"
},
"radvd": {
"dir": "/",
"gid": 75,
"uid": 75,
"shell": "/sbin/nologin",
"gecos": "radvd user"
},
"tss": {
"dir": "/dev/null",
"gid": 59,
"uid": 59,
"shell": "/sbin/nologin",
"gecos": "Account used by the trousers package to sandbox the tcsd daemon"
},
"unbound": {
"dir": "/etc/unbound",
"gid": 995,
"uid": 996,
"shell": "/sbin/nologin",
"gecos": "Unbound DNS resolver"
},
"openvpn": {
"dir": "/etc/openvpn",
"gid": 994,
"uid": 995,
"shell": "/sbin/nologin",
"gecos": "OpenVPN"
},
"saslauth": {
"dir": "/run/saslauthd",
"gid": 76,
"uid": 994,
"shell": "/sbin/nologin",
"gecos": "\"Saslauthd user\""
},
"avahi": {
"dir": "/var/run/avahi-daemon",
"gid": 70,
"uid": 70,
"shell": "/sbin/nologin",
"gecos": "Avahi mDNS/DNS-SD Stack"
},
"pulse": {
"dir": "/var/run/pulse",
"gid": 992,
"uid": 993,
"shell": "/sbin/nologin",
"gecos": "PulseAudio System Daemon"
},
"gdm": {
"dir": "/var/lib/gdm",
"gid": 42,
"uid": 42,
"shell": "/sbin/nologin",
"gecos": ""
},
"gnome-initial-setup": {
"dir": "/run/gnome-initial-setup/",
"gid": 990,
"uid": 992,
"shell": "/sbin/nologin",
"gecos": ""
},
"nm-openconnect": {
"dir": "/",
"gid": 989,
"uid": 991,
"shell": "/sbin/nologin",
"gecos": "NetworkManager user for OpenConnect"
},
"sshd": {
"dir": "/var/empty/sshd",
"gid": 74,
"uid": 74,
"shell": "/sbin/nologin",
"gecos": "Privilege-separated SSH"
},
"chrony": {
"dir": "/var/lib/chrony",
"gid": 988,
"uid": 990,
"shell": "/sbin/nologin",
"gecos": ""
},
"tcpdump": {
"dir": "/",
"gid": 72,
"uid": 72,
"shell": "/sbin/nologin",
"gecos": ""
},
"some_user": {
"dir": "/home/some_user",
"gid": 1000,
"uid": 1000,
"shell": "/bin/bash",
"gecos": "some_user"
},
"systemd-journal-gateway": {
"dir": "/var/log/journal",
"gid": 191,
"uid": 191,
"shell": "/sbin/nologin",
"gecos": "Journal Gateway"
},
"postgres": {
"dir": "/var/lib/pgsql",
"gid": 26,
"uid": 26,
"shell": "/bin/bash",
"gecos": "PostgreSQL Server"
},
"dockerroot": {
"dir": "/var/lib/docker",
"gid": 977,
"uid": 984,
"shell": "/sbin/nologin",
"gecos": "Docker User"
},
"apache": {
"dir": "/usr/share/httpd",
"gid": 48,
"uid": 48,
"shell": "/sbin/nologin",
"gecos": "Apache"
},
"systemd-network": {
"dir": "/",
"gid": 974,
"uid": 982,
"shell": "/sbin/nologin",
"gecos": "systemd Network Management"
},
"systemd-resolve": {
"dir": "/",
"gid": 973,
"uid": 981,
"shell": "/sbin/nologin",
"gecos": "systemd Resolver"
},
"systemd-bus-proxy": {
"dir": "/",
"gid": 972,
"uid": 980,
"shell": "/sbin/nologin",
"gecos": "systemd Bus Proxy"
},
"systemd-journal-remote": {
"dir": "//var/log/journal/remote",
"gid": 970,
"uid": 979,
"shell": "/sbin/nologin",
"gecos": "Journal Remote"
},
"systemd-journal-upload": {
"dir": "//var/log/journal/upload",
"gid": 969,
"uid": 978,
"shell": "/sbin/nologin",
"gecos": "Journal Upload"
},
"setroubleshoot": {
"dir": "/var/lib/setroubleshoot",
"gid": 967,
"uid": 977,
"shell": "/sbin/nologin",
"gecos": ""
},
"oprofile": {
"dir": "/var/lib/oprofile",
"gid": 16,
"uid": 16,
"shell": "/sbin/nologin",
"gecos": "Special user account to be used by OProfile"
}
},
"group": {
"root": {
"gid": 0,
"members": [
]
},
"bin": {
"gid": 1,
"members": [
]
},
"daemon": {
"gid": 2,
"members": [
]
},
"sys": {
"gid": 3,
"members": [
]
},
"adm": {
"gid": 4,
"members": [
"logcheck"
]
},
"tty": {
"gid": 5,
"members": [
]
},
"disk": {
"gid": 6,
"members": [
]
},
"lp": {
"gid": 7,
"members": [
]
},
"mem": {
"gid": 8,
"members": [
]
},
"kmem": {
"gid": 9,
"members": [
]
},
"wheel": {
"gid": 10,
"members": [
]
},
"cdrom": {
"gid": 11,
"members": [
]
},
"mail": {
"gid": 12,
"members": [
]
},
"man": {
"gid": 15,
"members": [
]
},
"dialout": {
"gid": 18,
"members": [
"lirc"
]
},
"floppy": {
"gid": 19,
"members": [
]
},
"games": {
"gid": 20,
"members": [
]
},
"tape": {
"gid": 30,
"members": [
]
},
"video": {
"gid": 39,
"members": [
]
},
"ftp": {
"gid": 50,
"members": [
]
},
"lock": {
"gid": 54,
"members": [
"lirc"
]
},
"audio": {
"gid": 63,
"members": [
]
},
"nobody": {
"gid": 99,
"members": [
]
},
"users": {
"gid": 100,
"members": [
]
},
"utmp": {
"gid": 22,
"members": [
]
},
"utempter": {
"gid": 35,
"members": [
]
},
"avahi-autoipd": {
"gid": 170,
"members": [
]
},
"systemd-journal": {
"gid": 190,
"members": [
]
},
"dbus": {
"gid": 81,
"members": [
]
},
"polkitd": {
"gid": 999,
"members": [
]
},
"abrt": {
"gid": 173,
"members": [
]
},
"dip": {
"gid": 40,
"members": [
]
},
"usbmuxd": {
"gid": 113,
"members": [
]
},
"colord": {
"gid": 998,
"members": [
]
},
"geoclue": {
"gid": 997,
"members": [
]
},
"ssh_keys": {
"gid": 996,
"members": [
]
},
"rpc": {
"gid": 32,
"members": [
]
},
"rpcuser": {
"gid": 29,
"members": [
]
},
"nfsnobody": {
"gid": 65534,
"members": [
]
},
"kvm": {
"gid": 36,
"members": [
"qemu"
]
},
"qemu": {
"gid": 107,
"members": [
]
},
"rtkit": {
"gid": 172,
"members": [
]
},
"radvd": {
"gid": 75,
"members": [
]
},
"tss": {
"gid": 59,
"members": [
]
},
"unbound": {
"gid": 995,
"members": [
]
},
"openvpn": {
"gid": 994,
"members": [
]
},
"saslauth": {
"gid": 76,
"members": [
]
},
"avahi": {
"gid": 70,
"members": [
]
},
"brlapi": {
"gid": 993,
"members": [
]
},
"pulse": {
"gid": 992,
"members": [
]
},
"pulse-access": {
"gid": 991,
"members": [
]
},
"gdm": {
"gid": 42,
"members": [
]
},
"gnome-initial-setup": {
"gid": 990,
"members": [
]
},
"nm-openconnect": {
"gid": 989,
"members": [
]
},
"sshd": {
"gid": 74,
"members": [
]
},
"slocate": {
"gid": 21,
"members": [
]
},
"chrony": {
"gid": 988,
"members": [
]
},
"tcpdump": {
"gid": 72,
"members": [
]
},
"some_user": {
"gid": 1000,
"members": [
"some_user"
]
},
"docker": {
"gid": 986,
"members": [
"some_user"
]
}
},
"c": {
"gcc": {
"target": "x86_64-redhat-linux",
"configured_with": "../configure --enable-bootstrap --enable-languages=c,c++,objc,obj-c++,fortran,ada,go,lto --prefix=/usr --mandir=/usr/share/man --infodir=/usr/share/info --with-bugurl=http://bugzilla.redhat.com/bugzilla --enable-shared --enable-threads=posix --enable-checking=release --enable-multilib --with-system-zlib --enable-__cxa_atexit --disable-libunwind-exceptions --enable-gnu-unique-object --enable-linker-build-id --with-linker-hash-style=gnu --enable-plugin --enable-initfini-array --disable-libgcj --with-isl --enable-libmpx --enable-gnu-indirect-function --with-tune=generic --with-arch_32=i686 --build=x86_64-redhat-linux",
"thread_model": "posix",
"description": "gcc version 6.3.1 20161221 (Red Hat 6.3.1-1) (GCC) ",
"version": "6.3.1"
},
"glibc": {
"version": "2.24",
"description": "GNU C Library (GNU libc) stable release version 2.24, by Roland McGrath et al."
}
},
"lua": {
"version": "5.3.4"
},
"ruby": {
"platform": "x86_64-linux",
"version": "2.3.3",
"release_date": "2016-11-21",
"target": "x86_64-redhat-linux-gnu",
"target_cpu": "x86_64",
"target_vendor": "redhat",
"target_os": "linux",
"host": "x86_64-redhat-linux-gnu",
"host_cpu": "x86_64",
"host_os": "linux-gnu",
"host_vendor": "redhat",
"bin_dir": "/usr/bin",
"ruby_bin": "/usr/bin/ruby",
"gems_dir": "/home/some_user/.gem/ruby",
"gem_bin": "/usr/bin/gem"
}
},
"command": {
"ps": "ps -ef"
},
"root_group": "root",
"fips": {
"kernel": {
"enabled": false
}
},
"hostname": "myhostname",
"machinename": "myhostname",
"fqdn": "myhostname",
"domain": null,
"machine_id": "1234567abcede123456123456123456a",
"privateaddress": "192.168.1.100",
"keys": {
"ssh": {
}
},
"time": {
"timezone": "EDT"
},
"sessions": {
"by_session": {
"1918": {
"session": "1918",
"uid": "1000",
"user": "some_user",
"seat": null
},
"5": {
"session": "5",
"uid": "1000",
"user": "some_user",
"seat": "seat0"
},
"3": {
"session": "3",
"uid": "0",
"user": "root",
"seat": "seat0"
}
},
"by_user": {
"some_user": [
{
"session": "1918",
"uid": "1000",
"user": "some_user",
"seat": null
},
{
"session": "5",
"uid": "1000",
"user": "some_user",
"seat": "seat0"
}
],
"root": [
{
"session": "3",
"uid": "0",
"user": "root",
"seat": "seat0"
}
]
}
},
"hostnamectl": {
"static_hostname": "myhostname",
"icon_name": "computer-laptop",
"chassis": "laptop",
"machine_id": "24dc16bd7694404c825b517ab46d9d6b",
"machine_id": "12345123451234512345123451242323",
"boot_id": "3d5d5512341234123412341234123423",
"operating_system": "Fedora 25 (Workstation Edition)",
"cpe_os_name": "cpe",
"kernel": "Linux 4.9.14-200.fc25.x86_64",
"architecture": "x86-64"
},
"block_device": {
"dm-1": {
"size": "104857600",
"removable": "0",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"loop1": {
"size": "209715200",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"sr0": {
"size": "2097151",
"removable": "1",
"model": "DVD-RAM UJ8E2",
"rev": "SB01",
"state": "running",
"timeout": "30",
"vendor": "MATSHITA",
"queue_depth": "1",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-2": {
"size": "378093568",
"removable": "0",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"loop2": {
"size": "4194304",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-0": {
"size": "16138240",
"removable": "0",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"loop0": {
"size": "1024000",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"sda": {
"size": "500118192",
"removable": "0",
"model": "SAMSUNG MZ7TD256",
"rev": "2L5Q",
"state": "running",
"timeout": "30",
"vendor": "ATA",
"queue_depth": "31",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-5": {
"size": "20971520",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-3": {
"size": "209715200",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
}
},
"sysconf": {
"LINK_MAX": 65000,
"_POSIX_LINK_MAX": 65000,
"MAX_CANON": 255,
"_POSIX_MAX_CANON": 255,
"MAX_INPUT": 255,
"_POSIX_MAX_INPUT": 255,
"NAME_MAX": 255,
"_POSIX_NAME_MAX": 255,
"PATH_MAX": 4096,
"_POSIX_PATH_MAX": 4096,
"PIPE_BUF": 4096,
"_POSIX_PIPE_BUF": 4096,
"SOCK_MAXBUF": null,
"_POSIX_ASYNC_IO": null,
"_POSIX_CHOWN_RESTRICTED": 1,
"_POSIX_NO_TRUNC": 1,
"_POSIX_PRIO_IO": null,
"_POSIX_SYNC_IO": null,
"_POSIX_VDISABLE": 0,
"ARG_MAX": 2097152,
"ATEXIT_MAX": 2147483647,
"CHAR_BIT": 8,
"CHAR_MAX": 127,
"CHAR_MIN": -128,
"CHILD_MAX": 62844,
"CLK_TCK": 100,
"INT_MAX": 2147483647,
"INT_MIN": -2147483648,
"IOV_MAX": 1024,
"LOGNAME_MAX": 256,
"LONG_BIT": 64,
"MB_LEN_MAX": 16,
"NGROUPS_MAX": 65536,
"NL_ARGMAX": 4096,
"NL_LANGMAX": 2048,
"NL_MSGMAX": 2147483647,
"NL_NMAX": 2147483647,
"NL_SETMAX": 2147483647,
"NL_TEXTMAX": 2147483647,
"NSS_BUFLEN_GROUP": 1024,
"NSS_BUFLEN_PASSWD": 1024,
"NZERO": 20,
"OPEN_MAX": 1024,
"PAGESIZE": 4096,
"PAGE_SIZE": 4096,
"PASS_MAX": 8192,
"PTHREAD_DESTRUCTOR_ITERATIONS": 4,
"PTHREAD_KEYS_MAX": 1024,
"PTHREAD_STACK_MIN": 16384,
"PTHREAD_THREADS_MAX": null,
"SCHAR_MAX": 127,
"SCHAR_MIN": -128,
"SHRT_MAX": 32767,
"SHRT_MIN": -32768,
"SSIZE_MAX": 32767,
"TTY_NAME_MAX": 32,
"TZNAME_MAX": 6,
"UCHAR_MAX": 255,
"UINT_MAX": 4294967295,
"UIO_MAXIOV": 1024,
"ULONG_MAX": 18446744073709551615,
"USHRT_MAX": 65535,
"WORD_BIT": 32,
"_AVPHYS_PAGES": 955772,
"_NPROCESSORS_CONF": 8,
"_NPROCESSORS_ONLN": 8,
"_PHYS_PAGES": 4027635,
"_POSIX_ARG_MAX": 2097152,
"_POSIX_ASYNCHRONOUS_IO": 200809,
"_POSIX_CHILD_MAX": 62844,
"_POSIX_FSYNC": 200809,
"_POSIX_JOB_CONTROL": 1,
"_POSIX_MAPPED_FILES": 200809,
"_POSIX_MEMLOCK": 200809,
"_POSIX_MEMLOCK_RANGE": 200809,
"_POSIX_MEMORY_PROTECTION": 200809,
"_POSIX_MESSAGE_PASSING": 200809,
"_POSIX_NGROUPS_MAX": 65536,
"_POSIX_OPEN_MAX": 1024,
"_POSIX_PII": null,
"_POSIX_PII_INTERNET": null,
"_POSIX_PII_INTERNET_DGRAM": null,
"_POSIX_PII_INTERNET_STREAM": null,
"_POSIX_PII_OSI": null,
"_POSIX_PII_OSI_CLTS": null,
"_POSIX_PII_OSI_COTS": null,
"_POSIX_PII_OSI_M": null,
"_POSIX_PII_SOCKET": null,
"_POSIX_PII_XTI": null,
"_POSIX_POLL": null,
"_POSIX_PRIORITIZED_IO": 200809,
"_POSIX_PRIORITY_SCHEDULING": 200809,
"_POSIX_REALTIME_SIGNALS": 200809,
"_POSIX_SAVED_IDS": 1,
"_POSIX_SELECT": null,
"_POSIX_SEMAPHORES": 200809,
"_POSIX_SHARED_MEMORY_OBJECTS": 200809,
"_POSIX_SSIZE_MAX": 32767,
"_POSIX_STREAM_MAX": 16,
"_POSIX_SYNCHRONIZED_IO": 200809,
"_POSIX_THREADS": 200809,
"_POSIX_THREAD_ATTR_STACKADDR": 200809,
"_POSIX_THREAD_ATTR_STACKSIZE": 200809,
"_POSIX_THREAD_PRIORITY_SCHEDULING": 200809,
"_POSIX_THREAD_PRIO_INHERIT": 200809,
"_POSIX_THREAD_PRIO_PROTECT": 200809,
"_POSIX_THREAD_ROBUST_PRIO_INHERIT": null,
"_POSIX_THREAD_ROBUST_PRIO_PROTECT": null,
"_POSIX_THREAD_PROCESS_SHARED": 200809,
"_POSIX_THREAD_SAFE_FUNCTIONS": 200809,
"_POSIX_TIMERS": 200809,
"TIMER_MAX": null,
"_POSIX_TZNAME_MAX": 6,
"_POSIX_VERSION": 200809,
"_T_IOV_MAX": null,
"_XOPEN_CRYPT": 1,
"_XOPEN_ENH_I18N": 1,
"_XOPEN_LEGACY": 1,
"_XOPEN_REALTIME": 1,
"_XOPEN_REALTIME_THREADS": 1,
"_XOPEN_SHM": 1,
"_XOPEN_UNIX": 1,
"_XOPEN_VERSION": 700,
"_XOPEN_XCU_VERSION": 4,
"_XOPEN_XPG2": 1,
"_XOPEN_XPG3": 1,
"_XOPEN_XPG4": 1,
"BC_BASE_MAX": 99,
"BC_DIM_MAX": 2048,
"BC_SCALE_MAX": 99,
"BC_STRING_MAX": 1000,
"CHARCLASS_NAME_MAX": 2048,
"COLL_WEIGHTS_MAX": 255,
"EQUIV_CLASS_MAX": null,
"EXPR_NEST_MAX": 32,
"LINE_MAX": 2048,
"POSIX2_BC_BASE_MAX": 99,
"POSIX2_BC_DIM_MAX": 2048,
"POSIX2_BC_SCALE_MAX": 99,
"POSIX2_BC_STRING_MAX": 1000,
"POSIX2_CHAR_TERM": 200809,
"POSIX2_COLL_WEIGHTS_MAX": 255,
"POSIX2_C_BIND": 200809,
"POSIX2_C_DEV": 200809,
"POSIX2_C_VERSION": 200809,
"POSIX2_EXPR_NEST_MAX": 32,
"POSIX2_FORT_DEV": null,
"POSIX2_FORT_RUN": null,
"_POSIX2_LINE_MAX": 2048,
"POSIX2_LINE_MAX": 2048,
"POSIX2_LOCALEDEF": 200809,
"POSIX2_RE_DUP_MAX": 32767,
"POSIX2_SW_DEV": 200809,
"POSIX2_UPE": null,
"POSIX2_VERSION": 200809,
"RE_DUP_MAX": 32767,
"PATH": "/usr/bin",
"CS_PATH": "/usr/bin",
"LFS_CFLAGS": null,
"LFS_LDFLAGS": null,
"LFS_LIBS": null,
"LFS_LINTFLAGS": null,
"LFS64_CFLAGS": "-D_LARGEFILE64_SOURCE",
"LFS64_LDFLAGS": null,
"LFS64_LIBS": null,
"LFS64_LINTFLAGS": "-D_LARGEFILE64_SOURCE",
"_XBS5_WIDTH_RESTRICTED_ENVS": "XBS5_LP64_OFF64",
"XBS5_WIDTH_RESTRICTED_ENVS": "XBS5_LP64_OFF64",
"_XBS5_ILP32_OFF32": null,
"XBS5_ILP32_OFF32_CFLAGS": null,
"XBS5_ILP32_OFF32_LDFLAGS": null,
"XBS5_ILP32_OFF32_LIBS": null,
"XBS5_ILP32_OFF32_LINTFLAGS": null,
"_XBS5_ILP32_OFFBIG": null,
"XBS5_ILP32_OFFBIG_CFLAGS": null,
"XBS5_ILP32_OFFBIG_LDFLAGS": null,
"XBS5_ILP32_OFFBIG_LIBS": null,
"XBS5_ILP32_OFFBIG_LINTFLAGS": null,
"_XBS5_LP64_OFF64": 1,
"XBS5_LP64_OFF64_CFLAGS": "-m64",
"XBS5_LP64_OFF64_LDFLAGS": "-m64",
"XBS5_LP64_OFF64_LIBS": null,
"XBS5_LP64_OFF64_LINTFLAGS": null,
"_XBS5_LPBIG_OFFBIG": null,
"XBS5_LPBIG_OFFBIG_CFLAGS": null,
"XBS5_LPBIG_OFFBIG_LDFLAGS": null,
"XBS5_LPBIG_OFFBIG_LIBS": null,
"XBS5_LPBIG_OFFBIG_LINTFLAGS": null,
"_POSIX_V6_ILP32_OFF32": null,
"POSIX_V6_ILP32_OFF32_CFLAGS": null,
"POSIX_V6_ILP32_OFF32_LDFLAGS": null,
"POSIX_V6_ILP32_OFF32_LIBS": null,
"POSIX_V6_ILP32_OFF32_LINTFLAGS": null,
"_POSIX_V6_WIDTH_RESTRICTED_ENVS": "POSIX_V6_LP64_OFF64",
"POSIX_V6_WIDTH_RESTRICTED_ENVS": "POSIX_V6_LP64_OFF64",
"_POSIX_V6_ILP32_OFFBIG": null,
"POSIX_V6_ILP32_OFFBIG_CFLAGS": null,
"POSIX_V6_ILP32_OFFBIG_LDFLAGS": null,
"POSIX_V6_ILP32_OFFBIG_LIBS": null,
"POSIX_V6_ILP32_OFFBIG_LINTFLAGS": null,
"_POSIX_V6_LP64_OFF64": 1,
"POSIX_V6_LP64_OFF64_CFLAGS": "-m64",
"POSIX_V6_LP64_OFF64_LDFLAGS": "-m64",
"POSIX_V6_LP64_OFF64_LIBS": null,
"POSIX_V6_LP64_OFF64_LINTFLAGS": null,
"_POSIX_V6_LPBIG_OFFBIG": null,
"POSIX_V6_LPBIG_OFFBIG_CFLAGS": null,
"POSIX_V6_LPBIG_OFFBIG_LDFLAGS": null,
"POSIX_V6_LPBIG_OFFBIG_LIBS": null,
"POSIX_V6_LPBIG_OFFBIG_LINTFLAGS": null,
"_POSIX_V7_ILP32_OFF32": null,
"POSIX_V7_ILP32_OFF32_CFLAGS": null,
"POSIX_V7_ILP32_OFF32_LDFLAGS": null,
"POSIX_V7_ILP32_OFF32_LIBS": null,
"POSIX_V7_ILP32_OFF32_LINTFLAGS": null,
"_POSIX_V7_WIDTH_RESTRICTED_ENVS": "POSIX_V7_LP64_OFF64",
"POSIX_V7_WIDTH_RESTRICTED_ENVS": "POSIX_V7_LP64_OFF64",
"_POSIX_V7_ILP32_OFFBIG": null,
"POSIX_V7_ILP32_OFFBIG_CFLAGS": null,
"POSIX_V7_ILP32_OFFBIG_LDFLAGS": null,
"POSIX_V7_ILP32_OFFBIG_LIBS": null,
"POSIX_V7_ILP32_OFFBIG_LINTFLAGS": null,
"_POSIX_V7_LP64_OFF64": 1,
"POSIX_V7_LP64_OFF64_CFLAGS": "-m64",
"POSIX_V7_LP64_OFF64_LDFLAGS": "-m64",
"POSIX_V7_LP64_OFF64_LIBS": null,
"POSIX_V7_LP64_OFF64_LINTFLAGS": null,
"_POSIX_V7_LPBIG_OFFBIG": null,
"POSIX_V7_LPBIG_OFFBIG_CFLAGS": null,
"POSIX_V7_LPBIG_OFFBIG_LDFLAGS": null,
"POSIX_V7_LPBIG_OFFBIG_LIBS": null,
"POSIX_V7_LPBIG_OFFBIG_LINTFLAGS": null,
"_POSIX_ADVISORY_INFO": 200809,
"_POSIX_BARRIERS": 200809,
"_POSIX_BASE": null,
"_POSIX_C_LANG_SUPPORT": null,
"_POSIX_C_LANG_SUPPORT_R": null,
"_POSIX_CLOCK_SELECTION": 200809,
"_POSIX_CPUTIME": 200809,
"_POSIX_THREAD_CPUTIME": 200809,
"_POSIX_DEVICE_SPECIFIC": null,
"_POSIX_DEVICE_SPECIFIC_R": null,
"_POSIX_FD_MGMT": null,
"_POSIX_FIFO": null,
"_POSIX_PIPE": null,
"_POSIX_FILE_ATTRIBUTES": null,
"_POSIX_FILE_LOCKING": null,
"_POSIX_FILE_SYSTEM": null,
"_POSIX_MONOTONIC_CLOCK": 200809,
"_POSIX_MULTI_PROCESS": null,
"_POSIX_SINGLE_PROCESS": null,
"_POSIX_NETWORKING": null,
"_POSIX_READER_WRITER_LOCKS": 200809,
"_POSIX_SPIN_LOCKS": 200809,
"_POSIX_REGEXP": 1,
"_REGEX_VERSION": null,
"_POSIX_SHELL": 1,
"_POSIX_SIGNALS": null,
"_POSIX_SPAWN": 200809,
"_POSIX_SPORADIC_SERVER": null,
"_POSIX_THREAD_SPORADIC_SERVER": null,
"_POSIX_SYSTEM_DATABASE": null,
"_POSIX_SYSTEM_DATABASE_R": null,
"_POSIX_TIMEOUTS": 200809,
"_POSIX_TYPED_MEMORY_OBJECTS": null,
"_POSIX_USER_GROUPS": null,
"_POSIX_USER_GROUPS_R": null,
"POSIX2_PBS": null,
"POSIX2_PBS_ACCOUNTING": null,
"POSIX2_PBS_LOCATE": null,
"POSIX2_PBS_TRACK": null,
"POSIX2_PBS_MESSAGE": null,
"SYMLOOP_MAX": null,
"STREAM_MAX": 16,
"AIO_LISTIO_MAX": null,
"AIO_MAX": null,
"AIO_PRIO_DELTA_MAX": 20,
"DELAYTIMER_MAX": 2147483647,
"HOST_NAME_MAX": 64,
"LOGIN_NAME_MAX": 256,
"MQ_OPEN_MAX": null,
"MQ_PRIO_MAX": 32768,
"_POSIX_DEVICE_IO": null,
"_POSIX_TRACE": null,
"_POSIX_TRACE_EVENT_FILTER": null,
"_POSIX_TRACE_INHERIT": null,
"_POSIX_TRACE_LOG": null,
"RTSIG_MAX": 32,
"SEM_NSEMS_MAX": null,
"SEM_VALUE_MAX": 2147483647,
"SIGQUEUE_MAX": 62844,
"FILESIZEBITS": 64,
"POSIX_ALLOC_SIZE_MIN": 4096,
"POSIX_REC_INCR_XFER_SIZE": null,
"POSIX_REC_MAX_XFER_SIZE": null,
"POSIX_REC_MIN_XFER_SIZE": 4096,
"POSIX_REC_XFER_ALIGN": 4096,
"SYMLINK_MAX": null,
"GNU_LIBC_VERSION": "glibc 2.24",
"GNU_LIBPTHREAD_VERSION": "NPTL 2.24",
"POSIX2_SYMLINKS": 1,
"LEVEL1_ICACHE_SIZE": 32768,
"LEVEL1_ICACHE_ASSOC": 8,
"LEVEL1_ICACHE_LINESIZE": 64,
"LEVEL1_DCACHE_SIZE": 32768,
"LEVEL1_DCACHE_ASSOC": 8,
"LEVEL1_DCACHE_LINESIZE": 64,
"LEVEL2_CACHE_SIZE": 262144,
"LEVEL2_CACHE_ASSOC": 8,
"LEVEL2_CACHE_LINESIZE": 64,
"LEVEL3_CACHE_SIZE": 6291456,
"LEVEL3_CACHE_ASSOC": 12,
"LEVEL3_CACHE_LINESIZE": 64,
"LEVEL4_CACHE_SIZE": 0,
"LEVEL4_CACHE_ASSOC": 0,
"LEVEL4_CACHE_LINESIZE": 0,
"IPV6": 200809,
"RAW_SOCKETS": 200809,
"_POSIX_IPV6": 200809,
"_POSIX_RAW_SOCKETS": 200809
},
"init_package": "systemd",
"shells": [
"/bin/sh",
"/bin/bash",
"/sbin/nologin",
"/usr/bin/sh",
"/usr/bin/bash",
"/usr/sbin/nologin",
"/usr/bin/zsh",
"/bin/zsh"
],
"ohai_time": 1492535225.41052,
"cloud_v2": null,
"cloud": null
}
""" # noqa
class TestOhaiCollector(BaseFactsTest):
    """Exercise OhaiFactCollector against canned ohai output and failure modes."""

    __test__ = True
    gather_subset = ['!all', 'ohai']
    valid_subsets = ['ohai']
    fact_namespace = 'ansible_ohai'
    collector_class = OhaiFactCollector

    def _mock_module(self):
        """Build a fake AnsibleModule whose run_command yields the fixture JSON."""
        fake = Mock()
        fake.params = {
            'gather_subset': self.gather_subset,
            'gather_timeout': 10,
            'filter': '*',
        }
        fake.get_bin_path = Mock(return_value='/not/actually/ohai')
        fake.run_command = Mock(return_value=(0, ohai_json_output, ''))
        return fake

    @patch('ansible.module_utils.facts.other.ohai.OhaiFactCollector.get_ohai_output')
    def test_bogus_json(self, mock_get_ohai_output):
        """Unparseable ohai output should collect to an empty dict, not raise."""
        module = self._mock_module()
        # Truncated / invalid JSON from ohai.
        mock_get_ohai_output.return_value = '{'
        collector = self.collector_class()
        facts = collector.collect(module=module)
        self.assertIsInstance(facts, dict)
        self.assertEqual(facts, {})

    @patch('ansible.module_utils.facts.other.ohai.OhaiFactCollector.run_ohai')
    def test_ohai_non_zero_return_code(self, mock_run_ohai):
        """A failing ohai run (rc != 0) should produce no 'ohai' facts at all."""
        module = self._mock_module()
        mock_run_ohai.return_value = (1, '{}', '')
        collector = self.collector_class()
        facts = collector.collect(module=module)
        self.assertIsInstance(facts, dict)
        # This assumes no 'ohai' entry at all is correct
        self.assertNotIn('ohai', facts)
        self.assertEqual(facts, {})
| 158,776
|
Python
|
.py
| 6,667
| 14.837258
| 651
| 0.419722
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,939
|
test_aix_processor.py
|
ansible_ansible/test/units/module_utils/facts/hardware/test_aix_processor.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2022 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.facts.hardware import aix
import pytest
from . aix_data import AIX_PROCESSOR_TEST_SCENARIOS
@pytest.mark.parametrize('scenario', AIX_PROCESSOR_TEST_SCENARIOS)
def test_get_cpu_info(mocker, scenario):
    """get_cpu_facts() should match each canned lsdev/lsattr scenario."""
    # One queued (rc, stdout, stderr) tuple per expected run_command call.
    side_effects = [
        (0, "\n".join(scenario['lsdev_output']), ''),
        (0, "\n".join(scenario['lsattr_type_output']), ''),
        (0, "\n".join(scenario['lsattr_smt_threads_output']), ''),
    ]
    fake_module = mocker.Mock()
    fake_module.run_command = mocker.Mock(side_effect=side_effects)
    hardware = aix.AIXHardware(module=fake_module)
    assert hardware.get_cpu_facts() == scenario['expected_result']
| 831
|
Python
|
.py
| 18
| 42.222222
| 92
| 0.695545
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,940
|
test_sunos_get_uptime_facts.py
|
ansible_ansible/test/units/module_utils/facts/hardware/test_sunos_get_uptime_facts.py
|
from __future__ import annotations
import time
from ansible.module_utils.facts.hardware import sunos
def test_sunos_get_uptime_facts(mocker):
    """uptime_seconds should be the delta between 'now' and kstat boot_time."""
    kstat_output = '\nunix:0:system_misc:boot_time\t1548249689\n'
    module = mocker.patch('ansible.module_utils.basic.AnsibleModule')()
    module.run_command.return_value = (0, kstat_output, '')
    hardware = sunos.SunOSHardware(module)
    # Freeze the clock so the expected delta is deterministic.
    mocker.patch('time.time', return_value=1567052602.5089788)
    expected_uptime = int(time.time()) - 1548249689
    facts = hardware.get_uptime_facts()
    assert facts['uptime_seconds'] == expected_uptime
| 608
|
Python
|
.py
| 13
| 42.538462
| 74
| 0.73854
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,941
|
test_linux.py
|
ansible_ansible/test/units/module_utils/facts/hardware/test_linux.py
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import unittest
from unittest.mock import Mock, patch
from ansible.module_utils.facts import timeout
from ansible.module_utils.facts.hardware import linux
from . linux_data import LSBLK_OUTPUT, LSBLK_OUTPUT_2, LSBLK_UUIDS, MTAB, MTAB_ENTRIES, BIND_MOUNTS, STATVFS_INFO, UDEVADM_UUID, UDEVADM_OUTPUT, SG_INQ_OUTPUTS
with open(os.path.join(os.path.dirname(__file__), '../fixtures/findmount_output.txt')) as f:
FINDMNT_OUTPUT = f.read()
def mock_get_mount_size(mountpoint):
return STATVFS_INFO.get(mountpoint, {})
class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
# FIXME: mock.patch instead
def setUp(self):
timeout.GATHER_TIMEOUT = 10
def tearDown(self):
timeout.GATHER_TIMEOUT = None
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._mtab_entries', return_value=MTAB_ENTRIES)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._find_bind_mounts', return_value=BIND_MOUNTS)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._lsblk_uuid', return_value=LSBLK_UUIDS)
@patch('ansible.module_utils.facts.hardware.linux.get_mount_size', side_effect=mock_get_mount_size)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._udevadm_uuid', return_value=UDEVADM_UUID)
def test_get_mount_facts(self,
mock_get_mount_size,
mock_lsblk_uuid,
mock_find_bind_mounts,
mock_mtab_entries,
mock_udevadm_uuid):
module = Mock()
# Returns a LinuxHardware-ish
lh = linux.LinuxHardware(module=module, load_on_init=False)
# Nothing returned, just self.facts modified as a side effect
mount_facts = lh.get_mount_facts()
self.assertIsInstance(mount_facts, dict)
self.assertIn('mounts', mount_facts)
self.assertIsInstance(mount_facts['mounts'], list)
self.assertIsInstance(mount_facts['mounts'][0], dict)
home_expected = {'block_available': 1001578731,
'block_size': 4096,
'block_total': 105871006,
'block_used': 5713133,
'device': '/dev/mapper/fedora_dhcp129--186-home',
'dump': 0,
'fstype': 'ext4',
'inode_available': 26860880,
'inode_total': 26902528,
'inode_used': 41648,
'mount': '/home',
'options': 'rw,seclabel,relatime,data=ordered',
'passno': 0,
'size_available': 410246647808,
'size_total': 433647640576,
'uuid': 'N/A'}
home_info = [x for x in mount_facts['mounts'] if x['mount'] == '/home'][0]
self.maxDiff = 4096
self.assertDictEqual(home_info, home_expected)
@patch('ansible.module_utils.facts.hardware.linux.get_file_content', return_value=MTAB)
def test_get_mtab_entries(self, mock_get_file_content):
module = Mock()
lh = linux.LinuxHardware(module=module, load_on_init=False)
mtab_entries = lh._mtab_entries()
self.assertIsInstance(mtab_entries, list)
self.assertIsInstance(mtab_entries[0], list)
self.assertEqual(len(mtab_entries), 38)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(0, FINDMNT_OUTPUT, ''))
def test_find_bind_mounts(self, mock_run_findmnt):
module = Mock()
lh = linux.LinuxHardware(module=module, load_on_init=False)
bind_mounts = lh._find_bind_mounts()
# If bind_mounts becomes another seq type, feel free to change
self.assertIsInstance(bind_mounts, set)
self.assertEqual(len(bind_mounts), 1)
self.assertIn('/not/a/real/bind_mount', bind_mounts)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(37, '', ''))
def test_find_bind_mounts_non_zero(self, mock_run_findmnt):
module = Mock()
lh = linux.LinuxHardware(module=module, load_on_init=False)
bind_mounts = lh._find_bind_mounts()
self.assertIsInstance(bind_mounts, set)
self.assertEqual(len(bind_mounts), 0)
def test_find_bind_mounts_no_findmnts(self):
module = Mock()
module.get_bin_path = Mock(return_value=None)
lh = linux.LinuxHardware(module=module, load_on_init=False)
bind_mounts = lh._find_bind_mounts()
self.assertIsInstance(bind_mounts, set)
self.assertEqual(len(bind_mounts), 0)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT, ''))
def test_lsblk_uuid(self, mock_run_lsblk):
module = Mock()
lh = linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertIn(b'/dev/loop9', lsblk_uuids)
self.assertIn(b'/dev/sda1', lsblk_uuids)
self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(37, LSBLK_OUTPUT, ''))
def test_lsblk_uuid_non_zero(self, mock_run_lsblk):
module = Mock()
lh = linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertEqual(len(lsblk_uuids), 0)
def test_lsblk_uuid_no_lsblk(self):
module = Mock()
module.get_bin_path = Mock(return_value=None)
lh = linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertEqual(len(lsblk_uuids), 0)
@patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT_2, ''))
def test_lsblk_uuid_dev_with_space_in_name(self, mock_run_lsblk):
module = Mock()
lh = linux.LinuxHardware(module=module, load_on_init=False)
lsblk_uuids = lh._lsblk_uuid()
self.assertIsInstance(lsblk_uuids, dict)
self.assertIn(b'/dev/loop0', lsblk_uuids)
self.assertIn(b'/dev/sda1', lsblk_uuids)
self.assertEqual(lsblk_uuids[b'/dev/mapper/an-example-mapper with a space in the name'], b'84639acb-013f-4d2f-9392-526a572b4373')
self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')
def test_udevadm_uuid(self):
module = Mock()
module.run_command = Mock(return_value=(0, UDEVADM_OUTPUT, '')) # (rc, out, err)
lh = linux.LinuxHardware(module=module, load_on_init=False)
udevadm_uuid = lh._udevadm_uuid('mock_device')
self.assertEqual(udevadm_uuid, '57b1a3e7-9019-4747-9809-7ec52bba9179')
def test_get_sg_inq_serial(self):
# Valid outputs
for sq_inq_output in SG_INQ_OUTPUTS:
module = Mock()
module.run_command = Mock(return_value=(0, sq_inq_output, '')) # (rc, out, err)
lh = linux.LinuxHardware(module=module, load_on_init=False)
sg_inq_serial = lh._get_sg_inq_serial('/usr/bin/sg_inq', 'nvme0n1')
self.assertEqual(sg_inq_serial, 'vol0123456789')
# Invalid output
module = Mock()
module.run_command = Mock(return_value=(0, '', '')) # (rc, out, err)
lh = linux.LinuxHardware(module=module, load_on_init=False)
sg_inq_serial = lh._get_sg_inq_serial('/usr/bin/sg_inq', 'nvme0n1')
self.assertEqual(sg_inq_serial, None)
# Non zero rc
module = Mock()
module.run_command = Mock(return_value=(42, '', 'Error 42')) # (rc, out, err)
lh = linux.LinuxHardware(module=module, load_on_init=False)
sg_inq_serial = lh._get_sg_inq_serial('/usr/bin/sg_inq', 'nvme0n1')
self.assertEqual(sg_inq_serial, None)
| 8,826
|
Python
|
.py
| 160
| 45.43125
| 159
| 0.647816
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,942
|
test_linux_get_cpu_info.py
|
ansible_ansible/test/units/module_utils/facts/hardware/test_linux_get_cpu_info.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.facts.hardware import linux
from . linux_data import CPU_INFO_TEST_SCENARIOS
def test_get_cpu_info(mocker):
module = mocker.Mock()
inst = linux.LinuxHardware(module)
mocker.patch('os.path.exists', return_value=False)
mocker.patch('os.access', return_value=True)
for test in CPU_INFO_TEST_SCENARIOS:
mocker.patch('ansible.module_utils.facts.hardware.linux.get_file_lines', side_effect=[[], test['cpuinfo']])
mocker.patch('os.sched_getaffinity', create=True, return_value=test['sched_getaffinity'])
module.run_command.return_value = (0, test['nproc_out'], '')
collected_facts = {'ansible_architecture': test['architecture']}
assert test['expected_result'] == inst.get_cpu_facts(collected_facts=collected_facts)
def test_get_cpu_info_nproc(mocker):
module = mocker.Mock()
inst = linux.LinuxHardware(module)
mocker.patch('os.path.exists', return_value=False)
mocker.patch('os.access', return_value=True)
for test in CPU_INFO_TEST_SCENARIOS:
mocker.patch('ansible.module_utils.facts.hardware.linux.get_file_lines', side_effect=[[], test['cpuinfo']])
mocker.patch('os.sched_getaffinity', create=True, side_effect=AttributeError)
mocker.patch.object(module, 'get_bin_path', return_value='/usr/bin/nproc')
module.run_command.return_value = (0, test['nproc_out'], '')
collected_facts = {'ansible_architecture': test['architecture']}
assert test['expected_result'] == inst.get_cpu_facts(collected_facts=collected_facts)
def test_get_cpu_info_missing_arch(mocker):
module = mocker.Mock()
inst = linux.LinuxHardware(module)
# ARM, Power, and zSystems will report incorrect processor count if architecture is not available
mocker.patch('os.path.exists', return_value=False)
mocker.patch('os.access', return_value=True)
for test in CPU_INFO_TEST_SCENARIOS:
mocker.patch('ansible.module_utils.facts.hardware.linux.get_file_lines', side_effect=[[], test['cpuinfo']])
mocker.patch('os.sched_getaffinity', create=True, return_value=test['sched_getaffinity'])
module.run_command.return_value = (0, test['nproc_out'], '')
test_result = inst.get_cpu_facts()
if test['architecture'].startswith(('armv', 'aarch', 'ppc', 's390')):
assert test['expected_result'] != test_result
else:
assert test['expected_result'] == test_result
| 2,661
|
Python
|
.py
| 44
| 54
| 115
| 0.696923
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,943
|
linux_data.py
|
ansible_ansible/test/units/module_utils/facts/hardware/linux_data.py
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
def read_lines(path):
with open(path) as file:
return file.readlines()
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop1 7c1b0f30-cf34-459f-9a70-2612f82b870a
/dev/loop9 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop9 7c1b4444-cf34-459f-9a70-2612f82b870a
/dev/mapper/docker-253:1-1050967-pool
/dev/loop2
/dev/mapper/docker-253:1-1050967-pool
"""
LSBLK_OUTPUT_2 = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/mapper/an-example-mapper with a space in the name 84639acb-013f-4d2f-9392-526a572b4373
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
"""
LSBLK_UUIDS = {'/dev/sda1': '66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK'}
UDEVADM_UUID = 'N/A'
UDEVADM_OUTPUT = """
UDEV_LOG=3
DEVPATH=/devices/pci0000:00/0000:00:07.0/virtio2/block/vda/vda1
MAJOR=252
MINOR=1
DEVNAME=/dev/vda1
DEVTYPE=partition
SUBSYSTEM=block
MPATH_SBIN_PATH=/sbin
ID_PATH=pci-0000:00:07.0-virtio-pci-virtio2
ID_PART_TABLE_TYPE=dos
ID_FS_UUID=57b1a3e7-9019-4747-9809-7ec52bba9179
ID_FS_UUID_ENC=57b1a3e7-9019-4747-9809-7ec52bba9179
ID_FS_VERSION=1.0
ID_FS_TYPE=ext4
ID_FS_USAGE=filesystem
LVM_SBIN_PATH=/sbin
DEVLINKS=/dev/block/252:1 /dev/disk/by-path/pci-0000:00:07.0-virtio-pci-virtio2-part1 /dev/disk/by-uuid/57b1a3e7-9019-4747-9809-7ec52bba9179
"""
MTAB = """
sysfs /sys sysfs rw,seclabel,nosuid,nodev,noexec,relatime 0 0
proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
devtmpfs /dev devtmpfs rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755 0 0
securityfs /sys/kernel/security securityfs rw,nosuid,nodev,noexec,relatime 0 0
tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0
devpts /dev/pts devpts rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
tmpfs /run tmpfs rw,seclabel,nosuid,nodev,mode=755 0 0
tmpfs /sys/fs/cgroup tmpfs ro,seclabel,nosuid,nodev,noexec,mode=755 0 0
cgroup /sys/fs/cgroup/systemd cgroup rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd 0 0
pstore /sys/fs/pstore pstore rw,seclabel,nosuid,nodev,noexec,relatime 0 0
cgroup /sys/fs/cgroup/devices cgroup rw,nosuid,nodev,noexec,relatime,devices 0 0
cgroup /sys/fs/cgroup/freezer cgroup rw,nosuid,nodev,noexec,relatime,freezer 0 0
cgroup /sys/fs/cgroup/memory cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0
cgroup /sys/fs/cgroup/pids cgroup rw,nosuid,nodev,noexec,relatime,pids 0 0
cgroup /sys/fs/cgroup/blkio cgroup rw,nosuid,nodev,noexec,relatime,blkio 0 0
cgroup /sys/fs/cgroup/cpuset cgroup rw,nosuid,nodev,noexec,relatime,cpuset 0 0
cgroup /sys/fs/cgroup/cpu,cpuacct cgroup rw,nosuid,nodev,noexec,relatime,cpu,cpuacct 0 0
cgroup /sys/fs/cgroup/hugetlb cgroup rw,nosuid,nodev,noexec,relatime,hugetlb 0 0
cgroup /sys/fs/cgroup/perf_event cgroup rw,nosuid,nodev,noexec,relatime,perf_event 0 0
cgroup /sys/fs/cgroup/net_cls,net_prio cgroup rw,nosuid,nodev,noexec,relatime,net_cls,net_prio 0 0
configfs /sys/kernel/config configfs rw,relatime 0 0
/dev/mapper/fedora_dhcp129--186-root / ext4 rw,seclabel,relatime,data=ordered 0 0
selinuxfs /sys/fs/selinux selinuxfs rw,relatime 0 0
systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
/dev/loop0 /var/lib/machines btrfs rw,seclabel,relatime,space_cache,subvolid=5,subvol=/ 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
/dev/mapper/fedora_dhcp129--186-home /home ext4 rw,seclabel,relatime,data=ordered 0 0
tmpfs /run/user/1000 tmpfs rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000 0 0
gvfsd-fuse /run/user/1000/gvfs fuse.gvfsd-fuse rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
fusectl /sys/fs/fuse/connections fusectl rw,relatime 0 0
grimlock.g.a: /home/adrian/sshfs-grimlock fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:test_path/path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote-2 fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:/mnt/data/foto's /home/adrian/fotos fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
"""
MTAB_ENTRIES = [
[
'sysfs',
'/sys',
'sysfs',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
[
'devtmpfs',
'/dev',
'devtmpfs',
'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755',
'0',
'0'
],
[
'securityfs',
'/sys/kernel/security',
'securityfs',
'rw,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
[
'devpts',
'/dev/pts',
'devpts',
'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000',
'0',
'0'
],
['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
[
'tmpfs',
'/sys/fs/cgroup',
'tmpfs',
'ro,seclabel,nosuid,nodev,noexec,mode=755',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/systemd',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd',
'0',
'0'
],
[
'pstore',
'/sys/fs/pstore',
'pstore',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/devices',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,devices',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/freezer',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,freezer',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/memory',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,memory',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/pids',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,pids',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/blkio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,blkio',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpuset',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpuset',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpu,cpuacct',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpu,cpuacct',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/hugetlb',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,hugetlb',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/perf_event',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,perf_event',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/net_cls,net_prio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,net_cls,net_prio',
'0',
'0'
],
['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-root',
'/',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
[
'systemd-1',
'/proc/sys/fs/binfmt_misc',
'autofs',
'rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct',
'0',
'0'
],
['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
[
'hugetlbfs',
'/dev/hugepages',
'hugetlbfs',
'rw,seclabel,relatime',
'0',
'0'
],
['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
[
'/dev/loop0',
'/var/lib/machines',
'btrfs',
'rw,seclabel,relatime,space_cache,subvolid=5,subvol=/',
'0',
'0'
],
['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# A 'none' fstype
['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# lets assume this is a bindmount
['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-home',
'/home',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
[
'tmpfs',
'/run/user/1000',
'tmpfs',
'rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000',
'0',
'0'
],
[
'gvfsd-fuse',
'/run/user/1000/gvfs',
'fuse.gvfsd-fuse',
'rw,nosuid,nodev,relatime,user_id=1000,group_id=1000',
'0',
'0'
],
['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0']]
STATVFS_INFO = {'/': {'block_available': 10192323,
'block_size': 4096,
'block_total': 12868728,
'block_used': 2676405,
'inode_available': 3061699,
'inode_total': 3276800,
'inode_used': 215101,
'size_available': 41747755008,
'size_total': 52710309888},
'/not/a/real/bind_mount': {},
'/home': {'block_available': 1001578731,
'block_size': 4096,
'block_total': 105871006,
'block_used': 5713133,
'inode_available': 26860880,
'inode_total': 26902528,
'inode_used': 41648,
'size_available': 410246647808,
'size_total': 433647640576},
'/var/lib/machines': {'block_available': 10192316,
'block_size': 4096,
'block_total': 12868728,
'block_used': 2676412,
'inode_available': 3061699,
'inode_total': 3276800,
'inode_used': 215101,
'size_available': 41747726336,
'size_total': 52710309888},
'/boot': {'block_available': 187585,
'block_size': 4096,
'block_total': 249830,
'block_used': 62245,
'inode_available': 65096,
'inode_total': 65536,
'inode_used': 440,
'size_available': 768348160,
'size_total': 1023303680}
}
# ['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
BIND_MOUNTS = ['/not/a/real/bind_mount']
CPU_INFO_TEST_SCENARIOS = [
{
'architecture': 'armv61',
'nproc_out': 1,
'sched_getaffinity': set([0]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv6-rev7-1cpu-cpuinfo')),
'expected_result': {
'processor': ['0', 'ARMv6-compatible processor rev 7 (v6l)'],
'processor_cores': 1,
'processor_count': 1,
'processor_nproc': 1,
'processor_threads_per_core': 1,
'processor_vcpus': 1},
},
{
'architecture': 'armv71',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev4-4cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 4 (v7l)',
'1', 'ARMv7 Processor rev 4 (v7l)',
'2', 'ARMv7 Processor rev 4 (v7l)',
'3', 'ARMv7 Processor rev 4 (v7l)',
],
'processor_cores': 1,
'processor_count': 4,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'processor_vcpus': 4},
},
{
'architecture': 'aarch64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/aarch64-4cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'AArch64 Processor rev 4 (aarch64)',
'1', 'AArch64 Processor rev 4 (aarch64)',
'2', 'AArch64 Processor rev 4 (aarch64)',
'3', 'AArch64 Processor rev 4 (aarch64)',
],
'processor_cores': 1,
'processor_count': 4,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'processor_vcpus': 4},
},
{
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-4cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
'1', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
'2', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
'3', 'AuthenticAMD', 'Dual-Core AMD Opteron(tm) Processor 2216',
],
'processor_cores': 2,
'processor_count': 2,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'flags': [
'fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce',
'cx8', 'apic', 'sep', 'mtrr', 'pge', 'mca', 'cmov', 'pat',
'pse36', 'clflush', 'mmx', 'fxsr', 'sse', 'sse2', 'ht',
'syscall', 'nx', 'mmxext', 'fxsr_opt', 'rdtscp', 'lm',
'3dnowext', '3dnow', 'art', 'rep_good', 'nopl', 'extd_apicid',
'pni', 'cx16', 'lahf_lm', 'cmp_legacy', 'svm', 'extapic',
'cr8_legacy', 'retpoline_amd', 'vmmcall'
],
'processor_vcpus': 4},
},
{
'architecture': 'x86_64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-8cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'1', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'2', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'3', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'4', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'5', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'6', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
'7', 'GenuineIntel', 'Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz',
],
'processor_cores': 4,
'processor_count': 1,
'processor_nproc': 4,
'processor_threads_per_core': 2,
'flags': [
'fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce',
'cx8', 'apic', 'sep', 'mtrr', 'pge', 'mca', 'cmov',
'pat', 'pse36', 'clflush', 'dts', 'acpi', 'mmx', 'fxsr',
'sse', 'sse2', 'ss', 'ht', 'tm', 'pbe', 'syscall', 'nx',
'pdpe1gb', 'rdtscp', 'lm', 'constant_tsc', 'arch_perfmon',
'pebs', 'bts', 'rep_good', 'nopl', 'xtopology', 'nonstop_tsc',
'aperfmperf', 'eagerfpu', 'pni', 'pclmulqdq', 'dtes64', 'monitor',
'ds_cpl', 'vmx', 'smx', 'est', 'tm2', 'ssse3', 'sdbg', 'fma', 'cx16',
'xtpr', 'pdcm', 'pcid', 'sse4_1', 'sse4_2', 'x2apic', 'movbe',
'popcnt', 'tsc_deadline_timer', 'aes', 'xsave', 'avx', 'f16c',
'rdrand', 'lahf_lm', 'abm', 'epb', 'tpr_shadow', 'vnmi', 'flexpriority',
'ept', 'vpid', 'fsgsbase', 'tsc_adjust', 'bmi1', 'avx2', 'smep', 'bmi2',
'erms', 'invpcid', 'xsaveopt', 'dtherm', 'ida', 'arat', 'pln', 'pts'
],
'processor_vcpus': 8},
},
{
'architecture': 'arm64',
'nproc_out': 4,
'sched_getaffinity': set([0, 1, 2, 3]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/arm64-4cpu-cpuinfo')),
'expected_result': {
'processor': ['0', '1', '2', '3'],
'processor_cores': 1,
'processor_count': 4,
'processor_nproc': 4,
'processor_threads_per_core': 1,
'processor_vcpus': 4},
},
{
'architecture': 'armv71',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/armv7-rev3-8cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'ARMv7 Processor rev 3 (v7l)',
'1', 'ARMv7 Processor rev 3 (v7l)',
'2', 'ARMv7 Processor rev 3 (v7l)',
'3', 'ARMv7 Processor rev 3 (v7l)',
'4', 'ARMv7 Processor rev 3 (v7l)',
'5', 'ARMv7 Processor rev 3 (v7l)',
'6', 'ARMv7 Processor rev 3 (v7l)',
'7', 'ARMv7 Processor rev 3 (v7l)',
],
'processor_cores': 1,
'processor_count': 8,
'processor_nproc': 8,
'processor_threads_per_core': 1,
'processor_vcpus': 8},
},
{
'architecture': 'x86_64',
'nproc_out': 2,
'sched_getaffinity': set([0, 1]),
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/x86_64-2cpu-cpuinfo')),
'expected_result': {
'processor': [
'0', 'GenuineIntel', 'Intel(R) Xeon(R) CPU E5-2680 v2 @ 2.80GHz',
'1', 'GenuineIntel', 'Intel(R) Xeon(R) CPU E5-2680 v2 @ 2.80GHz',
],
'processor_cores': 1,
'processor_count': 2,
'processor_nproc': 2,
'processor_threads_per_core': 1,
'flags': [
'fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce', 'cx8', 'apic',
'sep', 'mtrr', 'pge', 'mca', 'cmov', 'pat', 'pse36', 'clflush', 'mmx',
'fxsr', 'sse', 'sse2', 'ss', 'syscall', 'nx', 'pdpe1gb', 'rdtscp', 'lm',
'constant_tsc', 'arch_perfmon', 'rep_good', 'nopl', 'xtopology',
'cpuid', 'tsc_known_freq', 'pni', 'pclmulqdq', 'ssse3', 'cx16',
'pcid', 'sse4_1', 'sse4_2', 'x2apic', 'popcnt', 'tsc_deadline_timer',
'aes', 'xsave', 'avx', 'f16c', 'rdrand', 'hypervisor', 'lahf_lm',
'pti', 'fsgsbase', 'tsc_adjust', 'smep', 'erms', 'xsaveopt', 'arat'
],
'processor_vcpus': 2},
},
{
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64-power7-rhel7-8cpu-cpuinfo')),
'architecture': 'ppc64',
'nproc_out': 8,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7]),
'expected_result': {
'processor': [
'0', 'POWER7 (architected), altivec supported',
'1', 'POWER7 (architected), altivec supported',
'2', 'POWER7 (architected), altivec supported',
'3', 'POWER7 (architected), altivec supported',
'4', 'POWER7 (architected), altivec supported',
'5', 'POWER7 (architected), altivec supported',
'6', 'POWER7 (architected), altivec supported',
'7', 'POWER7 (architected), altivec supported'
],
'processor_cores': 1,
'processor_count': 8,
'processor_nproc': 8,
'processor_threads_per_core': 1,
'processor_vcpus': 8
},
},
{
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/ppc64le-power8-24cpu-cpuinfo')),
'architecture': 'ppc64le',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
'expected_result': {
'processor': [
'0', 'POWER8 (architected), altivec supported',
'1', 'POWER8 (architected), altivec supported',
'2', 'POWER8 (architected), altivec supported',
'3', 'POWER8 (architected), altivec supported',
'4', 'POWER8 (architected), altivec supported',
'5', 'POWER8 (architected), altivec supported',
'6', 'POWER8 (architected), altivec supported',
'7', 'POWER8 (architected), altivec supported',
'8', 'POWER8 (architected), altivec supported',
'9', 'POWER8 (architected), altivec supported',
'10', 'POWER8 (architected), altivec supported',
'11', 'POWER8 (architected), altivec supported',
'12', 'POWER8 (architected), altivec supported',
'13', 'POWER8 (architected), altivec supported',
'14', 'POWER8 (architected), altivec supported',
'15', 'POWER8 (architected), altivec supported',
'16', 'POWER8 (architected), altivec supported',
'17', 'POWER8 (architected), altivec supported',
'18', 'POWER8 (architected), altivec supported',
'19', 'POWER8 (architected), altivec supported',
'20', 'POWER8 (architected), altivec supported',
'21', 'POWER8 (architected), altivec supported',
'22', 'POWER8 (architected), altivec supported',
'23', 'POWER8 (architected), altivec supported',
],
'processor_cores': 1,
'processor_count': 24,
'processor_nproc': 24,
'processor_threads_per_core': 1,
'processor_vcpus': 24
},
},
{
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/s390x-z13-2cpu-cpuinfo')),
'architecture': 's390x',
'nproc_out': 2,
'sched_getaffinity': set([0, 1]),
'expected_result': {
'processor': [
'IBM/S390',
],
'processor_cores': 2,
'processor_count': 1,
'processor_nproc': 2,
'processor_threads_per_core': 1,
'processor_vcpus': 2
},
},
{
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/s390x-z14-64cpu-cpuinfo')),
'architecture': 's390x',
'nproc_out': 64,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63]),
'expected_result': {
'processor': [
'IBM/S390',
],
'processor_cores': 32,
'processor_count': 1,
'processor_nproc': 64,
'processor_threads_per_core': 2,
'processor_vcpus': 64
},
},
{
'cpuinfo': read_lines(os.path.join(os.path.dirname(__file__), '../fixtures/cpuinfo/sparc-t5-debian-ldom-24vcpu')),
'architecture': 'sparc64',
'nproc_out': 24,
'sched_getaffinity': set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]),
'expected_result': {
'processor': [
'UltraSparc T5 (Niagara5)',
],
'processor_cores': 1,
'processor_count': 24,
'processor_nproc': 24,
'processor_threads_per_core': 1,
'processor_vcpus': 24
},
},
]
SG_INQ_OUTPUTS = ["""
Identify controller for /dev/nvme0n1:
Model number: Amazon Elastic Block Store
Serial number: vol0123456789
Firmware revision: 1.0
Version: 0.0
No optional admin command support
No optional NVM command support
PCI vendor ID VID/SSVID: 0x1d0f/0x1d0f
IEEE OUI Identifier: 0xa002dc
Controller ID: 0x0
Number of namespaces: 1
Maximum data transfer size: 64 pages
Namespace 1 (deduced from device name):
Namespace size/capacity: 62914560/62914560 blocks
Namespace utilization: 0 blocks
Number of LBA formats: 1
Index LBA size: 0
LBA format 0 support: <-- active
Logical block size: 512 bytes
Approximate namespace size: 32 GB
Metadata size: 0 bytes
Relative performance: Best [0x0]
""", """
Identify controller for /dev/nvme0n1:
Model number: Amazon Elastic Block Store
Unit serial number: vol0123456789
Firmware revision: 1.0
Version: 0.0
No optional admin command support
No optional NVM command support
PCI vendor ID VID/SSVID: 0x1d0f/0x1d0f
IEEE OUI Identifier: 0xa002dc
Controller ID: 0x0
Number of namespaces: 1
Maximum data transfer size: 64 pages
Namespace 1 (deduced from device name):
Namespace size/capacity: 62914560/62914560 blocks
Namespace utilization: 0 blocks
Number of LBA formats: 1
Index LBA size: 0
LBA format 0 support: <-- active
Logical block size: 512 bytes
Approximate namespace size: 32 GB
Metadata size: 0 bytes
Relative performance: Best [0x0]
"""]
| 28,061
|
Python
|
.py
| 689
| 31.162554
| 154
| 0.549662
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,944
|
aix_data.py
|
ansible_ansible/test/units/module_utils/facts/hardware/aix_data.py
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
# Fixture data for AIX processor-fact tests.  Each scenario bundles canned
# command output with the facts the hardware collector is expected to derive:
#   'lsdev_output'              - lines from `lsdev -Cc processor`
#   'lsattr_type_output'        - lines from `lsattr -El procN -a type`
#   'lsattr_smt_threads_output' - lines from `lsattr -El procN -a smt_threads`
#   'expected_result'           - the fact dict the collector should produce
AIX_PROCESSOR_TEST_SCENARIOS = [
    {
        'comment': 'AIX 7.2 (gcc119 on GCC farm)',
        'lsdev_output': [
            'proc0 Available 00-00 Processor',
            'proc8 Available 00-08 Processor',
            'proc16 Available 00-16 Processor',
            'proc24 Available 00-24 Processor',
            'proc32 Available 00-32 Processor',
            'proc40 Available 00-40 Processor',
            'proc48 Available 00-48 Processor',
            'proc56 Available 00-56 Processor',
            'proc64 Available 00-64 Processor',
            'proc72 Available 00-72 Processor',
        ],
        'lsattr_type_output': ['type PowerPC_POWER8 Processor type False'],
        'lsattr_smt_threads_output': [
            'smt_threads 8 Processor SMT threads False'
        ],
        # 10 visible procs x 8 SMT threads -> 80 vcpus on a single socket.
        'expected_result': {
            'processor': ['PowerPC_POWER8'],
            'processor_count': 1,
            'processor_cores': 10,
            'processor_threads_per_core': 8,
            'processor_vcpus': 80
        },
    },
    {
        'comment': 'AIX 7.1 (gcc111 on GCC farm)',
        'lsdev_output': [
            'proc0 Available 00-00 Processor',
            'proc4 Available 00-04 Processor',
            'proc8 Available 00-08 Processor',
            'proc12 Available 00-12 Processor',
            'proc16 Available 00-16 Processor',
            'proc20 Available 00-20 Processor',
            'proc24 Available 00-24 Processor',
            'proc28 Available 00-28 Processor',
            'proc32 Available 00-32 Processor',
            'proc36 Available 00-36 Processor',
            'proc40 Available 00-40 Processor',
            'proc44 Available 00-44 Processor',
        ],
        'lsattr_type_output': ['type PowerPC_POWER7 Processor type False'],
        'lsattr_smt_threads_output': [
            'smt_threads 4 Processor SMT threads False'
        ],
        # 12 visible procs x 4 SMT threads -> 48 vcpus on a single socket.
        'expected_result': {
            'processor': ['PowerPC_POWER7'],
            'processor_count': 1,
            'processor_cores': 12,
            'processor_threads_per_core': 4,
            'processor_vcpus': 48
        },
    },
]
| 2,813
|
Python
|
.py
| 71
| 30.802817
| 75
| 0.608251
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,945
|
test_darwin_facts.py
|
ansible_ansible/test/units/module_utils/facts/hardware/test_darwin_facts.py
|
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pathlib
from ansible.module_utils.facts.hardware import darwin
from ansible.module_utils.facts.sysctl import get_sysctl
import pytest
class TestDarwinHardwareFacts:
    """Tests for DarwinHardware fact collection, driven by canned sysctl/vm_stat
    fixture files instead of real macOS commands."""

    def _get_mock_sysctl_data(self):
        # Canned `sysctl` output captured from a MacBookPro18,1 (Apple M1 Pro).
        fixtures = pathlib.Path(__file__).parent / "fixtures"
        return (fixtures / "sysctl_darwin.txt").read_text()

    @pytest.fixture()
    def mocked_module(self, mocker, request):
        # Provide an AnsibleModule stand-in whose get_bin_path/run_command
        # can be programmed per-test; stored on the class for reuse.
        request.cls.module = mocker.MagicMock()
        request.cls.module.get_bin_path.return_value = "/usr/sbin/sysctl"
        yield request.cls.module

    def test_get_mac_facts(self, mocked_module):
        """Model/product/os facts are parsed out of the sysctl fixture."""
        mocked_module.run_command.return_value = (0, self._get_mock_sysctl_data(), "")
        darwin_hardware = darwin.DarwinHardware(mocked_module)
        darwin_hardware.sysctl = get_sysctl(
            mocked_module, ["hw", "machdep", "kern", "hw.model"]
        )
        mac_facts = darwin_hardware.get_mac_facts()
        expected_mac_facts = {
            "model": "MacBookPro18,1",
            "product_name": "MacBookPro18,1",
            "osversion": "23E224",
            "osrevision": "199506",
        }
        assert mac_facts == expected_mac_facts

    def test_get_cpu_facts(self, mocked_module):
        """CPU brand and core counts come straight from the sysctl fixture."""
        mocked_module.run_command.return_value = (0, self._get_mock_sysctl_data(), "")
        darwin_hardware = darwin.DarwinHardware(mocked_module)
        darwin_hardware.sysctl = get_sysctl(
            mocked_module, ["hw", "machdep", "kern", "hw.model"]
        )
        cpu_facts = darwin_hardware.get_cpu_facts()
        expected_cpu_facts = {
            "processor": "Apple M1 Pro",
            "processor_cores": "10",
            "processor_vcpus": "10",
        }
        assert cpu_facts == expected_cpu_facts

    def test_get_memory_facts(self, mocked_module):
        """Memory facts combine sysctl totals with vm_stat free-page counts."""
        fixtures = pathlib.Path(__file__).parent / "fixtures"
        # First lookup resolves sysctl, second resolves vm_stat.
        mocked_module.get_bin_path.side_effect = [
            "/usr/sbin/sysctl",
            "/usr/bin/vm_stat",
        ]
        mocked_vm_stat = (fixtures / "vm_stat_darwin.txt").read_text()
        mocked_module.run_command.side_effect = [
            (0, self._get_mock_sysctl_data(), ""),
            (0, mocked_vm_stat, ""),
        ]
        darwin_hardware = darwin.DarwinHardware(mocked_module)
        darwin_hardware.sysctl = get_sysctl(
            mocked_module, ["hw", "machdep", "kern", "hw.model"]
        )
        memory_facts = darwin_hardware.get_memory_facts()
        expected_memory_facts = {"memtotal_mb": 32768, "memfree_mb": 26491}
        assert memory_facts == expected_memory_facts

    def test_get_uptime_facts(self, mocked_module):
        """Uptime is decoded from the raw binary sysctl kern.boottime value."""
        darwin_hardware = darwin.DarwinHardware(mocked_module)
        # Raw little-endian timeval struct as returned by `sysctl -b`.
        mocked_module.run_command.return_value = (
            0,
            b"\xc0\xa0\x05f\x00\x00\x00\x00\xac-\x05\x00\x00\x00\x00\x00",
            "",
        )
        uptime_facts = darwin_hardware.get_uptime_facts()
        assert "uptime_seconds" in uptime_facts
| 3,149
|
Python
|
.py
| 70
| 36.142857
| 92
| 0.621331
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,946
|
test_get_device_facts.py
|
ansible_ansible/test/units/module_utils/facts/hardware/freebsd/test_get_device_facts.py
|
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import json
import os
import pathlib
from ansible.module_utils.facts.hardware import freebsd
def test_get_device_facts(monkeypatch):
    """FreeBSDHardware.get_device_facts builds the expected device dict.

    /dev is faked via monkeypatched os.path.isdir/os.listdir using the
    'devices' fixture; the result must match the 'expected_devices' JSON.
    """
    fixtures = pathlib.Path(__file__).parent / 'fixtures'
    dev_dir = (fixtures / 'devices').read_text().split()
    # Read via pathlib so the fixture file handle is closed immediately
    # (the previous json.load(open(...)) leaked the handle).
    expected_dev_dir = json.loads((fixtures / 'expected_devices').read_text())
    monkeypatch.setattr(os.path, 'isdir', lambda x: True)
    monkeypatch.setattr(os, 'listdir', lambda x: dev_dir)
    freebsd_hardware = freebsd.FreeBSDHardware(None)
    facts = freebsd_hardware.get_device_facts()
    assert facts == expected_dev_dir
| 761
|
Python
|
.py
| 16
| 44.125
| 92
| 0.741192
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,947
|
test_get_sysinfo_facts.py
|
ansible_ansible/test/units/module_utils/facts/hardware/linux/test_get_sysinfo_facts.py
|
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import pathlib
from ansible.module_utils.facts.hardware import linux
def test_get_sysinfo_facts(monkeypatch):
    """LinuxHardware.get_sysinfo_facts parses the s390x /proc/sysinfo fixture."""
    fixture_dir = pathlib.Path(__file__).parent / 'fixtures'
    sysinfo_text = (fixture_dir / 'sysinfo').read_text()
    # Pretend /proc/sysinfo exists and serve the fixture as its content.
    monkeypatch.setattr(os.path, 'exists', lambda x: True)
    monkeypatch.setattr(linux, 'get_file_content', lambda x: sysinfo_text)
    hardware = linux.LinuxHardware(None)
    assert hardware.get_sysinfo_facts() == {
        'system_vendor': 'IBM',
        'product_version': 'NA',
        'product_name': '8561',
        'product_serial': 'AB1CD',
        'product_uuid': 'NA',
    }
| 824
|
Python
|
.py
| 21
| 34.285714
| 92
| 0.683417
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,948
|
test_datetime.py
|
ansible_ansible/test/units/module_utils/compat/test_datetime.py
|
from __future__ import annotations
import datetime
from ansible.module_utils.compat.datetime import utcnow, utcfromtimestamp, UTC
def test_utc():
    """The compat UTC tzinfo advertises the standard UTC contract."""
    tz = UTC
    assert tz.tzname(None) == 'UTC'
    assert tz.utcoffset(None) == datetime.timedelta(0)
    assert tz.dst(None) is None
def test_utcnow():
    """utcnow() returns an aware datetime carrying the compat UTC tzinfo."""
    now = utcnow()
    assert now.tzinfo is UTC
def test_utcfromtimestamp_zero():
    """utcfromtimestamp(0) yields the aware Unix epoch, 1970-01-01T00:00:00 UTC.

    Note: renamed from the typo'd ``test_utcfometimestamp_zero``; pytest
    discovers the test either way, so no caller is affected.
    """
    dt = utcfromtimestamp(0)
    assert dt.tzinfo is UTC
    assert (dt.year, dt.month, dt.day) == (1970, 1, 1)
    assert (dt.hour, dt.minute, dt.second, dt.microsecond) == (0, 0, 0, 0)
| 607
|
Python
|
.py
| 19
| 27.736842
| 78
| 0.704663
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,949
|
test_filter_non_json_lines.py
|
ansible_ansible/test/units/module_utils/json_utils/test_filter_non_json_lines.py
|
# -*- coding: utf-8 -*-
# (c) 2016, Matt Davis <mdavis@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.module_utils.json_utils import _filter_non_json_lines
class TestAnsibleModuleExitJson(unittest.TestCase):
    """Tests for _filter_non_json_lines: stripping non-JSON noise from
    module output while preserving the JSON payload itself."""

    # Single- and multi-line JSON payloads that must survive filtering intact.
    single_line_json_dict = u"""{"key": "value", "olá": "mundo"}"""
    single_line_json_array = u"""["a","b","c"]"""
    multi_line_json_dict = u"""{
    "key":"value"
    }"""
    multi_line_json_array = u"""[
    "a",
    "b",
    "c"]"""
    all_inputs = [
        single_line_json_dict,
        single_line_json_array,
        multi_line_json_dict,
        multi_line_json_array
    ]
    # Non-JSON noise to prepend/append; one single-line and one multi-line case.
    junk = [u"single line of junk", u"line 1/2 of junk\nline 2/2 of junk"]
    # Inputs with no parseable JSON (or mismatched delimiters) -> ValueError.
    unparsable_cases = (
        u'No json here',
        u'"olá": "mundo"',
        u'{"No json": "ending"',
        u'{"wrong": "ending"]',
        u'["wrong": "ending"}',
    )

    def test_just_json(self):
        """Pure JSON passes through unchanged with no warnings."""
        for i in self.all_inputs:
            filtered, warnings = _filter_non_json_lines(i)
            self.assertEqual(filtered, i)
            self.assertEqual(warnings, [])

    def test_leading_junk(self):
        """Junk before the JSON is dropped silently (no warning)."""
        for i in self.all_inputs:
            for j in self.junk:
                filtered, warnings = _filter_non_json_lines(j + "\n" + i)
                self.assertEqual(filtered, i)
                self.assertEqual(warnings, [])

    def test_trailing_junk(self):
        """Junk after the JSON is dropped but reported as a warning."""
        for i in self.all_inputs:
            for j in self.junk:
                filtered, warnings = _filter_non_json_lines(i + "\n" + j)
                self.assertEqual(filtered, i)
                self.assertEqual(warnings, [u"Module invocation had junk after the JSON data: %s" % j.strip()])

    def test_leading_and_trailing_junk(self):
        """Leading junk is silent; trailing junk still warns."""
        for i in self.all_inputs:
            for j in self.junk:
                filtered, warnings = _filter_non_json_lines("\n".join([j, i, j]))
                self.assertEqual(filtered, i)
                self.assertEqual(warnings, [u"Module invocation had junk after the JSON data: %s" % j.strip()])

    def test_unparsable_filter_non_json_lines(self):
        """Inputs without a valid JSON payload raise ValueError."""
        for i in self.unparsable_cases:
            self.assertRaises(
                ValueError,
                _filter_non_json_lines,
                data=i
            )
| 2,918
|
Python
|
.py
| 74
| 32.081081
| 111
| 0.616961
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,950
|
test_cli.py
|
ansible_ansible/test/units/cli/test_cli.py
|
# (c) 2017, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import patch, MagicMock
from units.mock.loader import DictDataLoader
from ansible.release import __version__
from ansible.parsing import vault
from ansible import cli
class TestCliVersion(unittest.TestCase):
    """Tests for CLI.version_info()."""

    def test_version_info(self):
        """The plain version string matches ansible.release.__version__."""
        info = cli.CLI.version_info()
        self.assertEqual(info['string'], __version__)

    def test_version_info_gitinfo(self):
        """With gitinfo=True the version string also reports the python version."""
        info = cli.CLI.version_info(gitinfo=True)
        self.assertIn('python version', info['string'])
class TestCliBuildVaultIds(unittest.TestCase):
    """Tests for CLI.build_vault_ids(): assembling the vault-id list from
    explicit ids, password files, and prompt flags."""

    def setUp(self):
        # Simulate an interactive terminal so prompt-based ids are eligible.
        self.tty_patcher = patch('ansible.cli.sys.stdin.isatty', return_value=True)
        self.mock_isatty = self.tty_patcher.start()

    def tearDown(self):
        self.tty_patcher.stop()

    def test(self):
        """An explicit id@source pair passes through untouched."""
        self.assertEqual(cli.CLI.build_vault_ids(['foo@bar']), ['foo@bar'])

    def test_no_vault_id_no_auto_prompt(self):
        """No ids, no auto-prompt (plain 'ansible-playbook') -> empty list."""
        self.assertEqual(cli.CLI.build_vault_ids([], auto_prompt=False), [])

    def test_no_vault_ids_auto_prompt(self):
        """Auto-prompt alone ('ansible-vault edit') adds the default prompt id."""
        self.assertEqual(cli.CLI.build_vault_ids([], auto_prompt=True),
                         ['default@prompt_ask_vault_pass'])

    def test_no_vault_ids_auto_prompt_ask_vault_pass(self):
        """--ask-vault-pass plus auto-prompt still yields a single prompt id."""
        self.assertEqual(
            cli.CLI.build_vault_ids([], auto_prompt=True, ask_vault_pass=True),
            ['default@prompt_ask_vault_pass'])

    def test_no_vault_id_ask_vault_pass(self):
        """--ask-vault-pass alone yields the default prompt id."""
        self.assertEqual(cli.CLI.build_vault_ids([], ask_vault_pass=True),
                         ['default@prompt_ask_vault_pass'])

    def test_no_vault_ids_password_files(self):
        """Each password file becomes a default@<file> id."""
        self.assertEqual(
            cli.CLI.build_vault_ids([], vault_password_files=['some-password-file']),
            ['default@some-password-file'])

    def test_everything(self):
        """All sources combined produce the union of the expected ids."""
        built = cli.CLI.build_vault_ids(
            ['blip@prompt', 'baz@prompt_ask_vault_pass',
             'some-password-file', 'qux@another-password-file'],
            vault_password_files=['yet-another-password-file',
                                  'one-more-password-file'],
            ask_vault_pass=True,
            auto_prompt=False)
        self.assertEqual(set(built), {
            'blip@prompt', 'baz@prompt_ask_vault_pass',
            'default@prompt_ask_vault_pass',
            'some-password-file', 'qux@another-password-file',
            'default@yet-another-password-file',
            'default@one-more-password-file'})
class TestCliSetupVaultSecrets(unittest.TestCase):
    """Tests for CLI.setup_vault_secrets(): resolving vault-ids into
    (vault_id, secret) pairs from prompts and password files, including
    their precedence and TTY-dependent behavior."""

    def setUp(self):
        self.fake_loader = DictDataLoader({})
        # Simulate an interactive terminal so prompt secrets are eligible.
        self.tty_patcher = patch('ansible.cli.sys.stdin.isatty', return_value=True)
        self.mock_isatty = self.tty_patcher.start()
        # NOTE(review): patching an int attribute with return_value= looks odd;
        # the explicit assignment below is what actually raises the verbosity.
        self.display_v_patcher = patch('ansible.cli.display.verbosity', return_value=6)
        self.mock_display_v = self.display_v_patcher.start()
        cli.display.verbosity = 5

    def tearDown(self):
        self.tty_patcher.stop()
        self.display_v_patcher.stop()
        cli.display.verbosity = 0

    def test(self):
        """No loader/ids and no auto-prompt still yields a (possibly empty) list."""
        res = cli.CLI.setup_vault_secrets(None, None, auto_prompt=False)
        self.assertIsInstance(res, list)

    @patch('ansible.cli.get_file_vault_secret')
    def test_password_file(self, mock_file_secret):
        """A vault-id pointing at a password file resolves to that file's secret."""
        filename = '/dev/null/secret'
        mock_file_secret.return_value = MagicMock(bytes=b'file1_password',
                                                  vault_id='file1',
                                                  filename=filename)
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['secret1@%s' % filename, 'secret2'],
                                          vault_password_files=[filename])
        self.assertIsInstance(res, list)
        matches = vault.match_secrets(res, ['secret1'])
        self.assertIn('secret1', [x[0] for x in matches])
        match = matches[0][1]
        self.assertEqual(match.bytes, b'file1_password')

    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt(self, mock_prompt_secret):
        """A <vault-id>@prompt id resolves via PromptVaultSecret."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='prompt1')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['prompt1@prompt'],
                                          ask_vault_pass=True,
                                          auto_prompt=False)
        self.assertIsInstance(res, list)
        matches = vault.match_secrets(res, ['prompt1'])
        self.assertIn('prompt1', [x[0] for x in matches])
        match = matches[0][1]
        self.assertEqual(match.bytes, b'prompt1_password')

    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt_no_tty(self, mock_prompt_secret):
        """Without a TTY, prompt ids are still recorded (but not prompted)."""
        self.mock_isatty.return_value = False
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='prompt1',
                                                    name='bytes_should_be_prompt1_password',
                                                    spec=vault.PromptVaultSecret)
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['prompt1@prompt'],
                                          ask_vault_pass=True,
                                          auto_prompt=False)
        self.assertIsInstance(res, list)
        self.assertEqual(len(res), 2)
        matches = vault.match_secrets(res, ['prompt1'])
        self.assertIn('prompt1', [x[0] for x in matches])
        self.assertEqual(len(matches), 1)

    @patch('ansible.cli.get_file_vault_secret')
    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt_no_tty_and_password_file(self, mock_prompt_secret, mock_file_secret):
        """With no TTY, the file secret is used and the prompt secret is skipped."""
        self.mock_isatty.return_value = False
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='prompt1')
        filename = '/dev/null/secret'
        mock_file_secret.return_value = MagicMock(bytes=b'file1_password',
                                                  vault_id='file1',
                                                  filename=filename)
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['prompt1@prompt', 'file1@/dev/null/secret'],
                                          ask_vault_pass=True)
        self.assertIsInstance(res, list)
        matches = vault.match_secrets(res, ['file1'])
        self.assertIn('file1', [x[0] for x in matches])
        self.assertNotIn('prompt1', [x[0] for x in matches])
        match = matches[0][1]
        self.assertEqual(match.bytes, b'file1_password')

    def _assert_ids(self, vault_id_names, res, password=b'prompt1_password'):
        # Shared helper: every expected vault-id must be present, each carrying
        # the single shared mock password.
        self.assertIsInstance(res, list)
        len_ids = len(vault_id_names)
        matches = vault.match_secrets(res, vault_id_names)
        self.assertEqual(len(res), len_ids, 'len(res):%s does not match len_ids:%s' % (len(res), len_ids))
        self.assertEqual(len(matches), len_ids)
        for index, prompt in enumerate(vault_id_names):
            self.assertIn(prompt, [x[0] for x in matches])
            # simple mock, same password/prompt for each mock_prompt_secret
            self.assertEqual(matches[index][1].bytes, password)

    @patch('ansible.cli.PromptVaultSecret')
    def test_multiple_prompts(self, mock_prompt_secret):
        """Multiple @prompt ids each produce a secret entry."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='prompt1')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['prompt1@prompt',
                                                     'prompt2@prompt'],
                                          ask_vault_pass=False)
        vault_id_names = ['prompt1', 'prompt2']
        self._assert_ids(vault_id_names, res)

    @patch('ansible.cli.PromptVaultSecret')
    def test_multiple_prompts_and_ask_vault_pass(self, mock_prompt_secret):
        """--ask-vault-pass adds a 'default' secret alongside explicit prompt ids."""
        self.mock_isatty.return_value = False
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='prompt1')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['prompt1@prompt',
                                                     'prompt2@prompt',
                                                     'prompt3@prompt_ask_vault_pass'],
                                          ask_vault_pass=True)
        # We provide some vault-ids and secrets, so auto_prompt shouldn't get triggered,
        # so there is
        vault_id_names = ['prompt1', 'prompt2', 'prompt3', 'default']
        self._assert_ids(vault_id_names, res)

    @patch('ansible.cli.C')
    @patch('ansible.cli.get_file_vault_secret')
    @patch('ansible.cli.PromptVaultSecret')
    def test_default_file_vault(self, mock_prompt_secret,
                                mock_file_secret,
                                mock_config):
        """DEFAULT_VAULT_PASSWORD_FILE supplies the 'default' secret; a prompt
        only adds a second entry when explicitly asked for."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='default')
        mock_file_secret.return_value = MagicMock(bytes=b'file1_password',
                                                  vault_id='default')
        mock_config.DEFAULT_VAULT_PASSWORD_FILE = '/dev/null/faux/vault_password_file'
        mock_config.DEFAULT_VAULT_IDENTITY = 'default'
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=[],
                                          create_new_password=False,
                                          ask_vault_pass=False)
        self.assertIsInstance(res, list)
        matches = vault.match_secrets(res, ['default'])
        # --vault-password-file/DEFAULT_VAULT_PASSWORD_FILE is higher precendce than prompts
        # if the same vault-id ('default') regardless of cli order since it didn't matter in 2.3
        self.assertEqual(matches[0][1].bytes, b'file1_password')
        self.assertEqual(len(matches), 1)
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=[],
                                          create_new_password=False,
                                          ask_vault_pass=True,
                                          auto_prompt=True)
        self.assertIsInstance(res, list)
        matches = vault.match_secrets(res, ['default'])
        self.assertEqual(matches[0][1].bytes, b'file1_password')
        self.assertEqual(matches[1][1].bytes, b'prompt1_password')
        self.assertEqual(len(matches), 2)

    @patch('ansible.cli.get_file_vault_secret')
    @patch('ansible.cli.PromptVaultSecret')
    def test_default_file_vault_identity_list(self, mock_prompt_secret,
                                              mock_file_secret):
        """A default identity list mixing prompt and file sources resolves both."""
        default_vault_ids = ['some_prompt@prompt',
                             'some_file@/dev/null/secret']
        mock_prompt_secret.return_value = MagicMock(bytes=b'some_prompt_password',
                                                    vault_id='some_prompt')
        filename = '/dev/null/secret'
        mock_file_secret.return_value = MagicMock(bytes=b'some_file_password',
                                                  vault_id='some_file',
                                                  filename=filename)
        vault_ids = default_vault_ids
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=vault_ids,
                                          create_new_password=False,
                                          ask_vault_pass=True)
        self.assertIsInstance(res, list)
        matches = vault.match_secrets(res, ['some_file'])
        # --vault-password-file/DEFAULT_VAULT_PASSWORD_FILE is higher precendce than prompts
        # if the same vault-id ('default') regardless of cli order since it didn't matter in 2.3
        self.assertEqual(matches[0][1].bytes, b'some_file_password')
        matches = vault.match_secrets(res, ['some_prompt'])
        self.assertEqual(matches[0][1].bytes, b'some_prompt_password')

    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt_just_ask_vault_pass(self, mock_prompt_secret):
        """--ask-vault-pass with no ids prompts for the 'default' secret."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='default')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=[],
                                          create_new_password=False,
                                          ask_vault_pass=True)
        self.assertIsInstance(res, list)
        match = vault.match_secrets(res, ['default'])[0][1]
        self.assertEqual(match.bytes, b'prompt1_password')

    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt_new_password_ask_vault_pass(self, mock_prompt_secret):
        """create_new_password=True still resolves a 'default' prompt secret."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='default')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=[],
                                          create_new_password=True,
                                          ask_vault_pass=True)
        self.assertIsInstance(res, list)
        match = vault.match_secrets(res, ['default'])[0][1]
        self.assertEqual(match.bytes, b'prompt1_password')

    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt_new_password_vault_id_prompt(self, mock_prompt_secret):
        """A named id@prompt with create_new_password resolves under that name."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='some_vault_id')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['some_vault_id@prompt'],
                                          create_new_password=True,
                                          ask_vault_pass=False)
        self.assertIsInstance(res, list)
        match = vault.match_secrets(res, ['some_vault_id'])[0][1]
        self.assertEqual(match.bytes, b'prompt1_password')

    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt_new_password_vault_id_prompt_ask_vault_pass(self, mock_prompt_secret):
        """id@prompt_ask_vault_pass resolves even without --ask-vault-pass."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='default')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['some_vault_id@prompt_ask_vault_pass'],
                                          create_new_password=True,
                                          ask_vault_pass=False)
        self.assertIsInstance(res, list)
        match = vault.match_secrets(res, ['some_vault_id'])[0][1]
        self.assertEqual(match.bytes, b'prompt1_password')

    @patch('ansible.cli.PromptVaultSecret')
    def test_prompt_new_password_vault_id_prompt_ask_vault_pass_ask_vault_pass(self, mock_prompt_secret):
        """id@prompt_ask_vault_pass combined with --ask-vault-pass resolves once."""
        mock_prompt_secret.return_value = MagicMock(bytes=b'prompt1_password',
                                                    vault_id='default')
        res = cli.CLI.setup_vault_secrets(loader=self.fake_loader,
                                          vault_ids=['some_vault_id@prompt_ask_vault_pass'],
                                          create_new_password=True,
                                          ask_vault_pass=True)
        self.assertIsInstance(res, list)
        match = vault.match_secrets(res, ['some_vault_id'])[0][1]
        self.assertEqual(match.bytes, b'prompt1_password')
| 17,421
|
Python
|
.py
| 293
| 42.030717
| 106
| 0.561364
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,951
|
test_playbook.py
|
ansible_ansible/test/units/cli/test_playbook.py
|
# (c) 2016, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from units.mock.loader import DictDataLoader
from ansible import context
from ansible.inventory.manager import InventoryManager
from ansible.vars.manager import VariableManager
from ansible.cli.playbook import PlaybookCLI
class TestPlaybookCLI(unittest.TestCase):
    """Tests for the ansible-playbook CLI front end."""

    def test_flush_cache(self):
        """--flush-cache is parsed and _flush_cache() empties the fact cache."""
        playbook_cli = PlaybookCLI(args=["ansible-playbook", "--flush-cache", "foobar.yml"])
        playbook_cli.parse()
        self.assertTrue(context.CLIARGS['flush_cache'])

        variable_manager = VariableManager()
        loader = DictDataLoader({'foobar.yml': ""})
        inventory = InventoryManager(loader=loader, sources='testhost,')

        # Seed a cached fact, then verify the flush removes it.
        variable_manager.set_host_facts('testhost', {'canary': True})
        self.assertTrue('testhost' in variable_manager._fact_cache)
        playbook_cli._flush_cache(inventory, variable_manager)
        self.assertFalse('testhost' in variable_manager._fact_cache)
| 1,658
|
Python
|
.py
| 35
| 43.714286
| 83
| 0.754647
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,952
|
test_doc.py
|
ansible_ansible/test/units/cli/test_doc.py
|
from __future__ import annotations
import pytest
from ansible import constants as C
from ansible.cli.doc import DocCLI, RoleMixin
from ansible.plugins.loader import module_loader, init_plugin_loader
# Disable ANSI colors so tty_ify() output compares byte-for-byte.
C.ANSIBLE_NOCOLOR = True
# Mapping of documentation markup input -> expected DocCLI.tty_ify() rendering.
TTY_IFY_DATA = {
    # No substitutions
    'no-op': 'no-op',
    'no-op Z(test)': 'no-op Z(test)',
    # Simple cases of all substitutions
    'I(italic)': "`italic`",
    'B(bold)': '*bold*',
    'M(ansible.builtin.module)': '[ansible.builtin.module]',
    'U(https://docs.ansible.com)': 'https://docs.ansible.com',
    'L(the user guide,https://docs.ansible.com/user-guide.html)': 'the user guide <https://docs.ansible.com/user-guide.html>',
    'R(the user guide,user-guide)': 'the user guide',
    'C(/usr/bin/file)': "`/usr/bin/file'",
    'HORIZONTALLINE': '\n{0}\n'.format('-' * 13),
    # Multiple substitutions
    'The M(ansible.builtin.yum) module B(MUST) be given the C(package) parameter. See the R(looping docs,using-loops) for more info':
    "The [ansible.builtin.yum] module *MUST* be given the `package' parameter. See the looping docs for more info",
    # Problem cases
    'IBM(International Business Machines)': 'IBM(International Business Machines)',
    'L(the user guide, https://docs.ansible.com/)': 'the user guide <https://docs.ansible.com/>',
    'R(the user guide, user-guide)': 'the user guide',
    # de-rsty refs and anchors
    'yolo :ref:`my boy` does stuff': 'yolo `my boy` does stuff',
    '.. seealso:: Something amazing': 'See also: Something amazing',
    '.. seealso:: Troublesome multiline\n Stuff goes htere': 'See also: Troublesome multiline\n Stuff goes htere',
    '.. note:: boring stuff': 'Note: boring stuff',
}
@pytest.mark.parametrize('text, expected', sorted(TTY_IFY_DATA.items()))
def test_ttyify(text, expected):
    """Each documentation markup string renders to its expected tty form."""
    assert DocCLI.tty_ify(text) == expected
def test_rolemixin__build_summary():
    """_build_summary() yields the FQCN and a short-description summary."""
    mixin = RoleMixin()
    argspec = {
        'main': {'short_description': 'main short description'},
        'alternate': {'short_description': 'alternate short description'},
    }
    fqcn, summary = mixin._build_summary('test_role', 'test.units', {}, argspec)
    assert fqcn == 'test.units.test_role'
    assert summary == {
        'collection': 'test.units',
        'description': 'UNDOCUMENTED',
        'entry_points': {
            'main': 'main short description',
            'alternate': 'alternate short description',
        },
    }
def test_rolemixin__build_summary_empty_argspec():
    """An empty argspec produces an empty entry_points mapping."""
    mixin = RoleMixin()
    fqcn, summary = mixin._build_summary('test_role', 'test.units', {}, {})
    assert fqcn == 'test.units.test_role'
    assert summary == {
        'collection': 'test.units',
        'description': 'UNDOCUMENTED',
        'entry_points': {},
    }
def test_rolemixin__build_doc():
    """_build_doc() keeps only the entry point selected by the filter."""
    mixin = RoleMixin()
    argspec = {
        'main': {'short_description': 'main short description'},
        'alternate': {'short_description': 'alternate short description'},
    }
    fqcn, doc = mixin._build_doc('test_role', '/a/b/c', 'test.units', argspec, 'main')
    assert fqcn == 'test.units.test_role'
    assert doc == {
        'path': '/a/b/c',
        'collection': 'test.units',
        'entry_points': {
            'main': argspec['main'],
        },
    }
def test_rolemixin__build_doc_no_filter_match():
    """A filter matching no entry point yields a doc of None."""
    mixin = RoleMixin()
    argspec = {
        'main': {'short_description': 'main short description'},
        'alternate': {'short_description': 'alternate short description'},
    }
    fqcn, doc = mixin._build_doc('test_role', '/a/b/c', 'test.units', argspec, 'doesNotExist')
    assert fqcn == 'test.units.test_role'
    assert doc is None
def test_builtin_modules_list():
    """Listing ansible.builtin modules returns a non-empty result."""
    doc_cli = DocCLI(args=['ansible-doc', '-l', 'ansible.builtin', '-t', 'module'])
    doc_cli.parse()
    init_plugin_loader()
    assert len(doc_cli._list_plugins('module', module_loader)) > 0
def test_legacy_modules_list():
    """Listing ansible.legacy modules returns a non-empty result."""
    doc_cli = DocCLI(args=['ansible-doc', '-l', 'ansible.legacy', '-t', 'module'])
    doc_cli.parse()
    assert len(doc_cli._list_plugins('module', module_loader)) > 0
| 4,730
|
Python
|
.py
| 115
| 35.713043
| 134
| 0.647008
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,953
|
test_vault.py
|
ansible_ansible/test/units/cli/test_vault.py
|
# -*- coding: utf-8 -*-
# (c) 2017, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import pytest
import unittest
from unittest.mock import patch, MagicMock
from units.mock.vault_helper import TextVaultSecret
from ansible import context, errors
from ansible.cli.vault import VaultCLI
from ansible.module_utils.common.text.converters import to_text
from ansible.utils import context_objects as co
# TODO: make these tests assert something, likely by verifying
# mock calls
@pytest.fixture(autouse=True)
def reset_cli_args():
    """Clear the GlobalCLIArgs singleton before and after every test so
    CLI argument state cannot leak between tests in this module."""
    co.GlobalCLIArgs._Singleton__instance = None
    yield
    co.GlobalCLIArgs._Singleton__instance = None
class TestVaultCli(unittest.TestCase):
    """Smoke tests for the ansible-vault CLI subcommands.

    Most tests only verify that parse()/run() complete or fail as expected;
    vault secret setup and the editor are mocked out per test. Note that
    stacked @patch decorators apply bottom-up, so mock arguments arrive in
    reverse decorator order.
    """

    def setUp(self):
        # Force stdin to look like a non-tty so prompt/editor code paths
        # behave deterministically under the test runner.
        self.tty_patcher = patch('ansible.cli.sys.stdin.isatty', return_value=False)
        self.mock_isatty = self.tty_patcher.start()

    def tearDown(self):
        self.tty_patcher.stop()

    def test_parse_empty(self):
        # No action supplied: argument parsing must bail out via SystemExit.
        cli = VaultCLI(['vaultcli'])
        self.assertRaises(SystemExit,
                          cli.parse)

    # FIXME: something weird seems to be afoot when parsing actions
    # cli = VaultCLI(args=['view', '/dev/null/foo', 'mysecret3'])
    # will skip '/dev/null/foo'. something in cli.CLI.set_action() ?
    # maybe we self.args gets modified in a loop?
    def test_parse_view_file(self):
        # A view action with a file argument parses cleanly.
        cli = VaultCLI(args=['ansible-vault', 'view', '/dev/null/foo'])
        cli.parse()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    def test_view_missing_file_no_secret(self, mock_setup_vault_secrets):
        # With no vault secrets available, run() must raise an options error.
        mock_setup_vault_secrets.return_value = []
        cli = VaultCLI(args=['ansible-vault', 'view', '/dev/null/foo'])
        cli.parse()
        self.assertRaisesRegex(errors.AnsibleOptionsError,
                               "A vault password is required to use Ansible's Vault",
                               cli.run)

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    def test_encrypt_missing_file_no_secret(self, mock_setup_vault_secrets):
        # Same as above for the encrypt action: no secrets -> options error.
        mock_setup_vault_secrets.return_value = []
        cli = VaultCLI(args=['ansible-vault', 'encrypt', '/dev/null/foo'])
        cli.parse()
        self.assertRaisesRegex(errors.AnsibleOptionsError,
                               "A vault password is required to use Ansible's Vault",
                               cli.run)

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_encrypt(self, mock_vault_editor, mock_setup_vault_secrets):
        # encrypt with a secret available runs without error.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'encrypt', '/dev/null/foo'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_encrypt_string(self, mock_vault_editor, mock_setup_vault_secrets):
        # encrypt_string accepts a plain positional string.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'encrypt_string',
                             'some string to encrypt'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    @patch('ansible.cli.vault.display.prompt', return_value='a_prompt')
    def test_encrypt_string_prompt(self, mock_display, mock_vault_editor, mock_setup_vault_secrets):
        # --show-input must make the prompt non-private (input echoed).
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault',
                             'encrypt_string',
                             '--prompt',
                             '--show-input',
                             'some string to encrypt'])
        cli.parse()
        cli.run()
        args, kwargs = mock_display.call_args
        assert kwargs["private"] is False

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    @patch('ansible.cli.vault.display.prompt', return_value='a_prompt')
    def test_shadowed_encrypt_string_prompt(self, mock_display, mock_vault_editor, mock_setup_vault_secrets):
        # Without --show-input the prompt must hide what the user types.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault',
                             'encrypt_string',
                             '--prompt',
                             'some string to encrypt'])
        cli.parse()
        cli.run()
        args, kwargs = mock_display.call_args
        assert kwargs["private"]

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    @patch('ansible.cli.vault.sys.stdin.read', return_value='This is data from stdin')
    def test_encrypt_string_stdin(self, mock_stdin_read, mock_vault_editor, mock_setup_vault_secrets):
        # '-' as the value makes encrypt_string read the payload from stdin.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault',
                             'encrypt_string',
                             '--stdin-name',
                             'the_var_from_stdin',
                             '-'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_encrypt_string_names(self, mock_vault_editor, mock_setup_vault_secrets):
        # Multiple --name options pair with the positional string(s).
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'encrypt_string',
                             '--name', 'foo1',
                             '--name', 'foo2',
                             'some string to encrypt'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_encrypt_string_more_args_than_names(self, mock_vault_editor, mock_setup_vault_secrets):
        # More positional strings than --name options must still run.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'encrypt_string',
                             '--name', 'foo1',
                             'some string to encrypt',
                             'other strings',
                             'a few more string args'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_create(self, mock_vault_editor, mock_setup_vault_secrets):
        # create needs a tty for the editor; setUp faked a non-tty stdin.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'create', '/dev/null/foo'])
        cli.parse()
        self.assertRaisesRegex(errors.AnsibleOptionsError,
                               "not a tty, editor cannot be opened",
                               cli.run)

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_create_skip_tty_check(self, mock_vault_editor, mock_setup_vault_secrets):
        # --skip-tty-check bypasses the tty requirement above.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'create', '--skip-tty-check', '/dev/null/foo'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_create_with_tty(self, mock_vault_editor, mock_setup_vault_secrets):
        # With stdout reporting as a tty, create runs without error.
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        self.tty_stdout_patcher = patch('ansible.cli.sys.stdout.isatty', return_value=True)
        self.tty_stdout_patcher.start()
        cli = VaultCLI(args=['ansible-vault', 'create', '/dev/null/foo'])
        cli.parse()
        cli.run()
        self.tty_stdout_patcher.stop()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_edit(self, mock_vault_editor, mock_setup_vault_secrets):
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'edit', '/dev/null/foo'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_decrypt(self, mock_vault_editor, mock_setup_vault_secrets):
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'decrypt', '/dev/null/foo'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_view(self, mock_vault_editor, mock_setup_vault_secrets):
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'view', '/dev/null/foo'])
        cli.parse()
        cli.run()

    @patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
    @patch('ansible.cli.vault.VaultEditor')
    def test_rekey(self, mock_vault_editor, mock_setup_vault_secrets):
        mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
        cli = VaultCLI(args=['ansible-vault', 'rekey', '/dev/null/foo'])
        cli.parse()
        cli.run()
@pytest.mark.parametrize('cli_args, expected', [
    (['ansible-vault', 'view', 'vault.txt'], 0),
    (['ansible-vault', 'view', 'vault.txt', '-vvv'], 3),
    (['ansible-vault', 'view', 'vault.txt', '-vv'], 2),
])
def test_verbosity_arguments(cli_args, expected, tmp_path_factory, monkeypatch):
    """Each -v flag should bump the verbosity recorded in context.CLIARGS."""
    # Supply a password file so the CLI never prompts during the test.
    work_dir = to_text(tmp_path_factory.mktemp('test-ansible-vault'))
    password_path = os.path.join(work_dir, 'pass.txt')
    with open(password_path, 'w') as password_fd:
        password_fd.write('password')
    cli_args.extend(['--vault-id', password_path])
    # Stub out every execute_* handler; only argument handling is under test.
    for attr_name in dir(VaultCLI):
        if attr_name.startswith("execute_"):
            monkeypatch.setattr(VaultCLI, attr_name, MagicMock())
    vault_cli = VaultCLI(args=cli_args)
    vault_cli.run()
    assert context.CLIARGS['verbosity'] == expected
| 11,012
|
Python
|
.py
| 214
| 42.373832
| 109
| 0.640249
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,954
|
test_console.py
|
ansible_ansible/test/units/cli/test_console.py
|
# (c) 2016, Thilo Uttendorfer <tlo@sengaya.de>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import patch
from ansible.cli.console import ConsoleCLI
class TestConsoleCLI(unittest.TestCase):
    """Basic checks for the ansible-console CLI."""

    def test_parse(self):
        """A parser object is built when parse() succeeds."""
        console = ConsoleCLI(['ansible test'])
        console.parse()
        self.assertTrue(console.parser is not None)

    def test_module_args(self):
        """module_args('copy') returns a list containing known copy options."""
        console = ConsoleCLI(['ansible test'])
        console.parse()
        copy_args = console.module_args('copy')
        self.assertTrue(console.parser is not None)
        self.assertIn('src', copy_args)
        self.assertIn('backup', copy_args)
        self.assertIsInstance(copy_args, list)

    @patch('ansible.utils.display.Display.display')
    def test_helpdefault(self, mock_display):
        """helpdefault() must emit output through Display.display."""
        console = ConsoleCLI(['ansible test'])
        console.parse()
        console.modules = set(['copy'])
        console.helpdefault('copy')
        self.assertTrue(console.parser is not None)
        self.assertTrue(len(mock_display.call_args_list) > 0,
                        "display.display should have been called but was not")
| 1,723
|
Python
|
.py
| 42
| 35.857143
| 78
| 0.701912
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,955
|
test_adhoc.py
|
ansible_ansible/test/units/cli/test_adhoc.py
|
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
import re
from ansible import context
from ansible.cli.adhoc import AdHocCLI, display
from ansible.errors import AnsibleOptionsError
def test_parse():
    """Constructing with empty args raises; parsing bare args exits."""
    with pytest.raises(ValueError, match='A non-empty list for args is required'):
        cli = AdHocCLI([])
    cli = AdHocCLI(['ansibletest'])
    with pytest.raises(SystemExit):
        cli.parse()
def test_with_command():
    """Parsing `-m command -vv` records the module name and verbosity."""
    cli = AdHocCLI(args=['ansible', '-m', 'command', '-vv', 'localhost'])
    cli.parse()
    assert context.CLIARGS['module_name'] == 'command'
    assert display.verbosity == 2
def test_simple_command():
    """A valid command-module invocation runs to completion with rc 0."""
    cli = AdHocCLI(['/bin/ansible', '-m', 'command', 'localhost', '-a', 'echo "hi"'])
    cli.parse()
    rc = cli.run()
    assert rc == 0
def test_no_argument():
    """Omitting -a for the command module raises AnsibleOptionsError."""
    cli = AdHocCLI(['/bin/ansible', '-m', 'command', 'localhost'])
    cli.parse()
    with pytest.raises(AnsibleOptionsError) as exec_info:
        cli.run()
    assert str(exec_info.value) == 'No argument passed to command module'
def test_did_you_mean_playbook():
    """A .yml host pattern triggers the ansible-playbook hint in the error."""
    cli = AdHocCLI(['/bin/ansible', '-m', 'command', 'localhost.yml'])
    cli.parse()
    with pytest.raises(AnsibleOptionsError) as exec_info:
        cli.run()
    assert str(exec_info.value) == 'No argument passed to command module (did you mean to run ansible-playbook?)'
def test_play_ds_positive():
    """_play_ds builds a one-task ad-hoc play with async/poll applied."""
    cli = AdHocCLI(args=['/bin/ansible', 'localhost', '-m', 'command'])
    cli.parse()
    play = cli._play_ds('command', 10, 2)
    assert play['name'] == 'Ansible Ad-Hoc'
    expected_task = {'action': {'module': 'command', 'args': {}}, 'async_val': 10, 'poll': 2, 'timeout': 0}
    assert play['tasks'] == [expected_task]
def test_play_ds_with_include_role():
    """include_role ad-hoc plays must disable fact gathering."""
    cli = AdHocCLI(args=['/bin/ansible', 'localhost', '-m', 'include_role'])
    cli.parse()
    play = cli._play_ds('include_role', None, 2)
    assert play['name'] == 'Ansible Ad-Hoc'
    assert play['gather_facts'] == 'no'
def test_run_import_playbook():
    """import_playbook is rejected as an ad-hoc action."""
    cli = AdHocCLI(args=['/bin/ansible', '-m', 'import_playbook', 'localhost'])
    cli.parse()
    with pytest.raises(AnsibleOptionsError) as exec_info:
        cli.run()
    assert context.CLIARGS['module_name'] == 'import_playbook'
    assert str(exec_info.value) == "'import_playbook' is not a valid action for ad-hoc commands"
def test_run_no_extra_vars():
    """`-e` with no value makes argument parsing exit with status 2."""
    cli = AdHocCLI(args=['/bin/ansible', 'localhost', '-e'])
    with pytest.raises(SystemExit) as exec_info:
        cli.parse()
    assert exec_info.value.code == 2
def test_ansible_version(capsys):
    """Verify the shape of `ansible --version` output.

    Runs the CLI with --version (which exits via SystemExit), captures
    stdout, and matches each of the nine expected report lines against a
    pattern. Fixes the 'locaction' typo in the executable-location
    assertion message.
    """
    adhoc_cli = AdHocCLI(args=['/bin/ansible', '--version'])
    with pytest.raises(SystemExit):
        adhoc_cli.run()
    version = capsys.readouterr()
    version_lines = version.out.splitlines()
    assert len(version_lines) == 9, 'Incorrect number of lines in "ansible --version" output'
    assert re.match(r'ansible \[core [0-9.a-z]+\]', version_lines[0]), 'Incorrect ansible version line in "ansible --version" output'
    assert re.match(' config file = .*$', version_lines[1]), 'Incorrect config file line in "ansible --version" output'
    assert re.match(' configured module search path = .*$', version_lines[2]), 'Incorrect module search path in "ansible --version" output'
    assert re.match(' ansible python module location = .*$', version_lines[3]), 'Incorrect python module location in "ansible --version" output'
    assert re.match(' ansible collection location = .*$', version_lines[4]), 'Incorrect collection location in "ansible --version" output'
    assert re.match(' executable location = .*$', version_lines[5]), 'Incorrect executable location in "ansible --version" output'
    assert re.match(' python version = .*$', version_lines[6]), 'Incorrect python version in "ansible --version" output'
    assert re.match(' jinja version = .*$', version_lines[7]), 'Incorrect jinja version in "ansible --version" output'
    assert re.match(' libyaml = .*$', version_lines[8]), 'Missing libyaml in "ansible --version" output'
| 4,723
|
Python
|
.py
| 86
| 50.139535
| 145
| 0.676062
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,956
|
test_galaxy.py
|
ansible_ansible/test/units/cli/test_galaxy.py
|
# -*- coding: utf-8 -*-
# (c) 2016, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import contextlib
import ansible
from io import BytesIO
import json
import os
import pytest
import shutil
import stat
import tarfile
import tempfile
import yaml
import ansible.constants as C
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.galaxy import collection
from ansible.galaxy.api import GalaxyAPI
from ansible.errors import AnsibleError
from ansible.module_utils.common.file import S_IRWU_RG_RO, S_IRWXU_RXG_RXO
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
import unittest
from unittest.mock import patch, MagicMock
@pytest.fixture(autouse=True)
def reset_cli_args():
    """Reset the GlobalCLIArgs singleton around each test."""
    def wipe():
        co.GlobalCLIArgs._Singleton__instance = None

    wipe()
    yield
    wipe()
class TestGalaxy(unittest.TestCase):
    """Tests for ansible-galaxy role commands (init/install/remove/parse).

    setUpClass builds a throwaway role, a tarball of it, and a requirements
    file so the install/remove tests have real artifacts to work on.
    Note: setUpClass chdirs into a temp dir; tearDownClass chdirs back out
    before removing it.
    """

    @classmethod
    def setUpClass(cls):
        """creating prerequisites for installing a role; setUpClass occurs ONCE whereas setUp occurs with every method tested."""
        # class data for easy viewing: role_dir, role_tar, role_name, role_req, role_path
        cls.temp_dir = tempfile.mkdtemp(prefix='ansible-test_galaxy-')
        os.chdir(cls.temp_dir)
        shutil.rmtree("./delete_me", ignore_errors=True)
        # creating framework for a role
        gc = GalaxyCLI(args=["ansible-galaxy", "init", "--offline", "delete_me"])
        gc.run()
        cls.role_dir = "./delete_me"
        cls.role_name = "delete_me"
        # making a temp dir for role installation
        cls.role_path = os.path.join(tempfile.mkdtemp(), "roles")
        os.makedirs(cls.role_path)
        # creating a tar file name for class data
        cls.role_tar = './delete_me.tar.gz'
        cls.makeTar(cls.role_tar, cls.role_dir)
        # creating a temp file with installation requirements
        cls.role_req = './delete_me_requirements.yml'
        with open(cls.role_req, "w") as fd:
            fd.write("- 'src': '%s'\n 'name': '%s'\n 'path': '%s'" % (cls.role_tar, cls.role_name, cls.role_path))

    @classmethod
    def makeTar(cls, output_file, source_dir):
        """ used for making a tarfile from a role directory """
        # adding directory into a tar file
        with tarfile.open(output_file, "w:gz") as tar:
            tar.add(source_dir, arcname=os.path.basename(source_dir))

    @classmethod
    def tearDownClass(cls):
        """After tests are finished removes things created in setUpClass"""
        # deleting the temp role directory
        shutil.rmtree(cls.role_dir, ignore_errors=True)
        with contextlib.suppress(FileNotFoundError):
            os.remove(cls.role_req)
        with contextlib.suppress(FileNotFoundError):
            os.remove(cls.role_tar)
        shutil.rmtree(cls.role_path, ignore_errors=True)
        # leave the temp dir before deleting it (we chdir'ed in setUpClass)
        os.chdir('/')
        shutil.rmtree(cls.temp_dir, ignore_errors=True)

    def setUp(self):
        # Reset the stored command line args
        co.GlobalCLIArgs._Singleton__instance = None
        self.default_args = ['ansible-galaxy']

    def tearDown(self):
        # Reset the stored command line args
        co.GlobalCLIArgs._Singleton__instance = None

    def test_init(self):
        # Bare construction succeeds.
        galaxy_cli = GalaxyCLI(args=self.default_args)
        assert isinstance(galaxy_cli, GalaxyCLI)

    def test_display_min(self):
        # Minimal role info still shows the role name.
        gc = GalaxyCLI(args=self.default_args)
        role_info = {'name': 'some_role_name'}
        display_result = gc._display_role_info(role_info)
        assert display_result.find('some_role_name') > -1

    def test_display_galaxy_info(self):
        # galaxy_info sub-dict is rendered indented one level.
        gc = GalaxyCLI(args=self.default_args)
        galaxy_info = {}
        role_info = {'name': 'some_role_name',
                     'galaxy_info': galaxy_info}
        display_result = gc._display_role_info(role_info)
        self.assertNotEqual(display_result.find('\n\tgalaxy_info:'), -1, 'Expected galaxy_info to be indented once')

    def test_run(self):
        """ verifies that the GalaxyCLI object's api is created and that execute() is called. """
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "--ignore-errors", "imaginary_role"])
        gc.parse()
        with patch.object(ansible.cli.CLI, "run", return_value=None) as mock_run:
            gc.run()
            # testing
            self.assertIsInstance(gc.galaxy, ansible.galaxy.Galaxy)
            self.assertEqual(mock_run.call_count, 1)
            assert isinstance(gc.api, ansible.galaxy.api.GalaxyAPI)

    def test_execute_remove(self):
        # installing role
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "-p", self.role_path, "-r", self.role_req, '--force'])
        gc.run()
        # location where the role was installed
        role_file = os.path.join(self.role_path, self.role_name)
        # removing role
        # Have to reset the arguments in the context object manually since we're doing the
        # equivalent of running the command line program twice
        co.GlobalCLIArgs._Singleton__instance = None
        gc = GalaxyCLI(args=["ansible-galaxy", "remove", role_file, self.role_name])
        gc.run()
        # testing role was removed
        removed_role = not os.path.exists(role_file)
        self.assertTrue(removed_role)

    def test_exit_without_ignore_without_flag(self):
        """ tests that GalaxyCLI exits with the error specified if the --ignore-errors flag is not used """
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "--server=None", "fake_role_name"])
        with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
            # testing that error expected is raised
            self.assertRaises(AnsibleError, gc.run)
            assert mocked_display.call_count == 2
            assert mocked_display.mock_calls[0].args[0] == "Starting galaxy role install process"
            assert "fake_role_name was NOT installed successfully" in mocked_display.mock_calls[1].args[0]

    def test_exit_without_ignore_with_flag(self):
        """ tests that GalaxyCLI exits without the error specified if the --ignore-errors flag is used  """
        # testing with --ignore-errors flag
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "--server=None", "fake_role_name", "--ignore-errors"])
        with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
            gc.run()
            assert mocked_display.call_count == 2
            assert mocked_display.mock_calls[0].args[0] == "Starting galaxy role install process"
            assert "fake_role_name was NOT installed successfully" in mocked_display.mock_calls[1].args[0]

    def test_parse_no_action(self):
        """ testing the options parser when no action is given """
        gc = GalaxyCLI(args=["ansible-galaxy", ""])
        self.assertRaises(SystemExit, gc.parse)

    def test_parse_invalid_action(self):
        """ testing the options parser when an invalid action is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "NOT_ACTION"])
        self.assertRaises(SystemExit, gc.parse)

    def test_parse_delete(self):
        """ testing the options parser when the action 'delete' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "delete", "foo", "bar"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)

    def test_parse_import(self):
        """ testing the options parser when the action 'import' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "import", "foo", "bar"])
        gc.parse()
        assert context.CLIARGS['wait']
        assert context.CLIARGS['reference'] is None
        assert not context.CLIARGS['check_status']
        assert context.CLIARGS['verbosity'] == 0

    def test_parse_info(self):
        """ testing the options parser when the action 'info' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "info", "foo", "bar"])
        gc.parse()
        assert not context.CLIARGS['offline']

    def test_parse_init(self):
        """ testing the options parser when the action 'init' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "init", "foo"])
        gc.parse()
        assert not context.CLIARGS['offline']
        assert not context.CLIARGS['force']

    def test_parse_install(self):
        """ testing the options parser when the action 'install' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "install"])
        gc.parse()
        assert not context.CLIARGS['ignore_errors']
        assert not context.CLIARGS['no_deps']
        assert context.CLIARGS['requirements'] is None
        assert not context.CLIARGS['force']

    def test_parse_list(self):
        """ testing the options parser when the action 'list' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "list"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)

    def test_parse_remove(self):
        """ testing the options parser when the action 'remove' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "remove", "foo"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)

    def test_parse_search(self):
        """ testing the options parswer when the action 'search' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "search"])
        gc.parse()
        assert context.CLIARGS['platforms'] is None
        assert context.CLIARGS['galaxy_tags'] is None
        assert context.CLIARGS['author'] is None

    def test_parse_setup(self):
        """ testing the options parser when the action 'setup' is given """
        gc = GalaxyCLI(args=["ansible-galaxy", "setup", "source", "github_user", "github_repo", "secret"])
        gc.parse()
        assert context.CLIARGS['verbosity'] == 0
        assert context.CLIARGS['remove_id'] is None
        assert not context.CLIARGS['setup_list']
class ValidRoleTests(object):
    """Mixin of assertions shared by the `ansible-galaxy init` skeleton tests.

    Subclasses call setUpRole() from their setUpClass to generate a role
    into a temp dir, then inherit these checks of the generated layout.
    Not a TestCase itself, so it is not collected on its own.
    """

    # Subdirectories every generated role is expected to contain.
    expected_role_dirs = ('defaults', 'files', 'handlers', 'meta', 'tasks', 'templates', 'vars', 'tests')

    @classmethod
    def setUpRole(cls, role_name, galaxy_args=None, skeleton_path=None, use_explicit_type=False):
        # Generate a role named `role_name` under a fresh temp dir.
        # `galaxy_args` are extra CLI flags; `skeleton_path` selects a custom
        # skeleton; `use_explicit_type` inserts the 'role' subcommand.
        if galaxy_args is None:
            galaxy_args = []
        if skeleton_path is not None:
            cls.role_skeleton_path = skeleton_path
            galaxy_args += ['--role-skeleton', skeleton_path]
        # Make temp directory for testing
        cls.test_dir = tempfile.mkdtemp()
        cls.role_dir = os.path.join(cls.test_dir, role_name)
        cls.role_name = role_name
        # create role using default skeleton
        args = ['ansible-galaxy']
        if use_explicit_type:
            args += ['role']
        args += ['init', '-c', '--offline'] + galaxy_args + ['--init-path', cls.test_dir, cls.role_name]
        gc = GalaxyCLI(args=args)
        gc.run()
        cls.gc = gc
        if skeleton_path is None:
            cls.role_skeleton_path = gc.galaxy.default_role_skeleton_path

    @classmethod
    def tearDownRole(cls):
        # Remove the generated role and its temp dir.
        shutil.rmtree(cls.test_dir, ignore_errors=True)

    def test_metadata(self):
        # meta/main.yml must carry the two mandatory top-level keys.
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertIn('galaxy_info', metadata, msg='unable to find galaxy_info in metadata')
        self.assertIn('dependencies', metadata, msg='unable to find dependencies in metadata')

    def test_readme(self):
        readme_path = os.path.join(self.role_dir, 'README.md')
        self.assertTrue(os.path.exists(readme_path), msg='Readme doesn\'t exist')

    def test_main_ymls(self):
        # Every role subdir except meta/tests/files/templates gets a main.yml
        # with a header; the custom skeleton omits the SPDX line.
        need_main_ymls = set(self.expected_role_dirs) - set(['meta', 'tests', 'files', 'templates'])
        for d in need_main_ymls:
            main_yml = os.path.join(self.role_dir, d, 'main.yml')
            self.assertTrue(os.path.exists(main_yml))
            if self.role_name == 'delete_me_skeleton':
                expected_string = "---\n# {0} file for {1}".format(d, self.role_name)
            else:
                expected_string = "#SPDX-License-Identifier: MIT-0\n---\n# {0} file for {1}".format(d, self.role_name)
            with open(main_yml, 'r') as f:
                self.assertEqual(expected_string, f.read().strip())

    def test_role_dirs(self):
        for d in self.expected_role_dirs:
            self.assertTrue(os.path.isdir(os.path.join(self.role_dir, d)), msg="Expected role subdirectory {0} doesn't exist".format(d))

    def test_readme_contents(self):
        # Generated README must match the skeleton's README verbatim.
        with open(os.path.join(self.role_dir, 'README.md'), 'r') as readme:
            contents = readme.read()
        with open(os.path.join(self.role_skeleton_path, 'README.md'), 'r') as f:
            expected_contents = f.read()
        self.assertEqual(expected_contents, contents, msg='README.md does not match expected')

    def test_test_yml(self):
        # tests/test.yml is a one-play playbook targeting localhost as root
        # and including exactly this role.
        with open(os.path.join(self.role_dir, 'tests', 'test.yml'), 'r') as f:
            test_playbook = yaml.safe_load(f)
        print(test_playbook)
        self.assertEqual(len(test_playbook), 1)
        self.assertEqual(test_playbook[0]['hosts'], 'localhost')
        self.assertEqual(test_playbook[0]['remote_user'], 'root')
        self.assertListEqual(test_playbook[0]['roles'], [self.role_name], msg='The list of roles included in the test play doesn\'t match')
class TestGalaxyInitDefault(unittest.TestCase, ValidRoleTests):
    """Checks for a role generated with the built-in default skeleton."""

    @classmethod
    def setUpClass(cls):
        cls.setUpRole(role_name='delete_me')

    @classmethod
    def tearDownClass(cls):
        cls.tearDownRole()

    def test_metadata_contents(self):
        """The default skeleton leaves the author placeholder in meta/main.yml."""
        meta_main = os.path.join(self.role_dir, 'meta', 'main.yml')
        with open(meta_main, 'r') as meta_fd:
            metadata = yaml.safe_load(meta_fd)
        author = metadata.get('galaxy_info', dict()).get('author')
        self.assertEqual(author, 'your name', msg='author was not set properly in metadata')
class TestGalaxyInitAPB(unittest.TestCase, ValidRoleTests):
    """Checks for a role generated with --type=apb."""

    @classmethod
    def setUpClass(cls):
        cls.setUpRole('delete_me_apb', galaxy_args=['--type=apb'])

    @classmethod
    def tearDownClass(cls):
        cls.tearDownRole()

    def test_metadata_apb_tag(self):
        # The apb type must stamp an 'apb' galaxy tag into the metadata.
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertIn('apb', metadata.get('galaxy_info', dict()).get('galaxy_tags', []), msg='apb tag not set in role metadata')

    def test_metadata_contents(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertEqual(metadata.get('galaxy_info', dict()).get('author'), 'your name', msg='author was not set properly in metadata')

    def test_apb_yml(self):
        # The apb skeleton adds an apb.yml at the role root.
        self.assertTrue(os.path.exists(os.path.join(self.role_dir, 'apb.yml')), msg='apb.yml was not created')

    def test_test_yml(self):
        # Overrides the mixin check: apb test plays run locally without facts
        # and ship an empty (None) task list.
        with open(os.path.join(self.role_dir, 'tests', 'test.yml'), 'r') as f:
            test_playbook = yaml.safe_load(f)
        print(test_playbook)
        self.assertEqual(len(test_playbook), 1)
        self.assertEqual(test_playbook[0]['hosts'], 'localhost')
        self.assertFalse(test_playbook[0]['gather_facts'])
        self.assertEqual(test_playbook[0]['connection'], 'local')
        self.assertIsNone(test_playbook[0]['tasks'], msg='We\'re expecting an unset list of tasks in test.yml')
class TestGalaxyInitContainer(unittest.TestCase, ValidRoleTests):
    """Checks for a role generated with --type=container."""

    @classmethod
    def setUpClass(cls):
        cls.setUpRole('delete_me_container', galaxy_args=['--type=container'])

    @classmethod
    def tearDownClass(cls):
        cls.tearDownRole()

    def test_metadata_container_tag(self):
        # The container type must stamp a 'container' galaxy tag.
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertIn('container', metadata.get('galaxy_info', dict()).get('galaxy_tags', []), msg='container tag not set in role metadata')

    def test_metadata_contents(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertEqual(metadata.get('galaxy_info', dict()).get('author'), 'your name', msg='author was not set properly in metadata')

    def test_meta_container_yml(self):
        # The container skeleton adds meta/container.yml.
        self.assertTrue(os.path.exists(os.path.join(self.role_dir, 'meta', 'container.yml')), msg='container.yml was not created')

    def test_test_yml(self):
        # Overrides the mixin check: container test plays run locally without
        # facts and ship an empty (None) task list.
        with open(os.path.join(self.role_dir, 'tests', 'test.yml'), 'r') as f:
            test_playbook = yaml.safe_load(f)
        print(test_playbook)
        self.assertEqual(len(test_playbook), 1)
        self.assertEqual(test_playbook[0]['hosts'], 'localhost')
        self.assertFalse(test_playbook[0]['gather_facts'])
        self.assertEqual(test_playbook[0]['connection'], 'local')
        self.assertIsNone(test_playbook[0]['tasks'], msg='We\'re expecting an unset list of tasks in test.yml')
class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):
    """Checks for a role generated from the custom test-data role skeleton."""

    @classmethod
    def setUpClass(cls):
        role_skeleton_path = os.path.join(os.path.split(__file__)[0], 'test_data', 'role_skeleton')
        cls.setUpRole('delete_me_skeleton', skeleton_path=role_skeleton_path, use_explicit_type=True)

    @classmethod
    def tearDownClass(cls):
        cls.tearDownRole()

    def test_empty_files_dir(self):
        # The skeleton's ignore rules should leave files/ present but empty.
        files_dir = os.path.join(self.role_dir, 'files')
        self.assertTrue(os.path.isdir(files_dir))
        self.assertListEqual(os.listdir(files_dir), [], msg='we expect the files directory to be empty, is ignore working?')

    def test_template_ignore_jinja(self):
        # .j2 templates must be copied verbatim, not rendered.
        test_conf_j2 = os.path.join(self.role_dir, 'templates', 'test.conf.j2')
        self.assertTrue(os.path.exists(test_conf_j2), msg="The test.conf.j2 template doesn't seem to exist, is it being rendered as test.conf?")
        with open(test_conf_j2, 'r') as f:
            contents = f.read()
        expected_contents = '[defaults]\ntest_key = {{ test_variable }}'
        self.assertEqual(expected_contents, contents.strip(), msg="test.conf.j2 doesn't contain what it should, is it being rendered?")

    def test_template_ignore_jinja_subfolder(self):
        # Same verbatim-copy guarantee for templates in subdirectories.
        test_conf_j2 = os.path.join(self.role_dir, 'templates', 'subfolder', 'test.conf.j2')
        self.assertTrue(os.path.exists(test_conf_j2), msg="The test.conf.j2 template doesn't seem to exist, is it being rendered as test.conf?")
        with open(test_conf_j2, 'r') as f:
            contents = f.read()
        expected_contents = '[defaults]\ntest_key = {{ test_variable }}'
        self.assertEqual(expected_contents, contents.strip(), msg="test.conf.j2 doesn't contain what it should, is it being rendered?")

    def test_template_ignore_similar_folder(self):
        # A folder merely named like 'templates' must not be skipped.
        self.assertTrue(os.path.exists(os.path.join(self.role_dir, 'templates_extra', 'templates.txt')))

    def test_skeleton_option(self):
        # The --role-skeleton path must round-trip into the parsed CLI args.
        self.assertEqual(self.role_skeleton_path, context.CLIARGS['role_skeleton'], msg='Skeleton path was not parsed properly from the command line')
@pytest.mark.parametrize('cli_args, expected', [
    (['ansible-galaxy', 'collection', 'init', 'abc._def'], 0),
    (['ansible-galaxy', 'collection', 'init', 'abc._def', '-vvv'], 3),
    (['ansible-galaxy', 'collection', 'init', 'abc._def', '-vv'], 2),
])
def test_verbosity_arguments(cli_args, expected, monkeypatch):
    """-v flags must be reflected in context.CLIARGS['verbosity']."""
    # Replace every execute_* handler so nothing real runs.
    for attr_name in dir(GalaxyCLI):
        if attr_name.startswith("execute_"):
            monkeypatch.setattr(GalaxyCLI, attr_name, MagicMock())
    galaxy_cli = GalaxyCLI(args=cli_args)
    galaxy_cli.run()
    assert context.CLIARGS['verbosity'] == expected
@pytest.fixture()
def collection_skeleton(request, tmp_path_factory):
    """Run `ansible-galaxy collection init` and yield the created collection dir.

    Parametrized indirectly with (fqcn, skeleton_path_or_None).
    """
    name, skeleton_path = request.param
    init_args = ['ansible-galaxy', 'collection', 'init', '-c']
    if skeleton_path is not None:
        init_args += ['--collection-skeleton', skeleton_path]
    out_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
    init_args += ['--init-path', out_dir, name]
    GalaxyCLI(args=init_args).run()
    namespace_name, collection_name = name.split('.', 1)
    return os.path.join(out_dir, namespace_name, collection_name)
@pytest.mark.parametrize('collection_skeleton', [
    ('ansible_test.my_collection', None),
], indirect=True)
def test_collection_default(collection_skeleton):
    """The default skeleton produces the stock galaxy.yml and directories."""
    with open(os.path.join(collection_skeleton, 'galaxy.yml'), 'r') as galaxy_meta:
        metadata = yaml.safe_load(galaxy_meta)

    expected_metadata = {
        'namespace': 'ansible_test',
        'name': 'my_collection',
        'authors': ['your name <example@domain.com>'],
        'readme': 'README.md',
        'version': '1.0.0',
        'description': 'your collection description',
        'license': ['GPL-2.0-or-later'],
        'tags': [],
        'dependencies': {},
        'documentation': 'http://docs.example.com',
        'repository': 'http://example.com/repository',
        'homepage': 'http://example.com',
        'issues': 'http://example.com/issue/tracker',
    }
    for key, value in expected_metadata.items():
        assert metadata[key] == value

    for d in ('docs', 'plugins', 'roles'):
        assert os.path.isdir(os.path.join(collection_skeleton, d)), \
            "Expected collection subdirectory {0} doesn't exist".format(d)
@pytest.mark.parametrize('collection_skeleton', [
    ('ansible_test.delete_me_skeleton', os.path.join(os.path.split(__file__)[0], 'test_data', 'collection_skeleton')),
], indirect=True)
def test_collection_skeleton(collection_skeleton):
    """A custom --collection-skeleton renders .j2 files and copies everything else verbatim."""
    meta_path = os.path.join(collection_skeleton, 'galaxy.yml')

    with open(meta_path, 'r') as galaxy_meta:
        metadata = yaml.safe_load(galaxy_meta)

    assert metadata['namespace'] == 'ansible_test'
    assert metadata['name'] == 'delete_me_skeleton'
    assert metadata['authors'] == ['Ansible Cow <acow@bovineuniversity.edu>', 'Tu Cow <tucow@bovineuniversity.edu>']
    assert metadata['version'] == '0.1.0'
    assert metadata['readme'] == 'README.md'
    # Only the keys present in the skeleton's galaxy.yml.j2 should appear.
    assert len(metadata) == 5

    assert os.path.exists(os.path.join(collection_skeleton, 'README.md'))

    # Test empty directories exist and are empty
    for empty_dir in ['plugins/action', 'plugins/filter', 'plugins/inventory', 'plugins/lookup',
                      'plugins/module_utils', 'plugins/modules']:
        assert os.listdir(os.path.join(collection_skeleton, empty_dir)) == []

    # Test files that don't end with .j2 were not templated
    doc_file = os.path.join(collection_skeleton, 'docs', 'My Collection.md')
    with open(doc_file, 'r') as f:
        doc_contents = f.read()
    assert doc_contents.strip() == 'Welcome to my test collection doc for {{ namespace }}.'

    # Test files that end with .j2 but are in the templates directory were not templated
    for template_dir in ['playbooks/templates', 'playbooks/templates/subfolder',
                         'roles/common/templates', 'roles/common/templates/subfolder']:
        test_conf_j2 = os.path.join(collection_skeleton, template_dir, 'test.conf.j2')
        assert os.path.exists(test_conf_j2)

        with open(test_conf_j2, 'r') as f:
            contents = f.read()
        expected_contents = '[defaults]\ntest_key = {{ test_variable }}'

        assert expected_contents == contents.strip()
@pytest.fixture()
def collection_artifact(collection_skeleton, tmp_path_factory):
    """ Creates a collection artifact tarball that is ready to be published and installed """
    output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Output'))

    # Create a file with +x in the collection so we can test the permissions
    execute_path = os.path.join(collection_skeleton, 'runme.sh')
    with open(execute_path, mode='wb') as fd:
        fd.write(b"echo hi")

    # S_ISUID should not be present on extraction.
    os.chmod(execute_path, os.stat(execute_path).st_mode | stat.S_ISUID | stat.S_IEXEC)

    # Because we call GalaxyCLI in collection_skeleton we need to reset the singleton back to None so it uses the new
    # args, we reset the original args once it is done.
    orig_cli_args = co.GlobalCLIArgs._Singleton__instance
    try:
        co.GlobalCLIArgs._Singleton__instance = None
        galaxy_args = ['ansible-galaxy', 'collection', 'build', collection_skeleton, '--output-path', output_dir]
        gc = GalaxyCLI(args=galaxy_args)
        gc.run()

        # Yields the directory that now contains the built <ns>-<name>-<version>.tar.gz artifact.
        yield output_dir
    finally:
        co.GlobalCLIArgs._Singleton__instance = orig_cli_args
def test_invalid_skeleton_path():
    """A nonexistent --collection-skeleton path must abort 'collection init' with a clear error."""
    cli_args = [
        'ansible-galaxy', 'collection', 'init', 'my.collection',
        '--collection-skeleton', '/fake/path',
    ]
    with pytest.raises(AnsibleError, match="- the skeleton path '/fake/path' does not exist, cannot init collection"):
        GalaxyCLI(args=cli_args).run()
@pytest.mark.parametrize("name", [
"",
"invalid",
"hypen-ns.collection",
"ns.hyphen-collection",
"ns.collection.weird",
])
def test_invalid_collection_name_init(name):
expected = "Invalid collection name '%s', name must be in the format <namespace>.<collection>" % name
gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'init', name])
with pytest.raises(AnsibleError, match=expected):
gc.run()
@pytest.mark.parametrize("name, expected", [
("", ""),
("invalid", "invalid"),
("invalid:1.0.0", "invalid"),
("hypen-ns.collection", "hypen-ns.collection"),
("ns.hyphen-collection", "ns.hyphen-collection"),
("ns.collection.weird", "ns.collection.weird"),
])
def test_invalid_collection_name_install(name, expected, tmp_path_factory):
install_path = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
# FIXME: we should add the collection name in the error message
# Used to be: expected = "Invalid collection name '%s', name must be in the format <namespace>.<collection>" % expected
expected = "Neither the collection requirement entry key 'name', nor 'source' point to a concrete resolvable collection artifact. "
expected += r"Also 'name' is not an FQCN\. A valid collection name must be in the format <namespace>\.<collection>\. "
expected += r"Please make sure that the namespace and the collection name contain characters from \[a\-zA\-Z0\-9_\] only\."
gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', name, '-p', os.path.join(install_path, 'install')])
with pytest.raises(AnsibleError, match=expected):
gc.run()
@pytest.mark.parametrize('collection_skeleton', [
    ('ansible_test.build_collection', None),
], indirect=True)
def test_collection_build(collection_artifact):
    """'collection build' must produce a tarball with normalized ownership/permissions and valid manifests."""
    tar_path = os.path.join(collection_artifact, 'ansible_test-build_collection-1.0.0.tar.gz')
    assert tarfile.is_tarfile(tar_path)

    with tarfile.open(tar_path, mode='r') as tar:
        tar_members = tar.getmembers()

        valid_files = ['MANIFEST.json', 'FILES.json', 'roles', 'docs', 'plugins', 'plugins/README.md', 'README.md',
                       'runme.sh', 'meta', 'meta/runtime.yml']
        assert len(tar_members) == len(valid_files)

        # Verify the uid and gid is 0 and the correct perms are set
        for member in tar_members:
            assert member.name in valid_files

            assert member.gid == 0
            assert member.gname == ''
            assert member.uid == 0
            assert member.uname == ''
            # Executables/dirs keep the execute bits; plain files do not.
            # (The setuid bit set by the collection_artifact fixture must have been dropped.)
            if member.isdir() or member.name == 'runme.sh':
                assert member.mode == S_IRWXU_RXG_RXO
            else:
                assert member.mode == S_IRWU_RG_RO

        # MANIFEST.json is written first (tar_members[0]) and describes the collection.
        manifest_file = tar.extractfile(tar_members[0])
        try:
            manifest = json.loads(to_text(manifest_file.read()))
        finally:
            manifest_file.close()

        coll_info = manifest['collection_info']
        file_manifest = manifest['file_manifest_file']
        assert manifest['format'] == 1
        assert len(manifest.keys()) == 3

        assert coll_info['namespace'] == 'ansible_test'
        assert coll_info['name'] == 'build_collection'
        assert coll_info['version'] == '1.0.0'
        assert coll_info['authors'] == ['your name <example@domain.com>']
        assert coll_info['readme'] == 'README.md'
        assert coll_info['tags'] == []
        assert coll_info['description'] == 'your collection description'
        assert coll_info['license'] == ['GPL-2.0-or-later']
        assert coll_info['license_file'] is None
        assert coll_info['dependencies'] == {}
        assert coll_info['repository'] == 'http://example.com/repository'
        assert coll_info['documentation'] == 'http://docs.example.com'
        assert coll_info['homepage'] == 'http://example.com'
        assert coll_info['issues'] == 'http://example.com/issue/tracker'
        assert len(coll_info.keys()) == 14

        assert file_manifest['name'] == 'FILES.json'
        assert file_manifest['ftype'] == 'file'
        assert file_manifest['chksum_type'] == 'sha256'
        assert file_manifest['chksum_sha256'] is not None  # Order of keys makes it hard to verify the checksum
        assert file_manifest['format'] == 1
        assert len(file_manifest.keys()) == 5

        # FILES.json is the second member and lists every packaged file/dir.
        files_file = tar.extractfile(tar_members[1])
        try:
            files = json.loads(to_text(files_file.read()))
        finally:
            files_file.close()

        assert len(files['files']) == 9
        assert files['format'] == 1
        assert len(files.keys()) == 2

        valid_files_entries = ['.', 'roles', 'docs', 'plugins', 'plugins/README.md', 'README.md', 'runme.sh', 'meta', 'meta/runtime.yml']
        for file_entry in files['files']:
            assert file_entry['name'] in valid_files_entries
            assert file_entry['format'] == 1

            if file_entry['name'] in ['plugins/README.md', 'runme.sh', 'meta/runtime.yml']:
                assert file_entry['ftype'] == 'file'
                assert file_entry['chksum_type'] == 'sha256'
                # Can't test the actual checksum as the html link changes based on the version or the file contents
                # don't matter
                assert file_entry['chksum_sha256'] is not None
            elif file_entry['name'] == 'README.md':
                assert file_entry['ftype'] == 'file'
                assert file_entry['chksum_type'] == 'sha256'
                assert file_entry['chksum_sha256'] == '6d8b5f9b5d53d346a8cd7638a0ec26e75e8d9773d952162779a49d25da6ef4f5'
            else:
                assert file_entry['ftype'] == 'dir'
                assert file_entry['chksum_type'] is None
                assert file_entry['chksum_sha256'] is None

            assert len(file_entry.keys()) == 5
@pytest.fixture()
def collection_install(reset_cli_args, tmp_path_factory, monkeypatch):
    """Stub out install_collections and Display.warning; yields (mock_install, mock_warning, output_dir)."""
    mock_install = MagicMock()
    monkeypatch.setattr(ansible.cli.galaxy, 'install_collections', mock_install)

    mock_warning = MagicMock()
    monkeypatch.setattr(ansible.utils.display.Display, 'warning', mock_warning)

    output_dir = to_text((tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Output')))
    yield mock_install, mock_warning, output_dir
def test_collection_install_with_names(collection_install):
    """Positional names (optionally with :version) are parsed into 'galaxy'-type requirements."""
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
                   '--collections-path', output_dir]
    GalaxyCLI(args=galaxy_args).run()

    collection_path = os.path.join(output_dir, 'ansible_collections')
    assert os.path.isdir(collection_path)

    # A path outside the configured collections paths triggers exactly one warning.
    assert mock_warning.call_count == 1
    assert "The specified collections path '%s' is not part of the configured Ansible collections path" % output_dir \
        in mock_warning.call_args[0][0]

    assert mock_install.call_count == 1
    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
    assert requirements == [('namespace.collection', '*', None, 'galaxy'),
                            ('namespace2.collection', '1.0.1', None, 'galaxy')]
    assert mock_install.call_args[0][1] == collection_path
    assert len(mock_install.call_args[0][2]) == 1
    assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
    assert mock_install.call_args[0][2][0].validate_certs is True
    assert mock_install.call_args[0][3] is False  # ignore_errors
    assert mock_install.call_args[0][4] is False  # no_deps
    assert mock_install.call_args[0][5] is False  # force
    assert mock_install.call_args[0][6] is False  # force_deps
def test_collection_install_with_invalid_requirements_format(collection_install):
    """A requirements file whose top-level YAML is a bare string must be rejected."""
    output_dir = collection_install[2]

    req_path = os.path.join(output_dir, 'requirements.yml')
    with open(req_path, 'wb') as fd:
        fd.write(b'"invalid"')

    cli_args = [
        'ansible-galaxy', 'collection', 'install',
        '--requirements-file', req_path,
        '--collections-path', output_dir,
    ]
    with pytest.raises(AnsibleError, match="Expecting requirements yaml to be a list or dictionary but got str"):
        GalaxyCLI(args=cli_args).run()
def test_collection_install_with_requirements_file(collection_install):
    """Collections from a requirements.yml (string or dict entries) are forwarded to install_collections."""
    mock_install, mock_warning, output_dir = collection_install

    requirements_file = os.path.join(output_dir, 'requirements.yml')
    with open(requirements_file, 'wb') as req_obj:
        req_obj.write(b"""---
collections:
- namespace.coll
- name: namespace2.coll
  version: '>2.0.1'
""")

    galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
                   '--collections-path', output_dir]
    GalaxyCLI(args=galaxy_args).run()

    collection_path = os.path.join(output_dir, 'ansible_collections')
    assert os.path.isdir(collection_path)

    # A path outside the configured collections paths triggers exactly one warning.
    assert mock_warning.call_count == 1
    assert "The specified collections path '%s' is not part of the configured Ansible collections path" % output_dir \
        in mock_warning.call_args[0][0]

    assert mock_install.call_count == 1
    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
    assert requirements == [('namespace.coll', '*', None, 'galaxy'),
                            ('namespace2.coll', '>2.0.1', None, 'galaxy')]
    assert mock_install.call_args[0][1] == collection_path
    assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
    assert mock_install.call_args[0][2][0].validate_certs is True
    assert mock_install.call_args[0][3] is False  # ignore_errors
    assert mock_install.call_args[0][4] is False  # no_deps
    assert mock_install.call_args[0][5] is False  # force
    assert mock_install.call_args[0][6] is False  # force_deps
def test_collection_install_with_relative_path(collection_install, monkeypatch):
    """Relative requirements-file and collections paths must be resolved to absolute paths."""
    mock_install = collection_install[0]

    mock_req = MagicMock()
    mock_req.return_value = {'collections': [('namespace.coll', '*', None, None)], 'roles': []}
    monkeypatch.setattr(ansible.cli.galaxy.GalaxyCLI, '_parse_requirements_file', mock_req)

    monkeypatch.setattr(os, 'makedirs', MagicMock())

    # NOTE(review): the odd '.myl' suffix looks like a typo for '.yml', but it is harmless here -
    # _parse_requirements_file is mocked so the file itself is never opened.
    requirements_file = './requirements.myl'
    collections_path = './ansible_collections'
    galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
                   '--collections-path', collections_path]
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.coll', '*', None, None)]
    assert mock_install.call_args[0][1] == os.path.abspath(collections_path)
    assert len(mock_install.call_args[0][2]) == 1
    assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
    assert mock_install.call_args[0][2][0].validate_certs is True
    assert mock_install.call_args[0][3] is False  # ignore_errors
    assert mock_install.call_args[0][4] is False  # no_deps
    assert mock_install.call_args[0][5] is False  # force
    assert mock_install.call_args[0][6] is False  # force_deps

    assert mock_req.call_count == 1
    assert mock_req.call_args[0][0] == os.path.abspath(requirements_file)
def test_collection_install_with_unexpanded_path(collection_install, monkeypatch):
    """'~' and environment variables in paths must be expanded before use."""
    mock_install = collection_install[0]

    mock_req = MagicMock()
    mock_req.return_value = {'collections': [('namespace.coll', '*', None, None)], 'roles': []}
    monkeypatch.setattr(ansible.cli.galaxy.GalaxyCLI, '_parse_requirements_file', mock_req)

    monkeypatch.setattr(os, 'makedirs', MagicMock())

    # NOTE(review): '.myl' suffix never matters - _parse_requirements_file is mocked, file is never read.
    requirements_file = '~/requirements.myl'
    collections_path = '~/ansible_collections'
    galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
                   '--collections-path', collections_path]
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.coll', '*', None, None)]
    assert mock_install.call_args[0][1] == os.path.expanduser(os.path.expandvars(collections_path))
    assert len(mock_install.call_args[0][2]) == 1
    assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
    assert mock_install.call_args[0][2][0].validate_certs is True
    assert mock_install.call_args[0][3] is False  # ignore_errors
    assert mock_install.call_args[0][4] is False  # no_deps
    assert mock_install.call_args[0][5] is False  # force
    assert mock_install.call_args[0][6] is False  # force_deps

    assert mock_req.call_count == 1
    assert mock_req.call_args[0][0] == os.path.expanduser(os.path.expandvars(requirements_file))
def test_collection_install_in_collection_dir(collection_install, monkeypatch):
    """Installing into a configured collections path must not emit the 'not configured' warning."""
    mock_install, mock_warning, output_dir = collection_install

    collections_path = C.COLLECTIONS_PATHS[0]

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
                   '--collections-path', collections_path]
    GalaxyCLI(args=galaxy_args).run()

    # No warning because the target is part of the configured collections paths.
    assert mock_warning.call_count == 0

    assert mock_install.call_count == 1
    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
    assert requirements == [('namespace.collection', '*', None, 'galaxy'),
                            ('namespace2.collection', '1.0.1', None, 'galaxy')]
    # The 'ansible_collections' subdirectory is appended automatically.
    assert mock_install.call_args[0][1] == os.path.join(collections_path, 'ansible_collections')
    assert len(mock_install.call_args[0][2]) == 1
    assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
    assert mock_install.call_args[0][2][0].validate_certs is True
    assert mock_install.call_args[0][3] is False  # ignore_errors
    assert mock_install.call_args[0][4] is False  # no_deps
    assert mock_install.call_args[0][5] is False  # force
    assert mock_install.call_args[0][6] is False  # force_deps
def test_collection_install_with_url(monkeypatch, collection_install):
    """Installing from a direct URL creates a 'url'-type requirement, with metadata read from the tarball."""
    mock_install, dummy, output_dir = collection_install

    # Avoid any network access and tar parsing - both are stubbed.
    mock_open = MagicMock(return_value=BytesIO())
    monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)

    mock_metadata = MagicMock(return_value={'namespace': 'foo', 'name': 'bar', 'version': 'v1.0.0'})
    monkeypatch.setattr(collection.concrete_artifact_manager, '_get_meta_from_tar', mock_metadata)

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'https://foo/bar/foo-bar-v1.0.0.tar.gz',
                   '--collections-path', output_dir]
    GalaxyCLI(args=galaxy_args).run()

    collection_path = os.path.join(output_dir, 'ansible_collections')
    assert os.path.isdir(collection_path)

    assert mock_install.call_count == 1
    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
    assert requirements == [('foo.bar', 'v1.0.0', 'https://foo/bar/foo-bar-v1.0.0.tar.gz', 'url')]
    assert mock_install.call_args[0][1] == collection_path
    assert len(mock_install.call_args[0][2]) == 1
    assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
    assert mock_install.call_args[0][2][0].validate_certs is True
    assert mock_install.call_args[0][3] is False  # ignore_errors
    assert mock_install.call_args[0][4] is False  # no_deps
    assert mock_install.call_args[0][5] is False  # force
    assert mock_install.call_args[0][6] is False  # force_deps
def test_collection_install_name_and_requirements_fail(collection_install):
    """Giving both a positional collection name and -r must be rejected as mutually exclusive."""
    test_path = collection_install[2]
    cli_args = [
        'ansible-galaxy', 'collection', 'install', 'namespace.collection',
        '--collections-path', test_path,
        '--requirements-file', test_path,
    ]
    with pytest.raises(AnsibleError, match='The positional collection_name arg and --requirements-file are mutually exclusive.'):
        GalaxyCLI(args=cli_args).run()
def test_collection_install_no_name_and_requirements_fail(collection_install):
    """With neither a collection name nor a requirements file, install must fail fast."""
    test_path = collection_install[2]
    cli_args = ['ansible-galaxy', 'collection', 'install', '--collections-path', test_path]
    with pytest.raises(AnsibleError, match='You must specify a collection name or a requirements file.'):
        GalaxyCLI(args=cli_args).run()
def test_collection_install_path_with_ansible_collections(collection_install):
    """A target path already ending in 'ansible_collections' is used as-is (no extra nesting)."""
    mock_install, mock_warning, output_dir = collection_install

    collection_path = os.path.join(output_dir, 'ansible_collections')

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
                   '--collections-path', collection_path]
    GalaxyCLI(args=galaxy_args).run()

    assert os.path.isdir(collection_path)

    # Still warns because the path is outside the configured collections paths.
    assert mock_warning.call_count == 1
    assert "The specified collections path '%s' is not part of the configured Ansible collections path" \
        % collection_path in mock_warning.call_args[0][0]

    assert mock_install.call_count == 1
    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
    assert requirements == [('namespace.collection', '*', None, 'galaxy'),
                            ('namespace2.collection', '1.0.1', None, 'galaxy')]
    assert mock_install.call_args[0][1] == collection_path
    assert len(mock_install.call_args[0][2]) == 1
    assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
    assert mock_install.call_args[0][2][0].validate_certs is True
    assert mock_install.call_args[0][3] is False  # ignore_errors
    assert mock_install.call_args[0][4] is False  # no_deps
    assert mock_install.call_args[0][5] is False  # force
    assert mock_install.call_args[0][6] is False  # force_deps
def test_collection_install_ignore_certs(collection_install):
    """--ignore-certs must disable TLS certificate validation on the Galaxy API client.

    NOTE(review): the previous assertion checked ``mock_install.call_args[0][3]``,
    which per the argument map used throughout this file (collections, output_path,
    apis, ignore_errors, no_deps, force, force_deps) is ``ignore_errors`` - False by
    default - so the test passed without exercising certificate handling at all.
    The meaningful check is ``validate_certs`` on the API server object, mirroring
    the ``validate_certs is True`` assertions in the other install tests.
    """
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
                   '--ignore-certs']
    GalaxyCLI(args=galaxy_args).run()

    # The API servers handed to install_collections must have validation turned off.
    assert mock_install.call_args[0][2][0].validate_certs is False
def test_collection_install_force(collection_install):
    """--force must be forwarded to install_collections as the 'force' positional."""
    mock_install, mock_warning, output_dir = collection_install

    cli_args = [
        'ansible-galaxy', 'collection', 'install', 'namespace.collection',
        '--collections-path', output_dir, '--force',
    ]
    GalaxyCLI(args=cli_args).run()

    # install_collections args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
    assert mock_install.call_args[0][5] is True
def test_collection_install_force_deps(collection_install):
    """--force-with-deps must be forwarded to install_collections as the 'force_deps' positional."""
    mock_install, mock_warning, output_dir = collection_install

    cli_args = [
        'ansible-galaxy', 'collection', 'install', 'namespace.collection',
        '--collections-path', output_dir, '--force-with-deps',
    ]
    GalaxyCLI(args=cli_args).run()

    # install_collections args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
    assert mock_install.call_args[0][6] is True
def test_collection_install_no_deps(collection_install):
    """--no-deps must be forwarded to install_collections as the 'no_deps' positional."""
    mock_install, mock_warning, output_dir = collection_install

    cli_args = [
        'ansible-galaxy', 'collection', 'install', 'namespace.collection',
        '--collections-path', output_dir, '--no-deps',
    ]
    GalaxyCLI(args=cli_args).run()

    # install_collections args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
    assert mock_install.call_args[0][4] is True
def test_collection_install_ignore(collection_install):
    """--ignore-errors must be forwarded to install_collections as the 'ignore_errors' positional."""
    mock_install, mock_warning, output_dir = collection_install

    cli_args = [
        'ansible-galaxy', 'collection', 'install', 'namespace.collection',
        '--collections-path', output_dir, '--ignore-errors',
    ]
    GalaxyCLI(args=cli_args).run()

    # install_collections args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
    assert mock_install.call_args[0][3] is True
def test_collection_install_custom_server(collection_install):
    """--server must replace the default Galaxy API endpoint for the install."""
    mock_install, mock_warning, output_dir = collection_install

    cli_args = [
        'ansible-galaxy', 'collection', 'install', 'namespace.collection',
        '--collections-path', output_dir,
        '--server', 'https://galaxy-dev.ansible.com',
    ]
    GalaxyCLI(args=cli_args).run()

    apis = mock_install.call_args[0][2]
    assert len(apis) == 1
    assert apis[0].api_server == 'https://galaxy-dev.ansible.com'
    assert apis[0].validate_certs is True
@pytest.fixture()
def requirements_file(request, tmp_path_factory):
    """Write request.param (if truthy) to a temp requirements.yml and yield its path.

    Parametrize indirectly with the file content, or None to get a path that does not exist.
    """
    content = request.param

    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Requirements'))
    requirements_file = os.path.join(test_dir, 'requirements.yml')

    if content:
        with open(requirements_file, 'wb') as req_obj:
            req_obj.write(to_bytes(content))

    yield requirements_file
@pytest.fixture()
def requirements_cli(monkeypatch):
    """Return a ran GalaxyCLI for 'install' whose execute_install is stubbed out."""
    monkeypatch.setattr(GalaxyCLI, 'execute_install', MagicMock())
    galaxy_cli = GalaxyCLI(args=['ansible-galaxy', 'install'])
    galaxy_cli.run()
    return galaxy_cli
@pytest.mark.parametrize('requirements_file', [None], indirect=True)
def test_parse_requirements_file_that_doesnt_exist(requirements_cli, requirements_file):
    """Parsing a nonexistent requirements file must raise a clear AnsibleError."""
    err_pattern = "The requirements file '%s' does not exist." % to_native(requirements_file)
    with pytest.raises(AnsibleError, match=err_pattern):
        requirements_cli._parse_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', ['not a valid yml file: hi: world'], indirect=True)
def test_parse_requirements_file_that_isnt_yaml(requirements_cli, requirements_file):
    """Unparseable YAML in the requirements file must surface as a parse error."""
    err_pattern = "Failed to parse the requirements yml at '%s' with the following error" % to_native(requirements_file)
    with pytest.raises(AnsibleError, match=err_pattern):
        requirements_cli._parse_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', [("""
# Older role based requirements.yml
- galaxy.role
- anotherrole
""")], indirect=True)
def test_parse_requirements_in_older_format_illegal(requirements_cli, requirements_file):
    """Old-style bare role lists must be rejected when allow_old_format=False."""
    expected = "Expecting requirements file to be a dict with the key 'collections' that contains a list of " \
               "collections to install"

    with pytest.raises(AnsibleError, match=expected):
        requirements_cli._parse_requirements_file(requirements_file, allow_old_format=False)
@pytest.mark.parametrize('requirements_file', ["""
collections:
- version: 1.0.0
"""], indirect=True)
def test_parse_requirements_without_mandatory_name_key(requirements_cli, requirements_file):
    """A collection entry with neither 'name' nor 'source' cannot resolve and must raise."""
    # Used to be "Collections requirement entry should contain the key name."
    # Should we check that either source or name is provided before using the dep resolver?
    expected = "Neither the collection requirement entry key 'name', nor 'source' point to a concrete resolvable collection artifact. "
    expected += r"Also 'name' is not an FQCN\. A valid collection name must be in the format <namespace>\.<collection>\. "
    expected += r"Please make sure that the namespace and the collection name contain characters from \[a\-zA\-Z0\-9_\] only\."

    with pytest.raises(AnsibleError, match=expected):
        requirements_cli._parse_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', [("""
collections:
- namespace.collection1
- namespace.collection2
"""), ("""
collections:
- name: namespace.collection1
- name: namespace.collection2
""")], indirect=True)
def test_parse_requirements(requirements_cli, requirements_file):
    """Both bare-string and dict collection entries parse to ('<fqcn>', '*', None, 'galaxy')."""
    expected = {
        'roles': [],
        'collections': [('namespace.collection1', '*', None, 'galaxy'), ('namespace.collection2', '*', None, 'galaxy')]
    }
    actual = requirements_cli._parse_requirements_file(requirements_file)
    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]

    assert actual == expected
@pytest.mark.parametrize('requirements_file', ["""
collections:
- name: namespace.collection1
  version: ">=1.0.0,<=2.0.0"
  source: https://galaxy-dev.ansible.com
- namespace.collection2"""], indirect=True)
def test_parse_requirements_with_extra_info(requirements_cli, requirements_file):
    """Per-entry 'version' and 'source' keys must be honoured; bare entries keep the defaults."""
    actual = requirements_cli._parse_requirements_file(requirements_file)
    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]

    assert len(actual['roles']) == 0
    assert len(actual['collections']) == 2
    assert actual['collections'][0][0] == 'namespace.collection1'
    assert actual['collections'][0][1] == '>=1.0.0,<=2.0.0'
    # The per-entry 'source' becomes a dedicated API object for that collection.
    assert actual['collections'][0][2].api_server == 'https://galaxy-dev.ansible.com'

    assert actual['collections'][1] == ('namespace.collection2', '*', None, 'galaxy')
@pytest.mark.parametrize('requirements_file', ["""
roles:
- username.role_name
- src: username2.role_name2
- src: ssh://github.com/user/repo
  scm: git
collections:
- namespace.collection2
"""], indirect=True)
def test_parse_requirements_with_roles_and_collections(requirements_cli, requirements_file):
    """A requirements file may mix 'roles' (name, src, or scm forms) and 'collections'."""
    actual = requirements_cli._parse_requirements_file(requirements_file)
    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]

    assert len(actual['roles']) == 3
    assert actual['roles'][0].name == 'username.role_name'
    assert actual['roles'][1].name == 'username2.role_name2'
    # An scm src derives the role name from the repository name.
    assert actual['roles'][2].name == 'repo'
    assert actual['roles'][2].src == 'ssh://github.com/user/repo'

    assert len(actual['collections']) == 1
    assert actual['collections'][0] == ('namespace.collection2', '*', None, 'galaxy')
@pytest.mark.parametrize('requirements_file', ["""
collections:
- name: namespace.collection
- name: namespace2.collection2
  source: https://galaxy-dev.ansible.com/
- name: namespace3.collection3
  source: server
"""], indirect=True)
def test_parse_requirements_with_collection_source(requirements_cli, requirements_file):
    """'source' may be a raw URL or the name of a configured API server."""
    galaxy_api = GalaxyAPI(requirements_cli.api, 'server', 'https://config-server')
    requirements_cli.api_servers.append(galaxy_api)

    actual = requirements_cli._parse_requirements_file(requirements_file)
    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]

    assert actual['roles'] == []
    assert len(actual['collections']) == 3
    assert actual['collections'][0] == ('namespace.collection', '*', None, 'galaxy')

    # Raw-URL source: a new API object pointing at that URL.
    assert actual['collections'][1][0] == 'namespace2.collection2'
    assert actual['collections'][1][1] == '*'
    assert actual['collections'][1][2].api_server == 'https://galaxy-dev.ansible.com/'

    # Named source: resolved against the configured api_servers list.
    assert actual['collections'][2][0] == 'namespace3.collection3'
    assert actual['collections'][2][1] == '*'
    assert actual['collections'][2][2].api_server == 'https://config-server'
@pytest.mark.parametrize('requirements_file', ["""
- username.included_role
- src: https://github.com/user/repo
"""], indirect=True)
def test_parse_requirements_roles_with_include(requirements_cli, requirements_file):
    """A role entry of {'include': <path>} pulls in the roles from the referenced file."""
    reqs = [
        'ansible.role',
        {'include': requirements_file},
    ]
    parent_requirements = os.path.join(os.path.dirname(requirements_file), 'parent.yaml')
    with open(to_bytes(parent_requirements), 'wb') as req_fd:
        req_fd.write(to_bytes(yaml.safe_dump(reqs)))

    actual = requirements_cli._parse_requirements_file(parent_requirements)

    assert len(actual['roles']) == 3
    assert actual['collections'] == []
    assert actual['roles'][0].name == 'ansible.role'
    assert actual['roles'][1].name == 'username.included_role'
    # An src URL derives the role name from the repository name.
    assert actual['roles'][2].name == 'repo'
    assert actual['roles'][2].src == 'https://github.com/user/repo'
@pytest.mark.parametrize('requirements_file', ["""
- username.role
- include: missing.yml
"""], indirect=True)
def test_parse_requirements_roles_with_include_missing(requirements_cli, requirements_file):
    """An 'include' entry pointing at a nonexistent file must raise."""
    err_pattern = "Failed to find include requirements file 'missing.yml' in '%s'" % to_native(requirements_file)
    with pytest.raises(AnsibleError, match=err_pattern):
        requirements_cli._parse_requirements_file(requirements_file)
@pytest.mark.parametrize('requirements_file', ["""
collections:
- namespace.name
roles:
- namespace.name
"""], indirect=True)
def test_install_implicit_role_with_collections(requirements_file, monkeypatch):
    """Bare 'ansible-galaxy install -r' installs both roles and collections, with no 'ignored' message."""
    mock_collection_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_collection', mock_collection_install)
    mock_role_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_role', mock_role_install)

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    cli = GalaxyCLI(args=['ansible-galaxy', 'install', '-r', requirements_file])
    cli.run()

    assert mock_collection_install.call_count == 1
    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_collection_install.call_args[0][0]]
    assert requirements == [('namespace.name', '*', None, 'galaxy')]
    assert mock_collection_install.call_args[0][1] == cli._get_default_collection_path()

    assert mock_role_install.call_count == 1
    assert len(mock_role_install.call_args[0][0]) == 1
    assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'

    # Nothing was skipped, so no 'will be ignored' notice should have been shown.
    assert not any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ["""
collections:
- namespace.name
roles:
- namespace.name
"""], indirect=True)
def test_install_explicit_role_with_collections(requirements_file, monkeypatch):
    """'ansible-galaxy role install -r' installs roles only and notes the skipped collections at -vvv."""
    mock_collection_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_collection', mock_collection_install)
    mock_role_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_role', mock_role_install)

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_display)

    cli = GalaxyCLI(args=['ansible-galaxy', 'role', 'install', '-r', requirements_file])
    cli.run()

    assert mock_collection_install.call_count == 0
    assert mock_role_install.call_count == 1
    assert len(mock_role_install.call_args[0][0]) == 1
    assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'

    assert any(list('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls))
@pytest.mark.parametrize('requirements_file', ["""
collections:
- namespace.name
roles:
- namespace.name
"""], indirect=True)
def test_install_role_with_collections_and_path(requirements_file, monkeypatch):
    """``install -p`` implies a role install, so collections are skipped
    and a warning says the collections section is ignored.
    """
    mock_collection_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_collection', mock_collection_install)
    mock_role_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_role', mock_role_install)
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'warning', mock_display)

    cli = GalaxyCLI(args=['ansible-galaxy', 'install', '-p', 'path', '-r', requirements_file])
    cli.run()

    assert mock_collection_install.call_count == 0
    assert mock_role_install.call_count == 1
    assert len(mock_role_install.call_args[0][0]) == 1
    assert str(mock_role_install.call_args[0][0][0]) == 'namespace.name'

    # Drop the needless list() around the generator fed to any() (ruff C419).
    assert any('contains collections which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls)
@pytest.mark.parametrize('requirements_file', ["""
collections:
- namespace.name
roles:
- namespace.name
"""], indirect=True)
def test_install_collection_with_roles(requirements_file, monkeypatch):
    """An explicit ``collection install`` installs only the collections and
    notes at verbosity that the roles section is ignored.
    """
    mock_collection_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_collection', mock_collection_install)
    mock_role_install = MagicMock()
    monkeypatch.setattr(GalaxyCLI, '_execute_install_role', mock_role_install)
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_display)

    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', '-r', requirements_file])
    cli.run()

    assert mock_collection_install.call_count == 1
    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_collection_install.call_args[0][0]]
    assert requirements == [('namespace.name', '*', None, 'galaxy')]

    assert mock_role_install.call_count == 0

    # Drop the needless list() around the generator fed to any() (ruff C419).
    assert any('contains roles which will be ignored' in mock_call[1][0] for mock_call in mock_display.mock_calls)
| 58,956
|
Python
|
.py
| 1,046
| 49.573614
| 150
| 0.676816
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,957
|
test_optparse_helpers.py
|
ansible_ansible/test/units/cli/arguments/test_optparse_helpers.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import sys
import pytest
from ansible import constants as C
from ansible.cli.arguments import option_helpers as opt_help
from ansible import __path__ as ansible_path
from ansible.release import __version__ as ansible_version
# Module search path and a fixed fake program name so the rendered version
# banner under test is deterministic.
cpath = C.DEFAULT_MODULE_PATH
FAKE_PROG = u'ansible-cli-test'
# Rendered once at import time; every parametrized case checks one fragment.
VERSION_OUTPUT = opt_help.version(prog=FAKE_PROG)


@pytest.mark.parametrize(
    'must_have', [
        FAKE_PROG + u' [core %s]' % ansible_version,
        u'config file = %s' % C.CONFIG_FILE,
        u'configured module search path = %s' % cpath,
        u'ansible python module location = %s' % ':'.join(ansible_path),
        u'ansible collection location = %s' % ':'.join(C.COLLECTIONS_PATHS),
        u'executable location = ',
        u'python version = %s' % ''.join(sys.version.splitlines()),
    ]
)
def test_option_helper_version(must_have):
    # Each expected fragment must appear verbatim in the --version output.
    assert must_have in VERSION_OUTPUT
| 1,077
|
Python
|
.py
| 26
| 37.5
| 92
| 0.696069
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,958
|
test_display_header.py
|
ansible_ansible/test/units/cli/galaxy/test_display_header.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.cli.galaxy import _display_header
def test_display_header_default(capsys):
    """Default widths: 10-char first column, 7-char second (see separators)."""
    _display_header('/collections/path', 'h1', 'h2')
    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    assert out_lines[0] == ''
    assert out_lines[1] == '# /collections/path'
    # NOTE(review): padding inside this literal looks collapsed — confirm
    # against the real column widths (10 and 7 per the separator row).
    assert out_lines[2] == 'h1 h2 '
    assert out_lines[3] == '---------- -------'
def test_display_header_widths(capsys):
    """Explicit widths (18/18) are honoured for both columns."""
    _display_header('/collections/path', 'Collection', 'Version', 18, 18)
    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    assert out_lines[0] == ''
    assert out_lines[1] == '# /collections/path'
    # NOTE(review): padding inside this literal looks collapsed — confirm
    # against the 18-dash separator widths below.
    assert out_lines[2] == 'Collection Version '
    assert out_lines[3] == '------------------ ------------------'
def test_display_header_small_widths(capsys):
    """Widths smaller than the header text fall back to the text length."""
    _display_header('/collections/path', 'Col', 'Ver', 1, 1)
    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    assert out_lines[0] == ''
    assert out_lines[1] == '# /collections/path'
    assert out_lines[2] == 'Col Ver'
    assert out_lines[3] == '--- ---'
| 1,307
|
Python
|
.py
| 29
| 40.793103
| 92
| 0.609313
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,959
|
test_display_role.py
|
ansible_ansible/test/units/cli/galaxy/test_display_role.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.cli.galaxy import _display_role
def test_display_role(mocker, capsys):
    """A role without install metadata is listed with '(unknown version)'."""
    role = mocker.Mock(install_info=None)
    role.name = 'testrole'
    _display_role(role)

    printed = capsys.readouterr().out.splitlines()
    assert printed[0] == '- testrole, (unknown version)'
def test_display_role_known_version(mocker, capsys):
    """A role with install metadata is listed with its recorded version."""
    role = mocker.Mock(install_info={'version': '1.0.0'})
    role.name = 'testrole'
    _display_role(role)

    printed = capsys.readouterr().out.splitlines()
    assert printed[0] == '- testrole, 1.0.0'
| 862
|
Python
|
.py
| 19
| 41.421053
| 92
| 0.705389
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,960
|
test_collection_extract_tar.py
|
ansible_ansible/test/units/cli/galaxy/test_collection_extract_tar.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.galaxy.collection import _extract_tar_dir
@pytest.fixture
def fake_tar_obj(mocker):
    """A minimal stand-in for a tarfile member as consumed by _extract_tar_dir.

    NOTE(review): ``type`` and ``SYMTYPE`` are set to Mock *objects* (with
    return_value) rather than plain byte constants, so ``type == SYMTYPE``
    compares two distinct Mocks and is always False — presumably the intent
    is just "this member is not a symlink"; confirm.
    """
    m_tarfile = mocker.Mock()
    m_tarfile.type = mocker.Mock(return_value=b'99')
    m_tarfile.SYMTYPE = mocker.Mock(return_value=b'22')

    return m_tarfile
def test_extract_tar_dir_exists(mocker, fake_tar_obj):
    """No mkdir happens when the target directory already exists."""
    mocker.patch('os.makedirs', return_value=None)
    mkdir_mock = mocker.patch('os.mkdir', return_value=None)
    mocker.patch('os.path.isdir', return_value=True)

    _extract_tar_dir(fake_tar_obj, '/some/dir', b'/some/dest')

    mkdir_mock.assert_not_called()
def test_extract_tar_dir_does_not_exist(mocker, fake_tar_obj):
    """A missing directory is created via os.mkdir with mode 0o755."""
    mocker.patch('os.makedirs', return_value=None)
    mkdir_mock = mocker.patch('os.mkdir', return_value=None)
    mocker.patch('os.path.isdir', return_value=False)

    _extract_tar_dir(fake_tar_obj, '/some/dir', b'/some/dest')

    assert mkdir_mock.called
    assert mkdir_mock.call_args[0] == (b'/some/dir', 0o0755)
| 1,167
|
Python
|
.py
| 25
| 42.72
| 92
| 0.715426
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,961
|
test_display_collection.py
|
ansible_ansible/test/units/cli/galaxy/test_display_collection.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.cli.galaxy import _display_collection
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
@pytest.fixture
def collection_object():
    """Factory fixture producing Requirement objects pinned at 1.5.0."""
    def _make_requirement(fqcn='sandwiches.ham'):
        return Requirement(fqcn, '1.5.0', None, 'galaxy', None)

    return _make_requirement
def test_display_collection(capsys, collection_object):
    """Default widths pad both columns.

    NOTE(review): padding inside the expected literals below looks
    collapsed to single spaces — confirm against _display_collection's
    default column widths.
    """
    _display_collection(collection_object())
    out, err = capsys.readouterr()

    assert out == 'sandwiches.ham 1.5.0 \n'


def test_display_collections_small_max_widths(capsys, collection_object):
    # Widths smaller than the values themselves do not truncate anything.
    _display_collection(collection_object(), 1, 1)
    out, err = capsys.readouterr()

    assert out == 'sandwiches.ham 1.5.0 \n'


def test_display_collections_large_max_widths(capsys, collection_object):
    # Larger widths pad the columns out further.
    _display_collection(collection_object(), 20, 20)
    out, err = capsys.readouterr()

    assert out == 'sandwiches.ham 1.5.0 \n'


def test_display_collection_small_minimum_widths(capsys, collection_object):
    # Zero minimum widths still produce a well-formed row for a short FQCN.
    _display_collection(collection_object('a.b'), min_cwidth=0, min_vwidth=0)
    out, err = capsys.readouterr()

    assert out == 'a.b 1.5.0 \n'
| 1,347
|
Python
|
.py
| 28
| 44.214286
| 92
| 0.714286
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,962
|
test_execute_list_collection.py
|
ansible_ansible/test/units/cli/galaxy/test_execute_list_collection.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pathlib
import pytest
from ansible import constants as C
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import collection
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
from ansible.module_utils.common.text.converters import to_native
from ansible.plugins.loader import init_plugin_loader
def isdir(path):
    """os.path.isdir stand-in: every path except 'nope' counts as a directory."""
    return to_native(path) != 'nope'
def cliargs(collections_paths=None, collection_name=None):
    """Seed context.CLIARGS with the values the galaxy 'list' code expects."""
    default_paths = ['/root/.ansible/collections', '/usr/share/ansible/collections']
    context.CLIARGS._store = {
        'collections_path': default_paths if collections_paths is None else collections_paths,
        'collection': collection_name,
        'type': 'collection',
        'output_format': 'human',
    }
@pytest.fixture
def mock_from_path(mocker, monkeypatch):
    """Patch collection discovery so four canned collections appear to exist
    across the two default collection paths, without touching the filesystem.
    """
    # Discovered directory path -> Requirement(...) constructor arguments.
    collection_args = {
        '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj': (
            'sandwiches.pbj',
            '1.0.0',
            '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
            'dir',
            None,
        ),
        '/usr/share/ansible/collections/ansible_collections/sandwiches/ham': (
            'sandwiches.ham',
            '1.0.0',
            '/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
            'dir',
            None,
        ),
        '/root/.ansible/collections/ansible_collections/sandwiches/pbj': (
            'sandwiches.pbj',
            '1.5.0',
            '/root/.ansible/collections/ansible_collections/sandwiches/pbj',
            'dir',
            None,
        ),
        '/root/.ansible/collections/ansible_collections/sandwiches/reuben': (
            'sandwiches.reuben',
            '2.5.0',
            '/root/.ansible/collections/ansible_collections/sandwiches/reuben',
            'dir',
            None,
        ),
    }

    def dispatch_requirement(path, am):
        # Map a "discovered" directory back to its canned Requirement.
        return Requirement(*collection_args[to_native(path)])

    # Make directory globbing return every canned collection path.
    files_mock = mocker.MagicMock()
    mocker.patch('ansible.galaxy.collection.files', return_value=files_mock)
    files_mock.glob.return_value = []

    mocker.patch.object(pathlib.Path, 'is_dir', return_value=True)
    for path, args in collection_args.items():
        files_mock.glob.return_value.append(pathlib.Path(args[2]))

    mocker.patch('ansible.galaxy.collection.Candidate.from_dir_path_as_unknown', side_effect=dispatch_requirement)

    monkeypatch.setattr(C, 'COLLECTIONS_PATHS', ['/root/.ansible/collections', '/usr/share/ansible/collections'])
def test_execute_list_collection_all(mocker, capsys, mock_from_path, tmp_path_factory):
    """Test listing all collections from multiple paths"""

    cliargs()
    init_plugin_loader()

    mocker.patch('os.path.exists', return_value=True)
    gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)

    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    # Two path sections: blank line, '# <path>' header, column headers,
    # separator, then that path's collections (sorted by name).
    # NOTE(review): intra-string padding below looks collapsed — verify
    # column spacing against the separator rows.
    assert len(out_lines) == 12
    assert out_lines[0] == ''
    assert out_lines[1] == '# /root/.ansible/collections/ansible_collections'
    assert out_lines[2] == 'Collection Version'
    assert out_lines[3] == '----------------- -------'
    assert out_lines[4] == 'sandwiches.pbj 1.5.0 '
    assert out_lines[5] == 'sandwiches.reuben 2.5.0 '
    assert out_lines[6] == ''
    assert out_lines[7] == '# /usr/share/ansible/collections/ansible_collections'
    assert out_lines[8] == 'Collection Version'
    assert out_lines[9] == '----------------- -------'
    assert out_lines[10] == 'sandwiches.ham 1.0.0 '
    assert out_lines[11] == 'sandwiches.pbj 1.0.0 '
def test_execute_list_collection_specific(mocker, capsys, mock_from_path, tmp_path_factory):
    """Test listing a specific collection"""

    collection_name = 'sandwiches.ham'

    cliargs(collection_name=collection_name)
    init_plugin_loader()
    mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
    mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5))

    gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)

    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    # Only the path that actually contains sandwiches.ham is printed.
    assert len(out_lines) == 5
    assert out_lines[0] == ''
    assert out_lines[1] == '# /usr/share/ansible/collections/ansible_collections'
    assert out_lines[2] == 'Collection Version'
    assert out_lines[3] == '-------------- -------'
    assert out_lines[4] == 'sandwiches.ham 1.0.0 '
def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_from_path, tmp_path_factory):
    """Test listing a specific collection that exists at multiple paths"""

    collection_name = 'sandwiches.pbj'

    cliargs(collection_name=collection_name)
    init_plugin_loader()
    mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)

    gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)

    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    # The collection shows once per path it is installed in, with each
    # path's own version.
    assert len(out_lines) == 10
    assert out_lines[0] == ''
    assert out_lines[1] == '# /root/.ansible/collections/ansible_collections'
    assert out_lines[2] == 'Collection Version'
    assert out_lines[3] == '-------------- -------'
    assert out_lines[4] == 'sandwiches.pbj 1.5.0 '
    assert out_lines[5] == ''
    assert out_lines[6] == '# /usr/share/ansible/collections/ansible_collections'
    assert out_lines[7] == 'Collection Version'
    assert out_lines[8] == '-------------- -------'
    assert out_lines[9] == 'sandwiches.pbj 1.0.0 '
def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory):
    """Listing a malformed FQCN raises AnsibleError before any output."""
    init_plugin_loader()

    bad_name = 'no.good.name'
    cliargs(collection_name=bad_name)
    mocker.patch('os.path.exists', return_value=True)
    mocker.patch('os.path.isdir', return_value=True)

    gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', bad_name])
    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    artifacts_manager = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)

    with pytest.raises(AnsibleError, match='Invalid collection name'):
        gc.execute_list_collection(artifacts_manager=artifacts_manager)
def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory):
    """Test listing collections when no valid paths are given"""

    cliargs()
    init_plugin_loader()

    mocker.patch('os.path.exists', return_value=True)
    mocker.patch('os.path.isdir', return_value=False)
    # Disable colour and pin the terminal width so the wrapped warning
    # text is deterministic.
    mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False)
    mocker.patch('ansible.cli.galaxy.display.columns', 79)
    gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])

    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)

    with pytest.raises(AnsibleOptionsError, match=r'None of the provided paths were usable.'):
        gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)

    out, err = capsys.readouterr()

    # The warning is line-wrapped at the patched 79-column width.
    assert '[WARNING]: - the configured path' in err
    assert 'exists, but it\nis not a directory.' in err
def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_from_path, tmp_path_factory):
    """Test listing all collections when one invalid path is given"""

    cliargs(collections_paths=['nope'])
    init_plugin_loader()

    mocker.patch('os.path.exists', return_value=True)
    # 'nope' is the only path isdir() rejects (see the module-level helper).
    mocker.patch('os.path.isdir', isdir)
    mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False)

    gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope'])
    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)

    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    assert out_lines[0] == ''
    assert out_lines[1] == '# /root/.ansible/collections/ansible_collections'
    assert out_lines[2] == 'Collection Version'
    assert out_lines[3] == '----------------- -------'
    assert out_lines[4] == 'sandwiches.pbj 1.5.0 '
    # Only a partial test of the output

    assert err == '[WARNING]: - the configured path nope, exists, but it is not a directory.\n'
| 9,698
|
Python
|
.py
| 183
| 46.693989
| 120
| 0.690959
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,963
|
test_get_collection_widths.py
|
ansible_ansible/test/units/cli/galaxy/test_get_collection_widths.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.cli.galaxy import _get_collection_widths
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
@pytest.fixture
def collection_objects():
    """Three Requirements whose names/versions have varying display widths."""
    specs = [
        ('sandwiches.ham', '1.5.0'),
        ('sandwiches.pbj', '2.5'),
        ('sandwiches.reuben', '4'),
    ]
    return [Requirement(fqcn, version, None, 'galaxy', None) for fqcn, version in specs]
def test_get_collection_widths(collection_objects):
    """Widths come from the longest name (17) and longest version (5)."""
    widths = _get_collection_widths(collection_objects)
    assert widths == (17, 5)
def test_get_collection_widths_single_collection(mocker):
    """A single, non-iterable collection is measured on its own."""
    single = Requirement('sandwiches.club', '3.0.0', None, 'galaxy', None)
    # Force the "not iterable" branch so the object is treated as one item.
    mocker.patch('ansible.cli.galaxy.is_iterable', return_value=False)

    assert _get_collection_widths(single) == (15, 5)
| 1,135
|
Python
|
.py
| 20
| 53.3
| 92
| 0.746824
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,964
|
test_execute_list.py
|
ansible_ansible/test/units/cli/galaxy/test_execute_list.py
|
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
def test_execute_list_role_called(mocker):
    """execute_list dispatches to execute_list_role for the 'role' type."""
    gc = GalaxyCLI(['ansible-galaxy', 'role', 'list'])
    context.CLIARGS._store = {'type': 'role'}

    role_mock = mocker.patch(
        'ansible.cli.galaxy.GalaxyCLI.execute_list_role',
        side_effect=AttributeError('raised intentionally'),
    )
    collection_mock = mocker.patch(
        'ansible.cli.galaxy.GalaxyCLI.execute_list_collection',
        side_effect=AttributeError('raised intentionally'),
    )

    with pytest.raises(AttributeError):
        gc.execute_list()

    assert role_mock.call_count == 1
    assert collection_mock.call_count == 0
def test_execute_list_collection_called(mocker):
    """execute_list dispatches to execute_list_collection for the 'collection' type."""
    gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
    context.CLIARGS._store = {'type': 'collection'}

    role_mock = mocker.patch(
        'ansible.cli.galaxy.GalaxyCLI.execute_list_role',
        side_effect=AttributeError('raised intentionally'),
    )
    collection_mock = mocker.patch(
        'ansible.cli.galaxy.GalaxyCLI.execute_list_collection',
        side_effect=AttributeError('raised intentionally'),
    )

    with pytest.raises(AttributeError):
        gc.execute_list()

    assert role_mock.call_count == 0
    assert collection_mock.call_count == 1
| 1,683
|
Python
|
.py
| 28
| 55.857143
| 155
| 0.740876
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,965
|
test_errors.py
|
ansible_ansible/test/units/errors/test_errors.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
class TestErrors(unittest.TestCase):
    """Checks AnsibleError message rendering, including the 'offending line'
    context extracted from the originating YAML file via ansible_pos.

    NOTE(review): single spaces inside some expected literals below look
    collapsed (e.g. before '^ here') — verify against the renderer's actual
    column padding.
    """

    def setUp(self):
        # Fixed messages plus a YAML object carrying ansible_pos metadata.
        self.message = 'This is the error message'
        self.unicode_message = 'This is an error with \xf0\x9f\x98\xa8 in it'

        self.obj = AnsibleBaseYAMLObject()

    def test_basic_error(self):
        e = AnsibleError(self.message)
        self.assertEqual(e.message, self.message)
        self.assertEqual(repr(e), self.message)

    def test_basic_unicode_error(self):
        e = AnsibleError(self.unicode_message)
        self.assertEqual(e.message, self.unicode_message)
        self.assertEqual(repr(e), self.unicode_message)

    @patch.object(AnsibleError, '_get_error_lines_from_file')
    def test_error_with_kv(self, mock_method):
        """ This tests a task with both YAML and k=v syntax

        - lineinfile: line=foo path=bar
          line: foo

        An accurate error message and position indicator are expected.

        _get_error_lines_from_file() returns (target_line, prev_line)
        """

        self.obj.ansible_pos = ('foo.yml', 2, 1)

        mock_method.return_value = [' line: foo\n', '- lineinfile: line=foo path=bar\n']

        e = AnsibleError(self.message, self.obj)
        self.assertEqual(
            e.message,
            ("This is the error message\n\nThe error appears to be in 'foo.yml': line 1, column 19, but may\nbe elsewhere in the "
             "file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n- lineinfile: line=foo path=bar\n"
             " ^ here\n\n"
             "There appears to be both 'k=v' shorthand syntax and YAML in this task. Only one syntax may be used.\n")
        )

    @patch.object(AnsibleError, '_get_error_lines_from_file')
    def test_error_with_object(self, mock_method):
        self.obj.ansible_pos = ('foo.yml', 1, 1)

        mock_method.return_value = ('this is line 1\n', '')
        e = AnsibleError(self.message, self.obj)

        self.assertEqual(
            e.message,
            ("This is the error message\n\nThe error appears to be in 'foo.yml': line 1, column 1, but may\nbe elsewhere in the file depending on the "
             "exact syntax problem.\n\nThe offending line appears to be:\n\n\nthis is line 1\n^ here\n")
        )

    def test_get_error_lines_from_file(self):
        m = mock_open()
        m.return_value.readlines.return_value = ['this is line 1\n']

        with patch('builtins.open', m):
            # this line will be found in the file
            self.obj.ansible_pos = ('foo.yml', 1, 1)
            e = AnsibleError(self.message, self.obj)
            self.assertEqual(
                e.message,
                ("This is the error message\n\nThe error appears to be in 'foo.yml': line 1, column 1, but may\nbe elsewhere in the file depending on "
                 "the exact syntax problem.\n\nThe offending line appears to be:\n\n\nthis is line 1\n^ here\n")
            )

            with patch('ansible.errors.to_text', side_effect=IndexError('Raised intentionally')):
                # raise an IndexError
                self.obj.ansible_pos = ('foo.yml', 2, 1)
                e = AnsibleError(self.message, self.obj)
                self.assertEqual(
                    e.message,
                    ("This is the error message\n\nThe error appears to be in 'foo.yml': line 2, column 1, but may\nbe elsewhere in the file depending on "
                     "the exact syntax problem.\n\n(specified line no longer in file, maybe it changed?)")
                )

        m = mock_open()
        m.return_value.readlines.return_value = ['this line has unicode \xf0\x9f\x98\xa8 in it!\n']

        with patch('builtins.open', m):
            # this line will be found in the file
            self.obj.ansible_pos = ('foo.yml', 1, 1)
            e = AnsibleError(self.unicode_message, self.obj)
            self.assertEqual(
                e.message,
                ("This is an error with \xf0\x9f\x98\xa8 in it\n\nThe error appears to be in 'foo.yml': line 1, column 1, but may\nbe elsewhere in the "
                 "file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n\nthis line has unicode \xf0\x9f\x98\xa8 in it!\n^ "
                 "here\n")
            )

    def test_get_error_lines_error_in_last_line(self):
        m = mock_open()
        m.return_value.readlines.return_value = ['this is line 1\n', 'this is line 2\n', 'this is line 3\n']

        with patch('builtins.open', m):
            # If the error occurs in the last line of the file, use the correct index to get the line
            # and avoid the IndexError
            self.obj.ansible_pos = ('foo.yml', 4, 1)
            e = AnsibleError(self.message, self.obj)
            self.assertEqual(
                e.message,
                ("This is the error message\n\nThe error appears to be in 'foo.yml': line 4, column 1, but may\nbe elsewhere in the file depending on "
                 "the exact syntax problem.\n\nThe offending line appears to be:\n\nthis is line 2\nthis is line 3\n^ here\n")
            )

    def test_get_error_lines_error_empty_lines_around_error(self):
        """Test that trailing whitespace after the error is removed"""
        m = mock_open()
        m.return_value.readlines.return_value = ['this is line 1\n', 'this is line 2\n', 'this is line 3\n', ' \n', ' \n', ' ']

        with patch('builtins.open', m):
            self.obj.ansible_pos = ('foo.yml', 5, 1)
            e = AnsibleError(self.message, self.obj)
            self.assertEqual(
                e.message,
                ("This is the error message\n\nThe error appears to be in 'foo.yml': line 5, column 1, but may\nbe elsewhere in the file depending on "
                 "the exact syntax problem.\n\nThe offending line appears to be:\n\nthis is line 2\nthis is line 3\n^ here\n")
            )
| 6,799
|
Python
|
.py
| 120
| 46.641667
| 155
| 0.628778
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,966
|
test_play_iterator.py
|
ansible_ansible/test/units/executor/test_play_iterator.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import patch, MagicMock
from ansible.executor.play_iterator import HostState, PlayIterator, IteratingStates, FailedStates
from ansible.playbook import Playbook
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import init_plugin_loader
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
class TestPlayIterator(unittest.TestCase):
def test_host_state(self):
hs = HostState(blocks=list(range(0, 10)))
hs.tasks_child_state = HostState(blocks=[0])
hs.rescue_child_state = HostState(blocks=[1])
hs.always_child_state = HostState(blocks=[2])
repr(hs)
hs.run_state = 100
repr(hs)
hs.fail_state = 15
repr(hs)
for i in range(0, 10):
hs.cur_block = i
self.assertEqual(hs.get_current_block(), i)
new_hs = hs.copy()
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_play_iterator(self):
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
gather_facts: false
roles:
- test_role
pre_tasks:
- debug: msg="this is a pre_task"
tasks:
- debug: msg="this is a regular task"
- block:
- debug: msg="this is a block task"
- block:
- debug: msg="this is a sub-block in a block"
rescue:
- debug: msg="this is a rescue task"
- block:
- debug: msg="this is a sub-block in a rescue"
always:
- debug: msg="this is an always task"
- block:
- debug: msg="this is a sub-block in an always"
post_tasks:
- debug: msg="this is a post_task"
""",
'/etc/ansible/roles/test_role/tasks/main.yml': """
- name: role task
debug: msg="this is a role task"
- block:
- name: role block task
debug: msg="inside block in role"
always:
- name: role always task
debug: msg="always task in block in role"
- name: role include_tasks
include_tasks: foo.yml
- name: role task after include
debug: msg="after include in role"
- block:
- name: starting role nested block 1
debug:
- block:
- name: role nested block 1 task 1
debug:
- name: role nested block 1 task 2
debug:
- name: role nested block 1 task 3
debug:
- name: end of role nested block 1
debug:
- name: starting role nested block 2
debug:
- block:
- name: role nested block 2 task 1
debug:
- name: role nested block 2 task 2
debug:
- name: role nested block 2 task 3
debug:
- name: end of role nested block 2
debug:
""",
'/etc/ansible/roles/test_role/tasks/foo.yml': """
- name: role included task
debug: msg="this is task in an include from a role"
"""
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
hosts = []
for i in range(0, 10):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
mock_var_manager._fact_cache['host00'] = dict()
inventory = MagicMock()
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# pre task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
# role task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.name, "role task")
self.assertIsNotNone(task._role)
# role block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role block task")
self.assertIsNotNone(task._role)
# role block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role always task")
self.assertIsNotNone(task._role)
# role include_tasks
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'include_tasks')
self.assertEqual(task.name, "role include_tasks")
self.assertIsNotNone(task._role)
# role task after include
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role task after include")
self.assertIsNotNone(task._role)
# role nested block tasks
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "starting role nested block 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 1 task 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 1 task 2")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 1 task 3")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "end of role nested block 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "starting role nested block 2")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 2 task 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 2 task 2")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 2 task 3")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "end of role nested block 2")
self.assertIsNotNone(task._role)
# implicit meta: role_complete
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
self.assertIsNotNone(task._role)
# regular play task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertIsNone(task._role)
# block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a block task"))
# sub-block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in a block"))
# mark the host failed
itr.mark_host_failed(hosts[0])
# block rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a rescue task"))
# sub-block rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in a rescue"))
# block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is an always task"))
# sub-block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in an always"))
# post task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
# end of iteration
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNone(task)
# host 0 shouldn't be in the failed hosts, as the error
# was handled by a rescue block
failed_hosts = itr.get_failed_hosts()
self.assertNotIn(hosts[0], failed_hosts)
    def test_play_iterator_nested_blocks(self):
        """Walk a play whose tasks/rescue/always sections are nested five
        blocks deep and verify PlayIterator still yields the leaf tasks,
        and that failing the host mid-block jumps to rescue then always.

        NOTE(review): indentation of the inline YAML below was reconstructed;
        the nesting depth (5 blocks around the first task, 4 around the
        rescue/always tasks) matches the assertions that follow.
        """
        init_plugin_loader()
        fake_loader = DictDataLoader({
            "test_play.yml": """
            - hosts: all
              gather_facts: false
              tasks:
                - block:
                    - block:
                        - block:
                            - block:
                                - block:
                                    - debug: msg="this is the first task"
                                    - ping:
                  rescue:
                    - block:
                        - block:
                            - block:
                                - block:
                                    - debug: msg="this is the rescue task"
                  always:
                    - block:
                        - block:
                            - block:
                                - block:
                                    - debug: msg="this is the always task"
            """,
        })

        mock_var_manager = MagicMock()
        mock_var_manager._fact_cache = dict()
        mock_var_manager.get_vars.return_value = dict()

        p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)

        # Ten mock hosts; only hosts[0] is actually iterated below.
        hosts = []
        for i in range(0, 10):
            host = MagicMock()
            host.name = host.get_name.return_value = 'host%02d' % i
            hosts.append(host)

        inventory = MagicMock()
        inventory.get_hosts.return_value = hosts
        inventory.filter_hosts.return_value = hosts

        play_context = PlayContext(play=p._entries[0])

        itr = PlayIterator(
            inventory=inventory,
            play=p._entries[0],
            play_context=play_context,
            variable_manager=mock_var_manager,
            all_vars=dict(),
        )

        # get the first task
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'debug')
        self.assertEqual(task.args, dict(msg='this is the first task'))
        # fail the host; the remaining task in the block (the ping) is
        # skipped and iteration moves to the rescue section
        itr.mark_host_failed(hosts[0])
        # get the rescue task
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'debug')
        self.assertEqual(task.args, dict(msg='this is the rescue task'))
        # get the always task
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNotNone(task)
        self.assertEqual(task.action, 'debug')
        self.assertEqual(task.args, dict(msg='this is the always task'))
        # end of iteration
        (host_state, task) = itr.get_next_task_for_host(hosts[0])
        self.assertIsNone(task)
def test_play_iterator_add_tasks(self):
fake_loader = DictDataLoader({
'test_play.yml': """
- hosts: all
gather_facts: no
tasks:
- debug: msg="dummy task"
""",
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
hosts = []
for i in range(0, 10):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
inventory = MagicMock()
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# test the high-level add_tasks() method
s = HostState(blocks=[0, 1, 2])
itr._insert_tasks_into_state = MagicMock(return_value=s)
itr.add_tasks(hosts[0], [MagicMock(), MagicMock(), MagicMock()])
self.assertEqual(itr._host_states[hosts[0].name], s)
# now actually test the lower-level method that does the work
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# iterate past first task
dummy, task = itr.get_next_task_for_host(hosts[0])
while (task and task.action != 'debug'):
dummy, task = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task, 'iterated past end of play while looking for place to insert tasks')
# get the current host state and copy it so we can mutate it
s = itr.get_host_state(hosts[0])
s_copy = s.copy()
# assert with an empty task list, or if we're in a failed state, we simply return the state as-is
res_state = itr._insert_tasks_into_state(s_copy, task_list=[])
self.assertEqual(res_state, s_copy)
s_copy.fail_state = FailedStates.TASKS
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])
self.assertEqual(res_state, s_copy)
# but if we've failed with a rescue/always block
mock_task = MagicMock()
s_copy.run_state = IteratingStates.RESCUE
res_state = itr._insert_tasks_into_state(s_copy, task_list=[mock_task])
self.assertEqual(res_state, s_copy)
self.assertIn(mock_task, res_state._blocks[res_state.cur_block].rescue)
itr.set_state_for_host(hosts[0].name, res_state)
(next_state, next_task) = itr.get_next_task_for_host(hosts[0], peek=True)
self.assertEqual(next_task, mock_task)
itr.set_state_for_host(hosts[0].name, s)
# test a regular insertion
s_copy = s.copy()
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])
| 17,149
|
Python
|
.py
| 392
| 32.954082
| 105
| 0.590702
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,967
|
test_task_result.py
|
ansible_ansible/test/units/executor/test_task_result.py
|
# (c) 2016, James Cammarata <jimi@sngx.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import patch, MagicMock
from ansible.executor.task_result import TaskResult
class TestTaskResult(unittest.TestCase):
    """Unit tests for TaskResult: construction from dict/JSON, the
    is_changed/is_skipped/is_unreachable/is_failed predicates (including
    their per-loop-item aggregation), and no_log scrubbing via clean_copy().
    """

    def test_task_result_basic(self):
        """TaskResult accepts both a plain dict and a JSON string result."""
        mock_host = MagicMock()
        mock_task = MagicMock()

        # test loading a result with a dict
        tr = TaskResult(mock_host, mock_task, dict())

        # test loading a result with a JSON string; DataLoader.load is
        # patched so no real parsing machinery is exercised
        with patch('ansible.parsing.dataloader.DataLoader.load') as p:
            tr = TaskResult(mock_host, mock_task, '{}')

    def test_task_result_is_changed(self):
        """is_changed: any single changed loop item marks the result changed."""
        mock_host = MagicMock()
        mock_task = MagicMock()

        # test with no changed in result
        tr = TaskResult(mock_host, mock_task, dict())
        self.assertFalse(tr.is_changed())

        # test with changed in the result
        tr = TaskResult(mock_host, mock_task, dict(changed=True))
        self.assertTrue(tr.is_changed())

        # test with multiple results but none changed
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
        self.assertFalse(tr.is_changed())

        # test with multiple results and one changed
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=[dict(changed=False), dict(changed=True), dict(some_key=False)]))
        self.assertTrue(tr.is_changed())

    def test_task_result_is_skipped(self):
        """is_skipped: a loop counts as skipped only if EVERY item skipped."""
        mock_host = MagicMock()
        mock_task = MagicMock()

        # test with no skipped in result
        tr = TaskResult(mock_host, mock_task, dict())
        self.assertFalse(tr.is_skipped())

        # test with skipped in the result
        tr = TaskResult(mock_host, mock_task, dict(skipped=True))
        self.assertTrue(tr.is_skipped())

        # test with multiple results but none skipped
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
        self.assertFalse(tr.is_skipped())

        # test with multiple results and one skipped -- unlike is_changed,
        # a partially-skipped loop is NOT considered skipped
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=[dict(skipped=False), dict(skipped=True), dict(some_key=False)]))
        self.assertFalse(tr.is_skipped())

        # test with multiple results and all skipped
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=[dict(skipped=True), dict(skipped=True), dict(skipped=True)]))
        self.assertTrue(tr.is_skipped())

        # test with multiple squashed results (list of strings)
        # first with the main result having skipped=False
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=["a", "b", "c"], skipped=False))
        self.assertFalse(tr.is_skipped())
        # then with the main result having skipped=True
        tr = TaskResult(mock_host, mock_task, dict(results=["a", "b", "c"], skipped=True))
        self.assertTrue(tr.is_skipped())

    def test_task_result_is_unreachable(self):
        """is_unreachable: any single unreachable loop item flags the result."""
        mock_host = MagicMock()
        mock_task = MagicMock()

        # test with no unreachable in result
        tr = TaskResult(mock_host, mock_task, dict())
        self.assertFalse(tr.is_unreachable())

        # test with unreachable in the result
        tr = TaskResult(mock_host, mock_task, dict(unreachable=True))
        self.assertTrue(tr.is_unreachable())

        # test with multiple results but none unreachable
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=[dict(foo='bar'), dict(bam='baz'), True]))
        self.assertFalse(tr.is_unreachable())

        # test with multiple results and one unreachable
        mock_task.loop = 'foo'
        tr = TaskResult(mock_host, mock_task, dict(results=[dict(unreachable=False), dict(unreachable=True), dict(some_key=False)]))
        self.assertTrue(tr.is_unreachable())

    def test_task_result_is_failed(self):
        """is_failed keys off 'failed'/'failed_when_result', never 'rc'."""
        mock_host = MagicMock()
        mock_task = MagicMock()

        # test with no failed in result
        tr = TaskResult(mock_host, mock_task, dict())
        self.assertFalse(tr.is_failed())

        # test failed result with rc values (should not matter)
        tr = TaskResult(mock_host, mock_task, dict(rc=0))
        self.assertFalse(tr.is_failed())
        tr = TaskResult(mock_host, mock_task, dict(rc=1))
        self.assertFalse(tr.is_failed())

        # test with failed in result
        tr = TaskResult(mock_host, mock_task, dict(failed=True))
        self.assertTrue(tr.is_failed())

        # test with failed_when in result
        tr = TaskResult(mock_host, mock_task, dict(failed_when_result=True))
        self.assertTrue(tr.is_failed())

    def test_task_result_no_log(self):
        """clean_copy() strips non-whitelisted keys when _ansible_no_log set."""
        mock_host = MagicMock()
        mock_task = MagicMock()

        # no_log should remove secrets
        tr = TaskResult(mock_host, mock_task, dict(_ansible_no_log=True, secret='DONTSHOWME'))
        clean = tr.clean_copy()
        self.assertTrue('secret' not in clean._result)

    def test_task_result_no_log_preserve(self):
        """clean_copy() keeps control keys (retries/attempts/changed) under no_log."""
        mock_host = MagicMock()
        mock_task = MagicMock()

        # no_log should not remove preserved keys
        tr = TaskResult(
            mock_host,
            mock_task,
            dict(
                _ansible_no_log=True,
                retries=5,
                attempts=5,
                changed=False,
                foo='bar',
            )
        )
        clean = tr.clean_copy()
        self.assertTrue('retries' in clean._result)
        self.assertTrue('attempts' in clean._result)
        self.assertTrue('changed' in clean._result)
        self.assertTrue('foo' not in clean._result)
| 6,478
|
Python
|
.py
| 136
| 39.389706
| 132
| 0.647171
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,968
|
test_interpreter_discovery.py
|
ansible_ansible/test/units/executor/test_interpreter_discovery.py
|
# -*- coding: utf-8 -*-
# (c) 2019, Jordan Borean <jborean@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from unittest.mock import MagicMock
from ansible.executor.interpreter_discovery import discover_interpreter
from ansible.module_utils.common.text.converters import to_text
from ansible.errors import AnsibleConnectionFailure
# Canned JSON payload mimicking what the interpreter-discovery platform probe
# emits for an Ubuntu-like system: the raw /etc/os-release contents plus the
# dist tuple ("Ansible Test", "100", "beans"). Shared by the tests below as
# the second _low_level_execute_command response.
mock_ubuntu_platform_res = to_text(
    r'{"osrelease_content": "NAME=\"Ansible Test\"\nVERSION=\"100\"\nID=ansible-test\nID_LIKE=debian\n'
    r'PRETTY_NAME=\"Ansible Test 100\"\nVERSION_ID=\"100\"\nHOME_URL=\"http://ansible.com/\"\n'
    r'SUPPORT_URL=\"http://github.com/ansible/ansible\"\nBUG_REPORT_URL=\"http://github.com/ansible/ansible/\"\n'
    r'VERSION_CODENAME=beans\nUBUNTU_CODENAME=beans\n", "platform_dist_result": ["Ansible Test", "100", "beans"]}'
)
def test_discovery_interpreter_linux_auto_legacy():
    """In auto_legacy mode the discovered platform default (/usr/bin/python3)
    wins over the preferred /usr/bin/python99, and a compatibility warning
    is queued on the action plugin."""
    probe_output = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python99\n/usr/bin/python3\nENDFOUND'
    action = MagicMock()
    # First call returns the probe, second the platform/os-release payload.
    action._low_level_execute_command.side_effect = [
        {'stdout': probe_output},
        {'stdout': mock_ubuntu_platform_res},
    ]

    chosen = discover_interpreter(action, 'python', 'auto_legacy', {'inventory_hostname': u'host-fóöbär'})

    assert chosen == u'/usr/bin/python3'
    assert len(action.method_calls) == 3
    assert action.method_calls[2][0] == '_discovery_warnings.append'
    expected_warning = (
        u'Distribution Ansible Test 100 on host host-fóöbär should use /usr/bin/python99, but is using /usr/bin/python3'
        u' for backward compatibility'
    )
    assert expected_warning in action.method_calls[2][1][0]
def test_discovery_interpreter_linux_auto_legacy_silent():
    """auto_legacy_silent behaves like auto_legacy but emits no warning:
    only the two low-level command calls are recorded."""
    probe_output = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python3.9\n/usr/bin/python3\nENDFOUND'
    action = MagicMock()
    action._low_level_execute_command.side_effect = [
        {'stdout': probe_output},
        {'stdout': mock_ubuntu_platform_res},
    ]

    chosen = discover_interpreter(action, 'python', 'auto_legacy_silent', {'inventory_hostname': u'host-fóöbär'})

    assert chosen == u'/usr/bin/python3'
    assert len(action.method_calls) == 2
def test_discovery_interpreter_linux_auto():
    """Plain auto mode picks the first interpreter listed by the probe
    (/usr/bin/python99) with no legacy fallback and no warning."""
    probe_output = u'PLATFORM\nLinux\nFOUND\n/usr/bin/python99\n/usr/bin/python3\nENDFOUND'
    action = MagicMock()
    action._low_level_execute_command.side_effect = [
        {'stdout': probe_output},
        {'stdout': mock_ubuntu_platform_res},
    ]

    chosen = discover_interpreter(action, 'python', 'auto', {'inventory_hostname': u'host-fóöbär'})

    assert chosen == u'/usr/bin/python99'
    assert len(action.method_calls) == 2
def test_discovery_interpreter_non_linux():
    """On a non-Linux platform (Darwin) the found interpreter is used as-is,
    accompanied by a 'path meaning could change' warning."""
    action = MagicMock()
    action._low_level_execute_command.return_value = {
        'stdout': u'PLATFORM\nDarwin\nFOUND\n/usr/bin/python3\nENDFOUND',
    }

    chosen = discover_interpreter(action, 'python', 'auto_legacy', {'inventory_hostname': u'host-fóöbär'})

    assert chosen == u'/usr/bin/python3'
    assert len(action.method_calls) == 2
    assert action.method_calls[1][0] == '_discovery_warnings.append'
    expected_warning = (
        u'Platform darwin on host host-fóöbär is using the discovered Python interpreter at /usr/bin/python3, '
        u'but future installation of another Python interpreter could change the meaning of that path'
    )
    assert expected_warning in action.method_calls[1][1][0]
def test_no_interpreters_found():
    """When the probe finds no interpreters, discovery falls back to
    /usr/bin/python3 and warns that nothing was found."""
    action = MagicMock()
    action._low_level_execute_command.return_value = {
        'stdout': u'PLATFORM\nWindows\nFOUND\nENDFOUND',
    }

    chosen = discover_interpreter(action, 'python', 'auto_legacy', {'inventory_hostname': u'host-fóöbär'})

    assert chosen == u'/usr/bin/python3'
    assert len(action.method_calls) == 2
    assert action.method_calls[1][0] == '_discovery_warnings.append'
    assert u'No python interpreters found for host host-fóöbär (tried' in action.method_calls[1][1][0]
def test_ansible_error_exception():
    """Connection failures raised by the low-level command are propagated
    unchanged rather than being swallowed by discovery."""
    action = MagicMock()
    action._low_level_execute_command.side_effect = AnsibleConnectionFailure("host key mismatch")

    with pytest.raises(AnsibleConnectionFailure) as exc_info:
        discover_interpreter(action, 'python', 'auto_legacy', {'inventory_hostname': u'host'})

    assert str(exc_info.value) == 'host key mismatch'
| 4,381
|
Python
|
.py
| 65
| 62.046154
| 125
| 0.723539
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,969
|
test_task_executor.py
|
ansible_ansible/test/units/executor/test_task_executor.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from unittest import mock
import unittest
from unittest.mock import patch, MagicMock
from ansible.errors import AnsibleError
from ansible.executor.task_executor import TaskExecutor, remove_omit
from ansible.plugins.loader import action_loader, lookup_loader
from ansible.parsing.yaml.objects import AnsibleUnicode
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
from collections import namedtuple
from units.mock.loader import DictDataLoader
# Lightweight (object, plugin_load_context) pair used below to stub the
# return value of TaskExecutor._get_action_handler_with_context.
get_with_context_result = namedtuple('get_with_context_result', ['object', 'plugin_load_context'])
class TestTaskExecutor(unittest.TestCase):
    """Unit tests for TaskExecutor: construction, run()/loop handling,
    action-handler resolution, _execute(), async polling, and the
    remove_omit() helper. Collaborators are MagicMocks throughout."""

    def test_task_executor_init(self):
        """TaskExecutor can be constructed with mocked collaborators."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        new_stdin = None
        job_vars = dict()
        mock_queue = MagicMock()
        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            final_q=mock_queue,
            variable_manager=MagicMock(),
        )

    def test_task_executor_run(self):
        """run() handles no-loop, empty-loop, populated-loop and the
        error path (AnsibleError from _get_loop_items -> 'failed' key)."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task._role._role_path = '/path/to/role/foo'
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        mock_queue = MagicMock()
        new_stdin = None
        job_vars = dict()
        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            final_q=mock_queue,
            variable_manager=MagicMock(),
        )

        # no loop items: falls through to a single _execute()
        te._get_loop_items = MagicMock(return_value=None)
        te._execute = MagicMock(return_value=dict())
        res = te.run()

        # empty loop
        te._get_loop_items = MagicMock(return_value=[])
        res = te.run()

        # populated loop delegates to _run_loop
        te._get_loop_items = MagicMock(return_value=['a', 'b', 'c'])
        te._run_loop = MagicMock(return_value=[dict(item='a', changed=True), dict(item='b', failed=True), dict(item='c')])
        res = te.run()

        # AnsibleError while building loop items yields a failed result
        te._get_loop_items = MagicMock(side_effect=AnsibleError(""))
        res = te.run()
        self.assertIn("failed", res)

    def test_task_executor_run_clean_res(self):
        """run() sanitizes loop results: unsafe bytes/text both become
        AnsibleUnsafeText, plain bytes become str, other types survive."""
        te = TaskExecutor(None, MagicMock(), None, None, None, None, None, None, None)
        te._get_loop_items = MagicMock(return_value=[1])
        te._run_loop = MagicMock(
            return_value=[
                {
                    'unsafe_bytes': AnsibleUnsafeBytes(b'{{ $bar }}'),
                    'unsafe_text': AnsibleUnsafeText(u'{{ $bar }}'),
                    'bytes': b'bytes',
                    'text': u'text',
                    'int': 1,
                }
            ]
        )
        res = te.run()
        data = res['results'][0]
        self.assertIsInstance(data['unsafe_bytes'], AnsibleUnsafeText)
        self.assertIsInstance(data['unsafe_text'], AnsibleUnsafeText)
        self.assertIsInstance(data['bytes'], str)
        self.assertIsInstance(data['text'], str)
        self.assertIsInstance(data['int'], int)

    def test_task_executor_get_loop_items(self):
        """_get_loop_items resolves with_items via the real lookup_loader."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task.loop_with = 'items'
        mock_task.loop = ['a', 'b', 'c']
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        # use the genuine lookup loader so the 'items' plugin is found
        mock_shared_loader.lookup_loader = lookup_loader
        new_stdin = None
        job_vars = dict()
        mock_queue = MagicMock()
        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            final_q=mock_queue,
            variable_manager=MagicMock(),
        )
        items = te._get_loop_items()
        self.assertEqual(items, ['a', 'b', 'c'])

    def test_task_executor_run_loop(self):
        """_run_loop executes once per item and collects per-item results."""
        items = ['a', 'b', 'c']
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()

        # each per-item task copy needs its own loop_control mock
        def _copy(exclude_parent=False, exclude_tasks=False):
            new_item = MagicMock()
            new_item.loop_control = MagicMock(break_when=[])
            return new_item

        mock_task = MagicMock()
        mock_task.loop_control = MagicMock(break_when=[])
        mock_task.copy.side_effect = _copy
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        mock_queue = MagicMock()
        new_stdin = None
        job_vars = dict()
        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            final_q=mock_queue,
            variable_manager=MagicMock(),
        )

        # echo the current loop item back as the execution result
        def _execute(variables):
            return dict(item=variables.get('item'))

        te._execute = MagicMock(side_effect=_execute)

        res = te._run_loop(items)
        self.assertEqual(len(res), 3)

    def test_task_executor_get_action_handler(self):
        """When the action name resolves directly, that plugin is returned
        and action_loader.get is called with the full action name."""
        te = TaskExecutor(
            host=MagicMock(),
            task=MagicMock(),
            job_vars={},
            play_context=MagicMock(),
            new_stdin=None,
            loader=DictDataLoader({}),
            shared_loader_obj=MagicMock(),
            final_q=MagicMock(),
            variable_manager=MagicMock(),
        )
        context = MagicMock(resolved=False)
        te._shared_loader_obj.module_loader.find_plugin_with_context.return_value = context
        action_loader = te._shared_loader_obj.action_loader
        action_loader.has_plugin.return_value = True
        action_loader.get.return_value = mock.sentinel.handler

        mock_templar = MagicMock()
        action = 'namespace.prefix_suffix'
        te._task.action = action
        te._connection = MagicMock()

        with patch('ansible.executor.task_executor.start_connection'):
            handler = te._get_action_handler(mock_templar)

        self.assertIs(mock.sentinel.handler, handler)

        action_loader.has_plugin.assert_called_once_with(action, collection_list=te._task.collections)
        action_loader.get.assert_called_with(
            te._task.action, task=te._task, connection=te._connection,
            play_context=te._play_context, loader=te._loader,
            templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
            collection_list=te._task.collections)

    def test_task_executor_get_handler_prefix(self):
        """When the full action name misses but its underscore prefix hits
        (e.g. netconf_*), the prefix plugin is used instead."""
        te = TaskExecutor(
            host=MagicMock(),
            task=MagicMock(),
            job_vars={},
            play_context=MagicMock(),
            new_stdin=None,
            loader=DictDataLoader({}),
            shared_loader_obj=MagicMock(),
            final_q=MagicMock(),
            variable_manager=MagicMock(),
        )
        context = MagicMock(resolved=False)
        te._shared_loader_obj.module_loader.find_plugin_with_context.return_value = context
        action_loader = te._shared_loader_obj.action_loader
        # first lookup (full name) fails, second (prefix) succeeds
        action_loader.has_plugin.side_effect = [False, True]
        action_loader.get.return_value = mock.sentinel.handler
        action_loader.__contains__.return_value = True

        mock_templar = MagicMock()
        action = 'namespace.netconf_suffix'
        module_prefix = action.split('_', 1)[0]
        te._task.action = action
        te._connection = MagicMock()

        with patch('ansible.executor.task_executor.start_connection'):
            handler = te._get_action_handler(mock_templar)

        self.assertIs(mock.sentinel.handler, handler)
        action_loader.has_plugin.assert_has_calls([mock.call(action, collection_list=te._task.collections),  # called twice
                                                   mock.call(module_prefix, collection_list=te._task.collections)])
        action_loader.get.assert_called_with(
            module_prefix, task=te._task, connection=te._connection,
            play_context=te._play_context, loader=te._loader,
            templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
            collection_list=te._task.collections)

    def test_task_executor_get_handler_normal(self):
        """When neither the full name nor the prefix resolves, the fallback
        'ansible.legacy.normal' action is used with no collection list."""
        te = TaskExecutor(
            host=MagicMock(),
            task=MagicMock(),
            job_vars={},
            play_context=MagicMock(),
            new_stdin=None,
            loader=DictDataLoader({}),
            shared_loader_obj=MagicMock(),
            final_q=MagicMock(),
            variable_manager=MagicMock(),
        )
        action_loader = te._shared_loader_obj.action_loader
        action_loader.has_plugin.return_value = False
        action_loader.get.return_value = mock.sentinel.handler
        action_loader.__contains__.return_value = False

        module_loader = te._shared_loader_obj.module_loader
        context = MagicMock(resolved=False)
        module_loader.find_plugin_with_context.return_value = context

        mock_templar = MagicMock()
        action = 'namespace.prefix_suffix'
        module_prefix = action.split('_', 1)[0]
        te._task.action = action
        te._connection = MagicMock()

        with patch('ansible.executor.task_executor.start_connection'):
            handler = te._get_action_handler(mock_templar)

        self.assertIs(mock.sentinel.handler, handler)

        action_loader.has_plugin.assert_has_calls([mock.call(action, collection_list=te._task.collections),
                                                   mock.call(module_prefix, collection_list=te._task.collections)])
        action_loader.get.assert_called_with(
            'ansible.legacy.normal', task=te._task, connection=te._connection,
            play_context=te._play_context, loader=te._loader,
            templar=mock_templar, shared_loader_obj=te._shared_loader_obj,
            collection_list=None)

    def test_task_executor_execute(self):
        """Smoke-test _execute() across changed_when/failed_when/conditional
        variations and an include action; mocks keep it side-effect free."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task.action = 'mock.action'
        mock_task.args = dict()
        mock_task.become = False
        mock_task.retries = 0
        mock_task.delay = -1
        mock_task.delegate_to = None
        mock_task.register = 'foo'
        mock_task.until = None
        mock_task.changed_when = None
        mock_task.failed_when = None
        mock_task.post_validate.return_value = None
        # mock_task.async_val cannot be left unset, because on Python 3 MagicMock()
        # > 0 raises a TypeError   There are two reasons for using the value 1
        # here: on Python 2 comparing MagicMock() > 0 returns True, and the
        # other reason is that if I specify 0 here, the test fails. ;)
        mock_task.async_val = 1
        mock_task.poll = 0
        mock_task.evaluate_conditional_with_result.return_value = (True, None)

        mock_play_context = MagicMock()
        mock_play_context.post_validate.return_value = None
        mock_play_context.update_vars.return_value = None

        mock_connection = MagicMock()
        mock_connection.force_persistence = False
        mock_connection.supports_persistence = False
        mock_connection.set_host_overrides.return_value = None
        mock_connection._connect.return_value = None

        mock_action = MagicMock()
        mock_queue = MagicMock()
        mock_vm = MagicMock()
        mock_vm.get_delegated_vars_and_hostname.return_value = {}, None

        shared_loader = MagicMock()
        new_stdin = None
        job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")

        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=shared_loader,
            final_q=mock_queue,
            variable_manager=mock_vm,
        )

        te._get_connection = MagicMock(return_value=mock_connection)
        context = MagicMock()

        with patch('ansible.executor.task_executor.start_connection'):
            te._get_action_handler_with_context = MagicMock(return_value=get_with_context_result(mock_action, context))
            mock_action.run.return_value = dict(ansible_facts=dict())
            res = te._execute()

            mock_task.changed_when = MagicMock(return_value=AnsibleUnicode("1 == 1"))
            res = te._execute()

            mock_task.changed_when = None
            mock_task.failed_when = MagicMock(return_value=AnsibleUnicode("1 == 1"))
            res = te._execute()

            mock_task.failed_when = None
            mock_task.evaluate_conditional.return_value = False
            res = te._execute()

            mock_task.evaluate_conditional.return_value = True
            mock_task.args = dict(_raw_params='foo.yml', a='foo', b='bar')
            mock_task.action = 'include'
            res = te._execute()

    def test_task_executor_poll_async_result(self):
        """_poll_async_result fails on malformed async status and returns
        the status dict once the async_status action reports finished."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task.async_val = 0.1
        mock_task.poll = 0.05
        mock_play_context = MagicMock()
        mock_action = MagicMock()
        mock_queue = MagicMock()
        shared_loader = MagicMock()
        shared_loader.action_loader = action_loader
        new_stdin = None
        job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=shared_loader,
            final_q=mock_queue,
            variable_manager=MagicMock(),
        )

        te._connection = MagicMock()

        def _get(*args, **kwargs):
            mock_action = MagicMock()
            mock_action.run.return_value = dict(stdout='')
            return mock_action

        # testing with some bad values in the result passed to poll async,
        # and with a bad value returned from the mock action
        with patch.object(action_loader, 'get', _get):
            mock_templar = MagicMock()
            res = te._poll_async_result(result=dict(), templar=mock_templar)
            self.assertIn('failed', res)
            res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
            self.assertIn('failed', res)

        def _get(*args, **kwargs):
            mock_action = MagicMock()
            mock_action.run.return_value = dict(finished=1)
            return mock_action

        # now testing with good values
        with patch.object(action_loader, 'get', _get):
            mock_templar = MagicMock()
            res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
            self.assertEqual(res, dict(finished=1))

    def test_recursive_remove_omit(self):
        """remove_omit strips keys whose value equals the omit token at any
        nesting depth in dicts, but leaves the token alone inside lists."""
        omit_token = 'POPCORN'

        data = {
            'foo': 'bar',
            'baz': 1,
            'qux': ['one', 'two', 'three'],
            'subdict': {
                'remove': 'POPCORN',
                'keep': 'not_popcorn',
                'subsubdict': {
                    'remove': 'POPCORN',
                    'keep': 'not_popcorn',
                },
                'a_list': ['POPCORN'],
            },
            'a_list': ['POPCORN'],
            'list_of_lists': [
                ['some', 'thing'],
            ],
            'list_of_dicts': [
                {
                    'remove': 'POPCORN',
                }
            ],
        }

        expected = {
            'foo': 'bar',
            'baz': 1,
            'qux': ['one', 'two', 'three'],
            'subdict': {
                'keep': 'not_popcorn',
                'subsubdict': {
                    'keep': 'not_popcorn',
                },
                'a_list': ['POPCORN'],
            },
            'a_list': ['POPCORN'],
            'list_of_lists': [
                ['some', 'thing'],
            ],
            'list_of_dicts': [{}],
        }

        self.assertEqual(remove_omit(data, omit_token), expected)
| 17,489
|
Python
|
.py
| 411
| 31.907543
| 123
| 0.592675
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,970
|
test_playbook_executor.py
|
ansible_ansible/test/units/executor/test_playbook_executor.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import MagicMock
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook import Playbook
from ansible.template import Templar
from ansible.utils import context_objects as co
from units.mock.loader import DictDataLoader
class TestPlaybookExecutor(unittest.TestCase):
def setUp(self):
# Reset command line args for every test
co.GlobalCLIArgs._Singleton__instance = None
def tearDown(self):
# And cleanup after ourselves too
co.GlobalCLIArgs._Singleton__instance = None
def test_get_serialized_batches(self):
fake_loader = DictDataLoader({
'no_serial.yml': """
- hosts: all
gather_facts: no
tasks:
- debug: var=inventory_hostname
""",
'serial_int.yml': """
- hosts: all
gather_facts: no
serial: 2
tasks:
- debug: var=inventory_hostname
""",
'serial_pct.yml': """
- hosts: all
gather_facts: no
serial: 20%
tasks:
- debug: var=inventory_hostname
""",
'serial_list.yml': """
- hosts: all
gather_facts: no
serial: [1, 2, 3]
tasks:
- debug: var=inventory_hostname
""",
'serial_list_mixed.yml': """
- hosts: all
gather_facts: no
serial: [1, "20%", -1]
tasks:
- debug: var=inventory_hostname
""",
})
mock_inventory = MagicMock()
mock_var_manager = MagicMock()
templar = Templar(loader=fake_loader)
pbe = PlaybookExecutor(
playbooks=['no_serial.yml', 'serial_int.yml', 'serial_pct.yml', 'serial_list.yml', 'serial_list_mixed.yml'],
inventory=mock_inventory,
variable_manager=mock_var_manager,
loader=fake_loader,
passwords=[],
)
playbook = Playbook.load(pbe._playbooks[0], variable_manager=mock_var_manager, loader=fake_loader)
play = playbook.get_plays()[0]
play.post_validate(templar)
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
self.assertEqual(pbe._get_serialized_batches(play), [['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']])
playbook = Playbook.load(pbe._playbooks[1], variable_manager=mock_var_manager, loader=fake_loader)
play = playbook.get_plays()[0]
play.post_validate(templar)
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
self.assertEqual(
pbe._get_serialized_batches(play),
[['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'], ['host6', 'host7'], ['host8', 'host9']]
)
playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader)
play = playbook.get_plays()[0]
play.post_validate(templar)
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
self.assertEqual(
pbe._get_serialized_batches(play),
[['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'], ['host6', 'host7'], ['host8', 'host9']]
)
playbook = Playbook.load(pbe._playbooks[3], variable_manager=mock_var_manager, loader=fake_loader)
play = playbook.get_plays()[0]
play.post_validate(templar)
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
self.assertEqual(
pbe._get_serialized_batches(play),
[['host0'], ['host1', 'host2'], ['host3', 'host4', 'host5'], ['host6', 'host7', 'host8'], ['host9']]
)
playbook = Playbook.load(pbe._playbooks[4], variable_manager=mock_var_manager, loader=fake_loader)
play = playbook.get_plays()[0]
play.post_validate(templar)
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
self.assertEqual(pbe._get_serialized_batches(play), [['host0'], ['host1', 'host2'], ['host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']])
# Test when serial percent is under 1.0
playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader)
play = playbook.get_plays()[0]
play.post_validate(templar)
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2']
self.assertEqual(pbe._get_serialized_batches(play), [['host0'], ['host1'], ['host2']])
# Test when there is a remainder for serial as a percent
playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader)
play = playbook.get_plays()[0]
play.post_validate(templar)
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9', 'host10']
self.assertEqual(
pbe._get_serialized_batches(play),
[['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'], ['host6', 'host7'], ['host8', 'host9'], ['host10']]
)
| 6,367
|
Python
|
.py
| 127
| 40.874016
| 157
| 0.603601
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,971
|
test_task_queue_manager_callbacks.py
|
ansible_ansible/test/units/executor/test_task_queue_manager_callbacks.py
|
# (c) 2016, Steve Kuznetsov <skuznets@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import MagicMock
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.playbook import Playbook
from ansible.plugins.callback import CallbackBase
from ansible.utils import context_objects as co
class TestTaskQueueManagerCallbacks(unittest.TestCase):
def setUp(self):
inventory = MagicMock()
variable_manager = MagicMock()
loader = MagicMock()
passwords = []
# Reset the stored command line args
co.GlobalCLIArgs._Singleton__instance = None
self._tqm = TaskQueueManager(inventory, variable_manager, loader, passwords)
self._playbook = Playbook(loader)
# we use a MagicMock to register the result of the call we
# expect to `v2_playbook_on_call`. We don't mock out the
# method since we're testing code that uses `inspect` to
# look at that method's argspec and we want to ensure this
# test is easy to reason about.
self._register = MagicMock()
def tearDown(self):
# Reset the stored command line args
co.GlobalCLIArgs._Singleton__instance = None
def test_task_queue_manager_callbacks_v2_playbook_on_start(self):
"""
Assert that no exceptions are raised when sending a Playbook
start callback to a current callback module plugin.
"""
register = self._register
class CallbackModule(CallbackBase):
"""
This is a callback module with the current
method signature for `v2_playbook_on_start`.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification'
CALLBACK_NAME = 'current_module'
def v2_playbook_on_start(self, playbook):
register(self, playbook)
callback_module = CallbackModule()
self._tqm._callback_plugins.append(callback_module)
self._tqm.send_callback('v2_playbook_on_start', self._playbook)
register.assert_called_once_with(callback_module, self._playbook)
def test_task_queue_manager_callbacks_v2_playbook_on_start_wrapped(self):
"""
Assert that no exceptions are raised when sending a Playbook
start callback to a wrapped current callback module plugin.
"""
register = self._register
def wrap_callback(func):
"""
This wrapper changes the exposed argument
names for a method from the original names
to (*args, **kwargs). This is used in order
to validate that wrappers which change par-
ameter names do not break the TQM callback
system.
:param func: function to decorate
:return: decorated function
"""
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
class WrappedCallbackModule(CallbackBase):
"""
This is a callback module with the current
method signature for `v2_playbook_on_start`
wrapped in order to change the signature.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification'
CALLBACK_NAME = 'current_module'
@wrap_callback
def v2_playbook_on_start(self, playbook):
register(self, playbook)
callback_module = WrappedCallbackModule()
self._tqm._callback_plugins.append(callback_module)
self._tqm.send_callback('v2_playbook_on_start', self._playbook)
register.assert_called_once_with(callback_module, self._playbook)
| 4,395
|
Python
|
.py
| 98
| 36.295918
| 84
| 0.66542
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,972
|
conftest.py
|
ansible_ansible/test/units/executor/module_common/conftest.py
|
from __future__ import annotations
import pytest
@pytest.fixture
def templar():
class FakeTemplar:
def template(self, template_string, *args, **kwargs):
return template_string
return FakeTemplar()
| 229
|
Python
|
.py
| 8
| 23.625
| 61
| 0.709677
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,973
|
test_recursive_finder.py
|
ansible_ansible/test/units/executor/module_common/test_recursive_finder.py
|
# (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import pytest
import zipfile
from collections import namedtuple
from io import BytesIO
import ansible.errors
from ansible.executor.module_common import recursive_finder
from ansible.plugins.loader import init_plugin_loader
# These are the modules that are brought in by module_utils/basic.py This may need to be updated
# when basic.py gains new imports
# We will remove these when we modify AnsiBallZ to store its args in a separate file instead of in
# basic.py
MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
'ansible/module_utils/__init__.py',
'ansible/module_utils/_text.py',
'ansible/module_utils/basic.py',
'ansible/module_utils/six/__init__.py',
'ansible/module_utils/_text.py',
'ansible/module_utils/common/collections.py',
'ansible/module_utils/common/parameters.py',
'ansible/module_utils/common/warnings.py',
'ansible/module_utils/parsing/convert_bool.py',
'ansible/module_utils/common/__init__.py',
'ansible/module_utils/common/file.py',
'ansible/module_utils/common/locale.py',
'ansible/module_utils/common/process.py',
'ansible/module_utils/common/sys_info.py',
'ansible/module_utils/common/text/__init__.py',
'ansible/module_utils/common/text/converters.py',
'ansible/module_utils/common/text/formatters.py',
'ansible/module_utils/common/validation.py',
'ansible/module_utils/common/_utils.py',
'ansible/module_utils/common/arg_spec.py',
'ansible/module_utils/compat/__init__.py',
'ansible/module_utils/compat/selinux.py',
'ansible/module_utils/distro/__init__.py',
'ansible/module_utils/distro/_distro.py',
'ansible/module_utils/errors.py',
'ansible/module_utils/parsing/__init__.py',
'ansible/module_utils/parsing/convert_bool.py',
'ansible/module_utils/pycompat24.py',
'ansible/module_utils/six/__init__.py',
))
ONLY_BASIC_FILE = frozenset(('ansible/module_utils/basic.py',))
ANSIBLE_LIB = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))), 'lib', 'ansible')
@pytest.fixture
def finder_containers():
init_plugin_loader()
FinderContainers = namedtuple('FinderContainers', ['zf'])
zipoutput = BytesIO()
zf = zipfile.ZipFile(zipoutput, mode='w', compression=zipfile.ZIP_STORED)
return FinderContainers(zf)
class TestRecursiveFinder(object):
def test_no_module_utils(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nreturn \'{\"changed\": false}\''
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, *finder_containers)
assert frozenset(finder_containers.zf.namelist()) == MODULE_UTILS_BASIC_FILES
def test_module_utils_with_syntax_error(self, finder_containers):
name = 'fake_module'
data = b'#!/usr/bin/python\ndef something(:\n pass\n'
with pytest.raises(ansible.errors.AnsibleError) as exec_info:
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'fake_module.py'), data, *finder_containers)
assert 'Unable to import fake_module due to invalid syntax' in str(exec_info.value)
def test_module_utils_with_identation_error(self, finder_containers):
name = 'fake_module'
data = b'#!/usr/bin/python\n def something():\n pass\n'
with pytest.raises(ansible.errors.AnsibleError) as exec_info:
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'fake_module.py'), data, *finder_containers)
assert 'Unable to import fake_module due to unexpected indent' in str(exec_info.value)
#
# Test importing six with many permutations because it is not a normal module
#
def test_from_import_six(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils import six'
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, *finder_containers)
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/six/__init__.py', )).union(MODULE_UTILS_BASIC_FILES)
def test_import_six(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nimport ansible.module_utils.six'
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, *finder_containers)
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/six/__init__.py', )).union(MODULE_UTILS_BASIC_FILES)
def test_import_six_from_many_submodules(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils.six.moves.urllib.parse import urlparse'
recursive_finder(name, os.path.join(ANSIBLE_LIB, 'modules', 'system', 'ping.py'), data, *finder_containers)
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/six/__init__.py',)).union(MODULE_UTILS_BASIC_FILES)
| 6,777
|
Python
|
.py
| 105
| 49.895238
| 146
| 0.605623
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,974
|
test_modify_module.py
|
ansible_ansible/test/units/executor/module_common/test_modify_module.py
|
# Copyright (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
from __future__ import annotations
import pytest
from ansible.executor.module_common import modify_module
FAKE_OLD_MODULE = b"""#!/usr/bin/python
import sys
print('{"result": "%s"}' % sys.executable)
"""
@pytest.fixture
def fake_old_module_open(mocker):
m = mocker.mock_open(read_data=FAKE_OLD_MODULE)
mocker.patch('builtins.open', m)
# this test no longer makes sense, since a Python module will always either have interpreter discovery run or
# an explicit interpreter passed (so we'll never default to the module shebang)
# def test_shebang(fake_old_module_open, templar):
# (data, style, shebang) = modify_module('fake_module', 'fake_path', {}, templar)
# assert shebang == '#!/usr/bin/python'
def test_shebang_task_vars(fake_old_module_open, templar):
task_vars = {
'ansible_python_interpreter': '/usr/bin/python3'
}
(data, style, shebang) = modify_module('fake_module', 'fake_path', {}, templar, task_vars=task_vars)
assert shebang == '#!/usr/bin/python3'
| 1,179
|
Python
|
.py
| 25
| 44.44
| 109
| 0.712161
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,975
|
test_module_common.py
|
ansible_ansible/test/units/executor/module_common/test_module_common.py
|
# (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os.path
import pytest
import ansible.errors
from ansible.executor import module_common as amc
from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredError
class TestStripComments:
def test_no_changes(self):
no_comments = u"""def some_code():
return False"""
assert amc._strip_comments(no_comments) == no_comments
def test_all_comments(self):
all_comments = u"""# This is a test
# Being as it is
# To be
"""
assert amc._strip_comments(all_comments) == u""
def test_all_whitespace(self):
all_whitespace = (
'\n'
' \n'
'\n'
' \n'
'\t\t\r\n'
'\n'
' '
)
assert amc._strip_comments(all_whitespace) == u""
def test_somewhat_normal(self):
mixed = u"""#!/usr/bin/python
# here we go
def test(arg):
# this is a thing
thing = '# test'
return thing
# End
"""
mixed_results = u"""def test(arg):
thing = '# test'
return thing"""
assert amc._strip_comments(mixed) == mixed_results
class TestSlurp:
def test_slurp_nonexistent(self, mocker):
mocker.patch('os.path.exists', side_effect=lambda x: False)
with pytest.raises(ansible.errors.AnsibleError):
amc._slurp('no_file')
def test_slurp_file(self, mocker):
mocker.patch('os.path.exists', side_effect=lambda x: True)
m = mocker.mock_open(read_data='This is a test')
mocker.patch('builtins.open', m)
assert amc._slurp('some_file') == 'This is a test'
def test_slurp_file_with_newlines(self, mocker):
mocker.patch('os.path.exists', side_effect=lambda x: True)
m = mocker.mock_open(read_data='#!/usr/bin/python\ndef test(args):\nprint("hi")\n')
mocker.patch('builtins.open', m)
assert amc._slurp('some_file') == '#!/usr/bin/python\ndef test(args):\nprint("hi")\n'
class TestGetShebang:
"""Note: We may want to change the API of this function in the future. It isn't a great API"""
def test_no_interpreter_set(self, templar):
# normally this would return /usr/bin/python, but so long as we're defaulting to auto python discovery, we'll get
# an InterpreterDiscoveryRequiredError here instead
with pytest.raises(InterpreterDiscoveryRequiredError):
amc._get_shebang(u'/usr/bin/python', {}, templar)
def test_python_interpreter(self, templar):
assert amc._get_shebang(u'/usr/bin/python3.8', {}, templar) == ('#!/usr/bin/python3.8', u'/usr/bin/python3.8')
def test_non_python_interpreter(self, templar):
assert amc._get_shebang(u'/usr/bin/ruby', {}, templar) == ('#!/usr/bin/ruby', u'/usr/bin/ruby')
def test_interpreter_set_in_task_vars(self, templar):
assert amc._get_shebang(u'/usr/bin/python', {u'ansible_python_interpreter': u'/usr/bin/pypy'}, templar) == \
(u'#!/usr/bin/pypy', u'/usr/bin/pypy')
def test_non_python_interpreter_in_task_vars(self, templar):
assert amc._get_shebang(u'/usr/bin/ruby', {u'ansible_ruby_interpreter': u'/usr/local/bin/ruby'}, templar) == \
(u'#!/usr/local/bin/ruby', u'/usr/local/bin/ruby')
def test_with_args(self, templar):
assert amc._get_shebang(u'/usr/bin/python', {u'ansible_python_interpreter': u'/usr/bin/python3'}, templar, args=('-tt', '-OO')) == \
(u'#!/usr/bin/python3 -tt -OO', u'/usr/bin/python3')
def test_python_via_env(self, templar):
assert amc._get_shebang(u'/usr/bin/python', {u'ansible_python_interpreter': u'/usr/bin/env python'}, templar) == \
(u'#!/usr/bin/env python', u'/usr/bin/env python')
class TestDetectionRegexes:
ANSIBLE_MODULE_UTIL_STRINGS = (
# Absolute collection imports
b'import ansible_collections.my_ns.my_col.plugins.module_utils.my_util',
b'from ansible_collections.my_ns.my_col.plugins.module_utils import my_util',
b'from ansible_collections.my_ns.my_col.plugins.module_utils.my_util import my_func',
# Absolute core imports
b'import ansible.module_utils.basic',
b'from ansible.module_utils import basic',
b'from ansible.module_utils.basic import AnsibleModule',
# Relative imports
b'from ..module_utils import basic',
b'from .. module_utils import basic',
b'from ....module_utils import basic',
b'from ..module_utils.basic import AnsibleModule',
)
NOT_ANSIBLE_MODULE_UTIL_STRINGS = (
b'from ansible import release',
b'from ..release import __version__',
b'from .. import release',
b'from ansible.modules.system import ping',
b'from ansible_collecitons.my_ns.my_col.plugins.modules import function',
)
OFFSET = os.path.dirname(os.path.dirname(amc.__file__))
CORE_PATHS = (
('%s/modules/from_role.py' % OFFSET, 'ansible/modules/from_role'),
('%s/modules/system/ping.py' % OFFSET, 'ansible/modules/system/ping'),
('%s/modules/cloud/amazon/s3.py' % OFFSET, 'ansible/modules/cloud/amazon/s3'),
)
COLLECTION_PATHS = (
('/root/ansible_collections/ns/col/plugins/modules/ping.py',
'ansible_collections/ns/col/plugins/modules/ping'),
('/root/ansible_collections/ns/col/plugins/modules/subdir/ping.py',
'ansible_collections/ns/col/plugins/modules/subdir/ping'),
)
@pytest.mark.parametrize('testcase', ANSIBLE_MODULE_UTIL_STRINGS)
def test_detect_new_style_python_module_re(self, testcase):
assert amc.NEW_STYLE_PYTHON_MODULE_RE.search(testcase)
@pytest.mark.parametrize('testcase', NOT_ANSIBLE_MODULE_UTIL_STRINGS)
def test_no_detect_new_style_python_module_re(self, testcase):
assert not amc.NEW_STYLE_PYTHON_MODULE_RE.search(testcase)
@pytest.mark.parametrize('testcase, result', CORE_PATHS)
def test_detect_core_library_path_re(self, testcase, result):
assert amc.CORE_LIBRARY_PATH_RE.search(testcase).group('path') == result
@pytest.mark.parametrize('testcase', (p[0] for p in COLLECTION_PATHS))
def test_no_detect_core_library_path_re(self, testcase):
assert not amc.CORE_LIBRARY_PATH_RE.search(testcase)
@pytest.mark.parametrize('testcase, result', COLLECTION_PATHS)
def test_detect_collection_path_re(self, testcase, result):
assert amc.COLLECTION_PATH_RE.search(testcase).group('path') == result
@pytest.mark.parametrize('testcase', (p[0] for p in CORE_PATHS))
def test_no_detect_collection_path_re(self, testcase):
assert not amc.COLLECTION_PATH_RE.search(testcase)
| 7,457
|
Python
|
.py
| 148
| 43.466216
| 140
| 0.663368
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,976
|
test_invalid_var_names.py
|
ansible_ansible/test/units/regex/test_invalid_var_names.py
|
from __future__ import annotations
import unittest
from ansible import constants as C
test_cases = (('not-valid', ['-'], 'not_valid'), ('not!valid@either', ['!', '@'], 'not_valid_either'), ('1_nor_This', ['1'], '__nor_This'))
class TestInvalidVars(unittest.TestCase):
def test_positive_matches(self):
for name, invalid, sanitized in test_cases:
self.assertEqual(C.INVALID_VARIABLE_NAMES.findall(name), invalid)
def test_negative_matches(self):
for name in ('this_is_valid', 'Also_1_valid', 'noproblem'):
self.assertEqual(C.INVALID_VARIABLE_NAMES.findall(name), [])
def test_get_setting(self):
for name, invalid, sanitized in test_cases:
self.assertEqual(C.INVALID_VARIABLE_NAMES.sub('_', name), sanitized)
| 789
|
Python
|
.py
| 14
| 49.428571
| 139
| 0.664921
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,977
|
test_native_concat.py
|
ansible_ansible/test/units/template/test_native_concat.py
|
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.playbook.conditional import Conditional
from ansible.template import Templar
from units.mock.loader import DictDataLoader
def test_cond_eval():
fake_loader = DictDataLoader({})
# True must be stored in a variable to trigger templating. Using True
# directly would be caught by optimization for bools to short-circuit
# templating.
variables = {"foo": True}
templar = Templar(loader=fake_loader, variables=variables)
cond = Conditional(loader=fake_loader)
cond.when = ["foo"]
with templar.set_temporary_context(jinja2_native=True):
assert cond.evaluate_conditional(templar, variables)
| 814
|
Python
|
.py
| 17
| 43.941176
| 92
| 0.757269
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,978
|
test_templar.py
|
ansible_ansible/test/units/template/test_templar.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from jinja2.runtime import Context
import unittest
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.plugins.loader import init_plugin_loader
from ansible.template import Templar, AnsibleContext, AnsibleEnvironment, AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var
from units.mock.loader import DictDataLoader
class BaseTemplar(object):
def setUp(self):
init_plugin_loader()
self.test_vars = dict(
foo="bar",
bam="{{foo}}",
num=1,
var_true=True,
var_false=False,
var_dict=dict(a="b"),
bad_dict="{a='b'",
var_list=[1],
recursive="{{recursive}}",
some_var="blip",
some_static_var="static_blip",
some_keyword="{{ foo }}",
some_unsafe_var=wrap_var("unsafe_blip"),
some_static_unsafe_var=wrap_var("static_unsafe_blip"),
some_unsafe_keyword=wrap_var("{{ foo }}"),
str_with_error="{{ 'str' | from_json }}",
)
self.fake_loader = DictDataLoader({
"/path/to/my_file.txt": "foo\n",
})
self.templar = Templar(loader=self.fake_loader, variables=self.test_vars)
self._ansible_context = AnsibleContext(self.templar.environment, {}, {}, {})
def is_unsafe(self, obj):
return self._ansible_context._is_unsafe(obj)
class SomeUnsafeClass(AnsibleUnsafe):
def __init__(self):
super(SomeUnsafeClass, self).__init__()
self.blip = 'unsafe blip'
class TestTemplarTemplate(BaseTemplar, unittest.TestCase):
def test_lookup_jinja_dict_key_in_static_vars(self):
res = self.templar.template("{'some_static_var': '{{ some_var }}'}",
static_vars=['some_static_var'])
assert res['some_static_var'] == "blip"
def test_is_possibly_template_true(self):
tests = [
'{{ foo }}',
'{% foo %}',
'{# foo #}',
'{# {{ foo }} #}',
'{# {{ nothing }} {# #}',
'{# {{ nothing }} {# #} #}',
'{% raw %}{{ foo }}{% endraw %}',
'{{',
'{%',
'{#',
'{% raw',
]
for test in tests:
self.assertTrue(self.templar.is_possibly_template(test))
def test_is_possibly_template_false(self):
tests = [
'{',
'%',
'#',
'foo',
'}}',
'%}',
'raw %}',
'#}',
]
for test in tests:
self.assertFalse(self.templar.is_possibly_template(test))
def test_is_possible_template(self):
"""This test ensures that a broken template still gets templated"""
# Purposefully invalid jinja
self.assertRaises(AnsibleError, self.templar.template, '{{ foo|default(False)) }}')
def test_is_template_true(self):
tests = [
'{{ foo }}',
'{% foo %}',
'{# foo #}',
'{# {{ foo }} #}',
'{# {{ nothing }} {# #}',
'{# {{ nothing }} {# #} #}',
'{% raw %}{{ foo }}{% endraw %}',
]
for test in tests:
self.assertTrue(self.templar.is_template(test))
def test_is_template_false(self):
tests = [
'foo',
'{{ foo',
'{% foo',
'{# foo',
'{{ foo %}',
'{{ foo #}',
'{% foo }}',
'{% foo #}',
'{# foo %}',
'{# foo }}',
'{{ foo {{',
'{% raw %}{% foo %}',
]
for test in tests:
self.assertFalse(self.templar.is_template(test))
def test_is_template_raw_string(self):
res = self.templar.is_template('foo')
self.assertFalse(res)
def test_is_template_none(self):
res = self.templar.is_template(None)
self.assertFalse(res)
def test_template_convert_bare_string(self):
res = self.templar.template('foo', convert_bare=True)
self.assertEqual(res, 'bar')
def test_template_convert_bare_nested(self):
res = self.templar.template('bam', convert_bare=True)
self.assertEqual(res, 'bar')
def test_template_convert_bare_unsafe(self):
res = self.templar.template('some_unsafe_var', convert_bare=True)
self.assertEqual(res, 'unsafe_blip')
# self.assertIsInstance(res, AnsibleUnsafe)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_template_convert_bare_filter(self):
res = self.templar.template('bam|capitalize', convert_bare=True)
self.assertEqual(res, 'Bar')
def test_template_convert_bare_filter_unsafe(self):
res = self.templar.template('some_unsafe_var|capitalize', convert_bare=True)
self.assertEqual(res, 'Unsafe_blip')
# self.assertIsInstance(res, AnsibleUnsafe)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_template_convert_data(self):
res = self.templar.template('{{foo}}', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, 'bar')
def test_template_convert_data_template_in_data(self):
res = self.templar.template('{{bam}}', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, 'bar')
def test_template_convert_data_bare(self):
res = self.templar.template('bam', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, 'bam')
def test_template_convert_data_to_json(self):
res = self.templar.template('{{bam|to_json}}', convert_data=True)
self.assertTrue(res)
self.assertEqual(res, '"bar"')
def test_template_convert_data_convert_bare_data_bare(self):
res = self.templar.template('bam', convert_data=True, convert_bare=True)
self.assertTrue(res)
self.assertEqual(res, 'bar')
def test_template_unsafe_non_string(self):
unsafe_obj = AnsibleUnsafe()
res = self.templar.template(unsafe_obj)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_template_unsafe_non_string_subclass(self):
unsafe_obj = SomeUnsafeClass()
res = self.templar.template(unsafe_obj)
self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)
def test_weird(self):
data = u"""1 2 #}huh{# %}ddfg{% }}dfdfg{{ {%what%} {{#foo#}} {%{bar}%} {#%blip%#} {{asdfsd%} 3 4 {{foo}} 5 6 7"""
self.assertRaisesRegex(AnsibleError,
'template error while templating string',
self.templar.template,
data)
def test_template_with_error(self):
"""Check that AnsibleError is raised, fail if an unhandled exception is raised"""
self.assertRaises(AnsibleError, self.templar.template, "{{ str_with_error }}")
class TestTemplarMisc(BaseTemplar, unittest.TestCase):
    """Miscellaneous Templar.template() behavior: conversions, errors, escaping, extensions."""
    def test_templar_simple(self):
        """Exercise basic templating of scalars, containers, errors and the variables setter."""
        templar = self.templar
        # test some basic templating
        self.assertEqual(templar.template("{{foo}}"), "bar")
        self.assertEqual(templar.template("{{foo}}\n"), "bar\n")
        self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=True), "bar\n")
        self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=False), "bar")
        self.assertEqual(templar.template("{{bam}}"), "bar")
        # numeric results are converted to native types, not returned as strings
        self.assertEqual(templar.template("{{num}}"), 1)
        assert templar.template("{{var_true}}")
        assert not templar.template("{{var_false}}")
        self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
        # bad_dict is presumably a malformed-dict-literal fixture; it templates to a plain string
        self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
        self.assertEqual(templar.template("{{var_list}}"), [1])
        self.assertEqual(templar.template(1, convert_bare=True), 1)
        # force errors
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{bad_var}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{lookup('file', bad_var)}}")
        self.assertRaises(AnsibleError, templar.template, "{{lookup('bad_lookup')}}")
        self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{foo-bar}}")
        # test with fail_on_undefined=False
        self.assertEqual(templar.template("{{bad_var}}", fail_on_undefined=False), "{{bad_var}}")
        # test setting available_variables
        templar.available_variables = dict(foo="bam")
        self.assertEqual(templar.template("{{foo}}"), "bam")
        # variables must be a dict() for available_variables setter
        with self.assertRaisesRegex(AnsibleAssertionError, r"the type of 'variables'"):
            templar.available_variables = "foo=bam"
    def test_templar_escape_backslashes(self):
        """Verify escape_backslashes only affects backslashes inside jinja2 expressions."""
        # Rule of thumb: If escape backslashes is True you should end up with
        # the same number of backslashes as when you started.
        self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=True), "\tbar")
        self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=False), "\tbar")
        self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=True), "\\bar")
        self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=False), "\\bar")
        self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=True), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=False), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=True), "\\bar\\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=False), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=True), "\\bar\\\\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=False), "\\bar\\t")
    def test_template_jinja2_extensions(self):
        """The configured DEFAULT_JINJA2_EXTENSIONS string is split into a list of names."""
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)
        old_exts = C.DEFAULT_JINJA2_EXTENSIONS
        try:
            C.DEFAULT_JINJA2_EXTENSIONS = "foo,bar"
            self.assertEqual(templar._get_extensions(), ['foo', 'bar'])
        finally:
            # restore global config so other tests are unaffected
            C.DEFAULT_JINJA2_EXTENSIONS = old_exts
class TestTemplarLookup(BaseTemplar, unittest.TestCase):
    """Tests for Templar._lookup: plugin resolution, templated args, wantlist, unsafe tagging."""
    def test_lookup_missing_plugin(self):
        """An unknown lookup plugin name raises AnsibleError."""
        self.assertRaisesRegex(AnsibleError,
                               r'lookup plugin \(not_a_real_lookup_plugin\) not found',
                               self.templar._lookup,
                               'not_a_real_lookup_plugin',
                               'an_arg', a_keyword_arg='a_keyword_arg_value')
    def test_lookup_list(self):
        """The 'list' lookup joins its positional args with commas."""
        res = self.templar._lookup('list', 'an_arg', 'another_arg')
        self.assertEqual(res, 'an_arg,another_arg')
    def test_lookup_jinja_undefined(self):
        """An undefined variable inside a lookup arg raises AnsibleUndefinedVariable."""
        self.assertRaisesRegex(AnsibleUndefinedVariable,
                               "'an_undefined_jinja_var' is undefined",
                               self.templar._lookup,
                               'list', '{{ an_undefined_jinja_var }}')
    def test_lookup_jinja_defined(self):
        """Lookup results built from templated args are tagged unsafe."""
        res = self.templar._lookup('list', '{{ some_var }}')
        self.assertTrue(self.is_unsafe(res))
        # self.assertIsInstance(res, AnsibleUnsafe)
    def test_lookup_jinja_dict_string_passed(self):
        """The 'dict' lookup rejects a string argument."""
        self.assertRaisesRegex(AnsibleError,
                               "with_dict expects a dict",
                               self.templar._lookup,
                               'dict',
                               '{{ some_var }}')
    def test_lookup_jinja_dict_list_passed(self):
        """The 'dict' lookup rejects a list argument."""
        self.assertRaisesRegex(AnsibleError,
                               "with_dict expects a dict",
                               self.templar._lookup,
                               'dict',
                               ['foo', 'bar'])
    def test_lookup_jinja_kwargs(self):
        """Keyword args are accepted and the result is still tagged unsafe."""
        res = self.templar._lookup('list', 'blip', random_keyword='12345')
        self.assertTrue(self.is_unsafe(res))
        # self.assertIsInstance(res, AnsibleUnsafe)
    def test_lookup_jinja_list_wantlist(self):
        """wantlist=True returns the raw list instead of a joined string."""
        res = self.templar._lookup('list', '{{ some_var }}', wantlist=True)
        self.assertEqual(res, ["blip"])
    def test_lookup_jinja_list_wantlist_undefined(self):
        """wantlist=True still propagates undefined-variable errors."""
        self.assertRaisesRegex(AnsibleUndefinedVariable,
                               "'some_undefined_var' is undefined",
                               self.templar._lookup,
                               'list',
                               '{{ some_undefined_var }}',
                               wantlist=True)
    def test_lookup_jinja_list_wantlist_unsafe(self):
        """Each element of a wantlist result from an unsafe var stays unsafe."""
        res = self.templar._lookup('list', '{{ some_unsafe_var }}', wantlist=True)
        for lookup_result in res:
            self.assertTrue(self.is_unsafe(lookup_result))
            assert isinstance(lookup_result, AnsibleUnsafe)
            # TODO: Should this be an AnsibleUnsafe
            # self.assertIsInstance(res, AnsibleUnsafe)
    def test_lookup_jinja_dict(self):
        """Dict values are templated and tagged unsafe; dict keys are left untemplated."""
        res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_var }}'})
        self.assertEqual(res['{{ a_keyword }}'], "blip")
        assert isinstance(res['{{ a_keyword }}'], AnsibleUnsafe)
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)
    def test_lookup_jinja_dict_unsafe(self):
        """Unsafe vars templated into dict values keep the unsafe tag."""
        res = self.templar._lookup('list', {'{{ some_unsafe_key }}': '{{ some_unsafe_var }}'})
        self.assertTrue(self.is_unsafe(res['{{ some_unsafe_key }}']))
        assert isinstance(res['{{ some_unsafe_key }}'], AnsibleUnsafe)
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)
    def test_lookup_jinja_dict_unsafe_value(self):
        """An unsafe value under a safe key is still tagged unsafe."""
        res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_unsafe_var }}'})
        self.assertTrue(self.is_unsafe(res['{{ a_keyword }}']))
        assert isinstance(res['{{ a_keyword }}'], AnsibleUnsafe)
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)
    def test_lookup_jinja_none(self):
        """A None term yields a None result rather than an error."""
        res = self.templar._lookup('list', None)
        self.assertIsNone(res)
class TestAnsibleContext(BaseTemplar, unittest.TestCase):
    """Tests that AnsibleContext.resolve preserves (and only preserves) unsafe tagging."""
    def _context(self, variables=None):
        """Build an AnsibleContext backed by AnsibleEnvironment with the given vars."""
        variables = variables or {}
        env = AnsibleEnvironment()
        context = AnsibleContext(env, parent={}, name='some_context',
                                 blocks={})
        for key, value in variables.items():
            context.vars[key] = value
        return context
    def test(self):
        """Sanity check: the helper builds a real jinja2 Context subclass."""
        context = self._context()
        self.assertIsInstance(context, AnsibleContext)
        self.assertIsInstance(context, Context)
    def test_resolve_unsafe(self):
        """Resolving a wrapped scalar keeps it AnsibleUnsafe."""
        context = self._context(variables={'some_unsafe_key': wrap_var('some_unsafe_string')})
        res = context.resolve('some_unsafe_key')
        assert isinstance(res, AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)
    def test_resolve_unsafe_list(self):
        """Resolving a list with an unsafe element keeps the element (and list) unsafe."""
        context = self._context(variables={'some_unsafe_key': [wrap_var('some unsafe string 1')]})
        res = context.resolve('some_unsafe_key')
        assert isinstance(res[0], AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)
    def test_resolve_unsafe_dict(self):
        """Resolving a dict with an unsafe value keeps that value unsafe."""
        context = self._context(variables={'some_unsafe_key':
                                           {'an_unsafe_dict': wrap_var('some unsafe string 1')}
                                           })
        res = context.resolve('some_unsafe_key')
        self.assertTrue(self.is_unsafe(res['an_unsafe_dict']),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res['an_unsafe_dict'])
    def test_resolve(self):
        """Plain values resolve without picking up an unsafe tag."""
        context = self._context(variables={'some_key': 'some_string'})
        res = context.resolve('some_key')
        self.assertEqual(res, 'some_string')
        assert not isinstance(res, AnsibleUnsafe)
        self.assertFalse(self.is_unsafe(res),
                         'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)
    def test_resolve_none(self):
        """None resolves to None and is never tagged unsafe."""
        context = self._context(variables={'some_key': None})
        res = context.resolve('some_key')
        assert res is None
        assert not isinstance(res, AnsibleUnsafe)
        self.assertFalse(self.is_unsafe(res),
                         'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)
    def test_is_unsafe(self):
        """An undefined value must not be considered unsafe."""
        context = self._context()
        self.assertFalse(context._is_unsafe(AnsibleUndefined()))
def test_unsafe_lookup():
    """A lookup whose args come through templated vars must return unsafe-tagged data."""
    variables = {
        'var0': '{{ var1 }}',
        'var1': ['unsafe'],
    }
    templar = Templar(None, variables=variables)
    result = templar.template('{{ lookup("list", var0) }}')
    assert getattr(result[0], '__UNSAFE__', False)
def test_unsafe_lookup_no_conversion():
    """With convert_data=False the lookup result as a whole is still tagged unsafe."""
    variables = {
        'var0': '{{ var1 }}',
        'var1': ['unsafe'],
    }
    templar = Templar(None, variables=variables)
    result = templar.template('{{ lookup("list", var0) }}', convert_data=False)
    assert getattr(result, '__UNSAFE__', False)
| 18,870
|
Python
|
.py
| 381
| 38.965879
| 133
| 0.595298
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,979
|
test_vars.py
|
ansible_ansible/test/units/template/test_vars.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from ansible.template import Templar
from ansible.template.vars import AnsibleJ2Vars
def test_globals_empty():
    """An AnsibleJ2Vars built with no globals still converts cleanly to a dict."""
    j2_vars = AnsibleJ2Vars(Templar(None), {})
    assert isinstance(dict(j2_vars), dict)
def test_globals():
    """Globals passed to AnsibleJ2Vars are visible in its dict view."""
    j2_vars = AnsibleJ2Vars(Templar(None), {'foo': 'bar', 'blip': [1, 2, 3]})
    as_dict = dict(j2_vars)
    assert isinstance(as_dict, dict)
    assert 'foo' in as_dict
    assert as_dict['foo'] == 'bar'
| 1,121
|
Python
|
.py
| 26
| 41.115385
| 79
| 0.751148
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,980
|
test_template_utilities.py
|
ansible_ansible/test/units/template/test_template_utilities.py
|
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import jinja2
import unittest
from ansible.template import AnsibleUndefined, _escape_backslashes, _count_newlines_from_end
# These are internal utility functions only needed for templating. They're
# algorithmic so good candidates for unit testing by themselves
class TestBackslashEscape(unittest.TestCase):
    """Table-driven tests for _escape_backslashes: backslashes inside jinja2 expressions
    must be doubled so one round of jinja2 rendering restores the original count."""
    # each entry: raw template, expected escaped intermediate, rendered expectation, render args
    test_data = (
        # Test backslashes in a filter arg are double escaped
        dict(
            template=u"{{ 'test2 %s' | format('\\1') }}",
            intermediate=u"{{ 'test2 %s' | format('\\\\1') }}",
            expectation=u"test2 \\1",
            args=dict()
        ),
        # Test backslashes inside the jinja2 var itself are double
        # escaped
        dict(
            template=u"Test 2\\3: {{ '\\1 %s' | format('\\2') }}",
            intermediate=u"Test 2\\3: {{ '\\\\1 %s' | format('\\\\2') }}",
            expectation=u"Test 2\\3: \\1 \\2",
            args=dict()
        ),
        # Test backslashes outside of the jinja2 var are not double
        # escaped
        dict(
            template=u"Test 2\\3: {{ 'test2 %s' | format('\\1') }}; \\done",
            intermediate=u"Test 2\\3: {{ 'test2 %s' | format('\\\\1') }}; \\done",
            expectation=u"Test 2\\3: test2 \\1; \\done",
            args=dict()
        ),
        # Test backslashes in a variable sent to a filter are handled
        dict(
            template=u"{{ 'test2 %s' | format(var1) }}",
            intermediate=u"{{ 'test2 %s' | format(var1) }}",
            expectation=u"test2 \\1",
            args=dict(var1=u'\\1')
        ),
        # Test backslashes in a variable expanded by jinja2 are double
        # escaped
        dict(
            template=u"Test 2\\3: {{ var1 | format('\\2') }}",
            intermediate=u"Test 2\\3: {{ var1 | format('\\\\2') }}",
            expectation=u"Test 2\\3: \\1 \\2",
            args=dict(var1=u'\\1 %s')
        ),
    )
    def setUp(self):
        # a plain jinja2 environment is enough; _escape_backslashes only needs its lexer
        self.env = jinja2.Environment()
    def test_backslash_escaping(self):
        """Each table entry: escape, compare intermediate, then render and compare output."""
        for test in self.test_data:
            intermediate = _escape_backslashes(test['template'], self.env)
            self.assertEqual(intermediate, test['intermediate'])
            template = jinja2.Template(intermediate)
            args = test['args']
            self.assertEqual(template.render(**args), test['expectation'])
class TestCountNewlines(unittest.TestCase):
    """Tests for _count_newlines_from_end: count of consecutive trailing newlines."""
    def test_zero_length_string(self):
        self.assertEqual(_count_newlines_from_end(u''), 0)
    def test_short_string(self):
        self.assertEqual(_count_newlines_from_end(u'The quick\n'), 1)
    def test_one_newline(self):
        # long strings exercise the scan-from-the-end implementation
        self.assertEqual(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' * 1000 + u'\n'), 1)
    def test_multiple_newlines(self):
        self.assertEqual(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' * 1000 + u'\n\n\n'), 3)
    def test_zero_newlines(self):
        self.assertEqual(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' * 1000), 0)
    def test_all_newlines(self):
        # a string that is nothing but newlines must count every character
        self.assertEqual(_count_newlines_from_end(u'\n' * 10), 10)
    def test_mostly_newlines(self):
        self.assertEqual(_count_newlines_from_end(u'The quick brown fox jumped over the lazy dog' + u'\n' * 1000), 1000)
class TestAnsibleUndefined(unittest.TestCase):
    """Behavior of the AnsibleUndefined placeholder."""
    def test_getattr(self):
        """Attribute access returns the same undefined object; __UNSAFE__ must raise."""
        val = AnsibleUndefined()
        self.assertIs(getattr(val, 'foo'), val)
        # __UNSAFE__ raising AttributeError keeps undefined values from ever
        # being mistaken for unsafe-tagged data
        self.assertRaises(AttributeError, getattr, val, '__UNSAFE__')
| 4,277
|
Python
|
.py
| 92
| 38.847826
| 121
| 0.627823
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,981
|
conftest.py
|
ansible_ansible/test/units/ansible_test/conftest.py
|
from __future__ import annotations
import os
import pytest
import sys
@pytest.fixture(autouse=True, scope='session')
def ansible_test():
    """Make ansible_test available on sys.path for unit testing ansible-test."""
    # three levels up from this conftest is the test/ directory; its lib/ holds ansible_test
    units_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
    sys.path.insert(0, os.path.join(units_dir, 'lib'))
| 351
|
Python
|
.py
| 9
| 36.222222
| 95
| 0.733728
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,982
|
test_diff.py
|
ansible_ansible/test/units/ansible_test/test_diff.py
|
"""Tests for the diff module."""
from __future__ import annotations
import pathlib
import pytest
import typing as t
if t.TYPE_CHECKING: # pragma: nocover
# noinspection PyProtectedMember
from ansible_test._internal.diff import FileDiff
@pytest.fixture()
def diffs(request: pytest.FixtureRequest) -> list[FileDiff]:
    """Return the parsed diff whose fixture file matches the current test's name."""
    diff_name = request.node.name.removeprefix('test_')
    return get_parsed_diff(diff_name)
def get_parsed_diff(name: str) -> list[FileDiff]:
    """Parse and return the named git diff, sanity-checking every parsed entry."""
    fixture = pathlib.Path(__file__).parent / 'diff' / f'{name}.diff'
    lines = fixture.read_text().splitlines()
    assert lines
    # noinspection PyProtectedMember
    from ansible_test._internal.diff import parse_diff
    parsed = parse_diff(lines)
    assert parsed
    for diff in parsed:
        assert diff.headers
        assert diff.is_complete
        diff.old.format_lines()
        diff.new.format_lines()
        # every recorded range must be ordered and 1-based
        for side in (diff.old, diff.new):
            for line_range in side.ranges:
                assert line_range[1] >= line_range[0] > 0
    return parsed
def test_add_binary_file(diffs: list[FileDiff]) -> None:
    """Add a binary file."""
    assert len(diffs) == 1
    diff = diffs[0]
    assert diff.old.exists and diff.new.exists
    assert diff.old.path == 'binary.dat' == diff.new.path
    assert diff.old.eof_newline and diff.new.eof_newline
def test_add_text_file(diffs: list[FileDiff]) -> None:
    """Add a new file."""
    assert len(diffs) == 1
    diff = diffs[0]
    # new files have no old side
    assert not diff.old.exists
    assert diff.new.exists
    assert diff.old.path == 'test.txt' == diff.new.path
    assert diff.old.eof_newline and diff.new.eof_newline
def test_remove_trailing_newline(diffs: list[FileDiff]) -> None:
    """Remove the trailing newline from a file."""
    assert len(diffs) == 1
    diff = diffs[0]
    assert diff.old.exists and diff.new.exists
    assert diff.old.path == 'test.txt' == diff.new.path
    # only the new side loses its EOF newline
    assert diff.old.eof_newline
    assert not diff.new.eof_newline
def test_add_trailing_newline(diffs: list[FileDiff]) -> None:
    """Add a trailing newline to a file."""
    assert len(diffs) == 1
    diff = diffs[0]
    assert diff.old.exists and diff.new.exists
    assert diff.old.path == 'test.txt' == diff.new.path
    # only the new side gains an EOF newline
    assert not diff.old.eof_newline
    assert diff.new.eof_newline
def test_add_two_text_files(diffs: list[FileDiff]) -> None:
    """Add two text files."""
    assert len(diffs) == 2
    for diff, expected_path in zip(diffs, ('one.txt', 'two.txt')):
        assert not diff.old.exists
        assert diff.new.exists
        assert diff.old.path == expected_path == diff.new.path
        assert diff.old.eof_newline and diff.new.eof_newline
def test_context_no_trailing_newline(diffs: list[FileDiff]) -> None:
    """Context without a trailing newline."""
    assert len(diffs) == 1
    diff = diffs[0]
    assert diff.old.exists and diff.new.exists
    assert diff.old.path == 'test.txt' == diff.new.path
    # neither side ends with a newline
    assert not diff.old.eof_newline
    assert not diff.new.eof_newline
def test_multiple_context_lines(diffs: list[FileDiff]) -> None:
    """Multiple context lines."""
    assert len(diffs) == 1
    diff = diffs[0]
    assert diff.old.exists and diff.new.exists
    assert diff.old.path == 'test.txt' == diff.new.path
    assert diff.old.eof_newline and diff.new.eof_newline
def test_parse_delete(diffs: list[FileDiff]) -> None:
    """Delete files."""
    assert len(diffs) == 1
    diff = diffs[0]
    # deletions keep the path on both sides but drop the new file
    assert diff.old.exists
    assert not diff.new.exists
    expected_path = 'changelogs/fragments/79263-runme-sh-logging-3cb482385bd59058.yaml'
    assert diff.old.path == expected_path
    assert diff.new.path == expected_path
def test_parse_rename(diffs) -> None:
    """Rename files."""
    assert len(diffs) == 2
    for diff in diffs:
        # renames keep both sides present but with differing paths
        assert diff.old.path != diff.new.path
        assert diff.old.exists
        assert diff.new.exists
    assert diffs[0].old.path == 'packaging/debian/ansible-base.dirs'
    assert diffs[0].new.path == 'packaging/debian/ansible-core.dirs'
    assert diffs[1].old.path == 'packaging/debian/ansible-base.install'
    assert diffs[1].new.path == 'packaging/debian/ansible-core.install'
| 4,798
|
Python
|
.py
| 116
| 36.103448
| 104
| 0.684199
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,983
|
util.py
|
ansible_ansible/test/units/ansible_test/ci/util.py
|
from __future__ import annotations
import base64
import json
import re
def common_auth_test(auth):
    """Shared checks for an auth helper: keys are valid PEM and a signed request verifies."""
    private_key_pem = auth.initialize_private_key()
    public_key_pem = auth.public_key_pem
    # both halves of the key pair must be well-formed PEM with decodable payloads
    extract_pem_key(private_key_pem, private=True)
    extract_pem_key(public_key_pem, private=False)
    request = {'hello': 'World'}
    auth.sign_request(request)
    verify_signature(request, public_key_pem)
def extract_pem_key(value, private):
    """Validate that *value* is a PEM-armored key whose payload is decodable base64.

    :param value: PEM text to validate (must be a native str)
    :param private: True to expect a private-key armor, False for a public key
    :raises AssertionError: if the type, armor, or base64 payload is invalid
    """
    # type(u'') is a Python 2 relic; this codebase is Python 3 only, so test str directly
    assert isinstance(value, str)
    # private keys may carry an optional "EC " qualifier in the armor header
    key_type = '(EC )?PRIVATE' if private else 'PUBLIC'
    pattern = r'^-----BEGIN ' + key_type + r' KEY-----\n(?P<key>.*?)\n-----END ' + key_type + r' KEY-----\n$'
    match = re.search(pattern, value, flags=re.DOTALL)
    assert match, 'key "%s" does not match pattern "%s"' % (value, pattern)
    base64.b64decode(match.group('key'))  # make sure the key can be decoded
def verify_signature(request, public_key_pem):
    """Pop the signature from *request* and verify it against the canonical JSON payload."""
    signature = request.pop('signature')
    assert isinstance(signature, type(u''))
    # the signature covers the request without its signature field,
    # serialized deterministically (sorted keys)
    canonical_payload = json.dumps(request, sort_keys=True).encode()
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec
    from cryptography.hazmat.primitives.serialization import load_pem_public_key
    public_key = load_pem_public_key(public_key_pem.encode(), default_backend())
    decoded_signature = base64.b64decode(signature.encode())
    public_key.verify(decoded_signature, canonical_payload, ec.ECDSA(hashes.SHA256()))
| 1,541
|
Python
|
.py
| 33
| 41.666667
| 109
| 0.705567
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,984
|
test_azp.py
|
ansible_ansible/test/units/ansible_test/ci/test_azp.py
|
from __future__ import annotations
from .util import common_auth_test
def test_auth():
    """Run the shared auth round-trip against an in-memory Azure Pipelines helper."""
    # noinspection PyProtectedMember
    from ansible_test._internal.ci.azp import (
        AzurePipelinesAuthHelper,
    )
    # subclass overrides the I/O-touching hooks so the test never publishes keys
    # or writes key material to disk
    class TestAzurePipelinesAuthHelper(AzurePipelinesAuthHelper):
        def __init__(self):
            self.public_key_pem = None
            self.private_key_pem = None
        def publish_public_key(self, public_key_pem):
            # avoid publishing key
            self.public_key_pem = public_key_pem
        def initialize_private_key(self):
            # cache in memory instead of on disk
            if not self.private_key_pem:
                self.private_key_pem = self.generate_private_key()
            return self.private_key_pem
    auth = TestAzurePipelinesAuthHelper()
    common_auth_test(auth)
| 840
|
Python
|
.py
| 21
| 31.142857
| 66
| 0.65679
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,985
|
test_plugins.py
|
ansible_ansible/test/units/plugins/test_plugins.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import unittest
from unittest.mock import patch, MagicMock
from ansible.plugins.loader import PluginLoader, PluginPathContext
class TestErrors(unittest.TestCase):
    """Tests for PluginLoader path handling, config normalization and module caching."""
    @patch.object(PluginLoader, '_get_paths')
    def test_print_paths(self, mock_method):
        """print_paths joins the loader's search paths with the OS path separator."""
        mock_method.return_value = ['/path/one', '/path/two', '/path/three']
        pl = PluginLoader('foo', 'foo', '', 'test_plugins')
        paths = pl.print_paths()
        expected_paths = os.pathsep.join(['/path/one', '/path/two', '/path/three'])
        self.assertEqual(paths, expected_paths)
    def test_plugins__get_package_paths_no_package(self):
        """With no package configured, _get_package_paths returns an empty list."""
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        self.assertEqual(pl._get_package_paths(), [])
    def test_plugins__get_package_paths_with_package(self):
        # the _get_package_paths() call uses __import__ to load a
        # python library, and then uses the __file__ attribute of
        # the result for that to get the library path, so we mock
        # that here and patch the builtin to use our mocked result
        foo_pkg = MagicMock()
        bar_pkg = MagicMock()
        bam = MagicMock()
        bam.__file__ = '/path/to/my/foo/bar/bam/__init__.py'
        bar_pkg.bam = bam
        foo_pkg.return_value.bar = bar_pkg
        pl = PluginLoader('test', 'foo.bar.bam', 'test', 'test_plugin')
        with patch('builtins.__import__', foo_pkg):
            # the package path is the directory containing the package's __init__.py
            self.assertEqual(pl._get_package_paths(), ['/path/to/my/foo/bar/bam'])
    def test_plugins__get_paths(self):
        """_get_paths returns the bare path strings from the cached path contexts."""
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        pl._paths = [PluginPathContext('/path/one', False),
                     PluginPathContext('/path/two', True)]
        self.assertEqual(pl._get_paths(), ['/path/one', '/path/two'])
    # NOT YET WORKING
    # def fake_glob(path):
    #     if path == 'test/*':
    #         return ['test/foo', 'test/bar', 'test/bam']
    #     elif path == 'test/*/*'
    #     m._paths = None
    #     mock_glob = MagicMock()
    #     mock_glob.return_value = []
    #     with patch('glob.glob', mock_glob):
    #         pass
    def assertPluginLoaderConfigBecomes(self, arg, expected):
        """Helper: loading with config *arg* must normalize it to *expected*."""
        pl = PluginLoader('test', '', arg, 'test_plugin')
        self.assertEqual(pl.config, expected)
    def test_plugin__init_config_list(self):
        """A list config is kept as-is."""
        config = ['/one', '/two']
        self.assertPluginLoaderConfigBecomes(config, config)
    def test_plugin__init_config_str(self):
        """A string config is wrapped in a single-element list."""
        self.assertPluginLoaderConfigBecomes('test', ['test'])
    def test_plugin__init_config_none(self):
        """A None config becomes an empty list."""
        self.assertPluginLoaderConfigBecomes(None, [])
    def test__load_module_source_no_duplicate_names(self):
        """
        This test simulates importing 2 plugins with the same name,
        and validating that the import is short circuited if a file with the same name
        has already been imported
        """
        fixture_path = os.path.join(os.path.dirname(__file__), 'loader_fixtures')
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        one = pl._load_module_source('import_fixture', os.path.join(fixture_path, 'import_fixture.py'))
        # This line wouldn't even succeed if we didn't short circuit on finding a duplicate name
        two = pl._load_module_source('import_fixture', '/path/to/import_fixture.py')
        self.assertEqual(one, two)
    @patch('ansible.plugins.loader.glob')
    @patch.object(PluginLoader, '_get_paths_with_context')
    def test_all_no_duplicate_names(self, gp_mock, glob_mock):
        """
        This test goes along with ``test__load_module_source_no_duplicate_names``
        and ensures that we ignore duplicate imports on multiple paths
        """
        fixture_path = os.path.join(os.path.dirname(__file__), 'loader_fixtures')
        gp_mock.return_value = [
            MagicMock(path=fixture_path),
            MagicMock(path='/path/to'),
        ]
        # one glob result per mocked search path
        glob_mock.glob.side_effect = [
            [os.path.join(fixture_path, 'import_fixture.py')],
            ['/path/to/import_fixture.py']
        ]
        pl = PluginLoader('test', '', 'test', 'test_plugins')
        # Aside from needing ``list()`` so we can do a len, ``PluginLoader.all`` returns a generator
        # so ``list()`` actually causes ``PluginLoader.all`` to run.
        plugins = list(pl.all())
        self.assertEqual(len(plugins), 1)
        self.assertIn(os.path.join(fixture_path, 'import_fixture.py'), pl._module_cache)
        self.assertNotIn('/path/to/import_fixture.py', pl._module_cache)
| 5,325
|
Python
|
.py
| 106
| 42.839623
| 103
| 0.643242
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,986
|
test_script.py
|
ansible_ansible/test/units/plugins/inventory/test_script.py
|
# -*- coding: utf-8 -*-
# Copyright 2017 Chris Meyers <cmeyers@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import pytest
from unittest import mock
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.loader import PluginLoader
import unittest
from ansible.module_utils.common.text.converters import to_bytes, to_native
class TestInventoryModule(unittest.TestCase):
    """Error-path tests for the 'script' inventory plugin with subprocess.Popen mocked out."""
    def setUp(self):
        # minimal stand-in for an inventory object; the plugin only needs .cache
        class Inventory():
            cache = dict()
        # stand-in for a subprocess.Popen result; tests mutate these attributes
        class PopenResult():
            returncode = 0
            stdout = b""
            stderr = b""
            def communicate(self):
                return (self.stdout, self.stderr)
        self.popen_result = PopenResult()
        self.inventory = Inventory()
        self.loader = mock.MagicMock()
        self.loader.load = mock.MagicMock()
        inv_loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory', C.DEFAULT_INVENTORY_PLUGIN_PATH, 'inventory_plugins')
        self.inventory_module = inv_loader.get('script')
        self.inventory_module.set_options()
        # patch helper that also registers the undo as a cleanup
        def register_patch(name):
            patcher = mock.patch(name)
            self.addCleanup(patcher.stop)
            return patcher.start()
        self.popen = register_patch('subprocess.Popen')
        self.popen.return_value = self.popen_result
        self.BaseInventoryPlugin = register_patch('ansible.plugins.inventory.BaseInventoryPlugin')
        self.BaseInventoryPlugin.get_cache_prefix.return_value = 'abc123'
    def test_parse_subprocess_path_not_found_fail(self):
        """An OSError launching the script is wrapped in AnsibleError."""
        self.popen.side_effect = OSError("dummy text")
        with pytest.raises(AnsibleError) as e:
            self.inventory_module.parse(self.inventory, self.loader, '/foo/bar/foobar.py')
        assert e.value.message == "problem running /foo/bar/foobar.py --list (dummy text)"
    def test_parse_subprocess_err_code_fail(self):
        """A nonzero exit code fails, with the script's stderr in the message."""
        # non-ASCII bytes exercise the error-message decoding path
        self.popen_result.stdout = to_bytes(u"fooébar", errors='surrogate_escape')
        self.popen_result.stderr = to_bytes(u"dummyédata")
        self.popen_result.returncode = 1
        with pytest.raises(AnsibleError) as e:
            self.inventory_module.parse(self.inventory, self.loader, '/foo/bar/foobar.py')
        assert e.value.message == to_native("Inventory script (/foo/bar/foobar.py) had an execution error: "
                                            "dummyédata\n ")
    def test_parse_utf8_fail(self):
        """A loader failure parsing the script's output is wrapped in AnsibleError."""
        self.popen_result.returncode = 0
        self.popen_result.stderr = to_bytes("dummyédata")
        self.loader.load.side_effect = TypeError('obj must be string')
        with pytest.raises(AnsibleError) as e:
            self.inventory_module.parse(self.inventory, self.loader, '/foo/bar/foobar.py')
        assert e.value.message == to_native("failed to parse executable inventory script results from "
                                            "/foo/bar/foobar.py: obj must be string\ndummyédata\n")
    def test_parse_dict_fail(self):
        """Script output that parses to a non-dict is rejected."""
        self.popen_result.returncode = 0
        self.popen_result.stderr = to_bytes("dummyédata")
        self.loader.load.return_value = 'i am not a dict'
        with pytest.raises(AnsibleError) as e:
            self.inventory_module.parse(self.inventory, self.loader, '/foo/bar/foobar.py')
        assert e.value.message == to_native("failed to parse executable inventory script results from "
                                            "/foo/bar/foobar.py: needs to be a json dict\ndummyédata\n")
    def test_get_host_variables_subprocess_script_raises_error(self):
        """A nonzero exit from the --host call is wrapped in AnsibleError."""
        self.popen_result.returncode = 1
        self.popen_result.stderr = to_bytes("dummyéerror")
        with pytest.raises(AnsibleError) as e:
            self.inventory_module.get_host_variables('/foo/bar/foobar.py', 'dummy host')
        assert e.value.message == "Inventory script (/foo/bar/foobar.py) had an execution error: dummyéerror"
| 4,584
|
Python
|
.py
| 85
| 45.270588
| 135
| 0.680556
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,987
|
test_constructed.py
|
ansible_ansible/test/units/plugins/inventory/test_constructed.py
|
# -*- coding: utf-8 -*-
# Copyright 2019 Alan Rominger <arominge@redhat.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import pytest
from ansible.errors import AnsibleParserError
from ansible.plugins.inventory.constructed import InventoryModule
from ansible.inventory.data import InventoryData
from ansible.template import Templar
@pytest.fixture()
def inventory_module():
    """Build a constructed-inventory plugin wired to fresh inventory data and a templar."""
    plugin = InventoryModule()
    plugin.inventory = InventoryData()
    plugin.templar = Templar(None)
    plugin._options = {'leading_separator': True}
    return plugin
def test_group_by_value_only(inventory_module):
    """With empty prefix and separator, the raw variable value becomes the group name."""
    inventory = inventory_module.inventory
    inventory.add_host('foohost')
    inventory.set_variable('foohost', 'bar', 'my_group_name')
    host = inventory.get_host('foohost')
    keyed_groups = [{'prefix': '', 'separator': '', 'key': 'bar'}]
    inventory_module._add_host_to_keyed_groups(
        keyed_groups, host.vars, host.name, strict=False
    )
    assert 'my_group_name' in inventory.groups
    assert inventory.groups['my_group_name'].hosts == [host]
def test_keyed_group_separator(inventory_module):
    """Custom separators apply both with a prefix and around dict key/value pairs."""
    inventory = inventory_module.inventory
    inventory.add_host('farm')
    inventory.set_variable('farm', 'farmer', 'mcdonald')
    inventory.set_variable('farm', 'barn', {'cow': 'betsy'})
    host = inventory.get_host('farm')
    keyed_groups = [
        {'prefix': 'farmer', 'separator': '_old_', 'key': 'farmer'},
        {'separator': 'mmmmmmmmmm', 'key': 'barn'},
    ]
    inventory_module._add_host_to_keyed_groups(
        keyed_groups, host.vars, host.name, strict=False
    )
    for expected_name in ('farmer_old_mcdonald', 'mmmmmmmmmmcowmmmmmmmmmmbetsy'):
        assert expected_name in inventory.groups
        assert inventory.groups[expected_name].hosts == [host]
def test_keyed_group_empty_construction(inventory_module):
    """An empty dict value yields no groups, even under strict=True."""
    inventory = inventory_module.inventory
    inventory.add_host('farm')
    inventory.set_variable('farm', 'barn', {})
    host = inventory.get_host('farm')
    keyed_groups = [{'separator': 'mmmmmmmmmm', 'key': 'barn'}]
    inventory_module._add_host_to_keyed_groups(
        keyed_groups, host.vars, host.name, strict=True
    )
    assert host.groups == []
def test_keyed_group_host_confusion(inventory_module):
    """A keyed group whose name collides with an existing host name still receives the host."""
    inv = inventory_module.inventory
    inv.add_host('cow')
    inv.add_group('cow')
    host = inv.get_host('cow')
    host.vars['species'] = 'cow'
    inventory_module._add_host_to_keyed_groups(
        [{'separator': '', 'prefix': '', 'key': 'species'}],
        host.vars, host.name, strict=True
    )
    # group "cow" gains host "cow" despite the name clash
    assert inv.groups['cow'].hosts == [host]
def test_keyed_parent_groups(inventory_module):
    """parent_group collects each generated keyed group as its child."""
    inv = inventory_module.inventory
    for hostname in ('web1', 'web2'):
        inv.add_host(hostname)
        inv.set_variable(hostname, 'region', 'japan')
    hosts = [inv.get_host(name) for name in ('web1', 'web2')]
    keyed_groups = [
        {'prefix': 'region', 'key': 'region', 'parent_group': 'region_list'},
    ]
    for host in hosts:
        inventory_module._add_host_to_keyed_groups(
            keyed_groups, host.vars, host.name, strict=False
        )
    assert 'region_japan' in inv.groups
    assert 'region_list' in inv.groups
    region_group = inv.groups['region_japan']
    assert inv.groups['region_list'].child_groups == [region_group]
    assert region_group.hosts == hosts
def test_parent_group_templating(inventory_module):
    """Templated parent_group names render from hostvars, and a templated parent
    reuses (rather than duplicates) a group already created by another entry."""
    inventory_module.inventory.add_host('cow')
    inventory_module.inventory.set_variable('cow', 'sound', 'mmmmmmmmmm')
    inventory_module.inventory.set_variable('cow', 'nickname', 'betsy')
    host = inventory_module.inventory.get_host('cow')
    keyed_groups = [
        {
            'key': 'sound',
            'prefix': 'sound',
            'parent_group': '{{ nickname }}'  # templated parent, renders to "betsy"
        },
        {
            'key': 'nickname',
            'prefix': '',
            'separator': '',
            'parent_group': 'nickname'  # statically-named parent group, conflicting with hostvar
        },
        {
            'key': 'nickname',
            'separator': '',
            'parent_group': '{{ location | default("field") }}'  # undefined var falls back to default
        }
    ]
    inventory_module._add_host_to_keyed_groups(
        keyed_groups, host.vars, host.name, strict=True
    )
    # first keyed group, "betsy" is a parent group name dynamically generated
    betsys_group = inventory_module.inventory.groups['betsy']
    assert [child.name for child in betsys_group.child_groups] == ['sound_mmmmmmmmmm']
    # second keyed group, "nickname" is a statically-named root group
    nicknames_group = inventory_module.inventory.groups['nickname']
    assert [child.name for child in nicknames_group.child_groups] == ['betsy']
    # second keyed group actually generated the parent group of the first keyed group
    # assert that these are, in fact, the same object
    assert nicknames_group.child_groups[0] == betsys_group
    # second keyed group has two parents
    locations_group = inventory_module.inventory.groups['field']
    assert [child.name for child in locations_group.child_groups] == ['betsy']
def test_parent_group_templating_error(inventory_module):
    """An unrenderable parent_group template raises under strict=True and is skipped otherwise."""
    inv = inventory_module.inventory
    inv.add_host('cow')
    inv.set_variable('cow', 'nickname', 'betsy')
    host = inv.get_host('cow')
    keyed_groups = [
        {'key': 'nickname', 'separator': '', 'parent_group': '{{ location.barn-yard }}'},
    ]
    with pytest.raises(AnsibleParserError) as err:
        inventory_module._add_host_to_keyed_groups(
            keyed_groups, host.vars, host.name, strict=True
        )
    assert 'Could not generate parent group' in str(err.value)
    # with strict=False the invalid parent template does not raise
    inventory_module._add_host_to_keyed_groups(
        keyed_groups, host.vars, host.name, strict=False
    )
    # the child group is never created when its parent could not be generated
    assert 'betsy' not in inv.groups
def test_keyed_group_exclusive_argument(inventory_module):
    """default_value and trailing_separator cannot be combined in one keyed group."""
    inv = inventory_module.inventory
    inv.add_host('cow')
    inv.set_variable('cow', 'nickname', 'betsy')
    host = inv.get_host('cow')
    bad_entry = {
        'key': 'nickname',
        'separator': '_',
        'default_value': 'default_value_name',
        'trailing_separator': True,
    }
    with pytest.raises(AnsibleParserError) as err:
        inventory_module._add_host_to_keyed_groups(
            [bad_entry], host.vars, host.name, strict=True
        )
    assert 'parameters are mutually exclusive' in str(err.value)
def test_keyed_group_empty_value(inventory_module):
    """By default an empty dict value keeps the trailing separator in the group name."""
    inv = inventory_module.inventory
    inv.add_host('server0')
    inv.set_variable('server0', 'tags', {'environment': 'prod', 'status': ''})
    host = inv.get_host('server0')
    inventory_module._add_host_to_keyed_groups(
        [{'prefix': 'tag', 'separator': '_', 'key': 'tags'}],
        host.vars, host.name, strict=False
    )
    assert 'tag_environment_prod' in inv.groups
    assert 'tag_status_' in inv.groups
def test_keyed_group_dict_with_default_value(inventory_module):
    """default_value replaces empty string values inside a dict-valued key."""
    inv = inventory_module.inventory
    inv.add_host('server0')
    inv.set_variable('server0', 'tags', {'environment': 'prod', 'status': ''})
    host = inv.get_host('server0')
    inventory_module._add_host_to_keyed_groups(
        [{'prefix': 'tag', 'separator': '_', 'key': 'tags', 'default_value': 'running'}],
        host.vars, host.name, strict=False
    )
    assert 'tag_environment_prod' in inv.groups
    assert 'tag_status_running' in inv.groups
def test_keyed_group_str_no_default_value(inventory_module):
    """An empty string value produces no group when no default_value is set."""
    inv = inventory_module.inventory
    inv.add_host('server0')
    inv.set_variable('server0', 'tags', '')
    host = inv.get_host('server0')
    inventory_module._add_host_to_keyed_groups(
        [{'prefix': 'tag', 'separator': '_', 'key': 'tags'}],
        host.vars, host.name, strict=False
    )
    # an empty string value generates no group at all
    assert 'tag_' not in inv.groups
def test_keyed_group_str_with_default_value(inventory_module):
    """default_value substitutes for an empty string value."""
    inv = inventory_module.inventory
    inv.add_host('server0')
    inv.set_variable('server0', 'tags', '')
    host = inv.get_host('server0')
    inventory_module._add_host_to_keyed_groups(
        [{'prefix': 'tag', 'separator': '_', 'key': 'tags', 'default_value': 'running'}],
        host.vars, host.name, strict=False
    )
    assert 'tag_running' in inv.groups
def test_keyed_group_list_with_default_value(inventory_module):
    """default_value substitutes for empty strings inside a list-valued key."""
    inv = inventory_module.inventory
    inv.add_host('server0')
    inv.set_variable('server0', 'tags', ['test', ''])
    host = inv.get_host('server0')
    inventory_module._add_host_to_keyed_groups(
        [{'prefix': 'tag', 'separator': '_', 'key': 'tags', 'default_value': 'prod'}],
        host.vars, host.name, strict=False
    )
    assert 'tag_test' in inv.groups
    assert 'tag_prod' in inv.groups
def test_keyed_group_with_trailing_separator(inventory_module):
    """trailing_separator=False drops the separator left behind by an empty value."""
    inv = inventory_module.inventory
    inv.add_host('server0')
    inv.set_variable('server0', 'tags', {'environment': 'prod', 'status': ''})
    host = inv.get_host('server0')
    inventory_module._add_host_to_keyed_groups(
        [{'prefix': 'tag', 'separator': '_', 'key': 'tags', 'trailing_separator': False}],
        host.vars, host.name, strict=False
    )
    assert 'tag_environment_prod' in inv.groups
    assert 'tag_status' in inv.groups
| 12,028
|
Python
|
.py
| 302
| 32.913907
| 101
| 0.650017
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,988
|
test_inventory.py
|
ansible_ansible/test/units/plugins/inventory/test_inventory.py
|
# Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import string
import textwrap
from unittest import mock
from ansible import constants as C
import unittest
from ansible.module_utils.common.text.converters import to_text
from units.mock.path import mock_unfrackpath_noop
from ansible.inventory.manager import InventoryManager, split_host_pattern
from units.mock.loader import DictDataLoader
class TestInventory(unittest.TestCase):
    """Exercise host-pattern splitting, subscripting, and range handling in InventoryManager."""
    # pattern string -> expected result of split_host_pattern()
    patterns = {
        'a': ['a'],
        'a, b': ['a', 'b'],
        'a , b': ['a', 'b'],
        ' a,b ,c[1:2] ': ['a', 'b', 'c[1:2]'],
        '9a01:7f8:191:7701::9': ['9a01:7f8:191:7701::9'],
        '9a01:7f8:191:7701::9,9a01:7f8:191:7701::9': ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9'],
        '9a01:7f8:191:7701::9,9a01:7f8:191:7701::9,foo': ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9', 'foo'],
        'foo[1:2]': ['foo[1:2]'],
        'a::b': ['a::b'],
        'a:b': ['a', 'b'],
        ' a : b ': ['a', 'b'],
        'foo:bar:baz[1:2]': ['foo', 'bar', 'baz[1:2]'],
        'a,,b': ['a', 'b'],
        'a, ,b,,c, ,': ['a', 'b', 'c'],
        ',': [],
        '': [],
    }
    # list-of-patterns input -> flattened expected result
    pattern_lists = [
        [['a'], ['a']],
        [['a', 'b'], ['a', 'b']],
        [['a, b'], ['a', 'b']],
        [['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9,foo'],
         ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9', 'foo']]
    ]
    # pattern_string: [ ('base_pattern', (a,b)), ['x','y','z'] ]
    # a,b are the bounds of the subscript; x..z are the results of the subscript
    # when applied to string.ascii_letters.
    subscripts = {
        'a': [('a', None), list(string.ascii_letters)],
        'a[0]': [('a', (0, None)), ['a']],
        'a[1]': [('a', (1, None)), ['b']],
        'a[2:3]': [('a', (2, 3)), ['c', 'd']],
        'a[-1]': [('a', (-1, None)), ['Z']],
        'a[-2]': [('a', (-2, None)), ['Y']],
        'a[48:]': [('a', (48, -1)), ['W', 'X', 'Y', 'Z']],
        'a[49:]': [('a', (49, -1)), ['X', 'Y', 'Z']],
        'a[1:]': [('a', (1, -1)), list(string.ascii_letters[1:])],
    }
    # hostname range pattern -> expanded host names (numeric and alpha ranges, with step)
    ranges_to_expand = {
        'a[1:2]': ['a1', 'a2'],
        'a[1:10:2]': ['a1', 'a3', 'a5', 'a7', 'a9'],
        'a[a:b]': ['aa', 'ab'],
        'a[a:i:3]': ['aa', 'ad', 'ag'],
        'a[a:b][c:d]': ['aac', 'aad', 'abc', 'abd'],
        'a[0:1][2:3]': ['a02', 'a03', 'a12', 'a13'],
        'a[a:b][2:3]': ['aa2', 'aa3', 'ab2', 'ab3'],
    }
    def setUp(self):
        """Build an InventoryManager backed by an empty fake loader."""
        fake_loader = DictDataLoader({})
        self.i = InventoryManager(loader=fake_loader, sources=[None])
    def test_split_patterns(self):
        """split_host_pattern handles both single pattern strings and lists of patterns."""
        for p in self.patterns:
            r = self.patterns[p]
            self.assertEqual(r, split_host_pattern(p))
        for p, r in self.pattern_lists:
            self.assertEqual(r, split_host_pattern(p))
    def test_ranges(self):
        """_split_subscript parses bounds and _apply_subscript slices as expected."""
        for s in self.subscripts:
            r = self.subscripts[s]
            self.assertEqual(r[0], self.i._split_subscript(s))
            self.assertEqual(
                r[1],
                self.i._apply_subscript(
                    list(string.ascii_letters),
                    r[0][1]
                )
            )
class TestInventoryPlugins(unittest.TestCase):
    """End-to-end checks of the ini and yaml inventory plugins via InventoryManager."""
    def test_empty_inventory(self):
        """An empty source still creates the implicit all/ungrouped groups, both empty."""
        inventory = self._get_inventory('')
        self.assertIn('all', inventory.groups)
        self.assertIn('ungrouped', inventory.groups)
        self.assertFalse(inventory.groups['all'].get_hosts())
        self.assertFalse(inventory.groups['ungrouped'].get_hosts())
    def test_ini(self):
        """Hosts listed before any section land in ungrouped implicitly."""
        self._test_default_groups("""
            host1
            host2
            host3
            [servers]
            host3
            host4
            host5
            """)
    def test_ini_explicit_ungrouped(self):
        """An explicit [ungrouped] section behaves the same as the implicit one."""
        self._test_default_groups("""
            [ungrouped]
            host1
            host2
            host3
            [servers]
            host3
            host4
            host5
            """)
    def test_ini_variables_stringify(self):
        """Inline ini host vars stay strings only when the source value was a string."""
        values = ['string', 'no', 'No', 'false', 'FALSE', [], False, 0]
        inventory_content = "host1 "
        inventory_content += ' '.join(['var%s=%s' % (i, to_text(x)) for i, x in enumerate(values)])
        inventory = self._get_inventory(inventory_content)
        variables = inventory.get_host('host1').vars
        for i in range(len(values)):
            if isinstance(values[i], str):
                self.assertIsInstance(variables['var%s' % i], str)
            else:
                self.assertIsInstance(variables['var%s' % i], type(values[i]))
    @mock.patch('ansible.inventory.manager.unfrackpath', mock_unfrackpath_noop)
    @mock.patch('os.path.exists', lambda x: True)
    @mock.patch('os.access', lambda x, y: True)
    def test_yaml_inventory(self, filename="test.yaml"):
        """YAML plugin puts top-level hosts into both all and ungrouped."""
        inventory_content = {filename: textwrap.dedent("""\
        ---
        all:
            hosts:
                test1:
                test2:
        """)}
        C.INVENTORY_ENABLED = ['yaml']
        fake_loader = DictDataLoader(inventory_content)
        im = InventoryManager(loader=fake_loader, sources=filename)
        self.assertTrue(im._inventory.hosts)
        self.assertIn('test1', im._inventory.hosts)
        self.assertIn('test2', im._inventory.hosts)
        self.assertIn(im._inventory.get_host('test1'), im._inventory.groups['all'].hosts)
        self.assertIn(im._inventory.get_host('test2'), im._inventory.groups['all'].hosts)
        self.assertEqual(len(im._inventory.groups['all'].hosts), 2)
        self.assertIn(im._inventory.get_host('test1'), im._inventory.groups['ungrouped'].hosts)
        self.assertIn(im._inventory.get_host('test2'), im._inventory.groups['ungrouped'].hosts)
        self.assertEqual(len(im._inventory.groups['ungrouped'].hosts), 2)
    def _get_inventory(self, inventory_content):
        """Parse *inventory_content* through a fake loader keyed by this file's path."""
        fake_loader = DictDataLoader({__file__: inventory_content})
        return InventoryManager(loader=fake_loader, sources=[__file__])
    def _test_default_groups(self, inventory_content):
        """Assert the standard all/ungrouped/servers membership for the shared fixtures."""
        inventory = self._get_inventory(inventory_content)
        self.assertIn('all', inventory.groups)
        self.assertIn('ungrouped', inventory.groups)
        all_hosts = set(host.name for host in inventory.groups['all'].get_hosts())
        self.assertEqual(set(['host1', 'host2', 'host3', 'host4', 'host5']), all_hosts)
        ungrouped_hosts = set(host.name for host in inventory.groups['ungrouped'].get_hosts())
        self.assertEqual(set(['host1', 'host2']), ungrouped_hosts)
        servers_hosts = set(host.name for host in inventory.groups['servers'].get_hosts())
        self.assertEqual(set(['host3', 'host4', 'host5']), servers_hosts)
| 7,423
|
Python
|
.py
| 170
| 35.229412
| 113
| 0.564699
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,989
|
conftest.py
|
ansible_ansible/test/units/plugins/become/conftest.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2017 Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.cli.arguments import option_helpers as opt_help
from ansible.utils import context_objects as co
@pytest.fixture
def parser():
    """Build a CLI parser carrying every option group the become tests rely on."""
    test_parser = opt_help.create_base_parser('testparser')
    # registration order matches the original fixture
    for register in (
        opt_help.add_runas_options,
        opt_help.add_meta_options,
        opt_help.add_runtask_options,
        opt_help.add_vault_options,
        opt_help.add_async_options,
        opt_help.add_connect_options,
        opt_help.add_subset_options,
        opt_help.add_check_options,
        opt_help.add_inventory_options,
    ):
        register(test_parser)
    return test_parser
@pytest.fixture
def reset_cli_args():
    """Drop the GlobalCLIArgs singleton before and after each test."""
    def _clear():
        co.GlobalCLIArgs._Singleton__instance = None
    _clear()
    yield
    _clear()
| 956
|
Python
|
.py
| 26
| 33.269231
| 92
| 0.757872
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,990
|
test_su.py
|
ansible_ansible/test/units/plugins/become/test_su.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2020 Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import re
from ansible import context
from ansible.plugins.loader import become_loader, shell_loader
def test_su(mocker, parser, reset_cli_args):
    """su wraps the command for the become user with the configured shell executable."""
    context._init_global_context(parser.parse_args([]))
    become = become_loader.get('su')
    shell = shell_loader.get('sh')
    shell.executable = "/bin/bash"
    become.set_options(direct={
        'become_user': 'foo',
        'become_flags': '',
    })
    built = become.build_become_command('/bin/foo', shell)
    assert re.match(r"""su\s+foo -c '/bin/bash -c '"'"'echo BECOME-SUCCESS-.+?; /bin/foo'"'"''""", built)
| 792
|
Python
|
.py
| 20
| 35.6
| 103
| 0.670157
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,991
|
test_sudo.py
|
ansible_ansible/test/units/plugins/become/test_sudo.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2020 Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import re
from ansible import context
from ansible.plugins.loader import become_loader, shell_loader
def test_sudo(mocker, parser, reset_cli_args):
    """sudo builds the expected command line; when a password is supplied the
    non-interactive flags (-n / --non-interactive) must be stripped so sudo can prompt.

    The five scenarios below are the originals, refactored into one data-driven
    loop: (become_pass, become_flags, expected command regex).
    """
    options = parser.parse_args([])
    context._init_global_context(options)
    sudo = become_loader.get('sudo')
    sh = shell_loader.get('sh')
    sh.executable = "/bin/bash"
    cases = [
        # no password: flags pass through untouched
        (None, '-n -s -H',
         r"""sudo\s+-n -s -H\s+-u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'"""),
        # password set: -n removed, prompt added
        ('testpass', '-n -s -H',
         r"""sudo\s+-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'"""),
        # password set: -n stripped even inside a combined short-flag cluster
        ('testpass', '-snH',
         r"""sudo\s+-sH\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'"""),
        # password set: long-form --non-interactive removed
        ('testpass', '--non-interactive -s -H',
         r"""sudo\s+-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'"""),
        # password set: -n stripped from a cluster that also carries an argument flag (-C5)
        ('testpass', '--non-interactive -nC5 -s -H',
         r"""sudo\s+-C5\s-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'"""),
    ]
    for become_pass, become_flags, expected in cases:
        direct = {
            'become_user': 'foo',
            'become_flags': become_flags,
        }
        if become_pass is not None:
            direct['become_pass'] = become_pass
        sudo.set_options(direct=direct)
        cmd = sudo.build_become_command('/bin/foo', sh)
        assert re.match(expected, cmd), cmd
| 2,245
|
Python
|
.py
| 48
| 41
| 160
| 0.604128
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,992
|
test_raw.py
|
ansible_ansible/test/units/plugins/action/test_raw.py
|
# (c) 2016, Saran Ahluwalia <ahlusar.ahluwalia@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import unittest
from unittest.mock import MagicMock, Mock
from ansible.plugins.action.raw import ActionModule
from ansible.playbook.task import Task
from ansible.plugins.loader import connection_loader
class TestCopyResultExclude(unittest.TestCase):
    """Behavioral checks for the raw action plugin over a real local connection."""
    def setUp(self):
        # Use a real 'local' connection so the shell plumbing is realistic;
        # the play context itself is mocked.
        self.play_context = Mock()
        self.play_context.shell = 'sh'
        self.connection = connection_loader.get('local', self.play_context, os.devnull)
    def tearDown(self):
        pass
    def _build_task(self):
        """Return a mocked Task carrying the minimal attributes the raw action reads."""
        task = MagicMock(Task)
        task.async_val = False
        task.diff = False
        task.check_mode = False
        task.environment = None
        task.args = {'_raw_params': 'Args1'}
        return task
    # The current behavior of the raw action in regards to executable is currently in question;
    # the test_raw_executable_is_not_empty_string verifies the current behavior (whether it is desired or not).
    # Please refer to the following for context:
    # Issue: https://github.com/ansible/ansible/issues/16054
    # PR: https://github.com/ansible/ansible/pull/16085
    def test_raw_executable_is_not_empty_string(self):
        """run() must forward the raw params with executable=False."""
        task = self._build_task()
        self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
        self.mock_am._low_level_execute_command = Mock(return_value={})
        self.mock_am.display = Mock()
        self.mock_am._admin_users = ['root', 'toor']
        self.mock_am.run()
        self.mock_am._low_level_execute_command.assert_called_with('Args1', executable=False)
    def test_raw_check_mode_is_True(self):
        """Constructing the action module in check mode must not raise."""
        task = self._build_task()
        task.check_mode = True
        self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
    def test_raw_test_environment_is_None(self):
        """The mocked task keeps environment=None after module construction."""
        task = self._build_task()
        self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
        self.mock_am._low_level_execute_command = Mock(return_value={})
        self.mock_am.display = Mock()
        self.assertEqual(task.environment, None)
    def test_raw_task_vars_is_not_None(self):
        """Passing task_vars to run() must not mutate the task's environment."""
        task = self._build_task()
        self.mock_am = ActionModule(task, self.connection, self.play_context, loader=None, templar=None, shared_loader_obj=None)
        self.mock_am._low_level_execute_command = Mock(return_value={})
        self.mock_am.display = Mock()
        self.mock_am.run(task_vars={'a': 'b'})
        self.assertEqual(task.environment, None)
| 3,395
|
Python
|
.py
| 68
| 44.088235
| 128
| 0.707753
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,993
|
test_action.py
|
ansible_ansible/test/units/plugins/action/test_action.py
|
# -*- coding: utf-8 -*-
# (c) 2015, Florian Apolloner <florian@apolloner.eu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import re
from importlib import import_module
from ansible import constants as C
import unittest
from unittest.mock import patch, MagicMock, mock_open
from ansible.errors import AnsibleError, AnsibleAuthenticationFailure
import builtins
import shlex
from ansible.module_utils.common.text.converters import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.action import ActionBase
from ansible.plugins.loader import init_plugin_loader
from ansible.template import Templar
from ansible.vars.clean import clean_facts
from units.mock.loader import DictDataLoader
# Fake Python module source carrying the <<...>> substitution markers that
# _configure_module() is expected to replace during module formatting.
python_module_replacers = br"""
#!/usr/bin/python
#ANSIBLE_VERSION = "<<ANSIBLE_VERSION>>"
#MODULE_COMPLEX_ARGS = "<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"
#SELINUX_SPECIAL_FS="<<SELINUX_SPECIAL_FILESYSTEMS>>"
test = u'Toshio \u304f\u3089\u3068\u307f'
from ansible.module_utils.basic import *
"""
# Fake PowerShell module source with the Windows JSON-args marker.
powershell_module_replacers = b"""
WINDOWS_ARGS = "<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"
# POWERSHELL_COMMON
"""
def _action_base():
    """Construct a DerivedActionBase backed entirely by mocks and an empty loader."""
    empty_loader = DictDataLoader({})
    shared_loader = MagicMock()
    shared_loader.module_loader = MagicMock()
    shared_loader.connection_loader = MagicMock()
    return DerivedActionBase(
        task=None,
        connection=MagicMock(),
        play_context=MagicMock(),
        loader=empty_loader,
        templar=None,
        shared_loader_obj=shared_loader,
    )
class DerivedActionBase(ActionBase):
    """Minimal concrete ActionBase so the base-class helper methods can be tested."""
    TRANSFERS_FILES = False

    def run(self, tmp=None, task_vars=None):
        # Delegate straight to ActionBase.run(); the tests only target the helpers.
        return super().run(tmp=tmp, task_vars=task_vars)
class TestActionBase(unittest.TestCase):
def test_action_base_run(self):
mock_task = MagicMock()
mock_task.action = "foo"
mock_task.args = dict(a=1, b=2, c=3)
mock_connection = MagicMock()
play_context = PlayContext()
mock_task.async_val = None
action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
results = action_base.run()
self.assertEqual(results, dict())
mock_task.async_val = 0
action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
results = action_base.run()
self.assertEqual(results, {})
def test_action_base__configure_module(self):
init_plugin_loader()
# Pre-populate the ansible.builtin collection
# so reading the ansible_builtin_runtime.yml happens
# before the mock_open below
import_module('ansible_collections.ansible.builtin')
fake_loader = DictDataLoader({
})
# create our fake task
mock_task = MagicMock()
mock_task.action = "copy"
mock_task.async_val = 0
mock_task.delegate_to = None
# create a mock connection, so we don't actually try and connect to things
mock_connection = MagicMock()
# create a mock shared loader object
def mock_find_plugin_with_context(name, options, collection_list=None):
mockctx = MagicMock()
if name == 'badmodule':
mockctx.resolved = False
mockctx.plugin_resolved_path = None
elif '.ps1' in options:
mockctx.resolved = True
mockctx.plugin_resolved_path = '/fake/path/to/%s.ps1' % name
else:
mockctx.resolved = True
mockctx.plugin_resolved_path = '/fake/path/to/%s' % name
return mockctx
mock_module_loader = MagicMock()
mock_module_loader.find_plugin_with_context.side_effect = mock_find_plugin_with_context
mock_shared_obj_loader = MagicMock()
mock_shared_obj_loader.module_loader = mock_module_loader
# we're using a real play context here
play_context = PlayContext()
# our test class
action_base = DerivedActionBase(
task=mock_task,
connection=mock_connection,
play_context=play_context,
loader=fake_loader,
templar=Templar(loader=fake_loader),
shared_loader_obj=mock_shared_obj_loader,
)
# test python module formatting
with patch.object(builtins, 'open', mock_open(read_data=to_bytes(python_module_replacers.strip(), encoding='utf-8'))):
with patch.object(os, 'rename'):
mock_task.args = dict(a=1, foo='fö〩')
mock_connection.module_implementation_preferences = ('',)
(style, shebang, data, path) = action_base._configure_module(mock_task.action, mock_task.args,
task_vars=dict(ansible_python_interpreter='/usr/bin/python',
ansible_playbook_python='/usr/bin/python'))
self.assertEqual(style, "new")
self.assertEqual(shebang, u"#!/usr/bin/python")
# test module not found
self.assertRaises(AnsibleError, action_base._configure_module, 'badmodule', mock_task.args, {})
# test powershell module formatting
with patch.object(builtins, 'open', mock_open(read_data=to_bytes(powershell_module_replacers.strip(), encoding='utf-8'))):
mock_task.action = 'win_copy'
mock_task.args = dict(b=2)
mock_connection.module_implementation_preferences = ('.ps1',)
(style, shebang, data, path) = action_base._configure_module('stat', mock_task.args, {})
self.assertEqual(style, "new")
self.assertEqual(shebang, u'#!powershell')
# test module not found
self.assertRaises(AnsibleError, action_base._configure_module, 'badmodule', mock_task.args, {})
def test_action_base__compute_environment_string(self):
fake_loader = DictDataLoader({
})
# create our fake task
mock_task = MagicMock()
mock_task.action = "copy"
mock_task.args = dict(a=1)
# create a mock connection, so we don't actually try and connect to things
def env_prefix(**args):
return ' '.join(['%s=%s' % (k, shlex.quote(str(v))) for k, v in args.items()])
mock_connection = MagicMock()
mock_connection._shell.env_prefix.side_effect = env_prefix
# we're using a real play context here
play_context = PlayContext()
# and we're using a real templar here too
templar = Templar(loader=fake_loader)
# our test class
action_base = DerivedActionBase(
task=mock_task,
connection=mock_connection,
play_context=play_context,
loader=fake_loader,
templar=templar,
shared_loader_obj=None,
)
# test standard environment setup
mock_task.environment = [dict(FOO='foo'), None]
env_string = action_base._compute_environment_string()
self.assertEqual(env_string, "FOO=foo")
# test where environment is not a list
mock_task.environment = dict(FOO='foo')
env_string = action_base._compute_environment_string()
self.assertEqual(env_string, "FOO=foo")
# test environment with a variable in it
templar.available_variables = dict(the_var='bar')
mock_task.environment = [dict(FOO='{{the_var}}')]
env_string = action_base._compute_environment_string()
self.assertEqual(env_string, "FOO=bar")
# test with a bad environment set
mock_task.environment = dict(FOO='foo')
mock_task.environment = ['hi there']
self.assertRaises(AnsibleError, action_base._compute_environment_string)
def test_action_base__early_needs_tmp_path(self):
# create our fake task
mock_task = MagicMock()
# create a mock connection, so we don't actually try and connect to things
mock_connection = MagicMock()
# we're using a real play context here
play_context = PlayContext()
# our test class
action_base = DerivedActionBase(
task=mock_task,
connection=mock_connection,
play_context=play_context,
loader=None,
templar=None,
shared_loader_obj=None,
)
self.assertFalse(action_base._early_needs_tmp_path())
action_base.TRANSFERS_FILES = True
self.assertTrue(action_base._early_needs_tmp_path())
def test_action_base__make_tmp_path(self):
# create our fake task
mock_task = MagicMock()
def get_shell_opt(opt):
assert opt == 'admin_users'
ret = ['root', 'toor', 'Administrator']
return ret
# create a mock connection, so we don't actually try and connect to things
mock_connection = MagicMock()
mock_connection.transport = 'ssh'
mock_connection._shell.mkdtemp.return_value = 'mkdir command'
mock_connection._shell.join_path.side_effect = os.path.join
mock_connection._shell.get_option = get_shell_opt
mock_connection._shell.HOMES_RE = re.compile(r'(\'|\")?(~|\$HOME)(.*)')
# we're using a real play context here
play_context = PlayContext()
play_context.become = True
play_context.become_user = 'foo'
mock_task.become = True
mock_task.become_user = True
# our test class
action_base = DerivedActionBase(
task=mock_task,
connection=mock_connection,
play_context=play_context,
loader=None,
templar=None,
shared_loader_obj=None,
)
action_base._low_level_execute_command = MagicMock()
action_base._low_level_execute_command.return_value = dict(rc=0, stdout='/some/path')
self.assertEqual(action_base._make_tmp_path('root'), '/some/path/')
# empty path fails
action_base._low_level_execute_command.return_value = dict(rc=0, stdout='')
self.assertRaises(AnsibleError, action_base._make_tmp_path, 'root')
# authentication failure
action_base._low_level_execute_command.return_value = dict(rc=5, stdout='')
self.assertRaises(AnsibleError, action_base._make_tmp_path, 'root')
# ssh error
action_base._low_level_execute_command.return_value = dict(rc=255, stdout='', stderr='')
self.assertRaises(AnsibleError, action_base._make_tmp_path, 'root')
self.assertRaises(AnsibleError, action_base._make_tmp_path, 'root')
# general error
action_base._low_level_execute_command.return_value = dict(rc=1, stdout='some stuff here', stderr='')
self.assertRaises(AnsibleError, action_base._make_tmp_path, 'root')
action_base._low_level_execute_command.return_value = dict(rc=1, stdout='some stuff here', stderr='No space left on device')
self.assertRaises(AnsibleError, action_base._make_tmp_path, 'root')
def test_action_base__fixup_perms2(self):
    """Exercise _fixup_perms2() across its whole permission-fallback chain.

    The method is walked through, in order: the Windows short-circuit,
    the non-unprivileged-become path (chmod +x), then the unprivileged
    fallbacks: setfacl, chmod, chown, macOS `chmod +a`, Solaris
    `chmod A+...`, common remote group via chgrp, world-readable tmpdir,
    and finally the catch-all error.
    """
    mock_task = MagicMock()
    mock_connection = MagicMock()
    play_context = PlayContext()
    action_base = DerivedActionBase(
        task=mock_task,
        connection=mock_connection,
        play_context=play_context,
        loader=None,
        templar=None,
        shared_loader_obj=None,
    )
    action_base._low_level_execute_command = MagicMock()
    remote_paths = ['/tmp/foo/bar.txt', '/tmp/baz.txt']
    remote_user = 'remoteuser1'

    # Used for skipping down to common group dir.
    CHMOD_ACL_FLAGS = ('+a', 'A+user:remoteuser2:r:allow')

    # NOTE: these closures read `remote_paths`/`remote_user` from this
    # scope, so rebinding `remote_user` below changes what they pass.
    def runWithNoExpectation(execute=False):
        return action_base._fixup_perms2(
            remote_paths,
            remote_user=remote_user,
            execute=execute)

    def assertSuccess(execute=False):
        # On success _fixup_perms2 returns the path list unchanged.
        self.assertEqual(runWithNoExpectation(execute), remote_paths)

    def assertThrowRegex(regex, execute=False):
        self.assertRaisesRegex(
            AnsibleError,
            regex,
            action_base._fixup_perms2,
            remote_paths,
            remote_user=remote_user,
            execute=execute)

    def get_shell_option_for_arg(args_kv, default):
        """A helper for get_shell_option. Returns a function that, if
        called with ``option`` that exists in args_kv, will return the
        value, else will return ``default`` for every other given arg"""
        def _helper(option, *args, **kwargs):
            return args_kv.get(option, default)
        return _helper

    action_base.get_become_option = MagicMock()
    action_base.get_become_option.return_value = 'remoteuser2'

    # Step 1: On windows, we just return remote_paths
    action_base._connection._shell._IS_WINDOWS = True
    assertSuccess(execute=False)
    assertSuccess(execute=True)

    # But if we're not on windows....we have more work to do.
    action_base._connection._shell._IS_WINDOWS = False

    # Step 2: We're /not/ becoming an unprivileged user
    action_base._remote_chmod = MagicMock()
    action_base._is_become_unprivileged = MagicMock()
    action_base._is_become_unprivileged.return_value = False
    # Two subcases:
    #   - _remote_chmod rc is 0
    #   - _remote_chmod rc is not 0, something failed
    action_base._remote_chmod.return_value = {
        'rc': 0,
        'stdout': 'some stuff here',
        'stderr': '',
    }
    assertSuccess(execute=True)

    # When execute=False, we just get the list back. But add it here for
    # completion. chmod is never called.
    assertSuccess()

    action_base._remote_chmod.return_value = {
        'rc': 1,
        'stdout': 'some stuff here',
        'stderr': 'and here',
    }
    assertThrowRegex(
        'Failed to set execute bit on remote files',
        execute=True)

    # Step 3: we are becoming unprivileged
    action_base._is_become_unprivileged.return_value = True

    # Step 3a: setfacl succeeds outright
    action_base._remote_set_user_facl = MagicMock()
    action_base._remote_set_user_facl.return_value = {
        'rc': 0,
        'stdout': '',
        'stderr': '',
    }
    assertSuccess()

    # Step 3b: chmod +x if we need to
    # To get here, setfacl failed, so mock it as such.
    action_base._remote_set_user_facl.return_value = {
        'rc': 1,
        'stdout': '',
        'stderr': '',
    }
    action_base._remote_chmod.return_value = {
        'rc': 1,
        'stdout': 'some stuff here',
        'stderr': '',
    }
    assertThrowRegex(
        'Failed to set file mode or acl on remote temporary files',
        execute=True)
    action_base._remote_chmod.return_value = {
        'rc': 0,
        'stdout': 'some stuff here',
        'stderr': '',
    }
    assertSuccess(execute=True)

    # Step 3c: chown
    action_base._remote_chown = MagicMock()
    action_base._remote_chown.return_value = {
        'rc': 0,
        'stdout': '',
        'stderr': '',
    }
    assertSuccess()
    action_base._remote_chown.return_value = {
        'rc': 1,
        'stdout': '',
        'stderr': '',
    }
    # chown failed AND the remote user is an admin user ('root'):
    # that combination is a hard error.
    remote_user = 'root'
    action_base._get_admin_users = MagicMock()
    action_base._get_admin_users.return_value = ['root']
    assertThrowRegex('user would be unable to read the file.')
    remote_user = 'remoteuser1'

    # Step 3d: chmod +a on osx
    assertSuccess()
    action_base._remote_chmod.assert_called_with(
        ['remoteuser2 allow read'] + remote_paths,
        '+a')

    # This case can cause Solaris chmod to return 5 which the ssh plugin
    # treats as failure. To prevent a regression and ensure we still try the
    # rest of the cases below, we mock the thrown exception here.
    # This function ensures that only the macOS case (+a) throws this.
    def raise_if_plus_a(definitely_not_underscore, mode):
        if mode == '+a':
            raise AnsibleAuthenticationFailure()
        return {'rc': 0, 'stdout': '', 'stderr': ''}

    action_base._remote_chmod.side_effect = raise_if_plus_a
    assertSuccess()

    # Step 3e: chmod A+ on Solaris
    # We threw AnsibleAuthenticationFailure above, try Solaris fallback.
    # Based on our lambda above, it should be successful.
    action_base._remote_chmod.assert_called_with(
        remote_paths,
        'A+user:remoteuser2:r:allow')
    assertSuccess()

    # Step 3f: Common group
    def rc_1_if_chmod_acl(definitely_not_underscore, mode):
        # Fail only the two ACL-style chmod invocations so the code
        # falls through to the common-remote-group branch.
        rc = 0
        if mode in CHMOD_ACL_FLAGS:
            rc = 1
        return {'rc': rc, 'stdout': '', 'stderr': ''}

    action_base._remote_chmod = MagicMock()
    action_base._remote_chmod.side_effect = rc_1_if_chmod_acl
    # NOTE(review): the original get_shell_option is saved here but never
    # restored afterwards — presumably intentional for a test method.
    get_shell_option = action_base.get_shell_option
    action_base.get_shell_option = MagicMock()
    action_base.get_shell_option.side_effect = get_shell_option_for_arg(
        {
            'common_remote_group': 'commongroup',
        },
        None)
    action_base._remote_chgrp = MagicMock()
    action_base._remote_chgrp.return_value = {
        'rc': 0,
        'stdout': '',
        'stderr': '',
    }
    # TODO: Add test to assert warning is shown if
    # world_readable_temp is set in this case.
    assertSuccess()
    action_base._remote_chgrp.assert_called_once_with(
        remote_paths,
        'commongroup')

    # Step 4: world-readable tmpdir
    action_base.get_shell_option.side_effect = get_shell_option_for_arg(
        {
            'world_readable_temp': True,
            'common_remote_group': None,
        },
        None)
    action_base._remote_chmod.return_value = {
        'rc': 0,
        'stdout': 'some stuff here',
        'stderr': '',
    }
    assertSuccess()
    action_base._remote_chmod = MagicMock()
    action_base._remote_chmod.return_value = {
        'rc': 1,
        'stdout': 'some stuff here',
        'stderr': '',
    }
    assertThrowRegex('Failed to set file mode on remote files')

    # Otherwise if we make it here in this state, we hit the catch-all
    action_base.get_shell_option.side_effect = get_shell_option_for_arg(
        {},
        None)
    assertThrowRegex('on the temporary files Ansible needs to create')
def test_action_base__remove_tmp_path(self):
    """Smoke-test _remove_tmp_path() for a rejected and an accepted path.

    Neither call returns anything or raises; this exercises both the
    "doesn't look like an ansible-tmp path" guard and the removal path.
    """
    connection = MagicMock()
    connection._shell.remove.return_value = 'rm some stuff'
    action = DerivedActionBase(
        task=MagicMock(),
        connection=connection,
        play_context=PlayContext(),
        loader=None,
        templar=None,
        shared_loader_obj=None,
    )
    action._low_level_execute_command = MagicMock()
    action._remove_tmp_path('/bad/path/dont/remove')
    action._remove_tmp_path('/good/path/to/ansible-tmp-thing')
@patch('os.unlink')
@patch('os.fdopen')
@patch('tempfile.mkstemp')
def test_action_base__transfer_data(self, mock_mkstemp, mock_fdopen, mock_unlink):
    """_transfer_data() should hand back the remote path for str and dict
    payloads, and wrap any local write failure in an AnsibleError."""
    connection = MagicMock()
    connection.put_file.return_value = None
    action = DerivedActionBase(
        task=MagicMock(),
        connection=connection,
        play_context=PlayContext(),
        loader=None,
        templar=None,
        shared_loader_obj=None,
    )

    # Fake out the local tempfile machinery so nothing touches disk.
    mock_mkstemp.return_value = (MagicMock(), MagicMock())
    mock_unlink.return_value = None
    fake_file = MagicMock()
    fake_file.write.return_value = None
    fake_file.flush.return_value = None
    fake_file.close.return_value = None
    mock_fdopen.return_value = fake_file

    remote = '/path/to/remote/file'
    payloads = (
        'some data',
        'some mixed data: fö〩',
        dict(some_key='some value'),
        dict(some_key='fö〩'),
    )
    for payload in payloads:
        self.assertEqual(action._transfer_data(remote, payload), remote)

    # Any exception while writing the temp file becomes an AnsibleError.
    fake_file.write.side_effect = Exception()
    self.assertRaises(AnsibleError, action._transfer_data, remote, '')
def test_action_base__execute_remote_stat(self):
    """Verify _execute_remote_stat()'s handling of the stat module result."""
    action = DerivedActionBase(
        task=MagicMock(),
        connection=MagicMock(),
        play_context=PlayContext(),
        loader=None,
        templar=None,
        shared_loader_obj=None,
    )
    stat_module = MagicMock()
    action._execute_module = stat_module

    def remote_stat():
        return action._execute_remote_stat(
            path='/path/to/file', all_vars=dict(), follow=False)

    # Normal case: checksum passed straight through.
    stat_module.return_value = dict(
        stat=dict(checksum='1111111111111111111111111111111111', exists=True))
    self.assertEqual(
        remote_stat()['checksum'],
        '1111111111111111111111111111111111')

    # Missing file: checksum is the sentinel '1'.
    stat_module.return_value = dict(stat=dict(exists=False))
    result = remote_stat()
    self.assertFalse(result['exists'])
    self.assertEqual(result['checksum'], '1')

    # File exists but the module returned no checksum: empty string.
    stat_module.return_value = dict(stat=dict(exists=True))
    result = remote_stat()
    self.assertTrue(result['exists'])
    self.assertEqual(result['checksum'], '')

    # A failed stat call must surface as an AnsibleError.
    stat_module.return_value = dict(failed=True, msg="because I said so")
    self.assertRaises(
        AnsibleError, action._execute_remote_stat,
        path='/path/to/file', all_vars=dict(), follow=False)
def test_action_base__execute_module(self):
    """End-to-end _execute_module() with every collaborator mocked out.

    Covers, in order: 'new'-style modules with and without explicit
    name/args, 'old' and 'non_native_want_json' styles, become enabled,
    an invalid (empty) shebang, and check mode both with and without
    module support for it.
    """
    # create our fake task
    mock_task = MagicMock()
    mock_task.action = 'copy'
    mock_task.args = dict(a=1, b=2, c=3)
    mock_task.diff = False
    mock_task.check_mode = False
    mock_task.no_log = False

    # create a mock connection, so we don't actually try and connect to things
    def get_option(option):
        # only 'admin_users' is given a real value; anything else is None
        return {'admin_users': ['root', 'toor']}.get(option)

    mock_connection = MagicMock()
    mock_connection.socket_path = None
    mock_connection._shell.get_remote_filename.return_value = 'copy.py'
    mock_connection._shell.join_path.side_effect = os.path.join
    mock_connection._shell.tmpdir = '/var/tmp/mytempdir'
    mock_connection._shell.get_option = get_option

    # we're using a real play context here
    play_context = PlayContext()

    # our test class
    action_base = DerivedActionBase(
        task=mock_task,
        connection=mock_connection,
        play_context=play_context,
        loader=None,
        templar=None,
        shared_loader_obj=None,
    )

    # fake a lot of methods as we test those elsewhere
    action_base._configure_module = MagicMock()
    action_base._supports_check_mode = MagicMock()
    action_base._is_pipelining_enabled = MagicMock()
    action_base._make_tmp_path = MagicMock()
    action_base._transfer_data = MagicMock()
    action_base._compute_environment_string = MagicMock()
    action_base._low_level_execute_command = MagicMock()
    action_base._fixup_perms2 = MagicMock()

    # _configure_module returns (style, shebang, data, path)
    action_base._configure_module.return_value = ('new', '#!/usr/bin/python', 'this is the module data', 'path')
    action_base._is_pipelining_enabled.return_value = False
    action_base._compute_environment_string.return_value = ''
    action_base._connection.has_pipelining = False
    action_base._make_tmp_path.return_value = '/the/tmp/path'
    action_base._low_level_execute_command.return_value = dict(stdout='{"rc": 0, "stdout": "ok"}')
    self.assertEqual(action_base._execute_module(module_name=None, module_args=None), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))
    self.assertEqual(
        action_base._execute_module(
            module_name='foo',
            module_args=dict(z=9, y=8, x=7),
            task_vars=dict(a=1)
        ),
        dict(
            _ansible_parsed=True,
            rc=0,
            stdout="ok",
            stdout_lines=['ok'],
        )
    )

    # test with needing/removing a remote tmp path
    action_base._configure_module.return_value = ('old', '#!/usr/bin/python', 'this is the module data', 'path')
    action_base._is_pipelining_enabled.return_value = False
    action_base._make_tmp_path.return_value = '/the/tmp/path'
    self.assertEqual(action_base._execute_module(), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))

    action_base._configure_module.return_value = ('non_native_want_json', '#!/usr/bin/python', 'this is the module data', 'path')
    self.assertEqual(action_base._execute_module(), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))

    # enabling become must not change the (mocked) module result
    play_context.become = True
    play_context.become_user = 'foo'
    mock_task.become = True
    mock_task.become_user = True
    self.assertEqual(action_base._execute_module(), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))

    # test an invalid shebang return
    action_base._configure_module.return_value = ('new', '', 'this is the module data', 'path')
    action_base._is_pipelining_enabled.return_value = False
    action_base._make_tmp_path.return_value = '/the/tmp/path'
    self.assertRaises(AnsibleError, action_base._execute_module)

    # test with check mode enabled, once with support for check
    # mode and once with support disabled to raise an error
    play_context.check_mode = True
    mock_task.check_mode = True
    action_base._configure_module.return_value = ('new', '#!/usr/bin/python', 'this is the module data', 'path')
    self.assertEqual(action_base._execute_module(), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))
    action_base._supports_check_mode = False
    self.assertRaises(AnsibleError, action_base._execute_module)
def test_action_base_sudo_only_if_user_differs(self):
    """build_become_command is only used when the become user differs from
    the remote user, unless C.BECOME_ALLOW_SAME_USER is set."""
    fake_loader = MagicMock()
    fake_loader.get_basedir.return_value = os.getcwd()
    play_context = PlayContext()
    action_base = DerivedActionBase(None, None, play_context, fake_loader, None, None)
    # become user is 'root' in all cases below
    action_base.get_become_option = MagicMock(return_value='root')
    action_base._get_remote_user = MagicMock(return_value='root')

    action_base._connection = MagicMock(exec_command=MagicMock(return_value=(0, '', '')))
    action_base._connection._shell = shell = MagicMock(append_command=MagicMock(return_value=('JOINED CMD')))

    action_base._connection.become = become = MagicMock()
    become.build_become_command.return_value = 'foo'

    # same remote and become user: no become wrapping
    action_base._low_level_execute_command('ECHO', sudoable=True)
    become.build_become_command.assert_not_called()

    # different remote user: the command is wrapped
    action_base._get_remote_user.return_value = 'apo'
    action_base._low_level_execute_command('ECHO', sudoable=True, executable='/bin/csh')
    become.build_become_command.assert_called_once_with("ECHO", shell)

    become.build_become_command.reset_mock()

    # with BECOME_ALLOW_SAME_USER, wrapping happens even for the same user
    with patch.object(C, 'BECOME_ALLOW_SAME_USER', new=True):
        action_base._get_remote_user.return_value = 'root'
        action_base._low_level_execute_command('ECHO SAME', sudoable=True)
        become.build_become_command.assert_called_once_with("ECHO SAME", shell)
def test__remote_expand_user_relative_pathing(self):
    """A '..' in the reported remote home directory must be rejected."""
    action = _action_base()
    action._play_context.remote_addr = 'bar'
    action._connection.get_option.return_value = 'bar'
    action._low_level_execute_command = MagicMock(
        return_value={'stdout': b'../home/user'})
    action._connection._shell.join_path.return_value = '../home/user/foo'
    with self.assertRaises(AnsibleError) as ctx:
        action._remote_expand_user('~/foo')
    self.assertEqual(
        ctx.exception.message,
        "'bar' returned an invalid relative home directory path containing '..'",
    )
class TestActionBaseCleanReturnedData(unittest.TestCase):
    """Tests for clean_facts() stripping connection-sensitive variables."""

    def test(self):
        raw = {
            'ansible_playbook_python': '/usr/bin/python',
            'ansible_python_interpreter': '/usr/bin/python',
            'ansible_ssh_some_var': 'whatever',
            'ansible_ssh_host_key_somehost': 'some key here',
            'some_other_var': 'foo bar',
        }
        cleaned = clean_facts(raw)
        # interpreter discovery facts are removed...
        for removed_key in ('ansible_playbook_python', 'ansible_python_interpreter'):
            self.assertNotIn(removed_key, cleaned)
        # ...while host keys and unrelated facts survive
        for kept_key in ('ansible_ssh_host_key_somehost', 'some_other_var'):
            self.assertIn(kept_key, cleaned)
class TestActionBaseParseReturnedData(unittest.TestCase):
    """Tests for ActionBase._parse_returned_data()."""

    @staticmethod
    def _parse(stdout, rc=0, stderr=''):
        """Run _parse_returned_data over a canned low-level command result."""
        returned_data = {
            'rc': rc,
            'stdout': stdout,
            'stdout_lines': stdout.splitlines(),
            'stderr': stderr,
        }
        return _action_base()._parse_returned_data(returned_data)

    def test_fail_no_json(self):
        # Non-JSON stdout: marked unparsed/failed, stderr is surfaced.
        res = self._parse('foo\nbar\n', stderr='oopsy')
        self.assertFalse(res['_ansible_parsed'])
        self.assertTrue(res['failed'])
        self.assertEqual(res['module_stderr'], 'oopsy')

    def test_json_empty(self):
        res = self._parse('{}\n')
        del res['_ansible_parsed']  # always present; drop before checking
        self.assertEqual(len(res), 0)
        self.assertFalse(res)

    def test_json_facts(self):
        res = self._parse(
            '{"ansible_facts": {"foo": "bar", "ansible_blip": "blip_value"}}\n')
        self.assertTrue(res['ansible_facts'])
        self.assertIn('ansible_blip', res['ansible_facts'])
        # TODO: Should this be an AnsibleUnsafe?
        # self.assertIsInstance(res['ansible_facts'], AnsibleUnsafe)

    def test_json_facts_add_host(self):
        stdout = """{"ansible_facts": {"foo": "bar", "ansible_blip": "blip_value"},
            "add_host": {"host_vars": {"some_key": ["whatever the add_host object is"]}
            }
        }\n"""
        res = self._parse(stdout)
        self.assertTrue(res['ansible_facts'])
        self.assertIn('ansible_blip', res['ansible_facts'])
        self.assertIn('add_host', res)
        # TODO: Should this be an AnsibleUnsafe?
        # self.assertIsInstance(res['ansible_facts'], AnsibleUnsafe)
| 34,598
|
Python
|
.py
| 728
| 37.135989
| 157
| 0.614486
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,994
|
test_gather_facts.py
|
ansible_ansible/test/units/plugins/action/test_gather_facts.py
|
# (c) 2016, Saran Ahluwalia <ahlusar.ahluwalia@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import MagicMock, patch
from ansible import constants as C
from ansible.playbook.task import Task
from ansible.plugins.action.gather_facts import ActionModule as GatherFactsAction
from ansible.template import Templar
from ansible.executor import module_common
from units.mock.loader import DictDataLoader
class TestNetworkFacts(unittest.TestCase):
    """Verify that gather_facts resolves the network facts module from
    ``ansible_network_os`` when FACTS_MODULES is left at its 'smart'
    default — both for a short OS name and an FQCN one.

    Fixes applied: the unused ``res`` locals are gone, the task vars are
    no longer needlessly stashed on ``self``, and the duplicated
    setup/run/config-check sequence now lives in ``_run_gather_facts``.
    """

    task = MagicMock(Task)
    play_context = MagicMock()
    play_context.check_mode = False
    connection = MagicMock()
    fake_loader = DictDataLoader({
    })
    templar = Templar(loader=fake_loader)

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def _run_gather_facts(self, task_vars):
        """Run the gather_facts action with module execution mocked out.

        Returns the MagicMock that captured the _get_module_args call so
        callers can assert which facts module was resolved.
        """
        self.task.action = 'gather_facts'
        self.task.async_val = False
        self.task.args = {}

        plugin = GatherFactsAction(
            self.task, self.connection, self.play_context,
            loader=None, templar=self.templar, shared_loader_obj=None)
        get_module_args = MagicMock()
        plugin._get_module_args = get_module_args
        plugin._execute_module = MagicMock()
        plugin.run(task_vars=task_vars)

        # The config must still be at the 'smart' default for the
        # network_os-based resolution to have kicked in.
        facts_modules = C.config.get_config_value('FACTS_MODULES', variables=task_vars)
        self.assertEqual(facts_modules, ['smart'])
        return get_module_args

    @patch.object(module_common, '_get_collection_metadata', return_value={})
    def test_network_gather_facts_smart_facts_module(self, mock_collection_metadata):
        """A bare network_os name resolves to an ansible.legacy facts module."""
        task_vars = {'ansible_network_os': 'ios'}
        get_module_args = self._run_gather_facts(task_vars)

        # assert the correct module was found, exactly once
        self.assertEqual(get_module_args.call_count, 1)
        self.assertEqual(
            get_module_args.call_args.args,
            ('ansible.legacy.ios_facts', {'ansible_network_os': 'ios'},)
        )

    @patch.object(module_common, '_get_collection_metadata', return_value={})
    def test_network_gather_facts_smart_facts_module_fqcn(self, mock_collection_metadata):
        """An FQCN network_os resolves to the collection's facts module."""
        task_vars = {'ansible_network_os': 'cisco.ios.ios'}
        get_module_args = self._run_gather_facts(task_vars)

        self.assertEqual(get_module_args.call_count, 1)
        self.assertEqual(
            get_module_args.call_args.args,
            ('cisco.ios.ios_facts', {'ansible_network_os': 'cisco.ios.ios'},)
        )
| 3,680
|
Python
|
.py
| 77
| 41.701299
| 140
| 0.705275
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,995
|
test_reboot.py
|
ansible_ansible/test/units/plugins/action/test_reboot.py
|
# Copyright (c) 2022 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""Tests for the reboot action plugin."""
from __future__ import annotations
import os
import pytest
from ansible.errors import AnsibleConnectionFailure
from ansible.playbook.task import Task
from ansible.plugins.action.reboot import ActionModule as RebootAction
from ansible.plugins.loader import connection_loader
@pytest.fixture
def task_args(request):
    """Return the playbook task args; empty unless indirectly parametrized."""
    args = getattr(request, 'param', {})
    return args
@pytest.fixture
def module_task(mocker, task_args):
    """Construct a mocked reboot Task carrying the given args."""
    mock_task = mocker.MagicMock(Task)
    mock_task.action = 'reboot'
    mock_task.args = task_args
    # async/check/diff are all off for these tests
    for flag_attr in ('async_val', 'check_mode', 'diff'):
        setattr(mock_task, flag_attr, False)
    return mock_task
@pytest.fixture
def play_context(mocker):
    """Provide a mocked play context: check mode off, sh shell."""
    play_ctx = mocker.MagicMock()
    play_ctx.check_mode = False
    play_ctx.shell = 'sh'
    return play_ctx
@pytest.fixture
def action_plugin(play_context, module_task):
    """Build a reboot action plugin wired to a local connection."""
    local_connection = connection_loader.get('local', play_context, os.devnull)
    return RebootAction(
        module_task,
        local_connection,
        play_context,
        loader=None,
        templar=None,
        shared_loader_obj=None,
    )
# Sentinel command strings used to recognize the stubbed calls inside the
# patched _low_level_execute_command implementations below.
_SENTINEL_REBOOT_COMMAND = '/reboot-command-mock --arg'
# Variant without arguments; the tests below expect a trailing space to be
# appended when args are concatenated onto it.
_SENTINEL_SHORT_REBOOT_COMMAND = '/reboot-command-mock'
# Command used to probe whether the host is back up after the reboot.
_SENTINEL_TEST_COMMAND = 'cmd-stub'
@pytest.mark.parametrize(
    'task_args',
    (
        {
            'reboot_timeout': 5,
            'reboot_command': _SENTINEL_REBOOT_COMMAND,
            'test_command': _SENTINEL_TEST_COMMAND,
        },
        {
            'reboot_timeout': 5,
            'reboot_command': _SENTINEL_SHORT_REBOOT_COMMAND,
            'test_command': _SENTINEL_TEST_COMMAND,
        },
    ),
    ids=('reboot command with spaces', 'reboot command without spaces'),
    indirect=('task_args', ),
)
def test_reboot_command(action_plugin, mocker, monkeypatch, task_args):
    """Check that the reboot command gets called and reboot verified."""
    def _patched_low_level_execute_command(cmd, *args, **kwargs):
        # Dispatch on the exact command string; anything unexpected
        # raises KeyError and fails the test.
        return {
            _SENTINEL_TEST_COMMAND: {
                'rc': 0,
                'stderr': '<test command stub-stderr>',
                'stdout': '<test command stub-stdout>',
            },
            _SENTINEL_REBOOT_COMMAND: {
                'rc': 0,
                'stderr': '<reboot command stub-stderr>',
                'stdout': '<reboot command stub-stdout>',
            },
            f'{_SENTINEL_SHORT_REBOOT_COMMAND} ': {  # no args is concatenated
                'rc': 0,
                'stderr': '<short reboot command stub-stderr>',
                'stdout': '<short reboot command stub-stdout>',
            },
        }[cmd]

    monkeypatch.setattr(
        action_plugin,
        '_low_level_execute_command',
        _patched_low_level_execute_command,
    )
    action_plugin._connection = mocker.Mock()

    # Make the plugin see a changed boot time so the reboot "succeeds".
    monkeypatch.setattr(action_plugin, 'check_boot_time', lambda *_a, **_kw: 5)
    monkeypatch.setattr(action_plugin, 'get_distribution', mocker.MagicMock())
    monkeypatch.setattr(action_plugin, 'get_system_boot_time', lambda d: 0)

    low_level_cmd_spy = mocker.spy(action_plugin, '_low_level_execute_command')
    action_result = action_plugin.run()

    assert low_level_cmd_spy.called
    # A reboot command without spaces gets a trailing space appended
    # (empty args concatenated), matching the stub dict key above.
    expected_reboot_command = (
        task_args['reboot_command'] if ' ' in task_args['reboot_command']
        else f'{task_args["reboot_command"] !s} '
    )
    low_level_cmd_spy.assert_any_call(expected_reboot_command, sudoable=True)
    low_level_cmd_spy.assert_any_call(task_args['test_command'], sudoable=True)
    assert low_level_cmd_spy.call_count == 2
    # spy_return holds the result of the most recent call: the test command.
    assert low_level_cmd_spy.spy_return == {
        'rc': 0,
        'stderr': '<test command stub-stderr>',
        'stdout': '<test command stub-stdout>',
    }
    assert low_level_cmd_spy.spy_exception is None

    assert 'failed' not in action_result
    assert action_result == {'rebooted': True, 'changed': True, 'elapsed': 0}
@pytest.mark.parametrize(
    'task_args',
    (
        {
            'reboot_timeout': 5,
            'reboot_command': _SENTINEL_REBOOT_COMMAND,
            'test_command': _SENTINEL_TEST_COMMAND,
        },
    ),
    ids=('reboot command with spaces', ),
    indirect=('task_args', ),
)
def test_reboot_command_connection_fail(action_plugin, mocker, monkeypatch, task_args):
    """A dropped connection during the reboot command is treated as the
    expected disconnect, and the reboot is still verified as successful."""
    def _patched_low_level_execute_command(cmd, *args, **kwargs):
        # The reboot command itself "drops" the connection; only the
        # follow-up test command returns a result.
        if cmd == _SENTINEL_REBOOT_COMMAND:
            raise AnsibleConnectionFailure('Fake connection drop')
        return {
            _SENTINEL_TEST_COMMAND: {
                'rc': 0,
                'stderr': '<test command stub-stderr>',
                'stdout': '<test command stub-stdout>',
            },
        }[cmd]

    monkeypatch.setattr(
        action_plugin,
        '_low_level_execute_command',
        _patched_low_level_execute_command,
    )
    action_plugin._connection = mocker.Mock()

    # Make the plugin see a changed boot time so the reboot "succeeds".
    monkeypatch.setattr(action_plugin, 'check_boot_time', lambda *_a, **_kw: 5)
    monkeypatch.setattr(action_plugin, 'get_distribution', mocker.MagicMock())
    monkeypatch.setattr(action_plugin, 'get_system_boot_time', lambda d: 0)

    low_level_cmd_spy = mocker.spy(action_plugin, '_low_level_execute_command')
    action_result = action_plugin.run()

    assert low_level_cmd_spy.called
    low_level_cmd_spy.assert_any_call(
        task_args['reboot_command'], sudoable=True,
    )
    low_level_cmd_spy.assert_any_call(task_args['test_command'], sudoable=True)
    assert low_level_cmd_spy.call_count == 2
    # spy_return holds the result of the most recent call: the test command.
    assert low_level_cmd_spy.spy_return == {
        'rc': 0,
        'stderr': '<test command stub-stderr>',
        'stdout': '<test command stub-stdout>',
    }

    assert 'failed' not in action_result
    assert action_result == {'rebooted': True, 'changed': True, 'elapsed': 0}
def test_reboot_connection_local(action_plugin, module_task):
    """Verify that using local connection doesn't let reboot happen."""
    expected_message = (
        f'Running {module_task.action} '
        'with local connection would reboot the control node.'
    )
    expected_action_result = {
        'changed': False,
        'elapsed': 0,
        'failed': True,
        'msg': expected_message,
        'rebooted': False,
    }

    assert action_plugin.run() == expected_action_result
| 6,718
|
Python
|
.py
| 178
| 30.561798
| 92
| 0.633231
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,996
|
test_paramiko_ssh.py
|
ansible_ansible/test/units/plugins/connection/test_paramiko_ssh.py
|
#
# (c) 2020 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from io import StringIO
import pytest
from ansible.plugins.connection import paramiko_ssh as paramiko_ssh_module
from ansible.plugins.loader import connection_loader
from ansible.playbook.play_context import PlayContext
@pytest.fixture
def play_context():
    """PlayContext pre-configured with a sudo password prompt."""
    ctx = PlayContext()
    ctx.prompt = (
        '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
    )
    return ctx
@pytest.fixture()
def in_stream():
    """An empty in-memory text stream standing in for stdin."""
    return StringIO()
def test_paramiko_connection_module(play_context, in_stream):
    """The loader should hand back a paramiko_ssh Connection instance."""
    loaded = connection_loader.get('paramiko_ssh', play_context, in_stream)
    assert isinstance(loaded, paramiko_ssh_module.Connection)
def test_paramiko_connect(play_context, in_stream, mocker):
    """_connect() should mark the connection as connected without real SSH."""
    paramiko_conn = connection_loader.get('paramiko_ssh', play_context, in_stream)
    mocker.patch.object(paramiko_conn, '_connect_uncached')

    result = paramiko_conn._connect()

    assert isinstance(result, paramiko_ssh_module.Connection)
    assert result._connected is True
| 1,756
|
Python
|
.py
| 43
| 37.953488
| 81
| 0.77
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,997
|
test_ssh.py
|
ansible_ansible/test/units/plugins/connection/test_ssh.py
|
# -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from io import StringIO
from selectors import SelectorKey, EVENT_READ
import pytest
from ansible.errors import AnsibleAuthenticationFailure
import unittest
from unittest.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
import shlex
from ansible.module_utils.common.text.converters import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
from ansible.plugins.loader import connection_loader, become_loader
class TestConnectionBaseClass(unittest.TestCase):
def test_plugins_connection_ssh_module(self):
    """Directly constructing ssh.Connection yields a Connection instance."""
    play_context = PlayContext()
    play_context.prompt = (
        '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
    )
    connection = ssh.Connection(play_context, StringIO())
    self.assertIsInstance(connection, ssh.Connection)
def test_plugins_connection_ssh_basic(self):
    """Cover _connect(), the _sshpass_available() cache, and close().

    NOTE: this test mutates the module-level ssh.SSHPASS_AVAILABLE cache;
    the sequence of assignments below is deliberate.
    """
    pc = PlayContext()
    new_stdin = StringIO()
    conn = ssh.Connection(pc, new_stdin)

    # connect just returns self, so assert that
    res = conn._connect()
    self.assertEqual(conn, res)

    # when the cache is already set, it is returned as-is
    ssh.SSHPASS_AVAILABLE = False
    self.assertFalse(conn._sshpass_available())

    ssh.SSHPASS_AVAILABLE = True
    self.assertTrue(conn._sshpass_available())

    with patch('subprocess.Popen') as p:
        # cache unset (None): availability is probed by launching sshpass
        ssh.SSHPASS_AVAILABLE = None
        p.return_value = MagicMock()
        self.assertTrue(conn._sshpass_available())

        # probe failing with OSError means sshpass is not available
        ssh.SSHPASS_AVAILABLE = None
        p.return_value = None
        p.side_effect = OSError()
        self.assertFalse(conn._sshpass_available())

    conn.close()
    self.assertFalse(conn._connected)
def test_plugins_connection_ssh__build_command(self):
    """_build_command() should run cleanly with every option mocked to ''. """
    connection = connection_loader.get('ssh', PlayContext(), StringIO())
    connection.get_option = MagicMock(return_value="")
    connection._build_command('ssh', 'ssh')
def test_plugins_connection_ssh_exec_command(self):
    """exec_command() should pass through _build_command/_run without error."""
    connection = connection_loader.get('ssh', PlayContext(), StringIO())
    connection._build_command = MagicMock(return_value='ssh something something')
    connection._run = MagicMock(return_value=(0, 'stdout', 'stderr'))
    connection.get_option = MagicMock(return_value=True)

    # once without and once with in_data
    connection.exec_command('ssh')
    connection.exec_command('ssh', 'this is some data')
def test_plugins_connection_ssh__examine_output(self):
    """Exercise _examine_output's become-state detection.

    The become plugin's check_* hooks are replaced with simple substring
    matchers, then _examine_output is fed output chunks and the resulting
    ``conn._flags`` and filtered output are verified for each scenario:
    password prompt seen, escalation success, a success token embedded in
    an ssh ``debug1:`` line (which must NOT count as success), incorrect
    password, and missing password.
    """
    pc = PlayContext()
    new_stdin = StringIO()
    become_success_token = b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
    conn = connection_loader.get('ssh', pc, new_stdin)
    conn.set_become_plugin(become_loader.get('sudo'))
    conn.become.check_password_prompt = MagicMock()
    conn.become.check_success = MagicMock()
    conn.become.check_incorrect_password = MagicMock()
    conn.become.check_missing_password = MagicMock()

    # simple substring matchers standing in for the real become-plugin hooks
    def _check_password_prompt(line):
        return b'foo' in line

    def _check_become_success(line):
        return become_success_token in line

    def _check_incorrect_password(line):
        return b'incorrect password' in line

    def _check_missing_password(line):
        return b'bad password' in line

    # test examining output for prompt
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )
    pc.prompt = True
    # override become plugin
    conn.become.prompt = True
    conn.become.check_password_prompt = MagicMock(side_effect=_check_password_prompt)
    conn.become.check_success = MagicMock(side_effect=_check_become_success)
    conn.become.check_incorrect_password = MagicMock(side_effect=_check_incorrect_password)
    conn.become.check_missing_password = MagicMock(side_effect=_check_missing_password)

    def get_option(option):
        # only the become password should ever be requested while prompting
        assert option == 'become_pass'
        return 'password'

    conn.become.get_option = get_option
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
    # the prompt line ('foo') is consumed; the trailing partial line comes back unprocessed
    self.assertEqual(output, b'line 1\nline 2\nline 3\n')
    self.assertEqual(unprocessed, b'this should be the remainder')
    self.assertTrue(conn._flags['become_prompt'])
    self.assertFalse(conn._flags['become_success'])
    self.assertFalse(conn._flags['become_error'])
    self.assertFalse(conn._flags['become_nopasswd_error'])
    # test examining output for become prompt
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )
    pc.prompt = False
    conn.become.prompt = False
    pc.success_key = str(become_success_token)
    conn.become.success = str(become_success_token)
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\n%s\nline 3\n' % become_success_token, False)
    self.assertEqual(output, b'line 1\nline 2\nline 3\n')
    self.assertEqual(unprocessed, b'')
    self.assertFalse(conn._flags['become_prompt'])
    self.assertTrue(conn._flags['become_success'])
    self.assertFalse(conn._flags['become_error'])
    self.assertFalse(conn._flags['become_nopasswd_error'])
    # test we dont detect become success from ssh debug: lines
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )
    pc.prompt = False
    conn.become.prompt = True
    pc.success_key = str(become_success_token)
    conn.become.success = str(become_success_token)
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\ndebug1: %s\nline 3\n' % become_success_token, False)
    # the debug1: line is passed through untouched and success is NOT flagged
    self.assertEqual(output, b'line 1\nline 2\ndebug1: %s\nline 3\n' % become_success_token)
    self.assertEqual(unprocessed, b'')
    self.assertFalse(conn._flags['become_success'])
    # test examining output for become failure
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )
    pc.prompt = False
    conn.become.prompt = False
    pc.success_key = None
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nincorrect password\n', True)
    self.assertEqual(output, b'line 1\nline 2\nincorrect password\n')
    self.assertEqual(unprocessed, b'')
    self.assertFalse(conn._flags['become_prompt'])
    self.assertFalse(conn._flags['become_success'])
    self.assertTrue(conn._flags['become_error'])
    self.assertFalse(conn._flags['become_nopasswd_error'])
    # test examining output for missing password
    conn._flags = dict(
        become_prompt=False,
        become_success=False,
        become_error=False,
        become_nopasswd_error=False,
    )
    pc.prompt = False
    conn.become.prompt = False
    pc.success_key = None
    output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nbad password\n', True)
    self.assertEqual(output, b'line 1\nbad password\n')
    self.assertEqual(unprocessed, b'')
    self.assertFalse(conn._flags['become_prompt'])
    self.assertFalse(conn._flags['become_success'])
    self.assertFalse(conn._flags['become_error'])
    self.assertTrue(conn._flags['become_nopasswd_error'])
@patch('time.sleep')
@patch('os.path.exists')
def test_plugins_connection_ssh_put_file(self, mock_ospe, mock_sleep):
    """Verify put_file: sftp stdin payload, unicode paths, scp fallback and error rcs.

    _bare_run is mocked, so assertions check the exact command/in_data it
    receives and that non-zero return codes map to the right exceptions.
    """
    pc = PlayContext()
    new_stdin = StringIO()
    conn = connection_loader.get('ssh', pc, new_stdin)
    conn._build_command = MagicMock()
    conn._bare_run = MagicMock()
    mock_ospe.return_value = True
    conn._build_command.return_value = 'some command to run'
    conn._bare_run.return_value = (0, '', '')
    conn.host = "some_host"
    conn.set_option('reconnection_retries', 9)
    conn.set_option('ssh_transfer_method', None)  # default is smart
    # Test when SFTP works
    expected_in_data = b' '.join((b'put', to_bytes(shlex.quote('/path/to/in/file')), to_bytes(shlex.quote('/path/to/dest/file')))) + b'\n'
    conn.put_file('/path/to/in/file', '/path/to/dest/file')
    conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
    # Test filenames with unicode
    expected_in_data = b' '.join((b'put',
                                  to_bytes(shlex.quote('/path/to/in/file/with/unicode-fö〩')),
                                  to_bytes(shlex.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
    conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
    conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
    # Test when SFTP doesn't work but SCP does
    conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
    conn.put_file('/path/to/in/file', '/path/to/dest/file')
    conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
    conn._bare_run.side_effect = None
    # Test that a non-zero rc raises an error
    conn.set_option('ssh_transfer_method', 'sftp')
    conn._bare_run.return_value = (1, 'stdout', 'some errors')
    self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
    # Test that rc=255 raises an error
    conn._bare_run.return_value = (255, 'stdout', 'some errors')
    self.assertRaises(AnsibleConnectionFailure, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
    # Test that rc=256 raises an error
    conn._bare_run.return_value = (256, 'stdout', 'some errors')
    self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
    # Test that a not-found path raises an error
    mock_ospe.return_value = False
    conn._bare_run.return_value = (0, 'stdout', '')
    self.assertRaises(AnsibleFileNotFound, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
@patch('time.sleep')
def test_plugins_connection_ssh_fetch_file(self, mock_sleep):
    """Verify fetch_file: sftp stdin payload, scp fallback, unicode paths and error rcs.

    Mirror image of the put_file test: _bare_run is mocked and the 'get'
    command payload plus exception mapping for rc 1/255/256 is asserted.
    """
    pc = PlayContext()
    new_stdin = StringIO()
    conn = connection_loader.get('ssh', pc, new_stdin)
    conn._build_command = MagicMock()
    conn._bare_run = MagicMock()
    conn._load_name = 'ssh'
    conn._build_command.return_value = 'some command to run'
    conn._bare_run.return_value = (0, '', '')
    conn.host = "some_host"
    conn.set_option('reconnection_retries', 9)
    conn.set_option('ssh_transfer_method', None)  # default is smart
    # Test when SFTP works
    expected_in_data = b' '.join((b'get', to_bytes(shlex.quote('/path/to/in/file')), to_bytes(shlex.quote('/path/to/dest/file')))) + b'\n'
    conn.set_options({})
    conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
    conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
    # Test when SFTP doesn't work but SCP does
    conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
    conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
    conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
    conn._bare_run.side_effect = None
    # Test when filename is unicode
    expected_in_data = b' '.join((b'get',
                                  to_bytes(shlex.quote('/path/to/in/file/with/unicode-fö〩')),
                                  to_bytes(shlex.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
    conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
    conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
    conn._bare_run.side_effect = None
    # Test that a non-zero rc raises an error
    conn.set_option('ssh_transfer_method', 'sftp')
    conn._bare_run.return_value = (1, 'stdout', 'some errors')
    self.assertRaises(AnsibleError, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')
    # Test that rc=255 raises an error
    conn._bare_run.return_value = (255, 'stdout', 'some errors')
    self.assertRaises(AnsibleConnectionFailure, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')
    # Test that rc=256 raises an error
    conn._bare_run.return_value = (256, 'stdout', 'some errors')
    self.assertRaises(AnsibleError, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')
class MockSelector(object):
    """Stand-in for ``selectors.DefaultSelector`` that counts watched files.

    ``register``/``unregister`` are MagicMocks (so call counts can be
    asserted) whose side effects keep ``files_watched`` up to date; the
    remaining selector methods are plain MagicMocks with no behaviour.
    """

    def __init__(self):
        self.files_watched = 0
        self.register = MagicMock(side_effect=self._register)
        self.unregister = MagicMock(side_effect=self._unregister)
        self.close = MagicMock()
        self.get_map = MagicMock()
        self.select = MagicMock()

    def _register(self, *args, **kwargs):
        # invoked via the register() mock: one more file is being watched
        self.files_watched = self.files_watched + 1

    def _unregister(self, *args, **kwargs):
        # invoked via the unregister() mock: one file stopped being watched
        self.files_watched = self.files_watched - 1
@pytest.fixture
def mock_run_env(request, mocker):
    """Build an ssh Connection with its process/selector/pty machinery mocked.

    Attaches to the requesting test class: ``pc``, ``conn`` (with
    _send_initial_data/_examine_output/_terminate_process mocked),
    ``mock_popen_res`` (the fake subprocess whose stdin/stdout/stderr
    report fds 1000-1002), ``mock_popen``, ``mock_selector`` (a
    MockSelector instance) and ``mock_openpty``.
    """
    pc = PlayContext()
    new_stdin = StringIO()
    conn = connection_loader.get('ssh', pc, new_stdin)
    conn.set_become_plugin(become_loader.get('sudo'))
    conn._send_initial_data = MagicMock()
    conn._examine_output = MagicMock()
    conn._terminate_process = MagicMock()
    conn._load_name = 'ssh'
    conn.sshpass_pipe = [MagicMock(), MagicMock()]
    request.cls.pc = pc
    request.cls.conn = conn
    mock_popen_res = MagicMock()
    mock_popen_res.poll = MagicMock()
    mock_popen_res.wait = MagicMock()
    mock_popen_res.stdin = MagicMock()
    mock_popen_res.stdin.fileno.return_value = 1000
    mock_popen_res.stdout = MagicMock()
    mock_popen_res.stdout.fileno.return_value = 1001
    mock_popen_res.stderr = MagicMock()
    mock_popen_res.stderr.fileno.return_value = 1002
    mock_popen_res.returncode = 0
    request.cls.mock_popen_res = mock_popen_res
    # every subprocess.Popen call yields the canned fake process above
    mock_popen = mocker.patch('subprocess.Popen', return_value=mock_popen_res)
    request.cls.mock_popen = mock_popen
    request.cls.mock_selector = MockSelector()
    mocker.patch('selectors.DefaultSelector', lambda: request.cls.mock_selector)
    # neutralize pty/fcntl/os-level plumbing used by the ssh plugin's _bare_run
    request.cls.mock_openpty = mocker.patch('pty.openpty')
    mocker.patch('fcntl.fcntl')
    mocker.patch('os.write')
    mocker.patch('os.close')
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRun(object):
    """Drive Connection._run through scripted selector/stdout/stderr events."""
    # FIXME:
    # These tests are little more than a smoketest. Need to enhance them
    # a bit to check that they're calling the relevant functions and making
    # complete coverage of the code paths
    def test_no_escalation(self):
        """Plain _run with no become: output is accumulated and initial data sent."""
        self.mock_popen_res.stdout.read.side_effect = [b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"my_stderr"]
        # scripted select() rounds: two stdout reads, one stderr read, then done
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        assert return_code == 0
        assert b_stdout == b'my_stdout\nsecond_line'
        assert b_stderr == b'my_stderr'
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'

    def test_with_password(self):
        # test with a password set to trigger the sshpass write
        self.pc.password = '12345'
        self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run(["ssh", "is", "a", "cmd"], "this is more data")
        assert return_code == 0
        assert b_stdout == b'some data'
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is more data'

    # NOTE(review): 'sourice' looks like a typo for 'source'; harmless here
    # since the argument is only received positionally via the side_effect.
    def _password_with_prompt_examine_output(self, sourice, state, b_chunk, sudoable):
        """side_effect for _examine_output: advance the become prompt/success flags."""
        if state == 'awaiting_prompt':
            self.conn._flags['become_prompt'] = True
        else:
            assert state == 'awaiting_escalation'
            self.conn._flags['become_success'] = True
        return (b'', b'')

    def test_password_with_prompt(self):
        # test with password prompting enabled
        self.pc.password = None
        self.conn.become.prompt = b'Password:'
        self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
        self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"Success", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ),
             (SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        assert return_code == 0
        # prompt/success chunks are consumed by _examine_output, so nothing remains
        assert b_stdout == b''
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'

    def test_password_with_become(self):
        # test with some become settings
        self.pc.prompt = b'Password:'
        self.conn.become.prompt = b'Password:'
        self.pc.become = True
        self.pc.success_key = 'BECOME-SUCCESS-abcdefg'
        self.conn.become._id = 'abcdefg'
        self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
        self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"BECOME-SUCCESS-abcdefg", b"abc"]
        self.mock_popen_res.stderr.read.side_effect = [b"123"]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        # the become password must have been written and flushed to stdin
        self.mock_popen_res.stdin.flush.assert_called_once_with()
        assert return_code == 0
        assert b_stdout == b'abc'
        assert b_stderr == b'123'
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'

    def test_password_without_data(self):
        # simulate no data input but Popen using new pty's fails
        self.mock_popen.return_value = None
        self.mock_popen.side_effect = [OSError(), self.mock_popen_res]
        # simulate no data input
        self.mock_openpty.return_value = (98, 99)
        self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "")
        assert return_code == 0
        assert b_stdout == b'some data'
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        # with empty input no initial data should be sent
        assert self.conn._send_initial_data.called is False
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRetries(object):
    """Verify the ssh plugin's retry behaviour around rc=255 and auth failures."""

    def test_incorrect_password(self, monkeypatch):
        """An sshpass auth failure (rc 5) must abort immediately, skipping retries."""
        self.conn.set_option('host_key_checking', False)
        self.conn.set_option('reconnection_retries', 5)
        self.mock_popen_res.stdout.read.side_effect = [b'']
        self.mock_popen_res.stderr.read.side_effect = [b'Permission denied, please try again.\r\n']
        # sshpass exit code 5 == invalid/incorrect password
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[5] * 4)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
        ]
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = [b'sshpass', b'-d41', b'ssh', b'-C']
        exception_info = pytest.raises(AnsibleAuthenticationFailure, self.conn.exec_command, 'sshpass', 'some data')
        assert exception_info.value.message == ('Invalid/incorrect username/password. Skipping remaining 5 retries to prevent account lockout: '
                                                'Permission denied, please try again.')
        assert self.mock_popen.call_count == 1

    def test_retry_then_success(self, monkeypatch):
        """rc=255 attempts are retried until a run succeeds."""
        self.conn.set_option('host_key_checking', False)
        self.conn.set_option('reconnection_retries', 3)
        monkeypatch.setattr('time.sleep', lambda x: None)
        self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
        # first attempt reports 255 (retryable), second attempt reports 0
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 3 + [0] * 4)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []
        ]
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'ssh'
        return_code, b_stdout, b_stderr = self.conn.exec_command('ssh', 'some data')
        assert return_code == 0
        assert b_stdout == b'my_stdout\nsecond_line'
        assert b_stderr == b'my_stderr'

    def test_multiple_failures(self, monkeypatch):
        """Persistent rc=255 exhausts all retries (1 try + 9 retries) then raises."""
        self.conn.set_option('host_key_checking', False)
        self.conn.set_option('reconnection_retries', 9)
        monkeypatch.setattr('time.sleep', lambda x: None)
        self.mock_popen_res.stdout.read.side_effect = [b""] * 10
        self.mock_popen_res.stderr.read.side_effect = [b""] * 10
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 30)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
        ] * 10
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'ssh'
        pytest.raises(AnsibleConnectionFailure, self.conn.exec_command, 'ssh', 'some data')
        assert self.mock_popen.call_count == 10

    # NOTE(review): method name has a typo ('abitrary' -> 'arbitrary'); left
    # as-is since pytest discovers tests by name.
    def test_abitrary_exceptions(self, monkeypatch):
        """Unexpected exceptions from Popen are also retried before propagating."""
        self.conn.set_option('host_key_checking', False)
        self.conn.set_option('reconnection_retries', 9)
        monkeypatch.setattr('time.sleep', lambda x: None)
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'ssh'
        self.mock_popen.side_effect = [Exception('bad')] * 10
        pytest.raises(Exception, self.conn.exec_command, 'ssh', 'some data')
        assert self.mock_popen.call_count == 10

    def test_put_file_retries(self, monkeypatch):
        """put_file retries a failed (rc=255) transfer and succeeds on the second try."""
        self.conn.set_option('host_key_checking', False)
        self.conn.set_option('reconnection_retries', 3)
        monkeypatch.setattr('time.sleep', lambda x: None)
        monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
        self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []
        ]
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'sftp'
        return_code, b_stdout, b_stderr = self.conn.put_file('/path/to/in/file', '/path/to/dest/file')
        assert return_code == 0
        assert b_stdout == b"my_stdout\nsecond_line"
        assert b_stderr == b"my_stderr"
        assert self.mock_popen.call_count == 2

    def test_fetch_file_retries(self, monkeypatch):
        """fetch_file retries a failed (rc=255) transfer and succeeds on the second try."""
        self.conn.set_option('host_key_checking', False)
        self.conn.set_option('reconnection_retries', 3)
        monkeypatch.setattr('time.sleep', lambda x: None)
        self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []
        ]
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'sftp'
        return_code, b_stdout, b_stderr = self.conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        assert return_code == 0
        assert b_stdout == b"my_stdout\nsecond_line"
        assert b_stderr == b"my_stderr"
        assert self.mock_popen.call_count == 2
| 30,734
|
Python
|
.py
| 557
| 45.969479
| 144
| 0.64407
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,998
|
test_connection.py
|
ansible_ansible/test/units/plugins/connection/test_connection.py
|
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from io import StringIO
import unittest
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ConnectionBase
from ansible.plugins.loader import become_loader
class NoOpConnection(ConnectionBase):
    """Minimal concrete ConnectionBase used to exercise base-class behaviour.

    Every abstract member is overridden with a do-nothing stub so the class
    can be instantiated; none of the stubs is ever invoked by the tests.
    """

    @property
    def transport(self):
        """This method is never called by unit tests."""

    def _connect(self):
        """This method is never called by unit tests."""

    def exec_command(self):
        """This method is never called by unit tests."""

    def put_file(self):
        """This method is never called by unit tests."""

    def fetch_file(self):
        """This method is never called by unit tests."""

    def close(self):
        """This method is never called by unit tests."""
class TestConnectionBaseClass(unittest.TestCase):
    """Tests for ConnectionBase: abstract enforcement and become prompt detection."""

    def setUp(self):
        self.play_context = PlayContext()
        self.play_context.prompt = (
            '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
        )
        self.in_stream = StringIO()

    def tearDown(self):
        pass

    def test_subclass_error(self):
        """Instantiating a subclass that leaves abstract methods unimplemented fails."""
        class ConnectionModule1(ConnectionBase):
            pass
        with self.assertRaises(TypeError):
            ConnectionModule1()  # pylint: disable=abstract-class-instantiated

    def test_subclass_success(self):
        self.assertIsInstance(NoOpConnection(self.play_context, self.in_stream), NoOpConnection)

    def test_check_password_prompt(self):
        """check_password_prompt must find the sudo prompt in varied ssh output forms."""
        # plain output: prompt at start of a line
        local = (
            b'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: \n'
            b'BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq\n'
        )
        # verbose (-vvvv) ssh output with pipelining: prompt embedded among debug lines
        ssh_pipelining_vvvv = b"""
debug3: mux_master_read_cb: channel 1 packet type 0x10000002 len 251
debug2: process_mux_new_session: channel 1: request tty 0, X 1, agent 1, subsys 0, term "xterm-256color", cmd "/bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'", env 0
debug3: process_mux_new_session: got fds stdin 9, stdout 10, stderr 11
debug2: client_session2_setup: id 2
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'
debug2: channel 2: request exec confirm 1
debug2: channel 2: rcvd ext data 67
[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: debug2: channel 2: written 67 to efd 11
BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq
debug3: receive packet: type 98
"""  # noqa
        # verbose ssh output without pipelining (tty requested)
        ssh_nopipelining_vvvv = b"""
debug3: mux_master_read_cb: channel 1 packet type 0x10000002 len 251
debug2: process_mux_new_session: channel 1: request tty 1, X 1, agent 1, subsys 0, term "xterm-256color", cmd "/bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'", env 0
debug3: mux_client_request_session: session request sent
debug3: send packet: type 98
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'
debug2: channel 2: request exec confirm 1
debug2: exec request accepted on channel 2
[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: debug3: receive packet: type 2
debug3: Received SSH2_MSG_IGNORE
debug3: Received SSH2_MSG_IGNORE
BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq
debug3: receive packet: type 98
"""  # noqa
        ssh_novvvv = (
            b'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: \n'
            b'BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq\n'
        )
        # prompt preceded by unrelated sudo warnings (e.g. hostname resolution)
        dns_issue = (
            b'timeout waiting for privilege escalation password prompt:\n'
            b'sudo: sudo: unable to resolve host tcloud014\n'
            b'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: \n'
            b'BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq\n'
        )
        nothing = b''
        # prompt text appearing only inside a sent command, not as an actual prompt
        in_front = b"""
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo
"""
        c = NoOpConnection(self.play_context, self.in_stream)
        c.set_become_plugin(become_loader.get('sudo'))
        c.become.prompt = '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
        self.assertTrue(c.become.check_password_prompt(local))
        self.assertTrue(c.become.check_password_prompt(ssh_pipelining_vvvv))
        self.assertTrue(c.become.check_password_prompt(ssh_nopipelining_vvvv))
        self.assertTrue(c.become.check_password_prompt(ssh_novvvv))
        self.assertTrue(c.become.check_password_prompt(dns_issue))
        self.assertFalse(c.become.check_password_prompt(nothing))
        self.assertFalse(c.become.check_password_prompt(in_front))
| 5,919
|
Python
|
.py
| 107
| 49.841121
| 320
| 0.721694
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,999
|
test_psrp.py
|
ansible_ansible/test/units/plugins/connection/test_psrp.py
|
# -*- coding: utf-8 -*-
# (c) 2018, Jordan Borean <jborean@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
import sys
import typing as t
from io import StringIO
from unittest.mock import MagicMock
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader
@pytest.fixture(autouse=True)
def psrp_connection():
    """Imports the psrp connection plugin with a mocked pypsrp module for testing"""
    # Snapshot sys.modules so the mocked entries can be rolled back afterwards
    saved_modules = sys.modules.copy()
    try:
        mocked_module_names = (
            "pypsrp.complex_objects",
            "pypsrp.exceptions",
            "pypsrp.host",
            "pypsrp.powershell",
            "pypsrp.shell",
            "pypsrp.wsman",
            "requests.exceptions",
        )
        for module_name in mocked_module_names:
            sys.modules[module_name] = MagicMock()

        from ansible.plugins.connection import psrp

        # Remember the plugin's import-state flags before handing it to the test
        saved_has_pypsrp = psrp.HAS_PYPSRP
        saved_import_error = psrp.PYPSRP_IMP_ERR

        yield psrp

        psrp.HAS_PYPSRP = saved_has_pypsrp
        psrp.PYPSRP_IMP_ERR = saved_import_error
    finally:
        # Restore sys.modules back to its pre-mocking state
        sys.modules = saved_modules
class TestConnectionPSRP(object):
    """Table-driven checks of psrp connection option handling.

    OPTIONS_DATA pairs the variables fed to set_options() with the
    connection attributes expected after _build_kwargs(); for
    '_psrp_conn_kwargs' only the listed keys are compared.
    """

    OPTIONS_DATA: tuple[tuple[dict[str, t.Any], dict[str, t.Any]], ...] = (
        # default options
        (
            {},
            {
                '_psrp_auth': 'negotiate',
                '_psrp_configuration_name': 'Microsoft.PowerShell',
                '_psrp_host': 'inventory_hostname',
                '_psrp_conn_kwargs': {
                    'server': 'inventory_hostname',
                    'port': 5986,
                    'username': None,
                    'password': None,
                    'ssl': True,
                    'path': 'wsman',
                    'auth': 'negotiate',
                    'cert_validation': True,
                    'connection_timeout': 30,
                    'encryption': 'auto',
                    'proxy': None,
                    'no_proxy': False,
                    'max_envelope_size': 153600,
                    'operation_timeout': 20,
                    'certificate_key_pem': None,
                    'certificate_pem': None,
                    'credssp_auth_mechanism': 'auto',
                    'credssp_disable_tlsv1_2': False,
                    'credssp_minimum_version': 2,
                    'negotiate_delegate': None,
                    'negotiate_hostname_override': None,
                    'negotiate_send_cbt': True,
                    'negotiate_service': 'WSMAN',
                    'read_timeout': 30,
                    'reconnection_backoff': 2.0,
                    'reconnection_retries': 0,
                },
                '_psrp_port': 5986,
                '_psrp_user': None
            },
        ),
        # ssl=False when port defined to 5985
        (
            {'ansible_port': '5985'},
            {
                '_psrp_port': 5985,
                '_psrp_conn_kwargs': {'ssl': False},
            },
        ),
        # ssl=True when port defined to not 5985
        (
            {'ansible_port': 1234},
            {
                '_psrp_port': 1234,
                '_psrp_conn_kwargs': {'ssl': True},
            },
        ),
        # port 5986 when ssl=True
        (
            {'ansible_psrp_protocol': 'https'},
            {
                '_psrp_port': 5986,
                '_psrp_conn_kwargs': {'ssl': True},
            },
        ),
        # port 5985 when ssl=False
        (
            {'ansible_psrp_protocol': 'http'},
            {
                '_psrp_port': 5985,
                '_psrp_conn_kwargs': {'ssl': False},
            },
        ),
        # psrp extras
        (
            {'ansible_psrp_mock_test1': True},
            {
                '_psrp_conn_kwargs': {
                    'server': 'inventory_hostname',
                    'port': 5986,
                    'username': None,
                    'password': None,
                    'ssl': True,
                    'path': 'wsman',
                    'auth': 'negotiate',
                    'cert_validation': True,
                    'connection_timeout': 30,
                    'encryption': 'auto',
                    'proxy': None,
                    'no_proxy': False,
                    'max_envelope_size': 153600,
                    'operation_timeout': 20,
                    'certificate_key_pem': None,
                    'certificate_pem': None,
                    'credssp_auth_mechanism': 'auto',
                    'credssp_disable_tlsv1_2': False,
                    'credssp_minimum_version': 2,
                    'negotiate_delegate': None,
                    'negotiate_hostname_override': None,
                    'negotiate_send_cbt': True,
                    'negotiate_service': 'WSMAN',
                    'read_timeout': 30,
                    'reconnection_backoff': 2.0,
                    'reconnection_retries': 0,
                },
            },
        ),
        # cert validation through string repr of bool
        (
            {'ansible_psrp_cert_validation': 'ignore'},
            {
                '_psrp_conn_kwargs': {'cert_validation': False},
            },
        ),
        # cert validation path
        (
            {'ansible_psrp_cert_trust_path': '/path/cert.pem'},
            {
                '_psrp_conn_kwargs': {'cert_validation': '/path/cert.pem'},
            },
        ),
        # ignore proxy boolean value
        (
            {'ansible_psrp_ignore_proxy': 'true'},
            {
                '_psrp_conn_kwargs': {'no_proxy': True},
            }
        ),
        # ignore proxy false-ish value
        (
            {'ansible_psrp_ignore_proxy': 'n'},
            {
                '_psrp_conn_kwargs': {'no_proxy': False},
            }
        ),
        # ignore proxy true-ish value
        (
            {'ansible_psrp_ignore_proxy': 'y'},
            {
                '_psrp_conn_kwargs': {'no_proxy': True},
            }
        ),
    )

    # NOTE(review): the generator expression is equivalent to passing
    # OPTIONS_DATA directly; kept as-is for byte-identical behaviour.
    @pytest.mark.parametrize('options, expected',
                             ((o, e) for o, e in OPTIONS_DATA))
    def test_set_options(self, options, expected):
        """Apply one options dict and verify the resulting connection attributes."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('psrp', pc, new_stdin)
        conn.set_options(var_options=options)
        conn._build_kwargs()
        # NOTE(review): the loop variable shadows the outer 'expected' dict;
        # works because .items() is evaluated once, but rename would be clearer.
        for attr, expected in expected.items():
            actual = getattr(conn, attr)
            if attr == '_psrp_conn_kwargs':
                # only compare the keys the test case cares about
                for k, v in expected.items():
                    actual_v = actual[k]
                    assert actual_v == v, \
                        f"psrp Protocol kwarg '{k}', actual '{actual_v}' != expected '{v}'"
            else:
                assert actual == expected, \
                    "psrp attr '%s', actual '%s' != expected '%s'"\
                    % (attr, actual, expected)
| 7,302
|
Python
|
.py
| 196
| 23.581633
| 92
| 0.468322
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|