| id (int64, 0 to 458k) | file_name (string, 4 to 119 chars) | file_path (string, 14 to 227 chars) | content (string, 24 to 9.96M chars) | size (int64, 24 to 9.96M) | language (string, 1 distinct value) | extension (string, 14 distinct values) | total_lines (int64, 1 to 219k) | avg_line_length (float64, 2.52 to 4.63M) | max_line_length (int64, 5 to 9.91M) | alphanum_fraction (float64, 0 to 1) | repo_name (string, 7 to 101 chars) | repo_stars (int64, 100 to 139k) | repo_forks (int64, 0 to 26.4k) | repo_open_issues (int64, 0 to 2.27k) | repo_license (string, 12 distinct values) | repo_extraction_date (string, 433 distinct values) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
13,700
|
test_hello.py
|
ansible_ansible/test/integration/targets/ansible-test-units/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py
|
from __future__ import annotations
from .....plugins.modules.hello import say_hello
def test_say_hello():
    assert say_hello('Ansibull') == dict(message='Hello Ansibull')
| 176
|
Python
|
.py
| 4
| 41.25
| 66
| 0.739645
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,701
|
test_my_util.py
|
ansible_ansible/test/integration/targets/ansible-test-units/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py
|
from __future__ import annotations
from .....plugins.module_utils.my_util import hello
def test_hello():
    assert hello('Ansibull') == 'Hello Ansibull'
| 157
|
Python
|
.py
| 4
| 36.5
| 51
| 0.733333
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,702
|
hello.py
|
ansible_ansible/test/integration/targets/ansible-test-units/ansible_collections/ns/col/plugins/modules/hello.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: hello
short_description: Hello test module
description: Hello test module.
options:
  name:
    description: Name to say hello to.
    type: str
author:
  - Ansible Core Team
"""
EXAMPLES = """
- hello:
"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
from ..module_utils.my_util import hello
def main():
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str'),
        ),
    )
    module.exit_json(**say_hello(module.params['name']))
def say_hello(name):
    return dict(
        message=hello(name),
    )
if __name__ == '__main__':
    main()
| 791
|
Python
|
.py
| 33
| 20.424242
| 92
| 0.671582
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,703
|
my_util.py
|
ansible_ansible/test/integration/targets/ansible-test-units/ansible_collections/ns/col/plugins/module_utils/my_util.py
|
from __future__ import annotations
def hello(name):
    return 'Hello %s' % name
| 83
|
Python
|
.py
| 3
| 24.666667
| 34
| 0.705128
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,704
|
make_collection_dir.py
|
ansible_ansible/test/integration/targets/ansible-galaxy-collection-cli/files/make_collection_dir.py
|
from __future__ import annotations
import sys
import pathlib
paths = [
'ns-col-1.0.0.tar.gz',
'foo.txt',
'README.rst',
'GPL',
'LICENSES/MIT.txt',
'.reuse/dep5',
'artifacts/.gitkeep',
'plugins/vars/bar.yml',
'plugins/vars/bar.yml.license',
'plugins/vars/baz.yaml',
'plugins/vars/test.py',
'plugins/vars/docs.md',
'plugins/netconf/bar.yml',
'plugins/netconf/baz.yaml',
'plugins/netconf/test.py',
'plugins/netconf/docs.md',
'plugins/cache/bar.yml',
'plugins/cache/baz.yaml',
'plugins/cache/test.py',
'plugins/cache/docs.md',
'plugins/test/bar.yml',
'plugins/test/baz.yaml',
'plugins/test/test.py',
'plugins/test/docs.md',
'plugins/connection/bar.yml',
'plugins/connection/baz.yaml',
'plugins/connection/test.py',
'plugins/connection/docs.md',
'plugins/doc_fragments/bar.yml',
'plugins/doc_fragments/baz.yaml',
'plugins/doc_fragments/test.py',
'plugins/doc_fragments/docs.md',
'plugins/shell/bar.yml',
'plugins/shell/baz.yaml',
'plugins/shell/test.py',
'plugins/shell/docs.md',
'plugins/terminal/bar.yml',
'plugins/terminal/baz.yaml',
'plugins/terminal/test.py',
'plugins/terminal/docs.md',
'plugins/lookup/bar.yml',
'plugins/lookup/baz.yaml',
'plugins/lookup/test.py',
'plugins/lookup/docs.md',
'plugins/httpapi/bar.yml',
'plugins/httpapi/baz.yaml',
'plugins/httpapi/test.py',
'plugins/httpapi/docs.md',
'plugins/action/bar.yml',
'plugins/action/baz.yaml',
'plugins/action/test.py',
'plugins/action/docs.md',
'plugins/inventory/bar.yml',
'plugins/inventory/baz.yaml',
'plugins/inventory/test.py',
'plugins/inventory/docs.md',
'plugins/module_utils/bar.ps1',
'plugins/module_utils/test.py',
'plugins/module_utils/docs.md',
'plugins/module_utils/baz.yml',
'plugins/become/bar.yml',
'plugins/become/baz.yaml',
'plugins/become/test.py',
'plugins/become/docs.md',
'plugins/callback/bar.yml',
'plugins/callback/baz.yaml',
'plugins/callback/test.py',
'plugins/callback/docs.md',
'plugins/filter/bar.yml',
'plugins/filter/baz.yaml',
'plugins/filter/test.py',
'plugins/filter/docs.md',
'plugins/cliconf/bar.yml',
'plugins/cliconf/baz.yaml',
'plugins/cliconf/test.py',
'plugins/cliconf/docs.md',
'plugins/modules/foo.yml',
'plugins/modules/qux.ps1',
'plugins/modules/test2.py',
'plugins/modules/bar.yaml',
'plugins/modules/docs.md',
'plugins/strategy/bar.yml',
'plugins/strategy/baz.yaml',
'plugins/strategy/test.py',
'plugins/strategy/docs.md',
'tests/integration/targets/foo/aliases',
'tests/integration/targets/foo/tasks/main.yml',
'tests/output/foo',
'tests/units/test_foo.py',
'roles/foo/vars/main.yaml',
'roles/foo/tasks/main.yml',
'roles/foo/templates/foo.j2',
'playbooks/baz.json',
'playbooks/foo.yml',
'playbooks/bar.yaml',
'docs/foobar/qux/baz.txt',
'docs/foobar/qux/bar',
'docs/docsite/bar.yml',
'docs/docsite/baz.yaml',
'docs/docsite/apple.j2',
'docs/docsite/qux.json',
'docs/docsite/orange.txt',
'docs/docsite/foo.rst',
'changelogs/fragments/foo.yml',
'changelogs/fragments/bar.yaml'
]
root = pathlib.Path(sys.argv[1])
for path in paths:
    print(path)
    path = root / path
    path.parent.mkdir(parents=True, exist_ok=True)
    path.touch()
| 3,469
|
Python
|
.py
| 116
| 25.112069
| 51
| 0.66587
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,705
|
run-with-pty.py
|
ansible_ansible/test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/run-with-pty.py
|
#!/usr/bin/env python
"""Run a command using a PTY."""
from __future__ import annotations
import sys
if sys.version_info < (3, 10):
    import vendored_pty as pty
else:
    import pty
sys.exit(1 if pty.spawn(sys.argv[1:]) else 0)
| 233
|
Python
|
.py
| 9
| 23.666667
| 45
| 0.705882
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,706
|
vendored_pty.py
|
ansible_ansible/test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/vendored_pty.py
|
# Vendored copy of https://github.com/python/cpython/blob/3680ebed7f3e529d01996dd0318601f9f0d02b4b/Lib/pty.py
# PSF License (see licenses/PSF-license.txt or https://opensource.org/licenses/Python-2.0)
"""Pseudo terminal utilities."""
# Bugs: No signal handling. Doesn't set slave termios and window size.
# Only tested on Linux, FreeBSD, and macOS.
# See: W. Richard Stevens. 1992. Advanced Programming in the
# UNIX Environment. Chapter 19.
# Author: Steen Lumholt -- with additions by Guido.
from __future__ import annotations
from select import select
import os
import sys
import tty
# names imported directly for test mocking purposes
from os import close, waitpid
from tty import setraw, tcgetattr, tcsetattr
__all__ = ["openpty", "fork", "spawn"]
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2
CHILD = 0
def openpty():
    """openpty() -> (master_fd, slave_fd)
    Open a pty master/slave pair, using os.openpty() if possible."""
    try:
        return os.openpty()
    except (AttributeError, OSError):
        pass
    master_fd, slave_name = _open_terminal()
    slave_fd = slave_open(slave_name)
    return master_fd, slave_fd
def master_open():
    """master_open() -> (master_fd, slave_name)
    Open a pty master and return the fd, and the filename of the slave end.
    Deprecated, use openpty() instead."""
    try:
        master_fd, slave_fd = os.openpty()
    except (AttributeError, OSError):
        pass
    else:
        slave_name = os.ttyname(slave_fd)
        os.close(slave_fd)
        return master_fd, slave_name
    return _open_terminal()
def _open_terminal():
    """Open pty master and return (master_fd, tty_name)."""
    for x in 'pqrstuvwxyzPQRST':
        for y in '0123456789abcdef':
            pty_name = '/dev/pty' + x + y
            try:
                fd = os.open(pty_name, os.O_RDWR)
            except OSError:
                continue
            return (fd, '/dev/tty' + x + y)
    raise OSError('out of pty devices')
def slave_open(tty_name):
    """slave_open(tty_name) -> slave_fd
    Open the pty slave and acquire the controlling terminal, returning
    opened filedescriptor.
    Deprecated, use openpty() instead."""
    result = os.open(tty_name, os.O_RDWR)
    try:
        from fcntl import ioctl, I_PUSH
    except ImportError:
        return result
    try:
        ioctl(result, I_PUSH, "ptem")
        ioctl(result, I_PUSH, "ldterm")
    except OSError:
        pass
    return result
def fork():
    """fork() -> (pid, master_fd)
    Fork and make the child a session leader with a controlling terminal."""
    try:
        pid, fd = os.forkpty()
    except (AttributeError, OSError):
        pass
    else:
        if pid == CHILD:
            try:
                os.setsid()
            except OSError:
                # os.forkpty() already set us session leader
                pass
        return pid, fd
    master_fd, slave_fd = openpty()
    pid = os.fork()
    if pid == CHILD:
        # Establish a new session.
        os.setsid()
        os.close(master_fd)
        # Slave becomes stdin/stdout/stderr of child.
        os.dup2(slave_fd, STDIN_FILENO)
        os.dup2(slave_fd, STDOUT_FILENO)
        os.dup2(slave_fd, STDERR_FILENO)
        if slave_fd > STDERR_FILENO:
            os.close(slave_fd)
        # Explicitly open the tty to make it become a controlling tty.
        tmp_fd = os.open(os.ttyname(STDOUT_FILENO), os.O_RDWR)
        os.close(tmp_fd)
    else:
        os.close(slave_fd)
    # Parent and child process.
    return pid, master_fd
def _writen(fd, data):
    """Write all the data to a descriptor."""
    while data:
        n = os.write(fd, data)
        data = data[n:]
def _read(fd):
    """Default read function."""
    return os.read(fd, 1024)
def _copy(master_fd, master_read=_read, stdin_read=_read):
    """Parent copy loop.
    Copies
        pty master -> standard output (master_read)
        standard input -> pty master (stdin_read)"""
    fds = [master_fd, STDIN_FILENO]
    while fds:
        rfds, _wfds, _xfds = select(fds, [], [])
        if master_fd in rfds:
            # Some OSes signal EOF by returning an empty byte string,
            # some throw OSErrors.
            try:
                data = master_read(master_fd)
            except OSError:
                data = b""
            if not data:  # Reached EOF.
                return  # Assume the child process has exited and is
                # unreachable, so we clean up.
            else:
                os.write(STDOUT_FILENO, data)
        if STDIN_FILENO in rfds:
            data = stdin_read(STDIN_FILENO)
            if not data:
                fds.remove(STDIN_FILENO)
            else:
                _writen(master_fd, data)
def spawn(argv, master_read=_read, stdin_read=_read):
    """Create a spawned process."""
    if isinstance(argv, str):
        argv = (argv,)
    sys.audit('pty.spawn', argv)
    pid, master_fd = fork()
    if pid == CHILD:
        os.execlp(argv[0], *argv)
    try:
        mode = tcgetattr(STDIN_FILENO)
        setraw(STDIN_FILENO)
        restore = True
    except tty.error:  # This is the same as termios.error
        restore = False
    try:
        _copy(master_fd, master_read, stdin_read)
    finally:
        if restore:
            tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
    close(master_fd)
    return waitpid(pid, 0)[1]
| 5,448
|
Python
|
.py
| 160
| 26.65
| 109
| 0.605934
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,707
|
assert-no-tty.py
|
ansible_ansible/test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/tests/integration/targets/no-tty/assert-no-tty.py
|
#!/usr/bin/env python
"""Assert no TTY is available."""
from __future__ import annotations
import sys
status = 0
for handle in sys.stdin, sys.stdout, sys.stderr:
    if handle.isatty():
        print(f'{handle} is a TTY', file=sys.stderr)
        status += 1
sys.exit(status)
| 280
|
Python
|
.py
| 10
| 24.6
| 52
| 0.684211
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,708
|
test_constraints.py
|
ansible_ansible/test/integration/targets/ansible-test-units-constraints/ansible_collections/ns/col/tests/unit/plugins/modules/test_constraints.py
|
from __future__ import annotations
import botocore
def test_constraints():
    assert botocore.__version__ == '1.13.50'
| 123
|
Python
|
.py
| 4
| 28
| 44
| 0.732759
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,709
|
validate.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/ps_only/plugins/modules/validate.py
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
DOCUMENTATION = r"""
module: validate
short_description: validate
description: validate
author: "validate (@validate)"
"""
EXAMPLES = r"""
"""
RETURN = r"""
"""
| 257
|
Python
|
.py
| 11
| 22.090909
| 92
| 0.728395
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,710
|
invalid_yaml_syntax.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/invalid_yaml_syntax.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
- key: "value"wrong
"""
EXAMPLES = """
- key: "value"wrong
"""
RETURN = """
- key: "value"wrong
"""
from ansible.module_utils.basic import AnsibleModule
def main():
    AnsibleModule(argument_spec=dict())
if __name__ == '__main__':
    main()
| 418
|
Python
|
.py
| 17
| 22.588235
| 92
| 0.686224
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,711
|
check_mode_attribute_1.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_1.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: check_mode_attribute_1
short_description: Test for check mode attribute 1
description: Test for check mode attribute 1.
author:
  - Ansible Core Team
extends_documentation_fragment:
  - ansible.builtin.action_common_attributes
attributes:
  check_mode:
    # doc says full support, code says none
    support: full
  diff_mode:
    support: none
  platform:
    platforms: all
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(), supports_check_mode=False)
    module.exit_json()
| 775
|
Python
|
.py
| 26
| 27.269231
| 92
| 0.738896
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,712
|
check_mode_attribute_6.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_6.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: check_mode_attribute_6
short_description: Test for check mode attribute 6
description: Test for check mode attribute 6.
author:
  - Ansible Core Team
extends_documentation_fragment:
  - ansible.builtin.action_common_attributes
attributes:
  check_mode:
    # Everything is correct: docs says partial support *with details*, code claims (at least some) support
    support: partial
    details: Some details.
  diff_mode:
    support: none
  platform:
    platforms: all
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
    module.exit_json()
| 867
|
Python
|
.py
| 27
| 29.481481
| 106
| 0.742206
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,713
|
check_mode_attribute_7.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_7.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: check_mode_attribute_7
short_description: Test for check mode attribute 7
description: Test for check mode attribute 7.
author:
  - Ansible Core Team
extends_documentation_fragment:
  - ansible.builtin.action_common_attributes
attributes:
  check_mode:
    # Everything is correct: docs says full support, code claims (at least some) support
    support: full
  diff_mode:
    support: none
  platform:
    platforms: all
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
    module.exit_json()
| 819
|
Python
|
.py
| 26
| 28.961538
| 92
| 0.742058
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,714
|
check_mode_attribute_3.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_3.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: check_mode_attribute_3
short_description: Test for check mode attribute 3
description: Test for check mode attribute 3.
author:
  - Ansible Core Team
extends_documentation_fragment:
  - ansible.builtin.action_common_attributes
attributes:
  check_mode:
    # doc says no support, code says some
    support: none
  diff_mode:
    support: none
  platform:
    platforms: all
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
    module.exit_json()
| 772
|
Python
|
.py
| 26
| 27.153846
| 92
| 0.737838
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,715
|
check_mode_attribute_2.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_2.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: check_mode_attribute_2
short_description: Test for check mode attribute 2
description: Test for check mode attribute 2.
author:
  - Ansible Core Team
extends_documentation_fragment:
  - ansible.builtin.action_common_attributes
attributes:
  check_mode:
    # doc says partial support, code says none
    support: partial
    details: Whatever this means.
  diff_mode:
    support: none
  platform:
    platforms: all
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(), supports_check_mode=False)
    module.exit_json()
| 815
|
Python
|
.py
| 27
| 27.555556
| 92
| 0.740409
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,716
|
invalid_choice_value.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/invalid_choice_value.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: invalid_choice_value
short_description: Test for equal length of chocies with correct options
description: Test for equal length of chocies with correct options
author:
  - Ansible Core Team
options:
  caching:
    description:
      - Type of Caching.
    type: str
    choices:
      - ReadOnly
      - ReadWrite
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == "__main__":
    module = AnsibleModule(
        argument_spec=dict(caching=dict(type="str", choices=["ReadOnly", "ReadOnly"])),
        supports_check_mode=False,
    )
    module.exit_json()
| 794
|
Python
|
.py
| 27
| 25.888889
| 92
| 0.700394
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,717
|
valid_argument_spec_context.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/valid_argument_spec_context.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: valid_argument_spec_context
short_description: Valid argument spec context schema test module
description: Valid argument spec context schema test module
author:
  - Ansible Core Team
options:
  foo:
    description: foo
    type: str
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
def main():
    AnsibleModule(
        argument_spec=dict(
            foo=dict(
                type='str',
                context=dict(
                    extra_key='bar',
                ),
            ),
        ),
    )
if __name__ == '__main__':
    main()
| 774
|
Python
|
.py
| 30
| 20.133333
| 92
| 0.618207
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,718
|
invalid_argument_spec_extra_key.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/invalid_argument_spec_extra_key.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: invalid_argument_spec_extra_key
short_description: Invalid argument spec extra key schema test module
description: Invalid argument spec extra key schema test module
author:
  - Ansible Core Team
options:
  foo:
    description: foo
    type: str
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
def main():
    AnsibleModule(
        argument_spec=dict(
            foo=dict(
                type='str',
                extra_key='bar',
            ),
        ),
    )
if __name__ == '__main__':
    main()
| 733
|
Python
|
.py
| 28
| 21.464286
| 92
| 0.649928
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,719
|
no_callable.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/no_callable.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: no_callable
short_description: No callale test module
description: No callable test module.
author:
  - Ansible Core Team
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict())
    module.exit_json()
| 493
|
Python
|
.py
| 16
| 28.8125
| 92
| 0.726115
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,720
|
invalid_argument_spec_incorrect_context.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/invalid_argument_spec_incorrect_context.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: invalid_argument_spec_incorrect_context
short_description: Invalid argument spec incorrect context schema test module
description: Invalid argument spec incorrect context schema test module
author:
  - Ansible Core Team
options:
  foo:
    description: foo
    type: str
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
def main():
    AnsibleModule(
        argument_spec=dict(
            foo=dict(
                type='str',
                context='bar',
            ),
        ),
    )
if __name__ == '__main__':
    main()
| 755
|
Python
|
.py
| 28
| 22.25
| 92
| 0.662031
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,721
|
sidecar.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/sidecar.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(
        test=dict(type='str', choices=['foo', 'bar'], default='foo'),
    ))
    module.exit_json(test='foo')
| 386
|
Python
|
.py
| 9
| 39.333333
| 92
| 0.687166
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,722
|
check_mode_attribute_4.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_4.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: check_mode_attribute_4
short_description: Test for check mode attribute 4
description: Test for check mode attribute 4.
author:
  - Ansible Core Team
extends_documentation_fragment:
  - ansible.builtin.action_common_attributes
attributes:
  check_mode:
    # documentation says some support, but no details
    support: partial
  diff_mode:
    support: none
  platform:
    platforms: all
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(), supports_check_mode=True)
    module.exit_json()
| 787
|
Python
|
.py
| 26
| 27.730769
| 92
| 0.743046
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,723
|
_not_deprecated.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/_not_deprecated.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: _not_deprecated
short_description: This module is not deprecated
description: Its name has a leading underscore, but it is not deprecated.
author:
  - Ansible Core Team
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict())
    module.exit_json()
| 540
|
Python
|
.py
| 16
| 31.75
| 92
| 0.73166
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,724
|
option_name_casing.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/option_name_casing.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: option_name_casing
short_description: Option names equal up to casing
description: Option names equal up to casing.
author:
  - Ansible Core Team
options:
  foo:
    description: Foo
    type: str
    aliases:
      - bar
      - FOO  # this one is ok
  Foo:
    description: Foo alias
    type: str
  Bar:
    description: Bar alias
    type: str
  bam:
    description: Bar alias 2
    aliases:
      - baR
    type: str
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(
        foo=dict(type='str', aliases=['bar', 'FOO']),
        Foo=dict(type='str'),
        Bar=dict(type='str'),
        bam=dict(type='str', aliases=['baR'])
    ))
    module.exit_json()
| 959
|
Python
|
.py
| 39
| 20.564103
| 92
| 0.648796
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,725
|
import_order.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/import_order.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.basic import AnsibleModule
DOCUMENTATION = """
module: import_order
short_description: Import order test module
description: Import order test module.
author:
  - Ansible Core Team
"""
EXAMPLES = """#"""
RETURN = """"""
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict())
    module.exit_json()
| 498
|
Python
|
.py
| 16
| 29.0625
| 92
| 0.728421
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,726
|
wrong_aliases.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/wrong_aliases.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: wrong_aliases
short_description: Aliases that are attached to the wrong option in documentation
description: Aliases that are attached to the wrong option in documentation.
author:
  - Ansible Core Team
options:
  foo:
    description: Foo.
    type: str
    aliases:
      - bam
  bar:
    description: Bar.
    type: str
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
def main():
    AnsibleModule(
        argument_spec=dict(
            foo=dict(
                type='str',
            ),
            bar=dict(
                type='str',
                aliases=[
                    'bam'
                ],
            ),
        ),
    )
if __name__ == '__main__':
    main()
| 912
|
Python
|
.py
| 38
| 17.736842
| 92
| 0.583141
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,727
|
check_mode_attribute_5.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/check_mode_attribute_5.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = """
module: check_mode_attribute_5
short_description: Test for check mode attribute 5
description: Test for check mode attribute 5.
author:
  - Ansible Core Team
extends_documentation_fragment:
  - ansible.builtin.action_common_attributes
attributes:
  check_mode:
    # Everything is correct: both docs and code claim no support
    support: none
  diff_mode:
    support: none
  platform:
    platforms: all
"""
EXAMPLES = """#"""
RETURN = """"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(), supports_check_mode=False)
    module.exit_json()
| 796
|
Python
|
.py
| 26
| 28.076923
| 92
| 0.742147
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,728
|
semantic_markup.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/semantic_markup.py
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = r"""
module: semantic_markup
short_description: Test semantic markup
description:
  - Test semantic markup.
  - RV(does.not.exist=true).
author:
  - Ansible Core Team
options:
  foo:
    description:
      - Test.
    type: str
  a1:
    description:
      - O(foo)
      - O(foo=bar)
      - O(foo[1]=bar)
      - O(ignore:bar=baz)
      - O(ansible.builtin.copy#module:path=/)
      - V(foo)
      - V(bar(1\\2\)3)
      - V(C(foo\)).
      - E(env(var\))
      - RV(ansible.builtin.copy#module:backup)
      - RV(bar=baz)
      - RV(ignore:bam)
      - RV(ignore:bam.bar=baz)
      - RV(bar).
      - P(ansible.builtin.file#lookup)
    type: str
  a2:
    description: V(C\(foo\)).
    type: str
  a3:
    description: RV(bam).
    type: str
  a4:
    description: P(foo.bar#baz).
    type: str
  a5:
    description: P(foo.bar.baz).
    type: str
  a6:
    description: P(foo.bar.baz#woof).
    type: str
  a7:
    description: E(foo\(bar).
    type: str
  a8:
    description: O(bar).
    type: str
  a9:
    description: O(bar=bam).
    type: str
  a10:
    description: O(foo.bar=1).
    type: str
  a11:
    description: Something with suboptions.
    type: dict
    suboptions:
      b1:
        description:
          - V(C\(foo\)).
          - RV(bam).
          - P(foo.bar#baz).
          - P(foo.bar.baz).
          - P(foo.bar.baz#woof).
          - E(foo\(bar).
          - O(bar).
          - O(bar=bam).
          - O(foo.bar=1).
        type: str
"""
EXAMPLES = """#"""
RETURN = r"""
bar:
  description: Bar.
  type: int
  returned: success
  sample: 5
"""
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
    module = AnsibleModule(argument_spec=dict(
        foo=dict(),
        a1=dict(),
        a2=dict(),
        a3=dict(),
        a4=dict(),
        a5=dict(),
        a6=dict(),
        a7=dict(),
        a8=dict(),
        a9=dict(),
        a10=dict(),
        a11=dict(type='dict', options=dict(
            b1=dict(),
        ))
    ))
    module.exit_json()
| 2,205
|
Python
|
.py
| 105
| 15.285714
| 92
| 0.54786
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,729
|
import_order_lookup.py
|
ansible_ansible/test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/lookup/import_order_lookup.py
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.plugins.lookup import LookupBase
DOCUMENTATION = """
name: import_order_lookup
short_description: Import order lookup
description: Import order lookup.
"""
class LookupModule(LookupBase):
    def run(self, terms, variables=None, **kwargs):
        return []
| 404
|
Python
|
.py
| 11
| 34.181818
| 92
| 0.765464
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,730
|
test_context.py
|
ansible_ansible/test/units/test_context.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Toshio Kuratomi <tkuratomi@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible import context
class FakeOptions:
    pass
def test_set_global_context():
    options = FakeOptions()
    options.tags = [u'production', u'webservers']
    options.check_mode = True
    options.start_at_task = u'Start with くらとみ'
    expected = frozenset((('tags', (u'production', u'webservers')),
                          ('check_mode', True),
                          ('start_at_task', u'Start with くらとみ')))
    context._init_global_context(options)
    assert frozenset(context.CLIARGS.items()) == expected
| 763
|
Python
|
.py
| 17
| 37.529412
| 92
| 0.65928
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,731
|
test_no_tty.py
|
ansible_ansible/test/units/test_no_tty.py
|
from __future__ import annotations
import sys
def test_no_tty():
    assert not sys.stdin.isatty()
    assert not sys.stdout.isatty()
    assert not sys.stderr.isatty()
| 172
|
Python
|
.py
| 6
| 25.166667
| 34
| 0.730061
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,732
|
test_mod_args.py
|
ansible_ansible/test/units/parsing/test_mod_args.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.errors import AnsibleParserError
from ansible.module_utils.common.sentinel import Sentinel
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.plugins.loader import init_plugin_loader
class TestModArgsDwim:
    # TODO: add tests that construct ModuleArgsParser with a task reference
    # TODO: verify the AnsibleError raised on failure knows the task
    #       and the task knows the line numbers
    INVALID_MULTIPLE_ACTIONS = (
        ({'action': 'shell echo hi', 'local_action': 'shell echo hi'}, "action and local_action are mutually exclusive"),
        ({'action': 'shell echo hi', 'shell': 'echo hi'}, "conflicting action statements: shell, shell"),
        ({'local_action': 'shell echo hi', 'shell': 'echo hi'}, "conflicting action statements: shell, shell"),
    )
    def _debug(self, mod, args, to):
        print("RETURNED module = {0}".format(mod))
        print(" args = {0}".format(args))
        print(" to = {0}".format(to))
    def test_basic_shell(self):
        m = ModuleArgsParser(dict(shell='echo hi'))
        mod, args, to = m.parse()
        self._debug(mod, args, to)
        assert mod == 'shell'
        assert args == dict(
            _raw_params='echo hi',
        )
        assert to is Sentinel
    def test_basic_command(self):
        m = ModuleArgsParser(dict(command='echo hi'))
        mod, args, to = m.parse()
        self._debug(mod, args, to)
        assert mod == 'command'
        assert args == dict(
            _raw_params='echo hi',
        )
        assert to is Sentinel
    def test_shell_with_modifiers(self):
        m = ModuleArgsParser(dict(shell='/bin/foo creates=/tmp/baz removes=/tmp/bleep'))
        mod, args, to = m.parse()
        self._debug(mod, args, to)
        assert mod == 'shell'
        assert args == dict(
            creates='/tmp/baz',
            removes='/tmp/bleep',
            _raw_params='/bin/foo',
        )
        assert to is Sentinel
    def test_normal_usage(self):
        m = ModuleArgsParser(dict(copy='src=a dest=b'))
        mod, args, to = m.parse()
        self._debug(mod, args, to)
        assert mod, 'copy'
        assert args, dict(src='a', dest='b')
        assert to is Sentinel
    def test_complex_args(self):
        m = ModuleArgsParser(dict(copy=dict(src='a', dest='b')))
        mod, args, to = m.parse()
        self._debug(mod, args, to)
        assert mod, 'copy'
        assert args, dict(src='a', dest='b')
        assert to is Sentinel
    def test_action_with_complex(self):
        m = ModuleArgsParser(dict(action=dict(module='copy', src='a', dest='b')))
        mod, args, to = m.parse()
        self._debug(mod, args, to)
        assert mod == 'copy'
        assert args == dict(src='a', dest='b')
        assert to is Sentinel
    def test_action_with_complex_and_complex_args(self):
        m = ModuleArgsParser(dict(action=dict(module='copy', args=dict(src='a', dest='b'))))
        mod, args, to = m.parse()
        self._debug(mod, args, to)
        assert mod == 'copy'
        assert args == dict(src='a', dest='b')
        assert to is Sentinel
    def test_local_action_string(self):
        m = ModuleArgsParser(dict(local_action='copy src=a dest=b'))
        mod, args, delegate_to = m.parse()
        self._debug(mod, args, delegate_to)
        assert mod == 'copy'
        assert args == dict(src='a', dest='b')
        assert delegate_to == 'localhost'
    @pytest.mark.parametrize("args_dict, msg", INVALID_MULTIPLE_ACTIONS)
    def test_multiple_actions(self, args_dict, msg):
        m = ModuleArgsParser(args_dict)
        with pytest.raises(AnsibleParserError) as err:
            m.parse()
        assert err.value.args[0] == msg
    def test_multiple_actions_ping_shell(self):
        init_plugin_loader()
        args_dict = {'ping': 'data=hi', 'shell': 'echo hi'}
        m = ModuleArgsParser(args_dict)
        with pytest.raises(AnsibleParserError) as err:
            m.parse()
        assert err.value.args[0] == f'conflicting action statements: {", ".join(args_dict)}'
    def test_bogus_action(self):
        init_plugin_loader()
        args_dict = {'bogusaction': {}}
        m = ModuleArgsParser(args_dict)
        with pytest.raises(AnsibleParserError) as err:
            m.parse()
        assert err.value.args[0].startswith(f"couldn't resolve module/action '{next(iter(args_dict))}'")
| 4,661
|
Python
|
.py
| 106
| 35.820755
| 121
| 0.612155
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,733
|
test_splitter.py
|
ansible_ansible/test/units/parsing/test_splitter.py
|
# coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from ansible.parsing.splitter import split_args, parse_kv
from ansible.errors import AnsibleParserError
import pytest
SPLIT_DATA: tuple[tuple[str | None, list[str], dict[str, str]], ...] = (
(None,
[],
{}),
(u'',
[],
{}),
(u'a',
[u'a'],
{u'_raw_params': u'a'}),
(u'a=b',
[u'a=b'],
{u'a': u'b'}),
(u'a="foo bar"',
[u'a="foo bar"'],
{u'a': u'foo bar'}),
(u'"foo bar baz"',
[u'"foo bar baz"'],
{u'_raw_params': '"foo bar baz"'}),
(u'foo bar baz',
[u'foo', u'bar', u'baz'],
{u'_raw_params': u'foo bar baz'}),
(u'a=b c="foo bar"',
[u'a=b', u'c="foo bar"'],
{u'a': u'b', u'c': u'foo bar'}),
(u'a="echo \\"hello world\\"" b=bar',
[u'a="echo \\"hello world\\""', u'b=bar'],
{u'a': u'echo "hello world"', u'b': u'bar'}),
(u'a="nest\'ed"',
[u'a="nest\'ed"'],
{u'a': u'nest\'ed'}),
(u' ',
[u' '],
{u'_raw_params': u' '}),
(u'\\ ',
[u' '],
{u'_raw_params': u' '}),
(u'a\\=escaped',
[u'a\\=escaped'],
{u'_raw_params': u'a=escaped'}),
(u'a="multi\nline"',
[u'a="multi\nline"'],
{u'a': u'multi\nline'}),
(u'a="blank\n\nline"',
[u'a="blank\n\nline"'],
{u'a': u'blank\n\nline'}),
(u'a="blank\n\n\nlines"',
[u'a="blank\n\n\nlines"'],
{u'a': u'blank\n\n\nlines'}),
(u'a="a long\nmessage\\\nabout a thing\n"',
[u'a="a long\nmessage\\\nabout a thing\n"'],
{u'a': u'a long\nmessage\\\nabout a thing\n'}),
(u'a="multiline\nmessage1\\\n" b="multiline\nmessage2\\\n"',
[u'a="multiline\nmessage1\\\n"', u'b="multiline\nmessage2\\\n"'],
{u'a': 'multiline\nmessage1\\\n', u'b': u'multiline\nmessage2\\\n'}),
(u'line \\\ncontinuation',
[u'line', u'continuation'],
{u'_raw_params': u'line continuation'}),
(u'not jinja}}',
[u'not', u'jinja}}'],
{u'_raw_params': u'not jinja}}'}),
(u'a={{multiline\njinja}}',
[u'a={{multiline\njinja}}'],
{u'a': u'{{multiline\njinja}}'}),
(u'a={{jinja}}',
[u'a={{jinja}}'],
{u'a': u'{{jinja}}'}),
(u'a={{ jinja }}',
[u'a={{ jinja }}'],
{u'a': u'{{ jinja }}'}),
(u'a={% jinja %}',
[u'a={% jinja %}'],
{u'a': u'{% jinja %}'}),
(u'a={# jinja #}',
[u'a={# jinja #}'],
{u'a': u'{# jinja #}'}),
(u'a="{{jinja}}"',
[u'a="{{jinja}}"'],
{u'a': u'{{jinja}}'}),
(u'a={{ jinja }}{{jinja2}}',
[u'a={{ jinja }}{{jinja2}}'],
{u'a': u'{{ jinja }}{{jinja2}}'}),
(u'a="{{ jinja }}{{jinja2}}"',
[u'a="{{ jinja }}{{jinja2}}"'],
{u'a': u'{{ jinja }}{{jinja2}}'}),
(u'a={{jinja}} b={{jinja2}}',
[u'a={{jinja}}', u'b={{jinja2}}'],
{u'a': u'{{jinja}}', u'b': u'{{jinja2}}'}),
(u'a="{{jinja}}\n" b="{{jinja2}}\n"',
[u'a="{{jinja}}\n"', u'b="{{jinja2}}\n"'],
{u'a': u'{{jinja}}\n', u'b': u'{{jinja2}}\n'}),
(u'a="café eñyei"',
[u'a="café eñyei"'],
{u'a': u'café eñyei'}),
(u'a=café b=eñyei',
[u'a=café', u'b=eñyei'],
{u'a': u'café', u'b': u'eñyei'}),
(u'a={{ foo | some_filter(\' \', " ") }} b=bar',
[u'a={{ foo | some_filter(\' \', " ") }}', u'b=bar'],
{u'a': u'{{ foo | some_filter(\' \', " ") }}', u'b': u'bar'}),
(u'One\n Two\n Three\n',
[u'One\n ', u'Two\n ', u'Three\n'],
{u'_raw_params': u'One\n Two\n Three\n'}),
(u'\nOne\n Two\n Three\n',
[u'\n', u'One\n ', u'Two\n ', u'Three\n'],
{u'_raw_params': u'\nOne\n Two\n Three\n'}),
)
PARSE_KV_CHECK_RAW = (
(u'raw=yes', {u'_raw_params': u'raw=yes'}),
(u'creates=something', {u'creates': u'something'}),
)
PARSER_ERROR = (
'"',
"'",
'{{',
'{%',
'{#',
)
SPLIT_ARGS = tuple((test[0], test[1]) for test in SPLIT_DATA)
PARSE_KV = tuple((test[0], test[2]) for test in SPLIT_DATA)
@pytest.mark.parametrize("args, expected", SPLIT_ARGS, ids=[str(arg[0]) for arg in SPLIT_ARGS])
def test_split_args(args, expected):
    assert split_args(args) == expected
@pytest.mark.parametrize("args, expected", PARSE_KV, ids=[str(arg[0]) for arg in PARSE_KV])
def test_parse_kv(args, expected):
    assert parse_kv(args) == expected
@pytest.mark.parametrize("args, expected", PARSE_KV_CHECK_RAW, ids=[str(arg[0]) for arg in PARSE_KV_CHECK_RAW])
def test_parse_kv_check_raw(args, expected):
    assert parse_kv(args, check_raw=True) == expected
@pytest.mark.parametrize("args", PARSER_ERROR)
def test_split_args_error(args):
    with pytest.raises(AnsibleParserError):
        split_args(args)
@pytest.mark.parametrize("args", PARSER_ERROR)
def test_parse_kv_error(args):
    with pytest.raises(AnsibleParserError):
        parse_kv(args)
| 5,681
|
Python
|
.py
| 158
| 29.936709
| 111
| 0.516564
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,734
|
test_unquote.py
|
ansible_ansible/test/units/parsing/test_unquote.py
|
# coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from ansible.parsing.quoting import unquote
import pytest
UNQUOTE_DATA = (
(u'1', u'1'),
(u'\'1\'', u'1'),
(u'"1"', u'1'),
(u'"1 \'2\'"', u'1 \'2\''),
(u'\'1 "2"\'', u'1 "2"'),
(u'\'1 \'2\'\'', u'1 \'2\''),
(u'"1\\"', u'"1\\"'),
(u'\'1\\\'', u'\'1\\\''),
(u'"1 \\"2\\" 3"', u'1 \\"2\\" 3'),
(u'\'1 \\\'2\\\' 3\'', u'1 \\\'2\\\' 3'),
(u'"', u'"'),
(u'\'', u'\''),
# Not entirely sure these are good but they match the current
# behaviour
(u'"1""2"', u'1""2'),
(u'\'1\'\'2\'', u'1\'\'2'),
(u'"1" 2 "3"', u'1" 2 "3'),
(u'"1"\'2\'"3"', u'1"\'2\'"3'),
)
@pytest.mark.parametrize("quoted, expected", UNQUOTE_DATA)
def test_unquote(quoted, expected):
    assert unquote(quoted) == expected
| 1,518
|
Python
|
.py
| 43
| 32.395349
| 70
| 0.597686
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,735
|
test_ajson.py
|
ansible_ansible/test/units/parsing/test_ajson.py
|
# Copyright 2018, Matt Martz <matt@sivel.net>
# Copyright 2019, Andrew Klychkov @Andersson007 <aaklychkov@mail.ru>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import json
import pytest
from collections.abc import Mapping
from datetime import date, datetime, timezone, timedelta
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
from ansible.utils.unsafe_proxy import AnsibleUnsafeText
def test_AnsibleJSONDecoder_vault():
    with open(os.path.join(os.path.dirname(__file__), 'fixtures/ajson.json')) as f:
        data = json.load(f, cls=AnsibleJSONDecoder)
    assert isinstance(data['password'], AnsibleVaultEncryptedUnicode)
    assert isinstance(data['bar']['baz'][0]['password'], AnsibleVaultEncryptedUnicode)
    assert isinstance(data['foo']['password'], AnsibleVaultEncryptedUnicode)
def test_encode_decode_unsafe():
    data = {
        'key_value': AnsibleUnsafeText(u'{#NOTACOMMENT#}'),
        'list': [AnsibleUnsafeText(u'{#NOTACOMMENT#}')],
        'list_dict': [{'key_value': AnsibleUnsafeText(u'{#NOTACOMMENT#}')}]}
    json_expected = (
        '{"key_value": {"__ansible_unsafe": "{#NOTACOMMENT#}"}, '
        '"list": [{"__ansible_unsafe": "{#NOTACOMMENT#}"}], '
        '"list_dict": [{"key_value": {"__ansible_unsafe": "{#NOTACOMMENT#}"}}]}'
    )
    assert json.dumps(data, cls=AnsibleJSONEncoder, preprocess_unsafe=True, sort_keys=True) == json_expected
    assert json.loads(json_expected, cls=AnsibleJSONDecoder) == data
def vault_data():
    """
    Prepare AnsibleVaultEncryptedUnicode test data for AnsibleJSONEncoder.default().
    Return a list of tuples (input, expected).
    """
    with open(os.path.join(os.path.dirname(__file__), 'fixtures/ajson.json')) as f:
        data = json.load(f, cls=AnsibleJSONDecoder)
    data_0 = data['password']
    data_1 = data['bar']['baz'][0]['password']
    expected_0 = (u'$ANSIBLE_VAULT;1.1;AES256\n34646264306632313333393636316'
                  '562356435376162633631326264383934326565333633366238\n3863'
                  '373264326461623132613931346165636465346337310a32643431383'
                  '0316337393263616439\n646539373134633963666338613632666334'
                  '65663730303633323534363331316164623237363831\n35363335613'
                  '93238370a313330316263373938326162386433313336613532653538'
                  '376662306435\n3339\n')
    expected_1 = (u'$ANSIBLE_VAULT;1.1;AES256\n34646264306632313333393636316'
                  '562356435376162633631326264383934326565333633366238\n3863'
                  '373264326461623132613931346165636465346337310a32643431383'
                  '0316337393263616439\n646539373134633963666338613632666334'
                  '65663730303633323534363331316164623237363831\n35363335613'
                  '93238370a313330316263373938326162386433313336613532653538'
                  '376662306435\n3338\n')
    return [
        (data_0, expected_0),
        (data_1, expected_1),
    ]
class TestAnsibleJSONEncoder:
    """
    Namespace for testing AnsibleJSONEncoder.
    """
    @pytest.fixture(scope='class')
    def mapping(self, request):
        """
        Returns object of Mapping mock class.
        The object is used for testing handling of Mapping objects
        in AnsibleJSONEncoder.default().
        Using a plain dictionary instead is not suitable because
        it is handled by default encoder of the superclass (json.JSONEncoder).
        """
        class M(Mapping):
            """Mock mapping class."""
            def __init__(self, *args, **kwargs):
                self.__dict__.update(*args, **kwargs)
            def __getitem__(self, key):
                return self.__dict__[key]
            def __iter__(self):
                return iter(self.__dict__)
            def __len__(self):
                return len(self.__dict__)
        mapping = M(request.param)
        assert isinstance(len(mapping), int)  # ensure coverage of __len__
        return mapping
    @pytest.fixture
    def ansible_json_encoder(self):
        """Return AnsibleJSONEncoder object."""
        return AnsibleJSONEncoder()
    ###############
    # Test methods:
    @pytest.mark.parametrize(
        'test_input,expected',
        [
            (datetime(2019, 5, 14, 13, 39, 38, 569047), '2019-05-14T13:39:38.569047'),
            (datetime(2019, 5, 14, 13, 47, 16, 923866), '2019-05-14T13:47:16.923866'),
            (date(2019, 5, 14), '2019-05-14'),
            (date(2020, 5, 14), '2020-05-14'),
            (datetime(2019, 6, 15, 14, 45, tzinfo=timezone.utc), '2019-06-15T14:45:00+00:00'),
            (datetime(2019, 6, 15, 14, 45, tzinfo=timezone(timedelta(hours=1, minutes=40))), '2019-06-15T14:45:00+01:40'),
        ]
    )
    def test_date_datetime(self, ansible_json_encoder, test_input, expected):
        """
        Test for passing datetime.date or datetime.datetime objects to AnsibleJSONEncoder.default().
        """
        assert ansible_json_encoder.default(test_input) == expected
    @pytest.mark.parametrize(
        'mapping,expected',
        [
            ({1: 1}, {1: 1}),
            ({2: 2}, {2: 2}),
            ({1: 2}, {1: 2}),
            ({2: 1}, {2: 1}),
        ], indirect=['mapping'],
    )
    def test_mapping(self, ansible_json_encoder, mapping, expected):
        """
        Test for passing Mapping object to AnsibleJSONEncoder.default().
        """
        assert ansible_json_encoder.default(mapping) == expected
    @pytest.mark.parametrize('test_input,expected', vault_data())
    def test_ansible_json_decoder_vault(self, ansible_json_encoder, test_input, expected):
        """
        Test for passing AnsibleVaultEncryptedUnicode to AnsibleJSONEncoder.default().
        """
        assert ansible_json_encoder.default(test_input) == {'__ansible_vault': expected}
        assert json.dumps(test_input, cls=AnsibleJSONEncoder, preprocess_unsafe=True) == '{"__ansible_vault": "%s"}' % expected.replace('\n', '\\n')
    @pytest.mark.parametrize(
        'test_input,expected',
        [
            ({1: 'first'}, {1: 'first'}),
            ({2: 'second'}, {2: 'second'}),
        ]
    )
    def test_default_encoder(self, ansible_json_encoder, test_input, expected):
        """
        Test for the default encoder of AnsibleJSONEncoder.default().
        If objects of different classes that are not tested above were passed,
        AnsibleJSONEncoder.default() invokes 'default()' method of json.JSONEncoder superclass.
        """
        assert ansible_json_encoder.default(test_input) == expected
    @pytest.mark.parametrize('test_input', [1, 1.1, 'string', [1, 2], set('set'), True, None])
    def test_default_encoder_unserializable(self, ansible_json_encoder, test_input):
        """
        Test for the default encoder of AnsibleJSONEncoder.default(), not serializable objects.
        It must fail with TypeError 'object is not serializable'.
        """
        with pytest.raises(TypeError):
            ansible_json_encoder.default(test_input)
| 7,201
|
Python
|
.py
| 148
| 39.864865
| 148
| 0.645037
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,736
|
test_dataloader.py
|
ansible_ansible/test/units/parsing/test_dataloader.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import unittest
from unittest.mock import patch, mock_open
from ansible.errors import AnsibleParserError, yaml_strings, AnsibleFileNotFound
from ansible.parsing.vault import AnsibleVaultError
from ansible.module_utils.common.text.converters import to_text
from units.mock.vault_helper import TextVaultSecret
from ansible.parsing.dataloader import DataLoader
from units.mock.path import mock_unfrackpath_noop
class TestDataLoader(unittest.TestCase):
    def setUp(self):
        self._loader = DataLoader()
    @patch('os.path.exists')
    def test__is_role(self, p_exists):
        p_exists.side_effect = lambda p: p == b'test_path/tasks/main.yml'
        self.assertTrue(self._loader._is_role('test_path/tasks'))
        self.assertTrue(self._loader._is_role('test_path/'))
    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, mock_def):
        mock_def.return_value = (b"""{"a": 1, "b": 2, "c": 3}""", True)
        output = self._loader.load_from_file('dummy_json.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))
    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, mock_def):
        mock_def.return_value = (b"""
a: 1
b: 2
c: 3
""", True)
        output = self._loader.load_from_file('dummy_yaml.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))
    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, mock_def):
        mock_def.return_value = (b"""
TEXT:
***
NOT VALID
""", True)
        self.assertRaises(AnsibleParserError, self._loader.load_from_file, 'dummy_yaml_bad.txt')
    @patch('ansible.errors.AnsibleError._get_error_lines_from_file')
    @patch.object(DataLoader, '_get_file_contents')
    def test_tab_error(self, mock_def, mock_get_error_lines):
        mock_def.return_value = (u"""---\nhosts: localhost\nvars:\n foo: bar\n\tblip: baz""", True)
        mock_get_error_lines.return_value = ("""\tblip: baz""", """..foo: bar""")
        with self.assertRaises(AnsibleParserError) as cm:
            self._loader.load_from_file('dummy_yaml_text.txt')
        self.assertIn(yaml_strings.YAML_COMMON_LEADING_TAB_ERROR, str(cm.exception))
        self.assertIn('foo: bar', str(cm.exception))
    @patch('ansible.parsing.dataloader.unfrackpath', mock_unfrackpath_noop)
    @patch.object(DataLoader, '_is_role')
    def test_path_dwim_relative(self, mock_is_role):
        """
        simulate a nested dynamic include:
        playbook.yml:
        - hosts: localhost
          roles:
            - { role: 'testrole' }
        testrole/tasks/main.yml:
        - include_tasks: "include1.yml"
          static: no
        testrole/tasks/include1.yml:
        - include_tasks: include2.yml
          static: no
        testrole/tasks/include2.yml:
        - debug: msg="blah"
        """
        mock_is_role.return_value = False
        with patch('os.path.exists') as mock_os_path_exists:
            mock_os_path_exists.return_value = False
            self._loader.path_dwim_relative('/tmp/roles/testrole/tasks', 'tasks', 'included2.yml')
            # Fetch first args for every call
            # mock_os_path_exists.assert_any_call isn't used because os.path.normpath must be used in order to compare paths
            called_args = [os.path.normpath(to_text(call[0][0])) for call in mock_os_path_exists.call_args_list]
            # 'path_dwim_relative' docstrings say 'with or without explicitly named dirname subdirs':
            self.assertIn('/tmp/roles/testrole/tasks/included2.yml', called_args)
            self.assertIn('/tmp/roles/testrole/tasks/tasks/included2.yml', called_args)
            # relative directories below are taken in account too:
            self.assertIn('tasks/included2.yml', called_args)
            self.assertIn('included2.yml', called_args)
    def test_path_dwim_root(self):
        self.assertEqual(self._loader.path_dwim('/'), '/')
    def test_path_dwim_home(self):
        self.assertEqual(self._loader.path_dwim('~'), os.path.expanduser('~'))
    def test_path_dwim_tilde_slash(self):
        self.assertEqual(self._loader.path_dwim('~/'), os.path.expanduser('~'))
    def test_get_real_file(self):
        self.assertEqual(self._loader.get_real_file(__file__), __file__)
    def test_is_file(self):
        self.assertTrue(self._loader.is_file(__file__))
    def test_is_directory_positive(self):
        self.assertTrue(self._loader.is_directory(os.path.dirname(__file__)))
    def test_get_file_contents_none_path(self):
        self.assertRaisesRegex(AnsibleParserError, 'Invalid filename',
                               self._loader._get_file_contents, None)
    def test_get_file_contents_non_existent_path(self):
        self.assertRaises(AnsibleFileNotFound, self._loader._get_file_contents, '/non_existent_file')
class TestPathDwimRelativeDataLoader(unittest.TestCase):
def setUp(self):
self._loader = DataLoader()
def test_all_slash(self):
self.assertEqual(self._loader.path_dwim_relative('/', '/', '/'), '/')
def test_path_endswith_role(self):
self.assertEqual(self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='/'), '/')
def test_path_endswith_role_main_yml(self):
self.assertIn('main.yml', self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='main.yml'))
def test_path_endswith_role_source_tilde(self):
self.assertEqual(self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='~/'), os.path.expanduser('~'))
class TestPathDwimRelativeStackDataLoader(unittest.TestCase):
def setUp(self):
self._loader = DataLoader()
def test_none(self):
self.assertRaisesRegex(AnsibleFileNotFound, 'on the Ansible Controller', self._loader.path_dwim_relative_stack, None, None, None)
def test_empty_strings(self):
self.assertEqual(self._loader.path_dwim_relative_stack('', '', ''), './')
def test_empty_lists(self):
self.assertEqual(self._loader.path_dwim_relative_stack([], '', '~/'), os.path.expanduser('~'))
def test_all_slash(self):
self.assertEqual(self._loader.path_dwim_relative_stack('/', '/', '/'), '/')
def test_path_endswith_role(self):
self.assertEqual(self._loader.path_dwim_relative_stack(paths=['foo/bar/tasks/'], dirname='/', source='/'), '/')
def test_path_endswith_role_source_tilde(self):
self.assertEqual(self._loader.path_dwim_relative_stack(paths=['foo/bar/tasks/'], dirname='/', source='~/'), os.path.expanduser('~'))
def test_path_endswith_role_source_main_yml(self):
self.assertRaises(AnsibleFileNotFound, self._loader.path_dwim_relative_stack, ['foo/bar/tasks/'], '/', 'main.yml')
def test_path_endswith_role_source_main_yml_source_in_dirname(self):
self.assertRaises(AnsibleFileNotFound, self._loader.path_dwim_relative_stack, 'foo/bar/tasks/', 'tasks', 'tasks/main.yml')
class TestDataLoaderWithVault(unittest.TestCase):
def setUp(self):
self._loader = DataLoader()
vault_secrets = [('default', TextVaultSecret('ansible'))]
self._loader.set_vault_secrets(vault_secrets)
self.test_vault_data_path = os.path.join(os.path.dirname(__file__), 'fixtures', 'vault.yml')
def tearDown(self):
pass
def test_get_real_file_vault(self):
real_file_path = self._loader.get_real_file(self.test_vault_data_path)
self.assertTrue(os.path.exists(real_file_path))
def test_get_real_file_vault_no_vault(self):
self._loader.set_vault_secrets(None)
self.assertRaises(AnsibleParserError, self._loader.get_real_file, self.test_vault_data_path)
def test_get_real_file_vault_wrong_password(self):
wrong_vault = [('default', TextVaultSecret('wrong_password'))]
self._loader.set_vault_secrets(wrong_vault)
self.assertRaises(AnsibleVaultError, self._loader.get_real_file, self.test_vault_data_path)
def test_get_real_file_not_a_path(self):
self.assertRaisesRegex(AnsibleParserError, 'Invalid filename', self._loader.get_real_file, None)
@patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True)
def test_parse_from_vault_1_1_file(self):
vaulted_data = """$ANSIBLE_VAULT;1.1;AES256
33343734386261666161626433386662623039356366656637303939306563376130623138626165
6436333766346533353463636566313332623130383662340a393835656134633665333861393331
37666233346464636263636530626332623035633135363732623332313534306438393366323966
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
with patch('builtins.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
output = self._loader.load_from_file('dummy_vault.txt', cache='none')
self.assertEqual(output, dict(foo='bar'))
# no cache used
self.assertFalse(self._loader._FILE_CACHE)
# vault cache entry written
output = self._loader.load_from_file('dummy_vault.txt', cache='vaulted')
self.assertEqual(output, dict(foo='bar'))
self.assertTrue(self._loader._FILE_CACHE)
# cache entry used
key = next(iter(self._loader._FILE_CACHE.keys()))
modified = {'changed': True}
self._loader._FILE_CACHE[key] = modified
output = self._loader.load_from_file('dummy_vault.txt', cache='vaulted')
self.assertEqual(output, modified)
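# A short sketch of the cache modes exercised above (argument values as used in this test):
#   output = self._loader.load_from_file('dummy_vault.txt', cache='none')     # no _FILE_CACHE entry is written
#   output = self._loader.load_from_file('dummy_vault.txt', cache='vaulted')  # decrypted/parsed result is cached
# The _FILE_CACHE mutation above only demonstrates that a cached entry is returned as-is on the next load.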
| 10,366 | Python | .py | 187 | 47.823529 | 140 | 0.678526 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,737 | test_dumper.py | ansible_ansible/test/units/parsing/yaml/test_dumper.py |
# coding: utf-8
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import io
from jinja2.exceptions import UndefinedError
import unittest
from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.template import AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
from units.mock.yaml_helper import YamlTestUtils
from units.mock.vault_helper import TextVaultSecret
from ansible.vars.manager import VarsWithSources
class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
def setUp(self):
self.vault_password = "hunter42"
vault_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('vault_secret', vault_secret)]
self.good_vault = vault.VaultLib(self.vault_secrets)
self.vault = self.good_vault
self.stream = self._build_stream()
self.dumper = dumper.AnsibleDumper
def _build_stream(self, yaml_text=None):
text = yaml_text or u''
stream = io.StringIO(text)
return stream
def _loader(self, stream):
return AnsibleLoader(stream, vault_secrets=self.vault.secrets)
def test_ansible_vault_encrypted_unicode(self):
plaintext = 'This is a string we are going to encrypt.'
avu = objects.AnsibleVaultEncryptedUnicode.from_plaintext(plaintext, vault=self.vault,
secret=vault.match_secrets(self.vault_secrets, ['vault_secret'])[0][1])
yaml_out = self._dump_string(avu, dumper=self.dumper)
stream = self._build_stream(yaml_out)
loader = self._loader(stream)
data_from_yaml = loader.get_single_data()
self.assertEqual(plaintext, data_from_yaml.data)
def test_bytes(self):
b_text = u'tréma'.encode('utf-8')
unsafe_object = AnsibleUnsafeBytes(b_text)
yaml_out = self._dump_string(unsafe_object, dumper=self.dumper)
stream = self._build_stream(yaml_out)
loader = self._loader(stream)
data_from_yaml = loader.get_single_data()
result = b_text
self.assertEqual(result, data_from_yaml)
def test_unicode(self):
u_text = u'nöel'
unsafe_object = AnsibleUnsafeText(u_text)
yaml_out = self._dump_string(unsafe_object, dumper=self.dumper)
stream = self._build_stream(yaml_out)
loader = self._loader(stream)
data_from_yaml = loader.get_single_data()
self.assertEqual(u_text, data_from_yaml)
def test_vars_with_sources(self):
self._dump_string(VarsWithSources(), dumper=self.dumper)
def test_undefined(self):
undefined_object = AnsibleUndefined()
try:
yaml_out = self._dump_string(undefined_object, dumper=self.dumper)
except UndefinedError:
yaml_out = None
self.assertIsNone(yaml_out)
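# Rough usage sketch of what _dump_string is assumed to wrap here (PyYAML's yaml.dump API):
#   import yaml
#   text = yaml.dump(AnsibleUnsafeText(u'nöel'), Dumper=dumper.AnsibleDumper)
#   data = AnsibleLoader(io.StringIO(text)).get_single_data()  # round-trips to u'nöel'
# as the test_unicode case above verifies; dumping AnsibleUndefined is expected to fail
# with UndefinedError, which test_undefined treats as the passing outcome.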
| 3,598 | Python | .py | 77 | 39.844156 | 137 | 0.702919 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,738 | test_objects.py | ansible_ansible/test/units/parsing/yaml/test_objects.py |
# This file is part of Ansible
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2016, Adrian Likins <alikins@redhat.com>
from __future__ import annotations
import unittest
from ansible.errors import AnsibleError
from ansible.module_utils.common.text.converters import to_native
from ansible.parsing import vault
from ansible.parsing.yaml.loader import AnsibleLoader
# module under test
from ansible.parsing.yaml import objects
from units.mock.yaml_helper import YamlTestUtils
from units.mock.vault_helper import TextVaultSecret
class TestAnsibleVaultUnicodeNoVault(unittest.TestCase, YamlTestUtils):
def test_empty_init(self):
self.assertRaises(TypeError, objects.AnsibleVaultEncryptedUnicode)
def test_empty_string_init(self):
seq = ''.encode('utf8')
self.assert_values(seq)
def test_empty_byte_string_init(self):
seq = b''
self.assert_values(seq)
def _assert_values(self, avu, seq):
self.assertIsInstance(avu, objects.AnsibleVaultEncryptedUnicode)
self.assertTrue(avu.vault is None)
# AnsibleVaultEncryptedUnicode without a vault should never == any string
self.assertNotEqual(avu, seq)
def assert_values(self, seq):
avu = objects.AnsibleVaultEncryptedUnicode(seq)
self._assert_values(avu, seq)
def test_single_char(self):
seq = 'a'.encode('utf8')
self.assert_values(seq)
def test_string(self):
seq = 'some letters'
self.assert_values(seq)
def test_byte_string(self):
seq = 'some letters'.encode('utf8')
self.assert_values(seq)
class TestAnsibleVaultEncryptedUnicode(unittest.TestCase, YamlTestUtils):
def setUp(self):
self.good_vault_password = "hunter42"
good_vault_secret = TextVaultSecret(self.good_vault_password)
self.good_vault_secrets = [('good_vault_password', good_vault_secret)]
self.good_vault = vault.VaultLib(self.good_vault_secrets)
# TODO: make this use two vault secret identities instead of two vaultSecrets
self.wrong_vault_password = 'not-hunter42'
wrong_vault_secret = TextVaultSecret(self.wrong_vault_password)
self.wrong_vault_secrets = [('wrong_vault_password', wrong_vault_secret)]
self.wrong_vault = vault.VaultLib(self.wrong_vault_secrets)
self.vault = self.good_vault
self.vault_secrets = self.good_vault_secrets
def _loader(self, stream):
return AnsibleLoader(stream, vault_secrets=self.vault_secrets)
def test_dump_load_cycle(self):
aveu = self._from_plaintext('the test string for TestAnsibleVaultEncryptedUnicode.test_dump_load_cycle')
self._dump_load_cycle(aveu)
def assert_values(self, avu, seq):
self.assertIsInstance(avu, objects.AnsibleVaultEncryptedUnicode)
self.assertEqual(avu, seq)
self.assertTrue(avu.vault is self.vault)
self.assertIsInstance(avu.vault, vault.VaultLib)
def _from_plaintext(self, seq):
id_secret = vault.match_encrypt_secret(self.good_vault_secrets)
return objects.AnsibleVaultEncryptedUnicode.from_plaintext(seq, vault=self.vault, secret=id_secret[1])
def test_empty_init(self):
self.assertRaises(TypeError, objects.AnsibleVaultEncryptedUnicode)
def test_empty_string_init_from_plaintext(self):
seq = ''
avu = self._from_plaintext(seq)
self.assert_values(avu, seq)
def test_empty_unicode_init_from_plaintext(self):
seq = u''
avu = self._from_plaintext(seq)
self.assert_values(avu, seq)
def test_string_from_plaintext(self):
seq = 'some letters'
avu = self._from_plaintext(seq)
self.assert_values(avu, seq)
def test_unicode_from_plaintext(self):
seq = u'some letters'
avu = self._from_plaintext(seq)
self.assert_values(avu, seq)
def test_unicode_from_plaintext_encode(self):
seq = u'some text here'
avu = self._from_plaintext(seq)
b_avu = avu.encode('utf-8', 'strict')
self.assertIsInstance(avu, objects.AnsibleVaultEncryptedUnicode)
self.assertEqual(b_avu, seq.encode('utf-8', 'strict'))
self.assertTrue(avu.vault is self.vault)
self.assertIsInstance(avu.vault, vault.VaultLib)
# TODO/FIXME: make sure a bad password fails differently from 'that's not encrypted'
def test_empty_string_wrong_password(self):
seq = ''
self.vault = self.wrong_vault
avu = self._from_plaintext(seq)
def compare(avu, seq):
return avu == seq
self.assertRaises(AnsibleError, compare, avu, seq)
def test_vaulted_utf8_value_37258(self):
seq = u"aöffü"
avu = self._from_plaintext(seq)
self.assert_values(avu, seq)
def test_str_vaulted_utf8_value_37258(self):
seq = u"aöffü"
avu = self._from_plaintext(seq)
assert str(avu) == to_native(seq)
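# Typical construction, as exercised by the tests above (sketch):
#   id_secret = vault.match_encrypt_secret(good_vault_secrets)   # -> (vault_id, secret)
#   avu = objects.AnsibleVaultEncryptedUnicode.from_plaintext(u'text', vault=vault_lib, secret=id_secret[1])
#   avu == u'text'       # decrypts for comparison when a usable vault/secret is attached
#   str(avu) == 'text'
# 'good_vault_secrets' and 'vault_lib' stand in for the fixtures built in setUp().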
| 5,570 | Python | .py | 121 | 39.347107 | 112 | 0.703087 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,739 | test_constructor.py | ansible_ansible/test/units/parsing/yaml/test_constructor.py |
# -*- coding: utf-8 -*-
# (c) 2020 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from yaml import MappingNode, Mark, ScalarNode
from yaml.constructor import ConstructorError
import ansible.constants as C
from ansible.utils.display import Display
from ansible.parsing.yaml.constructor import AnsibleConstructor
@pytest.fixture
def dupe_node():
tag = 'tag:yaml.org,2002:map'
scalar_tag = 'tag:yaml.org,2002:str'
mark = Mark(tag, 0, 0, 0, None, None)
node = MappingNode(
tag,
[
(
ScalarNode(tag=scalar_tag, value='bar', start_mark=mark),
ScalarNode(tag=scalar_tag, value='baz', start_mark=mark)
),
(
ScalarNode(tag=scalar_tag, value='bar', start_mark=mark),
ScalarNode(tag=scalar_tag, value='qux', start_mark=mark)
),
],
start_mark=mark
)
return node
class Capture:
def __init__(self):
self.called = False
self.calls = []
def __call__(self, *args, **kwargs):
self.called = True
self.calls.append((
args,
kwargs
))
def test_duplicate_yaml_dict_key_ignore(dupe_node, monkeypatch):
monkeypatch.setattr(C, 'DUPLICATE_YAML_DICT_KEY', 'ignore')
cap = Capture()
monkeypatch.setattr(Display(), 'warning', cap)
ac = AnsibleConstructor()
ac.construct_mapping(dupe_node)
assert not cap.called
def test_duplicate_yaml_dict_key_warn(dupe_node, monkeypatch):
monkeypatch.setattr(C, 'DUPLICATE_YAML_DICT_KEY', 'warn')
cap = Capture()
monkeypatch.setattr(Display(), 'warning', cap)
ac = AnsibleConstructor()
ac.construct_mapping(dupe_node)
assert cap.called
expected = [
(
(
'While constructing a mapping from tag:yaml.org,2002:map, line 1, column 1, '
'found a duplicate dict key (bar). Using last defined value only.',
),
{}
)
]
assert cap.calls == expected
def test_duplicate_yaml_dict_key_error(dupe_node, monkeypatch, mocker):
monkeypatch.setattr(C, 'DUPLICATE_YAML_DICT_KEY', 'error')
ac = AnsibleConstructor()
pytest.raises(ConstructorError, ac.construct_mapping, dupe_node)
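# The three DUPLICATE_YAML_DICT_KEY modes covered above, in short:
#   'ignore' -> last value for the duplicate key wins, silently
#   'warn'   -> last value wins and Display().warning() is called
#   'error'  -> ConstructorError is raised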
| 2,398 | Python | .py | 68 | 28.220588 | 93 | 0.635421 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,740 | test_loader.py | ansible_ansible/test/units/parsing/yaml/test_loader.py |
# coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from collections.abc import Sequence, Set, Mapping
from io import StringIO
import unittest
from ansible import errors
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing import vault
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
from ansible.parsing.yaml.dumper import AnsibleDumper
from units.mock.yaml_helper import YamlTestUtils
from units.mock.vault_helper import TextVaultSecret
from yaml.parser import ParserError
from yaml.scanner import ScannerError
class NameStringIO(StringIO):
"""In py2.6, StringIO doesn't let you set name because a baseclass has it
as readonly property"""
name = None
def __init__(self, *args, **kwargs):
super(NameStringIO, self).__init__(*args, **kwargs)
class TestAnsibleLoaderBasic(unittest.TestCase):
def test_parse_number(self):
stream = StringIO(u"""
1
""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, 1)
# No line/column info saved yet
def test_parse_string(self):
stream = StringIO(u"""
Ansible
""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, u'Ansible')
self.assertIsInstance(data, str)
self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
def test_parse_utf8_string(self):
stream = StringIO(u"""
Cafè Eñyei
""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, u'Cafè Eñyei')
self.assertIsInstance(data, str)
self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
def test_parse_dict(self):
stream = StringIO(u"""
webster: daniel
oed: oxford
""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, {'webster': 'daniel', 'oed': 'oxford'})
self.assertEqual(len(data), 2)
self.assertIsInstance(list(data.keys())[0], str)
self.assertIsInstance(list(data.values())[0], str)
# Beginning of the first key
self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
self.assertEqual(data[u'webster'].ansible_pos, ('myfile.yml', 2, 26))
self.assertEqual(data[u'oed'].ansible_pos, ('myfile.yml', 3, 22))
def test_parse_list(self):
stream = StringIO(u"""
- a
- b
""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, [u'a', u'b'])
self.assertEqual(len(data), 2)
self.assertIsInstance(data[0], str)
self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 17))
self.assertEqual(data[0].ansible_pos, ('myfile.yml', 2, 19))
self.assertEqual(data[1].ansible_pos, ('myfile.yml', 3, 19))
def test_parse_short_dict(self):
stream = StringIO(u"""{"foo": "bar"}""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, dict(foo=u'bar'))
self.assertEqual(data.ansible_pos, ('myfile.yml', 1, 1))
self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 1, 9))
stream = StringIO(u"""foo: bar""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, dict(foo=u'bar'))
self.assertEqual(data.ansible_pos, ('myfile.yml', 1, 1))
self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 1, 6))
def test_error_conditions(self):
stream = StringIO(u"""{""")
loader = AnsibleLoader(stream, 'myfile.yml')
self.assertRaises(ParserError, loader.get_single_data)
def test_tab_error(self):
stream = StringIO(u"""---\nhosts: localhost\nvars:\n foo: bar\n\tblip: baz""")
loader = AnsibleLoader(stream, 'myfile.yml')
self.assertRaises(ScannerError, loader.get_single_data)
def test_front_matter(self):
stream = StringIO(u"""---\nfoo: bar""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, dict(foo=u'bar'))
self.assertEqual(data.ansible_pos, ('myfile.yml', 2, 1))
self.assertEqual(data[u'foo'].ansible_pos, ('myfile.yml', 2, 6))
# Initial indent (See: #6348)
stream = StringIO(u""" - foo: bar\n baz: qux""")
loader = AnsibleLoader(stream, 'myfile.yml')
data = loader.get_single_data()
self.assertEqual(data, [{u'foo': u'bar', u'baz': u'qux'}])
self.assertEqual(data.ansible_pos, ('myfile.yml', 1, 2))
self.assertEqual(data[0].ansible_pos, ('myfile.yml', 1, 4))
self.assertEqual(data[0][u'foo'].ansible_pos, ('myfile.yml', 1, 9))
self.assertEqual(data[0][u'baz'].ansible_pos, ('myfile.yml', 2, 9))
class TestAnsibleLoaderVault(unittest.TestCase, YamlTestUtils):
def setUp(self):
self.vault_password = "hunter42"
vault_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('vault_secret', vault_secret),
('default', vault_secret)]
self.vault = vault.VaultLib(self.vault_secrets)
@property
def vault_secret(self):
return vault.match_encrypt_secret(self.vault_secrets)[1]
def test_wrong_password(self):
plaintext = u"Ansible"
bob_password = "this is a different password"
bobs_secret = TextVaultSecret(bob_password)
bobs_secrets = [('default', bobs_secret)]
bobs_vault = vault.VaultLib(bobs_secrets)
ciphertext = bobs_vault.encrypt(plaintext, vault.match_encrypt_secret(bobs_secrets)[1])
try:
self.vault.decrypt(ciphertext)
except Exception as e:
self.assertIsInstance(e, errors.AnsibleError)
self.assertEqual(e.message, 'Decryption failed (no vault secrets were found that could decrypt)')
def _encrypt_plaintext(self, plaintext):
# Construct a yaml repr of a vault by hand
vaulted_var_bytes = self.vault.encrypt(plaintext, self.vault_secret)
# add yaml tag
vaulted_var = vaulted_var_bytes.decode()
lines = vaulted_var.splitlines()
lines2 = []
for line in lines:
lines2.append(' %s' % line)
vaulted_var = '\n'.join(lines2)
tagged_vaulted_var = u"""!vault |\n%s""" % vaulted_var
return tagged_vaulted_var
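# The helper above produces YAML along these lines (sketch; each envelope line is
# indented by the helper):
#   !vault |
#             $ANSIBLE_VAULT;1.1;AES256
#             3336396532...
# i.e. the '!vault' tag followed by the envelope returned by VaultLib.encrypt().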
def _build_stream(self, yaml_text):
stream = NameStringIO(yaml_text)
stream.name = 'my.yml'
return stream
def _loader(self, stream):
return AnsibleLoader(stream, vault_secrets=self.vault.secrets)
def _load_yaml(self, yaml_text, password):
stream = self._build_stream(yaml_text)
loader = self._loader(stream)
data_from_yaml = loader.get_single_data()
return data_from_yaml
def test_dump_load_cycle(self):
avu = AnsibleVaultEncryptedUnicode.from_plaintext('The plaintext for test_dump_load_cycle.', self.vault, self.vault_secret)
self._dump_load_cycle(avu)
def test_embedded_vault_from_dump(self):
avu = AnsibleVaultEncryptedUnicode.from_plaintext('setec astronomy', self.vault, self.vault_secret)
blip = {'stuff1': [{'a dict key': 24},
{'shhh-ssh-secrets': avu,
'nothing to see here': 'move along'}],
'another key': 24.1}
blip = ['some string', 'another string', avu]
stream = NameStringIO()
self._dump_stream(blip, stream, dumper=AnsibleDumper)
stream.seek(0)
stream.seek(0)
loader = self._loader(stream)
data_from_yaml = loader.get_data()
stream2 = NameStringIO(u'')
# verify we can dump the object again
self._dump_stream(data_from_yaml, stream2, dumper=AnsibleDumper)
def test_embedded_vault(self):
plaintext_var = u"""This is the plaintext string."""
tagged_vaulted_var = self._encrypt_plaintext(plaintext_var)
another_vaulted_var = self._encrypt_plaintext(plaintext_var)
different_var = u"""A different string that is not the same as the first one."""
different_vaulted_var = self._encrypt_plaintext(different_var)
yaml_text = u"""---\nwebster: daniel\noed: oxford\nthe_secret: %s\nanother_secret: %s\ndifferent_secret: %s""" % (tagged_vaulted_var,
another_vaulted_var,
different_vaulted_var)
data_from_yaml = self._load_yaml(yaml_text, self.vault_password)
vault_string = data_from_yaml['the_secret']
self.assertEqual(plaintext_var, data_from_yaml['the_secret'])
test_dict = {}
test_dict[vault_string] = 'did this work?'
self.assertEqual(vault_string.data, vault_string)
# This looks weird and useless, but the object in question has a custom __eq__
self.assertEqual(vault_string, vault_string)
another_vault_string = data_from_yaml['another_secret']
different_vault_string = data_from_yaml['different_secret']
self.assertEqual(vault_string, another_vault_string)
self.assertNotEqual(vault_string, different_vault_string)
# More testing of __eq__/__ne__
self.assertTrue('some string' != vault_string)
self.assertNotEqual('some string', vault_string)
# Note: this compares the str/unicode values of objects of different types,
# so we want to test both self == other and other == self
self.assertEqual(plaintext_var, vault_string)
self.assertEqual(vault_string, plaintext_var)
self.assertFalse(plaintext_var != vault_string)
self.assertFalse(vault_string != plaintext_var)
class TestAnsibleLoaderPlay(unittest.TestCase):
def setUp(self):
stream = NameStringIO(u"""
- hosts: localhost
vars:
number: 1
string: Ansible
utf8_string: Cafè Eñyei
dictionary:
webster: daniel
oed: oxford
list:
- a
- b
- 1
- 2
tasks:
- name: Test case
ping:
data: "{{ utf8_string }}"
- name: Test 2
ping:
data: "Cafè Eñyei"
- name: Test 3
command: "printf 'Cafè Eñyei\\n'"
""")
self.play_filename = '/path/to/myplay.yml'
stream.name = self.play_filename
self.loader = AnsibleLoader(stream)
self.data = self.loader.get_single_data()
def tearDown(self):
pass
def test_data_complete(self):
self.assertEqual(len(self.data), 1)
self.assertIsInstance(self.data, list)
self.assertEqual(frozenset(self.data[0].keys()), frozenset((u'hosts', u'vars', u'tasks')))
self.assertEqual(self.data[0][u'hosts'], u'localhost')
self.assertEqual(self.data[0][u'vars'][u'number'], 1)
self.assertEqual(self.data[0][u'vars'][u'string'], u'Ansible')
self.assertEqual(self.data[0][u'vars'][u'utf8_string'], u'Cafè Eñyei')
self.assertEqual(self.data[0][u'vars'][u'dictionary'], {
u'webster': u'daniel',
u'oed': u'oxford'
})
self.assertEqual(self.data[0][u'vars'][u'list'], [u'a', u'b', 1, 2])
self.assertEqual(self.data[0][u'tasks'], [
{u'name': u'Test case', u'ping': {u'data': u'{{ utf8_string }}'}},
{u'name': u'Test 2', u'ping': {u'data': u'Cafè Eñyei'}},
{u'name': u'Test 3', u'command': u'printf \'Cafè Eñyei\n\''},
])
def walk(self, data):
# Make sure there are no byte strings in the data
self.assertNotIsInstance(data, bytes)
# Descend into various container types
if isinstance(data, str):
# strings are a sequence so we have to be explicit here
return
elif isinstance(data, (Sequence, Set)):
for element in data:
self.walk(element)
elif isinstance(data, Mapping):
for k, v in data.items():
self.walk(k)
self.walk(v)
# Scalars were all checked so we're good to go
return
def test_no_str_in_data(self):
# Checks that no strings in the parsed data are byte strings
self.walk(self.data)
def check_vars(self):
# Numbers don't have line/col information yet
# self.assertEqual(self.data[0][u'vars'][u'number'].ansible_pos, (self.play_filename, 4, 21))
self.assertEqual(self.data[0][u'vars'][u'string'].ansible_pos, (self.play_filename, 5, 29))
self.assertEqual(self.data[0][u'vars'][u'utf8_string'].ansible_pos, (self.play_filename, 6, 34))
self.assertEqual(self.data[0][u'vars'][u'dictionary'].ansible_pos, (self.play_filename, 8, 23))
self.assertEqual(self.data[0][u'vars'][u'dictionary'][u'webster'].ansible_pos, (self.play_filename, 8, 32))
self.assertEqual(self.data[0][u'vars'][u'dictionary'][u'oed'].ansible_pos, (self.play_filename, 9, 28))
self.assertEqual(self.data[0][u'vars'][u'list'].ansible_pos, (self.play_filename, 11, 23))
self.assertEqual(self.data[0][u'vars'][u'list'][0].ansible_pos, (self.play_filename, 11, 25))
self.assertEqual(self.data[0][u'vars'][u'list'][1].ansible_pos, (self.play_filename, 12, 25))
# Numbers don't have line/col info yet
# self.assertEqual(self.data[0][u'vars'][u'list'][2].ansible_pos, (self.play_filename, 13, 25))
# self.assertEqual(self.data[0][u'vars'][u'list'][3].ansible_pos, (self.play_filename, 14, 25))
def check_tasks(self):
#
# First Task
#
self.assertEqual(self.data[0][u'tasks'][0].ansible_pos, (self.play_filename, 16, 23))
self.assertEqual(self.data[0][u'tasks'][0][u'name'].ansible_pos, (self.play_filename, 16, 29))
self.assertEqual(self.data[0][u'tasks'][0][u'ping'].ansible_pos, (self.play_filename, 18, 25))
self.assertEqual(self.data[0][u'tasks'][0][u'ping'][u'data'].ansible_pos, (self.play_filename, 18, 31))
#
# Second Task
#
self.assertEqual(self.data[0][u'tasks'][1].ansible_pos, (self.play_filename, 20, 23))
self.assertEqual(self.data[0][u'tasks'][1][u'name'].ansible_pos, (self.play_filename, 20, 29))
self.assertEqual(self.data[0][u'tasks'][1][u'ping'].ansible_pos, (self.play_filename, 22, 25))
self.assertEqual(self.data[0][u'tasks'][1][u'ping'][u'data'].ansible_pos, (self.play_filename, 22, 31))
#
# Third Task
#
self.assertEqual(self.data[0][u'tasks'][2].ansible_pos, (self.play_filename, 24, 23))
self.assertEqual(self.data[0][u'tasks'][2][u'name'].ansible_pos, (self.play_filename, 24, 29))
self.assertEqual(self.data[0][u'tasks'][2][u'command'].ansible_pos, (self.play_filename, 25, 32))
def test_line_numbers(self):
# Check the line/column numbers are correct
# Note: Remember, currently dicts begin at the start of their first entry
self.assertEqual(self.data[0].ansible_pos, (self.play_filename, 2, 19))
self.assertEqual(self.data[0][u'hosts'].ansible_pos, (self.play_filename, 2, 26))
self.assertEqual(self.data[0][u'vars'].ansible_pos, (self.play_filename, 4, 21))
self.check_vars()
self.assertEqual(self.data[0][u'tasks'].ansible_pos, (self.play_filename, 16, 21))
self.check_tasks()
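# As the assertions above show, AnsibleLoader attaches an 'ansible_pos' attribute of the
# form (filename, line, column) to the strings, dicts and lists it constructs; plain
# numbers do not currently carry position info (hence the commented-out checks).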
| 16,994 | Python | .py | 335 | 40.328358 | 144 | 0.609523 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,741 | test_vault.py | ansible_ansible/test/units/parsing/vault/test_vault.py |
# -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import io
import os
import tempfile
from binascii import hexlify
import pytest
import unittest
from unittest.mock import patch, MagicMock
from ansible import errors
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.parsing import vault
from units.mock.loader import DictDataLoader
from units.mock.vault_helper import TextVaultSecret
class TestUnhexlify(unittest.TestCase):
def test(self):
b_plain_data = b'some text to hexlify'
b_data = hexlify(b_plain_data)
res = vault._unhexlify(b_data)
self.assertEqual(res, b_plain_data)
def test_odd_length(self):
b_data = b'123456789abcdefghijklmnopqrstuvwxyz'
self.assertRaisesRegex(vault.AnsibleVaultFormatError,
'.*Vault format unhexlify error.*',
vault._unhexlify,
b_data)
def test_nonhex(self):
b_data = b'6z36316566653264333665333637623064303639353237620a636366633565663263336335656532'
self.assertRaisesRegex(vault.AnsibleVaultFormatError,
'.*Vault format unhexlify error.*Non-hexadecimal digit found',
vault._unhexlify,
b_data)
class TestParseVaulttext(unittest.TestCase):
def test(self):
vaulttext_envelope = u"""$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138"""
b_vaulttext_envelope = to_bytes(vaulttext_envelope, errors='strict', encoding='utf-8')
b_vaulttext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext_envelope)
res = vault.parse_vaulttext(b_vaulttext)
self.assertIsInstance(res[0], bytes)
self.assertIsInstance(res[1], bytes)
self.assertIsInstance(res[2], bytes)
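# In short: parse_vaulttext_envelope() strips the '$ANSIBLE_VAULT;<version>;<cipher>' header
# line, and parse_vaulttext() unhexlifies the remaining payload into its salt, HMAC and
# ciphertext byte strings (the exact tuple ordering is defined by parse_vaulttext itself).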
def test_non_hex(self):
vaulttext_envelope = u"""$ANSIBLE_VAULT;1.1;AES256
3336396J326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138"""
b_vaulttext_envelope = to_bytes(vaulttext_envelope, errors='strict', encoding='utf-8')
b_vaulttext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext_envelope)
self.assertRaisesRegex(vault.AnsibleVaultFormatError,
'.*Vault format unhexlify error.*Non-hexadecimal digit found',
vault.parse_vaulttext,
b_vaulttext_envelope)
class TestVaultSecret(unittest.TestCase):
def test(self):
secret = vault.VaultSecret()
secret.load()
self.assertIsNone(secret._bytes)
def test_bytes(self):
some_text = u'私はガラスを食べられます。それは私を傷つけません。'
_bytes = to_bytes(some_text)
secret = vault.VaultSecret(_bytes)
secret.load()
self.assertEqual(secret.bytes, _bytes)
class TestPromptVaultSecret(unittest.TestCase):
def test_empty_prompt_formats(self):
secret = vault.PromptVaultSecret(vault_id='test_id', prompt_formats=[])
secret.load()
self.assertIsNone(secret._bytes)
@patch('ansible.parsing.vault.display.prompt', return_value='the_password')
def test_prompt_formats_none(self, mock_display_prompt):
secret = vault.PromptVaultSecret(vault_id='test_id')
secret.load()
self.assertEqual(secret._bytes, b'the_password')
@patch('ansible.parsing.vault.display.prompt', return_value='the_password')
def test_custom_prompt(self, mock_display_prompt):
secret = vault.PromptVaultSecret(vault_id='test_id',
prompt_formats=['The cow flies at midnight: '])
secret.load()
self.assertEqual(secret._bytes, b'the_password')
@patch('ansible.parsing.vault.display.prompt', side_effect=EOFError)
def test_prompt_eoferror(self, mock_display_prompt):
secret = vault.PromptVaultSecret(vault_id='test_id')
self.assertRaisesRegex(vault.AnsibleVaultError,
'EOFError.*test_id',
secret.load)
@patch('ansible.parsing.vault.display.prompt', side_effect=['first_password', 'second_password'])
def test_prompt_passwords_dont_match(self, mock_display_prompt):
secret = vault.PromptVaultSecret(vault_id='test_id',
prompt_formats=['Vault password: ',
'Confirm Vault password: '])
self.assertRaisesRegex(errors.AnsibleError,
'Passwords do not match',
secret.load)
class TestFileVaultSecret(unittest.TestCase):
def setUp(self):
self.vault_password = "test-vault-password"
text_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('foo', text_secret)]
def test(self):
secret = vault.FileVaultSecret()
self.assertIsNone(secret._bytes)
self.assertIsNone(secret._text)
def test_repr_empty(self):
secret = vault.FileVaultSecret()
self.assertEqual(repr(secret), "FileVaultSecret()")
def test_repr(self):
tmp_file = tempfile.NamedTemporaryFile(delete=False)
fake_loader = DictDataLoader({tmp_file.name: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=tmp_file.name)
filename = tmp_file.name
tmp_file.close()
self.assertEqual(repr(secret), "FileVaultSecret(filename='%s')" % filename)
def test_empty_bytes(self):
secret = vault.FileVaultSecret()
self.assertIsNone(secret.bytes)
def test_file(self):
password = 'some password'
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(to_bytes(password))
tmp_file.close()
fake_loader = DictDataLoader({tmp_file.name: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=tmp_file.name)
secret.load()
os.unlink(tmp_file.name)
self.assertEqual(secret.bytes, to_bytes(password))
def test_file_empty(self):
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(to_bytes(''))
tmp_file.close()
fake_loader = DictDataLoader({tmp_file.name: ''})
secret = vault.FileVaultSecret(loader=fake_loader, filename=tmp_file.name)
self.assertRaisesRegex(vault.AnsibleVaultPasswordError,
'Invalid vault password was provided from file.*%s' % tmp_file.name,
secret.load)
os.unlink(tmp_file.name)
def test_file_encrypted(self):
vault_password = "test-vault-password"
text_secret = TextVaultSecret(vault_password)
vault_secrets = [('foo', text_secret)]
password = 'some password'
# 'some password' encrypted with 'test-vault-password'
password_file_content = """$ANSIBLE_VAULT;1.1;AES256
61393863643638653437313566313632306462383837303132346434616433313438353634613762
3334363431623364386164616163326537366333353663650a663634306232363432626162353665
39623061353266373631636331643761306665343731376633623439313138396330346237653930
6432643864346136640a653364386634666461306231353765636662316335613235383565306437
3737
"""
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(to_bytes(password_file_content))
tmp_file.close()
fake_loader = DictDataLoader({tmp_file.name: 'sdfadf'})
fake_loader._vault.secrets = vault_secrets
secret = vault.FileVaultSecret(loader=fake_loader, filename=tmp_file.name)
secret.load()
os.unlink(tmp_file.name)
self.assertEqual(secret.bytes, to_bytes(password))
def test_file_not_a_directory(self):
filename = '/dev/null/foobar'
fake_loader = DictDataLoader({filename: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=filename)
self.assertRaisesRegex(errors.AnsibleError,
'.*Could not read vault password file.*/dev/null/foobar.*Not a directory',
secret.load)
def test_file_not_found(self):
tmp_file = tempfile.NamedTemporaryFile()
filename = os.path.realpath(tmp_file.name)
tmp_file.close()
fake_loader = DictDataLoader({filename: 'sdfadf'})
secret = vault.FileVaultSecret(loader=fake_loader, filename=filename)
self.assertRaisesRegex(errors.AnsibleError,
'.*Could not read vault password file.*%s.*' % filename,
secret.load)
class TestScriptVaultSecret(unittest.TestCase):
def test(self):
secret = vault.ScriptVaultSecret()
self.assertIsNone(secret._bytes)
self.assertIsNone(secret._text)
def _mock_popen(self, mock_popen, return_code=0, stdout=b'', stderr=b''):
def communicate():
return stdout, stderr
mock_popen.return_value = MagicMock(returncode=return_code)
mock_popen_instance = mock_popen.return_value
mock_popen_instance.communicate = communicate
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file(self, mock_popen):
self._mock_popen(mock_popen, stdout=b'some_password')
secret = vault.ScriptVaultSecret()
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=True)
secret.load()
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file_empty(self, mock_popen):
self._mock_popen(mock_popen, stdout=b'')
secret = vault.ScriptVaultSecret()
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=True)
self.assertRaisesRegex(vault.AnsibleVaultPasswordError,
'Invalid vault password was provided from script',
secret.load)
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file_os_error(self, mock_popen):
self._mock_popen(mock_popen)
mock_popen.side_effect = OSError('That is not an executable')
secret = vault.ScriptVaultSecret()
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=True)
self.assertRaisesRegex(errors.AnsibleError,
'Problem running vault password script.*',
secret.load)
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file_not_executable(self, mock_popen):
self._mock_popen(mock_popen)
secret = vault.ScriptVaultSecret()
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=False)
self.assertRaisesRegex(vault.AnsibleVaultError,
'The vault password script .* was not executable',
secret.load)
@patch('ansible.parsing.vault.subprocess.Popen')
def test_read_file_non_zero_return_code(self, mock_popen):
stderr = b'That did not work for a random reason'
rc = 37
self._mock_popen(mock_popen, return_code=rc, stderr=stderr)
secret = vault.ScriptVaultSecret(filename='/dev/null/some_vault_secret')
with patch.object(secret, 'loader') as mock_loader:
mock_loader.is_executable = MagicMock(return_value=True)
self.assertRaisesRegex(errors.AnsibleError,
r'Vault password script.*returned non-zero \(%s\): %s' % (rc, stderr),
secret.load)
class TestScriptIsClient(unittest.TestCase):
def test_randomname(self):
filename = 'randomname'
res = vault.script_is_client(filename)
self.assertFalse(res)
def test_something_dash_client(self):
filename = 'something-client'
res = vault.script_is_client(filename)
self.assertTrue(res)
def test_something_dash_client_somethingelse(self):
filename = 'something-client-somethingelse'
res = vault.script_is_client(filename)
self.assertFalse(res)
def test_something_dash_client_py(self):
filename = 'something-client.py'
res = vault.script_is_client(filename)
self.assertTrue(res)
def test_full_path_something_dash_client_py(self):
filename = '/foo/bar/something-client.py'
res = vault.script_is_client(filename)
self.assertTrue(res)
def test_full_path_something_dash_client(self):
filename = '/foo/bar/something-client'
res = vault.script_is_client(filename)
self.assertTrue(res)
def test_full_path_something_dash_client_in_dir(self):
filename = '/foo/bar/something-client/but/not/filename'
res = vault.script_is_client(filename)
self.assertFalse(res)
class TestGetFileVaultSecret(unittest.TestCase):
def test_file(self):
password = 'some password'
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(to_bytes(password))
tmp_file.close()
fake_loader = DictDataLoader({tmp_file.name: 'sdfadf'})
secret = vault.get_file_vault_secret(filename=tmp_file.name, loader=fake_loader)
secret.load()
os.unlink(tmp_file.name)
self.assertEqual(secret.bytes, to_bytes(password))
def test_file_not_a_directory(self):
filename = '/dev/null/foobar'
fake_loader = DictDataLoader({filename: 'sdfadf'})
self.assertRaisesRegex(errors.AnsibleError,
'.*The vault password file %s was not found.*' % filename,
vault.get_file_vault_secret,
filename=filename,
loader=fake_loader)
def test_file_not_found(self):
tmp_file = tempfile.NamedTemporaryFile()
filename = os.path.realpath(tmp_file.name)
tmp_file.close()
fake_loader = DictDataLoader({filename: 'sdfadf'})
self.assertRaisesRegex(errors.AnsibleError,
'.*The vault password file %s was not found.*' % filename,
vault.get_file_vault_secret,
filename=filename,
loader=fake_loader)
class TestVaultIsEncrypted(unittest.TestCase):
def test_bytes_not_encrypted(self):
b_data = b"foobar"
self.assertFalse(vault.is_encrypted(b_data))
def test_bytes_encrypted(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
self.assertTrue(vault.is_encrypted(b_data))
def test_text_not_encrypted(self):
b_data = to_text(b"foobar")
self.assertFalse(vault.is_encrypted(b_data))
def test_text_encrypted(self):
b_data = to_text(b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible"))
self.assertTrue(vault.is_encrypted(b_data))
def test_invalid_text_not_ascii(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
self.assertFalse(vault.is_encrypted(data))
def test_invalid_bytes_not_ascii(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
b_data = to_bytes(data, encoding='utf-8')
self.assertFalse(vault.is_encrypted(b_data))
class TestVaultIsEncryptedFile(unittest.TestCase):
def test_binary_file_handle_not_encrypted(self):
b_data = b"foobar"
b_data_fo = io.BytesIO(b_data)
self.assertFalse(vault.is_encrypted_file(b_data_fo))
def test_text_file_handle_not_encrypted(self):
data = u"foobar"
data_fo = io.StringIO(data)
self.assertFalse(vault.is_encrypted_file(data_fo))
def test_binary_file_handle_encrypted(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo))
def test_text_file_handle_encrypted(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % to_text(hexlify(b"ansible"))
data_fo = io.StringIO(data)
self.assertTrue(vault.is_encrypted_file(data_fo))
def test_binary_file_handle_invalid(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
b_data = to_bytes(data)
b_data_fo = io.BytesIO(b_data)
self.assertFalse(vault.is_encrypted_file(b_data_fo, count=-1))
def test_text_file_handle_invalid(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
data_fo = io.StringIO(data)
self.assertFalse(vault.is_encrypted_file(data_fo, count=-1))
def test_file_already_read_from_finds_header(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
b_data_fo = io.BytesIO(b_data)
b_data_fo.read(42) # Arbitrary number
self.assertTrue(vault.is_encrypted_file(b_data_fo))
def test_file_already_read_from_saves_file_pos(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
b_data_fo = io.BytesIO(b_data)
b_data_fo.read(69) # Arbitrary number
vault.is_encrypted_file(b_data_fo)
self.assertEqual(b_data_fo.tell(), 69)
def test_file_with_offset(self):
b_data = b"JUNK$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo, start_pos=4))
def test_file_with_count(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
vault_length = len(b_data)
b_data = b_data + u'ァ ア'.encode('utf-8')
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo, count=vault_length))
def test_file_with_offset_and_count(self):
b_data = b"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible\ntesting\nfile pos")
vault_length = len(b_data)
b_data = b'JUNK' + b_data + u'ァ ア'.encode('utf-8')
b_data_fo = io.BytesIO(b_data)
self.assertTrue(vault.is_encrypted_file(b_data_fo, start_pos=4, count=vault_length))
@pytest.mark.skipif(not vault.HAS_CRYPTOGRAPHY,
reason="Skipping cryptography tests because cryptography is not installed")
class TestVaultCipherAes256(unittest.TestCase):
def setUp(self):
self.vault_cipher = vault.VaultAES256()
def test(self):
self.assertIsInstance(self.vault_cipher, vault.VaultAES256)
# TODO: tag these as slow tests
def test_create_key_cryptography(self):
b_password = b'hunter42'
b_salt = os.urandom(32)
b_key_cryptography = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_cryptography, bytes)
def test_create_key_known_cryptography(self):
b_password = b'hunter42'
# A fixed salt
b_salt = b'q' * 32 # q is the most random letter.
b_key_1 = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_1, bytes)
# verify we get the same answer
# we could potentially run a few iterations of this and time it to see if it's roughly constant time
# and/or that it exceeds some minimal time, but that would likely cause unreliable failures, especially in CI
b_key_2 = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_2, bytes)
self.assertEqual(b_key_1, b_key_2)
def test_is_equal_is_equal(self):
self.assertTrue(self.vault_cipher._is_equal(b'abcdefghijklmnopqrstuvwxyz', b'abcdefghijklmnopqrstuvwxyz'))
def test_is_equal_unequal_length(self):
self.assertFalse(self.vault_cipher._is_equal(b'abcdefghijklmnopqrstuvwxyz', b'abcdefghijklmnopqrstuvwx and sometimes y'))
def test_is_equal_not_equal(self):
self.assertFalse(self.vault_cipher._is_equal(b'abcdefghijklmnopqrstuvwxyz', b'AbcdefghijKlmnopQrstuvwxZ'))
def test_is_equal_empty(self):
self.assertTrue(self.vault_cipher._is_equal(b'', b''))
def test_is_equal_non_ascii_equal(self):
utf8_data = to_bytes(u'私はガラスを食べられます。それは私を傷つけません。')
self.assertTrue(self.vault_cipher._is_equal(utf8_data, utf8_data))
def test_is_equal_non_ascii_unequal(self):
utf8_data = to_bytes(u'私はガラスを食べられます。それは私を傷つけません。')
utf8_data2 = to_bytes(u'Pot să mănânc sticlă și ea nu mă rănește.')
# Test for the len optimization path
self.assertFalse(self.vault_cipher._is_equal(utf8_data, utf8_data2))
# Test for the slower, char by char comparison path
self.assertFalse(self.vault_cipher._is_equal(utf8_data, utf8_data[:-1] + b'P'))
def test_is_equal_non_bytes(self):
""" Anything not a byte string should raise a TypeError """
self.assertRaises(TypeError, self.vault_cipher._is_equal, u"One fish", b"two fish")
self.assertRaises(TypeError, self.vault_cipher._is_equal, b"One fish", u"two fish")
self.assertRaises(TypeError, self.vault_cipher._is_equal, 1, b"red fish")
self.assertRaises(TypeError, self.vault_cipher._is_equal, b"blue fish", 2)
class TestMatchSecrets(unittest.TestCase):
def test_empty_tuple(self):
secrets = [tuple()]
vault_ids = ['vault_id_1']
self.assertRaises(ValueError,
vault.match_secrets,
secrets, vault_ids)
def test_empty_secrets(self):
matches = vault.match_secrets([], ['vault_id_1'])
self.assertEqual(matches, [])
def test_single_match(self):
secret = TextVaultSecret('password')
matches = vault.match_secrets([('default', secret)], ['default'])
self.assertEqual(matches, [('default', secret)])
def test_no_matches(self):
secret = TextVaultSecret('password')
matches = vault.match_secrets([('default', secret)], ['not_default'])
self.assertEqual(matches, [])
def test_multiple_matches(self):
secrets = [('vault_id1', TextVaultSecret('password1')),
('vault_id2', TextVaultSecret('password2')),
('vault_id1', TextVaultSecret('password3')),
('vault_id4', TextVaultSecret('password4'))]
vault_ids = ['vault_id1', 'vault_id4']
matches = vault.match_secrets(secrets, vault_ids)
self.assertEqual(len(matches), 3)
expected = [('vault_id1', TextVaultSecret('password1')),
('vault_id1', TextVaultSecret('password3')),
('vault_id4', TextVaultSecret('password4'))]
self.assertEqual([x for x, y in matches],
[a for a, b in expected])
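# Sketch of the behaviour pinned down above: vault.match_secrets(secrets, vault_ids) filters
# a list of (vault_id, secret) tuples, keeping every entry whose id is in vault_ids, with
# duplicates and original ordering preserved, e.g.
#   vault.match_secrets([('default', s1), ('other', s2)], ['default'])  # -> [('default', s1)]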
@pytest.mark.skipif(not vault.HAS_CRYPTOGRAPHY,
reason="Skipping cryptography tests because cryptography is not installed")
class TestVaultLib(unittest.TestCase):
def setUp(self):
self.vault_password = "test-vault-password"
text_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('default', text_secret),
('test_id', text_secret)]
self.v = vault.VaultLib(self.vault_secrets)
def _vault_secrets_from_password(self, vault_id, password):
return [(vault_id, TextVaultSecret(password))]
def test_encrypt(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext)
self.assertIsInstance(b_vaulttext, bytes)
b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
self.assertEqual(b_vaulttext[:len(b_header)], b_header)
def test_encrypt_vault_id(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext, vault_id='test_id')
self.assertIsInstance(b_vaulttext, bytes)
b_header = b'$ANSIBLE_VAULT;1.2;AES256;test_id\n'
self.assertEqual(b_vaulttext[:len(b_header)], b_header)
def test_encrypt_bytes(self):
plaintext = to_bytes(u'Some text to encrypt in a café')
b_vaulttext = self.v.encrypt(plaintext)
self.assertIsInstance(b_vaulttext, bytes)
b_header = b'$ANSIBLE_VAULT;1.1;AES256\n'
self.assertEqual(b_vaulttext[:len(b_header)], b_header)
def test_encrypt_no_secret_empty_secrets(self):
vault_secrets = []
v = vault.VaultLib(vault_secrets)
plaintext = u'Some text to encrypt in a café'
self.assertRaisesRegex(vault.AnsibleVaultError,
'.*A vault password must be specified to encrypt data.*',
v.encrypt,
plaintext)
def test_format_vaulttext_envelope(self):
cipher_name = "TEST"
b_ciphertext = b"ansible"
b_vaulttext = vault.format_vaulttext_envelope(b_ciphertext,
cipher_name,
version=self.v.b_version,
vault_id='default')
b_lines = b_vaulttext.split(b'\n')
self.assertGreater(len(b_lines), 1, msg="failed to properly add header")
b_header = b_lines[0]
# self.assertTrue(b_header.endswith(b';TEST'), msg="header does not end with cipher name")
b_header_parts = b_header.split(b';')
self.assertEqual(len(b_header_parts), 4, msg="header has the wrong number of parts")
self.assertEqual(b_header_parts[0], b'$ANSIBLE_VAULT', msg="header does not start with $ANSIBLE_VAULT")
self.assertEqual(b_header_parts[1], self.v.b_version, msg="header version is incorrect")
self.assertEqual(b_header_parts[2], b'TEST', msg="header does not end with cipher name")
# And just to verify, let's parse the results and compare
b_ciphertext2, b_version2, cipher_name2, vault_id2 = \
vault.parse_vaulttext_envelope(b_vaulttext)
self.assertEqual(b_ciphertext, b_ciphertext2)
self.assertEqual(self.v.b_version, b_version2)
self.assertEqual(cipher_name, cipher_name2)
self.assertEqual('default', vault_id2)
def test_parse_vaulttext_envelope(self):
b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
b_ciphertext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext)
b_lines = b_ciphertext.split(b'\n')
self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
self.assertEqual(cipher_name, u'TEST', msg="cipher name was not properly set")
self.assertEqual(b_version, b"9.9", msg="version was not properly set")
def test_parse_vaulttext_envelope_crlf(self):
b_vaulttext = b"$ANSIBLE_VAULT;9.9;TEST\r\nansible"
b_ciphertext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext)
b_lines = b_ciphertext.split(b'\n')
self.assertEqual(b_lines[0], b"ansible", msg="Payload was not properly split from the header")
self.assertEqual(cipher_name, u'TEST', msg="cipher name was not properly set")
self.assertEqual(b_version, b"9.9", msg="version was not properly set")
def test_encrypt_decrypt_aes256(self):
self.v.cipher_name = u'AES256'
plaintext = u"foobar"
b_vaulttext = self.v.encrypt(plaintext)
b_plaintext = self.v.decrypt(b_vaulttext)
self.assertNotEqual(b_vaulttext, b"foobar", msg="encryption failed")
self.assertEqual(b_plaintext, b"foobar", msg="decryption failed")
def test_encrypt_decrypt_aes256_none_secrets(self):
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
v = vault.VaultLib(vault_secrets)
plaintext = u"foobar"
b_vaulttext = v.encrypt(plaintext)
# VaultLib will default to empty {} if secrets is None
v_none = vault.VaultLib(None)
# so set secrets None explicitly
v_none.secrets = None
self.assertRaisesRegex(vault.AnsibleVaultError,
'.*A vault password must be specified to decrypt data.*',
v_none.decrypt,
b_vaulttext)
def test_encrypt_decrypt_aes256_empty_secrets(self):
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
v = vault.VaultLib(vault_secrets)
plaintext = u"foobar"
b_vaulttext = v.encrypt(plaintext)
vault_secrets_empty = []
v_none = vault.VaultLib(vault_secrets_empty)
self.assertRaisesRegex(vault.AnsibleVaultError,
'.*Attempting to decrypt but no vault secrets found.*',
v_none.decrypt,
b_vaulttext)
def test_encrypt_decrypt_aes256_multiple_secrets_all_wrong(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext)
vault_secrets = [('default', TextVaultSecret('another-wrong-password')),
('wrong-password', TextVaultSecret('wrong-password'))]
v_multi = vault.VaultLib(vault_secrets)
self.assertRaisesRegex(errors.AnsibleError,
'.*Decryption failed.*',
v_multi.decrypt,
b_vaulttext,
filename='/dev/null/fake/filename')
def test_encrypt_decrypt_aes256_multiple_secrets_one_valid(self):
plaintext = u'Some text to encrypt in a café'
b_vaulttext = self.v.encrypt(plaintext)
correct_secret = TextVaultSecret(self.vault_password)
wrong_secret = TextVaultSecret('wrong-password')
vault_secrets = [('default', wrong_secret),
('correct_secret', correct_secret),
('wrong_secret', wrong_secret)]
v_multi = vault.VaultLib(vault_secrets)
b_plaintext = v_multi.decrypt(b_vaulttext)
self.assertNotEqual(b_vaulttext, to_bytes(plaintext), msg="encryption failed")
self.assertEqual(b_plaintext, to_bytes(plaintext), msg="decryption failed")
def test_encrypt_decrypt_aes256_existing_vault(self):
self.v.cipher_name = u'AES256'
b_orig_plaintext = b"Setec Astronomy"
vaulttext = u"""$ANSIBLE_VAULT;1.1;AES256
33363965326261303234626463623963633531343539616138316433353830356566396130353436
3562643163366231316662386565383735653432386435610a306664636137376132643732393835
63383038383730306639353234326630666539346233376330303938323639306661313032396437
6233623062366136310a633866373936313238333730653739323461656662303864663666653563
3138"""
b_plaintext = self.v.decrypt(vaulttext)
self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")
b_vaulttext = to_bytes(vaulttext, encoding='ascii', errors='strict')
b_plaintext = self.v.decrypt(b_vaulttext)
self.assertEqual(b_plaintext, b_orig_plaintext, msg="decryption failed")
def test_decrypt_and_get_vault_id(self):
b_expected_plaintext = to_bytes('foo bar\n')
vaulttext = """$ANSIBLE_VAULT;1.2;AES256;ansible_devel
65616435333934613466373335363332373764363365633035303466643439313864663837393234
3330656363343637313962633731333237313636633534630a386264363438363362326132363239
39363166646664346264383934393935653933316263333838386362633534326664646166663736
6462303664383765650a356637643633366663643566353036303162386237336233393065393164
6264"""
vault_secrets = self._vault_secrets_from_password('ansible_devel', 'ansible')
v = vault.VaultLib(vault_secrets)
b_vaulttext = to_bytes(vaulttext)
b_plaintext, vault_id_used, vault_secret_used = v.decrypt_and_get_vault_id(b_vaulttext)
self.assertEqual(b_expected_plaintext, b_plaintext)
self.assertEqual(vault_id_used, 'ansible_devel')
self.assertEqual(vault_secret_used.text, 'ansible')
def test_decrypt_non_default_1_2(self):
b_expected_plaintext = to_bytes('foo bar\n')
vaulttext = """$ANSIBLE_VAULT;1.2;AES256;ansible_devel
65616435333934613466373335363332373764363365633035303466643439313864663837393234
3330656363343637313962633731333237313636633534630a386264363438363362326132363239
39363166646664346264383934393935653933316263333838386362633534326664646166663736
6462303664383765650a356637643633366663643566353036303162386237336233393065393164
6264"""
vault_secrets = self._vault_secrets_from_password('default', 'ansible')
v = vault.VaultLib(vault_secrets)
b_vaulttext = to_bytes(vaulttext)
b_plaintext = v.decrypt(b_vaulttext)
self.assertEqual(b_expected_plaintext, b_plaintext)
b_ciphertext, b_version, cipher_name, vault_id = vault.parse_vaulttext_envelope(b_vaulttext)
self.assertEqual('ansible_devel', vault_id)
self.assertEqual(b'1.2', b_version)
def test_decrypt_decrypted(self):
plaintext = u"ansible"
self.assertRaises(errors.AnsibleError, self.v.decrypt, plaintext)
b_plaintext = b"ansible"
self.assertRaises(errors.AnsibleError, self.v.decrypt, b_plaintext)
def test_cipher_not_set(self):
plaintext = u"ansible"
self.v.encrypt(plaintext)
self.assertEqual(self.v.cipher_name, "AES256")
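# --- Editor-added illustrative sketch; not part of the original ansible test file. ---
# A minimal VaultLib round-trip using only names already imported above
# (vault, TextVaultSecret). The vault id 'default', the password 'ansible'
# and the sample plaintext are illustrative values.
def _example_vaultlib_roundtrip():
    secrets = [('default', TextVaultSecret('ansible'))]
    v = vault.VaultLib(secrets)
    b_vaulttext = v.encrypt(u'some plaintext')  # bytes holding the $ANSIBLE_VAULT envelope
    assert vault.is_encrypted(b_vaulttext)
    assert v.decrypt(b_vaulttext) == b'some plaintext'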
| 35,508
|
Python
|
.py
| 647
| 44.163833
| 129
| 0.662448
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,742
|
test_vault_editor.py
|
ansible_ansible/test/units/parsing/vault/test_vault_editor.py
|
# (c) 2014, James Tanner <tanner.jc@gmail.com>
# (c) 2014, James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import tempfile
from io import BytesIO, StringIO
import pytest
import unittest
from unittest.mock import patch
from ansible import errors
from ansible.parsing import vault
from ansible.parsing.vault import VaultLib, VaultEditor, match_encrypt_secret
from ansible.module_utils.common.text.converters import to_bytes, to_text
from units.mock.vault_helper import TextVaultSecret
v11_data = """$ANSIBLE_VAULT;1.1;AES256
62303130653266653331306264616235333735323636616539316433666463323964623162386137
3961616263373033353631316333623566303532663065310a393036623466376263393961326530
64336561613965383835646464623865663966323464653236343638373165343863623638316664
3631633031323837340a396530313963373030343933616133393566366137363761373930663833
3739"""
@pytest.mark.skipif(not vault.HAS_CRYPTOGRAPHY,
reason="Skipping cryptography tests because cryptography is not installed")
class TestVaultEditor(unittest.TestCase):
def setUp(self):
self._test_dir = None
self.vault_password = "test-vault-password"
vault_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('vault_secret', vault_secret),
('default', vault_secret)]
@property
def vault_secret(self):
return match_encrypt_secret(self.vault_secrets)[1]
def tearDown(self):
if self._test_dir:
pass
# shutil.rmtree(self._test_dir)
self._test_dir = None
def _secrets(self, password):
vault_secret = TextVaultSecret(password)
vault_secrets = [('default', vault_secret)]
return vault_secrets
def test_methods_exist(self):
v = vault.VaultEditor(None)
slots = ['create_file',
'decrypt_file',
'edit_file',
'encrypt_file',
'rekey_file',
'read_data',
'write_data']
for slot in slots:
            assert hasattr(v, slot), "VaultEditor is missing the %s method" % slot
def _create_test_dir(self):
suffix = '_ansible_unit_test_%s_' % (self.__class__.__name__)
return tempfile.mkdtemp(suffix=suffix)
def _create_file(self, test_dir, name, content, symlink=False):
file_path = os.path.join(test_dir, name)
with open(file_path, 'wb') as opened_file:
opened_file.write(content)
return file_path
def _vault_editor(self, vault_secrets=None):
if vault_secrets is None:
vault_secrets = self._secrets(self.vault_password)
return VaultEditor(VaultLib(vault_secrets))
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_empty_target(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
mock_sp_call.side_effect = self._faux_command
ve = self._vault_editor()
b_ciphertext = ve._edit_file_helper(src_file_path, self.vault_secret)
self.assertNotEqual(src_contents, b_ciphertext)
def test_stdin_binary(self):
stdin_data = '\0'
fake_stream = StringIO(stdin_data)
fake_stream.buffer = BytesIO(to_bytes(stdin_data))
with patch('sys.stdin', fake_stream):
ve = self._vault_editor()
data = ve.read_data('-')
self.assertEqual(data, b'\0')
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_call_exception(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
error_txt = 'calling editor raised an exception'
mock_sp_call.side_effect = errors.AnsibleError(error_txt)
ve = self._vault_editor()
self.assertRaisesRegex(errors.AnsibleError,
error_txt,
ve._edit_file_helper,
src_file_path,
self.vault_secret)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_symlink_target(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
mock_sp_call.side_effect = self._faux_command
ve = self._vault_editor()
b_ciphertext = ve._edit_file_helper(src_file_link_path, self.vault_secret)
self.assertNotEqual(src_file_contents, b_ciphertext,
'b_ciphertext should be encrypted and not equal to src_contents')
def _faux_editor(self, editor_args, new_src_contents):
if editor_args[0] == 'shred':
return
tmp_path = editor_args[-1]
        # simulate the tmp file being edited
with open(tmp_path, 'wb') as tmp_file:
tmp_file.write(new_src_contents)
def _faux_command(self, tmp_path):
pass
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_no_change(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
# editor invocation doesn't change anything
def faux_editor(editor_args):
self._faux_editor(editor_args, src_file_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve._edit_file_helper(src_file_path, self.vault_secret, existing_data=src_file_contents)
with open(src_file_path, 'rb') as new_target_file:
new_target_file_contents = new_target_file.read()
self.assertEqual(src_file_contents, new_target_file_contents)
def _assert_file_is_encrypted(self, vault_editor, src_file_path, src_contents):
with open(src_file_path, 'rb') as new_src_file:
new_src_file_contents = new_src_file.read()
# TODO: assert that it is encrypted
self.assertTrue(vault.is_encrypted(new_src_file_contents))
src_file_plaintext = vault_editor.vault.decrypt(new_src_file_contents)
# the plaintext should not be encrypted
self.assertFalse(vault.is_encrypted(src_file_plaintext))
# and the new plaintext should match the original
self.assertEqual(src_file_plaintext, src_contents)
def _assert_file_is_link(self, src_file_link_path, src_file_path):
self.assertTrue(os.path.islink(src_file_link_path),
'The dest path (%s) should be a symlink to (%s) but is not' % (src_file_link_path, src_file_path))
def test_rekey_file(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
# FIXME: update to just set self._secrets or just a new vault secret id
new_password = 'password2:electricbugaloo'
new_vault_secret = TextVaultSecret(new_password)
new_vault_secrets = [('default', new_vault_secret)]
ve.rekey_file(src_file_path, vault.match_encrypt_secret(new_vault_secrets)[1])
# FIXME: can just update self._secrets here
new_ve = vault.VaultEditor(VaultLib(new_vault_secrets))
self._assert_file_is_encrypted(new_ve, src_file_path, src_file_contents)
def test_rekey_file_no_new_password(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
self.assertRaisesRegex(errors.AnsibleError,
'The value for the new_password to rekey',
ve.rekey_file,
src_file_path,
None)
def test_rekey_file_not_encrypted(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
new_password = 'password2:electricbugaloo'
self.assertRaisesRegex(errors.AnsibleError,
'input is not vault encrypted data',
ve.rekey_file,
src_file_path, new_password)
def test_plaintext(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
res = ve.plaintext(src_file_path)
self.assertEqual(src_file_contents, res)
def test_plaintext_not_encrypted(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
self.assertRaisesRegex(errors.AnsibleError,
'input is not vault encrypted data',
ve.plaintext,
src_file_path)
def test_encrypt_file(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
self._assert_file_is_encrypted(ve, src_file_path, src_file_contents)
def test_encrypt_file_symlink(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
ve = self._vault_editor()
ve.encrypt_file(src_file_link_path, self.vault_secret)
self._assert_file_is_encrypted(ve, src_file_path, src_file_contents)
self._assert_file_is_encrypted(ve, src_file_link_path, src_file_contents)
self._assert_file_is_link(src_file_link_path, src_file_path)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_no_vault_id(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
ve.edit_file(src_file_path)
with open(src_file_path, 'rb') as new_src_file:
new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.1;AES256' in new_src_file_contents)
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self.assertEqual(src_file_plaintext, new_src_contents)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_with_vault_id(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret,
vault_id='vault_secrets')
ve.edit_file(src_file_path)
with open(src_file_path, 'rb') as new_src_file:
new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.2;AES256;vault_secrets' in new_src_file_contents)
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self.assertEqual(src_file_plaintext, new_src_contents)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_symlink(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
ve.edit_file(src_file_link_path)
with open(src_file_path, 'rb') as new_src_file:
new_src_file_contents = new_src_file.read()
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self._assert_file_is_link(src_file_link_path, src_file_path)
self.assertEqual(src_file_plaintext, new_src_contents)
# self.assertEqual(src_file_plaintext, new_src_contents,
        #                  'The decrypted plaintext of the edited file is not the expected contents.')
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_not_encrypted(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
self.assertRaisesRegex(errors.AnsibleError,
'input is not vault encrypted data',
ve.edit_file,
src_file_path)
def test_create_file_exists(self):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
self.assertRaisesRegex(errors.AnsibleError,
'please use .edit. instead',
ve.create_file,
src_file_path,
self.vault_secret)
def test_decrypt_file_exception(self):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
self.assertRaisesRegex(errors.AnsibleError,
'input is not vault encrypted data',
ve.decrypt_file,
src_file_path)
@patch.object(vault.VaultEditor, '_editor_shell_command')
def test_create_file(self, mock_editor_shell_command):
def sc_side_effect(filename):
return ['touch', filename]
mock_editor_shell_command.side_effect = sc_side_effect
tmp_file = tempfile.NamedTemporaryFile()
os.unlink(tmp_file.name)
_secrets = self._secrets('ansible')
ve = self._vault_editor(_secrets)
ve.create_file(tmp_file.name, vault.match_encrypt_secret(_secrets)[1])
self.assertTrue(os.path.exists(tmp_file.name))
def test_decrypt_1_1(self):
v11_file = tempfile.NamedTemporaryFile(delete=False)
with v11_file as f:
f.write(to_bytes(v11_data))
ve = self._vault_editor(self._secrets("ansible"))
# make sure the password functions for the cipher
ve.decrypt_file(v11_file.name)
# verify decrypted content
with open(v11_file.name, "rb") as f:
fdata = to_text(f.read())
os.unlink(v11_file.name)
assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
def test_real_path_dash(self):
filename = '-'
ve = self._vault_editor()
res = ve._real_path(filename)
self.assertEqual(res, '-')
def test_real_path_not_dash(self):
filename = '/dev/null'
ve = self._vault_editor()
res = ve._real_path(filename)
self.assertNotEqual(res, '-')
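# --- Editor-added illustrative sketch; not part of the original ansible test file. ---
# End-to-end VaultEditor use mirroring the tests above: encrypt a file in place,
# read the plaintext back, then decrypt in place. The directory suffix, file name
# and password are illustrative values.
def _example_vault_editor_roundtrip():
    secret = TextVaultSecret('example-password')
    ve = VaultEditor(VaultLib([('default', secret)]))
    path = os.path.join(tempfile.mkdtemp(suffix='_example'), 'example_vaulted_file')
    with open(path, 'wb') as f:
        f.write(b'some secret data\n')
    ve.encrypt_file(path, secret)  # the file now holds an $ANSIBLE_VAULT envelope
    assert ve.plaintext(path) == b'some secret data\n'
    ve.decrypt_file(path)  # restores the original plaintext on disk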
| 18,393
|
Python
|
.py
| 345
| 43.011594
| 122
| 0.637628
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,743
|
test_addresses.py
|
ansible_ansible/test/units/parsing/utils/test_addresses.py
|
# -*- coding: utf-8 -*-
from __future__ import annotations
import unittest
from ansible.parsing.utils.addresses import parse_address
class TestParseAddress(unittest.TestCase):
tests = {
# IPv4 addresses
'192.0.2.3': ['192.0.2.3', None],
'192.0.2.3:23': ['192.0.2.3', 23],
# IPv6 addresses
'::': ['::', None],
'::1': ['::1', None],
'[::1]:442': ['::1', 442],
'abcd:ef98:7654:3210:abcd:ef98:7654:3210': ['abcd:ef98:7654:3210:abcd:ef98:7654:3210', None],
'[abcd:ef98:7654:3210:abcd:ef98:7654:3210]:42': ['abcd:ef98:7654:3210:abcd:ef98:7654:3210', 42],
'1234:5678:9abc:def0:1234:5678:9abc:def0': ['1234:5678:9abc:def0:1234:5678:9abc:def0', None],
'1234::9abc:def0:1234:5678:9abc:def0': ['1234::9abc:def0:1234:5678:9abc:def0', None],
'1234:5678::def0:1234:5678:9abc:def0': ['1234:5678::def0:1234:5678:9abc:def0', None],
'1234:5678:9abc::1234:5678:9abc:def0': ['1234:5678:9abc::1234:5678:9abc:def0', None],
'1234:5678:9abc:def0::5678:9abc:def0': ['1234:5678:9abc:def0::5678:9abc:def0', None],
'1234:5678:9abc:def0:1234::9abc:def0': ['1234:5678:9abc:def0:1234::9abc:def0', None],
'1234:5678:9abc:def0:1234:5678::def0': ['1234:5678:9abc:def0:1234:5678::def0', None],
'1234:5678:9abc:def0:1234:5678::': ['1234:5678:9abc:def0:1234:5678::', None],
'::9abc:def0:1234:5678:9abc:def0': ['::9abc:def0:1234:5678:9abc:def0', None],
'0:0:0:0:0:ffff:1.2.3.4': ['0:0:0:0:0:ffff:1.2.3.4', None],
'0:0:0:0:0:0:1.2.3.4': ['0:0:0:0:0:0:1.2.3.4', None],
'::ffff:1.2.3.4': ['::ffff:1.2.3.4', None],
'::1.2.3.4': ['::1.2.3.4', None],
'1234::': ['1234::', None],
# Hostnames
'some-host': ['some-host', None],
'some-host:80': ['some-host', 80],
'some.host.com:492': ['some.host.com', 492],
'[some.host.com]:493': ['some.host.com', 493],
'a-b.3foo_bar.com:23': ['a-b.3foo_bar.com', 23],
u'fóöbär': [u'fóöbär', None],
u'fóöbär:32': [u'fóöbär', 32],
u'fóöbär.éxàmplê.com:632': [u'fóöbär.éxàmplê.com', 632],
# Various errors
'': [None, None],
'some..host': [None, None],
'some.': [None, None],
'[example.com]': [None, None],
'some-': [None, None],
'some-.foo.com': [None, None],
'some.-foo.com': [None, None],
}
range_tests = {
'192.0.2.[3:10]': ['192.0.2.[3:10]', None],
'192.0.2.[3:10]:23': ['192.0.2.[3:10]', 23],
'abcd:ef98::7654:[1:9]': ['abcd:ef98::7654:[1:9]', None],
'[abcd:ef98::7654:[6:32]]:2222': ['abcd:ef98::7654:[6:32]', 2222],
'[abcd:ef98::7654:[9ab3:fcb7]]:2222': ['abcd:ef98::7654:[9ab3:fcb7]', 2222],
u'fóöb[a:c]r.éxàmplê.com:632': [u'fóöb[a:c]r.éxàmplê.com', 632],
'[a:b]foo.com': ['[a:b]foo.com', None],
'foo[a:b].com': ['foo[a:b].com', None],
'foo[a:b]:42': ['foo[a:b]', 42],
'foo[a-b]-.com': [None, None],
'foo[a-b]:32': [None, None],
'foo[x-y]': [None, None],
}
def test_without_ranges(self):
for t in self.tests:
test = self.tests[t]
try:
(host, port) = parse_address(t)
except Exception:
host = None
port = None
assert host == test[0]
assert port == test[1]
def test_with_ranges(self):
for t in self.range_tests:
test = self.range_tests[t]
try:
(host, port) = parse_address(t, allow_ranges=True)
except Exception:
host = None
port = None
assert host == test[0]
assert port == test[1]
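# --- Editor-added illustrative sketch; not part of the original ansible test file. ---
# parse_address() returns a (host, port) tuple; with allow_ranges=True it also
# accepts inventory range expressions. Expected values are taken from the tables above.
def _example_parse_address():
    assert parse_address('192.0.2.3:23') == ('192.0.2.3', 23)
    assert parse_address('[::1]:442') == ('::1', 442)
    assert parse_address('some.host.com:492') == ('some.host.com', 492)
    assert parse_address('192.0.2.[3:10]', allow_ranges=True) == ('192.0.2.[3:10]', None)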
| 3,798
|
Python
|
.py
| 81
| 36.975309
| 104
| 0.522225
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,744
|
test_jsonify.py
|
ansible_ansible/test/units/parsing/utils/test_jsonify.py
|
# -*- coding: utf-8 -*-
# (c) 2016, James Cammarata <jimi@sngx.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.parsing.utils.jsonify import jsonify
class TestJsonify(unittest.TestCase):
def test_jsonify_simple(self):
self.assertEqual(jsonify(dict(a=1, b=2, c=3)), '{"a": 1, "b": 2, "c": 3}')
def test_jsonify_simple_format(self):
res = jsonify(dict(a=1, b=2, c=3), format=True)
cleaned = "".join([x.strip() for x in res.splitlines()])
self.assertEqual(cleaned, '{"a": 1,"b": 2,"c": 3}')
def test_jsonify_unicode(self):
self.assertEqual(jsonify(dict(toshio=u'くらとみ')), u'{"toshio": "くらとみ"}')
def test_jsonify_empty(self):
self.assertEqual(jsonify(None), '{}')
| 1,420
|
Python
|
.py
| 31
| 42
| 82
| 0.703514
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,745
|
test_yaml.py
|
ansible_ansible/test/units/parsing/utils/test_yaml.py
|
# -*- coding: utf-8 -*-
# (c) 2017, Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import pytest
from ansible.errors import AnsibleParserError
from ansible.parsing.utils.yaml import from_yaml
def test_from_yaml_simple():
assert from_yaml(u'---\n- test: 1\n test2: "2"\n- caf\xe9: "caf\xe9"') == [{u'test': 1, u'test2': u"2"}, {u"caf\xe9": u"caf\xe9"}]
def test_bad_yaml():
with pytest.raises(AnsibleParserError):
from_yaml(u'foo: bar: baz')
| 1,123
|
Python
|
.py
| 26
| 41.307692
| 135
| 0.736697
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,746
|
test_host.py
|
ansible_ansible/test/units/inventory/test_host.py
|
# Copyright 2015 Marius Gedminas <marius@gedmin.as>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# for __setstate__/__getstate__ tests
from __future__ import annotations
import pickle
import unittest
from ansible.inventory.group import Group
from ansible.inventory.host import Host
class TestHost(unittest.TestCase):
ansible_port = 22
def setUp(self):
self.hostA = Host('a')
self.hostB = Host('b')
def test_equality(self):
self.assertEqual(self.hostA, self.hostA)
self.assertNotEqual(self.hostA, self.hostB)
self.assertNotEqual(self.hostA, Host('a'))
def test_hashability(self):
# equality implies the hash values are the same
self.assertEqual(hash(self.hostA), hash(Host('a')))
def test_get_vars(self):
host_vars = self.hostA.get_vars()
self.assertIsInstance(host_vars, dict)
def test_repr(self):
host_repr = repr(self.hostA)
self.assertIsInstance(host_repr, str)
def test_add_group(self):
group = Group('some_group')
group_len = len(self.hostA.groups)
self.hostA.add_group(group)
self.assertEqual(len(self.hostA.groups), group_len + 1)
def test_get_groups(self):
group = Group('some_group')
self.hostA.add_group(group)
groups = self.hostA.get_groups()
self.assertEqual(len(groups), 1)
for _group in groups:
self.assertIsInstance(_group, Group)
def test_equals_none(self):
other = None
assert not (self.hostA == other)
assert not (other == self.hostA)
assert self.hostA != other
assert other != self.hostA
self.assertNotEqual(self.hostA, other)
def test_serialize(self):
group = Group('some_group')
self.hostA.add_group(group)
data = self.hostA.serialize()
self.assertIsInstance(data, dict)
def test_serialize_then_deserialize(self):
group = Group('some_group')
self.hostA.add_group(group)
hostA_data = self.hostA.serialize()
hostA_clone = Host()
hostA_clone.deserialize(hostA_data)
self.assertEqual(self.hostA, hostA_clone)
def test_set_state(self):
group = Group('some_group')
self.hostA.add_group(group)
pickled_hostA = pickle.dumps(self.hostA)
hostA_clone = pickle.loads(pickled_hostA)
self.assertEqual(self.hostA, hostA_clone)
class TestHostWithPort(TestHost):
ansible_port = 8822
def setUp(self):
self.hostA = Host(name='a', port=self.ansible_port)
self.hostB = Host(name='b', port=self.ansible_port)
def test_get_vars_ansible_port(self):
host_vars = self.hostA.get_vars()
self.assertEqual(host_vars['ansible_port'], self.ansible_port)
| 3,409
|
Python
|
.py
| 85
| 33.729412
| 70
| 0.678994
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,747
|
test_group.py
|
ansible_ansible/test/units/inventory/test_group.py
|
# Copyright 2018 Alan Rominger <arominge@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.inventory.group import Group
from ansible.inventory.host import Host
from ansible.errors import AnsibleError
class TestGroup(unittest.TestCase):
def test_depth_update(self):
A = Group('A')
B = Group('B')
Z = Group('Z')
A.add_child_group(B)
A.add_child_group(Z)
self.assertEqual(A.depth, 0)
self.assertEqual(Z.depth, 1)
self.assertEqual(B.depth, 1)
def test_depth_update_dual_branches(self):
alpha = Group('alpha')
A = Group('A')
alpha.add_child_group(A)
B = Group('B')
A.add_child_group(B)
Z = Group('Z')
alpha.add_child_group(Z)
beta = Group('beta')
B.add_child_group(beta)
Z.add_child_group(beta)
self.assertEqual(alpha.depth, 0) # apex
self.assertEqual(beta.depth, 3) # alpha -> A -> B -> beta
omega = Group('omega')
omega.add_child_group(alpha)
# verify that both paths are traversed to get the max depth value
self.assertEqual(B.depth, 3) # omega -> alpha -> A -> B
self.assertEqual(beta.depth, 4) # B -> beta
def test_depth_recursion(self):
A = Group('A')
B = Group('B')
A.add_child_group(B)
# hypothetical of adding B as child group to A
A.parent_groups.append(B)
B.child_groups.append(A)
# can't update depths of groups, because of loop
with self.assertRaises(AnsibleError):
B._check_children_depth()
def test_loop_detection(self):
A = Group('A')
B = Group('B')
C = Group('C')
A.add_child_group(B)
B.add_child_group(C)
with self.assertRaises(AnsibleError):
C.add_child_group(A)
def test_direct_host_ordering(self):
"""Hosts are returned in order they are added
"""
group = Group('A')
# host names not added in alphabetical order
host_name_list = ['z', 'b', 'c', 'a', 'p', 'q']
expected_hosts = []
for host_name in host_name_list:
h = Host(host_name)
group.add_host(h)
expected_hosts.append(h)
assert group.get_hosts() == expected_hosts
def test_sub_group_host_ordering(self):
"""With multiple nested groups, asserts that hosts are returned
in deterministic order
"""
top_group = Group('A')
expected_hosts = []
for name in ['z', 'b', 'c', 'a', 'p', 'q']:
child = Group('group_{0}'.format(name))
top_group.add_child_group(child)
host = Host('host_{0}'.format(name))
child.add_host(host)
expected_hosts.append(host)
assert top_group.get_hosts() == expected_hosts
def test_populates_descendant_hosts(self):
A = Group('A')
B = Group('B')
C = Group('C')
h = Host('h')
C.add_host(h)
A.add_child_group(B) # B is child of A
B.add_child_group(C) # C is descendant of A
A.add_child_group(B)
self.assertEqual(set(h.groups), set([C, B, A]))
h2 = Host('h2')
C.add_host(h2)
self.assertEqual(set(h2.groups), set([C, B, A]))
def test_ancestor_example(self):
# see docstring for Group._walk_relationship
groups = {}
for name in ['A', 'B', 'C', 'D', 'E', 'F']:
groups[name] = Group(name)
# first row
groups['A'].add_child_group(groups['D'])
groups['B'].add_child_group(groups['D'])
groups['B'].add_child_group(groups['E'])
groups['C'].add_child_group(groups['D'])
# second row
groups['D'].add_child_group(groups['E'])
groups['D'].add_child_group(groups['F'])
groups['E'].add_child_group(groups['F'])
self.assertEqual(
set(groups['F'].get_ancestors()),
set([
groups['A'], groups['B'], groups['C'], groups['D'], groups['E']
])
)
def test_ancestors_recursive_loop_safe(self):
"""
The get_ancestors method may be referenced before circular parenting
checks, so the method is expected to be stable even with loops
"""
A = Group('A')
B = Group('B')
A.parent_groups.append(B)
B.parent_groups.append(A)
# finishes in finite time
self.assertEqual(A.get_ancestors(), set([A, B]))
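# --- Editor-added illustrative sketch; not part of the original ansible test file. ---
# A small parent/child hierarchy showing the behaviour the tests above rely on:
# hosts added to child groups are visible from the parent, ancestors come back as
# a set, and depths update on add_child_group(). Names are illustrative.
def _example_group_hierarchy():
    parent = Group('parents')
    child = Group('children')
    parent.add_child_group(child)
    child.add_host(Host('node1'))
    assert [h.name for h in parent.get_hosts()] == ['node1']
    assert child.get_ancestors() == set([parent])
    assert (parent.depth, child.depth) == (0, 1)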
| 5,194
|
Python
|
.py
| 136
| 30.147059
| 79
| 0.591865
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,748
|
test_variable_manager.py
|
ansible_ansible/test/units/vars/test_variable_manager.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import unittest
from unittest.mock import MagicMock, patch
from ansible.playbook.play import Play
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
from ansible.vars.manager import VariableManager
class TestVariableManager(unittest.TestCase):
def test_basic_manager(self):
fake_loader = DictDataLoader({})
mock_inventory = MagicMock()
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
variables = v.get_vars(use_cache=False)
# Check var manager expected values, never check: ['omit', 'vars']
# FIXME: add the following ['ansible_version', 'ansible_playbook_python', 'groups']
for varname, value in (('playbook_dir', os.path.abspath('.')), ):
self.assertEqual(variables[varname], value)
def test_variable_manager_extra_vars(self):
fake_loader = DictDataLoader({})
extra_vars = dict(a=1, b=2, c=3)
mock_inventory = MagicMock()
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
# override internal extra_vars loading
v._extra_vars = extra_vars
myvars = v.get_vars(use_cache=False)
for key, val in extra_vars.items():
self.assertEqual(myvars.get(key), val)
def test_variable_manager_options_vars(self):
fake_loader = DictDataLoader({})
options_vars = dict(a=1, b=2, c=3)
mock_inventory = MagicMock()
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
# override internal options_vars loading
v._extra_vars = options_vars
myvars = v.get_vars(use_cache=False)
for key, val in options_vars.items():
self.assertEqual(myvars.get(key), val)
def test_variable_manager_play_vars(self):
fake_loader = DictDataLoader({})
mock_play = MagicMock()
mock_play.get_vars.return_value = dict(foo="bar")
mock_play.get_roles.return_value = []
mock_play.get_vars_files.return_value = []
mock_inventory = MagicMock()
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
self.assertEqual(v.get_vars(play=mock_play, use_cache=False).get("foo"), "bar")
def test_variable_manager_play_vars_files(self):
fake_loader = DictDataLoader({
__file__: """
foo: bar
"""
})
mock_play = MagicMock()
mock_play.get_vars.return_value = dict()
mock_play.get_roles.return_value = []
mock_play.get_vars_files.return_value = [__file__]
mock_inventory = MagicMock()
v = VariableManager(inventory=mock_inventory, loader=fake_loader)
self.assertEqual(v.get_vars(play=mock_play, use_cache=False).get("foo"), "bar")
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_variable_manager_role_vars_dependencies(self):
"""
Tests vars from role dependencies with duplicate dependencies.
"""
mock_inventory = MagicMock()
fake_loader = DictDataLoader({
# role common-role
'/etc/ansible/roles/common-role/tasks/main.yml': """
- debug: msg="{{role_var}}"
""",
# We do not need allow_duplicates: yes for this role
# because eliminating duplicates is done by the execution
# strategy, which we do not test here.
# role role1
'/etc/ansible/roles/role1/vars/main.yml': """
role_var: "role_var_from_role1"
""",
'/etc/ansible/roles/role1/meta/main.yml': """
dependencies:
- { role: common-role }
""",
# role role2
'/etc/ansible/roles/role2/vars/main.yml': """
role_var: "role_var_from_role2"
""",
'/etc/ansible/roles/role2/meta/main.yml': """
dependencies:
- { role: common-role }
""",
})
v = VariableManager(loader=fake_loader, inventory=mock_inventory)
play1 = Play.load(dict(
hosts=['all'],
roles=['role1', 'role2'],
), loader=fake_loader, variable_manager=v)
# The task defined by common-role exists twice because role1
# and role2 depend on common-role. Check that the tasks see
# different values of role_var.
blocks = play1.compile()
task = blocks[1].block[0]
res = v.get_vars(play=play1, task=task)
self.assertEqual(res['role_var'], 'role_var_from_role1')
task = blocks[2].block[0]
res = v.get_vars(play=play1, task=task)
self.assertEqual(res['role_var'], 'role_var_from_role2')
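# --- Editor-added illustrative sketch; not part of the original ansible test file. ---
# Extra vars (-e / --extra-vars) take precedence over play vars. This reuses the
# mocking pattern of the tests above; the variable name and values are illustrative.
def _example_variable_manager_precedence():
    fake_loader = DictDataLoader({})
    mock_play = MagicMock()
    mock_play.get_vars.return_value = dict(colour='blue')
    mock_play.get_roles.return_value = []
    mock_play.get_vars_files.return_value = []
    v = VariableManager(loader=fake_loader, inventory=MagicMock())
    v._extra_vars = dict(colour='green')
    assert v.get_vars(play=mock_play, use_cache=False)['colour'] == 'green'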
| 5,532
|
Python
|
.py
| 122
| 36.868852
| 92
| 0.63753
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,749
|
test_module_response_deepcopy.py
|
ansible_ansible/test/units/vars/test_module_response_deepcopy.py
|
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.vars.clean import module_response_deepcopy
def test_module_response_deepcopy_basic():
x = 42
y = module_response_deepcopy(x)
assert y == x
def test_module_response_deepcopy_atomic():
tests = [None, 42, 2**100, 3.14, True, False, 1j,
"hello", u"hello\u1234"]
for x in tests:
assert module_response_deepcopy(x) is x
def test_module_response_deepcopy_list():
x = [[1, 2], 3]
y = module_response_deepcopy(x)
assert y == x
assert x is not y
assert x[0] is not y[0]
def test_module_response_deepcopy_empty_tuple():
x = ()
y = module_response_deepcopy(x)
assert x is y
def test_module_response_deepcopy_tuple_of_immutables():
x = ((1, 2), 3)
y = module_response_deepcopy(x)
assert x is y
def test_module_response_deepcopy_dict():
x = {"foo": [1, 2], "bar": 3}
y = module_response_deepcopy(x)
assert y == x
assert x is not y
assert x["foo"] is not y["foo"]
| 1,172
|
Python
|
.py
| 34
| 29.970588
| 92
| 0.652135
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,750
|
vault_helper.py
|
ansible_ansible/test/units/mock/vault_helper.py
|
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from ansible.module_utils.common.text.converters import to_bytes
from ansible.parsing.vault import VaultSecret
class TextVaultSecret(VaultSecret):
"""A secret piece of text. ie, a password. Tracks text encoding.
The text encoding of the text may not be the default text encoding so
we keep track of the encoding so we encode it to the same bytes."""
def __init__(self, text, encoding=None, errors=None, _bytes=None):
super(TextVaultSecret, self).__init__()
self.text = text
self.encoding = encoding or 'utf-8'
self._bytes = _bytes
self.errors = errors or 'strict'
@property
def bytes(self):
"""The text encoded with encoding, unless we specifically set _bytes."""
return self._bytes or to_bytes(self.text, encoding=self.encoding, errors=self.errors)
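# --- Editor-added illustrative sketch; not part of the original helper. ---
# TextVaultSecret keeps the original text and exposes the encoded bytes the vault
# code consumes; 'latin-1' is just an example of a non-default encoding.
def _example_text_vault_secret():
    secret = TextVaultSecret(u'p\xe4ssword', encoding='latin-1')
    assert secret.text == u'p\xe4ssword'
    assert secret.bytes == u'p\xe4ssword'.encode('latin-1')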
| 1,492
|
Python
|
.py
| 29
| 47.413793
| 93
| 0.73677
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,751
|
path.py
|
ansible_ansible/test/units/mock/path.py
|
from __future__ import annotations
from unittest.mock import MagicMock
from ansible.utils.path import unfrackpath
mock_unfrackpath_noop = MagicMock(spec_set=unfrackpath, side_effect=lambda x, *args, **kwargs: x)
| 215
|
Python
|
.py
| 4
| 52
| 97
| 0.8125
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,752
|
loader.py
|
ansible_ansible/test/units/mock/loader.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
from ansible.parsing.dataloader import DataLoader
from ansible.module_utils.common.text.converters import to_bytes, to_text
class DictDataLoader(DataLoader):
def __init__(self, file_mapping=None):
file_mapping = {} if file_mapping is None else file_mapping
assert isinstance(file_mapping, dict)
super(DictDataLoader, self).__init__()
self._file_mapping = file_mapping
self._build_known_directories()
self._vault_secrets = None
def load_from_file(self, path, cache='all', unsafe=False):
data = None
path = to_text(path)
if path in self._file_mapping:
data = self.load(self._file_mapping[path], path)
return data
# TODO: the real _get_file_contents returns a bytestring, so we actually convert the
# unicode/text it's created with to utf-8
def _get_file_contents(self, file_name):
return to_bytes(self._file_mapping[file_name]), False
def path_exists(self, path):
path = to_text(path)
return path in self._file_mapping or path in self._known_directories
def is_file(self, path):
path = to_text(path)
return path in self._file_mapping
def is_directory(self, path):
path = to_text(path)
return path in self._known_directories
def list_directory(self, path):
ret = []
path = to_text(path)
for x in (list(self._file_mapping.keys()) + self._known_directories):
if x.startswith(path):
if os.path.dirname(x) == path:
ret.append(os.path.basename(x))
return ret
def is_executable(self, path):
# FIXME: figure out a way to make paths return true for this
return False
def _add_known_directory(self, directory):
if directory not in self._known_directories:
self._known_directories.append(directory)
def _build_known_directories(self):
self._known_directories = []
for path in self._file_mapping:
dirname = os.path.dirname(path)
while dirname not in ('/', ''):
self._add_known_directory(dirname)
dirname = os.path.dirname(dirname)
def get_basedir(self):
return os.getcwd()
def set_vault_secrets(self, vault_secrets):
self._vault_secrets = vault_secrets
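# --- Editor-added illustrative sketch; not part of the original helper. ---
# DictDataLoader maps fake file paths to contents so tests can exercise code that
# reads vars files or playbooks without touching the filesystem. The path and the
# YAML content are illustrative.
def _example_dict_data_loader():
    loader = DictDataLoader({'/etc/ansible/group_vars/all.yml': 'foo: bar\n'})
    assert loader.path_exists('/etc/ansible/group_vars/all.yml')
    assert loader.is_directory('/etc/ansible/group_vars')
    assert loader.load_from_file('/etc/ansible/group_vars/all.yml') == {'foo': 'bar'}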
| 3,130
|
Python
|
.py
| 72
| 36.541667
| 88
| 0.667983
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,753
|
yaml_helper.py
|
ansible_ansible/test/units/mock/yaml_helper.py
|
from __future__ import annotations
import io
import yaml
from ansible.parsing.yaml.dumper import AnsibleDumper
class YamlTestUtils(object):
"""Mixin class to combine with a unittest.TestCase subclass."""
def _loader(self, stream):
"""Vault related tests will want to override this.
Vault cases should setup a AnsibleLoader that has the vault password."""
def _dump_stream(self, obj, stream, dumper=None):
"""Dump to a py2-unicode or py3-string stream."""
return yaml.dump(obj, stream, Dumper=dumper)
def _dump_string(self, obj, dumper=None):
"""Dump to a py2-unicode or py3-string"""
return yaml.dump(obj, Dumper=dumper)
def _dump_load_cycle(self, obj):
        # Each pass through a dump or load revs the 'generation'
# obj to yaml string
string_from_object_dump = self._dump_string(obj, dumper=AnsibleDumper)
# wrap a stream/file like StringIO around that yaml
stream_from_object_dump = io.StringIO(string_from_object_dump)
loader = self._loader(stream_from_object_dump)
# load the yaml stream to create a new instance of the object (gen 2)
obj_2 = loader.get_data()
        # dump the gen 2 objects directly to strings
string_from_object_dump_2 = self._dump_string(obj_2,
dumper=AnsibleDumper)
        # the gen 1 and gen 2 yaml strings should match
        self.assertEqual(string_from_object_dump, string_from_object_dump_2)
        # the gen 1 (orig) and gen 2 py objects should match
        self.assertEqual(obj, obj_2)
# again! gen 3... load strings into py objects
stream_3 = io.StringIO(string_from_object_dump_2)
loader_3 = self._loader(stream_3)
obj_3 = loader_3.get_data()
string_from_object_dump_3 = self._dump_string(obj_3, dumper=AnsibleDumper)
self.assertEqual(obj, obj_3)
# should be transitive, but...
self.assertEqual(obj_2, obj_3)
self.assertEqual(string_from_object_dump, string_from_object_dump_3)
| 2,067
|
Python
|
.py
| 40
| 42.675
| 82
| 0.655738
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,754
|
procenv.py
|
ansible_ansible/test/units/mock/procenv.py
|
# (c) 2016, Matt Davis <mdavis@ansible.com>
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import sys
import json
from contextlib import contextmanager
from io import BytesIO, StringIO
import unittest
from ansible.module_utils.common.text.converters import to_bytes
@contextmanager
def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
"""
context manager that temporarily masks the test runner's values for stdin and argv
"""
real_stdin = sys.stdin
real_argv = sys.argv
fake_stream = StringIO(stdin_data)
fake_stream.buffer = BytesIO(to_bytes(stdin_data))
try:
sys.stdin = fake_stream
sys.argv = argv_data
yield
finally:
sys.stdin = real_stdin
sys.argv = real_argv
class ModuleTestCase(unittest.TestCase):
def setUp(self):
module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))
# unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
self.stdin_swap = swap_stdin_and_argv(stdin_data=args)
self.stdin_swap.__enter__()
def tearDown(self):
# unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
self.stdin_swap.__exit__(None, None, None)
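# --- Editor-added illustrative sketch; not part of the original helper. ---
# Feeding fake module arguments over stdin the same way ModuleTestCase does; the
# argument names follow the JSON envelope used above and the argv value is illustrative.
def _example_swap_stdin_and_argv():
    args = json.dumps(dict(ANSIBLE_MODULE_ARGS={'_ansible_remote_tmp': '/tmp',
                                                '_ansible_keep_remote_files': False}))
    with swap_stdin_and_argv(stdin_data=args, argv_data=['my_module.py']):
        assert sys.stdin.read() == args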
| 2,058
|
Python
|
.py
| 50
| 37.1
| 105
| 0.724812
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,755
|
test_user_agent.py
|
ansible_ansible/test/units/galaxy/test_user_agent.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import platform
from ansible.galaxy import user_agent
from ansible.module_utils.ansible_release import __version__ as ansible_version
def test_user_agent():
res = user_agent.user_agent()
assert res.startswith('ansible-galaxy/%s' % ansible_version)
assert platform.system() in res
assert 'python:' in res
| 516
|
Python
|
.py
| 12
| 40.25
| 92
| 0.741483
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,756
|
test_api.py
|
ansible_ansible/test/units/galaxy/test_api.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import json
import os
import re
import pytest
import stat
import tarfile
import tempfile
import time
from io import BytesIO, StringIO
from unittest.mock import MagicMock
import ansible.constants as C
from ansible import context
from ansible.errors import AnsibleError
from ansible.galaxy import api as galaxy_api
from ansible.galaxy.api import CollectionVersionMetadata, GalaxyAPI, GalaxyError
from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken
from ansible.module_utils.common.file import S_IRWU_RG_RO
from ansible.module_utils.common.text.converters import to_native, to_text
import urllib.error
from ansible.utils import context_objects as co
from ansible.utils.display import Display
@pytest.fixture(autouse=True)
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
# Required to initialise the GalaxyAPI object
context.CLIARGS._store = {'ignore_certs': False}
yield
co.GlobalCLIArgs._Singleton__instance = None
@pytest.fixture()
def collection_artifact(tmp_path_factory):
""" Creates a collection artifact tarball that is ready to be published """
output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Output'))
tar_path = os.path.join(output_dir, 'namespace-collection-v1.0.0.tar.gz')
with tarfile.open(tar_path, 'w:gz') as tfile:
b_io = BytesIO(b"\x00\x01\x02\x03")
tar_info = tarfile.TarInfo('test')
tar_info.size = 4
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
yield tar_path
@pytest.fixture()
def cache_dir(tmp_path_factory, monkeypatch):
cache_dir = to_text(tmp_path_factory.mktemp('Test ÅÑŚÌβŁÈ Galaxy Cache'))
monkeypatch.setattr(C, 'GALAXY_CACHE_DIR', cache_dir)
yield cache_dir
def get_test_galaxy_api(url, version, token_ins=None, token_value=None, no_cache=True):
token_value = token_value or "my token"
token_ins = token_ins or GalaxyToken(token_value)
api = GalaxyAPI(None, "test", url, no_cache=no_cache)
# Warning, this doesn't test g_connect() because _available_api_versions is set here. That means
# that urls for v2 servers have to append '/api/' themselves in the input data.
api._available_api_versions = {version: '%s' % version}
api.token = token_ins
return api
def get_v3_collection_versions(namespace='namespace', name='collection'):
pagination_path = f"/api/galaxy/content/community/v3/plugin/{namespace}/content/community/collections/index/{namespace}/{name}/versions"
page_versions = (('1.0.0', '1.0.1',), ('1.0.2', '1.0.3',), ('1.0.4', '1.0.5'),)
responses = [
{}, # TODO: initial response
]
first = f"{pagination_path}/?limit=100"
last = f"{pagination_path}/?limit=100&offset=200"
page_versions = [
{
"versions": ('1.0.0', '1.0.1',),
"url": first,
},
{
"versions": ('1.0.2', '1.0.3',),
"url": f"{pagination_path}/?limit=100&offset=100",
},
{
"versions": ('1.0.4', '1.0.5'),
"url": last,
},
]
previous = None
for page in range(0, len(page_versions)):
data = []
if page_versions[page]["url"] == last:
next_page = None
else:
next_page = page_versions[page + 1]["url"]
links = {"first": first, "last": last, "next": next_page, "previous": previous}
for version in page_versions[page]["versions"]:
data.append(
{
"version": f"{version}",
"href": f"{pagination_path}/{version}/",
"created_at": "2022-05-13T15:55:58.913107Z",
"updated_at": "2022-05-13T15:55:58.913121Z",
"requires_ansible": ">=2.9.10"
}
)
responses.append({"meta": {"count": 6}, "links": links, "data": data})
previous = page_versions[page]["url"]
return responses
def get_collection_versions(namespace='namespace', name='collection'):
base_url = 'https://galaxy.server.com/api/v2/collections/{0}/{1}/'.format(namespace, name)
versions_url = base_url + 'versions/'
# Response for collection info
responses = [
{
"id": 1000,
"href": base_url,
"name": name,
"namespace": {
"id": 30000,
"href": "https://galaxy.ansible.com/api/v1/namespaces/30000/",
"name": namespace,
},
"versions_url": versions_url,
"latest_version": {
"version": "1.0.5",
"href": versions_url + "1.0.5/"
},
"deprecated": False,
"created": "2021-02-09T16:55:42.749915-05:00",
"modified": "2021-02-09T16:55:42.749915-05:00",
}
]
# Paginated responses for versions
page_versions = (('1.0.0', '1.0.1',), ('1.0.2', '1.0.3',), ('1.0.4', '1.0.5'),)
last_page = None
for page in range(1, len(page_versions) + 1):
if page < len(page_versions):
next_page = versions_url + '?page={0}'.format(page + 1)
else:
next_page = None
version_results = []
for version in page_versions[int(page - 1)]:
version_results.append(
{'version': version, 'href': versions_url + '{0}/'.format(version)}
)
responses.append(
{
'count': 6,
'next': next_page,
'previous': last_page,
'results': version_results,
}
)
last_page = page
return responses
def test_api_no_auth():
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/")
actual = {}
api._add_auth_token(actual, "")
assert actual == {}
def test_api_no_auth_but_required():
expected = "No access token or username set. A token can be set with --api-key or at "
with pytest.raises(AnsibleError, match=expected):
GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/")._add_auth_token({}, "", required=True)
def test_api_token_auth():
token = GalaxyToken(token=u"my_token")
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=token)
actual = {}
api._add_auth_token(actual, "", required=True)
assert actual == {'Authorization': 'Token my_token'}
def test_api_token_auth_with_token_type(monkeypatch):
token = KeycloakToken(auth_url='https://api.test/')
mock_token_get = MagicMock()
mock_token_get.return_value = 'my_token'
monkeypatch.setattr(token, 'get', mock_token_get)
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=token)
actual = {}
api._add_auth_token(actual, "", token_type="Bearer", required=True)
assert actual == {'Authorization': 'Bearer my_token'}
def test_api_token_auth_with_v3_url(monkeypatch):
token = KeycloakToken(auth_url='https://api.test/')
mock_token_get = MagicMock()
mock_token_get.return_value = 'my_token'
monkeypatch.setattr(token, 'get', mock_token_get)
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=token)
actual = {}
api._add_auth_token(actual, "https://galaxy.ansible.com/api/v3/resource/name", required=True)
assert actual == {'Authorization': 'Bearer my_token'}
def test_api_token_auth_with_v2_url():
token = GalaxyToken(token=u"my_token")
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=token)
actual = {}
    # 'v3' appears inside a later path component, but only the full 'v2' URI path segment should be recognised when choosing the auth header.
api._add_auth_token(actual, "https://galaxy.ansible.com/api/v2/resourcev3/name", required=True)
assert actual == {'Authorization': 'Token my_token'}
def test_api_basic_auth_password():
token = BasicAuthToken(username=u"user", password=u"pass")
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=token)
actual = {}
api._add_auth_token(actual, "", required=True)
assert actual == {'Authorization': 'Basic dXNlcjpwYXNz'}
def test_api_basic_auth_no_password():
token = BasicAuthToken(username=u"user")
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=token)
actual = {}
api._add_auth_token(actual, "", required=True)
assert actual == {'Authorization': 'Basic dXNlcjo='}
def test_api_dont_override_auth_header():
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/")
actual = {'Authorization': 'Custom token'}
api._add_auth_token(actual, "", required=True)
assert actual == {'Authorization': 'Custom token'}
def test_initialise_galaxy(monkeypatch):
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"token":"my token"}'),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/")
actual = api.authenticate("github_token")
assert len(api.available_api_versions) == 2
assert api.available_api_versions['v1'] == u'v1/'
assert api.available_api_versions['v2'] == u'v2/'
assert actual == {u'token': u'my token'}
assert mock_open.call_count == 2
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.ansible.com/api/'
assert 'ansible-galaxy' in mock_open.mock_calls[0][2]['http_agent']
assert mock_open.mock_calls[1][1][0] == 'https://galaxy.ansible.com/api/v1/tokens/'
assert 'ansible-galaxy' in mock_open.mock_calls[1][2]['http_agent']
assert mock_open.mock_calls[1][2]['data'] == 'github_token=github_token'
def test_initialise_galaxy_with_auth(monkeypatch):
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"token":"my token"}'),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=GalaxyToken(token='my_token'))
actual = api.authenticate("github_token")
assert len(api.available_api_versions) == 2
assert api.available_api_versions['v1'] == u'v1/'
assert api.available_api_versions['v2'] == u'v2/'
assert actual == {u'token': u'my token'}
assert mock_open.call_count == 2
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.ansible.com/api/'
assert 'ansible-galaxy' in mock_open.mock_calls[0][2]['http_agent']
assert mock_open.mock_calls[1][1][0] == 'https://galaxy.ansible.com/api/v1/tokens/'
assert 'ansible-galaxy' in mock_open.mock_calls[1][2]['http_agent']
assert mock_open.mock_calls[1][2]['data'] == 'github_token=github_token'
def test_initialise_automation_hub(monkeypatch):
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(u'{"available_versions":{"v2": "v2/", "v3":"v3/"}}'),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
token = KeycloakToken(auth_url='https://api.test/')
mock_token_get = MagicMock()
mock_token_get.return_value = 'my_token'
monkeypatch.setattr(token, 'get', mock_token_get)
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=token)
assert len(api.available_api_versions) == 2
assert api.available_api_versions['v2'] == u'v2/'
assert api.available_api_versions['v3'] == u'v3/'
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.ansible.com/api/'
assert 'ansible-galaxy' in mock_open.mock_calls[0][2]['http_agent']
assert mock_open.mock_calls[0][2]['headers'] == {'Authorization': 'Bearer my_token'}
def test_initialise_unknown(monkeypatch):
mock_open = MagicMock()
mock_open.side_effect = [
urllib.error.HTTPError('https://galaxy.ansible.com/api/', 500, 'msg', {}, StringIO(u'{"msg":"raw error"}')),
urllib.error.HTTPError('https://galaxy.ansible.com/api/api/', 500, 'msg', {}, StringIO(u'{"msg":"raw error"}')),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/", token=GalaxyToken(token='my_token'))
expected = "Error when finding available api versions from test (%s) (HTTP Code: 500, Message: msg)" \
% api.api_server
with pytest.raises(AnsibleError, match=re.escape(expected)):
api.authenticate("github_token")
def test_get_available_api_versions(monkeypatch):
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/","v2":"v2/"}}'),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
api = GalaxyAPI(None, "test", "https://galaxy.ansible.com/api/")
actual = api.available_api_versions
assert len(actual) == 2
assert actual['v1'] == u'v1/'
assert actual['v2'] == u'v2/'
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.ansible.com/api/'
assert 'ansible-galaxy' in mock_open.mock_calls[0][2]['http_agent']
def test_publish_collection_missing_file():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection path specified '%s' does not exist." % fake_path)
api = get_test_galaxy_api("https://galaxy.ansible.com/api/", "v2")
with pytest.raises(AnsibleError, match=expected):
api.publish_collection(fake_path)
def test_publish_collection_not_a_tarball():
expected = "The collection path specified '{0}' is not a tarball, use 'ansible-galaxy collection build' to " \
"create a proper release artifact."
api = get_test_galaxy_api("https://galaxy.ansible.com/api/", "v2")
with tempfile.NamedTemporaryFile(prefix=u'ÅÑŚÌβŁÈ') as temp_file:
temp_file.write(b"\x00")
temp_file.flush()
with pytest.raises(AnsibleError, match=expected.format(to_native(temp_file.name))):
api.publish_collection(temp_file.name)
def test_publish_collection_unsupported_version():
expected = "Galaxy action publish_collection requires API versions 'v2, v3' but only 'v1' are available on test " \
"https://galaxy.ansible.com/api/"
api = get_test_galaxy_api("https://galaxy.ansible.com/api/", "v1")
with pytest.raises(AnsibleError, match=expected):
api.publish_collection("path")
@pytest.mark.parametrize('api_version, collection_url', [
('v2', 'collections'),
('v3', 'artifacts/collections'),
])
def test_publish_collection(api_version, collection_url, collection_artifact, monkeypatch):
api = get_test_galaxy_api("https://galaxy.ansible.com/api/", api_version)
mock_call = MagicMock()
mock_call.return_value = {'task': 'http://task.url/'}
monkeypatch.setattr(api, '_call_galaxy', mock_call)
actual = api.publish_collection(collection_artifact)
assert actual == 'http://task.url/'
assert mock_call.call_count == 1
assert mock_call.mock_calls[0][1][0] == 'https://galaxy.ansible.com/api/%s/%s/' % (api_version, collection_url)
assert mock_call.mock_calls[0][2]['headers']['Content-length'] == len(mock_call.mock_calls[0][2]['args'])
assert mock_call.mock_calls[0][2]['headers']['Content-type'].startswith(
'multipart/form-data; boundary=')
assert mock_call.mock_calls[0][2]['args'].startswith(b'--')
assert mock_call.mock_calls[0][2]['method'] == 'POST'
assert mock_call.mock_calls[0][2]['auth_required'] is True
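# Illustrative sketch (hypothetical helper): the assertions above only check the shape of
# the upload request -- a multipart/form-data Content-type carrying a boundary, a body that
# starts with b'--', and a Content-length equal to the body size. A minimal body of that
# shape could be built as follows; this is not how GalaxyAPI assembles its payload.
def _example_multipart_upload(filename, file_contents, boundary='sketch-boundary'):
    parts = [
        b'--' + boundary.encode('utf-8'),
        b'Content-Disposition: form-data; name="file"; filename="' + filename.encode('utf-8') + b'"',
        b'Content-Type: application/octet-stream',
        b'',
        file_contents,
        b'--' + boundary.encode('utf-8') + b'--',
        b'',
    ]
    body = b'\r\n'.join(parts)
    headers = {
        'Content-type': 'multipart/form-data; boundary=%s' % boundary,
        'Content-length': len(body),
    }
    return body, headers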
@pytest.mark.parametrize('api_version, collection_url, response, expected', [
('v2', 'collections', {},
'Error when publishing collection to test (%s) (HTTP Code: 500, Message: msg Code: Unknown)'),
('v2', 'collections', {
'message': u'Galaxy error messäge',
'code': 'GWE002',
}, u'Error when publishing collection to test (%s) (HTTP Code: 500, Message: Galaxy error messäge Code: GWE002)'),
('v3', 'artifact/collections', {},
'Error when publishing collection to test (%s) (HTTP Code: 500, Message: msg Code: Unknown)'),
('v3', 'artifact/collections', {
'errors': [
{
'code': 'conflict.collection_exists',
'detail': 'Collection "mynamespace-mycollection-4.1.1" already exists.',
'title': 'Conflict.',
'status': '400',
},
{
'code': 'quantum_improbability',
'title': u'Rändom(?) quantum improbability.',
'source': {'parameter': 'the_arrow_of_time'},
'meta': {'remediation': 'Try again before'},
},
],
}, u'Error when publishing collection to test (%s) (HTTP Code: 500, Message: Collection '
u'"mynamespace-mycollection-4.1.1" already exists. Code: conflict.collection_exists), (HTTP Code: 500, '
u'Message: Rändom(?) quantum improbability. Code: quantum_improbability)')
])
def test_publish_failure(api_version, collection_url, response, expected, collection_artifact, monkeypatch):
api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version)
expected_url = '%s/api/%s/%s' % (api.api_server, api_version, collection_url)
mock_open = MagicMock()
mock_open.side_effect = urllib.error.HTTPError(expected_url, 500, 'msg', {},
StringIO(to_text(json.dumps(response))))
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
with pytest.raises(GalaxyError, match=re.escape(to_native(expected % api.api_server))):
api.publish_collection(collection_artifact)
@pytest.mark.parametrize('server_url, api_version, token_type, token_ins, import_uri, full_import_uri', [
('https://galaxy.server.com/api', 'v2', 'Token', GalaxyToken('my token'),
'1234',
'https://galaxy.server.com/api/v2/collection-imports/1234/'),
('https://galaxy.server.com/api/automation-hub/', 'v3', 'Bearer', KeycloakToken(auth_url='https://api.test/'),
'1234',
'https://galaxy.server.com/api/automation-hub/v3/imports/collections/1234/'),
])
def test_wait_import_task(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.return_value = StringIO(u'{"state":"success","finished_at":"time"}')
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
api.wait_import_task(import_uri)
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == full_import_uri
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == 'Waiting until Galaxy import task %s has completed' % full_import_uri
@pytest.mark.parametrize('server_url, api_version, token_type, token_ins, import_uri, full_import_uri', [
('https://galaxy.server.com/api/', 'v2', 'Token', GalaxyToken('my token'),
'1234',
'https://galaxy.server.com/api/v2/collection-imports/1234/'),
('https://galaxy.server.com/api/automation-hub', 'v3', 'Bearer', KeycloakToken(auth_url='https://api.test/'),
'1234',
'https://galaxy.server.com/api/automation-hub/v3/imports/collections/1234/'),
])
def test_wait_import_task_multiple_requests(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(u'{"state":"test"}'),
StringIO(u'{"state":"success","finished_at":"time"}'),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
monkeypatch.setattr(time, 'sleep', MagicMock())
api.wait_import_task(import_uri)
assert mock_open.call_count == 2
assert mock_open.mock_calls[0][1][0] == full_import_uri
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_open.mock_calls[1][1][0] == full_import_uri
assert mock_open.mock_calls[1][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == 'Waiting until Galaxy import task %s has completed' % full_import_uri
assert mock_vvv.call_count == 1
assert mock_vvv.mock_calls[0][1][0] == \
'Galaxy import process has a status of test, wait 2 seconds before trying again'
@pytest.mark.parametrize('server_url, api_version, token_type, token_ins, import_uri, full_import_uri,', [
('https://galaxy.server.com/api/', 'v2', 'Token', GalaxyToken('my token'),
'1234',
'https://galaxy.server.com/api/v2/collection-imports/1234/'),
('https://galaxy.server.com/api/automation-hub/', 'v3', 'Bearer', KeycloakToken(auth_url='https://api.test/'),
'1234',
'https://galaxy.server.com/api/automation-hub/v3/imports/collections/1234/'),
])
def test_wait_import_task_with_failure(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(to_text(json.dumps({
'finished_at': 'some_time',
'state': 'failed',
'error': {
'code': 'GW001',
'description': u'Becäuse I said so!',
},
'messages': [
{
'level': 'ERrOR',
'message': u'Somé error',
},
{
'level': 'WARNiNG',
'message': u'Some wärning',
},
{
'level': 'INFO',
'message': u'Somé info',
},
],
}))),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
mock_warn = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_warn)
mock_err = MagicMock()
monkeypatch.setattr(Display, 'error', mock_err)
expected = to_native(u'Galaxy import process failed: Becäuse I said so! (Code: GW001)')
with pytest.raises(AnsibleError, match=re.escape(expected)):
api.wait_import_task(import_uri)
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == full_import_uri
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == 'Waiting until Galaxy import task %s has completed' % full_import_uri
assert mock_vvv.call_count == 1
assert mock_vvv.mock_calls[0][1][0] == u'Galaxy import message: INFO - Somé info'
assert mock_warn.call_count == 1
assert mock_warn.mock_calls[0][1][0] == u'Galaxy import warning message: Some wärning'
assert mock_err.call_count == 1
assert mock_err.mock_calls[0][1][0] == u'Galaxy import error message: Somé error'
@pytest.mark.parametrize('server_url, api_version, token_type, token_ins, import_uri, full_import_uri', [
('https://galaxy.server.com/api/', 'v2', 'Token', GalaxyToken('my_token'),
'1234',
'https://galaxy.server.com/api/v2/collection-imports/1234/'),
('https://galaxy.server.com/api/automation-hub/', 'v3', 'Bearer', KeycloakToken(auth_url='https://api.test/'),
'1234',
'https://galaxy.server.com/api/automation-hub/v3/imports/collections/1234/'),
])
def test_wait_import_task_with_failure_no_error(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(to_text(json.dumps({
'finished_at': 'some_time',
'state': 'failed',
'error': {},
'messages': [
{
'level': 'ERROR',
'message': u'Somé error',
},
{
'level': 'WARNING',
'message': u'Some wärning',
},
{
'level': 'INFO',
'message': u'Somé info',
},
],
}))),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
mock_warn = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_warn)
mock_err = MagicMock()
monkeypatch.setattr(Display, 'error', mock_err)
expected = 'Galaxy import process failed: Unknown error, see %s for more details \\(Code: UNKNOWN\\)' % full_import_uri
with pytest.raises(AnsibleError, match=expected):
api.wait_import_task(import_uri)
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == full_import_uri
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == 'Waiting until Galaxy import task %s has completed' % full_import_uri
assert mock_vvv.call_count == 1
assert mock_vvv.mock_calls[0][1][0] == u'Galaxy import message: INFO - Somé info'
assert mock_warn.call_count == 1
assert mock_warn.mock_calls[0][1][0] == u'Galaxy import warning message: Some wärning'
assert mock_err.call_count == 1
assert mock_err.mock_calls[0][1][0] == u'Galaxy import error message: Somé error'
@pytest.mark.parametrize('server_url, api_version, token_type, token_ins, import_uri, full_import_uri', [
('https://galaxy.server.com/api', 'v2', 'Token', GalaxyToken('my token'),
'1234',
'https://galaxy.server.com/api/v2/collection-imports/1234/'),
('https://galaxy.server.com/api/automation-hub', 'v3', 'Bearer', KeycloakToken(auth_url='https://api.test/'),
'1234',
'https://galaxy.server.com/api/automation-hub/v3/imports/collections/1234/'),
])
def test_wait_import_task_timeout(server_url, api_version, token_type, token_ins, import_uri, full_import_uri, monkeypatch):
api = get_test_galaxy_api(server_url, api_version, token_ins=token_ins)
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
def return_response(*args, **kwargs):
return StringIO(u'{"state":"waiting"}')
mock_open = MagicMock()
mock_open.side_effect = return_response
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_vvv = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_vvv)
monkeypatch.setattr(time, 'sleep', MagicMock())
expected = "Timeout while waiting for the Galaxy import process to finish, check progress at '%s'" % full_import_uri
with pytest.raises(AnsibleError, match=expected):
api.wait_import_task(import_uri, 1)
assert mock_open.call_count > 1
assert mock_open.mock_calls[0][1][0] == full_import_uri
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_open.mock_calls[1][1][0] == full_import_uri
assert mock_open.mock_calls[1][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == 'Waiting until Galaxy import task %s has completed' % full_import_uri
# expected_wait_msg = 'Galaxy import process has a status of waiting, wait {0} seconds before trying again'
assert mock_vvv.call_count > 9 # 1st is opening Galaxy token file.
# FIXME:
# assert mock_vvv.mock_calls[1][1][0] == expected_wait_msg.format(2)
# assert mock_vvv.mock_calls[2][1][0] == expected_wait_msg.format(3)
# assert mock_vvv.mock_calls[3][1][0] == expected_wait_msg.format(4)
# assert mock_vvv.mock_calls[4][1][0] == expected_wait_msg.format(6)
# assert mock_vvv.mock_calls[5][1][0] == expected_wait_msg.format(10)
# assert mock_vvv.mock_calls[6][1][0] == expected_wait_msg.format(15)
# assert mock_vvv.mock_calls[7][1][0] == expected_wait_msg.format(22)
# assert mock_vvv.mock_calls[8][1][0] == expected_wait_msg.format(30)
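# Illustrative sketch (an assumption, since the assertions above are disabled): the commented
# wait values 2, 3, 4, 6, 10, 15, 22, 30 are consistent with an exponential backoff that
# multiplies the delay by 1.5, caps it at 30 seconds and truncates it for display.
def _example_backoff_waits(start=2, factor=1.5, cap=30, polls=8):
    wait, waits = start, []
    for _ in range(polls):
        waits.append(int(wait))
        wait = min(cap, wait * factor)
    return waits
# _example_backoff_waits() == [2, 3, 4, 6, 10, 15, 22, 30]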
@pytest.mark.parametrize('api_version, token_type, version, token_ins', [
('v2', None, 'v2.1.13', None),
('v3', 'Bearer', 'v1.0.0', KeycloakToken(auth_url='https://api.test/api/automation-hub/')),
])
def test_get_collection_version_metadata_no_version(api_version, token_type, version, token_ins, monkeypatch):
api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(to_text(json.dumps({
'href': 'https://galaxy.server.com/api/{api}/namespace/name/versions/{version}/'.format(api=api_version, version=version),
'download_url': 'https://downloadme.com',
'artifact': {
'sha256': 'ac47b6fac117d7c171812750dacda655b04533cf56b31080b82d1c0db3c9d80f',
},
'namespace': {
'name': 'namespace',
},
'collection': {
'name': 'collection',
},
'version': version,
'metadata': {
'dependencies': {},
}
}))),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual = api.get_collection_version_metadata('namespace', 'collection', version)
assert isinstance(actual, CollectionVersionMetadata)
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.download_url == u'https://downloadme.com'
assert actual.artifact_sha256 == u'ac47b6fac117d7c171812750dacda655b04533cf56b31080b82d1c0db3c9d80f'
assert actual.version == version
assert actual.dependencies == {}
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == '%s%s/collections/namespace/collection/versions/%s/' \
% (api.api_server, api_version, version)
    # v2 calls don't need auth, so there is no Authorization header or token_type
if token_type:
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
@pytest.mark.parametrize('api_version, token_type, token_ins, version', [
('v2', None, None, '2.1.13'),
('v3', 'Bearer', KeycloakToken(auth_url='https://api.test/api/automation-hub/'), '1.0.0'),
])
def test_get_collection_signatures_backwards_compat(api_version, token_type, token_ins, version, monkeypatch):
api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
StringIO("{}")
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual = api.get_collection_signatures('namespace', 'collection', version)
assert actual == []
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == '%s%s/collections/namespace/collection/versions/%s/' \
% (api.api_server, api_version, version)
    # v2 calls don't need auth, so there is no Authorization header or token_type
if token_type:
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
@pytest.mark.parametrize('api_version, token_type, token_ins, version', [
('v2', None, None, '2.1.13'),
('v3', 'Bearer', KeycloakToken(auth_url='https://api.test/api/automation-hub/'), '1.0.0'),
])
def test_get_collection_signatures(api_version, token_type, token_ins, version, monkeypatch):
api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(to_text(json.dumps({
'signatures': [
{
"signature": "-----BEGIN PGP SIGNATURE-----\nSIGNATURE1\n-----END PGP SIGNATURE-----\n",
"pubkey_fingerprint": "FINGERPRINT",
"signing_service": "ansible-default",
"pulp_created": "2022-01-14T14:05:53.835605Z",
},
{
"signature": "-----BEGIN PGP SIGNATURE-----\nSIGNATURE2\n-----END PGP SIGNATURE-----\n",
"pubkey_fingerprint": "FINGERPRINT",
"signing_service": "ansible-default",
"pulp_created": "2022-01-14T14:05:53.835605Z",
},
],
}))),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual = api.get_collection_signatures('namespace', 'collection', version)
assert actual == [
"-----BEGIN PGP SIGNATURE-----\nSIGNATURE1\n-----END PGP SIGNATURE-----\n",
"-----BEGIN PGP SIGNATURE-----\nSIGNATURE2\n-----END PGP SIGNATURE-----\n"
]
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == '%s%s/collections/namespace/collection/versions/%s/' \
% (api.api_server, api_version, version)
    # v2 calls don't need auth, so there is no Authorization header or token_type
if token_type:
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
@pytest.mark.parametrize('api_version, token_type, token_ins, response', [
('v2', None, None, {
'count': 2,
'next': None,
'previous': None,
'results': [
{
'version': '1.0.0',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.0',
},
{
'version': '1.0.1',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.1',
},
],
}),
# TODO: Verify this once Automation Hub is actually out
('v3', 'Bearer', KeycloakToken(auth_url='https://api.test/'), {
'count': 2,
'next': None,
'previous': None,
'data': [
{
'version': '1.0.0',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.0',
},
{
'version': '1.0.1',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.1',
},
],
}),
])
def test_get_collection_versions(api_version, token_type, token_ins, response, monkeypatch):
api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [
StringIO(to_text(json.dumps(response))),
]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual = api.get_collection_versions('namespace', 'collection')
assert actual == [u'1.0.0', u'1.0.1']
page_query = '?limit=100' if api_version == 'v3' else '?page_size=100'
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.server.com/api/%s/collections/namespace/collection/' \
'versions/%s' % (api_version, page_query)
if token_ins:
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
@pytest.mark.parametrize('api_version, token_type, token_ins, responses', [
('v2', None, None, [
{
'count': 6,
'next': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/?page=2&page_size=100',
'previous': None,
        'results': [  # NOTE: fewer results per page than page_size would indicate, to keep the fixture manageable
{
'version': '1.0.0',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.0',
},
{
'version': '1.0.1',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.1',
},
],
},
{
'count': 6,
'next': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/?page=3&page_size=100',
'previous': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions',
'results': [
{
'version': '1.0.2',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.2',
},
{
'version': '1.0.3',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.3',
},
],
},
{
'count': 6,
'next': None,
'previous': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/?page=2&page_size=100',
'results': [
{
'version': '1.0.4',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.4',
},
{
'version': '1.0.5',
'href': 'https://galaxy.server.com/api/v2/collections/namespace/collection/versions/1.0.5',
},
],
},
]),
('v3', 'Bearer', KeycloakToken(auth_url='https://api.test/'), [
{
'count': 6,
'links': {
# v3 links are relative and the limit is included during pagination
'next': '/api/v3/collections/namespace/collection/versions/?limit=100&offset=100',
'previous': None,
},
'data': [
{
'version': '1.0.0',
'href': '/api/v3/collections/namespace/collection/versions/1.0.0',
},
{
'version': '1.0.1',
'href': '/api/v3/collections/namespace/collection/versions/1.0.1',
},
],
},
{
'count': 6,
'links': {
'next': '/api/v3/collections/namespace/collection/versions/?limit=100&offset=200',
'previous': '/api/v3/collections/namespace/collection/versions',
},
'data': [
{
'version': '1.0.2',
'href': '/api/v3/collections/namespace/collection/versions/1.0.2',
},
{
'version': '1.0.3',
'href': '/api/v3/collections/namespace/collection/versions/1.0.3',
},
],
},
{
'count': 6,
'links': {
'next': None,
'previous': '/api/v3/collections/namespace/collection/versions/?limit=100&offset=100',
},
'data': [
{
'version': '1.0.4',
'href': '/api/v3/collections/namespace/collection/versions/1.0.4',
},
{
'version': '1.0.5',
'href': '/api/v3/collections/namespace/collection/versions/1.0.5',
},
],
},
]),
])
def test_get_collection_versions_pagination(api_version, token_type, token_ins, responses, monkeypatch):
api = get_test_galaxy_api('https://galaxy.server.com/api/', api_version, token_ins=token_ins)
if token_ins:
mock_token_get = MagicMock()
mock_token_get.return_value = 'my token'
monkeypatch.setattr(token_ins, 'get', mock_token_get)
mock_open = MagicMock()
mock_open.side_effect = [StringIO(to_text(json.dumps(r))) for r in responses]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual = api.get_collection_versions('namespace', 'collection')
assert actual == [u'1.0.0', u'1.0.1', u'1.0.2', u'1.0.3', u'1.0.4', u'1.0.5']
assert mock_open.call_count == 3
if api_version == 'v3':
query_1 = 'limit=100'
query_2 = 'limit=100&offset=100'
query_3 = 'limit=100&offset=200'
else:
query_1 = 'page_size=100'
query_2 = 'page=2&page_size=100'
query_3 = 'page=3&page_size=100'
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.server.com/api/%s/collections/namespace/collection/' \
'versions/?%s' % (api_version, query_1)
assert mock_open.mock_calls[1][1][0] == 'https://galaxy.server.com/api/%s/collections/namespace/collection/' \
'versions/?%s' % (api_version, query_2)
assert mock_open.mock_calls[2][1][0] == 'https://galaxy.server.com/api/%s/collections/namespace/collection/' \
'versions/?%s' % (api_version, query_3)
if token_type:
assert mock_open.mock_calls[0][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_open.mock_calls[1][2]['headers']['Authorization'] == '%s my token' % token_type
assert mock_open.mock_calls[2][2]['headers']['Authorization'] == '%s my token' % token_type
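# Illustrative sketch (hypothetical helper): the query strings asserted above differ by API
# version -- v2 paginates with page/page_size while v3 uses limit/offset.
def _example_page_queries(api_version, page_size=100, pages=3):
    queries = []
    for page in range(pages):
        if api_version == 'v3':
            query = 'limit=%d' % page_size if page == 0 else 'limit=%d&offset=%d' % (page_size, page * page_size)
        else:
            query = 'page_size=%d' % page_size if page == 0 else 'page=%d&page_size=%d' % (page + 1, page_size)
        queries.append(query)
    return queries
# _example_page_queries('v2') == ['page_size=100', 'page=2&page_size=100', 'page=3&page_size=100']
# _example_page_queries('v3') == ['limit=100', 'limit=100&offset=100', 'limit=100&offset=200']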
@pytest.mark.parametrize('responses', [
[
{
'count': 2,
'results': [{'name': '3.5.1', }, {'name': '3.5.2'}],
'next_link': None,
'next': None,
'previous_link': None,
'previous': None
},
],
[
{
'count': 2,
'results': [{'name': '3.5.1'}],
'next_link': '/api/v1/roles/432/versions/?page=2&page_size=50',
'next': '/roles/432/versions/?page=2&page_size=50',
'previous_link': None,
'previous': None
},
{
'count': 2,
'results': [{'name': '3.5.2'}],
'next_link': None,
'next': None,
'previous_link': '/api/v1/roles/432/versions/?&page_size=50',
'previous': '/roles/432/versions/?page_size=50',
},
]
])
def test_get_role_versions_pagination(monkeypatch, responses):
api = get_test_galaxy_api('https://galaxy.com/api/', 'v1')
mock_open = MagicMock()
mock_open.side_effect = [StringIO(to_text(json.dumps(r))) for r in responses]
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual = api.fetch_role_related('versions', 432)
assert actual == [{'name': '3.5.1'}, {'name': '3.5.2'}]
assert mock_open.call_count == len(responses)
assert mock_open.mock_calls[0][1][0] == 'https://galaxy.com/api/v1/roles/432/versions/?page_size=50'
if len(responses) == 2:
assert mock_open.mock_calls[1][1][0] == 'https://galaxy.com/api/v1/roles/432/versions/?page=2&page_size=50'
def test_missing_cache_dir(cache_dir):
os.rmdir(cache_dir)
GalaxyAPI(None, "test", 'https://galaxy.ansible.com/', no_cache=False)
assert os.path.isdir(cache_dir)
assert stat.S_IMODE(os.stat(cache_dir).st_mode) == 0o700
cache_file = os.path.join(cache_dir, 'api.json')
with open(cache_file) as fd:
actual_cache = fd.read()
assert actual_cache == '{"version": 1}'
assert stat.S_IMODE(os.stat(cache_file).st_mode) == 0o600
def test_existing_cache(cache_dir):
cache_file = os.path.join(cache_dir, 'api.json')
cache_file_contents = '{"version": 1, "test": "json"}'
with open(cache_file, mode='w') as fd:
fd.write(cache_file_contents)
os.chmod(cache_file, 0o655)
GalaxyAPI(None, "test", 'https://galaxy.ansible.com/', no_cache=False)
assert os.path.isdir(cache_dir)
with open(cache_file) as fd:
actual_cache = fd.read()
assert actual_cache == cache_file_contents
assert stat.S_IMODE(os.stat(cache_file).st_mode) == 0o655
@pytest.mark.parametrize('content', [
'',
'value',
'{"de" "finit" "ely" [\'invalid"]}',
'[]',
'{"version": 2, "test": "json"}',
'{"version": 2, "key": "ÅÑŚÌβŁÈ"}',
])
def test_cache_invalid_cache_content(content, cache_dir):
cache_file = os.path.join(cache_dir, 'api.json')
with open(cache_file, mode='w') as fd:
fd.write(content)
os.chmod(cache_file, 0o664)
GalaxyAPI(None, "test", 'https://galaxy.ansible.com/', no_cache=False)
with open(cache_file) as fd:
actual_cache = fd.read()
assert actual_cache == '{"version": 1}'
assert stat.S_IMODE(os.stat(cache_file).st_mode) == 0o664
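# Illustrative sketch (an assumption about the validation these cases imply, not the
# GalaxyAPI implementation): cached data is only reused when it parses as a JSON object
# whose 'version' is 1; anything else is discarded and replaced by the '{"version": 1}' stub.
def _example_cache_is_usable(raw_text):
    import json
    try:
        data = json.loads(raw_text)
    except ValueError:
        return False
    return isinstance(data, dict) and data.get('version') == 1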
def test_cache_complete_pagination(cache_dir, monkeypatch):
responses = get_collection_versions()
cache_file = os.path.join(cache_dir, 'api.json')
api = get_test_galaxy_api('https://galaxy.server.com/api/', 'v2', no_cache=False)
mock_open = MagicMock(
side_effect=[
StringIO(to_text(json.dumps(r)))
for r in responses
]
)
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual_versions = api.get_collection_versions('namespace', 'collection')
assert actual_versions == [u'1.0.0', u'1.0.1', u'1.0.2', u'1.0.3', u'1.0.4', u'1.0.5']
with open(cache_file) as fd:
final_cache = json.loads(fd.read())
cached_server = final_cache['galaxy.server.com:']
cached_collection = cached_server['/api/v2/collections/namespace/collection/versions/']
cached_versions = [r['version'] for r in cached_collection['results']]
assert final_cache == api._cache
assert cached_versions == actual_versions
def test_cache_complete_pagination_v3(cache_dir, monkeypatch):
responses = get_v3_collection_versions()
cache_file = os.path.join(cache_dir, 'api.json')
api = get_test_galaxy_api('https://galaxy.server.com/api/', 'v3', no_cache=False)
mock_open = MagicMock(
side_effect=[
StringIO(to_text(json.dumps(r)))
for r in responses
]
)
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual_versions = api.get_collection_versions('namespace', 'collection')
assert actual_versions == [u'1.0.0', u'1.0.1', u'1.0.2', u'1.0.3', u'1.0.4', u'1.0.5']
with open(cache_file) as fd:
final_cache = json.loads(fd.read())
cached_server = final_cache['galaxy.server.com:']
cached_collection = cached_server['/api/v3/collections/namespace/collection/versions/']
cached_versions = [r['version'] for r in cached_collection['results']]
assert final_cache == api._cache
assert cached_versions == actual_versions
def test_cache_flaky_pagination(cache_dir, monkeypatch):
responses = get_collection_versions()
cache_file = os.path.join(cache_dir, 'api.json')
api = get_test_galaxy_api('https://galaxy.server.com/api/', 'v2', no_cache=False)
# First attempt, fail midway through
mock_open = MagicMock(
side_effect=[
StringIO(to_text(json.dumps(responses[0]))),
StringIO(to_text(json.dumps(responses[1]))),
urllib.error.HTTPError(responses[1]['next'], 500, 'Error', {}, StringIO()),
StringIO(to_text(json.dumps(responses[3]))),
]
)
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
expected = (
r'Error when getting available collection versions for namespace\.collection '
r'from test \(https://galaxy\.server\.com/api/\) '
r'\(HTTP Code: 500, Message: Error Code: Unknown\)'
)
with pytest.raises(GalaxyError, match=expected):
api.get_collection_versions('namespace', 'collection')
with open(cache_file) as fd:
final_cache = json.loads(fd.read())
assert final_cache == {
'version': 1,
'galaxy.server.com:': {
'modified': {
'namespace.collection': responses[0]['modified']
}
}
}
# Reset API
api = get_test_galaxy_api('https://galaxy.server.com/api/', 'v2', no_cache=False)
# Second attempt is successful so cache should be populated
mock_open = MagicMock(
side_effect=[
StringIO(to_text(json.dumps(r)))
for r in responses
]
)
monkeypatch.setattr(galaxy_api, 'open_url', mock_open)
actual_versions = api.get_collection_versions('namespace', 'collection')
assert actual_versions == [u'1.0.0', u'1.0.1', u'1.0.2', u'1.0.3', u'1.0.4', u'1.0.5']
with open(cache_file) as fd:
final_cache = json.loads(fd.read())
cached_server = final_cache['galaxy.server.com:']
cached_collection = cached_server['/api/v2/collections/namespace/collection/versions/']
cached_versions = [r['version'] for r in cached_collection['results']]
assert cached_versions == actual_versions
def test_world_writable_cache(cache_dir, monkeypatch):
mock_warning = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_warning)
cache_file = os.path.join(cache_dir, 'api.json')
with open(cache_file, mode='w') as fd:
fd.write('{"version": 2}')
os.chmod(cache_file, 0o666)
api = GalaxyAPI(None, "test", 'https://galaxy.ansible.com/', no_cache=False)
assert api._cache is None
with open(cache_file) as fd:
actual_cache = fd.read()
assert actual_cache == '{"version": 2}'
assert stat.S_IMODE(os.stat(cache_file).st_mode) == 0o666
assert mock_warning.call_count == 1
assert mock_warning.call_args[0][0] == \
'Galaxy cache has world writable access (%s), ignoring it as a cache source.' % cache_file
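# Illustrative sketch (an assumption about the permission check the warning above implies,
# not the GalaxyAPI implementation): a cache file is treated as untrusted when its mode
# has the world-writable bit set.
def _example_cache_is_world_writable(path):
    import os
    import stat
    return bool(stat.S_IMODE(os.stat(path).st_mode) & stat.S_IWOTH)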
def test_no_cache(cache_dir):
cache_file = os.path.join(cache_dir, 'api.json')
with open(cache_file, mode='w') as fd:
fd.write('random')
api = GalaxyAPI(None, "test", 'https://galaxy.ansible.com/')
assert api._cache is None
with open(cache_file) as fd:
actual_cache = fd.read()
assert actual_cache == 'random'
def test_clear_cache_with_no_cache(cache_dir):
cache_file = os.path.join(cache_dir, 'api.json')
with open(cache_file, mode='w') as fd:
fd.write('{"version": 1, "key": "value"}')
GalaxyAPI(None, "test", 'https://galaxy.ansible.com/', clear_response_cache=True)
assert not os.path.exists(cache_file)
def test_clear_cache(cache_dir):
cache_file = os.path.join(cache_dir, 'api.json')
with open(cache_file, mode='w') as fd:
fd.write('{"version": 1, "key": "value"}')
GalaxyAPI(None, "test", 'https://galaxy.ansible.com/', clear_response_cache=True, no_cache=False)
with open(cache_file) as fd:
actual_cache = fd.read()
assert actual_cache == '{"version": 1}'
assert stat.S_IMODE(os.stat(cache_file).st_mode) == 0o600
@pytest.mark.parametrize(['url', 'expected'], [
('http://hostname/path', 'hostname:'),
('http://hostname:80/path', 'hostname:80'),
('https://testing.com:invalid', 'testing.com:'),
('https://testing.com:1234', 'testing.com:1234'),
('https://username:password@testing.com/path', 'testing.com:'),
('https://username:password@testing.com:443/path', 'testing.com:443'),
])
def test_cache_id(url, expected):
actual = galaxy_api.get_cache_id(url)
assert actual == expected
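# Illustrative sketch (an assumption consistent with the cases above, not the real
# galaxy_api.get_cache_id implementation): the cache id is '<hostname>:<port>' with any
# credentials dropped and an unparseable port collapsing to an empty string.
def _example_cache_id(url):
    from urllib.parse import urlparse
    parts = urlparse(url)
    try:
        port = parts.port or ''
    except ValueError:  # e.g. 'https://testing.com:invalid'
        port = ''
    return '%s:%s' % (parts.hostname, port)
# _example_cache_id('https://username:password@testing.com:443/path') == 'testing.com:443'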
| 53,226
|
Python
|
.py
| 1,100
| 40.149091
| 140
| 0.617404
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,757
|
test_role_requirements.py
|
ansible_ansible/test/units/galaxy/test_role_requirements.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2020, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.playbook.role.requirement import RoleRequirement
def test_null_role_url():
role = RoleRequirement.role_yaml_parse('')
assert role['src'] == ''
assert role['name'] == ''
assert role['scm'] is None
assert role['version'] is None
def test_git_file_role_url():
role = RoleRequirement.role_yaml_parse('git+file:///home/bennojoy/nginx')
assert role['src'] == 'file:///home/bennojoy/nginx'
assert role['name'] == 'nginx'
assert role['scm'] == 'git'
assert role['version'] is None
def test_https_role_url():
role = RoleRequirement.role_yaml_parse('https://github.com/bennojoy/nginx')
assert role['src'] == 'https://github.com/bennojoy/nginx'
assert role['name'] == 'nginx'
assert role['scm'] is None
assert role['version'] is None
def test_git_https_role_url():
role = RoleRequirement.role_yaml_parse('git+https://github.com/geerlingguy/ansible-role-composer.git')
assert role['src'] == 'https://github.com/geerlingguy/ansible-role-composer.git'
assert role['name'] == 'ansible-role-composer'
assert role['scm'] == 'git'
assert role['version'] is None
def test_git_version_role_url():
role = RoleRequirement.role_yaml_parse('git+https://github.com/geerlingguy/ansible-role-composer.git,main')
assert role['src'] == 'https://github.com/geerlingguy/ansible-role-composer.git'
assert role['name'] == 'ansible-role-composer'
assert role['scm'] == 'git'
assert role['version'] == 'main'
@pytest.mark.parametrize("url", [
('https://some.webserver.example.com/files/main.tar.gz'),
('https://some.webserver.example.com/files/main.tar.bz2'),
('https://some.webserver.example.com/files/main.tar.xz'),
])
def test_tar_role_url(url):
role = RoleRequirement.role_yaml_parse(url)
assert role['src'] == url
assert role['name'].startswith('main')
assert role['scm'] is None
assert role['version'] is None
def test_git_ssh_role_url():
role = RoleRequirement.role_yaml_parse('git@gitlab.company.com:mygroup/ansible-base.git')
assert role['src'] == 'git@gitlab.company.com:mygroup/ansible-base.git'
assert role['name'].startswith('ansible-base')
assert role['scm'] is None
assert role['version'] is None
def test_token_role_url():
role = RoleRequirement.role_yaml_parse('git+https://gitlab+deploy-token-312644:_aJQ9c3HWzmRR4knBNyx@gitlab.com/akasurde/ansible-demo')
assert role['src'] == 'https://gitlab+deploy-token-312644:_aJQ9c3HWzmRR4knBNyx@gitlab.com/akasurde/ansible-demo'
assert role['name'].startswith('ansible-demo')
assert role['scm'] == 'git'
assert role['version'] is None
def test_token_new_style_role_url():
role = RoleRequirement.role_yaml_parse({"src": "git+https://gitlab+deploy-token-312644:_aJQ9c3HWzmRR4knBNyx@gitlab.com/akasurde/ansible-demo"})
assert role['src'] == 'https://gitlab+deploy-token-312644:_aJQ9c3HWzmRR4knBNyx@gitlab.com/akasurde/ansible-demo'
assert role['name'].startswith('ansible-demo')
assert role['scm'] == 'git'
assert role['version'] == ''
| 3,287
|
Python
|
.py
| 65
| 46.292308
| 147
| 0.701031
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,758
|
test_collection.py
|
ansible_ansible/test/units/galaxy/test_collection.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import json
import os
import pytest
import re
import tarfile
import tempfile
import uuid
from hashlib import sha256
from io import BytesIO
from unittest.mock import MagicMock, mock_open, patch
import ansible.constants as C
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.config import manager
from ansible.errors import AnsibleError
from ansible.galaxy import api, collection, token
from ansible.module_utils.common.sentinel import Sentinel
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.file import S_IRWU_RG_RO
import builtins
from ansible.utils import context_objects as co
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash_s
@pytest.fixture(autouse=True)
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
@pytest.fixture
def collection_path_suffix(request):
"""Return test collection path suffix or the default."""
return getattr(request, 'param', 'test-ÅÑŚÌβŁÈ Collections Input')
@pytest.fixture
def collection_input(tmp_path_factory, collection_path_suffix):
"""Create a collection skeleton directory for build tests."""
test_dir = to_text(tmp_path_factory.mktemp(collection_path_suffix))
namespace = 'ansible_namespace'
collection = 'collection'
skeleton = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')
galaxy_args = ['ansible-galaxy', 'collection', 'init', '%s.%s' % (namespace, collection),
'-c', '--init-path', test_dir, '--collection-skeleton', skeleton]
GalaxyCLI(args=galaxy_args).run()
collection_dir = os.path.join(test_dir, namespace, collection)
output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Output'))
return collection_dir, output_dir
@pytest.fixture()
def collection_artifact(monkeypatch, tmp_path_factory):
""" Creates a temp collection artifact and mocked open_url instance for publishing tests """
mock_open = MagicMock()
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
mock_uuid = MagicMock()
mock_uuid.return_value.hex = 'uuid'
monkeypatch.setattr(uuid, 'uuid4', mock_uuid)
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
input_file = to_text(tmp_path / 'collection.tar.gz')
with tarfile.open(input_file, 'w:gz') as tfile:
b_io = BytesIO(b"\x00\x01\x02\x03")
tar_info = tarfile.TarInfo('test')
tar_info.size = 4
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
return input_file, mock_open
@pytest.fixture()
def galaxy_yml_dir(request, tmp_path_factory):
b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
b_galaxy_yml = os.path.join(b_test_dir, b'galaxy.yml')
with open(b_galaxy_yml, 'wb') as galaxy_obj:
galaxy_obj.write(to_bytes(request.param))
yield b_test_dir
@pytest.fixture()
def tmp_tarfile(tmp_path_factory, manifest_info):
""" Creates a temporary tar file for _extract_tar_file tests """
filename = u'ÅÑŚÌβŁÈ'
temp_dir = to_bytes(tmp_path_factory.mktemp('test-%s Collections' % to_native(filename)))
tar_file = os.path.join(temp_dir, to_bytes('%s.tar.gz' % filename))
data = os.urandom(8)
with tarfile.open(tar_file, 'w:gz') as tfile:
b_io = BytesIO(data)
tar_info = tarfile.TarInfo(filename)
tar_info.size = len(data)
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
b_data = to_bytes(json.dumps(manifest_info, indent=True), errors='surrogate_or_strict')
b_io = BytesIO(b_data)
tar_info = tarfile.TarInfo('MANIFEST.json')
tar_info.size = len(b_data)
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
sha256_hash = sha256()
sha256_hash.update(data)
with tarfile.open(tar_file, 'r') as tfile:
yield temp_dir, tfile, filename, sha256_hash.hexdigest()
@pytest.fixture()
def galaxy_server():
context.CLIARGS._store = {'ignore_certs': False}
galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com',
token=token.GalaxyToken(token='key'))
return galaxy_api
@pytest.fixture()
def manifest_template():
def get_manifest_info(namespace='ansible_namespace', name='collection', version='0.1.0'):
return {
"collection_info": {
"namespace": namespace,
"name": name,
"version": version,
"authors": [
"shertel"
],
"readme": "README.md",
"tags": [
"test",
"collection"
],
"description": "Test",
"license": [
"MIT"
],
"license_file": None,
"dependencies": {},
"repository": "https://github.com/{0}/{1}".format(namespace, name),
"documentation": None,
"homepage": None,
"issues": None
},
"file_manifest_file": {
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "files_manifest_checksum",
"format": 1
},
"format": 1
}
return get_manifest_info
@pytest.fixture()
def manifest_info(manifest_template):
return manifest_template()
@pytest.fixture()
def manifest(manifest_info):
b_data = to_bytes(json.dumps(manifest_info))
with patch.object(builtins, 'open', mock_open(read_data=b_data)) as m:
with open('MANIFEST.json', mode='rb') as fake_file:
yield fake_file, sha256(b_data).hexdigest()
@pytest.mark.parametrize(
'required_signature_count,valid',
[
("1", True),
("+1", True),
("all", True),
("+all", True),
("-1", False),
("invalid", False),
("1.5", False),
("+", False),
]
)
def test_cli_options(required_signature_count, valid, monkeypatch):
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
'--keyring',
'~/.ansible/pubring.kbx',
'--required-valid-signature-count',
required_signature_count
]
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
if valid:
galaxy_cli.run()
else:
with pytest.raises(SystemExit, match='2') as error:
galaxy_cli.run()
@pytest.mark.parametrize(
"config,server",
[
(
# Options to create ini config
{
'url': 'https://galaxy.ansible.com',
'validate_certs': 'False',
},
# Expected server attributes
{
'validate_certs': False,
},
),
(
{
'url': 'https://galaxy.ansible.com',
'validate_certs': 'True',
},
{
'validate_certs': True,
},
),
],
)
def test_bool_type_server_config_options(config, server, monkeypatch):
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
]
config_lines = [
"[galaxy]",
"server_list=server1\n",
"[galaxy_server.server1]",
"url=%s" % config['url'],
"validate_certs=%s\n" % config['validate_certs'],
]
with tempfile.NamedTemporaryFile(suffix='.cfg') as tmp_file:
tmp_file.write(
to_bytes('\n'.join(config_lines))
)
tmp_file.flush()
with patch.object(C, 'GALAXY_SERVER_LIST', ['server1']):
with patch.object(C.config, '_config_file', tmp_file.name):
C.config._parse_config_file()
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
galaxy_cli.run()
assert galaxy_cli.api_servers[0].name == 'server1'
assert galaxy_cli.api_servers[0].validate_certs == server['validate_certs']
@pytest.mark.parametrize('global_ignore_certs', [True, False])
def test_validate_certs(global_ignore_certs, monkeypatch):
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
]
if global_ignore_certs:
cli_args.append('--ignore-certs')
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
galaxy_cli.run()
assert len(galaxy_cli.api_servers) == 1
assert galaxy_cli.api_servers[0].validate_certs is not global_ignore_certs
@pytest.mark.parametrize(
["ignore_certs_cli", "ignore_certs_cfg", "expected_validate_certs"],
[
(None, None, True),
(None, True, False),
(None, False, True),
(True, None, False),
(True, True, False),
(True, False, False),
]
)
def test_validate_certs_with_server_url(ignore_certs_cli, ignore_certs_cfg, expected_validate_certs, monkeypatch):
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
'-s',
'https://galaxy.ansible.com'
]
if ignore_certs_cli:
cli_args.append('--ignore-certs')
if ignore_certs_cfg is not None:
monkeypatch.setattr(C, 'GALAXY_IGNORE_CERTS', ignore_certs_cfg)
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
galaxy_cli.run()
assert len(galaxy_cli.api_servers) == 1
assert galaxy_cli.api_servers[0].validate_certs == expected_validate_certs
@pytest.mark.parametrize(
["ignore_certs_cli", "ignore_certs_cfg", "expected_server2_validate_certs", "expected_server3_validate_certs"],
[
(None, None, True, True),
(None, True, True, False),
(None, False, True, True),
(True, None, False, False),
(True, True, False, False),
(True, False, False, False),
]
)
def test_validate_certs_server_config(ignore_certs_cfg, ignore_certs_cli, expected_server2_validate_certs, expected_server3_validate_certs, monkeypatch):
server_names = ['server1', 'server2', 'server3']
cfg_lines = [
"[galaxy]",
"server_list=server1,server2,server3",
"[galaxy_server.server1]",
"url=https://galaxy.ansible.com/api/",
"validate_certs=False",
"[galaxy_server.server2]",
"url=https://galaxy.ansible.com/api/",
"validate_certs=True",
"[galaxy_server.server3]",
"url=https://galaxy.ansible.com/api/",
]
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
]
if ignore_certs_cli:
cli_args.append('--ignore-certs')
if ignore_certs_cfg is not None:
monkeypatch.setattr(C, 'GALAXY_IGNORE_CERTS', ignore_certs_cfg)
monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', server_names)
with tempfile.NamedTemporaryFile(suffix='.cfg') as tmp_file:
tmp_file.write(to_bytes('\n'.join(cfg_lines), errors='surrogate_or_strict'))
tmp_file.flush()
monkeypatch.setattr(C.config, '_config_file', tmp_file.name)
C.config._parse_config_file()
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
galaxy_cli.run()
    # Precedence (see the sketch after this test): (not) --ignore-certs > server's validate_certs > (not) GALAXY_IGNORE_CERTS > True
assert galaxy_cli.api_servers[0].validate_certs is False
assert galaxy_cli.api_servers[1].validate_certs is expected_server2_validate_certs
assert galaxy_cli.api_servers[2].validate_certs is expected_server3_validate_certs
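# Illustrative sketch (hypothetical helper mirroring the precedence noted in the comment
# above): the CLI flag wins, then the server's own validate_certs, then the global
# GALAXY_IGNORE_CERTS option, and finally a default of True.
def _example_resolve_validate_certs(ignore_certs_cli, server_validate_certs, ignore_certs_cfg):
    if ignore_certs_cli:
        return False
    if server_validate_certs is not None:
        return server_validate_certs
    if ignore_certs_cfg is not None:
        return not ignore_certs_cfg
    return True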
@pytest.mark.parametrize(
["timeout_cli", "timeout_cfg", "timeout_fallback", "expected_timeout"],
[
(None, None, None, 60),
(None, None, 10, 10),
(None, 20, 10, 20),
(30, 20, 10, 30),
]
)
def test_timeout_server_config(timeout_cli, timeout_cfg, timeout_fallback, expected_timeout, monkeypatch):
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
]
if timeout_cli is not None:
cli_args.extend(["--timeout", f"{timeout_cli}"])
cfg_lines = ["[galaxy]", "server_list=server1"]
if timeout_fallback is not None:
cfg_lines.append(f"server_timeout={timeout_fallback}")
    # Fix the default in the server config since C.GALAXY_SERVER_TIMEOUT has already been evaluated
server_additional = manager.GALAXY_SERVER_ADDITIONAL.copy()
server_additional['timeout']['default'] = timeout_fallback
monkeypatch.setattr(manager, 'GALAXY_SERVER_ADDITIONAL', server_additional)
cfg_lines.extend(["[galaxy_server.server1]", "url=https://galaxy.ansible.com/api/"])
if timeout_cfg is not None:
cfg_lines.append(f"timeout={timeout_cfg}")
monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', ['server1'])
with tempfile.NamedTemporaryFile(suffix='.cfg') as tmp_file:
tmp_file.write(to_bytes('\n'.join(cfg_lines), errors='surrogate_or_strict'))
tmp_file.flush()
monkeypatch.setattr(C.config, '_config_file', tmp_file.name)
C.config._parse_config_file()
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
galaxy_cli.run()
assert galaxy_cli.api_servers[0].timeout == expected_timeout
def test_build_collection_no_galaxy_yaml():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
with pytest.raises(AnsibleError, match=expected):
collection.build_collection(fake_path, u'output', False)
def test_build_existing_output_file(collection_input):
input_dir, output_dir = collection_input
existing_output_dir = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
os.makedirs(existing_output_dir)
expected = "The output collection artifact '%s' already exists, but is a directory - aborting" \
% to_native(existing_output_dir)
with pytest.raises(AnsibleError, match=expected):
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
def test_build_existing_output_without_force(collection_input):
input_dir, output_dir = collection_input
existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
with open(existing_output, 'w+') as out_file:
out_file.write("random garbage")
out_file.flush()
expected = "The file '%s' already exists. You can use --force to re-create the collection artifact." \
% to_native(existing_output)
with pytest.raises(AnsibleError, match=expected):
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
@pytest.mark.parametrize(
'collection_path_suffix',
(
'test-ÅÑŚÌβŁÈ Collections Input 1 with_slash/',
'test-ÅÑŚÌβŁÈ Collections Input 2 no slash',
),
indirect=('collection_path_suffix', ),
)
def test_build_existing_output_with_force(collection_input):
input_dir, output_dir = collection_input
existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
with open(existing_output, 'w+') as out_file:
out_file.write("random garbage")
out_file.flush()
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), True)
# Verify the file was replaced with an actual tar file
assert tarfile.is_tarfile(existing_output)
def test_build_with_existing_files_and_manifest(collection_input):
input_dir, output_dir = collection_input
with open(os.path.join(input_dir, 'MANIFEST.json'), "wb") as fd:
fd.write(b'{"collection_info": {"version": "6.6.6"}, "version": 1}')
with open(os.path.join(input_dir, 'FILES.json'), "wb") as fd:
fd.write(b'{"files": [], "format": 1}')
with open(os.path.join(input_dir, "plugins", "MANIFEST.json"), "wb") as fd:
fd.write(b"test data that should be in build")
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
assert tarfile.is_tarfile(output_artifact)
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
manifest_file = [m for m in members if m.path == "MANIFEST.json"][0]
manifest_file_obj = actual.extractfile(manifest_file.name)
manifest_file_text = manifest_file_obj.read()
manifest_file_obj.close()
assert manifest_file_text != b'{"collection_info": {"version": "6.6.6"}, "version": 1}'
        json_file = [m for m in members if m.path == "FILES.json"][0]
json_file_obj = actual.extractfile(json_file.name)
json_file_text = json_file_obj.read()
json_file_obj.close()
assert json_file_text != b'{"files": [], "format": 1}'
sub_manifest_file = [m for m in members if m.path == "plugins/MANIFEST.json"][0]
sub_manifest_file_obj = actual.extractfile(sub_manifest_file.name)
sub_manifest_file_text = sub_manifest_file_obj.read()
sub_manifest_file_obj.close()
assert sub_manifest_file_text == b"test data that should be in build"
@pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: value: broken'], indirect=True)
def test_invalid_yaml_galaxy_file(galaxy_yml_dir):
galaxy_file = os.path.join(galaxy_yml_dir, b'galaxy.yml')
expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_file)
with pytest.raises(AnsibleError, match=expected):
collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
@pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: test_namespace'], indirect=True)
def test_missing_required_galaxy_key(galaxy_yml_dir):
galaxy_file = os.path.join(galaxy_yml_dir, b'galaxy.yml')
expected = "The collection galaxy.yml at '%s' is missing the following mandatory keys: authors, name, " \
"readme, version" % to_native(galaxy_file)
with pytest.raises(AnsibleError, match=expected):
collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
@pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: test_namespace'], indirect=True)
def test_galaxy_yaml_no_mandatory_keys(galaxy_yml_dir):
expected = "The collection galaxy.yml at '%s/galaxy.yml' is missing the " \
"following mandatory keys: authors, name, readme, version" % to_native(galaxy_yml_dir)
with pytest.raises(ValueError, match=expected):
assert collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir, require_build_metadata=False) == expected
@pytest.mark.parametrize('galaxy_yml_dir', [b'My life story is so very interesting'], indirect=True)
def test_galaxy_yaml_no_mandatory_keys_bad_yaml(galaxy_yml_dir):
expected = "The collection galaxy.yml at '%s/galaxy.yml' is incorrectly formatted." % to_native(galaxy_yml_dir)
with pytest.raises(AnsibleError, match=expected):
collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
@pytest.mark.parametrize('galaxy_yml_dir', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
invalid: value"""], indirect=True)
def test_warning_extra_keys(galaxy_yml_dir, monkeypatch):
display_mock = MagicMock()
monkeypatch.setattr(Display, 'warning', display_mock)
collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
assert display_mock.call_count == 1
assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s/galaxy.yml': invalid"\
% to_text(galaxy_yml_dir)
@pytest.mark.parametrize('galaxy_yml_dir', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md"""], indirect=True)
def test_defaults_galaxy_yml(galaxy_yml_dir):
actual = collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
assert actual['namespace'] == 'namespace'
assert actual['name'] == 'collection'
assert actual['authors'] == ['Jordan']
assert actual['version'] == '0.1.0'
assert actual['readme'] == 'README.md'
assert actual['description'] is None
assert actual['repository'] is None
assert actual['documentation'] is None
assert actual['homepage'] is None
assert actual['issues'] is None
assert actual['tags'] == []
assert actual['dependencies'] == {}
assert actual['license'] == []
@pytest.mark.parametrize('galaxy_yml_dir', [(b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license: MIT"""), (b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license:
- MIT""")], indirect=True)
def test_galaxy_yml_list_value(galaxy_yml_dir):
actual = collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
assert actual['license'] == ['MIT']
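# _build_files_manifest should skip VCS folders, *.retry files, galaxy.yml and tests/output, logging each skipped path via Display.vvv.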
def test_build_ignore_files_and_folders(collection_input, monkeypatch):
input_dir = collection_input[0]
mock_display = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_display)
git_folder = os.path.join(input_dir, '.git')
retry_file = os.path.join(input_dir, 'ansible.retry')
tests_folder = os.path.join(input_dir, 'tests', 'output')
tests_output_file = os.path.join(tests_folder, 'result.txt')
os.makedirs(git_folder)
os.makedirs(tests_folder)
with open(retry_file, 'w+') as ignore_file:
ignore_file.write('random')
ignore_file.flush()
with open(tests_output_file, 'w+') as tests_file:
tests_file.write('random')
tests_file.flush()
actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
assert actual['format'] == 1
for manifest_entry in actual['files']:
assert manifest_entry['name'] not in ['.git', 'ansible.retry', 'galaxy.yml', 'tests/output', 'tests/output/result.txt']
expected_msgs = [
"Skipping '%s/galaxy.yml' for collection build" % to_text(input_dir),
"Skipping '%s' for collection build" % to_text(retry_file),
"Skipping '%s' for collection build" % to_text(git_folder),
"Skipping '%s' for collection build" % to_text(tests_folder),
]
assert mock_display.call_count == 4
assert mock_display.mock_calls[0][1][0] in expected_msgs
assert mock_display.mock_calls[1][1][0] in expected_msgs
assert mock_display.mock_calls[2][1][0] in expected_msgs
assert mock_display.mock_calls[3][1][0] in expected_msgs
def test_build_ignore_older_release_in_root(collection_input, monkeypatch):
input_dir = collection_input[0]
mock_display = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_display)
# This is expected to be ignored because it is in the root collection dir.
release_file = os.path.join(input_dir, 'namespace-collection-0.0.0.tar.gz')
# This is not expected to be ignored because it is not in the root collection dir.
fake_release_file = os.path.join(input_dir, 'plugins', 'namespace-collection-0.0.0.tar.gz')
for filename in [release_file, fake_release_file]:
with open(filename, 'w+') as file_obj:
file_obj.write('random')
file_obj.flush()
actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
assert actual['format'] == 1
plugin_release_found = False
for manifest_entry in actual['files']:
assert manifest_entry['name'] != 'namespace-collection-0.0.0.tar.gz'
if manifest_entry['name'] == 'plugins/namespace-collection-0.0.0.tar.gz':
plugin_release_found = True
assert plugin_release_found
expected_msgs = [
"Skipping '%s/galaxy.yml' for collection build" % to_text(input_dir),
"Skipping '%s' for collection build" % to_text(release_file)
]
assert mock_display.call_count == 2
assert mock_display.mock_calls[0][1][0] in expected_msgs
assert mock_display.mock_calls[1][1][0] in expected_msgs
def test_build_ignore_patterns(collection_input, monkeypatch):
input_dir = collection_input[0]
mock_display = MagicMock()
monkeypatch.setattr(Display, 'vvv', mock_display)
actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection',
['*.md', 'plugins/action', 'playbooks/*.j2'],
Sentinel, None)
assert actual['format'] == 1
expected_missing = [
'README.md',
'docs/My Collection.md',
'plugins/action',
'playbooks/templates/test.conf.j2',
'playbooks/templates/subfolder/test.conf.j2',
]
# Files or dirs that are close to a match but are not, make sure they are present
expected_present = [
'docs',
'roles/common/templates/test.conf.j2',
'roles/common/templates/subfolder/test.conf.j2',
]
actual_files = [e['name'] for e in actual['files']]
for m in expected_missing:
assert m not in actual_files
for p in expected_present:
assert p in actual_files
expected_msgs = [
"Skipping '%s/galaxy.yml' for collection build" % to_text(input_dir),
"Skipping '%s/README.md' for collection build" % to_text(input_dir),
"Skipping '%s/docs/My Collection.md' for collection build" % to_text(input_dir),
"Skipping '%s/plugins/action' for collection build" % to_text(input_dir),
"Skipping '%s/playbooks/templates/test.conf.j2' for collection build" % to_text(input_dir),
"Skipping '%s/playbooks/templates/subfolder/test.conf.j2' for collection build" % to_text(input_dir),
]
assert mock_display.call_count == len(expected_msgs)
assert mock_display.mock_calls[0][1][0] in expected_msgs
assert mock_display.mock_calls[1][1][0] in expected_msgs
assert mock_display.mock_calls[2][1][0] in expected_msgs
assert mock_display.mock_calls[3][1][0] in expected_msgs
assert mock_display.mock_calls[4][1][0] in expected_msgs
assert mock_display.mock_calls[5][1][0] in expected_msgs
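# Symlink handling during build: links pointing outside the collection are skipped with a warning, links inside it are preserved.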
def test_build_ignore_symlink_target_outside_collection(collection_input, monkeypatch):
input_dir, outside_dir = collection_input
mock_display = MagicMock()
monkeypatch.setattr(Display, 'warning', mock_display)
link_path = os.path.join(input_dir, 'plugins', 'connection')
os.symlink(outside_dir, link_path)
actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
for manifest_entry in actual['files']:
assert manifest_entry['name'] != 'plugins/connection'
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == "Skipping '%s' as it is a symbolic link to a directory outside " \
"the collection" % to_text(link_path)
def test_build_copy_symlink_target_inside_collection(collection_input):
input_dir = collection_input[0]
os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')
roles_target = os.path.join(input_dir, 'roles', 'linked')
roles_target_tasks = os.path.join(roles_target, 'tasks')
os.makedirs(roles_target_tasks)
with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:")
tasks_main.flush()
os.symlink(roles_target, roles_link)
actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection', [], Sentinel, None)
linked_entries = [e for e in actual['files'] if e['name'].startswith('playbooks/roles/linked')]
assert len(linked_entries) == 1
assert linked_entries[0]['name'] == 'playbooks/roles/linked'
assert linked_entries[0]['ftype'] == 'dir'
def test_build_with_symlink_inside_collection(collection_input):
input_dir, output_dir = collection_input
os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')
file_link = os.path.join(input_dir, 'docs', 'README.md')
roles_target = os.path.join(input_dir, 'roles', 'linked')
roles_target_tasks = os.path.join(roles_target, 'tasks')
os.makedirs(roles_target_tasks)
with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:")
tasks_main.flush()
os.symlink(roles_target, roles_link)
os.symlink(os.path.join(input_dir, 'README.md'), file_link)
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
assert tarfile.is_tarfile(output_artifact)
with tarfile.open(output_artifact, mode='r') as actual:
members = actual.getmembers()
linked_folder = [m for m in members if m.path == 'playbooks/roles/linked'][0]
assert linked_folder.type == tarfile.SYMTYPE
assert linked_folder.linkname == '../../roles/linked'
linked_file = [m for m in members if m.path == 'docs/README.md'][0]
assert linked_file.type == tarfile.SYMTYPE
assert linked_file.linkname == '../README.md'
linked_file_obj = actual.extractfile(linked_file.name)
actual_file = secure_hash_s(linked_file_obj.read())
linked_file_obj.close()
assert actual_file == '08f24200b9fbe18903e7a50930c9d0df0b8d7da3' # shasum test/units/cli/test_data/collection_skeleton/README.md
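# Publish tests: galaxy_server.publish_collection (and wait_import_task) are mocked, so no network traffic occurs.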
def test_publish_no_wait(galaxy_server, collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
artifact_path, mock_open = collection_artifact
fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234'
mock_publish = MagicMock()
mock_publish.return_value = fake_import_uri
monkeypatch.setattr(galaxy_server, 'publish_collection', mock_publish)
collection.publish_collection(artifact_path, galaxy_server, False, 0)
assert mock_publish.call_count == 1
assert mock_publish.mock_calls[0][1][0] == artifact_path
assert mock_display.call_count == 1
assert mock_display.mock_calls[0][1][0] == \
"Collection has been pushed to the Galaxy server %s %s, not waiting until import has completed due to " \
"--no-wait being set. Import task results can be found at %s" % (galaxy_server.name, galaxy_server.api_server,
fake_import_uri)
def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
artifact_path, mock_open = collection_artifact
fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234'
mock_publish = MagicMock()
mock_publish.return_value = fake_import_uri
monkeypatch.setattr(galaxy_server, 'publish_collection', mock_publish)
mock_wait = MagicMock()
monkeypatch.setattr(galaxy_server, 'wait_import_task', mock_wait)
collection.publish_collection(artifact_path, galaxy_server, True, 0)
assert mock_publish.call_count == 1
assert mock_publish.mock_calls[0][1][0] == artifact_path
assert mock_wait.call_count == 1
assert mock_wait.mock_calls[0][1][0] == '1234'
assert mock_display.mock_calls[0][1][0] == "Collection has been published to the Galaxy server test_server %s" \
% galaxy_server.api_server
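# _download_file should fetch via open_url, write the file under the target directory and verify its SHA-256 digest.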
def test_download_file(tmp_path_factory, monkeypatch):
temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
data = b"\x00\x01\x02\x03"
sha256_hash = sha256()
sha256_hash.update(data)
mock_open = MagicMock()
mock_open.return_value = BytesIO(data)
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
expected = temp_dir
actual = collection._download_file('http://google.com/file', temp_dir, sha256_hash.hexdigest(), True)
assert actual.startswith(expected)
assert os.path.isfile(actual)
with open(actual, 'rb') as file_obj:
assert file_obj.read() == data
assert mock_open.call_count == 1
assert mock_open.mock_calls[0][1][0] == 'http://google.com/file'
def test_download_file_hash_mismatch(tmp_path_factory, monkeypatch):
temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
data = b"\x00\x01\x02\x03"
mock_open = MagicMock()
mock_open.return_value = BytesIO(data)
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
expected = "Mismatch artifact hash with downloaded file"
with pytest.raises(AnsibleError, match=expected):
collection._download_file('http://google.com/file', temp_dir, 'bad', True)
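# _extract_tar_file error paths: checksum mismatch, missing member and path traversal must all raise AnsibleError.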
def test_extract_tar_file_invalid_hash(tmp_tarfile):
temp_dir, tfile, filename, dummy = tmp_tarfile
expected = "Checksum mismatch for '%s' inside collection at '%s'" % (to_native(filename), to_native(tfile.name))
with pytest.raises(AnsibleError, match=expected):
collection._extract_tar_file(tfile, filename, temp_dir, temp_dir, "fakehash")
def test_extract_tar_file_missing_member(tmp_tarfile):
temp_dir, tfile, dummy, dummy = tmp_tarfile
expected = "Collection tar at '%s' does not contain the expected file 'missing'." % to_native(tfile.name)
with pytest.raises(AnsibleError, match=expected):
collection._extract_tar_file(tfile, 'missing', temp_dir, temp_dir)
def test_extract_tar_file_missing_parent_dir(tmp_tarfile):
temp_dir, tfile, filename, checksum = tmp_tarfile
output_dir = os.path.join(temp_dir, b'output')
output_file = os.path.join(output_dir, to_bytes(filename))
collection._extract_tar_file(tfile, filename, output_dir, temp_dir, checksum)
    assert os.path.isfile(output_file)
def test_extract_tar_file_outside_dir(tmp_path_factory):
filename = u'ÅÑŚÌβŁÈ'
temp_dir = to_bytes(tmp_path_factory.mktemp('test-%s Collections' % to_native(filename)))
tar_file = os.path.join(temp_dir, to_bytes('%s.tar.gz' % filename))
data = os.urandom(8)
tar_filename = '../%s.sh' % filename
with tarfile.open(tar_file, 'w:gz') as tfile:
b_io = BytesIO(data)
tar_info = tarfile.TarInfo(tar_filename)
tar_info.size = len(data)
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
expected = re.escape("Cannot extract tar entry '%s' as it will be placed outside the collection directory"
% to_native(tar_filename))
with tarfile.open(tar_file, 'r') as tfile:
with pytest.raises(AnsibleError, match=expected):
collection._extract_tar_file(tfile, tar_filename, os.path.join(temp_dir, to_bytes(filename)), temp_dir)
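# Argument handling for 'ansible-galaxy collection verify'.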
def test_require_one_of_collections_requirements_with_both():
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'verify', 'namespace.collection', '-r', 'requirements.yml'])
with pytest.raises(AnsibleError) as req_err:
cli._require_one_of_collections_requirements(('namespace.collection',), 'requirements.yml')
with pytest.raises(AnsibleError) as cli_err:
cli.run()
assert req_err.value.message == cli_err.value.message == 'The positional collection_name arg and --requirements-file are mutually exclusive.'
def test_require_one_of_collections_requirements_with_neither():
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'verify'])
with pytest.raises(AnsibleError) as req_err:
cli._require_one_of_collections_requirements((), '')
with pytest.raises(AnsibleError) as cli_err:
cli.run()
assert req_err.value.message == cli_err.value.message == 'You must specify a collection name or a requirements file.'
def test_require_one_of_collections_requirements_with_collections():
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'verify', 'namespace1.collection1', 'namespace2.collection1:1.0.0'])
collections = ('namespace1.collection1', 'namespace2.collection1:1.0.0',)
requirements = cli._require_one_of_collections_requirements(collections, '')['collections']
req_tuples = [('%s.%s' % (req.namespace, req.name), req.ver, req.src, req.type,) for req in requirements]
assert req_tuples == [('namespace1.collection1', '*', None, 'galaxy'), ('namespace2.collection1', '1.0.0', None, 'galaxy')]
@patch('ansible.cli.galaxy.GalaxyCLI._parse_requirements_file')
def test_require_one_of_collections_requirements_with_requirements(mock_parse_requirements_file, galaxy_server):
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'verify', '-r', 'requirements.yml', 'namespace.collection'])
mock_parse_requirements_file.return_value = {'collections': [('namespace.collection', '1.0.5', galaxy_server)]}
requirements = cli._require_one_of_collections_requirements((), 'requirements.yml')['collections']
assert mock_parse_requirements_file.call_count == 1
assert requirements == [('namespace.collection', '1.0.5', galaxy_server)]
@patch('ansible.cli.galaxy.GalaxyCLI.execute_verify', spec=True)
def test_call_GalaxyCLI(execute_verify):
galaxy_args = ['ansible-galaxy', 'collection', 'verify', 'namespace.collection']
GalaxyCLI(args=galaxy_args).run()
assert execute_verify.call_count == 1
@patch('ansible.cli.galaxy.GalaxyCLI.execute_verify')
def test_call_GalaxyCLI_with_implicit_role(execute_verify):
galaxy_args = ['ansible-galaxy', 'verify', 'namespace.implicit_role']
with pytest.raises(SystemExit):
GalaxyCLI(args=galaxy_args).run()
assert not execute_verify.called
@patch('ansible.cli.galaxy.GalaxyCLI.execute_verify')
def test_call_GalaxyCLI_with_role(execute_verify):
galaxy_args = ['ansible-galaxy', 'role', 'verify', 'namespace.role']
with pytest.raises(SystemExit):
GalaxyCLI(args=galaxy_args).run()
assert not execute_verify.called
@patch('ansible.cli.galaxy.verify_collections', spec=True)
def test_execute_verify_with_defaults(mock_verify_collections):
galaxy_args = ['ansible-galaxy', 'collection', 'verify', 'namespace.collection:1.0.4']
GalaxyCLI(args=galaxy_args).run()
assert mock_verify_collections.call_count == 1
print("Call args {0}".format(mock_verify_collections.call_args[0]))
requirements, search_paths, galaxy_apis, ignore_errors = mock_verify_collections.call_args[0]
assert [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type) for r in requirements] == [('namespace.collection', '1.0.4', None, 'galaxy')]
for install_path in search_paths:
assert install_path.endswith('ansible_collections')
assert galaxy_apis[0].api_server == 'https://galaxy.ansible.com'
assert ignore_errors is False
@patch('ansible.cli.galaxy.verify_collections', spec=True)
def test_execute_verify(mock_verify_collections):
GalaxyCLI(args=[
'ansible-galaxy', 'collection', 'verify', 'namespace.collection:1.0.4', '--ignore-certs',
'-p', '~/.ansible', '--ignore-errors', '--server', 'http://galaxy-dev.com',
]).run()
assert mock_verify_collections.call_count == 1
requirements, search_paths, galaxy_apis, ignore_errors = mock_verify_collections.call_args[0]
assert [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type) for r in requirements] == [('namespace.collection', '1.0.4', None, 'galaxy')]
for install_path in search_paths:
assert install_path.endswith('ansible_collections')
assert galaxy_apis[0].api_server == 'http://galaxy-dev.com'
assert ignore_errors is True
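# _verify_file_hash records an entry in error_queue whenever the installed file is missing or its digest does not match.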
def test_verify_file_hash_deleted_file(manifest_info):
data = to_bytes(json.dumps(manifest_info))
digest = sha256(data).hexdigest()
namespace = manifest_info['collection_info']['namespace']
name = manifest_info['collection_info']['name']
version = manifest_info['collection_info']['version']
server = 'http://galaxy.ansible.com'
error_queue = []
with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=False)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
mock_isfile.assert_called_once()
assert len(error_queue) == 1
assert error_queue[0].installed is None
assert error_queue[0].expected == digest
def test_verify_file_hash_matching_hash(manifest_info):
data = to_bytes(json.dumps(manifest_info))
digest = sha256(data).hexdigest()
namespace = manifest_info['collection_info']['namespace']
name = manifest_info['collection_info']['name']
version = manifest_info['collection_info']['version']
server = 'http://galaxy.ansible.com'
error_queue = []
with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
mock_isfile.assert_called_once()
assert error_queue == []
def test_verify_file_hash_mismatching_hash(manifest_info):
data = to_bytes(json.dumps(manifest_info))
digest = sha256(data).hexdigest()
different_digest = 'not_{0}'.format(digest)
namespace = manifest_info['collection_info']['namespace']
name = manifest_info['collection_info']['name']
version = manifest_info['collection_info']['version']
server = 'http://galaxy.ansible.com'
error_queue = []
with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
collection._verify_file_hash(b'path/', 'file', different_digest, error_queue)
mock_isfile.assert_called_once()
assert len(error_queue) == 1
assert error_queue[0].installed == digest
assert error_queue[0].expected == different_digest
def test_consume_file(manifest):
manifest_file, checksum = manifest
assert checksum == collection._consume_file(manifest_file)
def test_consume_file_and_write_contents(manifest, manifest_info):
manifest_file, checksum = manifest
write_to = BytesIO()
actual_hash = collection._consume_file(manifest_file, write_to)
write_to.seek(0)
assert to_bytes(json.dumps(manifest_info)) == write_to.read()
assert actual_hash == checksum
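# Helpers for reading members, hashes and JSON documents out of a collection tarball.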
def test_get_tar_file_member(tmp_tarfile):
temp_dir, tfile, filename, checksum = tmp_tarfile
with collection._get_tar_file_member(tfile, filename) as (tar_file_member, tar_file_obj):
assert isinstance(tar_file_member, tarfile.TarInfo)
assert isinstance(tar_file_obj, tarfile.ExFileObject)
def test_get_nonexistent_tar_file_member(tmp_tarfile):
temp_dir, tfile, filename, checksum = tmp_tarfile
file_does_not_exist = filename + 'nonexistent'
with pytest.raises(AnsibleError) as err:
collection._get_tar_file_member(tfile, file_does_not_exist)
assert to_text(err.value.message) == "Collection tar at '%s' does not contain the expected file '%s'." % (to_text(tfile.name), file_does_not_exist)
def test_get_tar_file_hash(tmp_tarfile):
temp_dir, tfile, filename, checksum = tmp_tarfile
assert checksum == collection._get_tar_file_hash(tfile.name, filename)
def test_get_json_from_tar_file(tmp_tarfile):
temp_dir, tfile, filename, checksum = tmp_tarfile
assert 'MANIFEST.json' in tfile.getnames()
data = collection._get_json_from_tar_file(tfile.name, 'MANIFEST.json')
assert isinstance(data, dict)
| 45,537 | Python | .py | 899 | 43.727475 | 153 | 0.675132 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,759 | test_collection_install.py | ansible_ansible/test/units/galaxy/test_collection_install.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import copy
import json
import os
import pytest
import re
import shutil
import stat
import tarfile
import yaml
from io import BytesIO, StringIO
from unittest.mock import MagicMock, patch
import urllib.error
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import collection, api, dependency_resolution
from ansible.galaxy.dependency_resolution.dataclasses import Candidate, Requirement
from ansible.module_utils.common.file import S_IRWU_RG_RO, S_IRWXU_RXG_RXO
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.process import get_bin_path
from ansible.utils import context_objects as co
from ansible.utils.display import Display
import ansible.constants as C
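# Captures the candidate lists returned by the dependency provider's find_matches so tests can assert on resolution order.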
class RequirementCandidates():
def __init__(self):
self.candidates = []
def func_wrapper(self, func):
def run(*args, **kwargs):
self.candidates = func(*args, **kwargs)
return self.candidates
return run
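# Runs 'ansible-galaxy collection <args>' while isolating the global CLI-args singleton.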
def call_galaxy_cli(args):
orig = co.GlobalCLIArgs._Singleton__instance
co.GlobalCLIArgs._Singleton__instance = None
try:
GalaxyCLI(args=['ansible-galaxy', 'collection'] + args).run()
finally:
co.GlobalCLIArgs._Singleton__instance = orig
@pytest.fixture(autouse=True)
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
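# Builds a real collection (ansible-galaxy init + build) and returns (collection source dir, built tarball) as byte paths.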
@pytest.fixture()
def collection_artifact(request, tmp_path_factory):
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
namespace = 'ansible_namespace'
collection = 'collection'
skeleton_path = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')
collection_path = os.path.join(test_dir, namespace, collection)
call_galaxy_cli(['init', '%s.%s' % (namespace, collection), '-c', '--init-path', test_dir,
'--collection-skeleton', skeleton_path])
dependencies = getattr(request, 'param', {})
galaxy_yml = os.path.join(collection_path, 'galaxy.yml')
with open(galaxy_yml, 'rb+') as galaxy_obj:
existing_yaml = yaml.safe_load(galaxy_obj)
existing_yaml['dependencies'] = dependencies
galaxy_obj.seek(0)
galaxy_obj.write(to_bytes(yaml.safe_dump(existing_yaml)))
galaxy_obj.truncate()
# Create a file with +x in the collection so we can test the permissions
execute_path = os.path.join(collection_path, 'runme.sh')
with open(execute_path, mode='wb') as fd:
fd.write(b"echo hi")
os.chmod(execute_path, os.stat(execute_path).st_mode | stat.S_IEXEC)
call_galaxy_cli(['build', collection_path, '--output-path', test_dir])
collection_tar = os.path.join(test_dir, '%s-%s-0.1.0.tar.gz' % (namespace, collection))
return to_bytes(collection_path), to_bytes(collection_tar)
@pytest.fixture()
def galaxy_server():
context.CLIARGS._store = {'ignore_certs': False}
galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com')
galaxy_api.get_collection_signatures = MagicMock(return_value=[])
return galaxy_api
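# Git/SCM handling in the concrete artifact manager; subprocess.check_call and mkdtemp are mocked so no real git commands run.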
def test_concrete_artifact_manager_scm_no_executable(monkeypatch):
url = 'https://github.com/org/repo'
version = 'commitish'
mock_subprocess_check_call = MagicMock()
monkeypatch.setattr(collection.concrete_artifact_manager.subprocess, 'check_call', mock_subprocess_check_call)
mock_mkdtemp = MagicMock(return_value='')
monkeypatch.setattr(collection.concrete_artifact_manager, 'mkdtemp', mock_mkdtemp)
mock_get_bin_path = MagicMock(side_effect=[ValueError('Failed to find required executable')])
monkeypatch.setattr(collection.concrete_artifact_manager, 'get_bin_path', mock_get_bin_path)
error = re.escape(
"Could not find git executable to extract the collection from the Git repository `https://github.com/org/repo`"
)
with pytest.raises(AnsibleError, match=error):
collection.concrete_artifact_manager._extract_collection_from_git(url, version, b'path')
@pytest.mark.parametrize(
'url,version,trailing_slash',
[
('https://github.com/org/repo', 'commitish', False),
('https://github.com/org/repo,commitish', None, False),
('https://github.com/org/repo/,commitish', None, True),
('https://github.com/org/repo#,commitish', None, False),
]
)
def test_concrete_artifact_manager_scm_cmd(url, version, trailing_slash, monkeypatch):
context.CLIARGS._store = {'ignore_certs': False}
mock_subprocess_check_call = MagicMock()
monkeypatch.setattr(collection.concrete_artifact_manager.subprocess, 'check_call', mock_subprocess_check_call)
mock_mkdtemp = MagicMock(return_value='')
monkeypatch.setattr(collection.concrete_artifact_manager, 'mkdtemp', mock_mkdtemp)
collection.concrete_artifact_manager._extract_collection_from_git(url, version, b'path')
assert mock_subprocess_check_call.call_count == 2
repo = 'https://github.com/org/repo'
if trailing_slash:
repo += '/'
git_executable = get_bin_path('git')
clone_cmd = [git_executable, 'clone', repo, '']
assert mock_subprocess_check_call.call_args_list[0].args[0] == clone_cmd
assert mock_subprocess_check_call.call_args_list[1].args[0] == (git_executable, 'checkout', 'commitish')
@pytest.mark.parametrize(
'url,version,trailing_slash',
[
('https://github.com/org/repo', 'HEAD', False),
('https://github.com/org/repo,HEAD', None, False),
('https://github.com/org/repo/,HEAD', None, True),
('https://github.com/org/repo#,HEAD', None, False),
('https://github.com/org/repo', None, False),
]
)
def test_concrete_artifact_manager_scm_cmd_shallow(url, version, trailing_slash, monkeypatch):
context.CLIARGS._store = {'ignore_certs': False}
mock_subprocess_check_call = MagicMock()
monkeypatch.setattr(collection.concrete_artifact_manager.subprocess, 'check_call', mock_subprocess_check_call)
mock_mkdtemp = MagicMock(return_value='')
monkeypatch.setattr(collection.concrete_artifact_manager, 'mkdtemp', mock_mkdtemp)
collection.concrete_artifact_manager._extract_collection_from_git(url, version, b'path')
assert mock_subprocess_check_call.call_count == 2
repo = 'https://github.com/org/repo'
if trailing_slash:
repo += '/'
git_executable = get_bin_path('git')
shallow_clone_cmd = [git_executable, 'clone', '--depth=1', repo, '']
assert mock_subprocess_check_call.call_args_list[0].args[0] == shallow_clone_cmd
assert mock_subprocess_check_call.call_args_list[1].args[0] == (git_executable, 'checkout', 'HEAD')
@pytest.mark.parametrize(
'ignore_certs_cli,ignore_certs_config,expected_ignore_certs',
[
(False, False, False),
(False, True, True),
(True, False, True),
]
)
def test_concrete_artifact_manager_scm_cmd_validate_certs(ignore_certs_cli, ignore_certs_config, expected_ignore_certs, monkeypatch):
context.CLIARGS._store = {'ignore_certs': ignore_certs_cli}
monkeypatch.setattr(C, 'GALAXY_IGNORE_CERTS', ignore_certs_config)
mock_subprocess_check_call = MagicMock()
monkeypatch.setattr(collection.concrete_artifact_manager.subprocess, 'check_call', mock_subprocess_check_call)
mock_mkdtemp = MagicMock(return_value='')
monkeypatch.setattr(collection.concrete_artifact_manager, 'mkdtemp', mock_mkdtemp)
url = 'https://github.com/org/repo'
version = 'HEAD'
collection.concrete_artifact_manager._extract_collection_from_git(url, version, b'path')
assert mock_subprocess_check_call.call_count == 2
git_executable = get_bin_path('git')
clone_cmd = [git_executable, 'clone', '--depth=1', url, '']
if expected_ignore_certs:
clone_cmd.extend(['-c', 'http.sslVerify=false'])
assert mock_subprocess_check_call.call_args_list[0].args[0] == clone_cmd
assert mock_subprocess_check_call.call_args_list[1].args[0] == (git_executable, 'checkout', 'HEAD')
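# Building Requirement objects from local sources: directories, galaxy.yml metadata and tarballs.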
def test_build_requirement_from_path(collection_artifact):
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
assert actual.namespace == u'ansible_namespace'
assert actual.name == u'collection'
assert actual.src == collection_artifact[0]
assert actual.ver == u'0.1.0'
@pytest.mark.parametrize('version', ['1.1.1', '1.1.0', '1.0.0'])
def test_build_requirement_from_path_with_manifest(version, collection_artifact):
manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
manifest_value = json.dumps({
'collection_info': {
'namespace': 'namespace',
'name': 'name',
'version': version,
'dependencies': {
'ansible_namespace.collection': '*'
}
}
})
with open(manifest_path, 'wb') as manifest_obj:
manifest_obj.write(to_bytes(manifest_value))
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
# While the folder name suggests a different collection, we treat MANIFEST.json as the source of truth.
assert actual.namespace == u'namespace'
assert actual.name == u'name'
assert actual.src == collection_artifact[0]
assert actual.ver == to_text(version)
def test_build_requirement_from_path_invalid_manifest(collection_artifact):
manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
with open(manifest_path, 'wb') as manifest_obj:
manifest_obj.write(b"not json")
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
with pytest.raises(AnsibleError, match=expected):
Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
def test_build_artifact_from_path_no_version(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
# a collection artifact should always contain a valid version
manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
manifest_value = json.dumps({
'collection_info': {
'namespace': 'namespace',
'name': 'name',
'version': '',
'dependencies': {}
}
})
with open(manifest_path, 'wb') as manifest_obj:
manifest_obj.write(to_bytes(manifest_value))
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
expected = (
'^Collection metadata file `.*` at `.*` is expected to have a valid SemVer '
'version value but got {empty_unicode_string!r}$'.
format(empty_unicode_string=u'')
)
with pytest.raises(AnsibleError, match=expected):
Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
def test_build_requirement_from_path_no_version(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
# version may be falsey/arbitrary strings for collections in development
manifest_path = os.path.join(collection_artifact[0], b'galaxy.yml')
metadata = {
'authors': ['Ansible'],
'readme': 'README.md',
'namespace': 'namespace',
'name': 'name',
'version': '',
'dependencies': {},
}
with open(manifest_path, 'wb') as manifest_obj:
manifest_obj.write(to_bytes(yaml.safe_dump(metadata)))
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
    # While the folder name suggests a different collection, we treat the galaxy.yml metadata as the source of truth.
assert actual.namespace == u'namespace'
assert actual.name == u'name'
assert actual.src == collection_artifact[0]
assert actual.ver == u'*'
def test_build_requirement_from_tar(collection_artifact):
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
actual = Requirement.from_requirement_dict({'name': to_text(collection_artifact[1])}, concrete_artifact_cm)
assert actual.namespace == u'ansible_namespace'
assert actual.name == u'collection'
assert actual.src == to_text(collection_artifact[1])
assert actual.ver == u'0.1.0'
def test_build_requirement_from_tar_url(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
test_url = 'https://example.com/org/repo/sample.tar.gz'
expected = fr"^Failed to download collection tar from '{to_text(test_url)}'"
with pytest.raises(AnsibleError, match=expected):
Requirement.from_requirement_dict({'name': test_url, 'type': 'url'}, concrete_artifact_cm)
def test_build_requirement_from_tar_url_wrong_type(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
test_url = 'https://example.com/org/repo/sample.tar.gz'
expected = fr"^Unable to find collection artifact file at '{to_text(test_url)}'\.$"
with pytest.raises(AnsibleError, match=expected):
# Specified wrong collection type for http URL
Requirement.from_requirement_dict({'name': test_url, 'type': 'file'}, concrete_artifact_cm)
def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
test_file = os.path.join(test_dir, b'fake.tar.gz')
with open(test_file, 'wb') as test_obj:
test_obj.write(b"\x00\x01\x02\x03")
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
expected = "Collection artifact at '%s' is not a valid tar file." % to_native(test_file)
with pytest.raises(AnsibleError, match=expected):
Requirement.from_requirement_dict({'name': to_text(test_file)}, concrete_artifact_cm)
def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
json_data = to_bytes(json.dumps(
{
'files': [],
'format': 1,
}
))
tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
with tarfile.open(tar_path, 'w:gz') as tfile:
b_io = BytesIO(json_data)
tar_info = tarfile.TarInfo('FILES.json')
tar_info.size = len(json_data)
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
expected = "Collection at '%s' does not contain the required file MANIFEST.json." % to_native(tar_path)
with pytest.raises(AnsibleError, match=expected):
Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
def test_build_requirement_from_tar_no_files(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
json_data = to_bytes(json.dumps(
{
'collection_info': {},
}
))
tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
with tarfile.open(tar_path, 'w:gz') as tfile:
b_io = BytesIO(json_data)
tar_info = tarfile.TarInfo('MANIFEST.json')
tar_info.size = len(json_data)
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
with pytest.raises(KeyError, match='namespace'):
Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
json_data = b"not a json"
tar_path = os.path.join(test_dir, b'ansible-collections.tar.gz')
with tarfile.open(tar_path, 'w:gz') as tfile:
b_io = BytesIO(json_data)
tar_info = tarfile.TarInfo('MANIFEST.json')
tar_info.size = len(json_data)
tar_info.mode = S_IRWU_RG_RO
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
with pytest.raises(AnsibleError, match=expected):
Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
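# Dependency resolution by name against mocked Galaxy API responses.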
def test_build_requirement_from_name(galaxy_server, monkeypatch, tmp_path_factory):
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['2.1.9', '2.1.10']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_version_metadata = MagicMock(
namespace='namespace', name='collection',
version='2.1.10', artifact_sha256='', dependencies={}
)
monkeypatch.setattr(api.GalaxyAPI, 'get_collection_version_metadata', mock_version_metadata)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
collections = ['namespace.collection']
requirements_file = None
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', collections[0]])
requirements = cli._require_one_of_collections_requirements(
collections, requirements_file, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.ver == u'2.1.10'
assert actual.src == galaxy_server
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch, tmp_path_factory):
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['1.0.1', '2.0.1-beta.1', '2.0.1']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.src == galaxy_server
assert actual.ver == u'2.0.1'
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
def test_build_requirement_from_name_with_prerelease_explicit(galaxy_server, monkeypatch, tmp_path_factory):
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['1.0.1', '2.0.1-beta.1', '2.0.1']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1-beta.1', None, None,
{}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:2.0.1-beta.1'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection:2.0.1-beta.1'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.src == galaxy_server
assert actual.ver == u'2.0.1-beta.1'
assert mock_get_info.call_count == 1
assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1-beta.1')
def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch, tmp_path_factory):
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['1.0.1', '1.0.2', '1.0.3']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.0.3', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
broken_server = copy.copy(galaxy_server)
broken_server.api_server = 'https://broken.com/'
mock_version_list = MagicMock()
mock_version_list.return_value = []
monkeypatch.setattr(broken_server, 'get_collection_versions', mock_version_list)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection:>1.0.1'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [broken_server, galaxy_server], concrete_artifact_cm, None, True, False, False, False, False
)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.src == galaxy_server
assert actual.ver == u'1.0.3'
assert mock_version_list.call_count == 1
assert mock_version_list.mock_calls[0][1] == ('namespace', 'collection')
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
def test_build_requirement_from_name_missing(galaxy_server, monkeypatch, tmp_path_factory):
mock_open = MagicMock()
mock_open.return_value = []
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
)['collections']
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n* namespace.collection:* (direct request)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)
def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch, tmp_path_factory):
mock_open = MagicMock()
mock_open.side_effect = api.GalaxyError(urllib.error.HTTPError('https://galaxy.server.com', 401, 'msg', {},
StringIO()), "error")
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
)['collections']
expected = "error (HTTP Code: 401, Message: msg)"
with pytest.raises(api.GalaxyError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, False, False, False, False)
def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch, tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
matches = RequirementCandidates()
mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['2.0.0']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.0', None, None,
{}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:==2.0.0'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection:==2.0.0'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.src == galaxy_server
assert actual.ver == u'2.0.0'
assert [c.ver for c in matches.candidates] == [u'2.0.0']
assert mock_get_info.call_count == 1
assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.0')
def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server, monkeypatch, tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
matches = RequirementCandidates()
mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['2.0.0', '2.0.1', '2.0.2']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None,
{}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>=2.0.1,<2.0.2'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection:>=2.0.1,<2.0.2'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.src == galaxy_server
assert actual.ver == u'2.0.1'
assert [c.ver for c in matches.candidates] == [u'2.0.1']
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
assert mock_get_info.call_count == 1
assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1')
def test_build_requirement_from_name_multiple_version_results(galaxy_server, monkeypatch, tmp_path_factory):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
matches = RequirementCandidates()
mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['1.0.1', '1.0.2', '1.0.3']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
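    # The return_value below supersedes the 1.x list above, so only the 2.x versions are offered to the resolver.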
mock_get_versions.return_value = ['2.0.0', '2.0.1', '2.0.2', '2.0.3', '2.0.4', '2.0.5']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:!=2.0.2'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection:!=2.0.2'], None, artifacts_manager=concrete_artifact_cm
)['collections']
actual = collection._resolve_depenency_map(
requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)['namespace.collection']
assert actual.namespace == u'namespace'
assert actual.name == u'collection'
assert actual.src == galaxy_server
assert actual.ver == u'2.0.5'
# should be ordered latest to earliest
assert [c.ver for c in matches.candidates] == [u'2.0.5', u'2.0.4', u'2.0.3', u'2.0.1', u'2.0.0']
assert mock_get_versions.call_count == 1
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
def test_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
mock_get_versions = MagicMock()
mock_get_versions.return_value = ['2.0.5']
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:!=2.0.5'])
requirements = cli._require_one_of_collections_requirements(
['namespace.collection:!=2.0.5'], None, artifacts_manager=concrete_artifact_cm
)['collections']
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
expected += "* namespace.collection:!=2.0.5 (direct request)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)
def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
mock_get_info_return = [
api.CollectionVersionMetadata('parent', 'collection', '2.0.5', None, None, {'namespace.collection': '!=1.0.0'}, None, None),
api.CollectionVersionMetadata('namespace', 'collection', '1.0.0', None, None, {}, None, None),
]
mock_get_info = MagicMock(side_effect=mock_get_info_return)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
mock_get_versions = MagicMock(side_effect=[['2.0.5'], ['1.0.0']])
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'parent.collection:2.0.5'])
requirements = cli._require_one_of_collections_requirements(
['parent.collection:2.0.5'], None, artifacts_manager=concrete_artifact_cm
)['collections']
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
expected += "* namespace.collection:!=1.0.0 (dependency of parent.collection:2.0.5)"
with pytest.raises(AnsibleError, match=re.escape(expected)):
collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True, False, False, False)
def test_install_installed_collection(monkeypatch, tmp_path_factory, galaxy_server):
mock_installed_collections = MagicMock(return_value=[Candidate('namespace.collection', '1.2.3', None, 'dir', None)])
monkeypatch.setattr(collection, 'find_existing_collections', mock_installed_collections)
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
mock_get_info = MagicMock()
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.2.3', None, None, {}, None, None)
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
mock_get_versions = MagicMock(return_value=['1.2.3', '1.3.0'])
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection'])
cli.run()
expected = "Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`."
assert mock_display.mock_calls[1][1][0] == expected
def test_install_collection(collection_artifact, monkeypatch):
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
collection_tar = collection_artifact[1]
temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
os.makedirs(temp_path)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
output_path = os.path.join(os.path.split(collection_tar)[0])
collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
os.makedirs(os.path.join(collection_path, b'delete_me')) # Create a folder to verify the install cleans out the dir
candidate = Candidate('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)
collection.install(candidate, to_text(output_path), concrete_artifact_cm)
# Ensure the temp directory is empty, nothing is left behind
assert os.listdir(temp_path) == []
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles',
b'runme.sh']
assert stat.S_IMODE(os.stat(os.path.join(collection_path, b'plugins')).st_mode) == S_IRWXU_RXG_RXO
assert stat.S_IMODE(os.stat(os.path.join(collection_path, b'README.md')).st_mode) == S_IRWU_RG_RO
assert stat.S_IMODE(os.stat(os.path.join(collection_path, b'runme.sh')).st_mode) == S_IRWXU_RXG_RXO
assert mock_display.call_count == 2
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
% to_text(collection_path)
assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection:0.1.0 was installed successfully"
def test_install_collection_with_download(galaxy_server, collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
shutil.rmtree(collection_path)
collections_dir = ('%s' % os.path.sep).join(to_text(collection_path).split('%s' % os.path.sep)[:-2])
temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
os.makedirs(temp_path)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
mock_download = MagicMock()
mock_download.return_value = collection_tar
monkeypatch.setattr(concrete_artifact_cm, 'get_galaxy_artifact_path', mock_download)
req = Candidate('ansible_namespace.collection', '0.1.0', 'https://downloadme.com', 'galaxy', None)
collection.install(req, to_text(collections_dir), concrete_artifact_cm)
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles',
b'runme.sh']
assert mock_display.call_count == 2
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
% to_text(collection_path)
assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection:0.1.0 was installed successfully"
assert mock_download.call_count == 1
assert mock_download.mock_calls[0][1][0].src == 'https://downloadme.com'
assert mock_download.mock_calls[0][1][0].type == 'galaxy'
def test_install_collections_from_tar(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
shutil.rmtree(collection_path)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(
requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
assert os.path.isdir(collection_path)
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles',
b'runme.sh']
with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
actual_manifest = json.loads(to_text(manifest_obj.read()))
assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
assert actual_manifest['collection_info']['name'] == 'collection'
assert actual_manifest['collection_info']['version'] == '0.1.0'
# Filter out the progress cursor display calls.
display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
assert len(display_msgs) == 4
assert display_msgs[0] == "Process install dependency map"
assert display_msgs[1] == "Starting collection install process"
assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
# Makes sure we don't get stuck in some recursive loop
@pytest.mark.parametrize('collection_artifact', [
{'ansible_namespace.collection': '>=0.0.1'},
], indirect=True)
def test_install_collection_with_circular_dependency(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
shutil.rmtree(collection_path)
mock_display = MagicMock()
monkeypatch.setattr(Display, 'display', mock_display)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(
requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
assert os.path.isdir(collection_path)
actual_files = os.listdir(collection_path)
actual_files.sort()
assert actual_files == [b'FILES.json', b'MANIFEST.json', b'README.md', b'docs', b'playbooks', b'plugins', b'roles',
b'runme.sh']
with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
actual_manifest = json.loads(to_text(manifest_obj.read()))
assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
assert actual_manifest['collection_info']['name'] == 'collection'
assert actual_manifest['collection_info']['version'] == '0.1.0'
assert actual_manifest['collection_info']['dependencies'] == {'ansible_namespace.collection': '>=0.0.1'}
# Filter out the progress cursor display calls.
display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
assert len(display_msgs) == 4
assert display_msgs[0] == "Process install dependency map"
assert display_msgs[1] == "Starting collection install process"
assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
assert display_msgs[3] == "ansible_namespace.collection:0.1.0 was installed successfully"
@pytest.mark.parametrize('collection_artifact', [
None,
{},
], indirect=True)
def test_install_collection_with_no_dependency(collection_artifact, monkeypatch):
collection_path, collection_tar = collection_artifact
temp_path = os.path.split(collection_tar)[0]
shutil.rmtree(collection_path)
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)]
collection.install_collections(
requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False, set())
assert os.path.isdir(collection_path)
with open(os.path.join(collection_path, b'MANIFEST.json'), 'rb') as manifest_obj:
actual_manifest = json.loads(to_text(manifest_obj.read()))
assert not actual_manifest['collection_info']['dependencies']
assert actual_manifest['collection_info']['namespace'] == 'ansible_namespace'
assert actual_manifest['collection_info']['name'] == 'collection'
assert actual_manifest['collection_info']['version'] == '0.1.0'
@pytest.mark.parametrize(
"signatures,required_successful_count,ignore_errors,expected_success",
[
([], 'all', [], True),
(["good_signature"], 'all', [], True),
(["good_signature", collection.gpg.GpgBadArmor(status='failed')], 'all', [], False),
([collection.gpg.GpgBadArmor(status='failed')], 'all', [], False),
# This is expected to succeed because ignored does not increment failed signatures.
# "all" signatures is not a specific number, so all == no (non-ignored) signatures in this case.
([collection.gpg.GpgBadArmor(status='failed')], 'all', ["BADARMOR"], True),
([collection.gpg.GpgBadArmor(status='failed'), "good_signature"], 'all', ["BADARMOR"], True),
([], '+all', [], False),
([collection.gpg.GpgBadArmor(status='failed')], '+all', ["BADARMOR"], False),
([], '1', [], True),
([], '+1', [], False),
(["good_signature"], '2', [], False),
(["good_signature", collection.gpg.GpgBadArmor(status='failed')], '2', [], False),
# This is expected to fail because ignored does not increment successful signatures.
# 2 signatures are required, but only 1 is successful.
(["good_signature", collection.gpg.GpgBadArmor(status='failed')], '2', ["BADARMOR"], False),
(["good_signature", "good_signature"], '2', [], True),
]
)
def test_verify_file_signatures(signatures: list[str], required_successful_count: str, ignore_errors: list[str], expected_success: bool) -> None:
def gpg_error_generator(results):
for result in results:
if isinstance(result, collection.gpg.GpgBaseError):
yield result
fqcn = 'ns.coll'
manifest_file = 'MANIFEST.json'
keyring = '~/.ansible/pubring.kbx'
with patch.object(collection, 'run_gpg_verify', MagicMock(return_value=("somestdout", 0,))):
with patch.object(collection, 'parse_gpg_errors', MagicMock(return_value=gpg_error_generator(signatures))):
assert collection.verify_file_signatures(
fqcn,
manifest_file,
signatures,
keyring,
required_successful_count,
ignore_errors
) == expected_success
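# Illustrative sketch: a minimal, self-contained model of the signature-counting
# rules that the parametrized cases above pin down. It is inferred from the
# expected outcomes only and is NOT ansible's actual verification code; the
# helper name and argument layout are invented for this example.
def meets_required_signature_count(total, successful, failed, required):
    """required is 'all', '+all', or an integer string, optionally '+'-prefixed."""
    strict = required.startswith('+')
    required = required.lstrip('+')
    if strict and successful == 0:
        return False  # '+' always demands at least one successful signature
    if total == 0:
        return True   # nothing to verify and not strict: trivially satisfied
    if required == 'all':
        return failed == 0  # ignored errors count as neither failed nor successful
    return successful >= int(required)
# Matches e.g. the ([GpgBadArmor], 'all', ["BADARMOR"], True) case above:
assert meets_required_signature_count(total=1, successful=0, failed=0, required='all')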
| 48,892 | Python | .py | 770 | 57.012987 | 155 | 0.710629 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,760 | test_token.py | ansible_ansible/test/units/galaxy/test_token.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import pytest
from unittest.mock import MagicMock
import ansible.constants as C
from ansible.cli.galaxy import GalaxyCLI
from ansible.config import manager
from ansible.galaxy.token import GalaxyToken, NoTokenSentinel
from ansible.module_utils.common.text.converters import to_bytes, to_text
@pytest.fixture()
def b_token_file(request, tmp_path_factory):
b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Token'))
b_token_path = os.path.join(b_test_dir, b"token.yml")
token = getattr(request, 'param', None)
if token:
with open(b_token_path, 'wb') as token_fd:
token_fd.write(b"token: %s" % to_bytes(token))
orig_token_path = C.GALAXY_TOKEN_PATH
C.GALAXY_TOKEN_PATH = to_text(b_token_path)
try:
yield b_token_path
finally:
C.GALAXY_TOKEN_PATH = orig_token_path
def test_client_id(monkeypatch):
monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', ['server1', 'server2'])
test_server_config = {option[0]: None for option in manager.GALAXY_SERVER_DEF}
test_server_config.update(
{
'url': 'http://my_galaxy_ng:8000/api/automation-hub/',
'auth_url': 'http://my_keycloak:8080/auth/realms/myco/protocol/openid-connect/token',
'client_id': 'galaxy-ng',
'token': 'access_token',
}
)
test_server_default = {option[0]: None for option in manager.GALAXY_SERVER_DEF}
test_server_default.update(
{
'url': 'https://cloud.redhat.com/api/automation-hub/',
'auth_url': 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token',
'token': 'access_token',
}
)
get_plugin_options = MagicMock(side_effect=[test_server_config, test_server_default])
monkeypatch.setattr(C.config, 'get_plugin_options', get_plugin_options)
cli_args = [
'ansible-galaxy',
'collection',
'install',
'namespace.collection:1.0.0',
]
galaxy_cli = GalaxyCLI(args=cli_args)
mock_execute_install = MagicMock()
monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
galaxy_cli.run()
assert galaxy_cli.api_servers[0].token.client_id == 'galaxy-ng'
assert galaxy_cli.api_servers[1].token.client_id == 'cloud-services'
def test_token_explicit(b_token_file):
assert GalaxyToken(token="explicit").get() == "explicit"
@pytest.mark.parametrize('b_token_file', ['file'], indirect=True)
def test_token_explicit_override_file(b_token_file):
assert GalaxyToken(token="explicit").get() == "explicit"
@pytest.mark.parametrize('b_token_file', ['file'], indirect=True)
def test_token_from_file(b_token_file):
assert GalaxyToken().get() == "file"
def test_token_from_file_missing(b_token_file):
assert GalaxyToken().get() is None
@pytest.mark.parametrize('b_token_file', ['file'], indirect=True)
def test_token_none(b_token_file):
assert GalaxyToken(token=NoTokenSentinel).get() is None
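# Illustrative sketch: the lookup order these tests assert, reduced to a tiny
# stand-alone model. It is inferred from the assertions only and is not the
# real GalaxyToken class; resolve_token and its arguments are invented here.
def resolve_token(explicit=None, file_token=None, suppress=False):
    if suppress:              # GalaxyToken(token=NoTokenSentinel) -> no token at all
        return None
    if explicit is not None:  # an explicit token always wins over the token file
        return explicit
    return file_token         # otherwise fall back to the C.GALAXY_TOKEN_PATH file, else None
assert resolve_token(explicit="explicit", file_token="file") == "explicit"
assert resolve_token(file_token="file") == "file"
assert resolve_token(file_token="file", suppress=True) is None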
| 3,213 | Python | .py | 72 | 38.958333 | 107 | 0.688646 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,761 | test_role_install.py | ansible_ansible/test/units/galaxy/test_role_install.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import json
import os
import functools
import pytest
import tempfile
from io import StringIO
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.galaxy import api, role, Galaxy
from ansible.module_utils.common.text.converters import to_text
from ansible.utils import context_objects as co
def call_galaxy_cli(args):
orig = co.GlobalCLIArgs._Singleton__instance
co.GlobalCLIArgs._Singleton__instance = None
try:
return GalaxyCLI(args=['ansible-galaxy', 'role'] + args).run()
finally:
co.GlobalCLIArgs._Singleton__instance = orig
@pytest.fixture(autouse=True)
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
@pytest.fixture(autouse=True)
def galaxy_server():
context.CLIARGS._store = {'ignore_certs': False}
galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com')
return galaxy_api
@pytest.fixture(autouse=True)
def init_role_dir(tmp_path_factory):
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Roles Input'))
namespace = 'ansible_namespace'
role = 'role'
skeleton_path = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'role_skeleton')
call_galaxy_cli(['init', '%s.%s' % (namespace, role), '-c', '--init-path', test_dir, '--role-skeleton', skeleton_path])
def mock_NamedTemporaryFile(mocker, **args):
mock_ntf = mocker.MagicMock()
mock_ntf.write = mocker.MagicMock()
mock_ntf.close = mocker.MagicMock()
mock_ntf.name = None
return mock_ntf
@pytest.fixture
def init_mock_temp_file(mocker, monkeypatch):
monkeypatch.setattr(tempfile, 'NamedTemporaryFile', functools.partial(mock_NamedTemporaryFile, mocker))
@pytest.fixture(autouse=True)
def mock_role_download_api(mocker, monkeypatch):
mock_role_api = mocker.MagicMock()
mock_role_api.side_effect = [
StringIO(u''),
]
monkeypatch.setattr(role, 'open_url', mock_role_api)
return mock_role_api
def test_role_download_github(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
def test_role_download_github_default_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role').install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.2.tar.gz'
def test_role_download_github_no_download_url_for_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
@pytest.mark.parametrize(
'state,rc',
[('SUCCESS', 0), ('FAILED', 1),]
)
def test_role_import(state, rc, mocker, galaxy_server, monkeypatch):
responses = [
{"available_versions": {"v1": "v1/"}},
{"results": [{'id': 12345, 'github_user': 'user', 'github_repo': 'role', 'github_reference': None, 'summary_fields': {'role': {'name': 'role'}}}]},
{"results": [{'state': 'WAITING', 'id': 12345, 'summary_fields': {'task_messages': []}}]},
{"results": [{'state': state, 'id': 12345, 'summary_fields': {'task_messages': []}}]},
]
mock_api = mocker.MagicMock(side_effect=[StringIO(json.dumps(rsp)) for rsp in responses])
monkeypatch.setattr(api, 'open_url', mock_api)
assert call_galaxy_cli(['import', 'user', 'role']) == rc
def test_role_download_url(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1","download_url":"http://localhost:8080/test_owner/test_role/0.0.1.tar.gz"},'
u'{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'http://localhost:8080/test_owner/test_role/0.0.1.tar.gz'
def test_role_download_url_default_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1","download_url":"http://localhost:8080/test_owner/test_role/0.0.1.tar.gz"},'
u'{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role').install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'http://localhost:8080/test_owner/test_role/0.0.2.tar.gz'
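# Illustrative sketch: how the download URLs asserted above can be derived.
# Inferred from the expected URLs only, not the real GalaxyRole.install logic;
# pick_download_url and its arguments are invented for this example.
def pick_download_url(github_user, github_repo, releases, version=None):
    """releases: list of dicts with 'name' and, optionally, 'download_url'."""
    if version is None and releases:
        version = releases[-1]['name']  # simplification: treat the last listed release as the default
    release = next((r for r in releases if r['name'] == version), {})
    # Prefer an explicit download_url for that release, otherwise fall back to
    # the GitHub archive URL for the requested version.
    return release.get(
        'download_url',
        'https://github.com/%s/%s/archive/%s.tar.gz' % (github_user, github_repo, version),
    )
assert pick_download_url('test_owner', 'test_role', [{'name': '0.0.1'}, {'name': '0.0.2'}], '0.0.1') \
    == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'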
| 7,118 | Python | .py | 127 | 50.88189 | 155 | 0.668203 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,762 | test_play_context.py | ansible_ansible/test/units/playbook/test_play_context.py |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2017 Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.playbook.play_context import PlayContext
from ansible.playbook.play import Play
from ansible.utils import context_objects as co
@pytest.fixture
def parser():
parser = opt_help.create_base_parser('testparser')
opt_help.add_runas_options(parser)
opt_help.add_meta_options(parser)
opt_help.add_runtask_options(parser)
opt_help.add_vault_options(parser)
opt_help.add_async_options(parser)
opt_help.add_connect_options(parser)
opt_help.add_subset_options(parser)
opt_help.add_check_options(parser)
opt_help.add_inventory_options(parser)
return parser
@pytest.fixture
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
def test_play_context(mocker, parser, reset_cli_args):
options = parser.parse_args(['-vv', '--check'])
context._init_global_context(options)
play = Play.load({})
play_context = PlayContext(play=play)
assert play_context.remote_addr is None
assert play_context.remote_user is None
assert play_context.password == ''
assert play_context.private_key_file == C.DEFAULT_PRIVATE_KEY_FILE
assert play_context.timeout == C.DEFAULT_TIMEOUT
assert getattr(play_context, 'verbosity', None) is None
assert play_context.check_mode is True
mock_play = mocker.MagicMock()
mock_play.force_handlers = True
play_context = PlayContext(play=mock_play)
assert play_context.force_handlers is True
mock_task = mocker.MagicMock()
mock_task.connection = 'mocktask'
mock_task.remote_user = 'mocktask'
mock_task.port = 1234
mock_task.no_log = True
mock_task.become = True
mock_task.become_method = 'mocktask'
mock_task.become_user = 'mocktaskroot'
mock_task.become_pass = 'mocktaskpass'
mock_task._local_action = False
mock_task.delegate_to = None
all_vars = dict(
ansible_connection='mock_inventory',
ansible_ssh_port=4321,
)
mock_templar = mocker.MagicMock()
play_context = PlayContext()
play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
assert play_context.connection == 'mock_inventory'
assert play_context.remote_user == 'mocktask'
assert play_context.no_log is True
mock_task.no_log = False
play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
assert play_context.no_log is False
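# Illustrative analogy: the override assertions above amount to "the most
# specific source that defines a setting wins". This ChainMap toy is only a
# loose model of that precedence (the sample values below are invented), not
# the real PlayContext.set_task_and_variable_override machinery.
from collections import ChainMap
inventory_vars = {'connection': 'mock_inventory'}                      # e.g. ansible_connection
task_keywords = {'connection': 'mocktask', 'remote_user': 'mocktask'}  # task-level keywords
play_defaults = {'connection': 'smart', 'remote_user': None}           # invented defaults
effective = ChainMap(inventory_vars, task_keywords, play_defaults)
assert effective['connection'] == 'mock_inventory'  # the inventory variable wins
assert effective['remote_user'] == 'mocktask'       # falls back to the task keyword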
| 2,877 | Python | .py | 70 | 36.671429 | 120 | 0.737711 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,763 | test_playbook.py | ansible_ansible/test/units/playbook/test_playbook.py |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.errors import AnsibleParserError
from ansible.playbook import Playbook
from ansible.vars.manager import VariableManager
from units.mock.loader import DictDataLoader
class TestPlaybook(unittest.TestCase):
def test_empty_playbook(self):
fake_loader = DictDataLoader({})
p = Playbook(loader=fake_loader)
def test_basic_playbook(self):
fake_loader = DictDataLoader({
"test_file.yml": """
- hosts: all
""",
})
p = Playbook.load("test_file.yml", loader=fake_loader)
plays = p.get_plays()
def test_bad_playbook_files(self):
fake_loader = DictDataLoader({
# represents a playbook which is not a list of plays
"bad_list.yml": """
foo: bar
""",
# represents a playbook where a play entry is mis-formatted
"bad_entry.yml": """
-
- "This should be a mapping..."
""",
})
vm = VariableManager()
self.assertRaises(AnsibleParserError, Playbook.load, "bad_list.yml", vm, fake_loader)
self.assertRaises(AnsibleParserError, Playbook.load, "bad_entry.yml", vm, fake_loader)
| 1,987 | Python | .py | 49 | 34.326531 | 94 | 0.676867 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,764 | test_conditional.py | ansible_ansible/test/units/playbook/test_conditional.py |
from __future__ import annotations
import unittest
from units.mock.loader import DictDataLoader
from unittest.mock import MagicMock
from ansible.template import Templar
from ansible import errors
from ansible.playbook import conditional
class TestConditional(unittest.TestCase):
def setUp(self):
self.loader = DictDataLoader({})
self.cond = conditional.Conditional(loader=self.loader)
self.templar = Templar(loader=self.loader, variables={})
def _eval_con(self, when=None, variables=None):
when = when or []
variables = variables or {}
self.cond.when = when
ret = self.cond.evaluate_conditional(self.templar, variables)
return ret
def test_false(self):
when = [u"False"]
ret = self._eval_con(when, {})
self.assertFalse(ret)
def test_true(self):
when = [u"True"]
ret = self._eval_con(when, {})
self.assertTrue(ret)
def test_true_boolean(self):
self.cond.when = [True]
m = MagicMock()
ret = self.cond.evaluate_conditional(m, {})
self.assertTrue(ret)
self.assertFalse(m.is_template.called)
def test_false_boolean(self):
self.cond.when = [False]
m = MagicMock()
ret = self.cond.evaluate_conditional(m, {})
self.assertFalse(ret)
self.assertFalse(m.is_template.called)
def test_undefined(self):
when = [u"{{ some_undefined_thing }}"]
self.assertRaisesRegex(errors.AnsibleError, "The conditional check '{{ some_undefined_thing }}' failed",
self._eval_con, when, {})
def test_defined(self):
variables = {'some_defined_thing': True}
when = [u"{{ some_defined_thing }}"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_dict_defined_values(self):
variables = {'dict_value': 1,
'some_defined_dict': {'key1': 'value1',
'key2': '{{ dict_value }}'}}
when = [u"some_defined_dict"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_dict_defined_values_is_defined(self):
variables = {'dict_value': 1,
'some_defined_dict': {'key1': 'value1',
'key2': '{{ dict_value }}'}}
when = [u"some_defined_dict.key1 is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_dict_defined_multiple_values_is_defined(self):
variables = {'dict_value': 1,
'some_defined_dict': {'key1': 'value1',
'key2': '{{ dict_value }}'}}
when = [u"some_defined_dict.key1 is defined",
u"some_defined_dict.key2 is not undefined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_nested_hostvars_undefined_values(self):
variables = {'dict_value': 1,
'hostvars': {'host1': {'key1': 'value1',
'key2': '{{ dict_value }}'},
'host2': '{{ dict_value }}',
'host3': '{{ undefined_dict_value }}',
# no host4
},
'some_dict': {'some_dict_key1': '{{ hostvars["host3"] }}'}
}
when = [u"some_dict.some_dict_key1 == hostvars['host3']"]
# self._eval_con(when, variables)
self.assertRaisesRegex(errors.AnsibleError,
r"The conditional check 'some_dict.some_dict_key1 == hostvars\['host3'\]' failed",
# "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed",
# "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed.",
self._eval_con,
when, variables)
def test_dict_undefined_values_bare(self):
variables = {'dict_value': 1,
'some_defined_dict_with_undefined_values': {'key1': 'value1',
'key2': '{{ dict_value }}',
'key3': '{{ undefined_dict_value }}'
}}
# raises an exception when a non-string conditional is passed to extract_defined_undefined()
when = [u"some_defined_dict_with_undefined_values"]
self.assertRaisesRegex(errors.AnsibleError,
"The conditional check 'some_defined_dict_with_undefined_values' failed.",
self._eval_con,
when, variables)
def test_is_defined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_undefined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is undefined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_undefined_and_defined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is undefined", u"some_defined_thing is defined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_undefined_and_defined_reversed(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is defined", u"some_defined_thing is undefined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_not_undefined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is not undefined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_not_defined(self):
variables = {'some_defined_thing': True}
when = [u"some_undefined_thing is not defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_quotes_is_defined(self):
variables = {'hostvars': {'some_host': {}},
'compare_targets_single': "hostvars['some_host']",
'compare_targets_double': 'hostvars["some_host"]',
'compare_targets': {'double': '{{ compare_targets_double }}',
'single': "{{ compare_targets_single }}"},
}
when = [u"hostvars['some_host'] is defined",
u'hostvars["some_host"] is defined',
u"{{ compare_targets.double }} is defined",
u"{{ compare_targets.single }} is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_quotes_is_defined_but_is_not_defined(self):
variables = {'hostvars': {'some_host': {}},
'compare_targets_single': "hostvars['some_host']",
'compare_targets_double': 'hostvars["some_host"]',
'compare_targets': {'double': '{{ compare_targets_double }}',
'single': "{{ compare_targets_single }}"},
}
when = [u"hostvars['some_host'] is defined",
u'hostvars["some_host"] is defined',
u"{{ compare_targets.triple }} is defined",
u"{{ compare_targets.quadruple }} is defined"]
self.assertRaisesRegex(errors.AnsibleError,
"The conditional check '{{ compare_targets.triple }} is defined' failed",
self._eval_con,
when, variables)
def test_is_hostvars_host_is_defined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_host'] is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_host_undefined_is_defined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_undefined_host'] is defined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_hostvars_host_undefined_is_undefined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_undefined_host'] is undefined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_host_undefined_is_not_defined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_undefined_host'] is not defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
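# Illustrative note: the mixed defined/undefined cases above show that a list
# of conditionals only evaluates to True when every entry does, i.e. the
# entries are combined with a logical AND. A one-line model of that combination
# (not the real evaluate_conditional implementation):
def combine_when(results):
    return all(results)
assert combine_when([True, True]) is True
assert combine_when([True, False]) is False  # matches test_is_undefined_and_defined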
| 8,974 | Python | .py | 177 | 36.734463 | 113 | 0.543764 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,765 | test_conditional.py is followed by test_helpers.py below.
13,765 | test_helpers.py | ansible_ansible/test/units/playbook/test_helpers.py |
# (c) 2016, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import unittest
from unittest.mock import MagicMock
from units.mock.loader import DictDataLoader
from ansible import errors
from ansible.playbook import helpers
from ansible.playbook.block import Block
from ansible.playbook.handler import Handler
from ansible.playbook.task import Task
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role.include import RoleInclude
from ansible.plugins.loader import init_plugin_loader
init_plugin_loader()
class MixinForMocks(object):
def _setup(self):
# This is not a very good mixin, lots of side effects
self.fake_loader = DictDataLoader({'include_test.yml': "",
'other_include_test.yml': ""})
self.mock_tqm = MagicMock(name='MockTaskQueueManager')
self.mock_play = MagicMock(name='MockPlay')
self.mock_play._attributes = []
self.mock_play._collections = None
self.mock_iterator = MagicMock(name='MockIterator')
self.mock_iterator._play = self.mock_play
self.mock_inventory = MagicMock(name='MockInventory')
self.mock_inventory._hosts_cache = dict()
# TODO: can we use a real VariableManager?
self.mock_variable_manager = MagicMock(name='MockVariableManager')
self.mock_variable_manager.get_vars.return_value = dict()
self.mock_block = MagicMock(name='MockBlock')
# On macOS /etc is actually /private/etc, tests fail when performing literal /etc checks
self.fake_role_loader = DictDataLoader({os.path.join(os.path.realpath("/etc"), "ansible/roles/bogus_role/tasks/main.yml"): """
- shell: echo 'hello world'
"""})
self._test_data_path = os.path.dirname(__file__)
self.fake_include_loader = DictDataLoader({"/dev/null/includes/test_include.yml": """
- include_tasks: other_test_include.yml
- shell: echo 'hello world'
""",
"/dev/null/includes/static_test_include.yml": """
- include_tasks: other_test_include.yml
- shell: echo 'hello static world'
""",
"/dev/null/includes/other_test_include.yml": """
- debug:
msg: other_test_include_debug
"""})
class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
def setUp(self):
self._setup()
def _assert_is_task_list_or_blocks(self, results):
self.assertIsInstance(results, list)
for result in results:
self.assertIsInstance(result, (Task, Block))
def test_ds_not_list(self):
ds = {}
self.assertRaises(AssertionError, helpers.load_list_of_tasks,
ds, self.mock_play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None)
def test_ds_not_dict(self):
ds = [[]]
self.assertRaises(AssertionError, helpers.load_list_of_tasks,
ds, self.mock_play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None)
def test_empty_task(self):
ds = [{}]
self.assertRaisesRegex(errors.AnsibleParserError,
"no module/action detected in task",
helpers.load_list_of_tasks,
ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
def test_empty_task_use_handlers(self):
ds = [{}]
self.assertRaisesRegex(errors.AnsibleParserError,
"no module/action detected in task.",
helpers.load_list_of_tasks,
ds,
use_handlers=True,
play=self.mock_play,
variable_manager=self.mock_variable_manager,
loader=self.fake_loader)
def test_one_bogus_block(self):
ds = [{'block': None}]
self.assertRaisesRegex(errors.AnsibleParserError,
"A malformed block was encountered",
helpers.load_list_of_tasks,
ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
def test_unknown_action(self):
action_name = 'foo_test_unknown_action'
ds = [{'action': action_name}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self._assert_is_task_list_or_blocks(res)
self.assertEqual(res[0].action, action_name)
def test_block_unknown_action(self):
action_name = 'foo_test_block_unknown_action'
ds = [{
'block': [{'action': action_name}]
}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], Block)
self._assert_default_block(res[0])
def _assert_default_block(self, block):
# the expected defaults
self.assertIsInstance(block.block, list)
self.assertEqual(len(block.block), 1)
self.assertIsInstance(block.rescue, list)
self.assertEqual(len(block.rescue), 0)
self.assertIsInstance(block.always, list)
self.assertEqual(len(block.always), 0)
def test_block_use_handlers(self):
ds = [{'block': True}]
self.assertRaisesRegex(errors.AnsibleParserError,
"Using a block as a handler is not supported.",
helpers.load_list_of_tasks,
ds, play=self.mock_play, use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
def test_one_bogus_include_tasks(self):
ds = [{'include_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
self.assertEqual(len(res), 1)
self.assertIsInstance(res[0], TaskInclude)
def test_one_bogus_include_tasks_use_handlers(self):
ds = [{'include_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play, use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
self.assertEqual(len(res), 1)
self.assertIsInstance(res[0], TaskInclude)
def test_one_bogus_import_tasks(self):
ds = [{'import_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
self.assertEqual(len(res), 0)
def test_one_include_tasks(self):
ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self.assertEqual(len(res), 1)
self._assert_is_task_list_or_blocks(res)
def test_one_parent_include_tasks(self):
ds = [{'include_tasks': '/dev/null/includes/test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], TaskInclude)
self.assertIsNone(res[0]._parent)
def test_one_include_tasks_tags(self):
ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml',
'tags': ['test_one_include_tags_tag1', 'and_another_tagB']
}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], TaskInclude)
self.assertIn('test_one_include_tags_tag1', res[0].tags)
self.assertIn('and_another_tagB', res[0].tags)
def test_one_parent_include_tasks_tags(self):
ds = [{'include_tasks': '/dev/null/includes/test_include.yml',
# 'vars': {'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']}
'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']
}
]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], TaskInclude)
self.assertIn('test_one_parent_include_tags_tag1', res[0].tags)
self.assertIn('and_another_tag2', res[0].tags)
def test_one_include_tasks_use_handlers(self):
ds = [{'include_tasks': '/dev/null/includes/other_test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], Handler)
def test_one_parent_include_tasks_use_handlers(self):
ds = [{'include_tasks': '/dev/null/includes/test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
use_handlers=True,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], Handler)
# default for Handler
self.assertEqual(res[0].listen, [])
    # TODO/FIXME: this doesn't seem right
    # figure out how to get the non-static errors to be raised; this seems to just ignore everything
def test_one_include_not_static(self):
ds = [{
'include_tasks': '/dev/null/includes/static_test_include.yml',
}]
# a_block = Block()
ti_ds = {'include_tasks': '/dev/null/includes/ssdftatic_test_include.yml'}
a_task_include = TaskInclude()
ti = a_task_include.load(ti_ds)
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
block=ti,
variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
self._assert_is_task_list_or_blocks(res)
self.assertIsInstance(res[0], Task)
self.assertEqual(res[0].args['_raw_params'], '/dev/null/includes/static_test_include.yml')
    # TODO/FIXME: These two get stuck trying to make a mock_block into a TaskInclude
# def test_one_include(self):
# ds = [{'include': 'other_test_include.yml'}]
# res = helpers.load_list_of_tasks(ds, play=self.mock_play,
# block=self.mock_block,
# variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
# print(res)
# def test_one_parent_include(self):
# ds = [{'include': 'test_include.yml'}]
# res = helpers.load_list_of_tasks(ds, play=self.mock_play,
# block=self.mock_block,
# variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
# print(res)
def test_one_bogus_include_role(self):
ds = [{'include_role': {'name': 'bogus_role'}, 'collections': []}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
block=self.mock_block,
variable_manager=self.mock_variable_manager, loader=self.fake_role_loader)
self.assertEqual(len(res), 1)
self._assert_is_task_list_or_blocks(res)
def test_one_bogus_include_role_use_handlers(self):
ds = [{'include_role': {'name': 'bogus_role'}, 'collections': []}]
self.assertRaises(errors.AnsibleError, helpers.load_list_of_tasks,
ds,
self.mock_play,
True, # use_handlers
self.mock_block,
self.mock_variable_manager,
self.fake_role_loader)
class TestLoadListOfRoles(unittest.TestCase, MixinForMocks):
def setUp(self):
self._setup()
def test_ds_not_list(self):
ds = {}
self.assertRaises(AssertionError, helpers.load_list_of_roles,
ds, self.mock_play)
def test_empty_role(self):
ds = [{}]
self.assertRaisesRegex(errors.AnsibleError,
"role definitions must contain a role name",
helpers.load_list_of_roles,
ds, self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_role_loader)
def test_empty_role_just_name(self):
ds = [{'name': 'bogus_role'}]
res = helpers.load_list_of_roles(ds, self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_role_loader)
self.assertIsInstance(res, list)
for r in res:
self.assertIsInstance(r, RoleInclude)
def test_block_unknown_action(self):
ds = [{
'block': [{'action': 'foo_test_block_unknown_action'}]
}]
ds = [{'name': 'bogus_role'}]
res = helpers.load_list_of_roles(ds, self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_role_loader)
self.assertIsInstance(res, list)
for r in res:
self.assertIsInstance(r, RoleInclude)
class TestLoadListOfBlocks(unittest.TestCase, MixinForMocks):
def setUp(self):
self._setup()
def test_ds_not_list(self):
ds = {}
mock_play = MagicMock(name='MockPlay')
self.assertRaises(AssertionError, helpers.load_list_of_blocks,
ds, mock_play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None)
def test_empty_block(self):
ds = [{}]
mock_play = MagicMock(name='MockPlay')
self.assertRaisesRegex(errors.AnsibleParserError,
"no module/action detected in task",
helpers.load_list_of_blocks,
ds, mock_play,
parent_block=None,
role=None,
task_include=None,
use_handlers=False,
variable_manager=None,
loader=None)
def test_block_unknown_action(self):
ds = [{'action': 'foo', 'collections': []}]
mock_play = MagicMock(name='MockPlay')
res = helpers.load_list_of_blocks(ds, mock_play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None,
loader=None)
self.assertIsInstance(res, list)
for block in res:
self.assertIsInstance(block, Block)
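# Illustrative summary of what the assertions above establish about
# load_list_of_tasks: the class of object produced depends on the task keyword
# and on use_handlers. The mapping below is only a reading aid distilled from
# these tests, not an exhaustive statement of the loader's behaviour.
LOADER_RESULT_SUMMARY = {
    ('include_tasks', 'use_handlers=False'): 'TaskInclude',
    ('include_tasks', 'use_handlers=True'): 'Handler',
    ('import_tasks', 'use_handlers=False'): 'statically processed; a bogus file leaves nothing behind',
    ('<unknown action>', 'use_handlers=False'): 'Task (with the unknown action name preserved)',
    ('block: [...]', 'use_handlers=False'): 'Block',
    ('block: [...]', 'use_handlers=True'): 'AnsibleParserError (a block cannot be a handler)',
}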
| 17,396 | Python | .py | 311 | 41.209003 | 148 | 0.574516 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,766 | test_play.py | ansible_ansible/test/units/playbook/test_play.py |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import pytest
from ansible.errors import AnsibleAssertionError, AnsibleParserError
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
from ansible.playbook.block import Block
from ansible.playbook.play import Play
from ansible.playbook.role import Role
from ansible.playbook.task import Task
from units.mock.loader import DictDataLoader
def test_empty_play():
p = Play.load({})
assert str(p) == ''
def test_play_with_hosts_string():
p = Play.load({'hosts': 'foo'})
assert str(p) == 'foo'
# Test the caching since self.name should be set by previous call.
assert p.get_name() == 'foo'
def test_basic_play():
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
connection='local',
remote_user="root",
become=True,
become_user="testing",
))
assert p.name == 'test play'
assert p.hosts == ['foo']
assert p.connection == 'local'
def test_play_with_remote_user():
p = Play.load(dict(
name="test play",
hosts=['foo'],
user="testing",
gather_facts=False,
))
assert p.remote_user == "testing"
def test_play_with_user_conflict():
play_data = dict(
name="test play",
hosts=['foo'],
user="testing",
remote_user="testing",
)
with pytest.raises(AnsibleParserError):
Play.load(play_data)
def test_play_with_bad_ds_type():
play_data = []
with pytest.raises(AnsibleAssertionError, match=r"while preprocessing data \(\[\]\), ds should be a dict but was a <(?:class|type) 'list'>"):
Play.load(play_data)
def test_play_with_tasks():
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
tasks=[dict(action='shell echo "hello world"')],
))
assert len(p.tasks) == 1
assert isinstance(p.tasks[0], Block)
assert p.tasks[0].has_tasks() is True
def test_play_with_handlers():
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
handlers=[dict(action='shell echo "hello world"')],
))
assert len(p.handlers) >= 1
assert len(p.get_handlers()) >= 1
assert isinstance(p.handlers[0], Block)
assert p.handlers[0].has_tasks() is True
def test_play_with_pre_tasks():
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
pre_tasks=[dict(action='shell echo "hello world"')],
))
assert len(p.pre_tasks) >= 1
assert isinstance(p.pre_tasks[0], Block)
assert p.pre_tasks[0].has_tasks() is True
assert len(p.get_tasks()) >= 1
assert isinstance(p.get_tasks()[0][0], Task)
assert p.get_tasks()[0][0].action == 'shell'
def test_play_with_post_tasks():
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
post_tasks=[dict(action='shell echo "hello world"')],
))
assert len(p.post_tasks) >= 1
assert isinstance(p.post_tasks[0], Block)
assert p.post_tasks[0].has_tasks() is True
def test_play_with_roles(mocker):
mocker.patch('ansible.playbook.role.definition.RoleDefinition._load_role_path', return_value=('foo', '/etc/ansible/roles/foo'))
fake_loader = DictDataLoader({
'/etc/ansible/roles/foo/tasks.yml': """
- name: role task
shell: echo "hello world"
""",
})
mock_var_manager = mocker.MagicMock()
mock_var_manager.get_vars.return_value = {}
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
roles=['foo'],
), loader=fake_loader, variable_manager=mock_var_manager)
blocks = p.compile()
assert len(blocks) > 1
assert all(isinstance(block, Block) for block in blocks)
assert isinstance(p.get_roles()[0], Role)
def test_play_compile():
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
tasks=[dict(action='shell echo "hello world"')],
))
blocks = p.compile()
# with a single block, there will still be three
# implicit meta flush_handler blocks inserted
assert len(blocks) == 4
@pytest.mark.parametrize(
'value, expected',
(
('my_vars.yml', ['my_vars.yml']),
(['my_vars.yml'], ['my_vars.yml']),
(['my_vars1.yml', 'my_vars2.yml'], ['my_vars1.yml', 'my_vars2.yml']),
(None, []),
)
)
def test_play_with_vars_files(value, expected):
play = Play.load({
'name': 'Play with vars_files',
'hosts': ['testhost1'],
'vars_files': value,
})
assert play.vars_files == value
assert play.get_vars_files() == expected
@pytest.mark.parametrize('value', ([], tuple(), set(), {}, '', None, False, 0))
def test_play_empty_hosts(value):
with pytest.raises(AnsibleParserError, match='Hosts list cannot be empty'):
Play.load({'hosts': value})
@pytest.mark.parametrize('value', ([None], (None,), ['one', None]))
def test_play_none_hosts(value):
with pytest.raises(AnsibleParserError, match="Hosts list cannot contain values of 'None'"):
Play.load({'hosts': value})
@pytest.mark.parametrize(
'value',
(
{'one': None},
{'one': 'two'},
True,
1,
1.75,
AnsibleVaultEncryptedUnicode('secret'),
)
)
def test_play_invalid_hosts_sequence(value):
with pytest.raises(AnsibleParserError, match='Hosts list must be a sequence or string'):
Play.load({'hosts': value})
@pytest.mark.parametrize(
'value',
(
[[1, 'two']],
[{'one': None}],
[set((None, 'one'))],
['one', 'two', {'three': None}],
['one', 'two', {'three': 'four'}],
[AnsibleVaultEncryptedUnicode('secret')],
)
)
def test_play_invalid_hosts_value(value):
with pytest.raises(AnsibleParserError, match='Hosts list contains an invalid host value'):
Play.load({'hosts': value})
def test_play_with_vars():
play = Play.load({}, vars={'var1': 'val1'})
assert play.get_name() == ''
assert play.vars == {'var1': 'val1'}
assert play.get_vars() == {'var1': 'val1'}
def test_play_no_name_hosts_sequence():
play = Play.load({'hosts': ['host1', 'host2']})
assert play.get_name() == 'host1,host2'
def test_play_hosts_template_expression():
play = Play.load({'hosts': "{{ target_hosts }}"})
assert play.get_name() == '{{ target_hosts }}'
@pytest.mark.parametrize(
'call',
(
'_load_tasks',
'_load_pre_tasks',
'_load_post_tasks',
'_load_handlers',
'_load_roles',
)
)
def test_bad_blocks_roles(mocker, call):
mocker.patch('ansible.playbook.play.load_list_of_blocks', side_effect=AssertionError('Raised intentionally'))
mocker.patch('ansible.playbook.play.load_list_of_roles', side_effect=AssertionError('Raised intentionally'))
play = Play.load({})
with pytest.raises(AnsibleParserError, match='A malformed (block|(role declaration)) was encountered'):
getattr(play, call)('', None)
| 7,881 | Python | .py | 222 | 29.864865 | 145 | 0.636196 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,767 | test_taggable.py | ansible_ansible/test/units/playbook/test_taggable.py |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.playbook.taggable import Taggable
from units.mock.loader import DictDataLoader
class TaggableTestObj(Taggable):
def __init__(self):
self._loader = DictDataLoader({})
self.tags = []
self._parent = None
class TestTaggable(unittest.TestCase):
def assert_evaluate_equal(self, test_value, tags, only_tags, skip_tags):
taggable_obj = TaggableTestObj()
taggable_obj.tags = tags
evaluate = taggable_obj.evaluate_tags(only_tags, skip_tags, {})
self.assertEqual(test_value, evaluate)
def test_evaluate_tags_tag_in_only_tags(self):
self.assert_evaluate_equal(True, ['tag1', 'tag2'], ['tag1'], [])
def test_evaluate_tags_tag_in_skip_tags(self):
self.assert_evaluate_equal(False, ['tag1', 'tag2'], [], ['tag1'])
def test_evaluate_tags_special_always_in_object_tags(self):
self.assert_evaluate_equal(True, ['tag', 'always'], ['random'], [])
def test_evaluate_tags_tag_in_skip_tags_special_always_in_object_tags(self):
self.assert_evaluate_equal(False, ['tag', 'always'], ['random'], ['tag'])
def test_evaluate_tags_special_always_in_skip_tags_and_always_in_tags(self):
self.assert_evaluate_equal(False, ['tag', 'always'], [], ['always'])
def test_evaluate_tags_special_tagged_in_only_tags_and_object_tagged(self):
self.assert_evaluate_equal(True, ['tag'], ['tagged'], [])
def test_evaluate_tags_special_tagged_in_only_tags_and_object_untagged(self):
self.assert_evaluate_equal(False, [], ['tagged'], [])
def test_evaluate_tags_special_tagged_in_skip_tags_and_object_tagged(self):
self.assert_evaluate_equal(False, ['tag'], [], ['tagged'])
def test_evaluate_tags_special_tagged_in_skip_tags_and_object_untagged(self):
self.assert_evaluate_equal(True, [], [], ['tagged'])
def test_evaluate_tags_special_untagged_in_only_tags_and_object_tagged(self):
self.assert_evaluate_equal(False, ['tag'], ['untagged'], [])
def test_evaluate_tags_special_untagged_in_only_tags_and_object_untagged(self):
self.assert_evaluate_equal(True, [], ['untagged'], [])
def test_evaluate_tags_special_untagged_in_skip_tags_and_object_tagged(self):
self.assert_evaluate_equal(True, ['tag'], [], ['untagged'])
def test_evaluate_tags_special_untagged_in_skip_tags_and_object_untagged(self):
self.assert_evaluate_equal(False, [], [], ['untagged'])
def test_evaluate_tags_special_all_in_only_tags(self):
self.assert_evaluate_equal(True, ['tag'], ['all'], ['untagged'])
def test_evaluate_tags_special_all_in_only_tags_and_object_untagged(self):
self.assert_evaluate_equal(True, [], ['all'], [])
def test_evaluate_tags_special_all_in_skip_tags(self):
self.assert_evaluate_equal(False, ['tag'], ['tag'], ['all'])
def test_evaluate_tags_special_all_in_only_tags_and_special_all_in_skip_tags(self):
self.assert_evaluate_equal(False, ['tag'], ['all'], ['all'])
def test_evaluate_tags_special_all_in_skip_tags_and_always_in_object_tags(self):
self.assert_evaluate_equal(True, ['tag', 'always'], [], ['all'])
def test_evaluate_tags_special_all_in_skip_tags_and_special_always_in_skip_tags_and_always_in_object_tags(self):
self.assert_evaluate_equal(False, ['tag', 'always'], [], ['all', 'always'])
def test_evaluate_tags_accepts_lists(self):
self.assert_evaluate_equal(True, ['tag1', 'tag2'], ['tag2'], [])
def test_evaluate_tags_with_repeated_tags(self):
self.assert_evaluate_equal(False, ['tag', 'tag'], [], ['tag'])
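# Illustrative sketch: a compact model that reproduces every outcome asserted
# above for the special tags 'always', 'all', 'tagged' and 'untagged'. It is
# derived from these test cases only and is not ansible's Taggable.evaluate_tags
# implementation (for instance, 'never' handling is left out).
def evaluate_tags_model(obj_tags, only_tags, skip_tags):
    tags = set(obj_tags) or {'untagged'}
    should_run = True
    if only_tags:
        if 'always' in tags:
            should_run = True
        elif 'all' in only_tags:
            should_run = True
        elif tags & set(only_tags):
            should_run = True
        elif 'tagged' in only_tags and tags != {'untagged'}:
            should_run = True
        else:
            should_run = False
    if should_run and skip_tags:
        if 'all' in skip_tags:
            if 'always' not in tags or 'always' in skip_tags:
                should_run = False
        elif tags & set(skip_tags):
            should_run = False
        elif 'tagged' in skip_tags and tags != {'untagged'}:
            should_run = False
    return should_run
assert evaluate_tags_model(['tag', 'always'], [], ['all']) is True        # 'always' survives --skip-tags all
assert evaluate_tags_model(['tag', 'always'], [], ['all', 'always']) is False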
| 4,390 | Python | .py | 73 | 54.383562 | 116 | 0.687821 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
13,768 | test_block.py | ansible_ansible/test/units/playbook/test_block.py |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.playbook.block import Block
from ansible.playbook.task import Task
from ansible.plugins.loader import init_plugin_loader
init_plugin_loader()
class TestBlock(unittest.TestCase):
def test_construct_empty_block(self):
b = Block()
def test_construct_block_with_role(self):
pass
def test_load_block_simple(self):
ds = dict(
block=[],
rescue=[],
always=[],
# otherwise=[],
)
b = Block.load(ds)
self.assertEqual(b.block, [])
self.assertEqual(b.rescue, [])
self.assertEqual(b.always, [])
# not currently used
# self.assertEqual(b.otherwise, [])
def test_load_block_with_tasks(self):
ds = dict(
block=[dict(action='block')],
rescue=[dict(action='rescue')],
always=[dict(action='always')],
# otherwise=[dict(action='otherwise')],
)
b = Block.load(ds)
self.assertEqual(len(b.block), 1)
self.assertIsInstance(b.block[0], Task)
self.assertEqual(len(b.rescue), 1)
self.assertIsInstance(b.rescue[0], Task)
self.assertEqual(len(b.always), 1)
self.assertIsInstance(b.always[0], Task)
# not currently used
# self.assertEqual(len(b.otherwise), 1)
# self.assertIsInstance(b.otherwise[0], Task)
def test_load_implicit_block(self):
ds = [dict(action='foo')]
b = Block.load(ds)
self.assertEqual(len(b.block), 1)
self.assertIsInstance(b.block[0], Task)
def test_deserialize(self):
ds = dict(
block=[dict(action='block')],
rescue=[dict(action='rescue')],
always=[dict(action='always')],
)
b = Block.load(ds)
data = dict(parent=ds, parent_type='Block')
b.deserialize(data)
self.assertIsInstance(b._parent, Block)
| 2,689
|
Python
|
.py
| 72
| 30.458333
| 70
| 0.642994
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,769
|
test_base.py
|
ansible_ansible/test/units/playbook/test_base.py
|
# (c) 2016, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.errors import AnsibleParserError, AnsibleAssertionError
from ansible.playbook.attribute import FieldAttribute, NonInheritableFieldAttribute
from ansible.template import Templar
from ansible.playbook import base
from ansible.utils.unsafe_proxy import AnsibleUnsafeText
from units.mock.loader import DictDataLoader
class TestBase(unittest.TestCase):
ClassUnderTest = base.Base
def setUp(self):
self.assorted_vars = {'var_2_key': 'var_2_value',
'var_1_key': 'var_1_value',
'a_list': ['a_list_1', 'a_list_2'],
'a_dict': {'a_dict_key': 'a_dict_value'},
'a_set': set(['set_1', 'set_2']),
'a_int': 42,
'a_float': 37.371,
'a_bool': True,
'a_none': None,
}
self.b = self.ClassUnderTest()
def _base_validate(self, ds):
bsc = self.ClassUnderTest()
parent = ExampleParentBaseSubClass()
bsc._parent = parent
bsc._dep_chain = [parent]
parent._dep_chain = None
bsc.load_data(ds)
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
bsc.post_validate(templar)
return bsc
def test(self):
self.assertIsInstance(self.b, base.Base)
self.assertIsInstance(self.b, self.ClassUnderTest)
    # dump_me doesn't return anything or change anything, so there's not much to assert
def test_dump_me_empty(self):
self.b.dump_me()
def test_dump_me(self):
ds = {'environment': [],
'vars': {'var_2_key': 'var_2_value',
'var_1_key': 'var_1_value'}}
b = self._base_validate(ds)
b.dump_me()
def _assert_copy(self, orig, copy):
self.assertIsInstance(copy, self.ClassUnderTest)
self.assertIsInstance(copy, base.Base)
self.assertEqual(len(orig.fattributes), len(copy.fattributes))
sentinel = 'Empty DS'
self.assertEqual(getattr(orig, '_ds', sentinel), getattr(copy, '_ds', sentinel))
def test_copy_empty(self):
copy = self.b.copy()
self._assert_copy(self.b, copy)
def test_copy_with_vars(self):
ds = {'vars': self.assorted_vars}
b = self._base_validate(ds)
copy = b.copy()
self._assert_copy(b, copy)
def test_serialize(self):
ds = {}
ds = {'environment': [],
'vars': self.assorted_vars
}
b = self._base_validate(ds)
ret = b.serialize()
self.assertIsInstance(ret, dict)
def test_deserialize(self):
data = {}
d = self.ClassUnderTest()
d.deserialize(data)
self.assertIn('_run_once', d.__dict__)
self.assertIn('_check_mode', d.__dict__)
data = {'no_log': False,
'remote_user': None,
'vars': self.assorted_vars,
'environment': [],
'run_once': False,
'connection': None,
'ignore_errors': False,
'port': 22,
'a_sentinel_with_an_unlikely_name': ['sure, a list']}
d = self.ClassUnderTest()
d.deserialize(data)
self.assertNotIn('_a_sentinel_with_an_unlikely_name', d.__dict__)
self.assertIn('_run_once', d.__dict__)
self.assertIn('_check_mode', d.__dict__)
def test_serialize_then_deserialize(self):
ds = {'environment': [],
'vars': self.assorted_vars}
b = self._base_validate(ds)
copy = b.copy()
ret = b.serialize()
b.deserialize(ret)
c = self.ClassUnderTest()
c.deserialize(ret)
# TODO: not a great test, but coverage...
self.maxDiff = None
self.assertDictEqual(b.serialize(), copy.serialize())
self.assertDictEqual(c.serialize(), copy.serialize())
def test_post_validate_empty(self):
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
ret = self.b.post_validate(templar)
self.assertIsNone(ret)
def test_get_ds_none(self):
ds = self.b.get_ds()
self.assertIsNone(ds)
def test_load_data_ds_is_none(self):
self.assertRaises(AssertionError, self.b.load_data, None)
def test_load_data_invalid_attr(self):
ds = {'not_a_valid_attr': [],
'other': None}
self.assertRaises(AnsibleParserError, self.b.load_data, ds)
def test_load_data_invalid_attr_type(self):
ds = {'environment': True}
# environment is supposed to be a list. This
# seems like it shouldn't work?
ret = self.b.load_data(ds)
self.assertEqual(True, ret._environment)
def test_post_validate(self):
ds = {'environment': [],
'port': 443}
b = self._base_validate(ds)
self.assertEqual(b.port, 443)
self.assertEqual(b.environment, [])
def test_post_validate_invalid_attr_types(self):
ds = {'environment': [],
'port': 'some_port'}
b = self._base_validate(ds)
self.assertEqual(b.port, 'some_port')
def test_squash(self):
data = self.b.serialize()
self.b.squash()
squashed_data = self.b.serialize()
# TODO: assert something
self.assertFalse(data['squashed'])
self.assertTrue(squashed_data['squashed'])
def test_vars(self):
# vars as a dict.
ds = {'environment': [],
'vars': {'var_2_key': 'var_2_value',
'var_1_key': 'var_1_value'}}
b = self._base_validate(ds)
self.assertEqual(b.vars['var_1_key'], 'var_1_value')
def test_vars_list_of_dicts(self):
ds = {'environment': [],
'vars': [{'var_2_key': 'var_2_value'},
{'var_1_key': 'var_1_value'}]
}
self.assertRaises(AnsibleParserError, self.b.load_data, ds)
def test_vars_not_dict_or_list(self):
ds = {'environment': [],
'vars': 'I am a string, not a dict or a list of dicts'}
self.assertRaises(AnsibleParserError, self.b.load_data, ds)
def test_vars_not_valid_identifier(self):
ds = {'environment': [],
'vars': [{'var_2_key': 'var_2_value'},
{'1an-invalid identifer': 'var_1_value'}]
}
self.assertRaises(AnsibleParserError, self.b.load_data, ds)
def test_vars_is_list_but_not_of_dicts(self):
ds = {'environment': [],
'vars': ['foo', 'bar', 'this is a string not a dict']
}
self.assertRaises(AnsibleParserError, self.b.load_data, ds)
def test_vars_is_none(self):
        # If vars is None, we should get an empty dict back
ds = {'environment': [],
'vars': None
}
b = self._base_validate(ds)
self.assertEqual(b.vars, {})
def test_validate_empty(self):
self.b.validate()
self.assertTrue(self.b._validated)
def test_getters(self):
# not sure why these exist, but here are tests anyway
loader = self.b.get_loader()
variable_manager = self.b.get_variable_manager()
self.assertEqual(loader, self.b._loader)
self.assertEqual(variable_manager, self.b._variable_manager)
class TestExtendValue(unittest.TestCase):
    # _extend_value could be a module-level function or a staticmethod, but since
    # it's not, the test is here.
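    # In short: _extend_value wraps scalar operands in a list, treats None as an
    # empty list, and concatenates the two, with prepend=True reversing the
    # order -- which is exactly what the cases below assert.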
def test_extend_value_list_newlist(self):
b = base.Base()
value_list = ['first', 'second']
new_value_list = ['new_first', 'new_second']
ret = b._extend_value(value_list, new_value_list)
self.assertEqual(value_list + new_value_list, ret)
def test_extend_value_list_newlist_prepend(self):
b = base.Base()
value_list = ['first', 'second']
new_value_list = ['new_first', 'new_second']
ret_prepend = b._extend_value(value_list, new_value_list, prepend=True)
self.assertEqual(new_value_list + value_list, ret_prepend)
def test_extend_value_newlist_list(self):
b = base.Base()
value_list = ['first', 'second']
new_value_list = ['new_first', 'new_second']
ret = b._extend_value(new_value_list, value_list)
self.assertEqual(new_value_list + value_list, ret)
def test_extend_value_newlist_list_prepend(self):
b = base.Base()
value_list = ['first', 'second']
new_value_list = ['new_first', 'new_second']
ret = b._extend_value(new_value_list, value_list, prepend=True)
self.assertEqual(value_list + new_value_list, ret)
def test_extend_value_string_newlist(self):
b = base.Base()
some_string = 'some string'
new_value_list = ['new_first', 'new_second']
ret = b._extend_value(some_string, new_value_list)
self.assertEqual([some_string] + new_value_list, ret)
def test_extend_value_string_newstring(self):
b = base.Base()
some_string = 'some string'
new_value_string = 'this is the new values'
ret = b._extend_value(some_string, new_value_string)
self.assertEqual([some_string, new_value_string], ret)
def test_extend_value_list_newstring(self):
b = base.Base()
value_list = ['first', 'second']
new_value_string = 'this is the new values'
ret = b._extend_value(value_list, new_value_string)
self.assertEqual(value_list + [new_value_string], ret)
def test_extend_value_none_none(self):
b = base.Base()
ret = b._extend_value(None, None)
self.assertEqual(len(ret), 0)
self.assertFalse(ret)
def test_extend_value_none_list(self):
b = base.Base()
ret = b._extend_value(None, ['foo'])
self.assertEqual(ret, ['foo'])
class ExampleException(Exception):
pass
# naming fails me...
class ExampleParentBaseSubClass(base.Base):
test_attr_parent_string = FieldAttribute(isa='string', default='A string attr for a class that may be a parent for testing')
def __init__(self):
super(ExampleParentBaseSubClass, self).__init__()
self._dep_chain = None
def get_dep_chain(self):
return self._dep_chain
class ExampleSubClass(base.Base):
test_attr_blip = NonInheritableFieldAttribute(isa='string', default='example sub class test_attr_blip',
always_post_validate=True)
def __init__(self):
super(ExampleSubClass, self).__init__()
class BaseSubClass(base.Base):
name = FieldAttribute(isa='string', default='', always_post_validate=True)
test_attr_bool = FieldAttribute(isa='bool', always_post_validate=True)
test_attr_int = FieldAttribute(isa='int', always_post_validate=True)
test_attr_float = FieldAttribute(isa='float', default=3.14159, always_post_validate=True)
test_attr_list = FieldAttribute(isa='list', listof=(str,), always_post_validate=True)
test_attr_list_no_listof = FieldAttribute(isa='list', always_post_validate=True)
test_attr_list_required = FieldAttribute(isa='list', listof=(str,), required=True,
default=list, always_post_validate=True)
test_attr_string = FieldAttribute(isa='string', default='the_test_attr_string_default_value')
test_attr_string_required = FieldAttribute(isa='string', required=True,
default='the_test_attr_string_default_value')
test_attr_percent = FieldAttribute(isa='percent', always_post_validate=True)
test_attr_set = FieldAttribute(isa='set', default=set, always_post_validate=True)
test_attr_dict = FieldAttribute(isa='dict', default=lambda: {'a_key': 'a_value'}, always_post_validate=True)
test_attr_class = FieldAttribute(isa='class', class_type=ExampleSubClass)
test_attr_class_post_validate = FieldAttribute(isa='class', class_type=ExampleSubClass,
always_post_validate=True)
test_attr_unknown_isa = FieldAttribute(isa='not_a_real_isa', always_post_validate=True)
test_attr_example = FieldAttribute(isa='string', default='the_default',
always_post_validate=True)
test_attr_none = FieldAttribute(isa='string', always_post_validate=True)
test_attr_preprocess = FieldAttribute(isa='string', default='the default for preprocess')
test_attr_method = FieldAttribute(isa='string', default='some attr with a getter',
always_post_validate=True)
test_attr_method_missing = FieldAttribute(isa='string', default='some attr with a missing getter',
always_post_validate=True)
def _get_attr_test_attr_method(self):
return 'foo bar'
def _validate_test_attr_example(self, attr, name, value):
if not isinstance(value, str):
raise ExampleException('test_attr_example is not a string: %s type=%s' % (value, type(value)))
def _post_validate_test_attr_example(self, attr, value, templar):
after_template_value = templar.template(value)
return after_template_value
def _post_validate_test_attr_none(self, attr, value, templar):
return None
# terrible name, but it is a TestBase subclass for testing subclasses of Base
class TestBaseSubClass(TestBase):
ClassUnderTest = BaseSubClass
def _base_validate(self, ds):
ds['test_attr_list_required'] = []
return super(TestBaseSubClass, self)._base_validate(ds)
def test_attr_bool(self):
ds = {'test_attr_bool': True}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_bool, True)
def test_attr_int(self):
MOST_RANDOM_NUMBER = 37
ds = {'test_attr_int': MOST_RANDOM_NUMBER}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_int, MOST_RANDOM_NUMBER)
def test_attr_int_del(self):
MOST_RANDOM_NUMBER = 37
ds = {'test_attr_int': MOST_RANDOM_NUMBER}
bsc = self._base_validate(ds)
del bsc.test_attr_int
self.assertNotIn('_test_attr_int', bsc.__dict__)
def test_attr_float(self):
roughly_pi = 4.0
ds = {'test_attr_float': roughly_pi}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_float, roughly_pi)
def test_attr_percent(self):
percentage = '90%'
percentage_float = 90.0
ds = {'test_attr_percent': percentage}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_percent, percentage_float)
# This method works hard and gives it its all and everything it's got. It doesn't
# leave anything on the field. It deserves to pass. It has earned it.
def test_attr_percent_110_percent(self):
percentage = '110.11%'
percentage_float = 110.11
ds = {'test_attr_percent': percentage}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_percent, percentage_float)
# This method is just here for the paycheck.
def test_attr_percent_60_no_percent_sign(self):
percentage = '60'
percentage_float = 60.0
ds = {'test_attr_percent': percentage}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_percent, percentage_float)
def test_attr_set(self):
test_set = set(['first_string_in_set', 'second_string_in_set'])
ds = {'test_attr_set': test_set}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_set, test_set)
def test_attr_set_string(self):
test_data = ['something', 'other']
test_value = ','.join(test_data)
ds = {'test_attr_set': test_value}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_set, set(test_data))
def test_attr_set_not_string_or_list(self):
test_value = 37.1
ds = {'test_attr_set': test_value}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_set, set([test_value]))
def test_attr_dict(self):
test_dict = {'a_different_key': 'a_different_value'}
ds = {'test_attr_dict': test_dict}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_dict, test_dict)
def test_attr_dict_string(self):
test_value = 'just_some_random_string'
ds = {'test_attr_dict': test_value}
self.assertRaisesRegex(AnsibleParserError, 'is not a dictionary', self._base_validate, ds)
def test_attr_class(self):
esc = ExampleSubClass()
ds = {'test_attr_class': esc}
bsc = self._base_validate(ds)
self.assertIs(bsc.test_attr_class, esc)
def test_attr_class_wrong_type(self):
not_a_esc = ExampleSubClass
ds = {'test_attr_class': not_a_esc}
bsc = self._base_validate(ds)
self.assertIs(bsc.test_attr_class, not_a_esc)
def test_attr_class_post_validate(self):
esc = ExampleSubClass()
ds = {'test_attr_class_post_validate': esc}
bsc = self._base_validate(ds)
self.assertIs(bsc.test_attr_class_post_validate, esc)
def test_attr_class_post_validate_class_not_instance(self):
not_a_esc = ExampleSubClass
ds = {'test_attr_class_post_validate': not_a_esc}
self.assertRaisesRegex(AnsibleParserError, "is not a valid.*got a <class 'type'> instead",
self._base_validate, ds)
def test_attr_class_post_validate_wrong_class(self):
not_a_esc = 37
ds = {'test_attr_class_post_validate': not_a_esc}
self.assertRaisesRegex(AnsibleParserError, 'is not a valid.*got a.*int.*instead',
self._base_validate, ds)
def test_attr_remote_user(self):
ds = {'remote_user': 'testuser'}
bsc = self._base_validate(ds)
# TODO: attempt to verify we called parent getters etc
self.assertEqual(bsc.remote_user, 'testuser')
    def test_attr_example_undefined(self):
        ds = {'test_attr_example': '{{ some_var_that_shouldnt_exist_to_test_omit }}'}
        exc_regex_str = 'test_attr_example.*has an invalid value, which includes an undefined variable.*some_var_that_shouldnt*'
        self.assertRaisesRegex(AnsibleParserError, exc_regex_str, self._base_validate, ds)
def test_attr_name_undefined(self):
ds = {'name': '{{ some_var_that_shouldnt_exist_to_test_omit }}'}
bsc = self._base_validate(ds)
        # the attribute 'name' is special-cased in post_validate
self.assertEqual(bsc.name, '{{ some_var_that_shouldnt_exist_to_test_omit }}')
def test_subclass_validate_method(self):
ds = {'test_attr_list': ['string_list_item_1', 'string_list_item_2'],
'test_attr_example': 'the_test_attr_example_value_string'}
# Not throwing an exception here is the test
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_example, 'the_test_attr_example_value_string')
def test_subclass_validate_method_invalid(self):
ds = {'test_attr_example': [None]}
self.assertRaises(ExampleException, self._base_validate, ds)
def test_attr_none(self):
ds = {'test_attr_none': 'foo'}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_none, None)
def test_attr_string(self):
the_string_value = "the new test_attr_string_value"
ds = {'test_attr_string': the_string_value}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_string, the_string_value)
def test_attr_string_invalid_list(self):
ds = {'test_attr_string': ['The new test_attr_string', 'value, however in a list']}
self.assertRaises(AnsibleParserError, self._base_validate, ds)
def test_attr_string_required(self):
the_string_value = "the new test_attr_string_required_value"
ds = {'test_attr_string_required': the_string_value}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_string_required, the_string_value)
def test_attr_list_invalid(self):
ds = {'test_attr_list': {}}
self.assertRaises(AnsibleParserError, self._base_validate, ds)
def test_attr_list(self):
string_list = ['foo', 'bar']
ds = {'test_attr_list': string_list}
bsc = self._base_validate(ds)
self.assertEqual(string_list, bsc._test_attr_list)
def test_attr_list_none(self):
ds = {'test_attr_list': None}
bsc = self._base_validate(ds)
self.assertEqual(None, bsc._test_attr_list)
def test_attr_list_no_listof(self):
test_list = ['foo', 'bar', 123]
ds = {'test_attr_list_no_listof': test_list}
bsc = self._base_validate(ds)
self.assertEqual(test_list, bsc._test_attr_list_no_listof)
def test_attr_list_required(self):
string_list = ['foo', 'bar']
ds = {'test_attr_list_required': string_list}
bsc = self.ClassUnderTest()
bsc.load_data(ds)
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
bsc.post_validate(templar)
self.assertEqual(string_list, bsc._test_attr_list_required)
def test_attr_list_required_empty_string(self):
string_list = [""]
ds = {'test_attr_list_required': string_list}
bsc = self.ClassUnderTest()
bsc.load_data(ds)
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
self.assertRaisesRegex(AnsibleParserError, 'cannot have empty values',
bsc.post_validate, templar)
def test_attr_unknown(self):
self.assertRaises(
AnsibleAssertionError,
self._base_validate,
{'test_attr_unknown_isa': True}
)
def test_attr_method(self):
ds = {'test_attr_method': 'value from the ds'}
bsc = self._base_validate(ds)
        # The value returned by the subclass's _get_attr_test_attr_method
self.assertEqual(bsc.test_attr_method, 'foo bar')
def test_attr_method_missing(self):
a_string = 'The value set from the ds'
ds = {'test_attr_method_missing': a_string}
bsc = self._base_validate(ds)
self.assertEqual(bsc.test_attr_method_missing, a_string)
def test_get_validated_value_string_rewrap_unsafe(self):
attribute = FieldAttribute(isa='string')
value = AnsibleUnsafeText(u'bar')
templar = Templar(None)
bsc = self.ClassUnderTest()
result = bsc.get_validated_value('foo', attribute, value, templar)
self.assertIsInstance(result, AnsibleUnsafeText)
self.assertEqual(result, AnsibleUnsafeText(u'bar'))
| 23,630
|
Python
|
.py
| 500
| 37.916
| 128
| 0.619761
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,770
|
test_collectionsearch.py
|
ansible_ansible/test/units/playbook/test_collectionsearch.py
|
# (c) 2020 Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from ansible.errors import AnsibleParserError
from ansible.playbook.play import Play
from ansible.playbook.task import Task
from ansible.playbook.block import Block
import pytest
def test_collection_static_warning(capsys):
"""Test that collection name is not templated.
Also, make sure that users see the warning message for the referenced name.
"""
collection_name = "foo.{{bar}}"
p = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
connection='local',
collections=collection_name,
))
assert collection_name in p.collections
std_out, std_err = capsys.readouterr()
assert '[WARNING]: "collections" is not templatable, but we found: %s' % collection_name in std_err
assert '' == std_out
def test_collection_invalid_data_play():
"""Test that collection as a dict at the play level fails with parser error"""
collection_name = {'name': 'foo'}
with pytest.raises(AnsibleParserError):
Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
connection='local',
collections=collection_name,
))
def test_collection_invalid_data_task():
"""Test that collection as a dict at the task level fails with parser error"""
collection_name = {'name': 'foo'}
with pytest.raises(AnsibleParserError):
Task.load(dict(
name="test task",
collections=collection_name,
))
def test_collection_invalid_data_block():
"""Test that collection as a dict at the block level fails with parser error"""
collection_name = {'name': 'foo'}
with pytest.raises(AnsibleParserError):
Block.load(dict(
block=[dict(name="test task", collections=collection_name)]
))
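# For contrast (illustrative only, not asserted here): the supported forms are a
# plain string, as in test_collection_static_warning above, or a flat list of
# collection name strings such as collections=['ns.col_one', 'ns.col_two'].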
| 2,546
|
Python
|
.py
| 64
| 34.53125
| 103
| 0.697166
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,771
|
test_included_file.py
|
ansible_ansible/test/units/playbook/test_included_file.py
|
# (c) 2016, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import pytest
from unittest.mock import MagicMock
from units.mock.loader import DictDataLoader
from ansible.playbook.block import Block
from ansible.playbook.task import Task
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
from ansible.executor import task_result
from ansible.playbook.included_file import IncludedFile
from ansible.errors import AnsibleParserError
@pytest.fixture
def mock_iterator():
mock_iterator = MagicMock(name='MockIterator')
mock_iterator._play = MagicMock(name='MockPlay')
return mock_iterator
@pytest.fixture
def mock_variable_manager():
# TODO: can we use a real VariableManager?
mock_variable_manager = MagicMock(name='MockVariableManager')
mock_variable_manager.get_vars.return_value = dict()
return mock_variable_manager
def test_equals_ok():
uuid = '111-111'
parent = MagicMock(name='MockParent')
parent._uuid = uuid
task = MagicMock(name='MockTask')
task._uuid = uuid
task._parent = parent
inc_a = IncludedFile('a.yml', {}, {}, task)
inc_b = IncludedFile('a.yml', {}, {}, task)
assert inc_a == inc_b
def test_equals_different_tasks():
parent = MagicMock(name='MockParent')
parent._uuid = '111-111'
task_a = MagicMock(name='MockTask')
task_a._uuid = '11-11'
task_a._parent = parent
task_b = MagicMock(name='MockTask')
task_b._uuid = '22-22'
task_b._parent = parent
inc_a = IncludedFile('a.yml', {}, {}, task_a)
inc_b = IncludedFile('a.yml', {}, {}, task_b)
assert inc_a != inc_b
def test_equals_different_parents():
parent_a = MagicMock(name='MockParent')
parent_a._uuid = '111-111'
parent_b = MagicMock(name='MockParent')
parent_b._uuid = '222-222'
task_a = MagicMock(name='MockTask')
task_a._uuid = '11-11'
task_a._parent = parent_a
task_b = MagicMock(name='MockTask')
task_b._uuid = '11-11'
task_b._parent = parent_b
inc_a = IncludedFile('a.yml', {}, {}, task_a)
inc_b = IncludedFile('a.yml', {}, {}, task_b)
assert inc_a != inc_b
def test_included_file_instantiation():
filename = 'somefile.yml'
inc_file = IncludedFile(filename=filename, args={}, vars={}, task=None)
assert isinstance(inc_file, IncludedFile)
assert inc_file._filename == filename
assert inc_file._args == {}
assert inc_file._vars == {}
assert inc_file._task is None
def test_process_include_tasks_results(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
parent_task_ds = {'debug': 'msg=foo'}
parent_task = Task.load(parent_task_ds)
parent_task._play = None
task_ds = {'include_tasks': 'include_test.yml'}
loaded_task = TaskInclude.load(task_ds, task_include=parent_task)
return_data = {'include': 'include_test.yml'}
# The task in the TaskResult has to be a TaskInclude so it has a .static attr
result1 = task_result.TaskResult(host=hostname, task=loaded_task, return_data=return_data)
result2 = task_result.TaskResult(host=hostname2, task=loaded_task, return_data=return_data)
results = [result1, result2]
fake_loader = DictDataLoader({'include_test.yml': ""})
res = IncludedFile.process_include_results(results, mock_iterator, fake_loader, mock_variable_manager)
assert isinstance(res, list)
assert len(res) == 1
assert res[0]._filename == os.path.join(os.getcwd(), 'include_test.yml')
assert res[0]._hosts == ['testhost1', 'testhost2']
assert res[0]._args == {}
assert res[0]._vars == {}
def test_process_include_tasks_diff_files(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
parent_task_ds = {'debug': 'msg=foo'}
parent_task = Task.load(parent_task_ds)
parent_task._play = None
task_ds = {'include_tasks': 'include_test.yml'}
loaded_task = TaskInclude.load(task_ds, task_include=parent_task)
loaded_task._play = None
child_task_ds = {'include_tasks': 'other_include_test.yml'}
loaded_child_task = TaskInclude.load(child_task_ds, task_include=loaded_task)
loaded_child_task._play = None
return_data = {'include': 'include_test.yml'}
# The task in the TaskResult has to be a TaskInclude so it has a .static attr
result1 = task_result.TaskResult(host=hostname, task=loaded_task, return_data=return_data)
return_data = {'include': 'other_include_test.yml'}
result2 = task_result.TaskResult(host=hostname2, task=loaded_child_task, return_data=return_data)
results = [result1, result2]
fake_loader = DictDataLoader({'include_test.yml': "",
'other_include_test.yml': ""})
res = IncludedFile.process_include_results(results, mock_iterator, fake_loader, mock_variable_manager)
assert isinstance(res, list)
assert res[0]._filename == os.path.join(os.getcwd(), 'include_test.yml')
assert res[1]._filename == os.path.join(os.getcwd(), 'other_include_test.yml')
assert res[0]._hosts == ['testhost1']
assert res[1]._hosts == ['testhost2']
assert res[0]._args == {}
assert res[1]._args == {}
assert res[0]._vars == {}
assert res[1]._vars == {}
def test_process_include_tasks_simulate_free(mock_iterator, mock_variable_manager):
hostname = "testhost1"
hostname2 = "testhost2"
parent_task_ds = {'debug': 'msg=foo'}
parent_task1 = Task.load(parent_task_ds)
parent_task2 = Task.load(parent_task_ds)
parent_task1._play = None
parent_task2._play = None
task_ds = {'include_tasks': 'include_test.yml'}
loaded_task1 = TaskInclude.load(task_ds, task_include=parent_task1)
loaded_task2 = TaskInclude.load(task_ds, task_include=parent_task2)
return_data = {'include': 'include_test.yml'}
# The task in the TaskResult has to be a TaskInclude so it has a .static attr
result1 = task_result.TaskResult(host=hostname, task=loaded_task1, return_data=return_data)
result2 = task_result.TaskResult(host=hostname2, task=loaded_task2, return_data=return_data)
results = [result1, result2]
fake_loader = DictDataLoader({'include_test.yml': ""})
res = IncludedFile.process_include_results(results, mock_iterator, fake_loader, mock_variable_manager)
assert isinstance(res, list)
assert len(res) == 2
assert res[0]._filename == os.path.join(os.getcwd(), 'include_test.yml')
assert res[1]._filename == os.path.join(os.getcwd(), 'include_test.yml')
assert res[0]._hosts == ['testhost1']
assert res[1]._hosts == ['testhost2']
assert res[0]._args == {}
assert res[1]._args == {}
assert res[0]._vars == {}
assert res[1]._vars == {}
def test_process_include_simulate_free_block_role_tasks(mock_iterator,
mock_variable_manager):
"""Test loading the same role returns different included files
In the case of free, we may end up with included files from roles that
have the same parent but are different tasks. Previously the comparison
for equality did not check if the tasks were the same and only checked
that the parents were the same. This lead to some tasks being run
incorrectly and some tasks being silently dropped."""
fake_loader = DictDataLoader({
'include_test.yml': "",
'/etc/ansible/roles/foo_role/tasks/task1.yml': """
- debug: msg=task1
""",
'/etc/ansible/roles/foo_role/tasks/task2.yml': """
- debug: msg=task2
""",
})
hostname = "testhost1"
hostname2 = "testhost2"
role1_ds = {
'name': 'task1 include',
'include_role': {
'name': 'foo_role',
'tasks_from': 'task1.yml'
}
}
role2_ds = {
'name': 'task2 include',
'include_role': {
'name': 'foo_role',
'tasks_from': 'task2.yml'
}
}
parent_task_ds = {
'block': [
role1_ds,
role2_ds
]
}
parent_block = Block.load(parent_task_ds, loader=fake_loader)
parent_block._play = None
include_role1_ds = {
'include_args': {
'name': 'foo_role',
'tasks_from': 'task1.yml'
}
}
include_role2_ds = {
'include_args': {
'name': 'foo_role',
'tasks_from': 'task2.yml'
}
}
include_role1 = IncludeRole.load(role1_ds,
block=parent_block,
loader=fake_loader)
include_role2 = IncludeRole.load(role2_ds,
block=parent_block,
loader=fake_loader)
result1 = task_result.TaskResult(host=hostname,
task=include_role1,
return_data=include_role1_ds)
result2 = task_result.TaskResult(host=hostname2,
task=include_role2,
return_data=include_role2_ds)
results = [result1, result2]
res = IncludedFile.process_include_results(results,
mock_iterator,
fake_loader,
mock_variable_manager)
assert isinstance(res, list)
# we should get two different includes
assert len(res) == 2
assert res[0]._filename == 'foo_role'
assert res[1]._filename == 'foo_role'
# with different tasks
assert res[0]._task != res[1]._task
assert res[0]._hosts == ['testhost1']
assert res[1]._hosts == ['testhost2']
assert res[0]._args == {}
assert res[1]._args == {}
assert res[0]._vars == {}
assert res[1]._vars == {}
def test_empty_raw_params():
parent_task_ds = {'debug': 'msg=foo'}
parent_task = Task.load(parent_task_ds)
parent_task._play = None
task_ds_list = [
{
'include_tasks': ''
},
{
'import_tasks': ''
}
]
for task_ds in task_ds_list:
with pytest.raises(AnsibleParserError):
TaskInclude.load(task_ds, task_include=parent_task)
| 11,002
|
Python
|
.py
| 260
| 35.061538
| 106
| 0.637752
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,772
|
test_task.py
|
ansible_ansible/test/units/playbook/test_task.py
|
# Copyright: (c) Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import unittest
from unittest.mock import patch
from ansible import errors
from ansible.parsing.yaml import objects
from ansible.playbook.task import Task
from ansible.plugins.loader import init_plugin_loader
basic_command_task = dict(
name='Test Task',
command='echo hi'
)
kv_command_task = dict(
action='command echo hi'
)
# See #36848
kv_bad_args_str = '- apk: sdfs sf sdf 37'
kv_bad_args_ds = {'apk': 'sdfs sf sdf 37'}
class TestTask(unittest.TestCase):
def setUp(self):
self._task_base = {'name': 'test', 'action': 'debug'}
def tearDown(self):
pass
def test_construct_empty_task(self):
Task()
def test_construct_task_with_role(self):
pass
def test_construct_task_with_block(self):
pass
def test_construct_task_with_role_and_block(self):
pass
def test_load_task_simple(self):
t = Task.load(basic_command_task)
assert t is not None
self.assertEqual(t.get_name(), basic_command_task['name'])
self.assertEqual(t.action, 'command')
self.assertEqual(t.args, dict(_raw_params='echo hi'))
def test_load_task_kv_form(self):
t = Task.load(kv_command_task)
self.assertEqual(t.action, 'command')
self.assertEqual(t.args, dict(_raw_params='echo hi'))
@patch.object(errors.AnsibleError, '_get_error_lines_from_file')
def test_load_task_kv_form_error_36848(self, mock_get_err_lines):
init_plugin_loader()
ds = objects.AnsibleMapping(kv_bad_args_ds)
ds.ansible_pos = ('test_task_faux_playbook.yml', 1, 1)
mock_get_err_lines.return_value = (kv_bad_args_str, '')
with self.assertRaises(errors.AnsibleParserError) as cm:
Task.load(ds)
self.assertIsInstance(cm.exception, errors.AnsibleParserError)
self.assertEqual(cm.exception.obj, ds)
self.assertEqual(cm.exception.obj, kv_bad_args_ds)
self.assertIn("The error appears to be in 'test_task_faux_playbook.yml", cm.exception.message)
self.assertIn(kv_bad_args_str, cm.exception.message)
self.assertIn('apk', cm.exception.message)
self.assertEqual(cm.exception.message.count('The offending line'), 1)
self.assertEqual(cm.exception.message.count('The error appears to be in'), 1)
def test_task_auto_name(self):
self.assertNotIn('name', kv_command_task)
t = Task.load(kv_command_task)
self.assertEqual(t.get_name(), 'command')
def test_delay(self):
good_params = [
(0, 0),
(0.1, 0.1),
('0.3', 0.3),
('0.03', 0.03),
('12', 12),
(12, 12),
(1.2, 1.2),
('1.2', 1.2),
('1.0', 1),
]
for delay, expected in good_params:
with self.subTest(f'type "{type(delay)}" was not cast to float', delay=delay, expected=expected):
p = dict(delay=delay)
p.update(self._task_base)
t = Task().load_data(p)
self.assertEqual(t.get_validated_value('delay', t.fattributes.get('delay'), delay, None), expected)
bad_params = [
('E', ValueError),
('1.E', ValueError),
('E.1', ValueError),
]
for delay, expected in bad_params:
            with self.subTest(f'type "{type(delay)}" was cast to float w/o error', delay=delay, expected=expected):
p = dict(delay=delay)
p.update(self._task_base)
t = Task().load_data(p)
with self.assertRaises(expected):
dummy = t.get_validated_value('delay', t.fattributes.get('delay'), delay, None)
def test_task_auto_name_with_role(self):
pass
def test_load_task_complex_form(self):
pass
def test_can_load_module_complex_form(self):
pass
def test_local_action_implies_delegate(self):
pass
def test_local_action_conflicts_with_delegate(self):
pass
def test_delegate_to_parses(self):
pass
| 4,244
|
Python
|
.py
| 104
| 32.336538
| 115
| 0.618098
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,773
|
test_attribute.py
|
ansible_ansible/test/units/playbook/test_attribute.py
|
# (c) 2015, Marius Gedminas <marius@gedmin.as>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.playbook.attribute import Attribute
class TestAttribute(unittest.TestCase):
def setUp(self):
self.one = Attribute(priority=100)
self.two = Attribute(priority=0)
def test_eq(self):
self.assertTrue(self.one == self.one)
self.assertFalse(self.one == self.two)
def test_ne(self):
self.assertFalse(self.one != self.one)
self.assertTrue(self.one != self.two)
def test_lt(self):
self.assertFalse(self.one < self.one)
self.assertTrue(self.one < self.two)
self.assertFalse(self.two < self.one)
def test_gt(self):
self.assertFalse(self.one > self.one)
self.assertFalse(self.one > self.two)
self.assertTrue(self.two > self.one)
def test_le(self):
self.assertTrue(self.one <= self.one)
self.assertTrue(self.one <= self.two)
self.assertFalse(self.two <= self.one)
def test_ge(self):
self.assertTrue(self.one >= self.one)
self.assertFalse(self.one >= self.two)
self.assertTrue(self.two >= self.one)
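# The assertions above rely on Attribute ordering being intentionally inverted:
# an Attribute with a higher priority value compares as "less than" one with a
# lower value, so sorting places higher-priority attributes first.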
| 1,833
|
Python
|
.py
| 45
| 35.666667
| 70
| 0.701182
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,774
|
test_include_role.py
|
ansible_ansible/test/units/playbook/role/test_include_role.py
|
# (c) 2016, Daniel Miranda <danielkza2@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from unittest.mock import patch
from ansible.playbook import Play
from ansible.playbook.role_include import IncludeRole
from ansible.playbook.task import Task
from ansible.vars.manager import VariableManager
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
class TestIncludeRole(unittest.TestCase):
def setUp(self):
self.loader = DictDataLoader({
'/etc/ansible/roles/l1/tasks/main.yml': """
- shell: echo 'hello world from l1'
- include_role: name=l2
""",
'/etc/ansible/roles/l1/tasks/alt.yml': """
- shell: echo 'hello world from l1 alt'
- include_role: name=l2 tasks_from=alt defaults_from=alt
""",
'/etc/ansible/roles/l1/defaults/main.yml': """
test_variable: l1-main
l1_variable: l1-main
""",
'/etc/ansible/roles/l1/defaults/alt.yml': """
test_variable: l1-alt
l1_variable: l1-alt
""",
'/etc/ansible/roles/l2/tasks/main.yml': """
- shell: echo 'hello world from l2'
- include_role: name=l3
""",
'/etc/ansible/roles/l2/tasks/alt.yml': """
- shell: echo 'hello world from l2 alt'
- include_role: name=l3 tasks_from=alt defaults_from=alt
""",
'/etc/ansible/roles/l2/defaults/main.yml': """
test_variable: l2-main
l2_variable: l2-main
""",
'/etc/ansible/roles/l2/defaults/alt.yml': """
test_variable: l2-alt
l2_variable: l2-alt
""",
'/etc/ansible/roles/l3/tasks/main.yml': """
- shell: echo 'hello world from l3'
""",
'/etc/ansible/roles/l3/tasks/alt.yml': """
- shell: echo 'hello world from l3 alt'
""",
'/etc/ansible/roles/l3/defaults/main.yml': """
test_variable: l3-main
l3_variable: l3-main
""",
'/etc/ansible/roles/l3/defaults/alt.yml': """
test_variable: l3-alt
l3_variable: l3-alt
"""
})
self.var_manager = VariableManager(loader=self.loader)
def tearDown(self):
pass
def flatten_tasks(self, tasks):
for task in tasks:
if isinstance(task, IncludeRole):
blocks, handlers = task.get_block_list(loader=self.loader)
for block in blocks:
yield from self.flatten_tasks(block.block)
elif isinstance(task, Task):
yield task
else:
yield from self.flatten_tasks(task.block)
def get_tasks_vars(self, play, tasks):
for task in self.flatten_tasks(tasks):
if task.implicit:
# skip meta: role_complete
continue
role = task._role
yield (role.get_name(),
self.var_manager.get_vars(play=play, task=task))
@patch('ansible.playbook.role.definition.unfrackpath',
mock_unfrackpath_noop)
def test_simple(self):
"""Test one-level include with default tasks and variables"""
play = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
tasks=[
{'include_role': 'name=l3'}
]
), loader=self.loader, variable_manager=self.var_manager)
tasks = play.compile()
tested = False
for role, task_vars in self.get_tasks_vars(play, tasks):
tested = True
self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
self.assertEqual(task_vars.get('test_variable'), 'l3-main')
self.assertTrue(tested)
@patch('ansible.playbook.role.definition.unfrackpath',
mock_unfrackpath_noop)
def test_simple_alt_files(self):
"""Test one-level include with alternative tasks and variables"""
play = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
tasks=[{'include_role': 'name=l3 tasks_from=alt defaults_from=alt'}]),
loader=self.loader, variable_manager=self.var_manager)
tasks = play.compile()
tested = False
for role, task_vars in self.get_tasks_vars(play, tasks):
tested = True
self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
self.assertTrue(tested)
@patch('ansible.playbook.role.definition.unfrackpath',
mock_unfrackpath_noop)
def test_nested(self):
"""
Test nested includes with default tasks and variables.
Variables from outer roles should be inherited, but overridden in inner
roles.
"""
play = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
tasks=[
{'include_role': 'name=l1'}
]
), loader=self.loader, variable_manager=self.var_manager)
tasks = play.compile()
expected_roles = ['l1', 'l2', 'l3']
for role, task_vars in self.get_tasks_vars(play, tasks):
expected_roles.remove(role)
# Outer-most role must not have variables from inner roles yet
if role == 'l1':
self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
self.assertEqual(task_vars.get('l2_variable'), None)
self.assertEqual(task_vars.get('l3_variable'), None)
self.assertEqual(task_vars.get('test_variable'), 'l1-main')
# Middle role must have variables from outer role, but not inner
elif role == 'l2':
self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
self.assertEqual(task_vars.get('l2_variable'), 'l2-main')
self.assertEqual(task_vars.get('l3_variable'), None)
self.assertEqual(task_vars.get('test_variable'), 'l2-main')
# Inner role must have variables from both outer roles
elif role == 'l3':
self.assertEqual(task_vars.get('l1_variable'), 'l1-main')
self.assertEqual(task_vars.get('l2_variable'), 'l2-main')
self.assertEqual(task_vars.get('l3_variable'), 'l3-main')
self.assertEqual(task_vars.get('test_variable'), 'l3-main')
else:
self.fail() # pragma: nocover
self.assertFalse(expected_roles)
@patch('ansible.playbook.role.definition.unfrackpath',
mock_unfrackpath_noop)
def test_nested_alt_files(self):
"""
Test nested includes with alternative tasks and variables.
Variables from outer roles should be inherited, but overridden in inner
roles.
"""
play = Play.load(dict(
name="test play",
hosts=['foo'],
gather_facts=False,
tasks=[
{'include_role': 'name=l1 tasks_from=alt defaults_from=alt'}
]
), loader=self.loader, variable_manager=self.var_manager)
tasks = play.compile()
expected_roles = ['l1', 'l2', 'l3']
for role, task_vars in self.get_tasks_vars(play, tasks):
expected_roles.remove(role)
# Outer-most role must not have variables from inner roles yet
if role == 'l1':
self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
self.assertEqual(task_vars.get('l2_variable'), None)
self.assertEqual(task_vars.get('l3_variable'), None)
self.assertEqual(task_vars.get('test_variable'), 'l1-alt')
# Middle role must have variables from outer role, but not inner
elif role == 'l2':
self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')
self.assertEqual(task_vars.get('l3_variable'), None)
self.assertEqual(task_vars.get('test_variable'), 'l2-alt')
# Inner role must have variables from both outer roles
elif role == 'l3':
self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')
self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')
self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')
self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
else:
self.fail() # pragma: nocover
self.assertFalse(expected_roles)
| 9,623
|
Python
|
.py
| 214
| 33.378505
| 82
| 0.577735
|
ansible/ansible
| 62,258
| 23,791
| 861
|
GPL-3.0
|
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
|
13,775
|
test_role.py
|
ansible_ansible/test/units/playbook/role/test_role.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from collections.abc import Container
import pytest
import unittest
from unittest.mock import patch, MagicMock
from ansible.errors import AnsibleParserError
from ansible.playbook.block import Block
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
from ansible.playbook.role import Role
from ansible.playbook.role.include import RoleInclude
from ansible.playbook.role import hash_params
class TestHashParams(unittest.TestCase):
def test(self):
params = {'foo': 'bar'}
res = hash_params(params)
self._assert_set(res)
self._assert_hashable(res)
@staticmethod
def _assert_hashable(res):
hash(res)
def _assert_set(self, res):
self.assertIsInstance(res, frozenset)
def test_dict_tuple(self):
params = {'foo': (1, 'bar',)}
res = hash_params(params)
self._assert_set(res)
def test_tuple(self):
params = (1, None, 'foo')
res = hash_params(params)
self._assert_hashable(res)
def test_tuple_dict(self):
params = ({'foo': 'bar'}, 37)
res = hash_params(params)
self._assert_hashable(res)
def test_list(self):
params = ['foo', 'bar', 1, 37, None]
res = hash_params(params)
self._assert_set(res)
self._assert_hashable(res)
def test_dict_with_list_value(self):
params = {'foo': [1, 4, 'bar']}
res = hash_params(params)
self._assert_set(res)
self._assert_hashable(res)
def test_empty_set(self):
params = set([])
res = hash_params(params)
self._assert_hashable(res)
self._assert_set(res)
def test_generator(self):
def my_generator():
yield
params = my_generator()
res = hash_params(params)
self._assert_hashable(res)
assert list(params)
def test_container_but_not_iterable(self):
# This is a Container that is not iterable, which is unlikely but...
class MyContainer(Container):
def __init__(self, _some_thing):
pass
def __contains__(self, item):
"""Implementation omitted, since it will never be called."""
params = MyContainer('foo bar')
with pytest.raises(TypeError) as ex:
hash_params(params)
assert ex.value.args == ("'MyContainer' object is not iterable",)
def test_param_dict_dupe_values(self):
params1 = {'foo': False}
params2 = {'bar': False}
res1 = hash_params(params1)
res2 = hash_params(params2)
hash1 = hash(res1)
hash2 = hash(res2)
self.assertNotEqual(res1, res2)
self.assertNotEqual(hash1, hash2)
def test_param_dupe(self):
params1 = {
# 'from_files': {},
'tags': [],
u'testvalue': False,
u'testvalue2': True,
# 'when': []
}
params2 = {
# 'from_files': {},
'tags': [],
u'testvalue': True,
u'testvalue2': False,
# 'when': []
}
res1 = hash_params(params1)
res2 = hash_params(params2)
self.assertNotEqual(hash(res1), hash(res2))
self.assertNotEqual(res1, res2)
params_dict = {}
params_dict[res1] = 'params1'
params_dict[res2] = 'params2'
self.assertEqual(len(params_dict), 2)
del params_dict[res2]
self.assertEqual(len(params_dict), 1)
for key in params_dict:
self.assertTrue(key in params_dict)
self.assertIn(key, params_dict)
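# Minimal illustration (not part of the suite), mirroring the dict-key usage in
# test_param_dupe above: because hash_params() returns a hashable frozenset,
# equal parameter dicts collapse to the same dictionary key.
def _demo_hash_params_as_key():
    seen = {}
    seen[hash_params({'foo': 'bar'})] = 'first'
    seen[hash_params({'foo': 'bar'})] = 'second'
    assert len(seen) == 1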
class TestRole(unittest.TestCase):
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_tasks(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_tasks/tasks/main.yml": """
- shell: echo 'hello world'
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_tasks', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(str(r), 'foo_tasks')
self.assertEqual(len(r._task_blocks), 1)
assert isinstance(r._task_blocks[0], Block)
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_tasks_dir_vs_file(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_tasks/tasks/custom_main/foo.yml": """
- command: bar
""",
"/etc/ansible/roles/foo_tasks/tasks/custom_main.yml": """
- command: baz
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_tasks', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play, from_files=dict(tasks='custom_main'))
self.assertEqual(r._task_blocks[0]._ds[0]['command'], 'baz')
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_handlers(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_handlers/handlers/main.yml": """
- name: test handler
shell: echo 'hello world'
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_handlers', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(len(r._handler_blocks), 1)
assert isinstance(r._handler_blocks[0], Block)
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_vars(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_vars/defaults/main.yml": """
foo: bar
""",
"/etc/ansible/roles/foo_vars/vars/main.yml": """
foo: bam
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r._default_vars, dict(foo='bar'))
self.assertEqual(r._role_vars, dict(foo='bam'))
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_vars_dirs(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_vars/defaults/main/foo.yml": """
foo: bar
""",
"/etc/ansible/roles/foo_vars/vars/main/bar.yml": """
foo: bam
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r._default_vars, dict(foo='bar'))
self.assertEqual(r._role_vars, dict(foo='bam'))
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_vars_nested_dirs(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_vars/defaults/main/foo/bar.yml": """
foo: bar
""",
"/etc/ansible/roles/foo_vars/vars/main/bar/foo.yml": """
foo: bam
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r._default_vars, dict(foo='bar'))
self.assertEqual(r._role_vars, dict(foo='bam'))
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_vars_nested_dirs_combined(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_vars/defaults/main/foo/bar.yml": """
foo: bar
a: 1
""",
"/etc/ansible/roles/foo_vars/defaults/main/bar/foo.yml": """
foo: bam
b: 2
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r._default_vars, dict(foo='bar', a=1, b=2))
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_vars_dir_vs_file(self):
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_vars/vars/main/foo.yml": """
foo: bar
""",
"/etc/ansible/roles/foo_vars/vars/main.yml": """
foo: bam
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r._role_vars, dict(foo='bam'))
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_with_metadata(self):
fake_loader = DictDataLoader({
'/etc/ansible/roles/foo_metadata/meta/main.yml': """
allow_duplicates: true
dependencies:
- bar_metadata
galaxy_info:
a: 1
b: 2
c: 3
""",
'/etc/ansible/roles/bar_metadata/meta/main.yml': """
dependencies:
- baz_metadata
""",
'/etc/ansible/roles/baz_metadata/meta/main.yml': """
dependencies:
- bam_metadata
""",
'/etc/ansible/roles/bam_metadata/meta/main.yml': """
dependencies: []
""",
'/etc/ansible/roles/bad1_metadata/meta/main.yml': """
1
""",
'/etc/ansible/roles/bad2_metadata/meta/main.yml': """
foo: bar
""",
'/etc/ansible/roles/recursive1_metadata/meta/main.yml': """
dependencies: ['recursive2_metadata']
""",
'/etc/ansible/roles/recursive2_metadata/meta/main.yml': """
dependencies: ['recursive1_metadata']
""",
})
mock_play = MagicMock()
mock_play.collections = None
mock_play.role_cache = {}
i = RoleInclude.load('foo_metadata', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
role_deps = r.get_direct_dependencies()
self.assertEqual(len(role_deps), 1)
self.assertEqual(type(role_deps[0]), Role)
self.assertEqual(len(role_deps[0].get_parents()), 1)
self.assertEqual(role_deps[0].get_parents()[0], r)
self.assertEqual(r._metadata.allow_duplicates, True)
self.assertEqual(r._metadata.galaxy_info, dict(a=1, b=2, c=3))
all_deps = r.get_all_dependencies()
self.assertEqual(len(all_deps), 3)
self.assertEqual(all_deps[0].get_name(), 'bam_metadata')
self.assertEqual(all_deps[1].get_name(), 'baz_metadata')
self.assertEqual(all_deps[2].get_name(), 'bar_metadata')
i = RoleInclude.load('bad1_metadata', play=mock_play, loader=fake_loader)
self.assertRaises(AnsibleParserError, Role.load, i, play=mock_play)
i = RoleInclude.load('bad2_metadata', play=mock_play, loader=fake_loader)
self.assertRaises(AnsibleParserError, Role.load, i, play=mock_play)
# TODO: re-enable this test once Ansible has proper role dep cycle detection
# that doesn't rely on stack overflows being recoverable (as they aren't in Py3.7+)
# see https://github.com/ansible/ansible/issues/61527
# i = RoleInclude.load('recursive1_metadata', play=mock_play, loader=fake_loader)
# self.assertRaises(AnsibleError, Role.load, i, play=mock_play)
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_load_role_complex(self):
# FIXME: add tests for the more complex uses of
# params and tags/when statements
fake_loader = DictDataLoader({
"/etc/ansible/roles/foo_complex/tasks/main.yml": """
- shell: echo 'hello world'
""",
})
mock_play = MagicMock()
mock_play.role_cache = {}
i = RoleInclude.load(dict(role='foo_complex'), play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r.get_name(), "foo_complex")
| 13,480 | Python | .py | 321 | 32.392523 | 91 | 0.595424 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,776 | test_manager.py | ansible_ansible/test/units/config/test_manager.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import os.path
import pytest
from ansible.config.manager import ConfigManager, ensure_type, resolve_path, get_config_type
from ansible.errors import AnsibleOptionsError, AnsibleError
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
curdir = os.path.dirname(__file__)
cfg_file = os.path.join(curdir, 'test.cfg')
cfg_file2 = os.path.join(curdir, 'test2.cfg')
cfg_file3 = os.path.join(curdir, 'test3.cfg')
ensure_test_data = [
('a,b', 'list', list),
(['a', 'b'], 'list', list),
('y', 'bool', bool),
('yes', 'bool', bool),
('on', 'bool', bool),
('1', 'bool', bool),
('true', 'bool', bool),
('t', 'bool', bool),
(1, 'bool', bool),
(1.0, 'bool', bool),
(True, 'bool', bool),
('n', 'bool', bool),
('no', 'bool', bool),
('off', 'bool', bool),
('0', 'bool', bool),
('false', 'bool', bool),
('f', 'bool', bool),
(0, 'bool', bool),
(0.0, 'bool', bool),
(False, 'bool', bool),
('10', 'int', int),
(20, 'int', int),
('0.10', 'float', float),
(0.2, 'float', float),
('/tmp/test.yml', 'pathspec', list),
('/tmp/test.yml,/home/test2.yml', 'pathlist', list),
('a', 'str', str),
('a', 'string', str),
('Café', 'string', str),
('', 'string', str),
('29', 'str', str),
('13.37', 'str', str),
('123j', 'string', str),
('0x123', 'string', str),
('true', 'string', str),
('True', 'string', str),
(0, 'str', str),
(29, 'str', str),
(13.37, 'str', str),
(123j, 'string', str),
(0x123, 'string', str),
(True, 'string', str),
('None', 'none', type(None))
]
ensure_unquoting_test_data = [
('"value"', '"value"', 'str', 'env: ENVVAR', None),
('"value"', '"value"', 'str', os.path.join(curdir, 'test.yml'), 'yaml'),
('"value"', 'value', 'str', cfg_file, 'ini'),
('\'value\'', 'value', 'str', cfg_file, 'ini'),
('\'\'value\'\'', '\'value\'', 'str', cfg_file, 'ini'),
('""value""', '"value"', 'str', cfg_file, 'ini')
]
class TestConfigManager:
@classmethod
def setup_class(cls):
cls.manager = ConfigManager(cfg_file, os.path.join(curdir, 'test.yml'))
@classmethod
def teardown_class(cls):
cls.manager = None
@pytest.mark.parametrize("value, expected_type, python_type", ensure_test_data)
def test_ensure_type(self, value, expected_type, python_type):
assert isinstance(ensure_type(value, expected_type), python_type)
@pytest.mark.parametrize("value, expected_value, value_type, origin, origin_ftype", ensure_unquoting_test_data)
def test_ensure_type_unquoting(self, value, expected_value, value_type, origin, origin_ftype):
actual_value = ensure_type(value, value_type, origin, origin_ftype)
assert actual_value == expected_value
def test_resolve_path(self):
assert os.path.join(curdir, 'test.yml') == resolve_path('./test.yml', cfg_file)
def test_resolve_path_cwd(self):
assert os.path.join(os.getcwd(), 'test.yml') == resolve_path('{{CWD}}/test.yml')
assert os.path.join(os.getcwd(), 'test.yml') == resolve_path('./test.yml')
def test_value_and_origin_from_ini(self):
assert self.manager.get_config_value_and_origin('config_entry') == ('fromini', cfg_file)
def test_value_from_ini(self):
assert self.manager.get_config_value('config_entry') == 'fromini'
def test_value_and_origin_from_alt_ini(self):
assert self.manager.get_config_value_and_origin('config_entry', cfile=cfg_file2) == ('fromini2', cfg_file2)
def test_value_from_alt_ini(self):
assert self.manager.get_config_value('config_entry', cfile=cfg_file2) == 'fromini2'
def test_config_types(self):
assert get_config_type('/tmp/ansible.ini') == 'ini'
assert get_config_type('/tmp/ansible.cfg') == 'ini'
assert get_config_type('/tmp/ansible.yaml') == 'yaml'
assert get_config_type('/tmp/ansible.yml') == 'yaml'
def test_config_types_negative(self):
with pytest.raises(AnsibleOptionsError) as exec_info:
get_config_type('/tmp/ansible.txt')
assert "Unsupported configuration file extension for" in str(exec_info.value)
def test_read_config_yaml_file(self):
assert isinstance(self.manager._read_config_yaml_file(os.path.join(curdir, 'test.yml')), dict)
def test_read_config_yaml_file_negative(self):
with pytest.raises(AnsibleError) as exec_info:
self.manager._read_config_yaml_file(os.path.join(curdir, 'test_non_existent.yml'))
assert "Missing base YAML definition file (bad install?)" in str(exec_info.value)
def test_entry_as_vault_var(self):
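        # MockVault stands in for a real vault object: decrypt() simply hands back the stored text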
class MockVault:
def decrypt(self, value, filename=None, obj=None):
return value
vault_var = AnsibleVaultEncryptedUnicode(b"vault text")
vault_var.vault = MockVault()
actual_value, actual_origin = self.manager._loop_entries({'name': vault_var}, [{'name': 'name'}])
assert actual_value == "vault text"
assert actual_origin == "name"
@pytest.mark.parametrize("value_type", ("str", "string", None))
def test_ensure_type_with_vaulted_str(self, value_type):
class MockVault:
def decrypt(self, value, filename=None, obj=None):
return value
vault_var = AnsibleVaultEncryptedUnicode(b"vault text")
vault_var.vault = MockVault()
actual_value = ensure_type(vault_var, value_type)
assert actual_value == "vault text"
@pytest.mark.parametrize(("key", "expected_value"), (
("COLOR_UNREACHABLE", "bright red"),
("COLOR_VERBOSE", "rgb013"),
("COLOR_DEBUG", "gray10")))
def test_256color_support(key, expected_value):
# GIVEN: a config file containing 256-color values with default definitions
manager = ConfigManager(cfg_file3)
# WHEN: get config values
actual_value = manager.get_config_value(key)
# THEN: no error
assert actual_value == expected_value
| 6,215 | Python | .py | 138 | 39.086957 | 115 | 0.627357 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,777 | test_find_ini_config_file.py | ansible_ansible/test/units/config/manager/test_find_ini_config_file.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import os.path
import stat
import pytest
from ansible.config.manager import find_ini_config_file
from ansible.module_utils.common.text.converters import to_text
real_exists = os.path.exists
real_isdir = os.path.isdir
working_dir = os.path.dirname(__file__)
cfg_in_cwd = os.path.join(working_dir, 'ansible.cfg')
cfg_dir = os.path.join(working_dir, 'data')
cfg_file = os.path.join(cfg_dir, 'ansible.cfg')
alt_cfg_file = os.path.join(cfg_dir, 'test.cfg')
cfg_in_homedir = os.path.expanduser('~/.ansible.cfg')
@pytest.fixture
def setup_env(request, monkeypatch):
cur_config = os.environ.get('ANSIBLE_CONFIG', None)
cfg_path = request.param[0]
if cfg_path is None and cur_config:
monkeypatch.delenv('ANSIBLE_CONFIG')
else:
monkeypatch.setenv('ANSIBLE_CONFIG', request.param[0])
yield
@pytest.fixture
def setup_existing_files(request, monkeypatch):
def _os_path_exists(path):
if to_text(path) in (request.param[0]):
return True
else:
return False
def _os_access(path, access):
assert to_text(path) in (request.param[0])
return True
# Enable user and system dirs so that we know cwd takes precedence
monkeypatch.setattr("os.path.exists", _os_path_exists)
monkeypatch.setattr("os.access", _os_access)
monkeypatch.setattr("os.getcwd", lambda: os.path.dirname(cfg_dir))
monkeypatch.setattr("os.path.isdir", lambda path: True if to_text(path) == cfg_dir else real_isdir(path))
class TestFindIniFile:
# This tells us to run twice, once with a file specified and once with a directory
@pytest.mark.parametrize('setup_env, expected', (([alt_cfg_file], alt_cfg_file), ([cfg_dir], cfg_file)), indirect=['setup_env'])
# This just passes the list of files that exist to the fixture
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, alt_cfg_file, cfg_file)]],
indirect=['setup_existing_files'])
def test_env_has_cfg_file(self, setup_env, setup_existing_files, expected):
"""ANSIBLE_CONFIG is specified, use it"""
warnings = set()
assert find_ini_config_file(warnings) == expected
assert warnings == set()
@pytest.mark.parametrize('setup_env', ([alt_cfg_file], [cfg_dir]), indirect=['setup_env'])
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd)]],
indirect=['setup_existing_files'])
def test_env_has_no_cfg_file(self, setup_env, setup_existing_files):
"""ANSIBLE_CONFIG is specified but the file does not exist"""
warnings = set()
# since the cfg file specified by ANSIBLE_CONFIG doesn't exist, the one at cwd that does
# exist should be returned
assert find_ini_config_file(warnings) == cfg_in_cwd
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_ini_in_cwd(self, setup_env, setup_existing_files):
"""ANSIBLE_CONFIG not specified. Use the cwd cfg"""
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_cwd
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_ini_in_homedir(self, setup_env, setup_existing_files):
"""First config found is in the homedir"""
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files', [[('/etc/ansible/ansible.cfg', cfg_file, alt_cfg_file)]], indirect=['setup_existing_files'])
def test_ini_in_systemdir(self, setup_env, setup_existing_files):
"""First config found is the system config"""
warnings = set()
assert find_ini_config_file(warnings) == '/etc/ansible/ansible.cfg'
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_cwd_does_not_exist(self, setup_env, setup_existing_files, monkeypatch):
"""Smoketest current working directory doesn't exist"""
def _os_stat(path):
raise OSError('%s does not exist' % path)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert warnings == set()
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files', [[list()]], indirect=['setup_existing_files'])
def test_no_config(self, setup_env, setup_existing_files):
"""No config present, no config found"""
warnings = set()
assert find_ini_config_file(warnings) is None
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present except in cwd
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_no_cwd_cfg_no_warning_on_writable(self, setup_env, setup_existing_files, monkeypatch):
"""If the cwd is writable but there is no config file there, move on with no warning"""
real_stat = os.stat
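        # fake os.stat so the cwd appears world-writable (S_IWOTH) without touching the real directory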
def _os_stat(path):
assert path == working_dir
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert len(warnings) == 0
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_cwd_warning_on_writable(self, setup_env, setup_existing_files, monkeypatch):
"""If the cwd is writable, warn and skip it """
real_stat = os.stat
def _os_stat(path):
assert path == working_dir
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert len(warnings) == 1
warning = warnings.pop()
assert u'Ansible is being run in a world writable directory' in warning
assert u'ignoring it as an ansible.cfg source' in warning
    # ANSIBLE_CONFIG is specified
@pytest.mark.parametrize('setup_env, expected', (([alt_cfg_file], alt_cfg_file), ([cfg_in_cwd], cfg_in_cwd)), indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_no_warning_on_writable_if_env_used(self, setup_env, setup_existing_files, monkeypatch, expected):
"""If the cwd is writable but ANSIBLE_CONFIG was used, no warning should be issued"""
real_stat = os.stat
def _os_stat(path):
if path != working_dir:
return real_stat(path)
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == expected
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_cwd_warning_on_writable_no_warning_set(self, setup_env, setup_existing_files, monkeypatch):
"""Smoketest that the function succeeds even though no warning set was passed in"""
real_stat = os.stat
def _os_stat(path):
assert path == working_dir
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
monkeypatch.setattr('os.stat', _os_stat)
assert find_ini_config_file() == cfg_in_homedir
| 10,402 | Python | .py | 196 | 43.663265 | 145 | 0.631936 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,778 | conftest.py | ansible_ansible/test/units/utils/conftest.py |
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.utils.display import Display
@pytest.fixture()
def display_resource(request):
Display._Singleton__instance = None
def teardown():
Display._Singleton__instance = None
request.addfinalizer(teardown)
| 428 | Python | .py | 11 | 35.454545 | 92 | 0.768293 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,779 | test_unsafe_proxy.py | ansible_ansible/test/units/utils/test_unsafe_proxy.py |
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText, wrap_var
from ansible.module_utils.common.text.converters import to_text, to_bytes
def test_wrap_var_text():
assert isinstance(wrap_var(u'foo'), AnsibleUnsafeText)
def test_wrap_var_bytes():
assert isinstance(wrap_var(b'foo'), AnsibleUnsafeBytes)
def test_wrap_var_string():
assert isinstance(wrap_var('foo'), AnsibleUnsafeText)
def test_wrap_var_dict():
assert isinstance(wrap_var(dict(foo='bar')), dict)
assert not isinstance(wrap_var(dict(foo='bar')), AnsibleUnsafe)
assert isinstance(wrap_var(dict(foo=u'bar'))['foo'], AnsibleUnsafeText)
def test_wrap_var_dict_None():
assert wrap_var(dict(foo=None))['foo'] is None
assert not isinstance(wrap_var(dict(foo=None))['foo'], AnsibleUnsafe)
def test_wrap_var_list():
assert isinstance(wrap_var(['foo']), list)
assert not isinstance(wrap_var(['foo']), AnsibleUnsafe)
assert isinstance(wrap_var([u'foo'])[0], AnsibleUnsafeText)
def test_wrap_var_list_None():
assert wrap_var([None])[0] is None
assert not isinstance(wrap_var([None])[0], AnsibleUnsafe)
def test_wrap_var_set():
assert isinstance(wrap_var(set(['foo'])), set)
assert not isinstance(wrap_var(set(['foo'])), AnsibleUnsafe)
for item in wrap_var(set([u'foo'])):
assert isinstance(item, AnsibleUnsafeText)
def test_wrap_var_set_None():
for item in wrap_var(set([None])):
assert item is None
assert not isinstance(item, AnsibleUnsafe)
def test_wrap_var_tuple():
assert isinstance(wrap_var(('foo',)), tuple)
assert not isinstance(wrap_var(('foo',)), AnsibleUnsafe)
assert isinstance(wrap_var(('foo',))[0], AnsibleUnsafe)
def test_wrap_var_tuple_None():
assert wrap_var((None,))[0] is None
assert not isinstance(wrap_var((None,))[0], AnsibleUnsafe)
def test_wrap_var_None():
assert wrap_var(None) is None
assert not isinstance(wrap_var(None), AnsibleUnsafe)
def test_wrap_var_unsafe_text():
assert isinstance(wrap_var(AnsibleUnsafeText(u'foo')), AnsibleUnsafeText)
def test_wrap_var_unsafe_bytes():
assert isinstance(wrap_var(AnsibleUnsafeBytes(b'foo')), AnsibleUnsafeBytes)
def test_wrap_var_no_ref():
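    # wrap_var must return new container objects rather than sharing references, except for None which passes through untouched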
thing = {
'foo': {
'bar': 'baz'
},
'bar': ['baz', 'qux'],
'baz': ('qux',),
'none': None,
'text': 'text',
}
wrapped_thing = wrap_var(thing)
assert thing is not wrapped_thing
assert thing['foo'] is not wrapped_thing['foo']
assert thing['bar'][0] is not wrapped_thing['bar'][0]
assert thing['baz'][0] is not wrapped_thing['baz'][0]
assert thing['none'] is wrapped_thing['none']
assert thing['text'] is not wrapped_thing['text']
def test_AnsibleUnsafeText():
assert isinstance(AnsibleUnsafeText(u'foo'), AnsibleUnsafe)
def test_AnsibleUnsafeBytes():
assert isinstance(AnsibleUnsafeBytes(b'foo'), AnsibleUnsafe)
def test_to_text_unsafe():
assert isinstance(to_text(AnsibleUnsafeBytes(b'foo')), AnsibleUnsafeText)
assert to_text(AnsibleUnsafeBytes(b'foo')) == AnsibleUnsafeText(u'foo')
def test_to_bytes_unsafe():
assert isinstance(to_bytes(AnsibleUnsafeText(u'foo')), AnsibleUnsafeBytes)
assert to_bytes(AnsibleUnsafeText(u'foo')) == AnsibleUnsafeBytes(b'foo')
| 3,534 | Python | .py | 76 | 41.710526 | 101 | 0.703335 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,780 | test_cleanup_tmp_file.py | ansible_ansible/test/units/utils/test_cleanup_tmp_file.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import tempfile
from ansible.utils.path import cleanup_tmp_file
def test_cleanup_tmp_file_file():
tmp_fd, tmp = tempfile.mkstemp()
cleanup_tmp_file(tmp)
assert not os.path.exists(tmp)
def test_cleanup_tmp_file_dir():
tmp = tempfile.mkdtemp()
cleanup_tmp_file(tmp)
assert not os.path.exists(tmp)
def test_cleanup_tmp_file_nonexistant():
assert None is cleanup_tmp_file('nope')
def test_cleanup_tmp_file_failure(mocker, capsys):
tmp = tempfile.mkdtemp()
rmtree = mocker.patch('shutil.rmtree', side_effect=OSError('test induced failure'))
cleanup_tmp_file(tmp)
out, err = capsys.readouterr()
assert out == ''
assert err == ''
rmtree.assert_called_once()
def test_cleanup_tmp_file_failure_warning(mocker, capsys):
tmp = tempfile.mkdtemp()
rmtree = mocker.patch('shutil.rmtree', side_effect=OSError('test induced failure'))
cleanup_tmp_file(tmp, warn=True)
out, err = capsys.readouterr()
assert out == 'Unable to remove temporary file test induced failure\n'
assert err == ''
rmtree.assert_called_once()
| 1,299 | Python | .py | 33 | 35.363636 | 92 | 0.71303 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,781 | test_isidentifier.py | ansible_ansible/test/units/utils/test_isidentifier.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.utils.vars import isidentifier
# Originally posted at: http://stackoverflow.com/a/29586366
@pytest.mark.parametrize(
"identifier", [
"foo", "foo1_23",
]
)
def test_valid_identifier(identifier):
assert isidentifier(identifier)
@pytest.mark.parametrize(
"identifier", [
"pass", "foo ", " foo", "1234", "1234abc", "", " ", "foo bar", "no-dashed-names-for-you",
"True", "False", "None"
]
)
def test_invalid_identifier(identifier):
assert not isidentifier(identifier)
def test_non_ascii():
"""In Python 3 non-ascii characters are allowed as opposed to Python 2. The
isidentifier method ensures that those are treated as keywords on both
Python 2 and 3.
"""
assert not isidentifier("křížek")
| 987 | Python | .py | 28 | 31.321429 | 99 | 0.69418 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,782 | test_plugin_docs.py | ansible_ansible/test/units/utils/test_plugin_docs.py |
# -*- coding: utf-8 -*-
# (c) 2020 Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import copy
import pytest
from ansible.utils.plugin_docs import (
add_collection_to_versions_and_dates,
)
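# Each entry is (is_module, return_docs, input fragment, expected fragment after adding collection 'foo.bar')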
ADD_TESTS = [
(
# Module options
True,
False,
{
'author': 'x',
'version_added': '1.0.0',
'deprecated': {
'removed_in': '2.0.0',
},
'options': {
'test': {
'description': '',
'type': 'str',
'version_added': '1.1.0',
'deprecated': {
# should not be touched since this isn't a plugin
'removed_in': '2.0.0',
},
'env': [
# should not be touched since this isn't a plugin
{
'version_added': '1.3.0',
'deprecated': {
'version': '2.0.0',
},
},
],
'ini': [
# should not be touched since this isn't a plugin
{
'version_added': '1.3.0',
'deprecated': {
'version': '2.0.0',
},
},
],
'vars': [
# should not be touched since this isn't a plugin
{
'version_added': '1.3.0',
'deprecated': {
'removed_at_date': '2020-01-01',
},
},
],
},
'subtest': {
'description': '',
'type': 'dict',
'deprecated': {
# should not be touched since this isn't a plugin
'version': '2.0.0',
},
'suboptions': {
'suboption': {
'description': '',
'type': 'int',
'version_added': '1.2.0',
}
},
}
},
},
{
'author': 'x',
'version_added': '1.0.0',
'version_added_collection': 'foo.bar',
'deprecated': {
'removed_in': '2.0.0',
'removed_from_collection': 'foo.bar',
},
'options': {
'test': {
'description': '',
'type': 'str',
'version_added': '1.1.0',
'version_added_collection': 'foo.bar',
'deprecated': {
# should not be touched since this isn't a plugin
'removed_in': '2.0.0',
},
'env': [
# should not be touched since this isn't a plugin
{
'version_added': '1.3.0',
'deprecated': {
'version': '2.0.0',
},
},
],
'ini': [
# should not be touched since this isn't a plugin
{
'version_added': '1.3.0',
'deprecated': {
'version': '2.0.0',
},
},
],
'vars': [
# should not be touched since this isn't a plugin
{
'version_added': '1.3.0',
'deprecated': {
'removed_at_date': '2020-01-01',
},
},
],
},
'subtest': {
'description': '',
'type': 'dict',
'deprecated': {
# should not be touched since this isn't a plugin
'version': '2.0.0',
},
'suboptions': {
'suboption': {
'description': '',
'type': 'int',
'version_added': '1.2.0',
'version_added_collection': 'foo.bar',
}
},
}
},
},
),
(
# Module options
True,
False,
{
'author': 'x',
'deprecated': {
'removed_at_date': '2020-01-01',
},
},
{
'author': 'x',
'deprecated': {
'removed_at_date': '2020-01-01',
'removed_from_collection': 'foo.bar',
},
},
),
(
# Plugin options
False,
False,
{
'author': 'x',
'version_added': '1.0.0',
'deprecated': {
'removed_in': '2.0.0',
},
'options': {
'test': {
'description': '',
'type': 'str',
'version_added': '1.1.0',
'deprecated': {
# should not be touched since this is the wrong name
'removed_in': '2.0.0',
},
'env': [
{
'version_added': '1.3.0',
'deprecated': {
'version': '2.0.0',
},
},
],
'ini': [
{
'version_added': '1.3.0',
'deprecated': {
'version': '2.0.0',
},
},
],
'vars': [
{
'version_added': '1.3.0',
'deprecated': {
'removed_at_date': '2020-01-01',
},
},
],
},
'subtest': {
'description': '',
'type': 'dict',
'deprecated': {
'version': '2.0.0',
},
'suboptions': {
'suboption': {
'description': '',
'type': 'int',
'version_added': '1.2.0',
}
},
}
},
},
{
'author': 'x',
'version_added': '1.0.0',
'version_added_collection': 'foo.bar',
'deprecated': {
'removed_in': '2.0.0',
'removed_from_collection': 'foo.bar',
},
'options': {
'test': {
'description': '',
'type': 'str',
'version_added': '1.1.0',
'version_added_collection': 'foo.bar',
'deprecated': {
# should not be touched since this is the wrong name
'removed_in': '2.0.0',
},
'env': [
{
'version_added': '1.3.0',
'version_added_collection': 'foo.bar',
'deprecated': {
'version': '2.0.0',
'collection_name': 'foo.bar',
},
},
],
'ini': [
{
'version_added': '1.3.0',
'version_added_collection': 'foo.bar',
'deprecated': {
'version': '2.0.0',
'collection_name': 'foo.bar',
},
},
],
'vars': [
{
'version_added': '1.3.0',
'version_added_collection': 'foo.bar',
'deprecated': {
'removed_at_date': '2020-01-01',
'collection_name': 'foo.bar',
},
},
],
},
'subtest': {
'description': '',
'type': 'dict',
'deprecated': {
'version': '2.0.0',
'collection_name': 'foo.bar',
},
'suboptions': {
'suboption': {
'description': '',
'type': 'int',
'version_added': '1.2.0',
'version_added_collection': 'foo.bar',
}
},
}
},
},
),
(
# Return values
True, # this value is ignored
True,
{
'rv1': {
'version_added': '1.0.0',
'type': 'dict',
'contains': {
'srv1': {
'version_added': '1.1.0',
},
'srv2': {
},
}
},
},
{
'rv1': {
'version_added': '1.0.0',
'version_added_collection': 'foo.bar',
'type': 'dict',
'contains': {
'srv1': {
'version_added': '1.1.0',
'version_added_collection': 'foo.bar',
},
'srv2': {
},
}
},
},
),
]
@pytest.mark.parametrize('is_module,return_docs,fragment,expected_fragment', ADD_TESTS)
def test_add(is_module, return_docs, fragment, expected_fragment):
fragment_copy = copy.deepcopy(fragment)
add_collection_to_versions_and_dates(fragment_copy, 'foo.bar', is_module, return_docs)
assert fragment_copy == expected_fragment
| 11,253 | Python | .py | 324 | 14.830247 | 92 | 0.279004 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,783 | test_listify.py | ansible_ansible/test/units/utils/test_listify.py |
# Copyright: Contributors to the Ansible project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from units.mock.loader import DictDataLoader
@pytest.mark.parametrize(
("test_input", "expected"),
[
pytest.param(
[],
[],
id="empty-list",
),
pytest.param(
"foo",
["foo"],
id="string-types",
),
pytest.param(
["foo"],
["foo"],
id="list-types",
),
],
)
def test_listify_lookup_plugin_terms(test_input, expected):
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
terms = listify_lookup_plugin_terms(
test_input, templar=templar, fail_on_undefined=False
)
assert terms == expected
def test_negative_listify_lookup_plugin_terms():
fake_loader = DictDataLoader({})
templar = Templar(loader=fake_loader)
with pytest.raises(TypeError, match=".*got an unexpected keyword argument 'loader'"):
listify_lookup_plugin_terms(
"foo", templar=templar, loader=fake_loader, fail_on_undefined=False
)
| 1,341 | Python | .py | 41 | 25.707317 | 92 | 0.634109 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,784 | test_vars.py | ansible_ansible/test/units/utils/test_vars.py |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
from collections import defaultdict
from unittest import mock
import unittest
from ansible.errors import AnsibleError
from ansible.utils.vars import combine_vars, merge_hash
from ansible.vars.manager import VarsWithSources
class TestVariableUtils(unittest.TestCase):
combine_vars_merge_data = (
dict(
a=dict(a=1),
b=dict(b=2),
result=dict(a=1, b=2),
),
dict(
a=dict(a=1),
b=VarsWithSources().new_vars_with_sources(dict(b=2), dict(b='task vars')),
result=dict(a=1, b=2),
),
dict(
a=dict(a=1, c=dict(foo='bar')),
b=dict(b=2, c=dict(baz='bam')),
result=dict(a=1, b=2, c=dict(foo='bar', baz='bam'))
),
dict(
a=defaultdict(a=1, c=defaultdict(foo='bar')),
b=dict(b=2, c=dict(baz='bam')),
result=defaultdict(a=1, b=2, c=defaultdict(foo='bar', baz='bam'))
),
)
combine_vars_replace_data = (
dict(
a=dict(a=1),
b=dict(b=2),
result=dict(a=1, b=2)
),
dict(
a=dict(a=1),
b=VarsWithSources().new_vars_with_sources(dict(b=2), dict(b='task vars')),
result=dict(a=1, b=2),
),
dict(
a=dict(a=1, c=dict(foo='bar')),
b=dict(b=2, c=dict(baz='bam')),
result=dict(a=1, b=2, c=dict(baz='bam'))
),
dict(
a=defaultdict(a=1, c=dict(foo='bar')),
b=dict(b=2, c=defaultdict(baz='bam')),
result=defaultdict(a=1, b=2, c=defaultdict(baz='bam'))
),
)
def test_combine_vars_improper_args(self):
with mock.patch('ansible.constants.DEFAULT_HASH_BEHAVIOUR', 'replace'):
with self.assertRaises(AnsibleError):
combine_vars([1, 2, 3], dict(a=1))
with self.assertRaises(AnsibleError):
combine_vars(dict(a=1), [1, 2, 3])
with mock.patch('ansible.constants.DEFAULT_HASH_BEHAVIOUR', 'merge'):
with self.assertRaises(AnsibleError):
combine_vars([1, 2, 3], dict(a=1))
with self.assertRaises(AnsibleError):
combine_vars(dict(a=1), [1, 2, 3])
def test_combine_vars_replace(self):
with mock.patch('ansible.constants.DEFAULT_HASH_BEHAVIOUR', 'replace'):
for test in self.combine_vars_replace_data:
self.assertEqual(combine_vars(test['a'], test['b']), test['result'])
def test_combine_vars_merge(self):
with mock.patch('ansible.constants.DEFAULT_HASH_BEHAVIOUR', 'merge'):
for test in self.combine_vars_merge_data:
self.assertEqual(combine_vars(test['a'], test['b']), test['result'])
merge_hash_data = {
"low_prio": {
"a": {
"a'": {
"x": "low_value",
"y": "low_value",
"list": ["low_value"]
}
},
"b": [1, 1, 2, 3]
},
"high_prio": {
"a": {
"a'": {
"y": "high_value",
"z": "high_value",
"list": ["high_value"]
}
},
"b": [3, 4, 4, {"5": "value"}]
}
}
def test_merge_hash_simple(self):
for test in self.combine_vars_merge_data:
self.assertEqual(merge_hash(test['a'], test['b']), test['result'])
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": {
"a'": {
"x": "low_value",
"y": "high_value",
"z": "high_value",
"list": ["high_value"]
}
},
"b": high['b']
}
self.assertEqual(merge_hash(low, high), expected)
def test_merge_hash_non_recursive_and_list_replace(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = high
self.assertEqual(merge_hash(low, high, False, 'replace'), expected)
def test_merge_hash_non_recursive_and_list_keep(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": high['a'],
"b": low['b']
}
self.assertEqual(merge_hash(low, high, False, 'keep'), expected)
def test_merge_hash_non_recursive_and_list_append(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": high['a'],
"b": low['b'] + high['b']
}
self.assertEqual(merge_hash(low, high, False, 'append'), expected)
def test_merge_hash_non_recursive_and_list_prepend(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": high['a'],
"b": high['b'] + low['b']
}
self.assertEqual(merge_hash(low, high, False, 'prepend'), expected)
def test_merge_hash_non_recursive_and_list_append_rp(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": high['a'],
"b": [1, 1, 2] + high['b']
}
self.assertEqual(merge_hash(low, high, False, 'append_rp'), expected)
def test_merge_hash_non_recursive_and_list_prepend_rp(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": high['a'],
"b": high['b'] + [1, 1, 2]
}
self.assertEqual(merge_hash(low, high, False, 'prepend_rp'), expected)
def test_merge_hash_recursive_and_list_replace(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": {
"a'": {
"x": "low_value",
"y": "high_value",
"z": "high_value",
"list": ["high_value"]
}
},
"b": high['b']
}
self.assertEqual(merge_hash(low, high, True, 'replace'), expected)
def test_merge_hash_recursive_and_list_keep(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": {
"a'": {
"x": "low_value",
"y": "high_value",
"z": "high_value",
"list": ["low_value"]
}
},
"b": low['b']
}
self.assertEqual(merge_hash(low, high, True, 'keep'), expected)
def test_merge_hash_recursive_and_list_append(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": {
"a'": {
"x": "low_value",
"y": "high_value",
"z": "high_value",
"list": ["low_value", "high_value"]
}
},
"b": low['b'] + high['b']
}
self.assertEqual(merge_hash(low, high, True, 'append'), expected)
def test_merge_hash_recursive_and_list_prepend(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": {
"a'": {
"x": "low_value",
"y": "high_value",
"z": "high_value",
"list": ["high_value", "low_value"]
}
},
"b": high['b'] + low['b']
}
self.assertEqual(merge_hash(low, high, True, 'prepend'), expected)
def test_merge_hash_recursive_and_list_append_rp(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": {
"a'": {
"x": "low_value",
"y": "high_value",
"z": "high_value",
"list": ["low_value", "high_value"]
}
},
"b": [1, 1, 2] + high['b']
}
self.assertEqual(merge_hash(low, high, True, 'append_rp'), expected)
def test_merge_hash_recursive_and_list_prepend_rp(self):
low = self.merge_hash_data['low_prio']
high = self.merge_hash_data['high_prio']
expected = {
"a": {
"a'": {
"x": "low_value",
"y": "high_value",
"z": "high_value",
"list": ["high_value", "low_value"]
}
},
"b": high['b'] + [1, 1, 2]
}
self.assertEqual(merge_hash(low, high, True, 'prepend_rp'), expected)
| 9,911 | Python | .py | 262 | 26.503817 | 86 | 0.49813 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,785 | test_helpers.py | ansible_ansible/test/units/utils/test_helpers.py |
# (c) 2015, Marius Gedminas <marius@gedmin.as>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from datetime import datetime
from ansible.utils.helpers import pct_to_int, object_to_dict, deduplicate_list
pct_to_int_testdata = [
pytest.param(
1, 100, 1, 1, id="positive_percentage"
),
pytest.param(
-1, 100, 1, -1, id="negative_percentage"
),
pytest.param(
"1%", 10, 1, 1, id="string_percentage"
),
pytest.param(
"1%", 10, 0, 0, id="string_percentage_with_zero_min_value"
),
pytest.param(
"1", 100, 1, 1, id="string_percentage_without_sign"
),
pytest.param(
"10%", 100, 1, 10, id="string_percentage_two_digit"
)
]
@pytest.mark.parametrize("value,num_items,min_value,expected", pct_to_int_testdata)
def test_pct_to_int(value, num_items, min_value, expected):
assert pct_to_int(value, num_items, min_value) == expected
def test_object_to_dict():
test_dict = object_to_dict(datetime(2024, 7, 30))
assert test_dict['day'] == 30
assert test_dict['year'] == 2024
assert test_dict['month'] == 7
test_dict_without_day = object_to_dict(datetime(2024, 7, 30), exclude=['day'])
assert 'day' not in list(test_dict_without_day.keys())
def test_deduplicate_list():
assert deduplicate_list([1, 2, 2, 3]) == [1, 2, 3]
assert deduplicate_list([1, 2, 3]) == [1, 2, 3]
| 1,488 | Python | .py | 39 | 33.435897 | 92 | 0.651114 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,786 | test_context_objects.py | ansible_ansible/test/units/utils/test_context_objects.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Toshio Kuratomi <tkuratomi@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import argparse
import pytest
from ansible.module_utils.common.collections import ImmutableDict
from ansible.utils import context_objects as co
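# Each pair is (mutable input, expected immutable equivalent): dicts become ImmutableDict, lists become tuples, sets become frozensets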
MAKE_IMMUTABLE_DATA = ((u'くらとみ', u'くらとみ'),
(42, 42),
({u'café': u'くらとみ'}, ImmutableDict({u'café': u'くらとみ'})),
([1, u'café', u'くらとみ'], (1, u'café', u'くらとみ')),
(set((1, u'café', u'くらとみ')), frozenset((1, u'café', u'くらとみ'))),
({u'café': [1, set(u'ñ')]},
ImmutableDict({u'café': (1, frozenset(u'ñ'))})),
([set((1, 2)), {u'くらとみ': 3}],
(frozenset((1, 2)), ImmutableDict({u'くらとみ': 3}))),
)
@pytest.mark.parametrize('data, expected', MAKE_IMMUTABLE_DATA)
def test_make_immutable(data, expected):
assert co._make_immutable(data) == expected
def test_cliargs_from_dict():
old_dict = {'tags': [u'production', u'webservers'],
'check_mode': True,
'start_at_task': u'Start with くらとみ'}
expected = frozenset((('tags', (u'production', u'webservers')),
('check_mode', True),
('start_at_task', u'Start with くらとみ')))
assert frozenset(co.CLIArgs(old_dict).items()) == expected
def test_cliargs():
class FakeOptions:
pass
options = FakeOptions()
options.tags = [u'production', u'webservers']
options.check_mode = True
options.start_at_task = u'Start with くらとみ'
expected = frozenset((('tags', (u'production', u'webservers')),
('check_mode', True),
('start_at_task', u'Start with くらとみ')))
assert frozenset(co.CLIArgs.from_options(options).items()) == expected
def test_cliargs_argparse():
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('integers', metavar='N', type=int, nargs='+',
help='an integer for the accumulator')
parser.add_argument('--sum', dest='accumulate', action='store_const',
const=sum, default=max,
help='sum the integers (default: find the max)')
args = parser.parse_args([u'--sum', u'1', u'2'])
expected = frozenset((('accumulate', sum), ('integers', (1, 2))))
assert frozenset(co.CLIArgs.from_options(args).items()) == expected
| 2,759 | Python | .py | 50 | 41.5 | 92 | 0.568925 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,787 | test_display.py | ansible_ansible/test/units/utils/test_display.py |
# -*- coding: utf-8 -*-
# (c) 2020 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import locale
import sys
import unicodedata
from unittest.mock import MagicMock
import pytest
from ansible.utils.display import _LIBC, _MAX_INT, Display, get_text_width
from ansible.utils.multiprocessing import context as multiprocessing_context
@pytest.fixture
def problematic_wcswidth_chars():
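    # collect format (Cf) category characters for which libc wcswidth() returns -1, i.e. widths it cannot measure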
locale.setlocale(locale.LC_ALL, 'C.UTF-8')
candidates = set(chr(c) for c in range(sys.maxunicode) if unicodedata.category(chr(c)) == 'Cf')
problematic = [candidate for candidate in candidates if _LIBC.wcswidth(candidate, _MAX_INT) == -1]
if not problematic:
# Newer distributions (Ubuntu 22.04, Fedora 38) include a libc which does not report problematic characters.
pytest.skip("no problematic wcswidth chars found") # pragma: nocover
return problematic
def test_get_text_width():
locale.setlocale(locale.LC_ALL, '')
assert get_text_width(u'コンニチハ') == 10
assert get_text_width(u'abコcd') == 6
assert get_text_width(u'café') == 4
assert get_text_width(u'four') == 4
assert get_text_width(u'\u001B') == 0
assert get_text_width(u'ab\u0000') == 2
assert get_text_width(u'abコ\u0000') == 4
assert get_text_width(u'üöÄüêÆ') == 4
assert get_text_width(u'\x08') == 0
assert get_text_width(u'\x08\x08') == 0
assert get_text_width(u'ab\x08cd') == 3
assert get_text_width(u'ab\x1bcd') == 3
assert get_text_width(u'ab\x7fcd') == 3
assert get_text_width(u'ab\x94cd') == 3
pytest.raises(TypeError, get_text_width, 1)
pytest.raises(TypeError, get_text_width, b'four')
def test_get_text_width_no_locale(problematic_wcswidth_chars):
pytest.raises(EnvironmentError, get_text_width, problematic_wcswidth_chars[0])
def test_Display_banner_get_text_width(monkeypatch, display_resource):
locale.setlocale(locale.LC_ALL, '')
display = Display()
display_mock = MagicMock()
monkeypatch.setattr(display, 'display', display_mock)
display.banner(u'üöÄüêÆ', color=False, cows=False)
args, kwargs = display_mock.call_args
msg = args[0]
stars = u' %s' % (75 * u'*')
assert msg.endswith(stars)
def test_Display_banner_get_text_width_fallback(monkeypatch, display_resource):
locale.setlocale(locale.LC_ALL, 'C.UTF-8')
display = Display()
display_mock = MagicMock()
monkeypatch.setattr(display, 'display', display_mock)
display.banner(u'\U000110cd', color=False, cows=False)
args, kwargs = display_mock.call_args
msg = args[0]
stars = u' %s' % (78 * u'*')
assert msg.endswith(stars)
def test_Display_set_queue_parent(display_resource):
display = Display()
pytest.raises(RuntimeError, display.set_queue, 'foo')
def test_Display_set_queue_fork(display_resource):
def test():
display = Display()
display.set_queue('foo')
assert display._final_q == 'foo'
p = multiprocessing_context.Process(target=test)
p.start()
p.join()
assert p.exitcode == 0
def test_Display_display_fork(display_resource):
def test():
queue = MagicMock()
display = Display()
display.set_queue(queue)
display.display('foo')
queue.send_display.assert_called_once_with('display', 'foo')
p = multiprocessing_context.Process(target=test)
p.start()
p.join()
assert p.exitcode == 0
def test_Display_display_warn_fork(display_resource):
def test():
queue = MagicMock()
display = Display()
display.set_queue(queue)
display.warning('foo')
queue.send_display.assert_called_once_with('warning', 'foo')
p = multiprocessing_context.Process(target=test)
p.start()
p.join()
assert p.exitcode == 0
def test_Display_display_lock(monkeypatch, display_resource):
lock = MagicMock()
display = Display()
monkeypatch.setattr(display, '_lock', lock)
display.display('foo')
lock.__enter__.assert_called_once_with()
def test_Display_display_lock_fork(monkeypatch, display_resource):
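    # once a final queue is set (as in a forked worker), display() hands the message to the queue and must not take the local lock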
lock = MagicMock()
display = Display()
monkeypatch.setattr(display, '_lock', lock)
monkeypatch.setattr(display, '_final_q', MagicMock())
display.display('foo')
lock.__enter__.assert_not_called()
| 4,422 | Python | .py | 107 | 36.308411 | 116 | 0.689325 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,788 | test_encrypt.py | ansible_ansible/test/units/utils/test_encrypt.py |
# (c) 2018, Matthias Fuchs <matthias.s.fuchs@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import pytest
from ansible.errors import AnsibleError, AnsibleFilterError
from ansible.plugins.filter.core import get_encrypted_password
from ansible.utils import encrypt
def assert_hash(expected, secret, algorithm, **settings):
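    # the expected hash must come back both from the generic do_encrypt helper and from PasslibHash directly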
assert encrypt.do_encrypt(secret, algorithm, **settings) == expected
assert encrypt.PasslibHash(algorithm).hash(secret, **settings) == expected
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_passlib():
expected = "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
assert encrypt.passlib_or_crypt("123", "sha256_crypt", salt="12345678", rounds=5000) == expected
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_encrypt_with_ident():
assert_hash("$2$12$123456789012345678901ufd3hZRrev.WXCbemqGIV/gmWaTGLImm",
secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2')
assert_hash("$2y$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2y')
assert_hash("$2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2a')
assert_hash("$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2b')
assert_hash("$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
secret="123", algorithm="bcrypt", salt='1234567890123456789012')
# negative test: sha256_crypt does not take ident as parameter so ignore it
assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000, ident='invalid_ident')
# If passlib is not installed, this is identical to the test_encrypt_with_rounds_no_passlib() test
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_encrypt_with_rounds():
assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000)
assert_hash("$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=10000)
assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
secret="123", algorithm="sha512_crypt", salt="12345678", rounds=5000)
# If passlib is not installed, this is identical to the test_encrypt_default_rounds_no_passlib() test
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_encrypt_default_rounds():
assert_hash("$1$12345678$tRy4cXc3kmcfRZVj4iFXr/",
secret="123", algorithm="md5_crypt", salt="12345678")
assert_hash("$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv.",
secret="123", algorithm="sha256_crypt", salt="12345678")
assert_hash("$6$rounds=656000$12345678$InMy49UwxyCh2pGJU1NpOhVSElDDzKeyuC6n6E9O34BCUGVNYADnI.rcA3m.Vro9BiZpYmjEoNhpREqQcbvQ80",
secret="123", algorithm="sha512_crypt", salt="12345678")
assert encrypt.PasslibHash("md5_crypt").hash("123")
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_password_hash_filter_passlib():
with pytest.raises(AnsibleFilterError):
get_encrypted_password("123", "sha257", salt="12345678")
# Uses passlib default rounds value for sha256 matching crypt behaviour
assert get_encrypted_password("123", "sha256", salt="12345678") == "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv."
assert get_encrypted_password("123", "sha256", salt="12345678", rounds=5000) == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
assert (get_encrypted_password("123", "sha256", salt="12345678", rounds=10000) ==
"$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/")
assert (get_encrypted_password("123", "sha512", salt="12345678", rounds=6000) ==
"$6$rounds=6000$12345678$l/fC67BdJwZrJ7qneKGP1b6PcatfBr0dI7W6JLBrsv8P1wnv/0pu4WJsWq5p6WiXgZ2gt9Aoir3MeORJxg4.Z/")
assert (get_encrypted_password("123", "sha512", salt="12345678", rounds=5000) ==
"$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.")
assert get_encrypted_password("123", "crypt16", salt="12") == "12pELHK2ME3McUFlHxel6uMM"
# Try algorithm that uses a raw salt
assert get_encrypted_password("123", "pbkdf2_sha256")
# Try algorithm with ident
assert get_encrypted_password("123", "pbkdf2_sha256", ident='invalid_ident')
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_do_encrypt_passlib():
with pytest.raises(AnsibleError):
encrypt.do_encrypt("123", "sha257_crypt", salt="12345678")
# Uses passlib default rounds value for sha256 matching crypt behaviour.
assert encrypt.do_encrypt("123", "sha256_crypt", salt="12345678") == "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv."
assert encrypt.do_encrypt("123", "md5_crypt", salt="12345678") == "$1$12345678$tRy4cXc3kmcfRZVj4iFXr/"
assert encrypt.do_encrypt("123", "crypt16", salt="12") == "12pELHK2ME3McUFlHxel6uMM"
assert encrypt.do_encrypt("123", "bcrypt",
salt='1234567890123456789012',
ident='2a') == "$2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu"
def test_random_salt():
res = encrypt.random_salt()
expected_salt_candidate_chars = u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./'
assert len(res) == 8
for res_char in res:
assert res_char in expected_salt_candidate_chars
def test_passlib_bcrypt_salt(recwarn):
passlib_exc = pytest.importorskip("passlib.exc")
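    # passlib normally repairs malformed bcrypt salts itself; hashing with the unrepaired salt must raise no PasslibHashWarning and must match the hash produced with the pre-repaired salt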
secret = 'foo'
salt = '1234567890123456789012'
repaired_salt = '123456789012345678901u'
expected = '$2b$12$123456789012345678901uMv44x.2qmQeefEGb3bcIRc1mLuO7bqa'
ident = '2b'
p = encrypt.PasslibHash('bcrypt')
result = p.hash(secret, salt=salt, ident=ident)
passlib_warnings = [w.message for w in recwarn if isinstance(w.message, passlib_exc.PasslibHashWarning)]
assert len(passlib_warnings) == 0
assert result == expected
recwarn.clear()
result = p.hash(secret, salt=repaired_salt, ident=ident)
assert result == expected
| 7,019 | Python | .py | 98 | 64.846939 | 144 | 0.740665 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,789 | test_shlex.py | ansible_ansible/test/units/utils/test_shlex.py |
# (c) 2015, Marius Gedminas <marius@gedmin.as>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import unittest
from ansible.utils.shlex import shlex_split
class TestSplit(unittest.TestCase):
def test_trivial(self):
self.assertEqual(shlex_split("a b c"), ["a", "b", "c"])
def test_unicode(self):
self.assertEqual(shlex_split(u"a b \u010D"), [u"a", u"b", u"\u010D"])
def test_quoted(self):
self.assertEqual(shlex_split('"a b" c'), ["a b", "c"])
def test_comments(self):
self.assertEqual(shlex_split('"a b" c # d', comments=True), ["a b", "c"])
def test_error(self):
self.assertRaises(ValueError, shlex_split, 'a "b')
| 1,326 | Python | .py | 30 | 40.866667 | 81 | 0.709953 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,790 | test_version.py | ansible_ansible/test/units/utils/test_version.py |
# -*- coding: utf-8 -*-
# (c) 2020 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
from ansible.module_utils.compat.version import LooseVersion, StrictVersion
import pytest
from ansible.utils.version import _Alpha, _Numeric, SemanticVersion
EQ = [
('1.0.0', '1.0.0', True),
('1.0.0', '1.0.0-beta', False),
('1.0.0-beta2+build1', '1.0.0-beta.2+build.1', False),
('1.0.0-beta+build', '1.0.0-beta+build', True),
('1.0.0-beta+build1', '1.0.0-beta+build2', True),
('1.0.0-beta+a', '1.0.0-alpha+bar', False),
]
NE = [
('1.0.0', '1.0.0', False),
('1.0.0', '1.0.0-beta', True),
('1.0.0-beta2+build1', '1.0.0-beta.2+build.1', True),
('1.0.0-beta+build', '1.0.0-beta+build', False),
('1.0.0-beta+a', '1.0.0-alpha+bar', True),
]
LT = [
('1.0.0', '2.0.0', True),
('1.0.0-beta', '2.0.0-alpha', True),
('1.0.0-alpha', '2.0.0-beta', True),
('1.0.0-alpha', '1.0.0', True),
('1.0.0-beta', '1.0.0-alpha3', False),
('1.0.0+foo', '1.0.0-alpha', False),
('1.0.0-beta.1', '1.0.0-beta.a', True),
('1.0.0-beta+a', '1.0.0-alpha+bar', False),
]
GT = [
('1.0.0', '2.0.0', False),
('1.0.0-beta', '2.0.0-alpha', False),
('1.0.0-alpha', '2.0.0-beta', False),
('1.0.0-alpha', '1.0.0', False),
('1.0.0-beta', '1.0.0-alpha3', True),
('1.0.0+foo', '1.0.0-alpha', True),
('1.0.0-beta.1', '1.0.0-beta.a', False),
('1.0.0-beta+a', '1.0.0-alpha+bar', True),
]
LE = [
('1.0.0', '1.0.0', True),
('1.0.0', '2.0.0', True),
('1.0.0-alpha', '1.0.0-beta', True),
('1.0.0-beta', '1.0.0-alpha', False),
]
GE = [
('1.0.0', '1.0.0', True),
('1.0.0', '2.0.0', False),
('1.0.0-alpha', '1.0.0-beta', False),
('1.0.0-beta', '1.0.0-alpha', True),
]
VALID = [
"0.0.4",
"1.2.3",
"10.20.30",
"1.1.2-prerelease+meta",
"1.1.2+meta",
"1.1.2+meta-valid",
"1.0.0-alpha",
"1.0.0-beta",
"1.0.0-alpha.beta",
"1.0.0-alpha.beta.1",
"1.0.0-alpha.1",
"1.0.0-alpha0.valid",
"1.0.0-alpha.0valid",
"1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay",
"1.0.0-rc.1+build.1",
"2.0.0-rc.1+build.123",
"1.2.3-beta",
"10.2.3-DEV-SNAPSHOT",
"1.2.3-SNAPSHOT-123",
"1.0.0",
"2.0.0",
"1.1.7",
"2.0.0+build.1848",
"2.0.1-alpha.1227",
"1.0.0-alpha+beta",
"1.2.3----RC-SNAPSHOT.12.9.1--.12+788",
"1.2.3----R-S.12.9.1--.12+meta",
"1.2.3----RC-SNAPSHOT.12.9.1--.12",
"1.0.0+0.build.1-rc.10000aaa-kk-0.1",
"99999999999999999999999.999999999999999999.99999999999999999",
"1.0.0-0A.is.legal",
]
INVALID = [
"1",
"1.2",
"1.2.3-0123",
"1.2.3-0123.0123",
"1.1.2+.123",
"+invalid",
"-invalid",
"-invalid+invalid",
"-invalid.01",
"alpha",
"alpha.beta",
"alpha.beta.1",
"alpha.1",
"alpha+beta",
"alpha_beta",
"alpha.",
"alpha..",
"beta",
"1.0.0-alpha_beta",
"-alpha.",
"1.0.0-alpha..",
"1.0.0-alpha..1",
"1.0.0-alpha...1",
"1.0.0-alpha....1",
"1.0.0-alpha.....1",
"1.0.0-alpha......1",
"1.0.0-alpha.......1",
"01.1.1",
"1.01.1",
"1.1.01",
"1.2",
"1.2.3.DEV",
"1.2-SNAPSHOT",
"1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788",
"1.2-RC-SNAPSHOT",
"-1.0.3-gamma+b7718",
"+justmeta",
"9.8.7+meta+meta",
"9.8.7-whatever+meta+meta",
]
PRERELEASE = [
('1.0.0-alpha', True),
('1.0.0-alpha.1', True),
('1.0.0-0.3.7', True),
('1.0.0-x.7.z.92', True),
('0.1.2', False),
('0.1.2+bob', False),
('1.0.0', False),
]
STABLE = [
('1.0.0-alpha', False),
('1.0.0-alpha.1', False),
('1.0.0-0.3.7', False),
('1.0.0-x.7.z.92', False),
('0.1.2', False),
('0.1.2+bob', False),
('1.0.0', True),
('1.0.0+bob', True),
]
LOOSE_VERSION = [
(LooseVersion('1'), SemanticVersion('1.0.0')),
(LooseVersion('1-alpha'), SemanticVersion('1.0.0-alpha')),
(LooseVersion('1.0.0-alpha+build'), SemanticVersion('1.0.0-alpha+build')),
]
LOOSE_VERSION_INVALID = [
LooseVersion('1.a.3'),
LooseVersion(),
'bar',
StrictVersion('1.2.3'),
]
def test_semanticversion_none():
assert SemanticVersion().major is None
@pytest.mark.parametrize('left,right,expected', EQ)
def test_eq(left, right, expected):
assert (SemanticVersion(left) == SemanticVersion(right)) is expected
@pytest.mark.parametrize('left,right,expected', NE)
def test_ne(left, right, expected):
assert (SemanticVersion(left) != SemanticVersion(right)) is expected
@pytest.mark.parametrize('left,right,expected', LT)
def test_lt(left, right, expected):
assert (SemanticVersion(left) < SemanticVersion(right)) is expected
@pytest.mark.parametrize('left,right,expected', LE)
def test_le(left, right, expected):
assert (SemanticVersion(left) <= SemanticVersion(right)) is expected
@pytest.mark.parametrize('left,right,expected', GT)
def test_gt(left, right, expected):
assert (SemanticVersion(left) > SemanticVersion(right)) is expected
@pytest.mark.parametrize('left,right,expected', GE)
def test_ge(left, right, expected):
assert (SemanticVersion(left) >= SemanticVersion(right)) is expected
@pytest.mark.parametrize('value', VALID)
def test_valid(value):
SemanticVersion(value)
@pytest.mark.parametrize('value', INVALID)
def test_invalid(value):
pytest.raises(ValueError, SemanticVersion, value)
def test_example_precedence():
# https://semver.org/#spec-item-11
sv = SemanticVersion
assert sv('1.0.0') < sv('2.0.0') < sv('2.1.0') < sv('2.1.1')
assert sv('1.0.0-alpha') < sv('1.0.0')
assert sv('1.0.0-alpha') < sv('1.0.0-alpha.1') < sv('1.0.0-alpha.beta')
assert sv('1.0.0-beta') < sv('1.0.0-beta.2') < sv('1.0.0-beta.11') < sv('1.0.0-rc.1') < sv('1.0.0')
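# Illustrative sketch (not part of the original suite): the precedence rules asserted above are
# what make SemanticVersion suitable for picking a "latest" release. The helper name below is
# hypothetical and relies only on behaviour exercised by these tests.
def _example_pick_latest_stable(version_strings):
    candidates = [SemanticVersion(v) for v in version_strings]
    stable = [v for v in candidates if v.is_stable]
    # the comparison operators give a total ordering, so max() returns the highest-precedence release
    return max(stable) if stable else None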
@pytest.mark.parametrize('value,expected', PRERELEASE)
def test_prerelease(value, expected):
assert SemanticVersion(value).is_prerelease is expected
@pytest.mark.parametrize('value,expected', STABLE)
def test_stable(value, expected):
assert SemanticVersion(value).is_stable is expected
@pytest.mark.parametrize('value,expected', LOOSE_VERSION)
def test_from_loose_version(value, expected):
assert SemanticVersion.from_loose_version(value) == expected
@pytest.mark.parametrize('value', LOOSE_VERSION_INVALID)
def test_from_loose_version_invalid(value):
pytest.raises((AttributeError, ValueError), SemanticVersion.from_loose_version, value)
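# Illustrative sketch (not part of the original suite): from_loose_version() bridges legacy
# LooseVersion objects into semver semantics; the helper name is hypothetical.
def _example_loose_is_prerelease(loose_version):
    return SemanticVersion.from_loose_version(loose_version).is_prerelease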
def test_comparison_with_string():
assert SemanticVersion('1.0.0') > '0.1.0'
def test_alpha():
assert _Alpha('a') == _Alpha('a')
assert _Alpha('a') == 'a'
assert _Alpha('a') != _Alpha('b')
assert _Alpha('a') != 1
assert _Alpha('a') < _Alpha('b')
assert _Alpha('a') < 'c'
assert _Alpha('a') > _Numeric(1)
with pytest.raises(ValueError):
_Alpha('a') < None
assert _Alpha('a') <= _Alpha('a')
assert _Alpha('a') <= _Alpha('b')
assert _Alpha('b') >= _Alpha('a')
assert _Alpha('b') >= _Alpha('b')
# The following 3*6 tests check that all comparison operators perform
# as expected. DO NOT remove any of them, or reformulate them (to remove
# the explicit `not`)!
assert _Alpha('a') == _Alpha('a')
assert not _Alpha('a') != _Alpha('a') # pylint: disable=unneeded-not
assert not _Alpha('a') < _Alpha('a') # pylint: disable=unneeded-not
assert _Alpha('a') <= _Alpha('a')
assert not _Alpha('a') > _Alpha('a') # pylint: disable=unneeded-not
assert _Alpha('a') >= _Alpha('a')
assert not _Alpha('a') == _Alpha('b') # pylint: disable=unneeded-not
assert _Alpha('a') != _Alpha('b')
assert _Alpha('a') < _Alpha('b')
assert _Alpha('a') <= _Alpha('b')
assert not _Alpha('a') > _Alpha('b') # pylint: disable=unneeded-not
assert not _Alpha('a') >= _Alpha('b') # pylint: disable=unneeded-not
assert not _Alpha('b') == _Alpha('a') # pylint: disable=unneeded-not
assert _Alpha('b') != _Alpha('a')
assert not _Alpha('b') < _Alpha('a') # pylint: disable=unneeded-not
assert not _Alpha('b') <= _Alpha('a') # pylint: disable=unneeded-not
assert _Alpha('b') > _Alpha('a')
assert _Alpha('b') >= _Alpha('a')
def test_numeric():
assert _Numeric(1) == _Numeric(1)
assert _Numeric(1) == 1
assert _Numeric(1) != _Numeric(2)
assert _Numeric(1) != 'a'
assert _Numeric(1) < _Numeric(2)
assert _Numeric(1) < 3
assert _Numeric(1) < _Alpha('b')
with pytest.raises(ValueError):
_Numeric(1) < None
assert _Numeric(1) <= _Numeric(1)
assert _Numeric(1) <= _Numeric(2)
assert _Numeric(2) >= _Numeric(1)
assert _Numeric(2) >= _Numeric(2)
# The following 3*6 tests check that all comparison operators perform
# as expected. DO NOT remove any of them, or reformulate them (to remove
# the explicit `not`)!
assert _Numeric(1) == _Numeric(1)
assert not _Numeric(1) != _Numeric(1) # pylint: disable=unneeded-not
assert not _Numeric(1) < _Numeric(1) # pylint: disable=unneeded-not
assert _Numeric(1) <= _Numeric(1)
assert not _Numeric(1) > _Numeric(1) # pylint: disable=unneeded-not
assert _Numeric(1) >= _Numeric(1)
assert not _Numeric(1) == _Numeric(2) # pylint: disable=unneeded-not
assert _Numeric(1) != _Numeric(2)
assert _Numeric(1) < _Numeric(2)
assert _Numeric(1) <= _Numeric(2)
assert not _Numeric(1) > _Numeric(2) # pylint: disable=unneeded-not
assert not _Numeric(1) >= _Numeric(2) # pylint: disable=unneeded-not
assert not _Numeric(2) == _Numeric(1) # pylint: disable=unneeded-not
assert _Numeric(2) != _Numeric(1)
assert not _Numeric(2) < _Numeric(1) # pylint: disable=unneeded-not
assert not _Numeric(2) <= _Numeric(1) # pylint: disable=unneeded-not
assert _Numeric(2) > _Numeric(1)
assert _Numeric(2) >= _Numeric(1)
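# Illustrative sketch (not part of the original suite): _Alpha and _Numeric are the building
# blocks SemanticVersion uses for prerelease identifiers; numeric identifiers sort before
# alphanumeric ones, which is why 1.0.0-alpha.1 < 1.0.0-alpha.beta above.
def _example_identifier_ordering():
    identifiers = [_Alpha('beta'), _Numeric(11), _Alpha('alpha'), _Numeric(2)]
    return sorted(identifiers)  # [_Numeric(2), _Numeric(11), _Alpha('alpha'), _Alpha('beta')]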
| 9,944 | Python | .py | 275 | 31.789091 | 103 | 0.599272 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,791 | test_collection_loader.py | ansible_ansible/test/units/utils/collection_loader/test_collection_loader.py |
from __future__ import annotations
import inspect
import os
import pkgutil
import pytest
import re
import sys
from importlib import import_module
from ansible.modules import ping as ping_module
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import (
_AnsibleCollectionFinder, _AnsibleCollectionLoader, _AnsibleCollectionNSPkgLoader, _AnsibleCollectionPkgLoader,
_AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsibleNSTraversable, _AnsiblePathHookFinder,
_get_collection_name_from_path, _get_collection_role_path, _get_collection_metadata, _iter_modules_impl
)
from ansible.utils.collection_loader._collection_config import _EventSource
from unittest.mock import MagicMock, NonCallableMagicMock, patch
# fixture to ensure we always clean up the import stuff when we're done
@pytest.fixture(autouse=True, scope='function')
def teardown(*args, **kwargs):
yield
reset_collections_loader_state()
# BEGIN STANDALONE TESTS - these exercise behaviors of the individual components without the import machinery
@pytest.mark.filterwarnings(
'ignore:'
r'find_module\(\) is deprecated and slated for removal in Python 3\.12; use find_spec\(\) instead'
':DeprecationWarning',
'ignore:'
r'FileFinder\.find_loader\(\) is deprecated and slated for removal in Python 3\.12; use find_spec\(\) instead'
':DeprecationWarning',
)
@pytest.mark.skipif(sys.version_info >= (3, 12), reason='Testing Python 2 codepath (find_module) on Python 3, <= 3.11')
def test_find_module_py3_lt_312():
dir_to_a_file = os.path.dirname(ping_module.__file__)
path_hook_finder = _AnsiblePathHookFinder(_AnsibleCollectionFinder(), dir_to_a_file)
# setuptools may fall back to find_module on Python 3 if find_spec returns None
# see https://github.com/pypa/setuptools/pull/2918
assert path_hook_finder.find_spec('missing') is None
assert path_hook_finder.find_module('missing') is None
@pytest.mark.skipif(sys.version_info < (3, 12), reason='Testing Python 2 codepath (find_module) on Python >= 3.12')
def test_find_module_py3_gt_311():
dir_to_a_file = os.path.dirname(ping_module.__file__)
path_hook_finder = _AnsiblePathHookFinder(_AnsibleCollectionFinder(), dir_to_a_file)
# setuptools may fall back to find_module on Python 3 if find_spec returns None
# see https://github.com/pypa/setuptools/pull/2918
assert path_hook_finder.find_spec('missing') is None
def test_finder_setup():
# ensure scalar path is listified
f = _AnsibleCollectionFinder(paths='/bogus/bogus')
assert isinstance(f._n_collection_paths, list)
# ensure sys.path paths that have an ansible_collections dir are added to the end of the collections paths
with patch.object(sys, 'path', ['/bogus', default_test_collection_paths[1], '/morebogus', default_test_collection_paths[0]]):
with patch('os.path.isdir', side_effect=lambda x: b'bogus' not in x):
f = _AnsibleCollectionFinder(paths=['/explicit', '/other'])
assert f._n_collection_paths == ['/explicit', '/other', default_test_collection_paths[1], default_test_collection_paths[0]]
configured_paths = ['/bogus']
playbook_paths = ['/playbookdir']
with patch.object(sys, 'path', ['/bogus', '/playbookdir']), patch('os.path.isdir', side_effect=lambda x: b'bogus' in x):
f = _AnsibleCollectionFinder(paths=configured_paths)
assert f._n_collection_paths == configured_paths
f.set_playbook_paths(playbook_paths)
assert f._n_collection_paths == extend_paths(playbook_paths, 'collections') + configured_paths
# ensure scalar playbook_paths gets listified
f.set_playbook_paths(playbook_paths[0])
assert f._n_collection_paths == extend_paths(playbook_paths, 'collections') + configured_paths
def test_finder_not_interested():
f = get_default_finder()
assert f.find_module('nothanks') is None
assert f.find_module('nothanks.sub', path=['/bogus/dir']) is None
def test_finder_ns():
# ensure we can still load ansible_collections and ansible_collections.ansible when they don't exist on disk
f = _AnsibleCollectionFinder(paths=['/bogus/bogus'])
loader = f.find_module('ansible_collections')
assert isinstance(loader, _AnsibleCollectionRootPkgLoader)
loader = f.find_module('ansible_collections.ansible', path=['/bogus/bogus'])
assert isinstance(loader, _AnsibleCollectionNSPkgLoader)
f = get_default_finder()
loader = f.find_module('ansible_collections')
assert isinstance(loader, _AnsibleCollectionRootPkgLoader)
# path is not allowed for top-level
with pytest.raises(ValueError):
f.find_module('ansible_collections', path=['whatever'])
# path is required for subpackages
with pytest.raises(ValueError):
f.find_module('ansible_collections.whatever', path=None)
paths = [os.path.join(p, 'ansible_collections/nonexistns') for p in default_test_collection_paths]
# test missing
loader = f.find_module('ansible_collections.nonexistns', paths)
assert loader is None
# keep these up top to make sure the loader install/remove are working, since we rely on them heavily in the tests
def test_loader_remove():
fake_mp = [MagicMock(), _AnsibleCollectionFinder(), MagicMock(), _AnsibleCollectionFinder()]
fake_ph = [MagicMock().m1, MagicMock().m2, _AnsibleCollectionFinder()._ansible_collection_path_hook, NonCallableMagicMock]
# must nest until 2.6 compilation is totally donezo
with patch.object(sys, 'meta_path', fake_mp):
with patch.object(sys, 'path_hooks', fake_ph):
_AnsibleCollectionFinder()._remove()
assert len(sys.meta_path) == 2
# no AnsibleCollectionFinders on the meta path after remove is called
assert all((not isinstance(mpf, _AnsibleCollectionFinder) for mpf in sys.meta_path))
assert len(sys.path_hooks) == 3
# none of the remaining path hooks should point at an AnsibleCollectionFinder
assert all((not isinstance(ph.__self__, _AnsibleCollectionFinder) for ph in sys.path_hooks if hasattr(ph, '__self__')))
assert AnsibleCollectionConfig.collection_finder is None
def test_loader_install():
fake_mp = [MagicMock(), _AnsibleCollectionFinder(), MagicMock(), _AnsibleCollectionFinder()]
fake_ph = [MagicMock().m1, MagicMock().m2, _AnsibleCollectionFinder()._ansible_collection_path_hook, NonCallableMagicMock]
# must nest until 2.6 compilation is totally donezo
with patch.object(sys, 'meta_path', fake_mp):
with patch.object(sys, 'path_hooks', fake_ph):
f = _AnsibleCollectionFinder()
f._install()
assert len(sys.meta_path) == 3 # should have removed the existing ACFs and installed a new one
assert sys.meta_path[0] is f # at the front
# the rest of the meta_path should not be AnsibleCollectionFinders
assert all((not isinstance(mpf, _AnsibleCollectionFinder) for mpf in sys.meta_path[1:]))
assert len(sys.path_hooks) == 4 # should have removed the existing ACF path hooks and installed a new one
# the first path hook should be ours, make sure it's pointing at the right instance
assert hasattr(sys.path_hooks[0], '__self__') and sys.path_hooks[0].__self__ is f
# the rest of the path_hooks should not point at an AnsibleCollectionFinder
assert all((not isinstance(ph.__self__, _AnsibleCollectionFinder) for ph in sys.path_hooks[1:] if hasattr(ph, '__self__')))
assert AnsibleCollectionConfig.collection_finder is f
with pytest.raises(ValueError):
AnsibleCollectionConfig.collection_finder = f
def test_finder_coll():
f = get_default_finder()
tests = [
{'name': 'ansible_collections.testns.testcoll', 'test_paths': [default_test_collection_paths]},
{'name': 'ansible_collections.ansible.builtin', 'test_paths': [['/bogus'], default_test_collection_paths]},
]
# ensure finder works for legit paths and bogus paths
for test_dict in tests:
# splat the dict values to our locals
globals().update(test_dict)
parent_pkg = name.rpartition('.')[0]
for paths in test_paths:
paths = [os.path.join(p, parent_pkg.replace('.', '/')) for p in paths]
loader = f.find_module(name, path=paths)
assert isinstance(loader, _AnsibleCollectionPkgLoader)
def test_root_loader_not_interested():
with pytest.raises(ImportError):
_AnsibleCollectionRootPkgLoader('not_ansible_collections_toplevel', path_list=[])
with pytest.raises(ImportError):
_AnsibleCollectionRootPkgLoader('ansible_collections.somens', path_list=['/bogus'])
def test_root_loader():
name = 'ansible_collections'
# ensure this works even when ansible_collections doesn't exist on disk
for paths in [], default_test_collection_paths:
sys.modules.pop(name, None)
loader = _AnsibleCollectionRootPkgLoader(name, paths)
assert repr(loader).startswith('_AnsibleCollectionRootPkgLoader(path=')
module = loader.load_module(name)
assert module.__name__ == name
assert module.__path__ == [p for p in extend_paths(paths, name) if os.path.isdir(p)]
# even if the dir exists somewhere, this loader doesn't support get_data, so make __file__ a non-file
assert module.__file__ == '<ansible_synthetic_collection_package>'
assert module.__package__ == name
assert sys.modules.get(name) == module
def test_nspkg_loader_not_interested():
with pytest.raises(ImportError):
_AnsibleCollectionNSPkgLoader('not_ansible_collections_toplevel.something', path_list=[])
with pytest.raises(ImportError):
_AnsibleCollectionNSPkgLoader('ansible_collections.somens.somecoll', path_list=[])
def test_nspkg_loader_load_module():
# ensure the loader behaves on the toplevel and ansible packages for both legit and missing/bogus paths
for name in ['ansible_collections.ansible', 'ansible_collections.testns']:
parent_pkg = name.partition('.')[0]
module_to_load = name.rpartition('.')[2]
paths = extend_paths(default_test_collection_paths, parent_pkg)
existing_child_paths = [p for p in extend_paths(paths, module_to_load) if os.path.exists(p)]
sys.modules.pop(name, None)
loader = _AnsibleCollectionNSPkgLoader(name, path_list=paths)
assert repr(loader).startswith('_AnsibleCollectionNSPkgLoader(path=')
module = loader.load_module(name)
assert module.__name__ == name
assert isinstance(module.__loader__, _AnsibleCollectionNSPkgLoader)
assert module.__path__ == existing_child_paths
assert module.__package__ == name
assert module.__file__ == '<ansible_synthetic_collection_package>'
assert sys.modules.get(name) == module
def test_collpkg_loader_not_interested():
with pytest.raises(ImportError):
_AnsibleCollectionPkgLoader('not_ansible_collections', path_list=[])
with pytest.raises(ImportError):
_AnsibleCollectionPkgLoader('ansible_collections.ns', path_list=['/bogus/bogus'])
def test_collpkg_loader_load_module():
reset_collections_loader_state()
with patch('ansible.utils.collection_loader.AnsibleCollectionConfig') as p:
for name in ['ansible_collections.ansible.builtin', 'ansible_collections.testns.testcoll']:
parent_pkg = name.rpartition('.')[0]
module_to_load = name.rpartition('.')[2]
paths = extend_paths(default_test_collection_paths, parent_pkg)
existing_child_paths = [p for p in extend_paths(paths, module_to_load) if os.path.exists(p)]
is_builtin = 'ansible.builtin' in name
sys.modules.pop(name, None)
loader = _AnsibleCollectionPkgLoader(name, path_list=paths)
assert repr(loader).startswith('_AnsibleCollectionPkgLoader(path=')
module = loader.load_module(name)
assert module.__name__ == name
assert isinstance(module.__loader__, _AnsibleCollectionPkgLoader)
if is_builtin:
assert module.__path__ == []
else:
assert module.__path__ == [existing_child_paths[0]]
assert module.__package__ == name
if is_builtin:
assert module.__file__ == '<ansible_synthetic_collection_package>'
else:
assert module.__file__.endswith('__synthetic__') and os.path.isdir(os.path.dirname(module.__file__))
assert sys.modules.get(name) == module
assert hasattr(module, '_collection_meta') and isinstance(module._collection_meta, dict)
# FIXME: validate _collection_meta contents match what's on disk (or not)
# verify the module has metadata, then try loading it with busted metadata
assert module._collection_meta
_collection_finder = import_module('ansible.utils.collection_loader._collection_finder')
with patch.object(_collection_finder, '_meta_yml_to_dict', side_effect=Exception('bang')):
with pytest.raises(Exception) as ex:
_AnsibleCollectionPkgLoader(name, path_list=paths).load_module(name)
assert 'error parsing collection metadata' in str(ex.value)
def test_coll_loader():
with patch('ansible.utils.collection_loader.AnsibleCollectionConfig'):
with pytest.raises(ValueError):
# not a collection
_AnsibleCollectionLoader('ansible_collections')
with pytest.raises(ValueError):
# bogus paths
_AnsibleCollectionLoader('ansible_collections.testns.testcoll', path_list=[])
# FIXME: more
def test_path_hook_setup():
with patch.object(sys, 'path_hooks', []):
found_hook = None
pathhook_exc = None
try:
found_hook = _AnsiblePathHookFinder._get_filefinder_path_hook()
except Exception as phe:
pathhook_exc = phe
assert str(pathhook_exc) == 'need exactly one FileFinder import hook (found 0)'
assert repr(_AnsiblePathHookFinder(object(), '/bogus/path')) == "_AnsiblePathHookFinder(path='/bogus/path')"
def test_path_hook_importerror():
# ensure that AnsiblePathHookFinder.find_module swallows ImportError from path hook delegation on Py3, eg if the delegated
# path hook gets passed a file on sys.path (python36.zip)
reset_collections_loader_state()
path_to_a_file = os.path.join(default_test_collection_paths[0], 'ansible_collections/testns/testcoll/plugins/action/my_action.py')
# it's a bug if the following pops an ImportError...
assert _AnsiblePathHookFinder(_AnsibleCollectionFinder(), path_to_a_file).find_module('foo.bar.my_action') is None
def test_new_or_existing_module():
module_name = 'blar.test.module'
pkg_name = module_name.rpartition('.')[0]
# create new module case
nuke_module_prefix(module_name)
with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name, __package__=pkg_name) as new_module:
# the module we just created should now exist in sys.modules
assert sys.modules.get(module_name) is new_module
assert new_module.__name__ == module_name
# the module should stick since we didn't raise an exception in the contextmgr
assert sys.modules.get(module_name) is new_module
# reuse existing module case
with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name, __attr1__=42, blar='yo') as existing_module:
assert sys.modules.get(module_name) is new_module # should be the same module we created earlier
assert hasattr(existing_module, '__package__') and existing_module.__package__ == pkg_name
assert hasattr(existing_module, '__attr1__') and existing_module.__attr1__ == 42
assert hasattr(existing_module, 'blar') and existing_module.blar == 'yo'
# exception during update existing shouldn't zap existing module from sys.modules
with pytest.raises(ValueError) as ve:
with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name) as existing_module:
err_to_raise = ValueError('bang')
raise err_to_raise
# make sure we got our error
assert ve.value is err_to_raise
# and that the module still exists
assert sys.modules.get(module_name) is existing_module
# test module removal after exception during creation
nuke_module_prefix(module_name)
with pytest.raises(ValueError) as ve:
with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name) as new_module:
err_to_raise = ValueError('bang')
raise err_to_raise
# make sure we got our error
assert ve.value is err_to_raise
# and that the module was removed
assert sys.modules.get(module_name) is None
def test_iter_modules_impl():
modules_trailer = 'ansible_collections/testns/testcoll/plugins'
modules_pkg_prefix = modules_trailer.replace('/', '.') + '.'
modules_path = os.path.join(default_test_collection_paths[0], modules_trailer)
modules = list(_iter_modules_impl([modules_path], modules_pkg_prefix))
assert modules
assert set([('ansible_collections.testns.testcoll.plugins.action', True),
('ansible_collections.testns.testcoll.plugins.module_utils', True),
('ansible_collections.testns.testcoll.plugins.modules', True)]) == set(modules)
modules_trailer = 'ansible_collections/testns/testcoll/plugins/modules'
modules_pkg_prefix = modules_trailer.replace('/', '.') + '.'
modules_path = os.path.join(default_test_collection_paths[0], modules_trailer)
modules = list(_iter_modules_impl([modules_path], modules_pkg_prefix))
assert modules
assert len(modules) == 1
assert modules[0][0] == 'ansible_collections.testns.testcoll.plugins.modules.amodule' # name
assert modules[0][1] is False # is_pkg
# FIXME: more
# BEGIN IN-CIRCUIT TESTS - these exercise behaviors of the loader when wired up to the import machinery
def test_import_from_collection(monkeypatch):
collection_root = os.path.join(os.path.dirname(__file__), 'fixtures', 'collections')
collection_path = os.path.join(collection_root, 'ansible_collections/testns/testcoll/plugins/module_utils/my_util.py')
# THIS IS UNSTABLE UNDER A DEBUGGER
# the trace we're expecting to be generated when running the code below:
# answer = question()
expected_trace_log = [
(collection_path, 5, 'call'),
(collection_path, 6, 'line'),
(collection_path, 6, 'return'),
]
# define the collection root before any ansible code has been loaded
# otherwise config will have already been loaded and changing the environment will have no effect
monkeypatch.setenv('ANSIBLE_COLLECTIONS_PATH', collection_root)
finder = _AnsibleCollectionFinder(paths=[collection_root])
reset_collections_loader_state(finder)
from ansible_collections.testns.testcoll.plugins.module_utils.my_util import question
original_trace_function = sys.gettrace()
trace_log = []
if original_trace_function: # pragma: nocover
# enable tracing while preserving the existing trace function (coverage)
def my_trace_function(frame, event, arg):
trace_log.append((frame.f_code.co_filename, frame.f_lineno, event))
# the original trace function expects to have itself set as the trace function
sys.settrace(original_trace_function)
# call the original trace function
original_trace_function(frame, event, arg)
# restore our trace function
sys.settrace(my_trace_function)
return my_trace_function
else: # pragma: nocover
# no existing trace function, so our trace function is much simpler
def my_trace_function(frame, event, arg):
trace_log.append((frame.f_code.co_filename, frame.f_lineno, event))
return my_trace_function
sys.settrace(my_trace_function)
try:
# run a minimal amount of code while the trace is running
# adding more code here, including use of a context manager, will add more to our trace
answer = question()
finally:
sys.settrace(original_trace_function)
# make sure 'import ... as ...' works on builtin synthetic collections
# the following import is not supported (it tries to find module_utils in ansible.plugins)
# import ansible_collections.ansible.builtin.plugins.module_utils as c1
import ansible_collections.ansible.builtin.plugins.action as c2
import ansible_collections.ansible.builtin.plugins as c3
import ansible_collections.ansible.builtin as c4
import ansible_collections.ansible as c5
import ansible_collections as c6
# make sure 'import ...' works on builtin synthetic collections
import ansible_collections.ansible.builtin.plugins.module_utils
import ansible_collections.ansible.builtin.plugins.action
assert ansible_collections.ansible.builtin.plugins.action == c3.action == c2
import ansible_collections.ansible.builtin.plugins
assert ansible_collections.ansible.builtin.plugins == c4.plugins == c3
import ansible_collections.ansible.builtin
assert ansible_collections.ansible.builtin == c5.builtin == c4
import ansible_collections.ansible
assert ansible_collections.ansible == c6.ansible == c5
import ansible_collections
assert ansible_collections == c6
# make sure 'from ... import ...' works on builtin synthetic collections
from ansible_collections.ansible import builtin
from ansible_collections.ansible.builtin import plugins
assert builtin.plugins == plugins
from ansible_collections.ansible.builtin.plugins import action
from ansible_collections.ansible.builtin.plugins.action import command
assert action.command == command
from ansible_collections.ansible.builtin.plugins.module_utils import basic
from ansible_collections.ansible.builtin.plugins.module_utils.basic import AnsibleModule
assert basic.AnsibleModule == AnsibleModule
# make sure relative imports work from collections code
# these require __package__ to be set correctly
import ansible_collections.testns.testcoll.plugins.module_utils.my_other_util
import ansible_collections.testns.testcoll.plugins.action.my_action
# verify that code loaded from a collection does not inherit __future__ statements from the collection loader
# if the collection code inherits the annotations future feature from the collection loader this will fail
assert inspect.get_annotations(question)['return'] is float
# verify that the filename and line number reported by the trace is correct
# this makes sure that collection loading preserves file paths and line numbers
assert trace_log == expected_trace_log
def test_eventsource():
es = _EventSource()
# fire when empty should succeed
es.fire(42)
handler1 = MagicMock()
handler2 = MagicMock()
es += handler1
es.fire(99, my_kwarg='blah')
handler1.assert_called_with(99, my_kwarg='blah')
es += handler2
es.fire(123, foo='bar')
handler1.assert_called_with(123, foo='bar')
handler2.assert_called_with(123, foo='bar')
es -= handler2
handler1.reset_mock()
handler2.reset_mock()
es.fire(123, foo='bar')
handler1.assert_called_with(123, foo='bar')
handler2.assert_not_called()
es -= handler1
handler1.reset_mock()
es.fire('blah', kwarg=None)
handler1.assert_not_called()
handler2.assert_not_called()
es -= handler1 # should succeed silently
handler_bang = MagicMock(side_effect=Exception('bang'))
es += handler_bang
with pytest.raises(Exception) as ex:
es.fire(123)
assert 'bang' in str(ex.value)
handler_bang.assert_called_with(123)
with pytest.raises(ValueError):
es += 42
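# _EventSource is the same mechanism that backs AnsibleCollectionConfig.on_collection_load
# (exercised in test_on_collection_load below): handlers subscribe with `+=`, are removed with
# `-=`, and receive whatever positional/keyword arguments the firing side passes to fire().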
def test_on_collection_load():
finder = get_default_finder()
reset_collections_loader_state(finder)
load_handler = MagicMock()
AnsibleCollectionConfig.on_collection_load += load_handler
m = import_module('ansible_collections.testns.testcoll')
load_handler.assert_called_once_with(collection_name='testns.testcoll', collection_path=os.path.dirname(m.__file__))
_meta = _get_collection_metadata('testns.testcoll')
assert _meta
# FIXME: compare to disk
finder = get_default_finder()
reset_collections_loader_state(finder)
AnsibleCollectionConfig.on_collection_load += MagicMock(side_effect=Exception('bang'))
with pytest.raises(Exception) as ex:
import_module('ansible_collections.testns.testcoll')
assert 'bang' in str(ex.value)
def test_default_collection_config():
finder = get_default_finder()
reset_collections_loader_state(finder)
assert AnsibleCollectionConfig.default_collection is None
AnsibleCollectionConfig.default_collection = 'foo.bar'
assert AnsibleCollectionConfig.default_collection == 'foo.bar'
def test_default_collection_detection():
finder = get_default_finder()
reset_collections_loader_state(finder)
# we're clearly not under a collection path
assert _get_collection_name_from_path('/') is None
# something that looks like a collection path but isn't importable by our finder
assert _get_collection_name_from_path('/foo/ansible_collections/bogusns/boguscoll/bar') is None
# legit, at the top of the collection
live_collection_path = os.path.join(os.path.dirname(__file__), 'fixtures/collections/ansible_collections/testns/testcoll')
assert _get_collection_name_from_path(live_collection_path) == 'testns.testcoll'
# legit, deeper inside the collection
live_collection_deep_path = os.path.join(live_collection_path, 'plugins/modules')
assert _get_collection_name_from_path(live_collection_deep_path) == 'testns.testcoll'
# this one should be hidden by the real testns.testcoll, so should not resolve
masked_collection_path = os.path.join(os.path.dirname(__file__), 'fixtures/collections_masked/ansible_collections/testns/testcoll')
assert _get_collection_name_from_path(masked_collection_path) is None
@pytest.mark.parametrize(
'role_name,collection_list,expected_collection_name,expected_path_suffix',
[
('some_role', ['testns.testcoll', 'ansible.bogus'], 'testns.testcoll', 'testns/testcoll/roles/some_role'),
('testns.testcoll.some_role', ['ansible.bogus', 'testns.testcoll'], 'testns.testcoll', 'testns/testcoll/roles/some_role'),
('testns.testcoll.some_role', [], 'testns.testcoll', 'testns/testcoll/roles/some_role'),
('testns.testcoll.some_role', None, 'testns.testcoll', 'testns/testcoll/roles/some_role'),
('some_role', [], None, None),
('some_role', None, None, None),
])
def test_collection_role_name_location(role_name, collection_list, expected_collection_name, expected_path_suffix):
finder = get_default_finder()
reset_collections_loader_state(finder)
expected_path = None
if expected_path_suffix:
expected_path = os.path.join(os.path.dirname(__file__), 'fixtures/collections/ansible_collections', expected_path_suffix)
found = _get_collection_role_path(role_name, collection_list)
if found:
assert found[0] == role_name.rpartition('.')[2]
assert found[1] == expected_path
assert found[2] == expected_collection_name
else:
assert expected_collection_name is None and expected_path_suffix is None
def test_bogus_imports():
finder = get_default_finder()
reset_collections_loader_state(finder)
# ensure ImportError on known-bogus imports
bogus_imports = ['bogus_toplevel', 'ansible_collections.bogusns', 'ansible_collections.testns.boguscoll',
'ansible_collections.testns.testcoll.bogussub', 'ansible_collections.ansible.builtin.bogussub']
for bogus_import in bogus_imports:
with pytest.raises(ImportError):
import_module(bogus_import)
def test_empty_vs_no_code():
finder = get_default_finder()
reset_collections_loader_state(finder)
from ansible_collections.testns import testcoll # synthetic package with no code on disk
from ansible_collections.testns.testcoll.plugins import module_utils # real package with empty code file
# ensure synthetic packages have no code object at all (prevent bogus coverage entries)
assert testcoll.__loader__.get_source(testcoll.__name__) is None
assert testcoll.__loader__.get_code(testcoll.__name__) is None
# ensure empty package inits do have a code object
assert module_utils.__loader__.get_source(module_utils.__name__) == b''
assert module_utils.__loader__.get_code(module_utils.__name__) is not None
def test_finder_playbook_paths():
finder = get_default_finder()
reset_collections_loader_state(finder)
import ansible_collections
import ansible_collections.ansible
import ansible_collections.testns
# ensure the package modules look like we expect
assert hasattr(ansible_collections, '__path__') and len(ansible_collections.__path__) > 0
assert hasattr(ansible_collections.ansible, '__path__') and len(ansible_collections.ansible.__path__) > 0
assert hasattr(ansible_collections.testns, '__path__') and len(ansible_collections.testns.__path__) > 0
# these shouldn't be visible yet, since we haven't added the playbook dir
with pytest.raises(ImportError):
import ansible_collections.ansible.playbook_adj_other
with pytest.raises(ImportError):
import ansible_collections.testns.playbook_adj_other
assert AnsibleCollectionConfig.playbook_paths == []
playbook_path_fixture_dir = os.path.join(os.path.dirname(__file__), 'fixtures/playbook_path')
# configure the playbook paths
AnsibleCollectionConfig.playbook_paths = [playbook_path_fixture_dir]
# playbook paths go to the front of the line
assert AnsibleCollectionConfig.collection_paths[0] == os.path.join(playbook_path_fixture_dir, 'collections')
# playbook paths should be updated on the existing root ansible_collections path, as well as on the 'ansible' namespace (but no others!)
assert ansible_collections.__path__[0] == os.path.join(playbook_path_fixture_dir, 'collections/ansible_collections')
assert ansible_collections.ansible.__path__[0] == os.path.join(playbook_path_fixture_dir, 'collections/ansible_collections/ansible')
assert all('playbook_path' not in p for p in ansible_collections.testns.__path__)
# should succeed since we fixed up the package path
import ansible_collections.ansible.playbook_adj_other
# should succeed since we didn't import freshns before hacking in the path
import ansible_collections.freshns.playbook_adj_other
# should fail since we've already imported something from this path and didn't fix up its package path
with pytest.raises(ImportError):
import ansible_collections.testns.playbook_adj_other
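# Descriptive note (not part of the original suite): the behaviour above mirrors playbook-adjacent
# collection handling at runtime -- <playbook_dir>/collections is prepended to the search path,
# but packages imported before the paths were configured keep their old __path__ and stay invisible.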
def test_toplevel_iter_modules():
finder = get_default_finder()
reset_collections_loader_state(finder)
modules = list(pkgutil.iter_modules(default_test_collection_paths, ''))
assert len(modules) == 1
assert modules[0][1] == 'ansible_collections'
def test_iter_modules_namespaces():
finder = get_default_finder()
reset_collections_loader_state(finder)
paths = extend_paths(default_test_collection_paths, 'ansible_collections')
modules = list(pkgutil.iter_modules(paths, 'ansible_collections.'))
assert len(modules) == 2
assert all(m[2] is True for m in modules)
assert all(isinstance(m[0], _AnsiblePathHookFinder) for m in modules)
assert set(['ansible_collections.testns', 'ansible_collections.ansible']) == set(m[1] for m in modules)
def test_collection_get_data():
finder = get_default_finder()
reset_collections_loader_state(finder)
# something that's there
d = pkgutil.get_data('ansible_collections.testns.testcoll', 'plugins/action/my_action.py')
assert b'hello from my_action.py' in d
# something that's not there
d = pkgutil.get_data('ansible_collections.testns.testcoll', 'bogus/bogus')
assert d is None
with pytest.raises(ValueError):
plugins_pkg = import_module('ansible_collections.ansible.builtin')
assert not os.path.exists(os.path.dirname(plugins_pkg.__file__))
d = pkgutil.get_data('ansible_collections.ansible.builtin', 'plugins/connection/local.py')
@pytest.mark.parametrize(
'ref,ref_type,expected_collection,expected_subdirs,expected_resource,expected_python_pkg_name',
[
('ns.coll.myaction', 'action', 'ns.coll', '', 'myaction', 'ansible_collections.ns.coll.plugins.action'),
('ns.coll.subdir1.subdir2.myaction', 'action', 'ns.coll', 'subdir1.subdir2', 'myaction', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
('ns.coll.myrole', 'role', 'ns.coll', '', 'myrole', 'ansible_collections.ns.coll.roles.myrole'),
('ns.coll.subdir1.subdir2.myrole', 'role', 'ns.coll', 'subdir1.subdir2', 'myrole', 'ansible_collections.ns.coll.roles.subdir1.subdir2.myrole'),
])
def test_fqcr_parsing_valid(ref, ref_type, expected_collection,
expected_subdirs, expected_resource, expected_python_pkg_name):
assert AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)
r = AnsibleCollectionRef.from_fqcr(ref, ref_type)
assert r.collection == expected_collection
assert r.subdirs == expected_subdirs
assert r.resource == expected_resource
assert r.n_python_package_name == expected_python_pkg_name
r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
assert r.collection == expected_collection
assert r.subdirs == expected_subdirs
assert r.resource == expected_resource
assert r.n_python_package_name == expected_python_pkg_name
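# Illustrative sketch (not part of the original suite): resolving the Python package behind an
# action-plugin FQCR with the AnsibleCollectionRef API exercised above; the helper is hypothetical.
def _example_fqcr_to_package(fqcr):
    ref = AnsibleCollectionRef.from_fqcr(fqcr, 'action')
    return ref.n_python_package_name  # e.g. 'ansible_collections.ns.coll.plugins.action'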
@pytest.mark.parametrize(
('fqcn', 'expected'),
(
('ns1.coll2', True),
('ns1#coll2', False),
('def.coll3', False),
('ns4.return', False),
('assert.this', False),
('import.that', False),
('.that', False),
('this.', False),
('.', False),
('', False),
),
)
def test_fqcn_validation(fqcn, expected):
"""Vefiry that is_valid_collection_name validates FQCN correctly."""
assert AnsibleCollectionRef.is_valid_collection_name(fqcn) is expected
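# Illustrative sketch (not part of the original suite): a typical guard built on the validator
# tested above; the helper name and error text are hypothetical.
def _example_require_valid_fqcn(fqcn):
    if not AnsibleCollectionRef.is_valid_collection_name(fqcn):
        raise ValueError('%s is not a valid collection name' % fqcn)
    return fqcn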
@pytest.mark.parametrize(
'ref,ref_type,expected_error_type,expected_error_expression',
[
('no_dots_at_all_action', 'action', ValueError, 'is not a valid collection reference'),
('no_nscoll.myaction', 'action', ValueError, 'is not a valid collection reference'),
('no_nscoll%myaction', 'action', ValueError, 'is not a valid collection reference'),
('ns.coll.myaction', 'bogus', ValueError, 'invalid collection ref_type'),
])
def test_fqcr_parsing_invalid(ref, ref_type, expected_error_type, expected_error_expression):
assert not AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)
with pytest.raises(expected_error_type) as curerr:
AnsibleCollectionRef.from_fqcr(ref, ref_type)
assert re.search(expected_error_expression, str(curerr.value))
r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
assert r is None
@pytest.mark.parametrize(
'name,subdirs,resource,ref_type,python_pkg_name',
[
('ns.coll', None, 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments'),
('ns.coll', 'subdir1', 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments.subdir1'),
('ns.coll', 'subdir1.subdir2', 'res', 'action', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
])
def test_collectionref_components_valid(name, subdirs, resource, ref_type, python_pkg_name):
x = AnsibleCollectionRef(name, subdirs, resource, ref_type)
assert x.collection == name
if subdirs:
assert x.subdirs == subdirs
else:
assert x.subdirs == ''
assert x.resource == resource
assert x.ref_type == ref_type
assert x.n_python_package_name == python_pkg_name
@pytest.mark.parametrize(
'dirname,expected_result',
[
('become_plugins', 'become'),
('cache_plugins', 'cache'),
('connection_plugins', 'connection'),
('library', 'modules'),
('filter_plugins', 'filter'),
('bogus_plugins', ValueError),
(None, ValueError)
]
)
def test_legacy_plugin_dir_to_plugin_type(dirname, expected_result):
if isinstance(expected_result, str):
assert AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(dirname) == expected_result
else:
with pytest.raises(expected_result):
AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(dirname)
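# Illustrative sketch (not part of the original suite): mapping legacy role-adjacent plugin
# directories to plugin types with the API tested above; the helper name is hypothetical.
def _example_legacy_dir_mapping():
    return {d: AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(d)
            for d in ('library', 'filter_plugins', 'connection_plugins')}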
@pytest.mark.parametrize(
'name,subdirs,resource,ref_type,expected_error_type,expected_error_expression',
[
('bad_ns', '', 'resource', 'action', ValueError, 'invalid collection name'),
('ns.coll.', '', 'resource', 'action', ValueError, 'invalid collection name'),
('ns.coll', 'badsubdir#', 'resource', 'action', ValueError, 'invalid subdirs entry'),
('ns.coll', 'badsubdir.', 'resource', 'action', ValueError, 'invalid subdirs entry'),
('ns.coll', '.badsubdir', 'resource', 'action', ValueError, 'invalid subdirs entry'),
('ns.coll', '', 'resource', 'bogus', ValueError, 'invalid collection ref_type'),
])
def test_collectionref_components_invalid(name, subdirs, resource, ref_type, expected_error_type, expected_error_expression):
with pytest.raises(expected_error_type) as curerr:
AnsibleCollectionRef(name, subdirs, resource, ref_type)
assert re.search(expected_error_expression, str(curerr.value))
def test_importlib_resources():
from importlib.resources import files
from pathlib import Path
f = get_default_finder()
reset_collections_loader_state(f)
ansible_collections_ns = files('ansible_collections')
ansible_ns = files('ansible_collections.ansible')
testns = files('ansible_collections.testns')
testcoll = files('ansible_collections.testns.testcoll')
testcoll2 = files('ansible_collections.testns.testcoll2')
module_utils = files('ansible_collections.testns.testcoll.plugins.module_utils')
assert isinstance(ansible_collections_ns, _AnsibleNSTraversable)
assert isinstance(ansible_ns, _AnsibleNSTraversable)
assert isinstance(testcoll, Path)
assert isinstance(module_utils, Path)
assert ansible_collections_ns.is_dir()
assert ansible_ns.is_dir()
assert testcoll.is_dir()
assert module_utils.is_dir()
first_path = Path(default_test_collection_paths[0])
second_path = Path(default_test_collection_paths[1])
testns_paths = []
ansible_ns_paths = []
for path in default_test_collection_paths[:2]:
ansible_ns_paths.append(Path(path) / 'ansible_collections' / 'ansible')
testns_paths.append(Path(path) / 'ansible_collections' / 'testns')
assert testns._paths == testns_paths
# NOTE: The next two asserts check for subsets to accommodate running the unit tests when externally installed collections are available.
assert set(ansible_ns_paths).issubset(ansible_ns._paths)
assert set(Path(p) / 'ansible_collections' for p in default_test_collection_paths[:2]).issubset(ansible_collections_ns._paths)
assert testcoll2 == second_path / 'ansible_collections' / 'testns' / 'testcoll2'
assert {p.name for p in module_utils.glob('*.py')} == {'__init__.py', 'my_other_util.py', 'my_util.py'}
nestcoll_mu_init = first_path / 'ansible_collections' / 'testns' / 'testcoll' / 'plugins' / 'module_utils' / '__init__.py'
assert next(module_utils.glob('__init__.py')) == nestcoll_mu_init
# BEGIN TEST SUPPORT
default_test_collection_paths = [
os.path.join(os.path.dirname(__file__), 'fixtures', 'collections'),
os.path.join(os.path.dirname(__file__), 'fixtures', 'collections_masked'),
'/bogus/bogussub'
]
def get_default_finder():
return _AnsibleCollectionFinder(paths=default_test_collection_paths)
def extend_paths(path_list, suffix):
suffix = suffix.replace('.', '/')
return [os.path.join(p, suffix) for p in path_list]
def nuke_module_prefix(prefix):
for module_to_nuke in [m for m in sys.modules if m.startswith(prefix)]:
sys.modules.pop(module_to_nuke)
def reset_collections_loader_state(metapath_finder=None):
_AnsibleCollectionFinder._remove()
nuke_module_prefix('ansible_collections')
nuke_module_prefix('ansible.modules')
nuke_module_prefix('ansible.plugins')
# FIXME: better to move this someplace else that gets cleaned up automatically?
_AnsibleCollectionLoader._redirected_package_map = {}
AnsibleCollectionConfig._default_collection = None
AnsibleCollectionConfig._on_collection_load = _EventSource()
if metapath_finder:
metapath_finder._install()
| 41,323 | Python | .py | 718 | 50.896936 | 159 | 0.71023 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,792 | shouldnotload.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections/ansible_collections/ansible/builtin/plugins/modules/shouldnotload.py |
from __future__ import annotations # pragma: nocover
raise Exception('this module should never be loaded') # pragma: nocover
| 128 | Python | .py | 2 | 62.5 | 72 | 0.768 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,793 | my_action.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py |
from __future__ import annotations
from ..module_utils.my_util import question # pylint: disable=unused-import
def action_code():
raise Exception('hello from my_action.py, this code should never execute') # pragma: nocover
| 232 | Python | .py | 4 | 55.25 | 97 | 0.764444 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,794 | amodule.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/amodule.py |
from __future__ import annotations # pragma: nocover
raise Exception('hello from amodule.py, this code should never execute') # pragma: nocover
| 148 | Python | .py | 2 | 72 | 91 | 0.770833 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,795 | __init__.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py |
from __future__ import annotations # pragma: nocover
raise Exception('this should never run') # pragma: nocover
| 116 | Python | .py | 2 | 56 | 59 | 0.758929 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,796 | my_other_util.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py |
from __future__ import annotations
from .my_util import question # pylint: disable=unused-import
| 99 | Python | .py | 2 | 48 | 62 | 0.791667 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,797 | my_util.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py |
# WARNING: Changing line numbers of code in this file will break collection tests that use tracing to check paths and line numbers.
# Also, do not import annotations from __future__ as this will break detection of __future__ inheritance.
def question() -> float:
return 3 / 2
| 291 | Python | .py | 4 | 70.25 | 131 | 0.733333 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,798 | __init__.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py |
from __future__ import annotations # pragma: nocover
raise Exception('this code should never execute') # pragma: nocover
| 125 | Python | .py | 2 | 60.5 | 68 | 0.768595 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |
| 13,799 | __init__.py | ansible_ansible/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py |
from __future__ import annotations # pragma: nocover
raise Exception('this code should never execute') # pragma: nocover
| 125 | Python | .py | 2 | 60.5 | 68 | 0.768595 | ansible/ansible | 62,258 | 23,791 | 861 | GPL-3.0 | 9/5/2024, 5:11:58 PM (Europe/Amsterdam) |