From 676b731e6f7d60ce6fd48c0d1c883fc85f5c6537 Mon Sep 17 00:00:00 2001
From: Matt Clay <matt@mystile.com>
Date: Fri, 14 Apr 2023 15:13:58 -0700
Subject: [PATCH] ansible-test - Replace pytest-forked (#80525)

- Unit tests now report warnings generated during test runs.
- Python 3.12 warnings about `os.fork` usage with threads (due to `pytest-xdist`) are suppressed.
- Added integration tests to verify forked test behavior.
---
 .../fragments/ansible-test-pytest-forked.yml | 5 +
 .../ansible-test-units-assertions/runme.sh | 2 +-
 .../targets/ansible-test-units-forked/aliases | 5 +
 .../plugins/modules/test_ansible_forked.py | 43 ++++++++
 .../ansible-test-units-forked/runme.sh | 45 ++++++++
 .../ansible_test/_data/requirements/units.txt | 1 -
 .../_internal/commands/units/__init__.py | 3 +-
 .../pylint/config/ansible-test-target.cfg | 2 +
 .../target/pytest/plugins/ansible_forked.py | 103 ++++++++++++++++++
 9 files changed, 206 insertions(+), 3 deletions(-)
 create mode 100644 changelogs/fragments/ansible-test-pytest-forked.yml
 create mode 100644 test/integration/targets/ansible-test-units-forked/aliases
 create mode 100644 test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py
 create mode 100755 test/integration/targets/ansible-test-units-forked/runme.sh
 create mode 100644 test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py

diff --git a/changelogs/fragments/ansible-test-pytest-forked.yml b/changelogs/fragments/ansible-test-pytest-forked.yml
new file mode 100644
index 00000000000000..f8fae8139460a3
--- /dev/null
+++ b/changelogs/fragments/ansible-test-pytest-forked.yml
@@ -0,0 +1,5 @@
+minor_changes:
+  - ansible-test - Replace the ``pytest-forked`` pytest plugin with a custom plugin.
+bugfixes:
+  - ansible-test - Unit tests now report warnings generated during test runs.
+    Previously only warnings generated during test collection were reported.
diff --git a/test/integration/targets/ansible-test-units-assertions/runme.sh b/test/integration/targets/ansible-test-units-assertions/runme.sh
index 3511e765004a73..86fe5c818112f7 100755
--- a/test/integration/targets/ansible-test-units-assertions/runme.sh
+++ b/test/integration/targets/ansible-test-units-assertions/runme.sh
@@ -8,7 +8,7 @@ options=$("${TEST_DIR}"/../ansible-test/venv-pythons.py --only-versions)
 IFS=', ' read -r -a pythons <<< "${options}"
 
 for python in "${pythons[@]}"; do
-    if ansible-test units --color --truncate 0 --python "${python}" --requirements "${@}" 2>&1 | tee pytest.log; then
+    if ansible-test units --truncate 0 --python "${python}" --requirements "${@}" 2>&1 | tee pytest.log; then
         echo "Test did not fail as expected."
         exit 1
     fi
diff --git a/test/integration/targets/ansible-test-units-forked/aliases b/test/integration/targets/ansible-test-units-forked/aliases
new file mode 100644
index 00000000000000..79d7dbd7b09e91
--- /dev/null
+++ b/test/integration/targets/ansible-test-units-forked/aliases
@@ -0,0 +1,5 @@
+shippable/posix/group3 # runs in the distro test containers
+shippable/generic/group1 # runs in the default test container
+context/controller
+needs/target/collection
+needs/target/ansible-test
diff --git a/test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py b/test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py
new file mode 100644
index 00000000000000..828099c65e4cf3
--- /dev/null
+++ b/test/integration/targets/ansible-test-units-forked/ansible_collections/ns/col/tests/unit/plugins/modules/test_ansible_forked.py
@@ -0,0 +1,43 @@
+"""Unit tests to verify the functionality of the ansible-forked pytest plugin."""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import os
+import pytest
+import signal
+import sys
+import warnings
+
+
+warnings.warn("This verifies that warnings generated during test collection are reported.")
+
+
+@pytest.mark.xfail
+def test_kill_xfail():
+    os.kill(os.getpid(), signal.SIGKILL)  # causes pytest to report stdout and stderr
+
+
+def test_kill():
+    os.kill(os.getpid(), signal.SIGKILL)  # causes pytest to report stdout and stderr
+
+
+@pytest.mark.xfail
+def test_exception_xfail():
+    sys.stdout.write("This stdout message should be hidden due to xfail.")
+    sys.stderr.write("This stderr message should be hidden due to xfail.")
+    raise Exception("This error is expected, but should be hidden due to xfail.")
+
+
+def test_exception():
+    sys.stdout.write("This stdout message should be reported since we're throwing an exception.")
+    sys.stderr.write("This stderr message should be reported since we're throwing an exception.")
+    raise Exception("This error is expected and should be visible.")
+
+
+def test_warning():
+    warnings.warn("This verifies that warnings generated at test time are reported.")
+
+
+def test_passed():
+    pass
diff --git a/test/integration/targets/ansible-test-units-forked/runme.sh b/test/integration/targets/ansible-test-units-forked/runme.sh
new file mode 100755
index 00000000000000..c39f3c492440e7
--- /dev/null
+++ b/test/integration/targets/ansible-test-units-forked/runme.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+
+source ../collection/setup.sh
+
+set -x
+
+options=$("${TEST_DIR}"/../ansible-test/venv-pythons.py --only-versions)
+IFS=', ' read -r -a pythons <<< "${options}"
+
+for python in "${pythons[@]}"; do
+    echo "*** Checking Python ${python} ***"
+
+    if ansible-test units --truncate 0 --target-python "venv/${python}" "${@}" > output.log 2>&1 ; then
+        cat output.log
+        echo "Unit tests on Python ${python} did not fail as expected. See output above."
+        exit 1
+    fi
+
+    cat output.log
+    echo "Unit tests on Python ${python} failed as expected. See output above. Checking for expected output ..."
+
+    # Verify that the appropriate tests passed, failed or xfailed.
+    grep 'PASSED tests/unit/plugins/modules/test_ansible_forked.py::test_passed' output.log
+    grep 'PASSED tests/unit/plugins/modules/test_ansible_forked.py::test_warning' output.log
+    grep 'XFAIL tests/unit/plugins/modules/test_ansible_forked.py::test_kill_xfail' output.log
+    grep 'FAILED tests/unit/plugins/modules/test_ansible_forked.py::test_kill' output.log
+    grep 'FAILED tests/unit/plugins/modules/test_ansible_forked.py::test_exception' output.log
+    grep 'XFAIL tests/unit/plugins/modules/test_ansible_forked.py::test_exception_xfail' output.log
+
+    # Verify that warnings are properly surfaced.
+    grep 'UserWarning: This verifies that warnings generated at test time are reported.' output.log
+    grep 'UserWarning: This verifies that warnings generated during test collection are reported.' output.log
+
+    # Verify there are no unexpected warnings.
+    grep 'Warning' output.log | grep -v 'UserWarning: This verifies that warnings generated ' && exit 1
+
+    # Verify that details from failed tests are properly surfaced.
+    grep "^Test CRASHED with exit code -9.$" output.log
+    grep "^This stdout message should be reported since we're throwing an exception.$" output.log
+    grep "^This stderr message should be reported since we're throwing an exception.$" output.log
+    grep '^> *raise Exception("This error is expected and should be visible.")$' output.log
+    grep "^E *Exception: This error is expected and should be visible.$" output.log
+
+    echo "*** Done Checking Python ${python} ***"
+done
diff --git a/test/lib/ansible_test/_data/requirements/units.txt b/test/lib/ansible_test/_data/requirements/units.txt
index d2f56d35a92a5c..d723a65fc663d0 100644
--- a/test/lib/ansible_test/_data/requirements/units.txt
+++ b/test/lib/ansible_test/_data/requirements/units.txt
@@ -2,5 +2,4 @@ mock
 pytest
 pytest-mock
 pytest-xdist
-pytest-forked
 pyyaml # required by the collection loader (only needed for collections)
diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py
index 7d192e1be67148..78dd8498156615 100644
--- a/test/lib/ansible_test/_internal/commands/units/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/units/__init__.py
@@ -253,7 +253,6 @@ def command_units(args: UnitsConfig) -> None:
 
         cmd = [
             'pytest',
-            '--forked',
             '-r', 'a',
             '-n', str(args.num_workers) if args.num_workers else 'auto',
             '--color', 'yes' if args.color else 'no',
@@ -275,6 +274,8 @@ def command_units(args: UnitsConfig) -> None:
         if data_context().content.collection:
             plugins.append('ansible_pytest_collections')
 
+        plugins.append('ansible_forked')
+
         if plugins:
             env['PYTHONPATH'] += ':%s' % os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'pytest/plugins')
             env['PYTEST_PLUGINS'] = ','.join(plugins)
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
index e35301dd81c1bd..f8a0a8af3ff21c 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
@@ -57,3 +57,5 @@ preferred-modules =
 # Listing them here makes it possible to enable the import-error check.
 ignored-modules =
     py,
+    pytest,
+    _pytest.runner,
diff --git a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py
new file mode 100644
index 00000000000000..d00d9e93d1be00
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_forked.py
@@ -0,0 +1,103 @@
+"""Run each test in its own fork. PYTEST_DONT_REWRITE"""
+# MIT License (see licenses/MIT-license.txt or https://opensource.org/licenses/MIT)
+# Based on code originally from:
+# https://github.com/pytest-dev/pytest-forked
+# https://github.com/pytest-dev/py
+# TIP: Disable pytest-xdist when debugging internal errors in this plugin.
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import os
+import pickle
+import tempfile
+import warnings
+
+from pytest import Item, hookimpl
+
+try:
+    from pytest import TestReport
+except ImportError:
+    from _pytest.runner import TestReport  # Backwards compatibility with pytest < 7. Remove once Python 2.7 is not supported.
+
+from _pytest.runner import runtestprotocol
+
+
+@hookimpl(tryfirst=True)
+def pytest_runtest_protocol(item, nextitem):  # type: (Item, Item | None) -> object | None
+    """Entry point for enabling this plugin."""
+    # This is needed because pytest-xdist creates an OS thread (using execnet).
+    # See: https://github.com/pytest-dev/execnet/blob/d6aa1a56773c2e887515d63e50b1d08338cb78a7/execnet/gateway_base.py#L51
+    warnings.filterwarnings("ignore", "^This process .* is multi-threaded, use of .* may lead to deadlocks in the child.$", DeprecationWarning)
+
+    item_hook = item.ihook
+    item_hook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location)
+
+    reports = run_item(item, nextitem)
+
+    for report in reports:
+        item_hook.pytest_runtest_logreport(report=report)
+
+    item_hook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location)
+
+    return True
+
+
+def run_item(item, nextitem):  # type: (Item, Item | None) -> list[TestReport]
+    """Run the item in a child process and return a list of reports."""
+    with tempfile.NamedTemporaryFile() as temp_file:
+        pid = os.fork()
+
+        if not pid:
+            temp_file.delete = False
+            run_child(item, nextitem, temp_file.name)
+
+        return run_parent(item, pid, temp_file.name)
+
+
+def run_child(item, nextitem, result_path):  # type: (Item, Item | None, str) -> None
+    """Run the item, record the result and exit. Called in the child process."""
+    with warnings.catch_warnings(record=True) as captured_warnings:
+        reports = runtestprotocol(item, nextitem=nextitem, log=False)
+
+    with open(result_path, "wb") as result_file:
+        pickle.dump((reports, captured_warnings), result_file)
+
+    os._exit(0)  # noqa
+
+
+def run_parent(item, pid, result_path):  # type: (Item, int, str) -> list[TestReport]
+    """Wait for the child process to exit and return the test reports. Called in the parent process."""
+    exit_code = waitstatus_to_exitcode(os.waitpid(pid, 0)[1])
+
+    if exit_code:
+        reason = "Test CRASHED with exit code {}.".format(exit_code)
+        report = TestReport(item.nodeid, item.location, {x: 1 for x in item.keywords}, "failed", reason, "call", user_properties=item.user_properties)
+
+        if item.get_closest_marker("xfail"):
+            report.outcome = "skipped"
+            report.wasxfail = reason
+
+        reports = [report]
+    else:
+        with open(result_path, "rb") as result_file:
+            reports, captured_warnings = pickle.load(result_file)  # type: list[TestReport], list[warnings.WarningMessage]
+
+        for warning in captured_warnings:
+            warnings.warn_explicit(warning.message, warning.category, warning.filename, warning.lineno)
+
+    return reports
+
+
+def waitstatus_to_exitcode(status):  # type: (int) -> int
+    """Convert a wait status to an exit code."""
+    # This function was added in Python 3.9.
+    # See: https://docs.python.org/3/library/os.html#os.waitstatus_to_exitcode
+
+    if os.WIFEXITED(status):
+        return os.WEXITSTATUS(status)
+
+    if os.WIFSIGNALED(status):
+        return -os.WTERMSIG(status)
+
+    raise ValueError(status)
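
For readers unfamiliar with the fork-per-test pattern the ansible_forked plugin relies on, the short standalone sketch below (not part of the patch; the helper name run_forked is made up for illustration) shows the same core mechanism: fork a child, run the work there, hand the pickled result back through a temporary file, and have the parent translate an abnormal wait status, such as death by SIGKILL, into a "crashed" result instead of losing the whole session. It assumes a POSIX system and Python 3.9+ for os.waitstatus_to_exitcode; the plugin above carries its own fallback for older interpreters.

    # Simplified, hypothetical sketch of the fork-and-report pattern used above.
    import os
    import pickle
    import signal
    import tempfile


    def run_forked(func):
        """Run func() in a forked child; return ("ok", result) or ("crashed", exit_code)."""
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            result_path = temp_file.name  # the child writes its pickled result here

        pid = os.fork()

        if pid == 0:  # child process: run the callable, record the outcome, exit immediately
            try:
                result = func()
                with open(result_path, "wb") as result_file:
                    pickle.dump(result, result_file)
                os._exit(0)  # skip atexit handlers and finalizers, mirroring the plugin's use of os._exit
            except BaseException:
                os._exit(1)

        # parent process: wait for the child and convert its wait status to an exit code
        status = os.waitpid(pid, 0)[1]
        exit_code = os.waitstatus_to_exitcode(status)  # Python 3.9+

        try:
            if exit_code != 0:
                return "crashed", exit_code  # e.g. -9 when the child was killed by SIGKILL
            with open(result_path, "rb") as result_file:
                return "ok", pickle.load(result_file)
        finally:
            os.unlink(result_path)


    if __name__ == "__main__":
        print(run_forked(lambda: 21 * 2))                                 # ("ok", 42)
        print(run_forked(lambda: os.kill(os.getpid(), signal.SIGKILL)))   # ("crashed", -9)

The plugin does the same thing with pytest's runtestprotocol() in place of func(), pickling the resulting TestReport objects plus any warnings captured in the child so the parent can replay them, which is how warnings raised during test runs now reach the report.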