diff --git a/changelogs/fragments/72497-ansible-test-import-plugins.yml b/changelogs/fragments/72497-ansible-test-import-plugins.yml new file mode 100644 index 00000000000..1c05068fe2d --- /dev/null +++ b/changelogs/fragments/72497-ansible-test-import-plugins.yml @@ -0,0 +1,2 @@ +minor_changes: +- "ansible-test - the ``import`` sanity test now also tries to import all non-module and non-module_utils Python files in ``lib/ansible/`` or ``plugins/``, respectively (https://github.com/ansible/ansible/pull/72497)." diff --git a/docs/docsite/rst/dev_guide/testing/sanity/ansible-requirements.rst b/docs/docsite/rst/dev_guide/testing/sanity/ansible-requirements.rst new file mode 100644 index 00000000000..f348b07f00b --- /dev/null +++ b/docs/docsite/rst/dev_guide/testing/sanity/ansible-requirements.rst @@ -0,0 +1,4 @@ +ansible-requirements +==================== + +``test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt`` must be an identical copy of ``requirements.txt`` found in the project's root. diff --git a/test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt b/test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt new file mode 100644 index 00000000000..40cf83a6475 --- /dev/null +++ b/test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt @@ -0,0 +1,13 @@ +# Note: this requirements.txt file is used to specify what dependencies are +# needed to make the package run rather than for deployment of a tested set of +# packages. 
Thus, this should be the loosest set possible (only required +# packages, not optional ones, and with the widest range of versions that could +# be suitable) +jinja2 +PyYAML +cryptography +packaging +# NOTE: resolvelib 0.x version bumps should be considered major/breaking +# NOTE: and we should update the upper cap with care, at least until 1.0 +# NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69 +resolvelib >= 0.5.3, < 0.6.0 # dependency resolver used by ansible-galaxy diff --git a/test/lib/ansible_test/_data/sanity/import/importer.py b/test/lib/ansible_test/_data/sanity/import/importer.py index e23e28e0181..5fae766c68c 100755 --- a/test/lib/ansible_test/_data/sanity/import/importer.py +++ b/test/lib/ansible_test/_data/sanity/import/importer.py @@ -27,6 +27,7 @@ def main(): external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') or sys.executable collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME') collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH') + import_type = os.environ.get('SANITY_IMPORTER_TYPE') try: # noinspection PyCompatibility @@ -103,15 +104,16 @@ def main(): # remove all modules under the ansible package list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__])) - # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading - # this more accurately reflects the environment that AnsiballZ runs modules under - # it also avoids issues with imports in the ansible package that are not allowed - ansible_module = types.ModuleType(ansible.__name__) - ansible_module.__file__ = ansible.__file__ - ansible_module.__path__ = ansible.__path__ - ansible_module.__package__ = ansible.__package__ + if import_type == 'module': + # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading + # this more accurately reflects the environment that AnsiballZ runs modules under + # it also avoids issues with imports in the ansible package that are 
not allowed + ansible_module = types.ModuleType(ansible.__name__) + ansible_module.__file__ = ansible.__file__ + ansible_module.__path__ = ansible.__path__ + ansible_module.__package__ = ansible.__package__ - sys.modules[ansible.__name__] = ansible_module + sys.modules[ansible.__name__] = ansible_module class ImporterAnsibleModuleException(Exception): """Exception thrown during initialization of ImporterAnsibleModule.""" @@ -123,10 +125,11 @@ def main(): class RestrictedModuleLoader: """Python module loader that restricts inappropriate imports.""" - def __init__(self, path, name): + def __init__(self, path, name, restrict_to_module_paths): self.path = path self.name = name self.loaded_modules = set() + self.restrict_to_module_paths = restrict_to_module_paths def find_module(self, fullname, path=None): """Return self if the given fullname is restricted, otherwise return None. @@ -138,6 +141,9 @@ def main(): return None # ignore modules that are already being loaded if is_name_in_namepace(fullname, ['ansible']): + if not self.restrict_to_module_paths: + return None # for non-modules, everything in the ansible namespace is allowed + if fullname in ('ansible.module_utils.basic', 'ansible.module_utils.common.removed'): return self # intercept loading so we can modify the result @@ -153,6 +159,9 @@ def main(): if not collection_loader: return self # restrict access to collections when we are not testing a collection + if not self.restrict_to_module_paths: + return None # for non-modules, everything in the ansible namespace is allowed + if is_name_in_namepace(fullname, ['ansible_collections...plugins.module_utils', self.name]): return None # module_utils and module under test are always allowed @@ -196,24 +205,25 @@ def main(): self.loaded_modules.add(fullname) return import_module(fullname) - def run(): + def run(restrict_to_module_paths): """Main program function.""" base_dir = os.getcwd() messages = set() for path in sys.argv[1:] or sys.stdin.read().splitlines(): name 
= convert_relative_path_to_name(path) - test_python_module(path, name, base_dir, messages) + test_python_module(path, name, base_dir, messages, restrict_to_module_paths) if messages: sys.exit(10) - def test_python_module(path, name, base_dir, messages): + def test_python_module(path, name, base_dir, messages, restrict_to_module_paths): """Test the given python module by importing it. :type path: str :type name: str :type base_dir: str :type messages: set[str] + :type restrict_to_module_paths: bool """ if name in sys.modules: return # cannot be tested because it has already been loaded @@ -230,13 +240,13 @@ def main(): try: with monitor_sys_modules(path, messages): - with restrict_imports(path, name, messages): + with restrict_imports(path, name, messages, restrict_to_module_paths): with capture_output(capture_normal): import_module(name) if run_main: with monitor_sys_modules(path, messages): - with restrict_imports(path, name, messages): + with restrict_imports(path, name, messages, restrict_to_module_paths): with capture_output(capture_main): runpy.run_module(name, run_name='__main__', alter_sys=True) except ImporterAnsibleModuleException: @@ -398,13 +408,14 @@ def main(): print(message) @contextlib.contextmanager - def restrict_imports(path, name, messages): + def restrict_imports(path, name, messages, restrict_to_module_paths): """Restrict available imports. 
:type path: str :type name: str :type messages: set[str] + :type restrict_to_module_paths: bool """ - restricted_loader = RestrictedModuleLoader(path, name) + restricted_loader = RestrictedModuleLoader(path, name, restrict_to_module_paths) # noinspection PyTypeChecker sys.meta_path.insert(0, restricted_loader) @@ -413,6 +424,10 @@ def main(): try: yield finally: + if import_type == 'plugin': + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder + _AnsibleCollectionFinder._remove() # pylint: disable=protected-access + if sys.meta_path[0] != restricted_loader: report_message(path, 0, 0, 'metapath', 'changes to sys.meta_path[0] are not permitted', messages) @@ -457,6 +472,26 @@ def main(): with warnings.catch_warnings(): warnings.simplefilter('error') + if sys.version_info[0] == 2: + warnings.filterwarnings( + "ignore", + "Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography," + " and will be removed in a future release.") + warnings.filterwarnings( + "ignore", + "Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography," + " and will be removed in the next release.") + if sys.version_info[:2] == (3, 5): + warnings.filterwarnings( + "ignore", + "Python 3.5 support will be dropped in the next release ofcryptography. Please upgrade your Python.") + warnings.filterwarnings( + "ignore", + "Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.") + warnings.filterwarnings( + "ignore", + "The _yaml extension module is now located at yaml._yaml and its location is subject to change. 
To use the " + "LibYAML-based parser and emitter, import from `yaml`: `from yaml import CLoader as Loader, CDumper as Dumper`.") try: yield @@ -464,7 +499,7 @@ def main(): sys.stdout = old_stdout sys.stderr = old_stderr - run() + run(import_type == 'module') if __name__ == '__main__': diff --git a/test/lib/ansible_test/_internal/executor.py b/test/lib/ansible_test/_internal/executor.py index 0caf8695d4b..39c77017031 100644 --- a/test/lib/ansible_test/_internal/executor.py +++ b/test/lib/ansible_test/_internal/executor.py @@ -9,7 +9,6 @@ import re import time import textwrap import functools -import hashlib import difflib import filecmp import random @@ -44,7 +43,6 @@ from .cloud import ( from .io import ( make_dirs, open_text_file, - read_binary_file, read_text_file, write_text_file, ) @@ -70,6 +68,7 @@ from .util import ( SUPPORTED_PYTHON_VERSIONS, str_to_version, version_to_str, + get_hash, ) from .util_common import ( @@ -2026,7 +2025,7 @@ class EnvironmentDescription: pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions)) program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v]) pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v]) - known_hosts_hash = self.get_hash(os.path.expanduser('~/.ssh/known_hosts')) + known_hosts_hash = get_hash(os.path.expanduser('~/.ssh/known_hosts')) for version in sorted(versions): self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings) @@ -2166,21 +2165,6 @@ class EnvironmentDescription: with open_text_file(path) as script_fd: return script_fd.readline().strip() - @staticmethod - def get_hash(path): - """ - :type path: str - :rtype: str | None - """ - if not os.path.exists(path): - return None - - file_hash = hashlib.sha256() - - file_hash.update(read_binary_file(path)) - - return file_hash.hexdigest() - class 
NoChangesDetected(ApplicationWarning): """Exception when change detection was performed, but no changes were found.""" diff --git a/test/lib/ansible_test/_internal/sanity/import.py b/test/lib/ansible_test/_internal/sanity/import.py index 7d4776ae67b..3473063526c 100644 --- a/test/lib/ansible_test/_internal/sanity/import.py +++ b/test/lib/ansible_test/_internal/sanity/import.py @@ -20,6 +20,7 @@ from ..target import ( ) from ..util import ( + ANSIBLE_TEST_DATA_ROOT, SubprocessError, remove_tree, display, @@ -27,6 +28,8 @@ from ..util import ( is_subdir, generate_pip_command, find_python, + get_hash, + REMOTE_ONLY_PYTHON_VERSIONS, ) from ..util_common import ( @@ -41,6 +44,7 @@ from ..ansible_util import ( from ..executor import ( generate_pip_install, + install_cryptography, ) from ..config import ( @@ -60,12 +64,21 @@ from ..data import ( ) +def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool] + """Create a predicate which tests whether a path can be used by modules or not.""" + module_path = data_context().content.module_path + module_utils_path = data_context().content.module_utils_path + if module_restrictions: + return lambda path: is_subdir(path, module_path) or is_subdir(path, module_utils_path) + return lambda path: not (is_subdir(path, module_path) or is_subdir(path, module_utils_path)) + + class ImportTest(SanityMultipleVersion): """Sanity test for proper import exception handling.""" def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and - (is_subdir(target.path, data_context().content.module_path) or is_subdir(target.path, data_context().content.module_utils_path))] + any(is_subdir(target.path, path) for path in data_context().content.plugin_paths.values())] def test(self, args, targets, python_version): """ @@ 
-92,91 +105,112 @@ class ImportTest(SanityMultipleVersion): temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import') - # create a clean virtual environment to minimize the available imports beyond the python standard library - virtual_environment_path = os.path.join(temp_root, 'minimal-py%s' % python_version.replace('.', '')) - virtual_environment_bin = os.path.join(virtual_environment_path, 'bin') + messages = [] - remove_tree(virtual_environment_path) + for import_type, test, add_ansible_requirements in ( + ('module', _get_module_test(True), False), + ('plugin', _get_module_test(False), True), + ): + if import_type == 'plugin' and python_version in REMOTE_ONLY_PYTHON_VERSIONS: + continue - if not create_virtual_environment(args, python_version, virtual_environment_path): - display.warning("Skipping sanity test '%s' on Python %s due to missing virtual environment support." % (self.name, python_version)) - return SanitySkipped(self.name, python_version) + data = '\n'.join([path for path in paths if test(path)]) + if not data: + continue - # add the importer to our virtual environment so it can be accessed through the coverage injector - importer_path = os.path.join(virtual_environment_bin, 'importer.py') - yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py') - if not args.explain: - os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path) - os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path) + requirements_file = None - # activate the virtual environment - env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH']) + # create a clean virtual environment to minimize the available imports beyond the python standard library + virtual_environment_dirname = 'minimal-py%s' % python_version.replace('.', '') + if add_ansible_requirements: + requirements_file = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.import-plugins.txt') + 
virtual_environment_dirname += '-requirements-%s' % get_hash(requirements_file) + virtual_environment_path = os.path.join(temp_root, virtual_environment_dirname) + virtual_environment_bin = os.path.join(virtual_environment_path, 'bin') - env.update( - SANITY_TEMP_PATH=ResultType.TMP.path, - ) + remove_tree(virtual_environment_path) + + if not create_virtual_environment(args, python_version, virtual_environment_path): + display.warning("Skipping sanity test '%s' on Python %s due to missing virtual environment support." % (self.name, python_version)) + return SanitySkipped(self.name, python_version) + + # add the importer to our virtual environment so it can be accessed through the coverage injector + importer_path = os.path.join(virtual_environment_bin, 'importer.py') + yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py') + if not args.explain: + os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path) + os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path) + + # activate the virtual environment + env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH']) - if data_context().content.collection: env.update( - SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name, - SANITY_EXTERNAL_PYTHON=python, + SANITY_TEMP_PATH=ResultType.TMP.path, + SANITY_IMPORTER_TYPE=import_type, ) - virtualenv_python = os.path.join(virtual_environment_bin, 'python') - virtualenv_pip = generate_pip_command(virtualenv_python) + if data_context().content.collection: + env.update( + SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name, + SANITY_EXTERNAL_PYTHON=python, + ) - # make sure coverage is available in the virtual environment if needed - if args.coverage: - run_command(args, generate_pip_install(virtualenv_pip, '', packages=['setuptools']), env=env, capture=capture_pip) - run_command(args, generate_pip_install(virtualenv_pip, '', 
packages=['coverage']), env=env, capture=capture_pip) + virtualenv_python = os.path.join(virtual_environment_bin, 'python') + virtualenv_pip = generate_pip_command(virtualenv_python) - try: - # In some environments pkg_resources is installed as a separate pip package which needs to be removed. - # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools. - # However, a venv is created with an additional pkg-resources package which is independent of setuptools. - # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv. - # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings. - # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings. - run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pkg-resources'], env=env, capture=capture_pip) - except SubprocessError: - pass + # make sure requirements are installed if needed + if requirements_file: + install_cryptography(args, virtualenv_python, python_version, virtualenv_pip) + run_command(args, generate_pip_install(virtualenv_pip, 'sanity', context='import-plugins'), env=env, capture=capture_pip) - run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env, capture=capture_pip) - run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env, capture=capture_pip) + # make sure coverage is available in the virtual environment if needed + if args.coverage: + run_command(args, generate_pip_install(virtualenv_pip, '', packages=['setuptools']), env=env, capture=capture_pip) + run_command(args, generate_pip_install(virtualenv_pip, '', packages=['coverage']), env=env, capture=capture_pip) - cmd = ['importer.py'] + try: + # In some environments pkg_resources is installed as a 
separate pip package which needs to be removed. + # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools. + # However, a venv is created with an additional pkg-resources package which is independent of setuptools. + # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv. + # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings. + # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings. + run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pkg-resources'], env=env, capture=capture_pip) + except SubprocessError: + pass - data = '\n'.join(paths) + run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env, capture=capture_pip) + run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env, capture=capture_pip) - display.info(data, verbosity=4) + display.info(import_type + ': ' + data, verbosity=4) - results = [] + cmd = ['importer.py'] - try: - with coverage_context(args): - stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version, - virtualenv=virtualenv_python) + try: + with coverage_context(args): + stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version, + virtualenv=virtualenv_python) - if stdout or stderr: - raise SubprocessError(cmd, stdout=stdout, stderr=stderr) - except SubprocessError as ex: - if ex.status != 10 or ex.stderr or not ex.stdout: - raise + if stdout or stderr: + raise SubprocessError(cmd, stdout=stdout, stderr=stderr) + except SubprocessError as ex: + if ex.status != 10 or ex.stderr or not ex.stdout: + raise - pattern = r'^(?P[^:]*):(?P[0-9]+):(?P[0-9]+): (?P.*)$' 
+ pattern = r'^(?P[^:]*):(?P[0-9]+):(?P[0-9]+): (?P.*)$' - results = parse_to_list_of_dict(pattern, ex.stdout) + parsed = parse_to_list_of_dict(pattern, ex.stdout) - relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep + relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep - results = [SanityMessage( - message=r['message'], - path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'], - line=int(r['line']), - column=int(r['column']), - ) for r in results] + messages += [SanityMessage( + message=r['message'], + path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'], + line=int(r['line']), + column=int(r['column']), + ) for r in parsed] - results = settings.process_errors(results, paths) + results = settings.process_errors(messages, paths) if results: return SanityFailure(self.name, messages=results, python_version=python_version) diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py index 14cd084a51c..ebc14783f68 100644 --- a/test/lib/ansible_test/_internal/util.py +++ b/test/lib/ansible_test/_internal/util.py @@ -5,6 +5,7 @@ __metaclass__ = type import contextlib import errno import fcntl +import hashlib import inspect import os import pkgutil @@ -53,6 +54,7 @@ from .encoding import ( from .io import ( open_binary_file, + read_binary_file, read_text_file, ) @@ -857,4 +859,19 @@ def open_zipfile(path, mode='r'): zib_obj.close() +def get_hash(path): + """ + :type path: str + :rtype: str | None + """ + if not os.path.exists(path): + return None + + file_hash = hashlib.sha256() + + file_hash.update(read_binary_file(path)) + + return file_hash.hexdigest() + + display = Display() # pylint: disable=locally-disabled, invalid-name diff --git a/test/sanity/code-smell/ansible-requirements.json b/test/sanity/code-smell/ansible-requirements.json new file 
mode 100644 index 00000000000..4bc356be187 --- /dev/null +++ b/test/sanity/code-smell/ansible-requirements.json @@ -0,0 +1,7 @@ +{ + "prefixes": [ + "requirements.txt", + "test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt" + ], + "output": "path-line-column-message" +} diff --git a/test/sanity/code-smell/ansible-requirements.py b/test/sanity/code-smell/ansible-requirements.py new file mode 100755 index 00000000000..c270b32d52d --- /dev/null +++ b/test/sanity/code-smell/ansible-requirements.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def read_file(path): + try: + with open(path, 'r') as f: + return f.read() + except Exception as ex: # pylint: disable=broad-except + print('%s:%d:%d: unable to read required file %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex)))) + return None + + +def main(): + ORIGINAL_FILE = 'requirements.txt' + VENDORED_COPY = 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt' + + original_requirements = read_file(ORIGINAL_FILE) + vendored_requirements = read_file(VENDORED_COPY) + + if original_requirements is not None and vendored_requirements is not None: + if original_requirements != vendored_requirements: + print('%s:%d:%d: must be identical to %s' % (VENDORED_COPY, 0, 0, ORIGINAL_FILE)) + + +if __name__ == '__main__': + main() diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py index 1c88995f5b8..21dea5fab2c 100755 --- a/test/sanity/code-smell/test-constraints.py +++ b/test/sanity/code-smell/test-constraints.py @@ -12,6 +12,12 @@ def main(): requirements = {} for path in sys.argv[1:] or sys.stdin.read().splitlines(): + if path == 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt': + # This file is an exact copy of requirements.txt that is used in the import + # sanity test. 
There is a code-smell test which ensures that the two files + # are identical, and it is only used inside an empty venv, so we can ignore + # it here. + continue with open(path, 'r') as path_fd: requirements[path] = parse_requirements(path_fd.read().splitlines())