Import sanity test for plugins (#72497)

This commit is contained in:
Felix Fontein 2021-02-12 23:09:50 +01:00 committed by GitHub
parent 4059b37ab1
commit 1f3a90270b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 231 additions and 98 deletions

View file

@ -0,0 +1,2 @@
minor_changes:
- "ansible-test - the ``import`` sanity test now also tries to import all non-module and non-module_utils Python files in ``lib/ansible/`` or ``plugins/``, respectively (https://github.com/ansible/ansible/pull/72497)."

View file

@ -0,0 +1,4 @@
ansible-requirements
====================
``test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt`` must be an identical copy of ``requirements.txt`` found in the project's root.

View file

@ -0,0 +1,13 @@
# Note: this requirements.txt file is used to specify what dependencies are
# needed to make the package run rather than for deployment of a tested set of
# packages. Thus, this should be the loosest set possible (only required
# packages, not optional ones, and with the widest range of versions that could
# be suitable)
jinja2
PyYAML
cryptography
packaging
# NOTE: resolvelib 0.x version bumps should be considered major/breaking
# NOTE: and we should update the upper cap with care, at least until 1.0
# NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69
resolvelib >= 0.5.3, < 0.6.0 # dependency resolver used by ansible-galaxy

View file

@ -27,6 +27,7 @@ def main():
external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') or sys.executable external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') or sys.executable
collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME') collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME')
collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH') collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH')
import_type = os.environ.get('SANITY_IMPORTER_TYPE')
try: try:
# noinspection PyCompatibility # noinspection PyCompatibility
@ -103,15 +104,16 @@ def main():
# remove all modules under the ansible package # remove all modules under the ansible package
list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__])) list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__]))
# pre-load an empty ansible package to prevent unwanted code in __init__.py from loading if import_type == 'module':
# this more accurately reflects the environment that AnsiballZ runs modules under # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading
# it also avoids issues with imports in the ansible package that are not allowed # this more accurately reflects the environment that AnsiballZ runs modules under
ansible_module = types.ModuleType(ansible.__name__) # it also avoids issues with imports in the ansible package that are not allowed
ansible_module.__file__ = ansible.__file__ ansible_module = types.ModuleType(ansible.__name__)
ansible_module.__path__ = ansible.__path__ ansible_module.__file__ = ansible.__file__
ansible_module.__package__ = ansible.__package__ ansible_module.__path__ = ansible.__path__
ansible_module.__package__ = ansible.__package__
sys.modules[ansible.__name__] = ansible_module sys.modules[ansible.__name__] = ansible_module
class ImporterAnsibleModuleException(Exception): class ImporterAnsibleModuleException(Exception):
"""Exception thrown during initialization of ImporterAnsibleModule.""" """Exception thrown during initialization of ImporterAnsibleModule."""
@ -123,10 +125,11 @@ def main():
class RestrictedModuleLoader: class RestrictedModuleLoader:
"""Python module loader that restricts inappropriate imports.""" """Python module loader that restricts inappropriate imports."""
def __init__(self, path, name): def __init__(self, path, name, restrict_to_module_paths):
self.path = path self.path = path
self.name = name self.name = name
self.loaded_modules = set() self.loaded_modules = set()
self.restrict_to_module_paths = restrict_to_module_paths
def find_module(self, fullname, path=None): def find_module(self, fullname, path=None):
"""Return self if the given fullname is restricted, otherwise return None. """Return self if the given fullname is restricted, otherwise return None.
@ -138,6 +141,9 @@ def main():
return None # ignore modules that are already being loaded return None # ignore modules that are already being loaded
if is_name_in_namepace(fullname, ['ansible']): if is_name_in_namepace(fullname, ['ansible']):
if not self.restrict_to_module_paths:
return None # for non-modules, everything in the ansible namespace is allowed
if fullname in ('ansible.module_utils.basic', 'ansible.module_utils.common.removed'): if fullname in ('ansible.module_utils.basic', 'ansible.module_utils.common.removed'):
return self # intercept loading so we can modify the result return self # intercept loading so we can modify the result
@ -153,6 +159,9 @@ def main():
if not collection_loader: if not collection_loader:
return self # restrict access to collections when we are not testing a collection return self # restrict access to collections when we are not testing a collection
if not self.restrict_to_module_paths:
return None # for non-modules, everything in the ansible namespace is allowed
if is_name_in_namepace(fullname, ['ansible_collections...plugins.module_utils', self.name]): if is_name_in_namepace(fullname, ['ansible_collections...plugins.module_utils', self.name]):
return None # module_utils and module under test are always allowed return None # module_utils and module under test are always allowed
@ -196,24 +205,25 @@ def main():
self.loaded_modules.add(fullname) self.loaded_modules.add(fullname)
return import_module(fullname) return import_module(fullname)
def run(): def run(restrict_to_module_paths):
"""Main program function.""" """Main program function."""
base_dir = os.getcwd() base_dir = os.getcwd()
messages = set() messages = set()
for path in sys.argv[1:] or sys.stdin.read().splitlines(): for path in sys.argv[1:] or sys.stdin.read().splitlines():
name = convert_relative_path_to_name(path) name = convert_relative_path_to_name(path)
test_python_module(path, name, base_dir, messages) test_python_module(path, name, base_dir, messages, restrict_to_module_paths)
if messages: if messages:
sys.exit(10) sys.exit(10)
def test_python_module(path, name, base_dir, messages): def test_python_module(path, name, base_dir, messages, restrict_to_module_paths):
"""Test the given python module by importing it. """Test the given python module by importing it.
:type path: str :type path: str
:type name: str :type name: str
:type base_dir: str :type base_dir: str
:type messages: set[str] :type messages: set[str]
:type restrict_to_module_paths: bool
""" """
if name in sys.modules: if name in sys.modules:
return # cannot be tested because it has already been loaded return # cannot be tested because it has already been loaded
@ -230,13 +240,13 @@ def main():
try: try:
with monitor_sys_modules(path, messages): with monitor_sys_modules(path, messages):
with restrict_imports(path, name, messages): with restrict_imports(path, name, messages, restrict_to_module_paths):
with capture_output(capture_normal): with capture_output(capture_normal):
import_module(name) import_module(name)
if run_main: if run_main:
with monitor_sys_modules(path, messages): with monitor_sys_modules(path, messages):
with restrict_imports(path, name, messages): with restrict_imports(path, name, messages, restrict_to_module_paths):
with capture_output(capture_main): with capture_output(capture_main):
runpy.run_module(name, run_name='__main__', alter_sys=True) runpy.run_module(name, run_name='__main__', alter_sys=True)
except ImporterAnsibleModuleException: except ImporterAnsibleModuleException:
@ -398,13 +408,14 @@ def main():
print(message) print(message)
@contextlib.contextmanager @contextlib.contextmanager
def restrict_imports(path, name, messages): def restrict_imports(path, name, messages, restrict_to_module_paths):
"""Restrict available imports. """Restrict available imports.
:type path: str :type path: str
:type name: str :type name: str
:type messages: set[str] :type messages: set[str]
:type restrict_to_module_paths: bool
""" """
restricted_loader = RestrictedModuleLoader(path, name) restricted_loader = RestrictedModuleLoader(path, name, restrict_to_module_paths)
# noinspection PyTypeChecker # noinspection PyTypeChecker
sys.meta_path.insert(0, restricted_loader) sys.meta_path.insert(0, restricted_loader)
@ -413,6 +424,10 @@ def main():
try: try:
yield yield
finally: finally:
if import_type == 'plugin':
from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
_AnsibleCollectionFinder._remove() # pylint: disable=protected-access
if sys.meta_path[0] != restricted_loader: if sys.meta_path[0] != restricted_loader:
report_message(path, 0, 0, 'metapath', 'changes to sys.meta_path[0] are not permitted', messages) report_message(path, 0, 0, 'metapath', 'changes to sys.meta_path[0] are not permitted', messages)
@ -457,6 +472,26 @@ def main():
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter('error') warnings.simplefilter('error')
if sys.version_info[0] == 2:
warnings.filterwarnings(
"ignore",
"Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography,"
" and will be removed in a future release.")
warnings.filterwarnings(
"ignore",
"Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography,"
" and will be removed in the next release.")
if sys.version_info[:2] == (3, 5):
warnings.filterwarnings(
"ignore",
"Python 3.5 support will be dropped in the next release ofcryptography. Please upgrade your Python.")
warnings.filterwarnings(
"ignore",
"Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.")
warnings.filterwarnings(
"ignore",
"The _yaml extension module is now located at yaml._yaml and its location is subject to change. To use the "
"LibYAML-based parser and emitter, import from `yaml`: `from yaml import CLoader as Loader, CDumper as Dumper`.")
try: try:
yield yield
@ -464,7 +499,7 @@ def main():
sys.stdout = old_stdout sys.stdout = old_stdout
sys.stderr = old_stderr sys.stderr = old_stderr
run() run(import_type == 'module')
if __name__ == '__main__': if __name__ == '__main__':

View file

@ -9,7 +9,6 @@ import re
import time import time
import textwrap import textwrap
import functools import functools
import hashlib
import difflib import difflib
import filecmp import filecmp
import random import random
@ -44,7 +43,6 @@ from .cloud import (
from .io import ( from .io import (
make_dirs, make_dirs,
open_text_file, open_text_file,
read_binary_file,
read_text_file, read_text_file,
write_text_file, write_text_file,
) )
@ -70,6 +68,7 @@ from .util import (
SUPPORTED_PYTHON_VERSIONS, SUPPORTED_PYTHON_VERSIONS,
str_to_version, str_to_version,
version_to_str, version_to_str,
get_hash,
) )
from .util_common import ( from .util_common import (
@ -2026,7 +2025,7 @@ class EnvironmentDescription:
pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions)) pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v]) program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v])
pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v]) pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
known_hosts_hash = self.get_hash(os.path.expanduser('~/.ssh/known_hosts')) known_hosts_hash = get_hash(os.path.expanduser('~/.ssh/known_hosts'))
for version in sorted(versions): for version in sorted(versions):
self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings) self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)
@ -2166,21 +2165,6 @@ class EnvironmentDescription:
with open_text_file(path) as script_fd: with open_text_file(path) as script_fd:
return script_fd.readline().strip() return script_fd.readline().strip()
@staticmethod
def get_hash(path):
"""
:type path: str
:rtype: str | None
"""
if not os.path.exists(path):
return None
file_hash = hashlib.sha256()
file_hash.update(read_binary_file(path))
return file_hash.hexdigest()
class NoChangesDetected(ApplicationWarning): class NoChangesDetected(ApplicationWarning):
"""Exception when change detection was performed, but no changes were found.""" """Exception when change detection was performed, but no changes were found."""

View file

@ -20,6 +20,7 @@ from ..target import (
) )
from ..util import ( from ..util import (
ANSIBLE_TEST_DATA_ROOT,
SubprocessError, SubprocessError,
remove_tree, remove_tree,
display, display,
@ -27,6 +28,8 @@ from ..util import (
is_subdir, is_subdir,
generate_pip_command, generate_pip_command,
find_python, find_python,
get_hash,
REMOTE_ONLY_PYTHON_VERSIONS,
) )
from ..util_common import ( from ..util_common import (
@ -41,6 +44,7 @@ from ..ansible_util import (
from ..executor import ( from ..executor import (
generate_pip_install, generate_pip_install,
install_cryptography,
) )
from ..config import ( from ..config import (
@ -60,12 +64,21 @@ from ..data import (
) )
def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool]
"""Create a predicate which tests whether a path can be used by modules or not."""
module_path = data_context().content.module_path
module_utils_path = data_context().content.module_utils_path
if module_restrictions:
return lambda path: is_subdir(path, module_path) or is_subdir(path, module_utils_path)
return lambda path: not (is_subdir(path, module_path) or is_subdir(path, module_utils_path))
class ImportTest(SanityMultipleVersion): class ImportTest(SanityMultipleVersion):
"""Sanity test for proper import exception handling.""" """Sanity test for proper import exception handling."""
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test.""" """Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and
(is_subdir(target.path, data_context().content.module_path) or is_subdir(target.path, data_context().content.module_utils_path))] any(is_subdir(target.path, path) for path in data_context().content.plugin_paths.values())]
def test(self, args, targets, python_version): def test(self, args, targets, python_version):
""" """
@ -92,91 +105,112 @@ class ImportTest(SanityMultipleVersion):
temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import') temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')
# create a clean virtual environment to minimize the available imports beyond the python standard library messages = []
virtual_environment_path = os.path.join(temp_root, 'minimal-py%s' % python_version.replace('.', ''))
virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')
remove_tree(virtual_environment_path) for import_type, test, add_ansible_requirements in (
('module', _get_module_test(True), False),
('plugin', _get_module_test(False), True),
):
if import_type == 'plugin' and python_version in REMOTE_ONLY_PYTHON_VERSIONS:
continue
if not create_virtual_environment(args, python_version, virtual_environment_path): data = '\n'.join([path for path in paths if test(path)])
display.warning("Skipping sanity test '%s' on Python %s due to missing virtual environment support." % (self.name, python_version)) if not data:
return SanitySkipped(self.name, python_version) continue
# add the importer to our virtual environment so it can be accessed through the coverage injector requirements_file = None
importer_path = os.path.join(virtual_environment_bin, 'importer.py')
yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py')
if not args.explain:
os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path)
os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path)
# activate the virtual environment # create a clean virtual environment to minimize the available imports beyond the python standard library
env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH']) virtual_environment_dirname = 'minimal-py%s' % python_version.replace('.', '')
if add_ansible_requirements:
requirements_file = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.import-plugins.txt')
virtual_environment_dirname += '-requirements-%s' % get_hash(requirements_file)
virtual_environment_path = os.path.join(temp_root, virtual_environment_dirname)
virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')
env.update( remove_tree(virtual_environment_path)
SANITY_TEMP_PATH=ResultType.TMP.path,
) if not create_virtual_environment(args, python_version, virtual_environment_path):
display.warning("Skipping sanity test '%s' on Python %s due to missing virtual environment support." % (self.name, python_version))
return SanitySkipped(self.name, python_version)
# add the importer to our virtual environment so it can be accessed through the coverage injector
importer_path = os.path.join(virtual_environment_bin, 'importer.py')
yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py')
if not args.explain:
os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path)
os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path)
# activate the virtual environment
env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
if data_context().content.collection:
env.update( env.update(
SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name, SANITY_TEMP_PATH=ResultType.TMP.path,
SANITY_EXTERNAL_PYTHON=python, SANITY_IMPORTER_TYPE=import_type,
) )
virtualenv_python = os.path.join(virtual_environment_bin, 'python') if data_context().content.collection:
virtualenv_pip = generate_pip_command(virtualenv_python) env.update(
SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name,
SANITY_EXTERNAL_PYTHON=python,
)
# make sure coverage is available in the virtual environment if needed virtualenv_python = os.path.join(virtual_environment_bin, 'python')
if args.coverage: virtualenv_pip = generate_pip_command(virtualenv_python)
run_command(args, generate_pip_install(virtualenv_pip, '', packages=['setuptools']), env=env, capture=capture_pip)
run_command(args, generate_pip_install(virtualenv_pip, '', packages=['coverage']), env=env, capture=capture_pip)
try: # make sure requirements are installed if needed
# In some environments pkg_resources is installed as a separate pip package which needs to be removed. if requirements_file:
# For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools. install_cryptography(args, virtualenv_python, python_version, virtualenv_pip)
# However, a venv is created with an additional pkg-resources package which is independent of setuptools. run_command(args, generate_pip_install(virtualenv_pip, 'sanity', context='import-plugins'), env=env, capture=capture_pip)
# Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv.
# Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings.
# Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings.
run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pkg-resources'], env=env, capture=capture_pip)
except SubprocessError:
pass
run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env, capture=capture_pip) # make sure coverage is available in the virtual environment if needed
run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env, capture=capture_pip) if args.coverage:
run_command(args, generate_pip_install(virtualenv_pip, '', packages=['setuptools']), env=env, capture=capture_pip)
run_command(args, generate_pip_install(virtualenv_pip, '', packages=['coverage']), env=env, capture=capture_pip)
cmd = ['importer.py'] try:
# In some environments pkg_resources is installed as a separate pip package which needs to be removed.
# For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools.
# However, a venv is created with an additional pkg-resources package which is independent of setuptools.
# Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv.
# Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings.
# Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings.
run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pkg-resources'], env=env, capture=capture_pip)
except SubprocessError:
pass
data = '\n'.join(paths) run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env, capture=capture_pip)
run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env, capture=capture_pip)
display.info(data, verbosity=4) display.info(import_type + ': ' + data, verbosity=4)
results = [] cmd = ['importer.py']
try: try:
with coverage_context(args): with coverage_context(args):
stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version, stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version,
virtualenv=virtualenv_python) virtualenv=virtualenv_python)
if stdout or stderr: if stdout or stderr:
raise SubprocessError(cmd, stdout=stdout, stderr=stderr) raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
except SubprocessError as ex: except SubprocessError as ex:
if ex.status != 10 or ex.stderr or not ex.stdout: if ex.status != 10 or ex.stderr or not ex.stdout:
raise raise
pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$' pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
results = parse_to_list_of_dict(pattern, ex.stdout) parsed = parse_to_list_of_dict(pattern, ex.stdout)
relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep
results = [SanityMessage( messages += [SanityMessage(
message=r['message'], message=r['message'],
path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'], path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'],
line=int(r['line']), line=int(r['line']),
column=int(r['column']), column=int(r['column']),
) for r in results] ) for r in parsed]
results = settings.process_errors(results, paths) results = settings.process_errors(messages, paths)
if results: if results:
return SanityFailure(self.name, messages=results, python_version=python_version) return SanityFailure(self.name, messages=results, python_version=python_version)

View file

@ -5,6 +5,7 @@ __metaclass__ = type
import contextlib import contextlib
import errno import errno
import fcntl import fcntl
import hashlib
import inspect import inspect
import os import os
import pkgutil import pkgutil
@ -53,6 +54,7 @@ from .encoding import (
from .io import ( from .io import (
open_binary_file, open_binary_file,
read_binary_file,
read_text_file, read_text_file,
) )
@ -857,4 +859,19 @@ def open_zipfile(path, mode='r'):
zib_obj.close() zib_obj.close()
def get_hash(path):
"""
:type path: str
:rtype: str | None
"""
if not os.path.exists(path):
return None
file_hash = hashlib.sha256()
file_hash.update(read_binary_file(path))
return file_hash.hexdigest()
display = Display() # pylint: disable=locally-disabled, invalid-name display = Display() # pylint: disable=locally-disabled, invalid-name

View file

@ -0,0 +1,7 @@
{
"prefixes": [
"requirements.txt",
"test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt"
],
"output": "path-line-column-message"
}

View file

@ -0,0 +1,31 @@
#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import sys
def read_file(path):
    """Return the text content of the file at *path*, or None after printing an error line."""
    contents = None
    try:
        file_obj = open(path, 'r')
        try:
            contents = file_obj.read()
        finally:
            file_obj.close()
    except Exception as ex:  # pylint: disable=broad-except
        # Report in path:line:column format so the sanity-test framework can parse it.
        print('%s:%d:%d: unable to read required file %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
    return contents
def main():
    """Report an error when the vendored requirements copy differs from requirements.txt."""
    original_path = 'requirements.txt'
    vendored_path = 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt'

    original_text = read_file(original_path)
    vendored_text = read_file(vendored_path)

    # read_file already reported any unreadable file; only compare when both loaded.
    if original_text is None or vendored_text is None:
        return

    if original_text != vendored_text:
        print('%s:%d:%d: must be identical to %s' % (vendored_path, 0, 0, original_path))
if __name__ == '__main__':
main()

View file

@ -12,6 +12,12 @@ def main():
requirements = {} requirements = {}
for path in sys.argv[1:] or sys.stdin.read().splitlines(): for path in sys.argv[1:] or sys.stdin.read().splitlines():
if path == 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt':
# This file is an exact copy of requirements.txt that is used in the import
# sanity test. There is a code-smell test which ensures that the two files
# are identical, and it is only used inside an empty venv, so we can ignore
# it here.
continue
with open(path, 'r') as path_fd: with open(path, 'r') as path_fd:
requirements[path] = parse_requirements(path_fd.read().splitlines()) requirements[path] = parse_requirements(path_fd.read().splitlines())