"""Analyze python import statements."""
|
2019-07-12 08:46:20 +02:00
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
2017-02-14 03:49:36 +01:00
|
|
|
|
|
|
|
import ast
|
|
|
|
import os
|
2020-07-29 19:15:57 +02:00
|
|
|
import re
|
2017-02-14 03:49:36 +01:00
|
|
|
|
2021-05-06 02:37:59 +02:00
|
|
|
from .. import types as t
|
2019-08-28 08:40:06 +02:00
|
|
|
|
2021-05-06 02:37:59 +02:00
|
|
|
from ..io import (
|
2020-02-29 23:11:25 +01:00
|
|
|
read_binary_file,
|
2020-02-04 20:21:53 +01:00
|
|
|
)
|
|
|
|
|
2021-05-06 02:37:59 +02:00
|
|
|
from ..util import (
|
2017-02-14 03:49:36 +01:00
|
|
|
display,
|
|
|
|
ApplicationError,
|
2019-08-29 00:13:00 +02:00
|
|
|
is_subdir,
|
2017-02-14 03:49:36 +01:00
|
|
|
)
|
|
|
|
|
2021-05-06 02:37:59 +02:00
|
|
|
from ..data import (
|
2019-07-23 04:24:48 +02:00
|
|
|
data_context,
|
|
|
|
)
|
|
|
|
|
2017-03-23 02:07:53 +01:00
|
|
|
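# Virtual packages depend on the modules they contain, rather than the reverse (see recurse_import in get_python_module_utils_imports).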
VIRTUAL_PACKAGES = set([
    'ansible.module_utils.six',
])


def get_python_module_utils_imports(compile_targets):
    """Return a dictionary of module_utils names mapped to sets of python file paths.
    :type compile_targets: list[TestTarget]
    :rtype: dict[str, set[str]]
    """

    module_utils = enumerate_module_utils()

    virtual_utils = set(m for m in module_utils if any(m.startswith('%s.' % v) for v in VIRTUAL_PACKAGES))
    module_utils -= virtual_utils

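    # map each target path to the set of module_utils names it imports; recursive (inherited) imports are added below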
    imports_by_target_path = {}

    for target in compile_targets:
        imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils)

    def recurse_import(import_name, depth=0, seen=None):  # type: (str, int, t.Optional[t.Set[str]]) -> t.Set[str]
        """Recursively expand module_utils imports from module_utils files."""
        display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)

        if seen is None:
            seen = set([import_name])

        results = set([import_name])

        # virtual packages depend on the modules they contain instead of the reverse
        if import_name in VIRTUAL_PACKAGES:
            for sub_import in sorted(virtual_utils):
                if sub_import.startswith('%s.' % import_name):
                    if sub_import in seen:
                        continue

                    seen.add(sub_import)

                    matches = sorted(recurse_import(sub_import, depth + 1, seen))

                    for result in matches:
                        results.add(result)

        import_path = get_import_path(import_name)

        if import_path not in imports_by_target_path:
            import_path = get_import_path(import_name, package=True)

            if import_path not in imports_by_target_path:
                raise ApplicationError('Cannot determine path for module_utils import: %s' % import_name)

        # process imports in reverse so the deepest imports come first
        for name in sorted(imports_by_target_path[import_path], reverse=True):
            if name in virtual_utils:
                continue

            if name in seen:
                continue

            seen.add(name)

            matches = sorted(recurse_import(name, depth + 1, seen))

            for result in matches:
                results.add(result)

        return results

    for module_util in module_utils:
        # recurse over module_utils imports while excluding self
        module_util_imports = recurse_import(module_util)
        module_util_imports.remove(module_util)

        # add recursive imports to all path entries which import this module_util
        for target_path in imports_by_target_path:
            if module_util in imports_by_target_path[target_path]:
                for module_util_import in sorted(module_util_imports):
                    if module_util_import not in imports_by_target_path[target_path]:
                        display.info('%s inherits import %s via %s' % (target_path, module_util_import, module_util), verbosity=6)
                        imports_by_target_path[target_path].add(module_util_import)

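    # invert the mapping: record, for each module_utils name, the set of target paths which import it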
    imports = dict([(module_util, set()) for module_util in module_utils | virtual_utils])

    for target_path in imports_by_target_path:
        for module_util in imports_by_target_path[target_path]:
            imports[module_util].add(target_path)

    # for purposes of mapping module_utils to paths, treat imports of virtual utils the same as the parent package
    for virtual_util in virtual_utils:
        parent_package = '.'.join(virtual_util.split('.')[:-1])
        imports[virtual_util] = imports[parent_package]
        display.info('%s reports imports from parent package %s' % (virtual_util, parent_package), verbosity=6)

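    # warn about module_utils which are never imported (empty __init__.py packages are ignored)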
    for module_util in sorted(imports):
        if not imports[module_util]:
            package_path = get_import_path(module_util, package=True)

            if os.path.exists(package_path) and not os.path.getsize(package_path):
                continue  # ignore empty __init__.py files

            display.warning('No imports found which use the "%s" module_util.' % module_util)

    return imports


def get_python_module_utils_name(path):  # type: (str) -> str
    """Return a namespace and name from the given module_utils path."""
    base_path = data_context().content.module_utils_path

    if data_context().content.collection:
        prefix = 'ansible_collections.' + data_context().content.collection.prefix + 'plugins.module_utils'
    else:
        prefix = 'ansible.module_utils'

    if path.endswith('/__init__.py'):
        path = os.path.dirname(path)

    if path == base_path:
        name = prefix
    else:
        name = prefix + '.' + os.path.splitext(os.path.relpath(path, base_path))[0].replace(os.path.sep, '.')

    return name


def enumerate_module_utils():
    """Return a set of available module_utils imports.
    :rtype: set[str]
    """
    module_utils = []

    for path in data_context().content.walk_files(data_context().content.module_utils_path):
        ext = os.path.splitext(path)[1]

        if ext != '.py':
            continue

        module_utils.append(get_python_module_utils_name(path))

    return set(module_utils)


def extract_python_module_utils_imports(path, module_utils):
    """Return a set of module_utils imports found in the specified source file.
    :type path: str
    :type module_utils: set[str]
    :rtype: set[str]
    """
    # Python code must be read as bytes to avoid a SyntaxError when the source uses comments to declare the file encoding.
    # See: https://www.python.org/dev/peps/pep-0263
    # Specifically: If a Unicode string with a coding declaration is passed to compile(), a SyntaxError will be raised.
    code = read_binary_file(path)

    try:
        tree = ast.parse(code)
    except SyntaxError as ex:
        # Treat this error as a warning so tests can be executed as best as possible.
        # The compile test will detect and report this syntax error.
        display.warning('%s:%s Syntax error extracting module_utils imports: %s' % (path, ex.lineno, ex.msg))
        return set()

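    # walk the AST to collect module_utils imports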
    finder = ModuleUtilFinder(path, module_utils)
    finder.visit(tree)
    return finder.imports


def get_import_path(name, package=False):  # type: (str, bool) -> str
    """Return a path from an import name."""
    if package:
        filename = os.path.join(name.replace('.', '/'), '__init__.py')
    else:
        filename = '%s.py' % name.replace('.', '/')

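    # convert the import name to a path relative to the ansible source root or to the collection root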
    if name.startswith('ansible.module_utils.') or name == 'ansible.module_utils':
        path = os.path.join('lib', filename)
    elif data_context().content.collection and (
            name.startswith('ansible_collections.%s.plugins.module_utils.' % data_context().content.collection.full_name) or
            name == 'ansible_collections.%s.plugins.module_utils' % data_context().content.collection.full_name):
        path = '/'.join(filename.split('/')[3:])
    else:
        raise Exception('Unexpected import name: %s' % name)

    return path


def path_to_module(path):  # type: (str) -> str
    """Convert the given path to a module name."""
    module = os.path.splitext(path)[0].replace(os.path.sep, '.')

    if module.endswith('.__init__'):
        module = module[:-9]  # strip the trailing '.__init__'

    return module


def relative_to_absolute(name, level, module, path, lineno):  # type: (str, int, str, str, int) -> str
    """Convert a relative import to an absolute import."""
    if level <= 0:
        absolute_name = name
    elif not module:
        display.warning('Cannot resolve relative import "%s%s" in unknown module at %s:%d' % ('.' * level, name, path, lineno))
        absolute_name = 'relative.nomodule'
    else:
        parts = module.split('.')

        if level >= len(parts):
            display.warning('Cannot resolve relative import "%s%s" above module "%s" at %s:%d' % ('.' * level, name, module, path, lineno))
            absolute_name = 'relative.abovelevel'
        else:
            absolute_name = '.'.join(parts[:-level] + [name])

    return absolute_name


class ModuleUtilFinder(ast.NodeVisitor):
    """AST visitor to find valid module_utils imports."""
    def __init__(self, path, module_utils):
        """Create a visitor which records the module_utils imports found in the specified source file.
        :type path: str
        :type module_utils: set[str]
        """
        self.path = path
        self.module_utils = module_utils
        self.imports = set()

        # implicitly import parent package

        if path.endswith('/__init__.py'):
            path = os.path.split(path)[0]

        if path.startswith('lib/ansible/module_utils/'):
            package = os.path.split(path)[0].replace('/', '.')[4:]

            if package != 'ansible.module_utils' and package not in VIRTUAL_PACKAGES:
                self.add_import(package, 0)

        self.module = None

        if data_context().content.is_ansible:
            # Various parts of the Ansible source tree execute within different modules.
            # To support import analysis, each file which uses relative imports must reside under a path defined here.
            # The mapping is a tuple consisting of a path pattern to match and a replacement path.
            # During analysis, any relative imports not covered here will result in warnings, which can be fixed by adding the appropriate entry.
            path_map = (
                ('^hacking/build_library/build_ansible/', 'build_ansible/'),
                ('^lib/ansible/', 'ansible/'),
                ('^test/lib/ansible_test/_data/sanity/validate-modules/', 'validate_modules/'),
                ('^test/units/', 'test/units/'),
                ('^test/lib/ansible_test/_internal/', 'ansible_test/_internal/'),
                ('^test/integration/targets/.*/ansible_collections/(?P<ns>[^/]*)/(?P<col>[^/]*)/', r'ansible_collections/\g<ns>/\g<col>/'),
                ('^test/integration/targets/.*/library/', 'ansible/modules/'),
            )

            for pattern, replacement in path_map:
                if re.search(pattern, self.path):
                    revised_path = re.sub(pattern, replacement, self.path)
                    self.module = path_to_module(revised_path)
                    break
        else:
            # This assumes that all files within the collection are executed by Ansible as part of the collection.
            # While that will usually be true, there are exceptions which will result in this resolution being incorrect.
            self.module = path_to_module(os.path.join(data_context().content.collection.directory, self.path))

    # noinspection PyPep8Naming
    # pylint: disable=locally-disabled, invalid-name
    def visit_Import(self, node):
        """
        :type node: ast.Import
        """
        self.generic_visit(node)

        # import ansible.module_utils.MODULE[.MODULE]
        # import ansible_collections.{ns}.{col}.plugins.module_utils.MODULE[.MODULE]
        self.add_imports([alias.name for alias in node.names], node.lineno)

    # noinspection PyPep8Naming
    # pylint: disable=locally-disabled, invalid-name
    def visit_ImportFrom(self, node):
        """
        :type node: ast.ImportFrom
        """
        self.generic_visit(node)

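        # relative imports without a module, such as "from . import name", are ignored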
        if not node.module:
            return

        module = relative_to_absolute(node.module, node.level, self.module, self.path, node.lineno)

        if not module.startswith('ansible'):
            return

        # from ansible.module_utils import MODULE[, MODULE]
        # from ansible.module_utils.MODULE[.MODULE] import MODULE[, MODULE]
        # from ansible_collections.{ns}.{col}.plugins.module_utils import MODULE[, MODULE]
        # from ansible_collections.{ns}.{col}.plugins.module_utils.MODULE[.MODULE] import MODULE[, MODULE]
        self.add_imports(['%s.%s' % (module, alias.name) for alias in node.names], node.lineno)

    def add_import(self, name, line_number):
        """
        :type name: str
        :type line_number: int
        """
        import_name = name

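        # walk up the dotted name until a known module_utils name is found, or it no longer looks like a module_utils import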
        while self.is_module_util_name(name):
            if name in self.module_utils:
                if name not in self.imports:
                    display.info('%s:%d imports module_utils: %s' % (self.path, line_number, name), verbosity=5)
                    self.imports.add(name)

                return  # duplicate imports are ignored

            name = '.'.join(name.split('.')[:-1])

        if is_subdir(self.path, data_context().content.test_path):
            return  # invalid imports in tests are ignored

        # Treat this error as a warning so tests can be executed as best as possible.
        # This error should be detected by unit or integration tests.
        display.warning('%s:%d Invalid module_utils import: %s' % (self.path, line_number, import_name))

    def add_imports(self, names, line_no):  # type: (t.List[str], int) -> None
        """Add the given import names if they are module_utils imports."""
        for name in names:
            if self.is_module_util_name(name):
                self.add_import(name, line_no)

    @staticmethod
    def is_module_util_name(name):  # type: (str) -> bool
        """Return True if the given name is a module_util name for the content under test. External module_utils are ignored."""
        if data_context().content.is_ansible and name.startswith('ansible.module_utils.'):
            return True

        if data_context().content.collection and name.startswith('ansible_collections.%s.plugins.module_utils.' % data_context().content.collection.full_name):
            return True

        return False