2016-11-30 06:21:53 +01:00
|
|
|
"""Test target identification, iteration and inclusion/exclusion."""
|
2019-07-12 08:46:20 +02:00
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2017-08-23 20:09:50 +02:00
|
|
|
import collections
|
2016-11-30 06:21:53 +01:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import itertools
|
|
|
|
import abc
|
|
|
|
|
2019-08-10 00:18:19 +02:00
|
|
|
from . import types as t
|
|
|
|
|
2020-02-04 20:21:53 +01:00
|
|
|
from .encoding import (
|
|
|
|
to_bytes,
|
2020-12-04 18:12:14 +01:00
|
|
|
to_text,
|
2020-02-04 20:21:53 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
from .io import (
|
|
|
|
read_text_file,
|
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .util import (
|
2017-08-19 02:21:11 +02:00
|
|
|
ApplicationError,
|
2019-01-15 04:57:32 +01:00
|
|
|
display,
|
2018-09-20 08:20:27 +02:00
|
|
|
read_lines_without_comments,
|
2019-07-16 03:49:05 +02:00
|
|
|
is_subdir,
|
2019-07-23 04:24:48 +02:00
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .data import (
|
2019-07-23 04:24:48 +02:00
|
|
|
data_context,
|
2017-08-19 02:21:11 +02:00
|
|
|
)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
# File extensions used by modules (Python and PowerShell); consulted by
# TestTarget when deciding whether a file under the module path is a module.
MODULE_EXTENSIONS = '.py', '.ps1'

# Type variables bound to the target classes defined below, used in comment-style
# annotations (e.g. filter_targets). When the `t` shim does not provide TypeVar
# (no typing support available), fall back to None so the module still imports.
try:
    TCompletionTarget = t.TypeVar('TCompletionTarget', bound='CompletionTarget')
except AttributeError:
    TCompletionTarget = None  # pylint: disable=invalid-name

try:
    TIntegrationTarget = t.TypeVar('TIntegrationTarget', bound='IntegrationTarget')
except AttributeError:
    TIntegrationTarget = None  # pylint: disable=invalid-name
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
def find_target_completion(target_func, prefix):
    """
    :type target_func: () -> collections.Iterable[CompletionTarget]
    :type prefix: unicode
    :rtype: list[str]
    """
    # This runs inside the shell's tab-completion handler, so any failure is
    # returned as the sole completion entry rather than raised.
    try:
        targets = target_func()
        # COMP_TYPE of '63' ('?') indicates double-tab completion from bash.
        double_tab = os.environ.get('COMP_TYPE') == '63'
        return walk_completion_targets(targets, prefix, double_tab)
    except Exception as ex:  # pylint: disable=locally-disabled, broad-except
        return [u'%s' % ex]
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
def walk_completion_targets(targets, prefix, short=False):
    """
    :type targets: collections.Iterable[CompletionTarget]
    :type prefix: str
    :type short: bool
    :rtype: tuple[str]
    """
    # Pool every alias exposed by every target.
    candidates = set(alias for target in targets for alias in target.aliases)

    # An exact match on a directory-style prefix should expand to its
    # children rather than complete to itself.
    if prefix.endswith('/') and prefix in candidates:
        candidates.discard(prefix)

    # Keep aliases under the prefix, but only one level deep. The [:-1] slice
    # permits a trailing slash on the candidate itself.
    matches = [candidate for candidate in candidates
               if candidate.startswith(prefix) and '/' not in candidate[len(prefix):-1]]

    if short:
        # Trim the directory portion already typed for a more compact display.
        offset = len(os.path.dirname(prefix))

        if offset:
            offset += 1

        trimmed = [match[offset:] for match in matches if len(match) > offset]

        # Only switch to the short form when more than one entry remains.
        if len(trimmed) > 1:
            matches = trimmed

    return tuple(sorted(matches))
|
|
|
|
|
|
|
|
|
|
|
|
def walk_internal_targets(targets, includes=None, excludes=None, requires=None):
    """
    :type targets: collections.Iterable[T <= CompletionTarget]
    :type includes: list[str]
    :type excludes: list[str]
    :type requires: list[str]
    :rtype: tuple[T <= CompletionTarget]
    """
    targets = tuple(targets)

    # Resolve includes first; errors=True reports include patterns which matched nothing.
    included = sorted(filter_targets(targets, includes, errors=True, directories=False), key=lambda item: item.name)

    if requires:
        # Restrict the included targets to those which also satisfy a require pattern.
        required = set(filter_targets(targets, requires, errors=True, directories=False))
        included = [item for item in included if item in required]

    if excludes:
        # Run excludes against all targets solely to surface unmatched exclude patterns.
        list(filter_targets(targets, excludes, errors=True, include=False, directories=False))

    # Remove the excluded targets from the included set (no error reporting here).
    remaining = set(filter_targets(included, excludes, errors=False, include=False, directories=False))

    return tuple(sorted(remaining, key=lambda item: item.name))
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
2019-08-10 00:18:19 +02:00
|
|
|
def filter_targets(targets,  # type: t.Iterable[TCompletionTarget]
                   patterns,  # type: t.List[str]
                   include=True,  # type: bool
                   directories=True,  # type: bool
                   errors=True,  # type: bool
                   ):  # type: (...) -> t.Iterable[TCompletionTarget]
    """Iterate over the given targets and filter them based on the supplied arguments."""
    # Patterns not yet matched by any target; reported at the end when errors=True.
    unmatched = set(patterns or ())
    # Compile each pattern once, anchored at both ends.
    compiled_patterns = dict((p, re.compile('^%s$' % p)) for p in patterns) if patterns else None

    for target in targets:
        matched_directories = set()
        match = False

        if patterns:
            for alias in target.aliases:
                for pattern in patterns:
                    if compiled_patterns[pattern].match(alias):
                        match = True

                        try:
                            unmatched.remove(pattern)
                        except KeyError:
                            pass

                        # A directory-style alias (trailing slash) matched. Record the
                        # directory so the whole directory can be yielded as a single
                        # DirectoryTarget below. Prefer the target's base_path when it
                        # is more specific (longer) than the matched alias.
                        if alias.endswith('/'):
                            if target.base_path and len(target.base_path) > len(alias):
                                matched_directories.add(target.base_path)
                            else:
                                matched_directories.add(alias)
        elif include:
            # No patterns given: every target matches when including.
            match = True

            if not target.base_path:
                matched_directories.add('.')

            for alias in target.aliases:
                if alias.endswith('/'):
                    if target.base_path and len(target.base_path) > len(alias):
                        matched_directories.add(target.base_path)
                    else:
                        matched_directories.add(alias)

        # Skip targets which matched when excluding, or failed to match when including.
        if match != include:
            continue

        if directories and matched_directories:
            # Collapse to the shortest (least specific) matched directory.
            yield DirectoryTarget(to_text(sorted(matched_directories, key=len)[0]), target.modules)
        else:
            yield target

    if errors:
        if unmatched:
            raise TargetPatternsNotMatched(unmatched)
|
|
|
|
|
|
|
|
|
|
|
|
def walk_module_targets():
    """
    :rtype: collections.Iterable[TestTarget]
    """
    module_path = data_context().content.module_path

    # Only targets which resolved to an actual module name are of interest here.
    for candidate in walk_test_targets(path=module_path, module_path=module_path, extensions=MODULE_EXTENSIONS):
        if candidate.module:
            yield candidate
|
|
|
|
|
|
|
|
|
|
|
|
def walk_units_targets():
    """
    :rtype: collections.Iterable[TestTarget]
    """
    content = data_context().content

    # Unit tests are Python files following the pytest `test_` naming convention.
    return walk_test_targets(
        path=content.unit_path,
        module_path=content.unit_module_path,
        extensions=('.py',),
        prefix='test_',
    )
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
2019-08-24 03:08:21 +02:00
|
|
|
def walk_compile_targets(include_symlinks=True):
    """
    :type include_symlinks: bool
    :rtype: collections.Iterable[TestTarget]
    """
    # Compile checks cover all Python files, plus the scripts under bin/.
    return walk_test_targets(
        module_path=data_context().content.module_path,
        extensions=('.py',),
        extra_dirs=('bin',),
        include_symlinks=include_symlinks,
    )
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
2019-08-27 23:03:23 +02:00
|
|
|
def walk_powershell_targets(include_symlinks=True):
    """
    :type include_symlinks: bool
    :rtype: collections.Iterable[TestTarget]
    """
    # PowerShell scripts and modules only.
    return walk_test_targets(
        module_path=data_context().content.module_path,
        extensions=('.ps1', '.psm1'),
        include_symlinks=include_symlinks,
    )
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
def walk_sanity_targets():
    """
    :rtype: collections.Iterable[TestTarget]
    """
    # Sanity tests examine every file, including symlinks and files found
    # through symlinked directories.
    return walk_test_targets(
        module_path=data_context().content.module_path,
        include_symlinks=True,
        include_symlinked_directories=True,
    )
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
2017-08-23 20:09:50 +02:00
|
|
|
def walk_posix_integration_targets(include_hidden=False):
    """
    :type include_hidden: bool
    :rtype: collections.Iterable[IntegrationTarget]
    """
    wanted = ('posix/', 'hidden/posix/') if include_hidden else ('posix/',)

    for target in walk_integration_targets():
        if any(alias in target.aliases for alias in wanted):
            yield target
|
|
|
|
|
|
|
|
|
2017-08-23 20:09:50 +02:00
|
|
|
def walk_network_integration_targets(include_hidden=False):
    """
    :type include_hidden: bool
    :rtype: collections.Iterable[IntegrationTarget]
    """
    wanted = ('network/', 'hidden/network/') if include_hidden else ('network/',)

    for target in walk_integration_targets():
        if any(alias in target.aliases for alias in wanted):
            yield target
|
|
|
|
|
|
|
|
|
2017-08-23 20:09:50 +02:00
|
|
|
def walk_windows_integration_targets(include_hidden=False):
    """
    :type include_hidden: bool
    :rtype: collections.Iterable[IntegrationTarget]
    """
    wanted = ('windows/', 'hidden/windows/') if include_hidden else ('windows/',)

    for target in walk_integration_targets():
        if any(alias in target.aliases for alias in wanted):
            yield target
|
|
|
|
|
|
|
|
|
|
|
|
def walk_integration_targets():
    """
    :rtype: collections.Iterable[IntegrationTarget]
    """
    path = data_context().content.integration_targets_path
    modules = frozenset(target.module for target in walk_module_targets())
    paths = data_context().content.walk_files(path)
    prefixes = load_integration_prefixes()
    targets_path_tuple = tuple(path.split(os.path.sep))

    # Standard ansible role directories whose main.yml/main.yaml marks a role.
    entry_dirs = (
        'defaults',
        'files',
        'handlers',
        'meta',
        'tasks',
        'templates',
        'vars',
    )

    entry_files = (
        'main.yml',
        'main.yaml',
    )

    # Path suffixes (e.g. "/tasks/main.yml") identifying a role's parent directory.
    entry_points = []

    for entry_dir in entry_dirs:
        for entry_file in entry_files:
            entry_points.append(os.path.join(os.path.sep, entry_dir, entry_file))

    # any directory with at least one file is a target
    path_tuples = set(tuple(os.path.dirname(p).split(os.path.sep))
                      for p in paths)

    # also detect targets which are ansible roles, looking for standard entry points
    path_tuples.update(tuple(os.path.dirname(os.path.dirname(p)).split(os.path.sep))
                       for p in paths if any(p.endswith(entry_point) for entry_point in entry_points))

    # remove the top-level directory if it was included
    if targets_path_tuple in path_tuples:
        path_tuples.remove(targets_path_tuple)

    previous_path_tuple = None
    paths = []

    # Sorting guarantees a parent directory sorts immediately before its children,
    # so a prefix comparison against the previous entry detects nesting.
    for path_tuple in sorted(path_tuples):
        if previous_path_tuple and previous_path_tuple == path_tuple[:len(previous_path_tuple)]:
            # ignore nested directories
            continue

        previous_path_tuple = path_tuple
        paths.append(os.path.sep.join(path_tuple))

    for path in paths:
        yield IntegrationTarget(to_text(path), modules, prefixes)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
def load_integration_prefixes():
    """
    :rtype: dict[str, str]
    """
    integration_path = data_context().content.integration_path

    # Prefix definitions live in files named `target-prefixes.{group}` in the integration path.
    prefix_files = sorted(candidate for candidate in data_context().content.get_files(integration_path)
                          if os.path.splitext(os.path.basename(candidate))[0] == 'target-prefixes')

    prefixes = {}

    for prefix_file in prefix_files:
        # The file extension (minus the dot) names the group for every prefix listed inside.
        group = os.path.splitext(prefix_file)[1][1:]

        for name in read_text_file(prefix_file).splitlines():
            prefixes[name] = group

    return prefixes
|
|
|
|
|
|
|
|
|
2019-08-24 03:08:21 +02:00
|
|
|
def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None, extra_dirs=None, include_symlinks=False, include_symlinked_directories=False):
    """
    Walk content files and yield a TestTarget for each matching file.

    :type path: str | None
    :type module_path: str | None
    :type extensions: tuple[str] | None
    :type prefix: str | None
    :type extra_dirs: tuple[str] | None
    :type include_symlinks: bool
    :type include_symlinked_directories: bool
    :rtype: collections.Iterable[TestTarget]
    """
    if path:
        file_paths = data_context().content.walk_files(path, include_symlinked_directories=include_symlinked_directories)
    else:
        file_paths = data_context().content.all_files(include_symlinked_directories=include_symlinked_directories)

    for file_path in file_paths:
        name, ext = os.path.splitext(os.path.basename(file_path))

        # Apply the optional extension and filename-prefix filters.
        if extensions and ext not in extensions:
            continue

        if prefix and not name.startswith(prefix):
            continue

        symlink = os.path.islink(to_bytes(file_path.rstrip(os.path.sep)))

        if symlink and not include_symlinks:
            continue

        yield TestTarget(to_text(file_path), module_path, prefix, path, symlink)

    file_paths = []

    # Files from extra_dirs are included unfiltered (no extension/prefix checks).
    if extra_dirs:
        for extra_dir in extra_dirs:
            for file_path in data_context().content.get_files(extra_dir):
                file_paths.append(file_path)

    for file_path in file_paths:
        symlink = os.path.islink(to_bytes(file_path.rstrip(os.path.sep)))

        if symlink and not include_symlinks:
            continue

        # Wrap with to_text for consistency with the primary loop above.
        yield TestTarget(to_text(file_path), module_path, prefix, path, symlink)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
2017-07-11 00:41:32 +02:00
|
|
|
def analyze_integration_target_dependencies(integration_targets):
    """
    Build a map from each target name to the set of target names which depend on it.

    :type integration_targets: list[IntegrationTarget]
    :rtype: dict[str,set[str]]
    """
    real_target_root = os.path.realpath(data_context().content.integration_targets_path) + '/'

    # Hidden role targets are candidates for detection via role meta references below.
    role_targets = [target for target in integration_targets if target.type == 'role']
    hidden_role_target_names = set(target.name for target in role_targets if 'hidden/' in target.aliases)

    # Maps a dependency target name to the set of target names that consume it.
    dependencies = collections.defaultdict(set)

    # handle setup dependencies
    for target in integration_targets:
        for setup_target_name in target.setup_always + target.setup_once:
            dependencies[setup_target_name].add(target.name)

    # handle target dependencies
    for target in integration_targets:
        for need_target in target.needs_target:
            dependencies[need_target].add(target.name)

    # handle symlink dependencies between targets
    # this use case is supported, but discouraged
    for target in integration_targets:
        for path in data_context().content.walk_files(target.path):
            if not os.path.islink(to_bytes(path.rstrip(os.path.sep))):
                continue

            real_link_path = os.path.realpath(path)

            if not real_link_path.startswith(real_target_root):
                continue

            # The first path component under the targets root is the target the link points into.
            link_target = real_link_path[len(real_target_root):].split('/')[0]

            # Links within a target do not create a dependency.
            if link_target == target.name:
                continue

            dependencies[link_target].add(target.name)

    # intentionally primitive analysis of role meta to avoid a dependency on pyyaml
    # script based targets are scanned as they may execute a playbook with role dependencies
    for target in integration_targets:
        meta_dir = os.path.join(target.path, 'meta')

        if not os.path.isdir(meta_dir):
            continue

        meta_paths = data_context().content.get_files(meta_dir)

        for meta_path in meta_paths:
            if os.path.exists(meta_path):
                # try and decode the file as a utf-8 string, skip if it contains invalid chars (binary file)
                try:
                    meta_lines = read_text_file(meta_path).splitlines()
                except UnicodeDecodeError:
                    continue

                for meta_line in meta_lines:
                    # Skip comment-only and blank lines.
                    if re.search(r'^ *#.*$', meta_line):
                        continue

                    if not meta_line.strip():
                        continue

                    # A hidden role name appearing anywhere on a meaningful line counts as a reference.
                    for hidden_target_name in hidden_role_target_names:
                        if hidden_target_name in meta_line:
                            dependencies[hidden_target_name].add(target.name)

    # Propagate transitive consumers until a fixed point is reached:
    # if B depends on A and C depends on B, then C also depends on A.
    while True:
        changes = 0

        for dummy, dependent_target_names in dependencies.items():
            for dependent_target_name in list(dependent_target_names):
                new_target_names = dependencies.get(dependent_target_name)

                if new_target_names:
                    for new_target_name in new_target_names:
                        if new_target_name not in dependent_target_names:
                            dependent_target_names.add(new_target_name)
                            changes += 1

        if not changes:
            break

    # Log the final dependency map at high verbosity for debugging.
    for target_name in sorted(dependencies):
        consumers = dependencies[target_name]

        if not consumers:
            continue

        display.info('%s:' % target_name, verbosity=4)

        for consumer in sorted(consumers):
            display.info('  %s' % consumer, verbosity=4)

    return dependencies
|
|
|
|
|
|
|
|
|
2019-07-12 22:17:20 +02:00
|
|
|
class CompletionTarget:
    """Command-line argument completion target base class."""
    __metaclass__ = abc.ABCMeta

    def __init__(self):
        # Subclasses populate these attributes during their own initialization.
        self.name = None
        self.path = None
        self.base_path = None
        self.modules = tuple()
        self.aliases = tuple()

    def __eq__(self, other):
        # Equality is based on repr, which encodes both name and modules.
        if not isinstance(other, CompletionTarget):
            return False

        return repr(self) == repr(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        return self.name.__lt__(other.name)

    def __gt__(self, other):
        return self.name.__gt__(other.name)

    def __hash__(self):
        # Must stay consistent with __eq__, which also compares by repr.
        return hash(repr(self))

    def __repr__(self):
        if not self.modules:
            return self.name

        return '%s (%s)' % (self.name, ', '.join(self.modules))
|
|
|
|
|
|
|
|
|
|
|
|
class DirectoryTarget(CompletionTarget):
    """Directory target."""
    def __init__(self, path, modules):
        """
        :type path: str
        :type modules: tuple[str]
        """
        super(DirectoryTarget, self).__init__()

        # A directory completes by its path and exposes the modules it contains.
        self.modules = modules
        self.name = path
        self.path = path
|
|
|
|
|
|
|
|
|
|
|
|
class TestTarget(CompletionTarget):
    """Generic test target."""
    def __init__(self, path, module_path, module_prefix, base_path, symlink=None):
        """
        :type path: str
        :type module_path: str | None
        :type module_prefix: str | None
        :type base_path: str
        :type symlink: bool | None
        """
        super(TestTarget, self).__init__()

        # Detect symlink status on demand when the caller did not supply it.
        if symlink is None:
            symlink = os.path.islink(to_bytes(path.rstrip(os.path.sep)))

        self.name = path
        self.path = path
        # Normalized with a trailing slash for prefix comparisons elsewhere.
        self.base_path = base_path + '/' if base_path else None
        self.symlink = symlink

        name, ext = os.path.splitext(os.path.basename(self.path))

        # A file under the module path with a module extension is a module,
        # unless it is a package __init__ file.
        if module_path and is_subdir(path, module_path) and name != '__init__' and ext in MODULE_EXTENSIONS:
            # Strip the optional prefix (e.g. 'test_') and any leading underscores
            # (used to mark deprecated modules) to get the module name.
            self.module = name[len(module_prefix or ''):].lstrip('_')
            self.modules = (self.module,)
        else:
            self.module = None
            self.modules = tuple()

        # Aliases: the full path, the module name (if any), and every ancestor
        # directory of the path with a trailing slash.
        aliases = [self.path, self.module]
        parts = self.path.split('/')

        for i in range(1, len(parts)):
            alias = '%s/' % '/'.join(parts[:i])
            aliases.append(alias)

        # Drop the None entry added when there is no module.
        aliases = [a for a in aliases if a]

        self.aliases = tuple(sorted(aliases))
|
|
|
|
|
|
|
|
|
|
|
|
class IntegrationTarget(CompletionTarget):
    """Integration test target."""
    # Group names which indicate a non-POSIX target; anything else defaults to posix.
    non_posix = frozenset((
        'network',
        'windows',
    ))

    # Group names excluded from the '{group}/{name}' alias expansion below.
    categories = frozenset(non_posix | frozenset((
        'posix',
        'module',
        'needs',
        'skip',
    )))

    def __init__(self, path, modules, prefixes):
        """
        :type path: str
        :type modules: frozenset[str]
        :type prefixes: dict[str, str]
        """
        super(IntegrationTarget, self).__init__()

        self.relative_path = os.path.relpath(path, data_context().content.integration_targets_path)
        # Nested target directories use '.' separators in the target name.
        self.name = self.relative_path.replace(os.path.sep, '.')
        self.path = path

        # script_path and type

        file_paths = data_context().content.get_files(path)
        runme_path = os.path.join(path, 'runme.sh')

        # A runme.sh script makes this a script target; otherwise it is treated as a role.
        if runme_path in file_paths:
            self.type = 'script'
            self.script_path = runme_path
        else:
            self.type = 'role'  # ansible will consider these empty roles, so ansible-test should as well
            self.script_path = None

        # static_aliases

        # The optional 'aliases' file lists extra aliases/groups, one per line.
        aliases_path = os.path.join(path, 'aliases')

        if aliases_path in file_paths:
            static_aliases = tuple(read_lines_without_comments(aliases_path, remove_blank_lines=True))
        else:
            static_aliases = tuple()

        # modules

        # A target named after a module (optionally with a 'win_' prefix) tests that module.
        if self.name in modules:
            module_name = self.name
        elif self.name.startswith('win_') and self.name[4:] in modules:
            module_name = self.name[4:]
        else:
            module_name = None

        self.modules = tuple(sorted(a for a in static_aliases + tuple([module_name]) if a in modules))

        # groups

        groups = [self.type]
        groups += [a for a in static_aliases if a not in modules]
        groups += ['module/%s' % m for m in self.modules]

        if not self.modules:
            groups.append('non_module')

        if 'destructive' not in groups:
            groups.append('non_destructive')

        # The leading '{prefix}_' of the target name can map to a group via target-prefixes files.
        if '_' in self.name:
            prefix = self.name[:self.name.find('_')]
        else:
            prefix = None

        if prefix in prefixes:
            group = prefixes[prefix]

            if group != prefix:
                group = '%s/%s' % (group, prefix)

            groups.append(group)

        if self.name.startswith('win_'):
            groups.append('windows')

        if self.name.startswith('connection_'):
            groups.append('connection')

        # Setup/prepare targets and unknown types are hidden from direct selection.
        if self.name.startswith('setup_') or self.name.startswith('prepare_'):
            groups.append('hidden')

        if self.type not in ('script', 'role'):
            groups.append('hidden')

        targets_relative_path = data_context().content.integration_targets_path

        # Collect skip entries before group expansion to avoid registering more specific skip entries as less specific versions.
        self.skips = tuple(g for g in groups if g.startswith('skip/'))

        # Collect file paths before group expansion to avoid including the directories.
        # Ignore references to test targets, as those must be defined using `needs/target/*` or other target references.
        self.needs_file = tuple(sorted(set('/'.join(g.split('/')[2:]) for g in groups if
                                           g.startswith('needs/file/') and not g.startswith('needs/file/%s/' % targets_relative_path))))

        # network platform
        networks = [g.split('/')[1] for g in groups if g.startswith('network/')]
        self.network_platform = networks[0] if networks else None

        # Expand every 'a/b/c' group into its parent groups 'a' and 'a/b'.
        # islice caps iteration at the original length, so newly appended
        # parents are not themselves re-expanded.
        for group in itertools.islice(groups, 0, len(groups)):
            if '/' in group:
                parts = group.split('/')
                for i in range(1, len(parts)):
                    groups.append('/'.join(parts[:i]))

        if not any(g in self.non_posix for g in groups):
            groups.append('posix')

        # aliases

        aliases = [self.name] + \
                  ['%s/' % g for g in groups] + \
                  ['%s/%s' % (g, self.name) for g in groups if g not in self.categories]

        # Hidden targets are only reachable through 'hidden/' prefixed aliases.
        if 'hidden/' in aliases:
            aliases = ['hidden/'] + ['hidden/%s' % a for a in aliases if not a.startswith('hidden/')]

        self.aliases = tuple(sorted(set(aliases)))

        # configuration

        self.setup_once = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('setup/once/'))))
        self.setup_always = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('setup/always/'))))
        self.needs_target = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('needs/target/'))))
|
2017-08-23 20:09:50 +02:00
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
class TargetPatternsNotMatched(ApplicationError):
    """One or more targets were not matched when a match was required."""
    def __init__(self, patterns):
        """
        :type patterns: set[str]
        """
        self.patterns = sorted(patterns)

        if len(self.patterns) == 1:
            message = 'Target pattern not matched: %s' % self.patterns[0]
        else:
            message = 'Target patterns not matched:\n%s' % '\n'.join(self.patterns)

        super(TargetPatternsNotMatched, self).__init__(message)
|