Update to ansible-test to prepare for collections.
Changes to simplify merging of collections support.
parent f0eaf1fb39
commit 2ed78b650f
2 changed files with 51 additions and 39 deletions
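The hunks below all apply the same refactoring: instead of filtering and yielding results while still walking the tree, each function first collects candidate paths into a plain list and then filters that list in a separate pass. A minimal sketch of the before/after shape, using hypothetical names (iter_targets_old, iter_targets_new) rather than the real ansible-test helpers:

import os


def iter_targets_old(root):
    """Old shape: filter and yield while still inside the os.walk() loop."""
    for dir_path, _dir_names, file_names in os.walk(root):
        for file_name in file_names:
            if file_name.startswith('.'):
                continue

            yield os.path.join(dir_path, file_name)


def iter_targets_new(root):
    """New shape: collect every path first, then filter the flat list."""
    file_paths = []

    for dir_path, _dir_names, file_names in os.walk(root):
        for file_name in file_names:
            file_paths.append(os.path.join(dir_path, file_name))

    for file_path in file_paths:
        if os.path.basename(file_path).startswith('.'):
            continue

        yield file_path

Separating how the paths are gathered from how they are filtered and yielded is presumably what makes the later collections work easier to merge: a collection-aware walker only has to replace the gathering step, leaving the filtering and yield logic untouched.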
@@ -848,13 +848,12 @@ def complete_network_testcase(prefix, parsed_args, **_):
         return []
 
     test_dir = 'test/integration/targets/%s/tests' % parsed_args.include[0]
-    connections = os.listdir(test_dir)
+    connection_dirs = [path for path in [os.path.join(test_dir, name) for name in os.listdir(test_dir)] if os.path.isdir(path)]
 
-    for conn in connections:
-        if os.path.isdir(os.path.join(test_dir, conn)):
-            for testcase in os.listdir(os.path.join(test_dir, conn)):
-                if testcase.startswith(prefix):
-                    testcases.append(testcase.split('.')[0])
+    for connection_dir in connection_dirs:
+        for testcase in os.listdir(connection_dir):
+            if testcase.startswith(prefix):
+                testcases.append(testcase.split('.')[0])
 
     return testcases
@@ -209,12 +209,11 @@ def walk_integration_targets():
     """
     path = 'test/integration/targets'
     modules = frozenset(target.module for target in walk_module_targets())
-    paths = sorted(os.path.join(path, p) for p in os.listdir(path))
+    paths = sorted(path for path in [os.path.join(path, p) for p in os.listdir(path)] if os.path.isdir(path))
     prefixes = load_integration_prefixes()
 
     for path in paths:
-        if os.path.isdir(path):
-            yield IntegrationTarget(path, modules, prefixes)
+        yield IntegrationTarget(path, modules, prefixes)
 
 
 def load_integration_prefixes():
@@ -222,12 +221,12 @@ def load_integration_prefixes():
     :rtype: dict[str, str]
     """
     path = 'test/integration'
-    names = sorted(f for f in os.listdir(path) if os.path.splitext(f)[0] == 'target-prefixes')
+    file_paths = sorted(os.path.join(path, f) for f in os.listdir(path) if os.path.splitext(f)[0] == 'target-prefixes')
     prefixes = {}
 
-    for name in names:
-        prefix = os.path.splitext(name)[1][1:]
-        with open(os.path.join(path, name), 'r') as prefix_fd:
+    for file_path in file_paths:
+        prefix = os.path.splitext(file_path)[1][1:]
+        with open(file_path, 'r') as prefix_fd:
             prefixes.update(dict((k, prefix) for k in prefix_fd.read().splitlines()))
 
     return prefixes
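For context, load_integration_prefixes() maps each target name listed in a test/integration/target-prefixes.<prefix> file to that prefix; the hunk above only switches the loop from bare file names to full paths. A rough illustration of the resulting mapping, using made-up file contents (the real files list actual target names, one per line):

import os

# Hypothetical prefix file name and contents, for illustration only.
file_path = 'test/integration/target-prefixes.network'
contents = 'ios_config\neos_config\n'

# The prefix is the file extension without the leading dot: 'network'.
prefix = os.path.splitext(file_path)[1][1:]

# Same dict construction as in load_integration_prefixes(), applied to the
# in-memory contents instead of an open file handle.
prefixes = dict((k, prefix) for k in contents.splitlines())

print(prefixes)  # {'ios_config': 'network', 'eos_config': 'network'}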
@@ -242,6 +241,8 @@ def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None,
     :type extra_dirs: tuple[str] | None
     :rtype: collections.Iterable[TestTarget]
     """
+    file_paths = []
+
     for root, _dir_names, file_names in os.walk(path or '.', topdown=False):
         if root.endswith('/__pycache__'):
             continue
@@ -256,25 +257,28 @@ def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None,
             continue
 
         for file_name in file_names:
-            name, ext = os.path.splitext(os.path.basename(file_name))
-
-            if name.startswith('.'):
+            if file_name.startswith('.'):
                 continue
 
-            if extensions and ext not in extensions:
-                continue
-
-            if prefix and not name.startswith(prefix):
-                continue
-
-            file_path = os.path.join(root, file_name)
-
-            if os.path.islink(file_path):
-                # special case to allow a symlink of ansible_release.py -> ../release.py
-                if file_path != 'lib/ansible/module_utils/ansible_release.py':
-                    continue
-
-            yield TestTarget(file_path, module_path, prefix, path)
+            file_paths.append(os.path.join(root, file_name))
+
+    for file_path in file_paths:
+        name, ext = os.path.splitext(os.path.basename(file_path))
+
+        if extensions and ext not in extensions:
+            continue
+
+        if prefix and not name.startswith(prefix):
+            continue
+
+        if os.path.islink(file_path):
+            # special case to allow a symlink of ansible_release.py -> ../release.py
+            if file_path != 'lib/ansible/module_utils/ansible_release.py':
+                continue
+
+        yield TestTarget(file_path, module_path, prefix, path)
+
+    file_paths = []
 
     if extra_dirs:
         for extra_dir in extra_dirs:
@@ -283,8 +287,14 @@ def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None,
             for file_name in file_names:
                 file_path = os.path.join(extra_dir, file_name)
 
-                if os.path.isfile(file_path) and not os.path.islink(file_path):
-                    yield TestTarget(file_path, module_path, prefix, path)
+                if os.path.isfile(file_path):
+                    file_paths.append(file_path)
+
+    for file_path in file_paths:
+        if os.path.islink(file_path):
+            continue
+
+        yield TestTarget(file_path, module_path, prefix, path)
 
 
 def analyze_integration_target_dependencies(integration_targets):
@@ -312,24 +322,27 @@ def analyze_integration_target_dependencies(integration_targets):
     # handle symlink dependencies between targets
     # this use case is supported, but discouraged
     for target in integration_targets:
+        paths = []
+
         for root, _dummy, file_names in os.walk(target.path):
             for name in file_names:
-                path = os.path.join(root, name)
-
-                if not os.path.islink(path):
-                    continue
-
-                real_link_path = os.path.realpath(path)
-
-                if not real_link_path.startswith(real_target_root):
-                    continue
-
-                link_target = real_link_path[len(real_target_root):].split('/')[0]
-
-                if link_target == target.name:
-                    continue
-
-                dependencies[link_target].add(target.name)
+                paths.append(os.path.join(root, name))
+
+        for path in paths:
+            if not os.path.islink(path):
+                continue
+
+            real_link_path = os.path.realpath(path)
+
+            if not real_link_path.startswith(real_target_root):
+                continue
+
+            link_target = real_link_path[len(real_target_root):].split('/')[0]
+
+            if link_target == target.name:
+                continue
+
+            dependencies[link_target].add(target.name)
 
     # intentionally primitive analysis of role meta to avoid a dependency on pyyaml
     # script based targets are scanned as they may execute a playbook with role dependencies