"""Execute Ansible tests."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import os
import datetime
import re
import time
import textwrap
import functools
import hashlib
import difflib
import filecmp
import random
import string
import shutil

from . import types as t

from .thread import (
    WrappedThread,
)

from .core_ci import (
    AnsibleCoreCI,
    SshKey,
)

from .manage_ci import (
    ManageWindowsCI,
    ManageNetworkCI,
)

from .cloud import (
    cloud_filter,
    cloud_init,
    get_cloud_environment,
    get_cloud_platforms,
    CloudEnvironmentConfig,
)

from .io import (
    make_dirs,
    open_text_file,
    read_binary_file,
    read_text_file,
    write_text_file,
)

from .util import (
    ApplicationWarning,
    ApplicationError,
    SubprocessError,
    display,
    remove_tree,
    find_executable,
    raw_command,
    get_available_port,
    generate_pip_command,
    find_python,
    cmd_quote,
    ANSIBLE_LIB_ROOT,
    ANSIBLE_TEST_DATA_ROOT,
    ANSIBLE_TEST_CONFIG_ROOT,
    get_ansible_version,
    tempdir,
    open_zipfile,
    SUPPORTED_PYTHON_VERSIONS,
    str_to_version,
)

from .util_common import (
    get_docker_completion,
    get_network_settings,
    get_remote_completion,
    get_python_path,
    intercept_command,
    named_temporary_file,
    run_command,
    write_json_test_results,
    ResultType,
    handle_layout_messages,
)

from .docker_util import (
    docker_pull,
    docker_run,
    docker_available,
    docker_rm,
    get_docker_container_id,
    get_docker_container_ip,
)

from .ansible_util import (
    ansible_environment,
    check_pyyaml,
)

from .target import (
    IntegrationTarget,
    walk_internal_targets,
    walk_posix_integration_targets,
    walk_network_integration_targets,
    walk_windows_integration_targets,
    TIntegrationTarget,
)

from .ci import (
    get_ci_provider,
)

from .classification import (
    categorize_changes,
)

from .config import (
    TestConfig,
    EnvironmentConfig,
    IntegrationConfig,
    NetworkIntegrationConfig,
    PosixIntegrationConfig,
    ShellConfig,
    WindowsIntegrationConfig,
    TIntegrationConfig,
)

from .metadata import (
    ChangeDescription,
)

from .integration import (
    integration_test_environment,
    integration_test_config_file,
    setup_common_temp_dir,
    get_inventory_relative_path,
    check_inventory,
    delegate_inventory,
)

from .data import (
    data_context,
)


HTTPTESTER_HOSTS = (
    'ansible.http.tests',
    'sni1.ansible.http.tests',
    'fail.ansible.http.tests',
)
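# inject_httptester() below maps these hostnames to the httptester container via /etc/hosts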


def check_startup():
    """Checks to perform at startup before running commands."""
    check_legacy_modules()


def check_legacy_modules():
    """Detect conflicts with legacy core/extras module directories to avoid problems later."""
    for directory in 'core', 'extras':
        path = 'lib/ansible/modules/%s' % directory

        for root, _dir_names, file_names in os.walk(path):
            if file_names:
                # the directory shouldn't exist, but if it does, it must contain no files
                raise ApplicationError('Files prohibited in "%s". '
                                       'These are most likely legacy modules from version 2.2 or earlier.' % root)


def create_shell_command(command):
    """
    :type command: list[str]
    :rtype: list[str]
    """
    optional_vars = (
        'TERM',
    )

    cmd = ['/usr/bin/env']
    cmd += ['%s=%s' % (var, os.environ[var]) for var in optional_vars if var in os.environ]
    cmd += command

    return cmd


def get_setuptools_version(args, python):  # type: (EnvironmentConfig, str) -> t.Tuple[int]
    """Return the setuptools version for the given python."""
    try:
        return str_to_version(raw_command([python, '-c', 'import setuptools; print(setuptools.__version__)'], capture=True)[0])
    except SubprocessError:
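        # raw_command raises SubprocessError when the target interpreter or its setuptools is unavailable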
        if args.explain:
            return tuple()  # ignore errors in explain mode in case setuptools is not already installed

        raise


def get_cryptography_requirement(args, python_version):  # type: (EnvironmentConfig, str) -> str
    """
    Return the correct cryptography requirement for the given python version.
    The version of cryptography installed depends on the python version and setuptools version.
    """
    python = find_python(python_version)
    setuptools_version = get_setuptools_version(args, python)

    if setuptools_version >= (18, 5):
        if python_version == '2.6':
            # cryptography 2.2+ requires python 2.7+
            # see https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#22---2018-03-19
            cryptography = 'cryptography < 2.2'
        else:
            cryptography = 'cryptography'
    else:
        # cryptography 2.1+ requires setuptools 18.5+
        # see https://github.com/pyca/cryptography/blob/62287ae18383447585606b9d0765c0f1b8a9777c/setup.py#L26
        cryptography = 'cryptography < 2.1'

    return cryptography


def install_command_requirements(args, python_version=None, context=None, enable_pyyaml_check=False):
    """
    :type args: EnvironmentConfig
    :type python_version: str | None
    :type context: str | None
    :type enable_pyyaml_check: bool
    """
    if not args.explain:
        make_dirs(ResultType.COVERAGE.path)
        make_dirs(ResultType.DATA.path)

    if isinstance(args, ShellConfig):
        if args.raw:
            return

    generate_egg_info(args)

    if not args.requirements:
        return

    if isinstance(args, ShellConfig):
        return

    packages = []

    if isinstance(args, TestConfig):
        if args.coverage:
            packages.append('coverage')
        if args.junit:
            packages.append('junit-xml')

    if not python_version:
        python_version = args.python_version

    pip = generate_pip_command(find_python(python_version))

    # skip packages which have already been installed for python_version
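    # the cache is stored as an attribute on this function, so it persists for the life of the process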
    try:
        package_cache = install_command_requirements.package_cache
    except AttributeError:
        package_cache = install_command_requirements.package_cache = {}

    installed_packages = package_cache.setdefault(python_version, set())
    skip_packages = [package for package in packages if package in installed_packages]

    for package in skip_packages:
        packages.remove(package)

    installed_packages.update(packages)

    if args.command != 'sanity':
        install_ansible_test_requirements(args, pip)

        # make sure setuptools is available before trying to install cryptography
        # the installed version of setuptools affects the version of cryptography to install
        run_command(args, generate_pip_install(pip, '', packages=['setuptools']))

        # install the latest cryptography version that the current requirements can support
        # use a custom constraints file to avoid the normal constraints file overriding the chosen version of cryptography
        # if not installed here, later install commands may try to install an unsupported version due to the presence of older setuptools
        # this is done instead of upgrading setuptools to allow tests to function with older distribution provided versions of setuptools
        run_command(args, generate_pip_install(pip, '',
                                               packages=[get_cryptography_requirement(args, python_version)],
                                               constraints=os.path.join(ANSIBLE_TEST_DATA_ROOT, 'cryptography-constraints.txt')))

    commands = [generate_pip_install(pip, args.command, packages=packages, context=context)]

    if isinstance(args, IntegrationConfig):
        for cloud_platform in get_cloud_platforms(args):
            commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))

    commands = [cmd for cmd in commands if cmd]

    if not commands:
        return  # no need to detect changes or run pip check since we are not making any changes

    # only look for changes when more than one requirements file is needed
    detect_pip_changes = len(commands) > 1

    # first pass to install requirements, changes expected unless environment is already set up
    install_ansible_test_requirements(args, pip)
    changes = run_pip_commands(args, pip, commands, detect_pip_changes)

    if changes:
        # second pass to check for conflicts in requirements, changes are not expected here
        changes = run_pip_commands(args, pip, commands, detect_pip_changes)

        if changes:
            raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
                                   '\n'.join((' '.join(cmd_quote(c) for c in cmd) for cmd in changes)))

    if args.pip_check:
        # ask pip to check for conflicts between installed packages
        try:
            run_command(args, pip + ['check', '--disable-pip-version-check'], capture=True)
        except SubprocessError as ex:
            if ex.stderr.strip() == 'ERROR: unknown command "check"':
                display.warning('Cannot check pip requirements for conflicts because "pip check" is not supported.')
            else:
                raise

    if enable_pyyaml_check:
        # pyyaml may have been one of the requirements that was installed, so perform an optional check for it
        check_pyyaml(args, python_version, required=False)


def install_ansible_test_requirements(args, pip):  # type: (EnvironmentConfig, t.List[str]) -> None
    """Install requirements for ansible-test for the given pip if not already installed."""
    try:
        installed = install_command_requirements.installed
    except AttributeError:
        installed = install_command_requirements.installed = set()

    if tuple(pip) in installed:
        return

    # make sure basic ansible-test requirements are met, including making sure that pip is recent enough to support constraints
    # virtualenvs created by older distributions may include very old pip versions, such as those created in the centos6 test container (pip 6.0.8)
    run_command(args, generate_pip_install(pip, 'ansible-test', use_constraints=False))

    installed.add(tuple(pip))


def run_pip_commands(args, pip, commands, detect_pip_changes=False):
    """
    :type args: EnvironmentConfig
    :type pip: list[str]
    :type commands: list[list[str]]
    :type detect_pip_changes: bool
    :rtype: list[list[str]]
    """
    changes = []

    after_list = pip_list(args, pip) if detect_pip_changes else None
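    # take an initial snapshot of installed packages; each command below is bracketed by before/after snapshots to detect changes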

    for cmd in commands:
        if not cmd:
            continue

        before_list = after_list

        run_command(args, cmd)

        after_list = pip_list(args, pip) if detect_pip_changes else None

        if before_list != after_list:
            changes.append(cmd)

    return changes


def pip_list(args, pip):
    """
    :type args: EnvironmentConfig
    :type pip: list[str]
    :rtype: str
    """
    stdout = run_command(args, pip + ['list'], capture=True)[0]
    return stdout


def generate_egg_info(args):
    """
    :type args: EnvironmentConfig
    """
    if args.explain:
        return

    ansible_version = get_ansible_version()

    # inclusion of the version number in the path is optional
    # see: https://setuptools.readthedocs.io/en/latest/formats.html#filename-embedded-metadata
    egg_info_path = ANSIBLE_LIB_ROOT + '_base-%s.egg-info' % ansible_version

    if os.path.exists(egg_info_path):
        return

    egg_info_path = ANSIBLE_LIB_ROOT + '_base.egg-info'

    if os.path.exists(egg_info_path):
        return

    # minimal PKG-INFO stub following the format defined in PEP 241
    # required for older setuptools versions to avoid a traceback when importing pkg_resources from packages like cryptography
    # newer setuptools versions are happy with an empty directory
    # including a stub here means we don't need to locate the existing file or have setup.py generate it when running from source
    pkg_info = '''
Metadata-Version: 1.0
Name: ansible
Version: %s
Platform: UNKNOWN
Summary: Radically simple IT automation
Author-email: info@ansible.com
License: GPLv3+
''' % get_ansible_version()

    pkg_info_path = os.path.join(egg_info_path, 'PKG-INFO')

    write_text_file(pkg_info_path, pkg_info.lstrip(), create_directories=True)


def generate_pip_install(pip, command, packages=None, constraints=None, use_constraints=True, context=None):
    """
    :type pip: list[str]
    :type command: str
    :type packages: list[str] | None
    :type constraints: str | None
    :type use_constraints: bool
    :type context: str | None
    :rtype: list[str] | None
    """
    constraints = constraints or os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'constraints.txt')
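    # callers may pass their own constraints file (e.g. the cryptography-specific one above); otherwise the shared default is used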
    requirements = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', '%s.txt' % ('%s.%s' % (command, context) if context else command))

    options = []

    if os.path.exists(requirements) and os.path.getsize(requirements):
        options += ['-r', requirements]

    if command == 'sanity' and data_context().content.is_ansible:
        requirements = os.path.join(data_context().content.sanity_path, 'code-smell', '%s.requirements.txt' % context)

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

    if command == 'units':
        requirements = os.path.join(data_context().content.unit_path, 'requirements.txt')

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

    if command in ('integration', 'windows-integration', 'network-integration'):
        requirements = os.path.join(data_context().content.integration_path, 'requirements.txt')

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

        requirements = os.path.join(data_context().content.integration_path, '%s.requirements.txt' % command)

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

    if packages:
        options += packages

    if not options:
        return None

    if use_constraints:
        options.extend(['-c', constraints])

    return pip + ['install', '--disable-pip-version-check'] + options


def command_shell(args):
    """
    :type args: ShellConfig
    """
    if args.delegate:
        raise Delegate()

    install_command_requirements(args)

    if args.inject_httptester:
        inject_httptester(args)

    cmd = create_shell_command(['bash', '-i'])
    run_command(args, cmd)


def command_posix_integration(args):
    """
    :type args: PosixIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_relative_path = get_inventory_relative_path(args)
    inventory_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, os.path.basename(inventory_relative_path))

    all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets)
    command_integration_filtered(args, internal_targets, all_targets, inventory_path)


def command_network_integration(args):
    """
    :type args: NetworkIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_relative_path = get_inventory_relative_path(args)
    template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'

    if args.inventory:
        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
    else:
        inventory_path = os.path.join(data_context().content.root, inventory_relative_path)

    if args.no_temp_workdir:
        # temporary solution to keep DCI tests working
        inventory_exists = os.path.exists(inventory_path)
    else:
        inventory_exists = os.path.isfile(inventory_path)

    if not args.explain and not args.platform and not inventory_exists:
        raise ApplicationError(
            'Inventory not found: %s\n'
            'Use --inventory to specify the inventory path.\n'
            'Use --platform to provision resources and generate an inventory file.\n'
            'See also inventory template: %s' % (inventory_path, template_path)
        )

    check_inventory(args, inventory_path)
    delegate_inventory(args, inventory_path)

    all_targets = tuple(walk_network_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
    instances = []  # type: t.List[WrappedThread]

    if args.platform:
        get_python_path(args, args.python_executable)  # initialize before starting threads

        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        for platform_version in args.platform:
            platform, version = platform_version.split('/', 1)
            config = configs.get(platform_version)
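            # platforms which were not provisioned by network_init have no saved config and are skipped below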

            if not config:
                continue

            instance = WrappedThread(functools.partial(network_run, args, platform, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = network_inventory(remotes)

        display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)

        if not args.explain:
            write_text_file(inventory_path, inventory)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets, inventory_path)
        success = True
    finally:
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()


def network_init(args, internal_targets):  # type: (NetworkIntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> None
    """Initialize platforms for network integration tests."""
    if not args.platform:
        return

    if args.metadata.instance_config is not None:
        return

    platform_targets = set(a for target in internal_targets for a in target.aliases if a.startswith('network/'))

    instances = []  # type: t.List[WrappedThread]

    # generate an ssh key (if needed) up front once, instead of for each instance
    SshKey(args)

    for platform_version in args.platform:
        platform, version = platform_version.split('/', 1)
        platform_target = 'network/%s/' % platform

        if platform_target not in platform_targets:
            display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
                platform_version, platform))
            continue

        instance = WrappedThread(functools.partial(network_start, args, platform, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]


def network_start(args, platform, version):
    """
    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :rtype: AnsibleCoreCI
    """
    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
    core_ci.start()

    return core_ci.save()


def network_run(args, platform, version, config):
    """
    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    core_ci.load(config)
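    # with load=False the instance is not initialized from saved state; the config captured by network_init is applied via load() instead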
    core_ci.wait()

    manage = ManageNetworkCI(core_ci)
    manage.wait()

    return core_ci


def network_inventory(remotes):
    """
    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    groups = dict([(remote.platform, []) for remote in remotes])
    net = []

    for remote in remotes:
        options = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
        )

        settings = get_network_settings(remote.args, remote.platform, remote.version)

        options.update(settings.inventory_vars)

        groups[remote.platform].append(
            '%s %s' % (
                remote.name.replace('.', '-'),
                ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
            )
        )

        net.append(remote.platform)

    groups['net:children'] = net
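    # the net group aggregates every platform group, allowing plays to target all network hosts at once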

    template = ''

    for group in groups:
        hosts = '\n'.join(groups[group])

        template += textwrap.dedent("""
        [%s]
        %s
        """) % (group, hosts)

    inventory = template

    return inventory


def command_windows_integration(args):
    """
    :type args: WindowsIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_relative_path = get_inventory_relative_path(args)
    template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'

    if args.inventory:
        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
    else:
        inventory_path = os.path.join(data_context().content.root, inventory_relative_path)

    if not args.explain and not args.windows and not os.path.isfile(inventory_path):
        raise ApplicationError(
            'Inventory not found: %s\n'
            'Use --inventory to specify the inventory path.\n'
            'Use --windows to provision resources and generate an inventory file.\n'
            'See also inventory template: %s' % (inventory_path, template_path)
        )

    check_inventory(args, inventory_path)
    delegate_inventory(args, inventory_path)

    all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
    instances = []  # type: t.List[WrappedThread]
    pre_target = None
    post_target = None
    httptester_id = None

    if args.windows:
        get_python_path(args, args.python_executable)  # initialize before starting threads

        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        for version in args.windows:
            config = configs['windows/%s' % version]

            instance = WrappedThread(functools.partial(windows_run, args, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = windows_inventory(remotes)

        display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)

        if not args.explain:
            write_text_file(inventory_path, inventory)

    use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in internal_targets)
    # if running under Docker delegation, the httptester may have already been started
    docker_httptester = bool(os.environ.get("HTTPTESTER", False))

    if use_httptester and not docker_available() and not docker_httptester:
        display.warning('Assuming --disable-httptester since `docker` is not available.')
    elif use_httptester:
        if docker_httptester:
            # we are running in a Docker container that is linked to the httptester container, we just need to
            # forward these requests to the linked hostname
            first_host = HTTPTESTER_HOSTS[0]
            ssh_options = ["-R", "8080:%s:80" % first_host, "-R", "8443:%s:443" % first_host]
        else:
            # we are running directly and need to start the httptester container ourselves and forward the ports
            # from there; set args.inject_httptester manually so the HTTPTESTER env var is set during the run
            args.inject_httptester = True
            httptester_id, ssh_options = start_httptester(args)

        # to get this SSH command to run in the background we need to run it in the background (-f) and disable
        # the pty allocation (-T)
        ssh_options.insert(0, "-fT")

        # create a script that will continue to run in the background until the script is deleted; this will
        # clean up and close the connection
        def forward_ssh_ports(target):
            """
            :type target: IntegrationTarget
            """
            if 'needs/httptester/' not in target.aliases:
                return

            for remote in [r for r in remotes if r.version != '2008']:
                manage = ManageWindowsCI(remote)
                manage.upload(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'windows-httptester.ps1'), watcher_path)

                # We cannot pass an array of strings with -File so we just use a delimiter for multiple values
                script = "powershell.exe -NoProfile -ExecutionPolicy Bypass -File .\\%s -Hosts \"%s\"" \
                         % (watcher_path, "|".join(HTTPTESTER_HOSTS))
                if args.verbosity > 3:
                    script += " -Verbose"
                manage.ssh(script, options=ssh_options, force_pty=False)

        def cleanup_ssh_ports(target):
            """
            :type target: IntegrationTarget
            """
            if 'needs/httptester/' not in target.aliases:
                return

            for remote in [r for r in remotes if r.version != '2008']:
                # delete the tmp file that keeps the http-tester alive
                manage = ManageWindowsCI(remote)
                manage.ssh("cmd.exe /c \"del %s /F /Q\"" % watcher_path, force_pty=False)

        watcher_path = "ansible-test-http-watcher-%s.ps1" % time.time()
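        # the timestamped filename keeps watcher scripts from overlapping runs distinct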
        pre_target = forward_ssh_ports
        post_target = cleanup_ssh_ports

    def run_playbook(playbook, run_playbook_vars):  # type: (str, t.Dict[str, t.Any]) -> None
        playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
        command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(run_playbook_vars)]
        if args.verbosity:
            command.append('-%s' % ('v' * args.verbosity))

        env = ansible_environment(args)
        intercept_command(args, command, '', env, disable_coverage=True)

    remote_temp_path = None

    if args.coverage and not args.coverage_check:
        # Create the remote directory that is writable by everyone. Use Ansible to talk to the remote host.
        remote_temp_path = 'C:\\ansible_test_coverage_%s' % time.time()
        playbook_vars = {'remote_temp_path': remote_temp_path}
        run_playbook('windows_coverage_setup.yml', playbook_vars)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets, inventory_path, pre_target=pre_target,
                                     post_target=post_target, remote_temp_path=remote_temp_path)
        success = True
    finally:
        if httptester_id:
            docker_rm(args, httptester_id)

        if remote_temp_path:
            # Zip up the coverage files that were generated and fetch them back to localhost.
            with tempdir() as local_temp_path:
                playbook_vars = {'remote_temp_path': remote_temp_path, 'local_temp_path': local_temp_path}
                run_playbook('windows_coverage_teardown.yml', playbook_vars)

                for filename in os.listdir(local_temp_path):
                    with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip:
                        coverage_zip.extractall(ResultType.COVERAGE.path)

        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()


# noinspection PyUnusedLocal
def windows_init(args, internal_targets):  # pylint: disable=locally-disabled, unused-argument
    """
    :type args: WindowsIntegrationConfig
    :type internal_targets: tuple[IntegrationTarget]
    """
    if not args.windows:
        return

    if args.metadata.instance_config is not None:
        return

    instances = []  # type: t.List[WrappedThread]

    for version in args.windows:
        instance = WrappedThread(functools.partial(windows_start, args, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]


def windows_start(args, version):
    """
    :type args: WindowsIntegrationConfig
    :type version: str
    :rtype: AnsibleCoreCI
    """
    core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
    core_ci.start()

    return core_ci.save()


def windows_run(args, version, config):
    """
    :type args: WindowsIntegrationConfig
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    core_ci.load(config)
    core_ci.wait()

    manage = ManageWindowsCI(core_ci)
    manage.wait()

    return core_ci


def windows_inventory(remotes):
    """
    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    hosts = []

    for remote in remotes:
        options = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_password=remote.connection.password,
            ansible_port=remote.connection.port,
        )

        # used for the connection_windows_ssh test target
        if remote.ssh_key:
            options["ansible_ssh_private_key_file"] = os.path.abspath(remote.ssh_key.key)

        if remote.name == 'windows-2008':
            options.update(
                # force 2008 to use PSRP for the connection plugin
                ansible_connection='psrp',
                ansible_psrp_auth='basic',
                ansible_psrp_cert_validation='ignore',
            )
        elif remote.name == 'windows-2016':
            options.update(
                # force 2016 to use NTLM + HTTP message encryption
                ansible_connection='winrm',
                ansible_winrm_server_cert_validation='ignore',
                ansible_winrm_transport='ntlm',
                ansible_winrm_scheme='http',
                ansible_port='5985',
            )
        else:
            options.update(
                ansible_connection='winrm',
                ansible_winrm_server_cert_validation='ignore',
            )

        hosts.append(
            '%s %s' % (
                remote.name.replace('/', '_'),
                ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
            )
        )

    template = """
    [windows]
    %s

    # support winrm binary module tests (temporary solution)
    [testhost:children]
    windows
    """

    template = textwrap.dedent(template)
    inventory = template % ('\n'.join(hosts))

    return inventory


def command_integration_filter(args,  # type: TIntegrationConfig
                               targets,  # type: t.Iterable[TIntegrationTarget]
                               init_callback=None,  # type: t.Callable[[TIntegrationConfig, t.Tuple[TIntegrationTarget, ...]], None]
                               ):  # type: (...) -> t.Tuple[TIntegrationTarget, ...]
    """Filter the given integration test targets."""
    targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
    changes = get_changes_filter(args)

    # special behavior when the --changed-all-target target is selected based on changes
    if args.changed_all_target in changes:
        # act as though the --changed-all-target target was in the include list
        if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
            args.include.append(args.changed_all_target)
            args.delegate_args += ['--include', args.changed_all_target]
        # act as though the --changed-all-target target was in the exclude list
        elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
            args.exclude.append(args.changed_all_target)

    require = args.require + changes
    exclude = args.exclude

    internal_targets = walk_internal_targets(targets, args.include, exclude, require)
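    # targets are walked twice: exclusions computed from this first pass are applied in a second pass below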
    environment_exclude = get_integration_filter(args, internal_targets)

    environment_exclude += cloud_filter(args, internal_targets)

    if environment_exclude:
        exclude += environment_exclude
        internal_targets = walk_internal_targets(targets, args.include, exclude, require)

    if not internal_targets:
        raise AllTargetsSkipped()

    if args.start_at and not any(target.name == args.start_at for target in internal_targets):
        raise ApplicationError('Start at target matches nothing: %s' % args.start_at)

    if init_callback:
        init_callback(args, internal_targets)

    cloud_init(args, internal_targets)

    vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)

    if os.path.exists(vars_file_src):
        def integration_config_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
            """
            Add the integration config vars file to the payload file list.
            This will preserve the file during delegation even if the file is ignored by source control.
            """
            files.append((vars_file_src, data_context().content.integration_vars_path))

        data_context().register_payload_callback(integration_config_callback)

    if args.delegate:
        raise Delegate(require=require, exclude=exclude, integration_targets=internal_targets)

    install_command_requirements(args)

    return internal_targets


def command_integration_filtered(args, targets, all_targets, inventory_path, pre_target=None, post_target=None,
                                 remote_temp_path=None):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type all_targets: tuple[IntegrationTarget]
    :type inventory_path: str
    :type pre_target: (IntegrationTarget) -> None | None
    :type post_target: (IntegrationTarget) -> None | None
    :type remote_temp_path: str | None
    """
    found = False
    passed = []
    failed = []

    targets_iter = iter(targets)
    all_targets_dict = dict((target.name, target) for target in all_targets)

    setup_errors = []
    setup_targets_executed = set()

    for target in all_targets:
        for setup_target in target.setup_once + target.setup_always:
            if setup_target not in all_targets_dict:
                setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))

    if setup_errors:
        raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))

    check_pyyaml(args, args.python_version)

    test_dir = os.path.join(ResultType.TMP.path, 'output_dir')

    if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError:
                if i == max_tries:
                    raise
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    # Windows is different as Ansible execution is done locally but the host is remote
    if args.inject_httptester and not isinstance(args, WindowsIntegrationConfig):
        inject_httptester(args)

    start_at_task = args.start_at_task

    results = {}

    current_environment = None  # type: t.Optional[EnvironmentDescription]

    # common temporary directory path that will be valid on both the controller and the remote
    # it must be common because it will be referenced in environment variables that are shared across multiple hosts
    common_temp_path = '/tmp/ansible-test-%s' % ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(8))

    setup_common_temp_dir(args, common_temp_path)

    try:
        for target in targets_iter:
            if args.start_at and not found:
                found = target.name == args.start_at

                if not found:
                    continue

            if args.list_targets:
                print(target.name)
                continue

            tries = 2 if args.retry_on_error else 1
            verbosity = args.verbosity
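            # the original verbosity is saved so the finally block below can restore it after a retry raises it to the maximum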
|
2017-07-15 04:11:25 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
cloud_environment = get_cloud_environment(args, target)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
original_environment = current_environment if current_environment else EnvironmentDescription(args)
|
|
|
|
current_environment = None
|
2017-05-05 10:23:00 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)
|
2017-05-11 13:05:21 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
try:
|
|
|
|
while tries:
|
|
|
|
tries -= 1
|
2017-05-11 13:05:21 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
try:
|
|
|
|
if cloud_environment:
|
|
|
|
cloud_environment.setup_once()
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2019-03-13 15:14:12 +01:00
|
|
|
run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, False)
|
2018-03-07 23:02:31 +01:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
start_time = time.time()
|
2017-08-26 00:14:47 +02:00
|
|
|
|
2019-03-13 15:14:12 +01:00
|
|
|
run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, True)
|
2017-08-26 00:14:47 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
if not args.explain:
|
|
|
|
# create a fresh test directory for each test target
|
|
|
|
remove_tree(test_dir)
|
|
|
|
make_dirs(test_dir)
|
2017-08-23 20:09:50 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
if pre_target:
|
|
|
|
pre_target(target)
|
2017-08-23 20:09:50 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
try:
|
|
|
|
if target.script_path:
|
2019-08-27 23:03:23 +02:00
|
|
|
command_integration_script(args, target, test_dir, inventory_path, common_temp_path,
|
|
|
|
remote_temp_path=remote_temp_path)
|
2019-03-08 01:44:26 +01:00
|
|
|
else:
|
2019-08-27 23:03:23 +02:00
|
|
|
command_integration_role(args, target, start_at_task, test_dir, inventory_path,
|
|
|
|
common_temp_path, remote_temp_path=remote_temp_path)
|
2019-03-08 01:44:26 +01:00
|
|
|
start_at_task = None
|
|
|
|
finally:
|
|
|
|
if post_target:
|
|
|
|
post_target(target)
|
2018-10-16 10:09:17 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
end_time = time.time()
|
2017-07-15 01:52:11 +02:00
|
|
|
|
2019-03-08 01:44:26 +01:00
|
|
|
                        results[target.name] = dict(
                            name=target.name,
                            type=target.type,
                            aliases=target.aliases,
                            modules=target.modules,
                            run_time_seconds=int(end_time - start_time),
                            setup_once=target.setup_once,
                            setup_always=target.setup_always,
                            coverage=args.coverage,
                            coverage_label=args.coverage_label,
                            python_version=args.python_version,
                        )

                        break
                    except SubprocessError:
                        if cloud_environment:
                            cloud_environment.on_failure(target, tries)

                        if not original_environment.validate(target.name, throw=False):
                            raise

                        if not tries:
                            raise

                        display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                        display.verbosity = args.verbosity = 6

                start_time = time.time()
                current_environment = EnvironmentDescription(args)
                end_time = time.time()

                EnvironmentDescription.check(original_environment, current_environment, target.name, throw=True)

                results[target.name]['validation_seconds'] = int(end_time - start_time)

                passed.append(target)
            except Exception as ex:
                failed.append(target)

                if args.continue_on_error:
                    display.error(ex)
                    continue

                display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

                next_target = next(targets_iter, None)

                if next_target:
                    display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

                raise
            finally:
                display.verbosity = args.verbosity = verbosity

    finally:
        if not args.explain:
            if args.coverage:
                coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name)
                coverage_save_path = ResultType.COVERAGE.path

                for filename in os.listdir(coverage_temp_path):
                    shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename))

            remove_tree(common_temp_path)

        result_name = '%s-%s.json' % (
            args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))

        data = dict(
            targets=results,
        )

        write_json_test_results(ResultType.DATA, result_name, data)

    if failed:
        raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
            len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))


def start_httptester(args):
    """
    :type args: EnvironmentConfig
    :rtype: str, list[str]
    """

    # map ports from remote -> localhost -> container
    # passing through localhost is only used when ansible-test is not already running inside a docker container
    ports = [
        dict(
            remote=8080,
            container=80,
        ),
        dict(
            remote=8443,
            container=443,
        ),
    ]

    container_id = get_docker_container_id()

    if container_id:
        display.info('Running in docker container: %s' % container_id, verbosity=1)
    else:
        for item in ports:
            item['localhost'] = get_available_port()

    docker_pull(args, args.httptester)

    httptester_id = run_httptester(args, dict((port['localhost'], port['container']) for port in ports if 'localhost' in port))

    if container_id:
        container_host = get_docker_container_ip(args, httptester_id)
        display.info('Found httptester container address: %s' % container_host, verbosity=1)
    else:
        container_host = 'localhost'

    ssh_options = []

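    # each -R option asks ssh to listen on the remote port and forward connections
    # back to the httptester container, directly or via the localhost mapping above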
    for port in ports:
        ssh_options += ['-R', '%d:%s:%d' % (port['remote'], container_host, port.get('localhost', port['container']))]

    return httptester_id, ssh_options


def run_httptester(args, ports=None):
    """
    :type args: EnvironmentConfig
    :type ports: dict[int, int] | None
    :rtype: str
    """
    options = [
        '--detach',
    ]

    if ports:
        for localhost_port, container_port in ports.items():
            options += ['-p', '%d:%d' % (localhost_port, container_port)]

    httptester_id = docker_run(args, args.httptester, options=options)[0]

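    # in explain mode no container is actually started, so substitute a placeholder id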
    if args.explain:
        httptester_id = 'httptester_id'
    else:
        httptester_id = httptester_id.strip()

    return httptester_id


def inject_httptester(args):
    """
    :type args: CommonConfig
    """
    comment = ' # ansible-test httptester\n'
    append_lines = ['127.0.0.1 %s%s' % (host, comment) for host in HTTPTESTER_HOSTS]
    hosts_path = '/etc/hosts'

    original_lines = read_text_file(hosts_path).splitlines(True)

    if not any(line.endswith(comment) for line in original_lines):
        write_text_file(hosts_path, ''.join(original_lines + append_lines))

    # determine which forwarding mechanism to use
    pfctl = find_executable('pfctl', required=False)
    iptables = find_executable('iptables', required=False)

    if pfctl:
        kldload = find_executable('kldload', required=False)

        if kldload:
            try:
                run_command(args, ['kldload', 'pf'], capture=True)
            except SubprocessError:
                pass  # already loaded

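        # BSD pf rules: redirect outbound 80/443 to the httptester ports forwarded from the container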
        rules = '''
rdr pass inet proto tcp from any to any port 80 -> 127.0.0.1 port 8080
rdr pass inet proto tcp from any to any port 443 -> 127.0.0.1 port 8443
'''
        cmd = ['pfctl', '-ef', '-']

        try:
            run_command(args, cmd, capture=True, data=rules)
        except SubprocessError:
            pass  # non-zero exit status on success

    elif iptables:
        ports = [
            (80, 8080),
            (443, 8443),
        ]

        for src, dst in ports:
            rule = ['-o', 'lo', '-p', 'tcp', '--dport', str(src), '-j', 'REDIRECT', '--to-port', str(dst)]

            try:
                # check for existing rule
                cmd = ['iptables', '-t', 'nat', '-C', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
            except SubprocessError:
                # append rule when it does not exist
                cmd = ['iptables', '-t', 'nat', '-A', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
    else:
        raise ApplicationError('No supported port forwarding mechanism detected.')


def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, inventory_path, temp_path, always):
    """
    :type args: IntegrationConfig
    :type test_dir: str
    :type target_names: list[str]
    :type targets_dict: dict[str, IntegrationTarget]
    :type targets_executed: set[str]
    :type inventory_path: str
    :type temp_path: str
    :type always: bool
    """
    for target_name in target_names:
        if not always and target_name in targets_executed:
            continue

        target = targets_dict[target_name]

        if not args.explain:
            # create a fresh test directory for each test target
            remove_tree(test_dir)
            make_dirs(test_dir)

        if target.script_path:
            command_integration_script(args, target, test_dir, inventory_path, temp_path)
        else:
            command_integration_role(args, target, None, test_dir, inventory_path, temp_path)

        targets_executed.add(target_name)


def integration_environment(args, target, test_dir, inventory_path, ansible_config, env_config):
    """
    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type test_dir: str
    :type inventory_path: str
    :type ansible_config: str | None
    :type env_config: CloudEnvironmentConfig | None
    :rtype: dict[str, str]
    """
    env = ansible_environment(args, ansible_config=ansible_config)

    if args.inject_httptester:
        env.update(dict(
            HTTPTESTER='1',
        ))

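    # the junit callback is always enabled; cloud environments may request additional callback plugins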
    callback_plugins = ['junit'] + (env_config.callback_plugins or [] if env_config else [])

    integration = dict(
        JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
        ANSIBLE_CALLBACK_WHITELIST=','.join(sorted(set(callback_plugins))),
        ANSIBLE_TEST_CI=args.metadata.ci_provider or get_ci_provider().code,
        ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),
        OUTPUT_DIR=test_dir,
        INVENTORY_PATH=os.path.abspath(inventory_path),
    )

    if args.debug_strategy:
        env.update(dict(ANSIBLE_STRATEGY='debug'))

    if 'non_local/' in target.aliases:
        if args.coverage:
            display.warning('Skipping coverage reporting on Ansible modules for non-local test: %s' % target.name)

        env.update(dict(ANSIBLE_TEST_REMOTE_INTERPRETER=''))

    env.update(integration)

    return env


def command_integration_script(args, target, test_dir, inventory_path, temp_path, remote_temp_path=None):
    """
    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type test_dir: str
    :type inventory_path: str
    :type temp_path: str
    :type remote_temp_path: str | None
    """
    display.info('Running %s integration test script' % target.name)

    env_config = None

    if isinstance(args, PosixIntegrationConfig):
        cloud_environment = get_cloud_environment(args, target)

        if cloud_environment:
            env_config = cloud_environment.get_environment_config()

    with integration_test_environment(args, target, inventory_path) as test_env:
        cmd = ['./%s' % os.path.basename(target.script_path)]

        if args.verbosity:
            cmd.append('-' + ('v' * args.verbosity))

        env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
        cwd = os.path.join(test_env.targets_dir, target.relative_path)

        env.update(dict(
            # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
            ANSIBLE_PLAYBOOK_DIR=cwd,
        ))

        if env_config and env_config.env_vars:
            env.update(env_config.env_vars)

        with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path:
            if config_path:
                cmd += ['-e', '@%s' % config_path]

            module_coverage = 'non_local/' not in target.aliases
            intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
                              remote_temp_path=remote_temp_path, module_coverage=module_coverage)


def command_integration_role(args, target, start_at_task, test_dir, inventory_path, temp_path, remote_temp_path=None):
    """
    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type start_at_task: str | None
    :type test_dir: str
    :type inventory_path: str
    :type temp_path: str
    :type remote_temp_path: str | None
    """
    display.info('Running %s integration test role' % target.name)

    env_config = None

    vars_files = []
    variables = dict(
        output_dir=test_dir,
    )

    if isinstance(args, WindowsIntegrationConfig):
        hosts = 'windows'
        gather_facts = False
        variables.update(dict(
            win_output_dir=r'C:\ansible_testing',
        ))
    elif isinstance(args, NetworkIntegrationConfig):
        hosts = target.network_platform
        gather_facts = False
    else:
        hosts = 'testhost'
        gather_facts = True

    cloud_environment = get_cloud_environment(args, target)

    if cloud_environment:
        env_config = cloud_environment.get_environment_config()

    with integration_test_environment(args, target, inventory_path) as test_env:
        if os.path.exists(test_env.vars_file):
            vars_files.append(os.path.relpath(test_env.vars_file, test_env.integration_dir))

        play = dict(
            hosts=hosts,
            gather_facts=gather_facts,
            vars_files=vars_files,
            vars=variables,
            roles=[
                target.name,
            ],
        )

        if env_config:
            if env_config.ansible_vars:
                variables.update(env_config.ansible_vars)

            play.update(dict(
                environment=env_config.env_vars,
                module_defaults=env_config.module_defaults,
            ))

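        # the play is serialized as JSON, which is also valid YAML for ansible-playbook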
        playbook = json.dumps([play], indent=4, sort_keys=True)

        with named_temporary_file(args=args, directory=test_env.integration_dir, prefix='%s-' % target.name, suffix='.yml', content=playbook) as playbook_path:
            filename = os.path.basename(playbook_path)

            display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)

            cmd = ['ansible-playbook', filename, '-i', os.path.relpath(test_env.inventory_path, test_env.integration_dir)]

            if start_at_task:
                cmd += ['--start-at-task', start_at_task]

            if args.tags:
                cmd += ['--tags', args.tags]

            if args.skip_tags:
                cmd += ['--skip-tags', args.skip_tags]

            if args.diff:
                cmd += ['--diff']

            if isinstance(args, NetworkIntegrationConfig):
                if args.testcase:
                    cmd += ['-e', 'testcase=%s' % args.testcase]

            if args.verbosity:
                cmd.append('-' + ('v' * args.verbosity))

            env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
            cwd = test_env.integration_dir

            env.update(dict(
                # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
                ANSIBLE_PLAYBOOK_DIR=cwd,
            ))

            env['ANSIBLE_ROLES_PATH'] = test_env.targets_dir

            module_coverage = 'non_local/' not in target.aliases
            intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
                              remote_temp_path=remote_temp_path, module_coverage=module_coverage)


def get_changes_filter(args):
    """
    :type args: TestConfig
    :rtype: list[str]
    """
    paths = detect_changes(args)

    if not args.metadata.change_description:
        if paths:
            changes = categorize_changes(args, paths, args.command)
        else:
            changes = ChangeDescription()

        args.metadata.change_description = changes

    if paths is None:
        return []  # change detection not enabled, do not filter targets

    if not paths:
        raise NoChangesDetected()

    if args.metadata.change_description.targets is None:
        raise NoTestsForChanges()

    return args.metadata.change_description.targets


def detect_changes(args):
    """
    :type args: TestConfig
    :rtype: list[str] | None
    """
    if args.changed:
        paths = get_ci_provider().detect_changes(args)
    elif args.changed_from or args.changed_path:
        paths = args.changed_path or []
        if args.changed_from:
            paths += read_text_file(args.changed_from).splitlines()
    else:
        return None  # change detection not enabled

    if paths is None:
        return None  # act as though change detection not enabled, do not filter targets

    display.info('Detected changes in %d file(s).' % len(paths))

    for path in paths:
        display.info(path, verbosity=1)

    return paths


def get_integration_filter(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    if args.docker:
        return get_integration_docker_filter(args, targets)

    if args.remote:
        return get_integration_remote_filter(args, targets)

    return get_integration_local_filter(args, targets)


def common_integration_filter(args, targets, exclude):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type exclude: list[str]
    """
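    # each group below may be overridden with its --allow-* option or by explicitly including targets using the alias prefix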
    override_disabled = set(target for target in args.include if target.startswith('disabled/'))

    if not args.allow_disabled:
        skip = 'disabled/'
        override = [target.name for target in targets if override_disabled & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-disabled or prefixing with "disabled/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))

    if not args.allow_unsupported:
        skip = 'unsupported/'
        override = [target.name for target in targets if override_unsupported & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-unsupported or prefixing with "unsupported/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    override_unstable = set(target for target in args.include if target.startswith('unstable/'))

    if args.allow_unstable_changed:
        override_unstable |= set(args.metadata.change_description.focused_targets or [])

    if not args.allow_unstable:
        skip = 'unstable/'
        override = [target.name for target in targets if override_unstable & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-unstable or prefixing with "unstable/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    # only skip a Windows test if using --windows and all the --windows versions are defined in the aliases as skip/windows/%s
    if isinstance(args, WindowsIntegrationConfig) and args.windows:
        all_skipped = []
        not_skipped = []

        for target in targets:
            if "skip/windows/" not in target.aliases:
                continue

            skip_valid = []
            skip_missing = []
            for version in args.windows:
                if "skip/windows/%s/" % version in target.aliases:
                    skip_valid.append(version)
                else:
                    skip_missing.append(version)

            if skip_missing and skip_valid:
                not_skipped.append((target.name, skip_valid, skip_missing))
            elif skip_valid:
                all_skipped.append(target.name)

        if all_skipped:
            exclude.extend(all_skipped)
            skip_aliases = ["skip/windows/%s/" % w for w in args.windows]
            display.warning('Excluding tests marked "%s" which are set to skip with --windows %s: %s'
                            % ('", "'.join(skip_aliases), ', '.join(args.windows), ', '.join(all_skipped)))

        if not_skipped:
            for target, skip_valid, skip_missing in not_skipped:
                # warn when failing to skip due to lack of support for skipping only some versions
                display.warning('Including test "%s" which was marked to skip for --windows %s but not %s.'
                                % (target, ', '.join(skip_valid), ', '.join(skip_missing)))


def get_integration_local_filter(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    common_integration_filter(args, targets, exclude)

    if not args.allow_root and os.getuid() != 0:
        skip = 'needs/root/'
        skipped = [target.name for target in targets if skip in target.aliases]
        if skipped:
            exclude.append(skip)
            display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    override_destructive = set(target for target in args.include if target.startswith('destructive/'))

    if not args.allow_destructive:
        skip = 'destructive/'
        override = [target.name for target in targets if override_destructive & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    exclude_targets_by_python_version(targets, args.python_version, exclude)

    return exclude


def get_integration_docker_filter(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    common_integration_filter(args, targets, exclude)

    skip = 'skip/docker/'
    skipped = [target.name for target in targets if skip in target.aliases]
    if skipped:
        exclude.append(skip)
        display.warning('Excluding tests marked "%s" which cannot run under docker: %s'
                        % (skip.rstrip('/'), ', '.join(skipped)))

    if not args.docker_privileged:
        skip = 'needs/privileged/'
        skipped = [target.name for target in targets if skip in target.aliases]
        if skipped:
            exclude.append(skip)
            display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    python_version = get_python_version(args, get_docker_completion(), args.docker_raw)

    exclude_targets_by_python_version(targets, python_version, exclude)

    return exclude


def get_integration_remote_filter(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    remote = args.parsed_remote

    exclude = []

    common_integration_filter(args, targets, exclude)

    skips = {
        'skip/%s' % remote.platform: remote.platform,
        'skip/%s/%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version),
        'skip/%s%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version),  # legacy syntax, use above format
    }

    if remote.arch:
        skips.update({
            'skip/%s/%s' % (remote.arch, remote.platform): '%s on %s' % (remote.platform, remote.arch),
            'skip/%s/%s/%s' % (remote.arch, remote.platform, remote.version): '%s %s on %s' % (remote.platform, remote.version, remote.arch),
        })

    for skip, description in skips.items():
        skipped = [target.name for target in targets if skip in target.skips]
        if skipped:
            exclude.append(skip + '/')
            display.warning('Excluding tests marked "%s" which are not supported on %s: %s' % (skip, description, ', '.join(skipped)))

    python_version = get_python_version(args, get_remote_completion(), args.remote)

    exclude_targets_by_python_version(targets, python_version, exclude)

    return exclude


def exclude_targets_by_python_version(targets, python_version, exclude):
    """
    :type targets: tuple[IntegrationTarget]
    :type python_version: str
    :type exclude: list[str]
    """
    if not python_version:
        display.warning('Python version unknown. Unable to skip tests based on Python version.')
        return

    python_major_version = python_version.split('.')[0]

    skip = 'skip/python%s/' % python_version
    skipped = [target.name for target in targets if skip in target.aliases]
    if skipped:
        exclude.append(skip)
        display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
                        % (skip.rstrip('/'), python_version, ', '.join(skipped)))

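    # also honor skips declared against only the major version, such as skip/python3/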
    skip = 'skip/python%s/' % python_major_version
    skipped = [target.name for target in targets if skip in target.aliases]
    if skipped:
        exclude.append(skip)
        display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
                        % (skip.rstrip('/'), python_version, ', '.join(skipped)))


def get_python_version(args, configs, name):
    """
    :type args: EnvironmentConfig
    :type configs: dict[str, dict[str, str]]
    :type name: str
    """
    config = configs.get(name, {})
    config_python = config.get('python')

    if not config or not config_python:
        if args.python:
            return args.python

        display.warning('No Python version specified. '
                        'Use completion config or the --python option to specify one.', unique=True)

        return ''  # failure to provide a version may result in failures or reduced functionality later

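    # the completion config lists the supported versions, with the first entry being the default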
    supported_python_versions = config_python.split(',')
    default_python_version = supported_python_versions[0]

    if args.python and args.python not in supported_python_versions:
        raise ApplicationError('Python %s is not supported by %s. Supported Python version(s) are: %s' % (
            args.python, name, ', '.join(sorted(supported_python_versions))))

    python_version = args.python or default_python_version

    return python_version


def get_python_interpreter(args, configs, name):
    """
    :type args: EnvironmentConfig
    :type configs: dict[str, dict[str, str]]
    :type name: str
    """
    if args.python_interpreter:
        return args.python_interpreter

    config = configs.get(name, {})

    if not config:
        if args.python:
            guess = 'python%s' % args.python
        else:
            guess = 'python'

        display.warning('Using "%s" as the Python interpreter. '
                        'Use completion config or the --python-interpreter option to specify the path.' % guess, unique=True)

        return guess

    python_version = get_python_version(args, configs, name)

    python_dir = config.get('python_dir', '/usr/bin')
    python_interpreter = os.path.join(python_dir, 'python%s' % python_version)
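    # the completion config may override the interpreter path for a specific version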
    python_interpreter = config.get('python%s' % python_version, python_interpreter)

    return python_interpreter


class EnvironmentDescription:
    """Description of current running environment."""
    def __init__(self, args):
        """Initialize snapshot of environment configuration.
        :type args: IntegrationConfig
        """
        self.args = args

        if self.args.explain:
            self.data = {}
            return

        warnings = []

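        # probe the bare python/pip names as well as each supported X.Y version and each major version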
        versions = ['']
        versions += SUPPORTED_PYTHON_VERSIONS
        versions += list(set(v.split('.')[0] for v in SUPPORTED_PYTHON_VERSIONS))

        version_check = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'versions.py')
        python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
        pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
        program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v])
        pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
        known_hosts_hash = self.get_hash(os.path.expanduser('~/.ssh/known_hosts'))

        for version in sorted(versions):
            self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)

        for warning in warnings:
            display.warning(warning, unique=True)

        self.data = dict(
            python_paths=python_paths,
            pip_paths=pip_paths,
            program_versions=program_versions,
            pip_interpreters=pip_interpreters,
            known_hosts_hash=known_hosts_hash,
            warnings=warnings,
        )

    @staticmethod
    def check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings):
        """
        :type version: str
        :param python_paths: dict[str, str]
        :param pip_paths: dict[str, str]
        :param pip_interpreters: dict[str, str]
        :param warnings: list[str]
        """
        python_label = 'Python%s' % (' %s' % version if version else '')

        pip_path = pip_paths.get(version)
        python_path = python_paths.get(version)

        if not python_path and not pip_path:
            # neither python nor pip is present for this version
            return

        if not python_path:
            warnings.append('A %s interpreter was not found, yet a matching pip was found at "%s".' % (python_label, pip_path))
            return

        if not pip_path:
            warnings.append('A %s interpreter was found at "%s", yet a matching pip was not found.' % (python_label, python_path))
            return

        pip_shebang = pip_interpreters.get(version)

        match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)

        if not match:
            warnings.append('A %s pip was found at "%s", but it does not have a valid shebang: %s' % (python_label, pip_path, pip_shebang))
            return

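        # resolve symlinks so aliases such as python3 -> python3.6 compare equal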
        pip_interpreter = os.path.realpath(match.group('command'))
        python_interpreter = os.path.realpath(python_path)

        if pip_interpreter == python_interpreter:
            return

        try:
            identical = filecmp.cmp(pip_interpreter, python_interpreter)
        except OSError:
            identical = False

        if identical:
            return

        warnings.append('A %s pip was found at "%s", but it uses interpreter "%s" instead of "%s".' % (
            python_label, pip_path, pip_interpreter, python_interpreter))

    def __str__(self):
        """
        :rtype: str
        """
        return json.dumps(self.data, sort_keys=True, indent=4)

    def validate(self, target_name, throw):
        """
        :type target_name: str
        :type throw: bool
        :rtype: bool
        """
        current = EnvironmentDescription(self.args)

        return self.check(self, current, target_name, throw)

    @staticmethod
    def check(original, current, target_name, throw):
        """
        :type original: EnvironmentDescription
        :type current: EnvironmentDescription
        :type target_name: str
        :type throw: bool
        :rtype: bool
        """
        original_json = str(original)
        current_json = str(current)

        if original_json == current_json:
            return True

        unified_diff = '\n'.join(difflib.unified_diff(
            a=original_json.splitlines(),
            b=current_json.splitlines(),
            fromfile='original.json',
            tofile='current.json',
            lineterm='',
        ))

        message = ('Test target "%s" has changed the test environment!\n'
                   'If these changes are necessary, they must be reverted before the test finishes.\n'
                   '>>> Original Environment\n'
                   '%s\n'
                   '>>> Current Environment\n'
                   '%s\n'
                   '>>> Environment Diff\n'
                   '%s'
                   % (target_name, original_json, current_json, unified_diff))

        if throw:
            raise ApplicationError(message)

        display.error(message)

        return False

    @staticmethod
    def get_version(command, warnings):
        """
        :type command: list[str]
        :type warnings: list[text]
        :rtype: list[str]
        """
        try:
            stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
        except SubprocessError as ex:
            warnings.append(u'%s' % ex)
            return None  # all failures are equal, we don't care why it failed, only that it did

        return [line.strip() for line in ((stdout or '').strip() + (stderr or '').strip()).splitlines()]

    @staticmethod
    def get_shebang(path):
        """
        :type path: str
        :rtype: str
        """
        with open_text_file(path) as script_fd:
            return script_fd.readline().strip()

    @staticmethod
    def get_hash(path):
        """
        :type path: str
        :rtype: str | None
        """
        if not os.path.exists(path):
            return None

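        # md5 is used only to detect changes between test runs, not for security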
        file_hash = hashlib.md5()

        file_hash.update(read_binary_file(path))

        return file_hash.hexdigest()


class NoChangesDetected(ApplicationWarning):
    """Exception when change detection was performed, but no changes were found."""
    def __init__(self):
        super(NoChangesDetected, self).__init__('No changes detected.')


class NoTestsForChanges(ApplicationWarning):
    """Exception when changes are detected, but no tests are triggered as a result."""
    def __init__(self):
        super(NoTestsForChanges, self).__init__('No tests found for detected changes.')


class Delegate(Exception):
    """Trigger command delegation."""
    def __init__(self, exclude=None, require=None, integration_targets=None):
        """
        :type exclude: list[str] | None
        :type require: list[str] | None
        :type integration_targets: tuple[IntegrationTarget] | None
        """
        super(Delegate, self).__init__()

        self.exclude = exclude or []
        self.require = require or []
        self.integration_targets = integration_targets or tuple()


class AllTargetsSkipped(ApplicationWarning):
    """All targets skipped."""
    def __init__(self):
        super(AllTargetsSkipped, self).__init__('All targets skipped.')