2016-11-30 06:21:53 +01:00
|
|
|
"""Delegate test execution to another environment."""
|
2019-07-12 08:46:20 +02:00
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
import os
|
2017-05-11 07:25:02 +02:00
|
|
|
import re
|
2016-11-30 06:21:53 +01:00
|
|
|
import sys
|
2017-03-15 20:17:42 +01:00
|
|
|
import tempfile
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2019-08-28 18:10:17 +02:00
|
|
|
from . import types as t
|
|
|
|
|
2020-04-10 00:06:12 +02:00
|
|
|
from .io import (
|
|
|
|
make_dirs,
|
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .executor import (
|
2016-11-30 06:21:53 +01:00
|
|
|
SUPPORTED_PYTHON_VERSIONS,
|
2018-05-09 18:24:39 +02:00
|
|
|
HTTPTESTER_HOSTS,
|
2016-11-30 06:21:53 +01:00
|
|
|
create_shell_command,
|
2018-05-09 18:24:39 +02:00
|
|
|
run_httptester,
|
|
|
|
start_httptester,
|
2019-03-28 00:40:27 +01:00
|
|
|
get_python_interpreter,
|
|
|
|
get_python_version,
|
2016-11-30 06:21:53 +01:00
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .config import (
|
2017-03-08 09:47:21 +01:00
|
|
|
TestConfig,
|
2017-07-07 01:14:44 +02:00
|
|
|
EnvironmentConfig,
|
|
|
|
IntegrationConfig,
|
2019-08-21 21:12:37 +02:00
|
|
|
WindowsIntegrationConfig,
|
|
|
|
NetworkIntegrationConfig,
|
2017-07-07 01:14:44 +02:00
|
|
|
ShellConfig,
|
|
|
|
SanityConfig,
|
|
|
|
UnitsConfig,
|
2017-03-08 09:47:21 +01:00
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .core_ci import (
|
2016-11-30 06:21:53 +01:00
|
|
|
AnsibleCoreCI,
|
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .manage_ci import (
|
2016-11-30 06:21:53 +01:00
|
|
|
ManagePosixCI,
|
2018-08-13 01:27:59 +02:00
|
|
|
ManageWindowsCI,
|
2016-11-30 06:21:53 +01:00
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .util import (
|
2016-11-30 06:21:53 +01:00
|
|
|
ApplicationError,
|
2017-05-18 19:37:53 +02:00
|
|
|
common_environment,
|
2018-05-09 18:24:39 +02:00
|
|
|
display,
|
2019-08-09 02:21:38 +02:00
|
|
|
ANSIBLE_BIN_PATH,
|
2019-08-06 02:40:00 +02:00
|
|
|
ANSIBLE_TEST_DATA_ROOT,
|
2019-09-10 03:32:29 +02:00
|
|
|
ANSIBLE_LIB_ROOT,
|
|
|
|
ANSIBLE_TEST_ROOT,
|
2019-08-28 18:10:17 +02:00
|
|
|
tempdir,
|
2016-11-30 06:21:53 +01:00
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .util_common import (
|
2019-07-11 07:00:34 +02:00
|
|
|
run_command,
|
2019-08-28 08:40:06 +02:00
|
|
|
ResultType,
|
2019-09-10 03:32:29 +02:00
|
|
|
create_interpreter_wrapper,
|
2020-06-10 00:40:56 +02:00
|
|
|
get_docker_completion,
|
|
|
|
get_remote_completion,
|
2019-07-11 07:00:34 +02:00
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .docker_util import (
|
2017-05-05 10:23:00 +02:00
|
|
|
docker_exec,
|
|
|
|
docker_get,
|
|
|
|
docker_pull,
|
|
|
|
docker_put,
|
|
|
|
docker_rm,
|
|
|
|
docker_run,
|
2018-05-09 18:24:39 +02:00
|
|
|
docker_available,
|
2018-09-19 01:48:59 +02:00
|
|
|
docker_network_disconnect,
|
|
|
|
get_docker_networks,
|
2020-10-23 03:45:03 +02:00
|
|
|
get_docker_preferred_network_name,
|
|
|
|
get_docker_hostname,
|
|
|
|
is_docker_user_defined_network,
|
2017-05-05 10:23:00 +02:00
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .cloud import (
|
2017-05-05 10:23:00 +02:00
|
|
|
get_cloud_providers,
|
|
|
|
)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .target import (
|
2018-05-09 18:24:39 +02:00
|
|
|
IntegrationTarget,
|
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .data import (
|
2019-07-23 04:24:48 +02:00
|
|
|
data_context,
|
|
|
|
)
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from .payload import (
|
2019-07-16 01:20:03 +02:00
|
|
|
create_payload,
|
|
|
|
)
|
|
|
|
|
2019-08-28 18:10:17 +02:00
|
|
|
from .venv import (
|
|
|
|
create_virtual_environment,
|
|
|
|
)
|
|
|
|
|
2020-04-26 03:55:39 +02:00
|
|
|
from .ci import (
|
|
|
|
get_ci_provider,
|
|
|
|
)
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2019-03-28 00:40:27 +01:00
|
|
|
def check_delegation_args(args):
    """Resolve the delegated Python version early so bad arguments fail fast.

    :type args: CommonConfig
    """
    if isinstance(args, EnvironmentConfig):
        if args.docker:
            get_python_version(args, get_docker_completion(), args.docker_raw)
        elif args.remote:
            get_python_version(args, get_remote_completion(), args.remote)
|
|
|
|
|
|
|
|
|
2018-05-09 18:24:39 +02:00
|
|
|
def delegate(args, exclude, require, integration_targets):
    """Delegate execution, writing test metadata to a temp file when applicable.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    if not isinstance(args, TestConfig):
        # no test metadata to carry across, delegate directly
        return delegate_command(args, exclude, require, integration_targets)

    args.metadata.ci_provider = get_ci_provider().code

    make_dirs(ResultType.TMP.path)

    with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
        # record a content-relative path so the delegated environment can find the file
        args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
        args.metadata.to_file(args.metadata_path)

        try:
            return delegate_command(args, exclude, require, integration_targets)
        finally:
            args.metadata_path = None
|
2017-03-15 20:17:42 +01:00
|
|
|
|
|
|
|
|
2018-05-09 18:24:39 +02:00
|
|
|
def delegate_command(args, exclude, require, integration_targets):
    """Dispatch delegation to the first configured environment, if any.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    # order matters: venv takes precedence over docker, which takes precedence over remote
    delegators = (
        (args.venv, delegate_venv),
        (args.docker, delegate_docker),
        (args.remote, delegate_remote),
    )

    for enabled, delegator in delegators:
        if enabled:
            delegator(args, exclude, require, integration_targets)
            return True

    return False
|
|
|
|
|
|
|
|
|
2019-08-28 18:10:17 +02:00
|
|
|
def delegate_venv(args,  # type: EnvironmentConfig
                  exclude,  # type: t.List[str]
                  require,  # type: t.List[str]
                  integration_targets,  # type: t.Tuple[IntegrationTarget, ...]
                  ):  # type: (...) -> None
    """Delegate ansible-test execution to a virtual environment using venv or virtualenv."""
    # a single explicit --python limits the run to one version; otherwise try them all
    if args.python:
        versions = (args.python_version,)
    else:
        versions = SUPPORTED_PYTHON_VERSIONS

    if args.httptester:
        # httptester requires docker networking, which venv delegation cannot provide
        needs_httptester = sorted(target.name for target in integration_targets if 'needs/httptester/' in target.aliases)

        if needs_httptester:
            display.warning('Use --docker or --remote to enable httptester for tests marked "needs/httptester": %s' % ', '.join(needs_httptester))

    # keep system-site-packages venvs separate from isolated ones on disk
    if args.venv_system_site_packages:
        suffix = '-ssp'
    else:
        suffix = ''

    venvs = dict((version, os.path.join(ResultType.TMP.path, 'delegation', 'python%s%s' % (version, suffix))) for version in versions)
    # drop versions for which a virtual environment could not be created
    venvs = dict((version, path) for version, path in venvs.items() if create_virtual_environment(args, version, path, args.venv_system_site_packages))

    if not venvs:
        raise ApplicationError('No usable virtual environment support found.')

    # strip these options from the delegated command line to avoid recursive delegation
    options = {
        '--venv': 0,
        '--venv-system-site-packages': 0,
    }

    with tempdir() as inject_path:
        # expose each venv's interpreter as pythonX.Y on a PATH injected below
        for version, path in venvs.items():
            create_interpreter_wrapper(os.path.join(path, 'bin', 'python'), os.path.join(inject_path, 'python%s' % version))

        python_interpreter = os.path.join(inject_path, 'python%s' % args.python_version)

        cmd = generate_command(args, python_interpreter, ANSIBLE_BIN_PATH, data_context().content.root, options, exclude, require)

        if isinstance(args, TestConfig):
            if args.coverage and not args.coverage_label:
                cmd += ['--coverage-label', 'venv']

        env = common_environment()

        with tempdir() as library_path:
            # expose ansible and ansible_test to the virtual environment (only required when running from an install)
            os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
            os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))

            env.update(
                PATH=inject_path + os.path.pathsep + env['PATH'],
                PYTHONPATH=library_path,
            )

            run_command(args, cmd, env=env)
|
2019-08-28 18:10:17 +02:00
|
|
|
|
|
|
|
|
2018-05-09 18:24:39 +02:00
|
|
|
def delegate_docker(args, exclude, require, integration_targets):
    """Delegate ansible-test execution to a docker container.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    test_image = args.docker
    privileged = args.docker_privileged

    # shell sessions honor --httptester directly; test runs only need it when a target asks for it
    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
    else:
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)

    if use_httptester:
        docker_pull(args, args.httptester)

    docker_pull(args, test_image)

    httptester_id = None
    test_id = None
    success = False

    # strip these options from the delegated command line to avoid recursive delegation
    options = {
        '--docker': 1,
        '--docker-privileged': 0,
        '--docker-util': 1,
    }

    python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)

    pwd = '/root'
    ansible_root = os.path.join(pwd, 'ansible')

    # collections run from their own directory inside the container; ansible itself runs from its source root
    if data_context().content.collection:
        content_root = os.path.join(pwd, data_context().content.collection.directory)
    else:
        content_root = ansible_root

    remote_results_root = os.path.join(content_root, data_context().content.results_path)

    cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)

    if isinstance(args, TestConfig):
        if args.coverage and not args.coverage_label:
            # derive a coverage label from the image name, normalized to safe characters
            image_label = args.docker_raw
            image_label = re.sub('[^a-zA-Z0-9]+', '-', image_label)
            cmd += ['--coverage-label', 'docker-%s' % image_label]

    if isinstance(args, IntegrationConfig):
        if not args.allow_destructive:
            # the container is disposable, so destructive tests are safe inside it
            cmd.append('--allow-destructive')

    cmd_options = []

    # allocate a tty for interactive use (shell sessions and the integration debug strategy)
    if isinstance(args, ShellConfig) or (isinstance(args, IntegrationConfig) and args.debug_strategy):
        cmd_options.append('-it')

    with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
        try:
            create_payload(args, local_source_fd.name)

            if use_httptester:
                httptester_id = run_httptester(args)
            else:
                httptester_id = None

            test_options = [
                '--detach',
                '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
                '--privileged=%s' % str(privileged).lower(),
            ]

            if args.docker_memory:
                test_options.extend([
                    '--memory=%d' % args.docker_memory,
                    # match swap to the memory limit so the container cannot exceed it via swap
                    '--memory-swap=%d' % args.docker_memory,
                ])

            docker_socket = '/var/run/docker.sock'

            if args.docker_seccomp != 'default':
                test_options += ['--security-opt', 'seccomp=%s' % args.docker_seccomp]

            # mount the docker socket when it exists locally (or when docker is remote)
            if get_docker_hostname() != 'localhost' or os.path.exists(docker_socket):
                test_options += ['--volume', '%s:%s' % (docker_socket, docker_socket)]

            if httptester_id:
                test_options += ['--env', 'HTTPTESTER=1', '--env', 'KRB5_PASSWORD=%s' % args.httptester_krb5_password]

                network = get_docker_preferred_network_name(args)

                if not is_docker_user_defined_network(network):
                    # legacy links are required when using the default bridge network instead of user-defined networks
                    for host in HTTPTESTER_HOSTS:
                        test_options += ['--link', '%s:%s' % (httptester_id, host)]

            if isinstance(args, IntegrationConfig):
                cloud_platforms = get_cloud_providers(args)

                for cloud_platform in cloud_platforms:
                    test_options += cloud_platform.get_docker_run_options()

            test_id = docker_run(args, test_image, options=test_options)[0]

            if args.explain:
                # no container was actually started in explain mode; use a placeholder id
                test_id = 'test_id'
            else:
                test_id = test_id.strip()

            # write temporary files to /root since /tmp isn't ready immediately on container start
            docker_put(args, test_id, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh'), '/root/docker.sh')
            docker_exec(args, test_id, ['/bin/bash', '/root/docker.sh'])
            docker_put(args, test_id, local_source_fd.name, '/root/test.tgz')
            docker_exec(args, test_id, ['tar', 'oxzf', '/root/test.tgz', '-C', '/root'])

            # docker images are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']

            # run unit tests unprivileged to prevent stray writes to the source tree
            # also disconnect from the network once requirements have been installed
            if isinstance(args, UnitsConfig):
                writable_dirs = [
                    os.path.join(content_root, ResultType.JUNIT.relative_path),
                    os.path.join(content_root, ResultType.COVERAGE.relative_path),
                ]

                docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '755', '/root'])
                docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])

                docker_exec(args, test_id, ['useradd', 'pytest', '--create-home'])

                # install requirements as root while the network is still connected
                docker_exec(args, test_id, cmd + ['--requirements-mode', 'only'], options=cmd_options)

                networks = get_docker_networks(args, test_id)

                if networks is not None:
                    for network in networks:
                        docker_network_disconnect(args, test_id, network)
                else:
                    display.warning('Network disconnection is not supported (this is normal under podman). '
                                    'Tests will not be isolated from the network. Network-related tests may misbehave.')

                cmd += ['--requirements-mode', 'skip']

                cmd_options += ['--user', 'pytest']

            try:
                docker_exec(args, test_id, cmd, options=cmd_options)
                # docker_exec will throw SubprocessError if not successful
                # If we make it here, all the prep work earlier and the docker_exec line above were all successful.
                success = True
            finally:
                # always try to retrieve results, even on failure
                local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))

                remote_test_root = os.path.dirname(remote_results_root)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')

                make_dirs(local_test_root)  # make sure directory exists for collections which have no tests

                with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
                    docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '--exclude', ResultType.TMP.name, '-C', remote_test_root, remote_results_name])
                    docker_get(args, test_id, remote_temp_file, local_result_fd.name)
                    run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
        finally:
            if httptester_id:
                docker_rm(args, httptester_id)

            if test_id:
                if args.docker_terminate == 'always' or (args.docker_terminate == 'success' and success):
                    docker_rm(args, test_id)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
2018-05-09 18:24:39 +02:00
|
|
|
def delegate_remote(args, exclude, require, integration_targets):
    """Delegate ansible-test execution to a remote instance provisioned through Ansible Core CI.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    remote = args.parsed_remote

    core_ci = AnsibleCoreCI(args, remote.platform, remote.version, stage=args.remote_stage, provider=args.remote_provider, arch=remote.arch)
    success = False
    raw = False

    # shell sessions honor --httptester and --raw directly; test runs only need httptester when a target asks for it
    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
        raw = args.raw
    else:
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)

    if use_httptester and not docker_available():
        # httptester runs in a local docker container with SSH port forwarding to the remote
        display.warning('Assuming --disable-httptester since `docker` is not available.')
        use_httptester = False

    httptester_id = None
    ssh_options = []
    content_root = None

    try:
        core_ci.start()

        if use_httptester:
            httptester_id, ssh_options = start_httptester(args)

        core_ci.wait()

        python_version = get_python_version(args, get_remote_completion(), args.remote)

        if remote.platform == 'windows':
            # Windows doesn't need the ansible-test fluff, just run the SSH command
            manage = ManageWindowsCI(core_ci)
            manage.setup(python_version)

            cmd = ['powershell.exe']
        elif raw:
            manage = ManagePosixCI(core_ci)
            manage.setup(python_version)

            cmd = create_shell_command(['bash'])
        else:
            manage = ManagePosixCI(core_ci)
            pwd = manage.setup(python_version)

            # strip --remote from the delegated command line to avoid recursive delegation
            options = {
                '--remote': 1,
            }

            python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)

            ansible_root = os.path.join(pwd, 'ansible')

            # collections run from their own directory on the remote; ansible itself runs from its source root
            if data_context().content.collection:
                content_root = os.path.join(pwd, data_context().content.collection.directory)
            else:
                content_root = ansible_root

            cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)

            if httptester_id:
                cmd += ['--inject-httptester', '--httptester-krb5-password', args.httptester_krb5_password]

            if isinstance(args, TestConfig):
                if args.coverage and not args.coverage_label:
                    cmd += ['--coverage-label', 'remote-%s-%s' % (remote.platform, remote.version)]

            if isinstance(args, IntegrationConfig):
                if not args.allow_destructive:
                    # the remote instance is disposable, so destructive tests are safe on it
                    cmd.append('--allow-destructive')

            # remote instances are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']

        if isinstance(args, IntegrationConfig):
            cloud_platforms = get_cloud_providers(args)

            for cloud_platform in cloud_platforms:
                ssh_options += cloud_platform.get_remote_ssh_options()

        try:
            manage.ssh(cmd, ssh_options)
            success = True
        finally:
            # always try to retrieve results, even on failure
            download = False

            if remote.platform != 'windows':
                download = True

            if isinstance(args, ShellConfig):
                if args.raw:
                    download = False

            # content_root is only set on the full ansible-test path; raw/windows sessions produce no results
            if download and content_root:
                local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))

                remote_results_root = os.path.join(content_root, data_context().content.results_path)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_path = os.path.join('/tmp', remote_results_name)

                # AIX cp and GNU cp provide different options, no way could be found to have a common
                # pattern and achieve the same goal
                cp_opts = '-hr' if remote.platform in ['aix', 'ibmi'] else '-a'

                # stage results in /tmp with world-readable permissions so they can be downloaded
                manage.ssh('rm -rf {0} && mkdir {0} && cp {1} {2}/* {0}/ && chmod -R a+r {0}'.format(remote_temp_path, cp_opts, remote_results_root))
                manage.download(remote_temp_path, local_test_root)
    finally:
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            core_ci.stop()

        if httptester_id:
            docker_rm(args, httptester_id)
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2019-08-09 02:21:38 +02:00
|
|
|
def generate_command(args, python_interpreter, ansible_bin_path, content_root, options, exclude, require):
    """Build the ansible-test command line to run in the delegated environment.

    :type args: EnvironmentConfig
    :type python_interpreter: str | None
    :type ansible_bin_path: str
    :type content_root: str
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: list[str]
    """
    # strip any existing --color option; an explicit one is appended below
    options['--color'] = 1

    command = [os.path.join(ansible_bin_path, 'ansible-test')]

    if python_interpreter:
        command.insert(0, python_interpreter)

    # Force the encoding used during delegation.
    # This is only needed because ansible-test relies on Python's file system encoding.
    # Environments that do not have the locale configured are thus unable to work with unicode file paths.
    # Examples include FreeBSD and some Linux containers.
    env_vars = dict(
        LC_ALL='en_US.UTF-8',
        ANSIBLE_TEST_CONTENT_ROOT=content_root,
    )

    env_args = ['%s=%s' % (name, env_vars[name]) for name in sorted(env_vars)]

    command = ['/usr/bin/env'] + env_args + command

    command.extend(filter_options(args, sys.argv[1:], options, exclude, require))
    command.extend(['--color', 'yes' if args.color else 'no'])

    if args.requirements:
        command.append('--requirements')

    if isinstance(args, ShellConfig):
        command = create_shell_command(command)
    elif isinstance(args, SanityConfig):
        base_branch = args.base_branch or get_ci_provider().get_base_branch()

        if base_branch:
            command.extend(['--base-branch', base_branch])

    return command
|
|
|
|
|
|
|
|
|
|
|
|
def filter_options(args, argv, options, exclude, require):
    """Yield the original command-line arguments with delegation-specific options removed, then append replacements.

    :type args: EnvironmentConfig
    :type argv: list[str]
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: collections.Iterable[str]
    """
    # map of option name -> number of values it consumes; these are stripped from argv
    filtered = dict(options)
    filtered['--requirements'] = 0
    filtered['--truncate'] = 1
    filtered['--redact'] = 0
    filtered['--no-redact'] = 0

    if isinstance(args, TestConfig):
        filtered.update({
            '--changed': 0,
            '--tracked': 0,
            '--untracked': 0,
            '--ignore-committed': 0,
            '--ignore-staged': 0,
            '--ignore-unstaged': 0,
            '--changed-from': 1,
            '--changed-path': 1,
            '--metadata': 1,
            '--exclude': 1,
            '--require': 1,
        })
    elif isinstance(args, SanityConfig):
        filtered.update({
            '--base-branch': 1,
        })

    if isinstance(args, IntegrationConfig):
        filtered.update({
            '--no-temp-unicode': 0,
            '--no-pip-check': 0,
        })

    if isinstance(args, (NetworkIntegrationConfig, WindowsIntegrationConfig)):
        filtered.update({
            '--inventory': 1,
        })

    skip_values = 0  # number of upcoming value arguments belonging to a stripped option

    for argument in argv:
        if skip_values and not argument.startswith('-'):
            skip_values -= 1
            continue

        skip_values = 0

        name, sep, _value = argument.partition('=')

        if name in filtered:
            # an inline "=value" already accounts for one of the option's values
            skip_values = filtered[name] - (1 if sep else 0)
            continue

        yield argument

    for argument in args.delegate_args:
        yield argument

    for target in exclude:
        yield '--exclude'
        yield target

    for target in require:
        yield '--require'
        yield target

    if isinstance(args, TestConfig):
        if args.metadata_path:
            yield '--metadata'
            yield args.metadata_path

        yield '--truncate'
        yield '%d' % args.truncate

        yield '--redact' if args.redact else '--no-redact'

    if isinstance(args, IntegrationConfig):
        if args.no_temp_unicode:
            yield '--no-temp-unicode'

        if not args.pip_check:
            yield '--no-pip-check'
|