"""Delegate test execution to another environment."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import re
import sys
import tempfile

from .executor import (
    SUPPORTED_PYTHON_VERSIONS,
    HTTPTESTER_HOSTS,
    create_shell_command,
    run_httptester,
    start_httptester,
    get_python_interpreter,
    get_python_version,
    get_docker_completion,
    get_remote_completion,
)

from .config import (
    TestConfig,
    EnvironmentConfig,
    IntegrationConfig,
    WindowsIntegrationConfig,
    NetworkIntegrationConfig,
    ShellConfig,
    SanityConfig,
    UnitsConfig,
)

from .core_ci import (
    AnsibleCoreCI,
)

from .manage_ci import (
    ManagePosixCI,
    ManageWindowsCI,
)

from .util import (
    ApplicationError,
    common_environment,
    pass_vars,
    display,
    ANSIBLE_BIN_PATH,
    ANSIBLE_TEST_DATA_ROOT,
)

from .util_common import (
    run_command,
    ResultType,
)

from .docker_util import (
    docker_exec,
    docker_get,
    docker_pull,
    docker_put,
    docker_rm,
    docker_run,
    docker_available,
    docker_network_disconnect,
    get_docker_networks,
)

from .cloud import (
    get_cloud_providers,
)

from .target import (
    IntegrationTarget,
)

from .data import (
    data_context,
)

from .payload import (
    create_payload,
)


def check_delegation_args(args):
    """
    :type args: CommonConfig
    """
    if not isinstance(args, EnvironmentConfig):
        return

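    # resolve the Python version for the requested docker/remote environment up front so problems surface before delegation begins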
    if args.docker:
        get_python_version(args, get_docker_completion(), args.docker_raw)
    elif args.remote:
        get_python_version(args, get_remote_completion(), args.remote)


def delegate(args, exclude, require, integration_targets):
    """
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    if isinstance(args, TestConfig):
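        # write metadata to a temporary file in the content root and hand its relative path to the delegated command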
        with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=data_context().content.root) as metadata_fd:
            args.metadata_path = os.path.basename(metadata_fd.name)
            args.metadata.to_file(args.metadata_path)

            try:
                return delegate_command(args, exclude, require, integration_targets)
            finally:
                args.metadata_path = None
    else:
        return delegate_command(args, exclude, require, integration_targets)


def delegate_command(args, exclude, require, integration_targets):
    """
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    if args.tox:
        delegate_tox(args, exclude, require, integration_targets)
        return True

    if args.docker:
        delegate_docker(args, exclude, require, integration_targets)
        return True

    if args.remote:
        delegate_remote(args, exclude, require, integration_targets)
        return True

    return False


def delegate_tox(args, exclude, require, integration_targets):
    """
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    if args.python:
        versions = (args.python_version,)

        if args.python_version not in SUPPORTED_PYTHON_VERSIONS:
            raise ApplicationError('tox does not support Python version %s' % args.python_version)
    else:
        versions = SUPPORTED_PYTHON_VERSIONS

    if args.httptester:
        needs_httptester = sorted(target.name for target in integration_targets if 'needs/httptester/' in target.aliases)

        if needs_httptester:
            display.warning('Use --docker or --remote to enable httptester for tests marked "needs/httptester": %s' % ', '.join(needs_httptester))

    options = {
        '--tox': args.tox_args,
        '--tox-sitepackages': 0,
    }

    for version in versions:
        tox = ['tox', '-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'tox.ini'), '-e', 'py' + version.replace('.', '')]

        if args.tox_sitepackages:
            tox.append('--sitepackages')

        tox.append('--')

        cmd = generate_command(args, None, ANSIBLE_BIN_PATH, data_context().content.root, options, exclude, require)

        if not args.python:
            cmd += ['--python', version]

        # newer versions of tox do not support older python versions and will silently fall back to a different version
        # passing this option will allow the delegated ansible-test to verify it is running under the expected python version
        # tox 3.0.0 dropped official python 2.6 support: https://tox.readthedocs.io/en/latest/changelog.html#v3-0-0-2018-04-02
        # tox 3.1.3 is the first version to support python 3.8 and later: https://tox.readthedocs.io/en/latest/changelog.html#v3-1-3-2018-08-03
        # tox 3.1.3 appears to still work with python 2.6, making it a good version to use when supporting all python versions we use
        # virtualenv 16.0.0 dropped python 2.6 support: https://virtualenv.pypa.io/en/latest/changes/#v16-0-0-2018-05-16
        cmd += ['--check-python', version]

        if isinstance(args, TestConfig):
            if args.coverage and not args.coverage_label:
                cmd += ['--coverage-label', 'tox-%s' % version]

        env = common_environment()

        # temporary solution to permit ansible-test delegated to tox to provision remote resources
        optional = (
            'SHIPPABLE',
            'SHIPPABLE_BUILD_ID',
            'SHIPPABLE_JOB_NUMBER',
        )

        env.update(pass_vars(required=[], optional=optional))

        run_command(args, tox + cmd, env=env)


def delegate_docker(args, exclude, require, integration_targets):
    """
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    test_image = args.docker
    privileged = args.docker_privileged

    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
    else:
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)

    if use_httptester:
        docker_pull(args, args.httptester)

    docker_pull(args, test_image)

    httptester_id = None
    test_id = None

    options = {
        '--docker': 1,
        '--docker-privileged': 0,
        '--docker-util': 1,
    }

    python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)

    install_root = '/root/ansible'

    if data_context().content.collection:
        content_root = os.path.join(install_root, data_context().content.collection.directory)
    else:
        content_root = install_root

    remote_results_root = os.path.join(content_root, data_context().results_relative)

    cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)

    if isinstance(args, TestConfig):
        if args.coverage and not args.coverage_label:
            image_label = args.docker_raw
            image_label = re.sub('[^a-zA-Z0-9]+', '-', image_label)
            cmd += ['--coverage-label', 'docker-%s' % image_label]

    if isinstance(args, IntegrationConfig):
        if not args.allow_destructive:
            cmd.append('--allow-destructive')

    cmd_options = []

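    # attach an interactive tty when the delegated command may need user input (shell sessions or the integration debug strategy)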
    if isinstance(args, ShellConfig) or (isinstance(args, IntegrationConfig) and args.debug_strategy):
        cmd_options.append('-it')

    with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
        try:
            create_payload(args, local_source_fd.name)

            if use_httptester:
                httptester_id = run_httptester(args)
            else:
                httptester_id = None

            test_options = [
                '--detach',
                '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
                '--privileged=%s' % str(privileged).lower(),
            ]

            if args.docker_memory:
                test_options.extend([
                    '--memory=%d' % args.docker_memory,
                    '--memory-swap=%d' % args.docker_memory,
                ])

            docker_socket = '/var/run/docker.sock'

            if args.docker_seccomp != 'default':
                test_options += ['--security-opt', 'seccomp=%s' % args.docker_seccomp]

            if os.path.exists(docker_socket):
                test_options += ['--volume', '%s:%s' % (docker_socket, docker_socket)]

            if httptester_id:
                test_options += ['--env', 'HTTPTESTER=1']

                for host in HTTPTESTER_HOSTS:
                    test_options += ['--link', '%s:%s' % (httptester_id, host)]

            if isinstance(args, IntegrationConfig):
                cloud_platforms = get_cloud_providers(args)

                for cloud_platform in cloud_platforms:
                    test_options += cloud_platform.get_docker_run_options()

            test_id = docker_run(args, test_image, options=test_options)[0]

            if args.explain:
                test_id = 'test_id'
            else:
                test_id = test_id.strip()

            # write temporary files to /root since /tmp isn't ready immediately on container start
            docker_put(args, test_id, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh'), '/root/docker.sh')
            docker_exec(args, test_id, ['/bin/bash', '/root/docker.sh'])
            docker_put(args, test_id, local_source_fd.name, '/root/ansible.tgz')
            docker_exec(args, test_id, ['mkdir', '/root/ansible'])
            docker_exec(args, test_id, ['tar', 'oxzf', '/root/ansible.tgz', '-C', '/root/ansible'])

            # docker images are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']

            # run unit tests unprivileged to prevent stray writes to the source tree
            # also disconnect from the network once requirements have been installed
            if isinstance(args, UnitsConfig):
                writable_dirs = [
                    os.path.join(content_root, ResultType.JUNIT.relative_path),
                    os.path.join(content_root, ResultType.COVERAGE.relative_path),
                ]

                docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '755', '/root'])
                docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])

                docker_exec(args, test_id, ['useradd', 'pytest', '--create-home'])

                docker_exec(args, test_id, cmd + ['--requirements-mode', 'only'], options=cmd_options)

                networks = get_docker_networks(args, test_id)

                for network in networks:
                    docker_network_disconnect(args, test_id, network)

                cmd += ['--requirements-mode', 'skip']

                cmd_options += ['--user', 'pytest']

            try:
                docker_exec(args, test_id, cmd, options=cmd_options)
            finally:
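                # copy the results tree out of the container back into the local results directory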
                local_test_root = os.path.dirname(data_context().results)

                remote_test_root = os.path.dirname(remote_results_root)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')

                with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
                    docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '-C', remote_test_root, remote_results_name])
                    docker_get(args, test_id, remote_temp_file, local_result_fd.name)
                    run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
        finally:
            if httptester_id:
                docker_rm(args, httptester_id)

            if test_id:
                docker_rm(args, test_id)


def delegate_remote(args, exclude, require, integration_targets):
    """
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
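    # args.remote is a 'platform/version' string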
    parts = args.remote.split('/', 1)

    platform = parts[0]
    version = parts[1]

    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
    success = False
    raw = False

    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
        raw = args.raw
    else:
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)

    if use_httptester and not docker_available():
        display.warning('Assuming --disable-httptester since `docker` is not available.')
        use_httptester = False

    httptester_id = None
    ssh_options = []
    content_root = None

    try:
        core_ci.start()

        if use_httptester:
            httptester_id, ssh_options = start_httptester(args)

        core_ci.wait()

        python_version = get_python_version(args, get_remote_completion(), args.remote)

        if platform == 'windows':
            # Windows doesn't need the ansible-test fluff, just run the SSH command
            manage = ManageWindowsCI(core_ci)
            manage.setup(python_version)

            cmd = ['powershell.exe']
        elif raw:
            manage = ManagePosixCI(core_ci)
            manage.setup(python_version)

            cmd = create_shell_command(['bash'])
        else:
            manage = ManagePosixCI(core_ci)
            pwd = manage.setup(python_version)

            options = {
                '--remote': 1,
            }

            python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)

            install_root = os.path.join(pwd, 'ansible')

            if data_context().content.collection:
                content_root = os.path.join(install_root, data_context().content.collection.directory)
            else:
                content_root = install_root

            cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)

            if httptester_id:
                cmd += ['--inject-httptester']

            if isinstance(args, TestConfig):
                if args.coverage and not args.coverage_label:
                    cmd += ['--coverage-label', 'remote-%s-%s' % (platform, version)]

            if isinstance(args, IntegrationConfig):
                if not args.allow_destructive:
                    cmd.append('--allow-destructive')

            # remote instances are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']

        if isinstance(args, IntegrationConfig):
            cloud_platforms = get_cloud_providers(args)

            for cloud_platform in cloud_platforms:
                ssh_options += cloud_platform.get_remote_ssh_options()

        try:
            manage.ssh(cmd, ssh_options)
            success = True
        finally:
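            # download results from the remote instance unless this was a Windows run or a raw shell session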
            download = False

            if platform != 'windows':
                download = True

            if isinstance(args, ShellConfig):
                if args.raw:
                    download = False

            if download and content_root:
                local_test_root = os.path.dirname(data_context().results)

                remote_results_root = os.path.join(content_root, data_context().results_relative)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_path = os.path.join('/tmp', remote_results_name)

                manage.ssh('rm -rf {0} && cp -a {1} {0} && chmod -R a+r {0}'.format(remote_temp_path, remote_results_root))
                manage.download(remote_temp_path, local_test_root)
    finally:
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            core_ci.stop()

        if httptester_id:
            docker_rm(args, httptester_id)


def generate_command(args, python_interpreter, ansible_bin_path, content_root, options, exclude, require):
    """
    :type args: EnvironmentConfig
    :type python_interpreter: str | None
    :type ansible_bin_path: str
    :type content_root: str
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: list[str]
    """
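    # --color (with one argument) is filtered from the original arguments and re-added explicitly below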
    options['--color'] = 1

    cmd = [os.path.join(ansible_bin_path, 'ansible-test')]

    if python_interpreter:
        cmd = [python_interpreter] + cmd

    # Force the encoding used during delegation.
    # This is only needed because ansible-test relies on Python's file system encoding.
    # Environments that do not have the locale configured are thus unable to work with unicode file paths.
    # Examples include FreeBSD and some Linux containers.
    env_vars = dict(
        LC_ALL='en_US.UTF-8',
        ANSIBLE_TEST_CONTENT_ROOT=content_root,
    )

    env_args = ['%s=%s' % (key, env_vars[key]) for key in sorted(env_vars)]

    cmd = ['/usr/bin/env'] + env_args + cmd

    cmd += list(filter_options(args, sys.argv[1:], options, exclude, require))
    cmd += ['--color', 'yes' if args.color else 'no']

    if args.requirements:
        cmd += ['--requirements']

    if isinstance(args, ShellConfig):
        cmd = create_shell_command(cmd)
    elif isinstance(args, SanityConfig):
        if args.base_branch:
            cmd += ['--base-branch', args.base_branch]

    return cmd


def filter_options(args, argv, options, exclude, require):
    """
    :type args: EnvironmentConfig
    :type argv: list[str]
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: collections.Iterable[str]
    """
    options = options.copy()

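    # each key maps an option to the number of arguments it consumes; matching options (and their arguments) are dropped from argv below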
    options['--requirements'] = 0
    options['--truncate'] = 1
    options['--redact'] = 0

    if isinstance(args, TestConfig):
        options.update({
            '--changed': 0,
            '--tracked': 0,
            '--untracked': 0,
            '--ignore-committed': 0,
            '--ignore-staged': 0,
            '--ignore-unstaged': 0,
            '--changed-from': 1,
            '--changed-path': 1,
            '--metadata': 1,
            '--exclude': 1,
            '--require': 1,
        })
    elif isinstance(args, SanityConfig):
        options.update({
            '--base-branch': 1,
        })

    if isinstance(args, (NetworkIntegrationConfig, WindowsIntegrationConfig)):
        options.update({
            '--inventory': 1,
        })

    remaining = 0

    for arg in argv:
        if not arg.startswith('-') and remaining:
            remaining -= 1
            continue

        remaining = 0

        parts = arg.split('=', 1)
        key = parts[0]

        if key in options:
            remaining = options[key] - len(parts) + 1
            continue

        yield arg

    for arg in args.delegate_args:
        yield arg

    for target in exclude:
        yield '--exclude'
        yield target

    for target in require:
        yield '--require'
        yield target

    if isinstance(args, TestConfig):
        if args.metadata_path:
            yield '--metadata'
            yield args.metadata_path

        yield '--truncate'
        yield '%d' % args.truncate

        if args.redact:
            yield '--redact'