Overhaul ansible-test test path handling. (#61416)
* Remove .keep files from test/results/ dirs.
* Remove classification of test/results/ dir.
* Add results_relative to data context.
* Use variables in delegation paths.
* Standardize file writing and results paths.
* Fix issues reported by PyCharm.
* Clean up invocation of coverage command. It now runs through the injector.
* Hack to allow intercept_command in cover.py.
* Simplify git ignore for test results.
* Use test result tmp dir instead of cache dir.
* Remove old .pytest_cache reference.
* Fix unit test docker delegation.
* Show HTML report link.
* Clean up more results references.
* Move import sanity test output to .tmp dir.
* Exclude test results dir from coverage.
* Fix import sanity test lib paths.
* Fix hard-coded import test paths.
* Fix most hard-coded integration test paths.
* Fix PyCharm warnings.
* Fix import placement.
* Fix integration test dir path.
* Fix Shippable scripts.
* Fix Shippable matrix check.
* Overhaul key pair management.
This commit is contained in:
parent bf108ee7bf
commit f5d829392a
38 changed files with 390 additions and 304 deletions
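Many hunks below replace hard-coded 'test/results' paths with a ResultType helper from util_common.py. Its definition is not part of this diff; the following is a minimal sketch of the shape implied by the usage here (name, relative_path and path attributes; BOT, COVERAGE, DATA, JUNIT, LOGS, REPORTS and TMP members), with placeholder values marked as assumptions:

    import os

    RESULTS_RELATIVE = os.path.join('test', 'results')  # mirrors data_context().results_relative (see data.py hunk below)
    CONTENT_ROOT = '/src/ansible'  # placeholder for data_context().content.root


    class ResultType(object):
        """One category of test results; assumed shape, inferred from usage in the hunks below."""
        def __init__(self, name):
            self.name = name  # e.g. 'coverage'

        @property
        def relative_path(self):  # e.g. 'test/results/coverage'
            return os.path.join(RESULTS_RELATIVE, self.name)

        @property
        def path(self):  # absolute path under the content root
            return os.path.join(CONTENT_ROOT, self.relative_path)


    ResultType.BOT = ResultType('bot')
    ResultType.COVERAGE = ResultType('coverage')
    ResultType.DATA = ResultType('data')
    ResultType.JUNIT = ResultType('junit')
    ResultType.LOGS = ResultType('logs')
    ResultType.REPORTS = ResultType('reports')
    ResultType.TMP = ResultType('.tmp')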
.gitignore (vendored): 9 changes

@@ -79,14 +79,7 @@ ansible.egg-info/
 # Release directory
 packaging/release/ansible_release
 /.cache/
-/test/results/coverage/*=coverage.*
-/test/results/coverage/coverage*
-/test/results/reports/coverage*.xml
-/test/results/reports/coverage*/
-/test/results/bot/*.json
-/test/results/junit/*.xml
-/test/results/logs/*.log
-/test/results/data/*.json
+/test/results/
 /test/integration/cloud-config-aws.yml
 /test/integration/inventory.networking
 /test/integration/inventory.winrm
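Ignoring /test/results/ wholesale makes the seven per-subdirectory patterns redundant, and it is also why the .keep placeholder files go away: result directories are now created on demand (see the make_dirs and write_*_file changes below) rather than kept in git. A quick sanity check of the consolidation, using example artifact names for illustration:

    # Example artifact paths (names are illustrative, not taken from the diff):
    examples = [
        'test/results/coverage/ansible=coverage.python',
        'test/results/reports/coverage.xml',
        'test/results/bot/data-environment.json',
        'test/results/junit/integration.xml',
        'test/results/logs/debug.log',
        'test/results/data/integration-2019-08-27.json',
    ]
    # Everything the removed patterns matched lives under the one new ignore:
    print(all(path.startswith('test/results/') for path in examples))  # True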
test/cache/.keep (vendored): 0 changes
@@ -16,6 +16,9 @@ def main():
     import traceback
     import warnings
 
+    import_dir = os.environ['SANITY_IMPORT_DIR']
+    minimal_dir = os.environ['SANITY_MINIMAL_DIR']
+
     try:
         import importlib.util
         imp = None  # pylint: disable=invalid-name

@@ -266,9 +269,6 @@ def main():
            filepath = os.path.relpath(warning.filename)
            lineno = warning.lineno
 
-            import_dir = 'test/runner/.tox/import/'
-            minimal_dir = 'test/runner/.tox/minimal-'
-
            if filepath.startswith('../') or filepath.startswith(minimal_dir):
                # The warning occurred outside our source tree.
                # The best we can do is to report the file which was tested that triggered the warning.
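The importer now reads its two output directories from the environment instead of hard-coding test/runner/.tox/ paths. The runner side that exports these variables is not shown in this diff; a hypothetical caller sketch follows, where the directory values and the invocation are assumptions based on the "Move import sanity test output to .tmp dir" bullet above:

    import os
    import subprocess

    env = dict(os.environ)
    env.update(
        SANITY_IMPORT_DIR='test/results/.tmp/import/',    # assumed value
        SANITY_MINIMAL_DIR='test/results/.tmp/minimal-',  # assumed value
    )
    subprocess.check_call(['python', 'importer.py'], env=env)  # invocation details assumed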
@@ -21,6 +21,7 @@ from .util import (
 
 from .util_common import (
     run_command,
+    ResultType,
 )
 
 from .config import (

@@ -82,7 +83,7 @@ def ansible_environment(args, color=True, ansible_config=None):
     if args.debug:
         env.update(dict(
             ANSIBLE_DEBUG='true',
-            ANSIBLE_LOG_PATH=os.path.join(data_context().results, 'logs', 'debug.log'),
+            ANSIBLE_LOG_PATH=os.path.join(ResultType.LOGS.name, 'debug.log'),
         ))
 
     if data_context().content.collection:
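Note the subtle change above: ANSIBLE_LOG_PATH was an absolute path under the results directory and becomes a relative one built from the result type's name (assuming ResultType.LOGS.name == 'logs', per the sketch near the top of this page):

    import os

    results = '/src/ansible/test/results'  # hypothetical data_context().results
    print(os.path.join(results, 'logs', 'debug.log'))  # old: /src/ansible/test/results/logs/debug.log
    print(os.path.join('logs', 'debug.log'))           # new: logs/debug.log (relative)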
@@ -276,7 +276,7 @@ class PathMapper:
         if ext == '.cs':
             return self.get_csharp_module_utils_usage(path)
 
-        if path.startswith('test/integration/targets/'):
+        if is_subdir(path, data_context().content.integration_targets_path):
             return self.get_integration_target_usage(path)
 
         return []

@@ -338,7 +338,8 @@ class PathMapper:
         :rtype: list[str]
         """
         target_name = path.split('/')[3]
-        dependents = [os.path.join('test/integration/targets/%s/' % target) for target in sorted(self.integration_dependencies.get(target_name, set()))]
+        dependents = [os.path.join(data_context().content.integration_targets_path, target) + os.path.sep
+                      for target in sorted(self.integration_dependencies.get(target_name, set()))]
 
         return dependents

@@ -620,22 +621,10 @@ class PathMapper:
         if path.startswith('test/ansible_test/'):
             return minimal  # these tests are not invoked from ansible-test
 
-        if path.startswith('test/cache/'):
-            return minimal
-
-        if path.startswith('test/results/'):
-            return minimal
-
         if path.startswith('test/legacy/'):
             return minimal
 
-        if path.startswith('test/env/'):
-            return minimal
-
-        if path.startswith('test/integration/roles/'):
-            return minimal
-
-        if path.startswith('test/integration/targets/'):
+        if is_subdir(path, data_context().content.integration_targets_path):
             if not os.path.exists(path):
                 return minimal
 

@@ -655,25 +644,8 @@ class PathMapper:
                 FOCUSED_TARGET: True,
             }
 
-        if path.startswith('test/integration/'):
-            if dirname == 'test/integration':
-                if self.prefixes.get(name) == 'network' and ext == '.yaml':
-                    return minimal  # network integration test playbooks are not used by ansible-test
-
-                if filename == 'network-all.yaml':
-                    return minimal  # network integration test playbook not used by ansible-test
-
-                if filename == 'platform_agnostic.yaml':
-                    return minimal  # network integration test playbook not used by ansible-test
-
-                if filename.startswith('inventory.') and filename.endswith('.template'):
-                    return minimal  # ansible-test does not use these inventory templates
-
-                if filename == 'inventory':
-                    return {
-                        'integration': self.integration_all_target,
-                    }
-
+        if is_subdir(path, data_context().content.integration_path):
+            if dirname == data_context().content.integration_path:
                 for command in (
                     'integration',
                     'windows-integration',
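A worked example of the new dependents expression above, assuming integration_targets_path == 'test/integration/targets' and a target depended on by two others (the target names are illustrative):

    import os

    integration_targets_path = 'test/integration/targets'
    integration_dependencies = {'setup_mysql': {'mysql_user', 'mysql_db'}}

    dependents = [os.path.join(integration_targets_path, target) + os.path.sep
                  for target in sorted(integration_dependencies.get('setup_mysql', set()))]
    print(dependents)  # ['test/integration/targets/mysql_db/', 'test/integration/targets/mysql_user/']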
@@ -888,7 +888,7 @@ def complete_network_testcase(prefix, parsed_args, **_):
     if len(parsed_args.include) != 1:
         return []
 
-    test_dir = 'test/integration/targets/%s/tests' % parsed_args.include[0]
+    test_dir = os.path.join(data_context().content.integration_targets_path, parsed_args.include[0], 'tests')
     connection_dirs = data_context().content.get_dirs(test_dir)
 
     for connection_dir in connection_dirs:
@@ -5,7 +5,6 @@ __metaclass__ = type
 import abc
 import atexit
 import datetime
-import json
 import time
 import os
 import platform

@@ -23,10 +22,14 @@ from ..util import (
     load_plugins,
     ABC,
     to_bytes,
-    make_dirs,
     ANSIBLE_TEST_CONFIG_ROOT,
 )
 
+from ..util_common import (
+    write_json_test_results,
+    ResultType,
+)
+
 from ..target import (
     TestTarget,
 )

@@ -158,17 +161,14 @@ def cloud_init(args, targets):
     )
 
     if not args.explain and results:
-        results_path = os.path.join(data_context().results, 'data', '%s-%s.json' % (
-            args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0)))))
+        result_name = '%s-%s.json' % (
+            args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
 
         data = dict(
             clouds=results,
         )
 
-        make_dirs(os.path.dirname(results_path))
-
-        with open(results_path, 'w') as results_fd:
-            results_fd.write(json.dumps(data, sort_keys=True, indent=4))
+        write_json_test_results(ResultType.DATA, result_name, data)
 
 
 class CloudBase(ABC):

@@ -280,8 +280,6 @@ class CloudBase(ABC):
 
 class CloudProvider(CloudBase):
     """Base class for cloud provider plugins. Sets up cloud resources before delegation."""
-    TEST_DIR = 'test/integration'
-
     def __init__(self, args, config_extension='.ini'):
         """
         :type args: IntegrationConfig

@@ -291,7 +289,7 @@ class CloudProvider(CloudBase):
 
         self.remove_config = False
         self.config_static_name = 'cloud-config-%s%s' % (self.platform, config_extension)
-        self.config_static_path = os.path.join(self.TEST_DIR, self.config_static_name)
+        self.config_static_path = os.path.join(data_context().content.integration_path, self.config_static_name)
         self.config_template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, '%s.template' % self.config_static_name)
         self.config_extension = config_extension
 

@@ -352,8 +350,8 @@ class CloudProvider(CloudBase):
         """
         prefix = '%s-' % os.path.splitext(os.path.basename(self.config_static_path))[0]
 
-        with tempfile.NamedTemporaryFile(dir=self.TEST_DIR, prefix=prefix, suffix=self.config_extension, delete=False) as config_fd:
-            filename = os.path.join(self.TEST_DIR, os.path.basename(config_fd.name))
+        with tempfile.NamedTemporaryFile(dir=data_context().content.integration_path, prefix=prefix, suffix=self.config_extension, delete=False) as config_fd:
+            filename = os.path.join(data_context().content.integration_path, os.path.basename(config_fd.name))
 
         self.config_path = filename
         self.remove_config = True
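cloud_init above (and env.py and executor.py below) replaces open()/json.dumps() boilerplate with write_json_test_results. The helper's body is not shown in this diff; here is a minimal sketch consistent with the calls it replaces (sorted, indented JSON written under the result type's directory):

    import json
    import os


    def write_json_test_results(result_type, name, content):
        """Assumed behavior: write content as sorted, indented JSON into result_type.path."""
        path = os.path.join(result_type.path, name)

        if not os.path.isdir(os.path.dirname(path)):  # stand-in for util.make_dirs()
            os.makedirs(os.path.dirname(path))

        with open(path, 'w') as results_fd:
            results_fd.write(json.dumps(content, sort_keys=True, indent=4))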
@@ -3,7 +3,6 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
 import os
-import time
 
 from . import (
     CloudProvider,

@@ -14,10 +13,8 @@ from . import (
 from ..util import (
     find_executable,
     display,
-    ApplicationError,
     is_shippable,
     ConfigParser,
-    SubprocessError,
 )
 
 from ..docker_util import (

@@ -32,10 +29,6 @@ from ..core_ci import (
     AnsibleCoreCI,
 )
 
-from ..http import (
-    HttpClient,
-)
-
 
 class VcenterProvider(CloudProvider):
     """VMware vcenter/esx plugin. Sets up cloud resources for tests."""
@@ -14,7 +14,6 @@ from .util import (
     generate_pip_command,
     get_docker_completion,
     ApplicationError,
-    INTEGRATION_DIR_RELATIVE,
 )
 
 from .util_common import (

@@ -247,7 +246,7 @@ class IntegrationConfig(TestConfig):
 
     def get_ansible_config(self):  # type: () -> str
         """Return the path to the Ansible config for the given config."""
-        ansible_config_relative_path = os.path.join(INTEGRATION_DIR_RELATIVE, '%s.cfg' % self.command)
+        ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command)
         ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path)
 
         if not os.path.exists(ansible_config_path):

@@ -327,6 +326,7 @@ class CoverageConfig(EnvironmentConfig):
         self.group_by = frozenset(args.group_by) if 'group_by' in args and args.group_by else set()  # type: t.FrozenSet[str]
         self.all = args.all if 'all' in args else False  # type: bool
         self.stub = args.stub if 'stub' in args else False  # type: bool
+        self.coverage = False  # temporary work-around to support intercept_command in cover.py
 
 
 class CoverageReportConfig(CoverageConfig):
@@ -28,6 +28,8 @@ from .util import (
 
 from .util_common import (
     run_command,
+    write_json_file,
+    ResultType,
 )
 
 from .config import (

@@ -492,10 +494,7 @@ class AnsibleCoreCI:
 
         config = self.save()
 
-        make_dirs(os.path.dirname(self.path))
-
-        with open(self.path, 'w') as instance_fd:
-            instance_fd.write(json.dumps(config, indent=4, sort_keys=True))
+        write_json_file(self.path, config, create_directories=True)
 
     def save(self):
         """

@@ -559,40 +558,30 @@ class SshKey:
         """
         :type args: EnvironmentConfig
         """
-        cache_dir = os.path.join(data_context().content.root, 'test/cache')
-
-        self.key = os.path.join(cache_dir, self.KEY_NAME)
-        self.pub = os.path.join(cache_dir, self.PUB_NAME)
-
-        key_dst = os.path.relpath(self.key, data_context().content.root)
-        pub_dst = os.path.relpath(self.pub, data_context().content.root)
-
-        if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
-            base_dir = os.path.expanduser('~/.ansible/test/')
-
-            key = os.path.join(base_dir, self.KEY_NAME)
-            pub = os.path.join(base_dir, self.PUB_NAME)
-
-            if not args.explain:
-                make_dirs(base_dir)
-
-            if not os.path.isfile(key) or not os.path.isfile(pub):
-                run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key])
-
-            self.key = key
-            self.pub = pub
-
-            def ssh_key_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
-                """Add the SSH keys to the payload file list."""
-                if data_context().content.collection:
-                    working_path = data_context().content.collection.directory
-                else:
-                    working_path = ''
-
-                files.append((key, os.path.join(working_path, key_dst)))
-                files.append((pub, os.path.join(working_path, pub_dst)))
-
-            data_context().register_payload_callback(ssh_key_callback)
+        key_pair = self.get_key_pair()
+
+        if not key_pair:
+            key_pair = self.generate_key_pair(args)
+
+        key, pub = key_pair
+        key_dst, pub_dst = self.get_in_tree_key_pair_paths()
+
+        def ssh_key_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
+            """
+            Add the SSH keys to the payload file list.
+            They are either outside the source tree or in the cache dir which is ignored by default.
+            """
+            if data_context().content.collection:
+                working_path = data_context().content.collection.directory
+            else:
+                working_path = ''
+
+            files.append((key, os.path.join(working_path, os.path.relpath(key_dst, data_context().content.root))))
+            files.append((pub, os.path.join(working_path, os.path.relpath(pub_dst, data_context().content.root))))
+
+        data_context().register_payload_callback(ssh_key_callback)
+
+        self.key, self.pub = key, pub
 
         if args.explain:
             self.pub_contents = None

@@ -600,6 +589,50 @@ class SshKey:
             with open(self.pub, 'r') as pub_fd:
                 self.pub_contents = pub_fd.read().strip()
 
+    def get_in_tree_key_pair_paths(self):  # type: () -> t.Optional[t.Tuple[str, str]]
+        """Return the ansible-test SSH key pair paths from the content tree."""
+        temp_dir = ResultType.TMP.path
+
+        key = os.path.join(temp_dir, self.KEY_NAME)
+        pub = os.path.join(temp_dir, self.PUB_NAME)
+
+        return key, pub
+
+    def get_source_key_pair_paths(self):  # type: () -> t.Optional[t.Tuple[str, str]]
+        """Return the ansible-test SSH key pair paths for the current user."""
+        base_dir = os.path.expanduser('~/.ansible/test/')
+
+        key = os.path.join(base_dir, self.KEY_NAME)
+        pub = os.path.join(base_dir, self.PUB_NAME)
+
+        return key, pub
+
+    def get_key_pair(self):  # type: () -> t.Optional[t.Tuple[str, str]]
+        """Return the ansible-test SSH key pair paths if present, otherwise return None."""
+        key, pub = self.get_in_tree_key_pair_paths()
+
+        if os.path.isfile(key) and os.path.isfile(pub):
+            return key, pub
+
+        key, pub = self.get_source_key_pair_paths()
+
+        if os.path.isfile(key) and os.path.isfile(pub):
+            return key, pub
+
+        return None
+
+    def generate_key_pair(self, args):  # type: (EnvironmentConfig) -> t.Tuple[str, str]
+        """Generate an SSH key pair for use by all ansible-test invocations for the current user."""
+        key, pub = self.get_source_key_pair_paths()
+
+        if not args.explain:
+            make_dirs(os.path.dirname(key))
+
+        if not os.path.isfile(key) or not os.path.isfile(pub):
+            run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key])
+
+        return key, pub
+
 
 class InstanceConnection:
     """Container for remote instance status and connection details."""
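The reworked SshKey resolves a key pair in a fixed order: an in-tree pair under the results .tmp directory (so a delegated run can reuse the pair shipped in the payload), then the per-user pair in ~/.ansible/test/, and only then generates one. A usage sketch grounded in the constructor above:

    ssh_key = SshKey(args)       # args is an EnvironmentConfig

    print(ssh_key.key)           # private key path picked by get_key_pair(), or freshly generated
    print(ssh_key.pub)           # matching public key path
    print(ssh_key.pub_contents)  # public key text, or None when running with --explain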
@@ -18,6 +18,8 @@ from xml.dom import (
     minidom,
 )
 
+from . import types as t
+
 from .target import (
     walk_module_targets,
     walk_compile_targets,

@@ -34,7 +36,8 @@ from .util import (
 )
 
 from .util_common import (
-    run_command,
+    intercept_command,
+    ResultType,
 )
 
 from .config import (

@@ -57,6 +60,7 @@ from .data import (
 
 COVERAGE_GROUPS = ('command', 'target', 'environment', 'version')
 COVERAGE_CONFIG_PATH = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'coveragerc')
+COVERAGE_OUTPUT_FILE_NAME = 'coverage'
 
 
 def command_coverage_combine(args):

@@ -74,9 +78,9 @@ def _command_coverage_combine_python(args):
     """
     coverage = initialize_coverage(args)
 
-    modules = dict((t.module, t.path) for t in list(walk_module_targets()) if t.path.endswith('.py'))
+    modules = dict((target.module, target.path) for target in list(walk_module_targets()) if target.path.endswith('.py'))
 
-    coverage_dir = os.path.join(data_context().results, 'coverage')
+    coverage_dir = ResultType.COVERAGE.path
     coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
                       if '=coverage.' in f and '=python' in f]
 

@@ -140,7 +144,7 @@ def _command_coverage_combine_python(args):
     invalid_path_count = 0
     invalid_path_chars = 0
 
-    coverage_file = os.path.join(data_context().results, 'coverage', 'coverage')
+    coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME)
 
     for group in sorted(groups):
         arc_data = groups[group]

@@ -322,9 +326,7 @@ def command_coverage_report(args):
         if args.omit:
             options.extend(['--omit', args.omit])
 
-        env = common_environment()
-        env.update(dict(COVERAGE_FILE=output_file))
-        run_command(args, env=env, cmd=['coverage', 'report', '--rcfile', COVERAGE_CONFIG_PATH] + options)
+        run_coverage(args, output_file, 'report', options)
 
 
 def command_coverage_html(args):

@@ -339,10 +341,10 @@ def command_coverage_html(args):
             display.info("Skipping output file %s in html generation" % output_file, verbosity=3)
             continue
 
-        dir_name = os.path.join(data_context().results, 'reports', os.path.basename(output_file))
-        env = common_environment()
-        env.update(dict(COVERAGE_FILE=output_file))
-        run_command(args, env=env, cmd=['coverage', 'html', '--rcfile', COVERAGE_CONFIG_PATH, '-i', '-d', dir_name])
+        dir_name = os.path.join(ResultType.REPORTS.path, os.path.basename(output_file))
+        run_coverage(args, output_file, 'html', ['-i', '-d', dir_name])
+        display.info('HTML report generated: file:///%s' % os.path.join(dir_name, 'index.html'))
 
 
 def command_coverage_xml(args):

@@ -352,7 +354,7 @@ def command_coverage_xml(args):
     output_files = command_coverage_combine(args)
 
     for output_file in output_files:
-        xml_name = os.path.join(data_context().results, 'reports', '%s.xml' % os.path.basename(output_file))
+        xml_name = os.path.join(ResultType.REPORTS.path, '%s.xml' % os.path.basename(output_file))
         if output_file.endswith('-powershell'):
             report = _generage_powershell_xml(output_file)
 

@@ -363,9 +365,7 @@ def command_coverage_xml(args):
             with open(xml_name, 'w') as xml_fd:
                 xml_fd.write(pretty)
         else:
-            env = common_environment()
-            env.update(dict(COVERAGE_FILE=output_file))
-            run_command(args, env=env, cmd=['coverage', 'xml', '--rcfile', COVERAGE_CONFIG_PATH, '-i', '-o', xml_name])
+            run_coverage(args, output_file, 'xml', ['-i', '-o', xml_name])
 
 
 def command_coverage_erase(args):

@@ -374,7 +374,7 @@ def command_coverage_erase(args):
     """
     initialize_coverage(args)
 
-    coverage_dir = os.path.join(data_context().results, 'coverage')
+    coverage_dir = ResultType.COVERAGE.path
 
     for name in os.listdir(coverage_dir):
         if not name.startswith('coverage') and '=coverage.' not in name:

@@ -440,13 +440,13 @@ def _command_coverage_combine_powershell(args):
     :type args: CoverageConfig
     :rtype: list[str]
     """
-    coverage_dir = os.path.join(data_context().results, 'coverage')
+    coverage_dir = ResultType.COVERAGE.path
     coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
                       if '=coverage.' in f and '=powershell' in f]
 
-    def _default_stub_value(line_count):
+    def _default_stub_value(lines):
         val = {}
-        for line in range(line_count):
+        for line in range(lines):
             val[line] = 0
         return val
 

@@ -504,7 +504,7 @@ def _command_coverage_combine_powershell(args):
     invalid_path_count = 0
     invalid_path_chars = 0
 
-    coverage_file = os.path.join(data_context().results, 'coverage', 'coverage')
+    coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME)
 
     for group in sorted(groups):
         coverage_data = groups[group]

@@ -543,7 +543,7 @@ def _command_coverage_combine_powershell(args):
 
 def _generage_powershell_xml(coverage_file):
     """
-    :type input_path: str
+    :type coverage_file: str
     :rtype: Element
     """
     with open(coverage_file, 'rb') as coverage_fd:

@@ -669,7 +669,7 @@ def _add_cobertura_package(packages, package_name, package_data):
 
 def _generate_powershell_output_report(args, coverage_file):
     """
-    :type args: CoverageConfig
+    :type args: CoverageReportConfig
     :type coverage_file: str
     :rtype: str
     """

@@ -756,3 +756,13 @@ def _generate_powershell_output_report(args, coverage_file):
 
     report = '{0}\n{1}\n{2}\n{1}\n{3}'.format(header, line_break, "\n".join(lines), totals)
     return report
+
+
+def run_coverage(args, output_file, command, cmd):  # type: (CoverageConfig, str, str, t.List[str]) -> None
+    """Run the coverage cli tool with the specified options."""
+    env = common_environment()
+    env.update(dict(COVERAGE_FILE=output_file))
+
+    cmd = ['python', '-m', 'coverage', command, '--rcfile', COVERAGE_CONFIG_PATH] + cmd
+
+    intercept_command(args, target_name='coverage', env=env, cmd=cmd, disable_coverage=True)
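All three report commands now funnel through the new run_coverage helper, which routes the coverage CLI through intercept_command and therefore the injector, per the commit message. The disable_coverage=True flag, together with the CoverageConfig.coverage = False work-around in config.py (labeled temporary there), presumably keeps the coverage invocation itself from being measured. The call sites, taken from the hunks above:

    run_coverage(args, output_file, 'report', options)
    run_coverage(args, output_file, 'html', ['-i', '-d', dir_name])
    run_coverage(args, output_file, 'xml', ['-i', '-o', xml_name])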
@@ -17,6 +17,10 @@ from .util import (
     remove_tree,
 )
 
+from .util_common import (
+    write_text_file,
+)
+
 from .data import (
     data_context,
 )

@@ -45,8 +49,7 @@ def coverage_setup(args):  # type: (TestConfig) -> None
     else:
         args.coverage_config_base_path = tempfile.mkdtemp()
 
-    with open(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), 'w') as coverage_config_path_fd:
-        coverage_config_path_fd.write(coverage_config)
+    write_text_file(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), coverage_config)
 
 
 def coverage_cleanup(args):  # type: (TestConfig) -> None

@@ -81,6 +84,7 @@ omit =
     */pyshared/*
     */pytest
     */AnsiballZ_*.py
+    */test/results/*
 '''
 
     return coverage_config

@@ -110,7 +114,7 @@ include =
     %s/*
 
 omit =
-    */test/runner/.tox/*
+    */test/results/*
 ''' % data_context().content.root
     else:
         coverage_config += '''
@@ -72,7 +72,8 @@ class DataContext:
         content = self.__create_content_layout(layout_providers, source_providers, current_path, True)
 
         self.content = content  # type: ContentLayout
-        self.results = os.path.join(self.content.root, 'test', 'results')
+        self.results_relative = os.path.join('test', 'results')
+        self.results = os.path.join(self.content.root, self.results_relative)
 
     def create_collection_layouts(self):  # type: () -> t.List[ContentLayout]
         """
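Storing the relative form separately is what lets the delegation code below rebuild the same results layout under a different content root:

    import os

    results_relative = os.path.join('test', 'results')
    content_root = '/root/ansible'  # hypothetical remote install root

    print(os.path.join(content_root, results_relative))  # /root/ansible/test/results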
@@ -50,6 +50,7 @@ from .util import (
 
 from .util_common import (
     run_command,
+    ResultType,
 )
 
 from .docker_util import (

@@ -241,6 +242,8 @@ def delegate_docker(args, exclude, require, integration_targets):
     else:
         content_root = install_root
 
+    remote_results_root = os.path.join(content_root, data_context().results_relative)
+
     cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)
 
     if isinstance(args, TestConfig):

@@ -321,19 +324,12 @@ def delegate_docker(args, exclude, require, integration_targets):
            # also disconnect from the network once requirements have been installed
            if isinstance(args, UnitsConfig):
                writable_dirs = [
-                   os.path.join(install_root, '.pytest_cache'),
+                   os.path.join(content_root, ResultType.JUNIT.relative_path),
+                   os.path.join(content_root, ResultType.COVERAGE.relative_path),
                ]
 
-               if content_root != install_root:
-                   writable_dirs.append(os.path.join(content_root, 'test/results/junit'))
-                   writable_dirs.append(os.path.join(content_root, 'test/results/coverage'))
-
                docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
 
-               if content_root == install_root:
-                   docker_exec(args, test_id, ['find', os.path.join(content_root, 'test/results/'), '-type', 'd', '-exec', 'chmod', '777', '{}', '+'])
-
                docker_exec(args, test_id, ['chmod', '755', '/root'])
                docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])
 

@@ -353,10 +349,16 @@ def delegate_docker(args, exclude, require, integration_targets):
        try:
            docker_exec(args, test_id, cmd, options=cmd_options)
        finally:
+           local_test_root = os.path.dirname(data_context().results)
+
+           remote_test_root = os.path.dirname(remote_results_root)
+           remote_results_name = os.path.basename(remote_results_root)
+           remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')
+
            with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
-               docker_exec(args, test_id, ['tar', 'czf', '/root/results.tgz', '-C', os.path.join(content_root, 'test'), 'results'])
-               docker_get(args, test_id, '/root/results.tgz', local_result_fd.name)
-               run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', 'test'])
+               docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '-C', remote_test_root, remote_results_name])
+               docker_get(args, test_id, remote_temp_file, local_result_fd.name)
+               run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
    finally:
        if httptester_id:
            docker_rm(args, httptester_id)

@@ -470,8 +472,14 @@ def delegate_remote(args, exclude, require, integration_targets):
                download = False
 
        if download and content_root:
-           manage.ssh('rm -rf /tmp/results && cp -a %s/test/results /tmp/results && chmod -R a+r /tmp/results' % content_root)
-           manage.download('/tmp/results', 'test')
+           local_test_root = os.path.dirname(data_context().results)
+
+           remote_results_root = os.path.join(content_root, data_context().results_relative)
+           remote_results_name = os.path.basename(remote_results_root)
+           remote_temp_path = os.path.join('/tmp', remote_results_name)
+
+           manage.ssh('rm -rf {0} && cp -a {1} {0} && chmod -R a+r {0}'.format(remote_temp_path, remote_results_root))
+           manage.download(remote_temp_path, local_test_root)
    finally:
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            core_ci.stop()
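A worked example of the path arithmetic used for result retrieval above, with hypothetical local and remote roots:

    import os

    results = '/src/ansible/test/results'               # data_context().results (hypothetical)
    remote_results_root = '/root/ansible/test/results'  # content_root + results_relative

    print(os.path.dirname(results))                     # /src/ansible/test   -> local_test_root
    print(os.path.dirname(remote_results_root))         # /root/ansible/test  -> remote_test_root
    print(os.path.basename(remote_results_root))        # results             -> remote_results_name
    print(os.path.join('/root', 'results' + '.tgz'))    # /root/results.tgz   -> remote_temp_file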
@@ -26,6 +26,12 @@ from .util import (
     get_available_python_versions,
 )
 
+from .util_common import (
+    write_json_test_results,
+    write_json_file,
+    ResultType,
+)
+
 from .git import (
     Git,
 )

@@ -47,10 +53,6 @@ from .test import (
     TestTimeout,
 )
 
-from .data import (
-    data_context,
-)
-
 from .executor import (
     SUPPORTED_PYTHON_VERSIONS,
 )

@@ -122,8 +124,7 @@ def show_dump_env(args):
     show_dict(data, verbose)
 
     if args.dump and not args.explain:
-        with open(os.path.join(data_context().results, 'bot', 'data-environment.json'), 'w') as results_fd:
-            results_fd.write(json.dumps(data, sort_keys=True))
+        write_json_test_results(ResultType.BOT, 'data-environment.json', data)
 
 
 def set_timeout(args):

@@ -151,8 +152,7 @@ def set_timeout(args):
             deadline=deadline,
         )
 
-        with open(TIMEOUT_PATH, 'w') as timeout_fd:
-            json.dump(data, timeout_fd, indent=4, sort_keys=True)
+        write_json_file(TIMEOUT_PATH, data)
     elif os.path.exists(TIMEOUT_PATH):
         os.remove(TIMEOUT_PATH)
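write_json_file is the sibling helper for fixed paths, used above for TIMEOUT_PATH and in core_ci.py, and below in metadata.py. Its body is likewise not in this diff; a sketch consistent with the calls it replaces:

    import json
    import os


    def write_json_file(path, content, create_directories=False):
        """Assumed behavior: serialize content to path as sorted, indented JSON."""
        if create_directories and not os.path.isdir(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))  # stand-in for util.make_dirs()

        with open(path, 'w') as file_fd:
            file_fd.write(json.dumps(content, sort_keys=True, indent=4))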
@@ -56,7 +56,6 @@ from .util import (
     find_python,
     get_docker_completion,
     get_remote_completion,
-    COVERAGE_OUTPUT_NAME,
     cmd_quote,
     ANSIBLE_LIB_ROOT,
     ANSIBLE_TEST_DATA_ROOT,

@@ -71,6 +70,9 @@ from .util_common import (
     intercept_command,
     named_temporary_file,
     run_command,
+    write_text_file,
+    write_json_test_results,
+    ResultType,
 )
 
 from .docker_util import (

@@ -128,9 +130,7 @@ from .integration import (
     integration_test_environment,
     integration_test_config_file,
     setup_common_temp_dir,
-    INTEGRATION_VARS_FILE_RELATIVE,
     get_inventory_relative_path,
-    INTEGRATION_DIR_RELATIVE,
     check_inventory,
     delegate_inventory,
 )

@@ -198,8 +198,8 @@ def install_command_requirements(args, python_version=None):
     :type python_version: str | None
     """
     if not args.explain:
-        make_dirs(os.path.join(data_context().results, 'coverage'))
-        make_dirs(os.path.join(data_context().results, 'data'))
+        make_dirs(ResultType.COVERAGE.path)
+        make_dirs(ResultType.DATA.path)
 
     if isinstance(args, ShellConfig):
         if args.raw:

@@ -322,12 +322,9 @@ Author-email: info@ansible.com
 License: GPLv3+
 ''' % get_ansible_version()
 
-    os.mkdir(egg_info_path)
-
     pkg_info_path = os.path.join(egg_info_path, 'PKG-INFO')
 
-    with open(pkg_info_path, 'w') as pkg_info_fd:
-        pkg_info_fd.write(pkg_info.lstrip())
+    write_text_file(pkg_info_path, pkg_info.lstrip(), create_directories=True)
 
 
 def generate_pip_install(pip, command, packages=None):

@@ -394,7 +391,7 @@ def command_network_integration(args):
     template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
 
     if args.inventory:
-        inventory_path = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE, args.inventory)
+        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
     else:
         inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
 

@@ -445,8 +442,7 @@ def command_network_integration(args):
         display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
 
         if not args.explain:
-            with open(inventory_path, 'w') as inventory_fd:
-                inventory_fd.write(inventory)
+            write_text_file(inventory_path, inventory)
 
     success = False
 

@@ -576,7 +572,7 @@ def command_windows_integration(args):
     template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
 
     if args.inventory:
-        inventory_path = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE, args.inventory)
+        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
     else:
         inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
 

@@ -620,8 +616,7 @@ def command_windows_integration(args):
         display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
 
         if not args.explain:
-            with open(inventory_path, 'w') as inventory_fd:
-                inventory_fd.write(inventory)
+            write_text_file(inventory_path, inventory)
 
     use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in internal_targets)
     # if running under Docker delegation, the httptester may have already been started

@@ -681,9 +676,9 @@ def command_windows_integration(args):
         pre_target = forward_ssh_ports
         post_target = cleanup_ssh_ports
 
-    def run_playbook(playbook, playbook_vars):
+    def run_playbook(playbook, run_playbook_vars):  # type: (str, t.Dict[str, t.Any]) -> None
         playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
-        command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(playbook_vars)]
+        command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(run_playbook_vars)]
         if args.verbosity:
             command.append('-%s' % ('v' * args.verbosity))
 

@@ -716,7 +711,7 @@ def command_windows_integration(args):
 
         for filename in os.listdir(local_temp_path):
             with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip:
-                coverage_zip.extractall(os.path.join(data_context().results, 'coverage'))
+                coverage_zip.extractall(ResultType.COVERAGE.path)
 
     if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
         for instance in instances:

@@ -882,7 +877,7 @@ def command_integration_filter(args,  # type: TIntegrationConfig
 
     cloud_init(args, internal_targets)
 
-    vars_file_src = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE)
+    vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
 
     if os.path.exists(vars_file_src):
         def integration_config_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None

@@ -895,7 +890,7 @@ def command_integration_filter(args,  # type: TIntegrationConfig
             else:
                 working_path = ''
 
-            files.append((vars_file_src, os.path.join(working_path, INTEGRATION_VARS_FILE_RELATIVE)))
+            files.append((vars_file_src, os.path.join(working_path, data_context().content.integration_vars_path)))
 
         data_context().register_payload_callback(integration_config_callback)
 

@@ -1086,23 +1081,22 @@ def command_integration_filtered(args, targets, all_targets, inventory_path, pre
     finally:
         if not args.explain:
             if args.coverage:
-                coverage_temp_path = os.path.join(common_temp_path, COVERAGE_OUTPUT_NAME)
-                coverage_save_path = os.path.join(data_context().results, 'coverage')
+                coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name)
+                coverage_save_path = ResultType.COVERAGE.path
 
                 for filename in os.listdir(coverage_temp_path):
                     shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename))
 
             remove_tree(common_temp_path)
 
-            results_path = os.path.join(data_context().results, 'data', '%s-%s.json' % (
-                args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0)))))
+            result_name = '%s-%s.json' % (
+                args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
 
             data = dict(
                 targets=results,
             )
 
-            with open(results_path, 'w') as results_fd:
-                results_fd.write(json.dumps(data, sort_keys=True, indent=4))
+            write_json_test_results(ResultType.DATA, result_name, data)
 
     if failed:
         raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (

@@ -1286,7 +1280,7 @@ def integration_environment(args, target, test_dir, inventory_path, ansible_conf
     callback_plugins = ['junit'] + (env_config.callback_plugins or [] if env_config else [])
 
     integration = dict(
-        JUNIT_OUTPUT_DIR=os.path.join(data_context().results, 'junit'),
+        JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
         ANSIBLE_CALLBACK_WHITELIST=','.join(sorted(set(callback_plugins))),
         ANSIBLE_TEST_CI=args.metadata.ci_provider,
         ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),
@@ -5,6 +5,8 @@ __metaclass__ = type
 import ast
 import os
 
+from . import types as t
+
 from .util import (
     display,
     ApplicationError,

@@ -35,13 +37,8 @@ def get_python_module_utils_imports(compile_targets):
     for target in compile_targets:
         imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils)
 
-    def recurse_import(import_name, depth=0, seen=None):
-        """Recursively expand module_utils imports from module_utils files.
-        :type import_name: str
-        :type depth: int
-        :type seen: set[str] | None
-        :rtype set[str]
-        """
+    def recurse_import(import_name, depth=0, seen=None):  # type: (str, int, t.Optional[t.Set[str]]) -> t.Set[str]
+        """Recursively expand module_utils imports from module_utils files."""
         display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)
 
         if seen is None:
@ -27,17 +27,16 @@ from ..util import (
|
||||||
display,
|
display,
|
||||||
make_dirs,
|
make_dirs,
|
||||||
COVERAGE_CONFIG_NAME,
|
COVERAGE_CONFIG_NAME,
|
||||||
COVERAGE_OUTPUT_NAME,
|
|
||||||
MODE_DIRECTORY,
|
MODE_DIRECTORY,
|
||||||
MODE_DIRECTORY_WRITE,
|
MODE_DIRECTORY_WRITE,
|
||||||
MODE_FILE,
|
MODE_FILE,
|
||||||
INTEGRATION_DIR_RELATIVE,
|
|
||||||
INTEGRATION_VARS_FILE_RELATIVE,
|
|
||||||
to_bytes,
|
to_bytes,
|
||||||
)
|
)
|
||||||
|
|
||||||
from ..util_common import (
|
from ..util_common import (
|
||||||
named_temporary_file,
|
named_temporary_file,
|
||||||
|
write_text_file,
|
||||||
|
ResultType,
|
||||||
)
|
)
|
||||||
|
|
||||||
from ..coverage_util import (
|
from ..coverage_util import (
|
||||||
|
@ -73,12 +72,11 @@ def setup_common_temp_dir(args, path):
|
||||||
|
|
||||||
coverage_config = generate_coverage_config(args)
|
coverage_config = generate_coverage_config(args)
|
||||||
|
|
||||||
with open(coverage_config_path, 'w') as coverage_config_fd:
|
write_text_file(coverage_config_path, coverage_config)
|
||||||
coverage_config_fd.write(coverage_config)
|
|
||||||
|
|
||||||
os.chmod(coverage_config_path, MODE_FILE)
|
os.chmod(coverage_config_path, MODE_FILE)
|
||||||
|
|
||||||
coverage_output_path = os.path.join(path, COVERAGE_OUTPUT_NAME)
|
coverage_output_path = os.path.join(path, ResultType.COVERAGE.name)
|
||||||
|
|
||||||
os.mkdir(coverage_output_path)
|
os.mkdir(coverage_output_path)
|
||||||
os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
|
os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
|
||||||
|
@ -153,7 +151,7 @@ def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str
|
||||||
NetworkIntegrationConfig: 'inventory.networking',
|
NetworkIntegrationConfig: 'inventory.networking',
|
||||||
     }  # type: t.Dict[t.Type[IntegrationConfig], str]
 
-    return os.path.join(INTEGRATION_DIR_RELATIVE, inventory_names[type(args)])
+    return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
 
 
 def delegate_inventory(args, inventory_path_src):  # type: (IntegrationConfig, str) -> None
 
@@ -202,10 +200,10 @@ def integration_test_environment(args, target, inventory_path_src):
     if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases:
         display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
 
-        integration_dir = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE)
+        integration_dir = os.path.join(data_context().content.root, data_context().content.integration_path)
         inventory_path = inventory_path_src
         ansible_config = ansible_config_src
-        vars_file = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE)
+        vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
 
         yield IntegrationEnvironment(integration_dir, inventory_path, ansible_config, vars_file)
         return
 
@@ -237,11 +235,11 @@ def integration_test_environment(args, target, inventory_path_src):
 
         files_needed = get_files_needed(target_dependencies)
 
-        integration_dir = os.path.join(temp_dir, INTEGRATION_DIR_RELATIVE)
+        integration_dir = os.path.join(temp_dir, data_context().content.integration_path)
         ansible_config = os.path.join(temp_dir, ansible_config_relative)
 
-        vars_file_src = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE)
-        vars_file = os.path.join(temp_dir, INTEGRATION_VARS_FILE_RELATIVE)
+        vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
+        vars_file = os.path.join(temp_dir, data_context().content.integration_vars_path)
 
         file_copies = [
             (ansible_config_src, ansible_config),
@@ -253,8 +251,10 @@ def integration_test_environment(args, target, inventory_path_src):
 
         file_copies += [(path, os.path.join(temp_dir, path)) for path in files_needed]
 
+        integration_targets_relative_path = data_context().content.integration_targets_path
+
         directory_copies = [
-            (os.path.join(INTEGRATION_DIR_RELATIVE, 'targets', target.name), os.path.join(integration_dir, 'targets', target.name))
+            (os.path.join(integration_targets_relative_path, target.name), os.path.join(temp_dir, integration_targets_relative_path, target.name))
             for target in target_dependencies
        ]
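Illustration (not part of the commit): the hard-coded constants give way to layout-provided paths, so the same call sites work for both the Ansible layout and collection layouts. A minimal Python sketch, assuming a content root of '/src/ansible' and the Ansible layout's 'test/integration' value:

    import os

    content_root = '/src/ansible'          # assumed content root
    integration_path = 'test/integration'  # supplied by the layout provider

    integration_dir = os.path.join(content_root, integration_path)
    vars_file = os.path.join(content_root, integration_path, 'integration_config.yml')

    print(integration_dir)  # /src/ansible/test/integration
    print(vars_file)        # /src/ansible/test/integration/integration_config.yml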
@@ -11,6 +11,10 @@ from .util import (
     is_shippable,
 )
 
+from .util_common import (
+    write_json_file,
+)
+
 from .diff import (
     parse_diff,
     FileDiff,
@@ -72,8 +76,7 @@ class Metadata:
 
         display.info('>>> Metadata: %s\n%s' % (path, data), verbosity=3)
 
-        with open(path, 'w') as data_fd:
-            json.dump(data, data_fd, sort_keys=True, indent=4)
+        write_json_file(path, data)
 
     @staticmethod
     def from_file(path):
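A sketch of the behavior change (write_json_file itself is added to util_common.py later in this diff): the serialized output is unchanged apart from ensure_ascii=False and a trailing newline. The dictionary below is illustrative only:

    import json

    data = {'example': True}  # illustrative data, not the real metadata

    # before: inline serialization, no trailing newline
    with open('metadata.json', 'w') as data_fd:
        json.dump(data, data_fd, sort_keys=True, indent=4)

    # after: equivalent of write_json_file('metadata.json', data)
    with open('metadata.json', 'w') as data_fd:
        data_fd.write(json.dumps(data, sort_keys=True, indent=4, ensure_ascii=False) + '\n')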
@@ -81,6 +81,7 @@ class ContentLayout(Layout):
                  paths,  # type: t.List[str]
                  plugin_paths,  # type: t.Dict[str, str]
                  collection=None,  # type: t.Optional[CollectionDetail]
+                 integration_path=None,  # type: t.Optional[str]
                  unit_path=None,  # type: t.Optional[str]
                  unit_module_path=None,  # type: t.Optional[str]
                  unit_module_utils_path=None,  # type: t.Optional[str]
@@ -89,6 +90,9 @@ class ContentLayout(Layout):
 
         self.plugin_paths = plugin_paths
         self.collection = collection
+        self.integration_path = integration_path
+        self.integration_targets_path = os.path.join(integration_path, 'targets')
+        self.integration_vars_path = os.path.join(integration_path, 'integration_config.yml')
         self.unit_path = unit_path
         self.unit_module_path = unit_module_path
         self.unit_module_utils_path = unit_module_utils_path
@@ -31,6 +31,7 @@ class AnsibleLayout(LayoutProvider):
         return ContentLayout(root,
                              paths,
                              plugin_paths=plugin_paths,
+                             integration_path='test/integration',
                              unit_path='test/units',
                              unit_module_path='test/units/modules',
                              unit_module_utils_path='test/units/module_utils',
@@ -44,6 +44,7 @@ class CollectionLayout(LayoutProvider):
                                  namespace=collection_namespace,
                                  root=collection_root,
                              ),
+                             integration_path='test/integration',
                              unit_path='test/unit',
                              unit_module_path='test/unit/plugins/modules',
                              unit_module_utils_path='test/unit/plugins/module_utils',
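Illustration (not part of the commit): given a single integration_path from the layout provider, ContentLayout derives the other two integration paths, so one per-layout value drives everything. A minimal sketch:

    import os

    integration_path = 'test/integration'  # value passed by either layout provider

    integration_targets_path = os.path.join(integration_path, 'targets')
    integration_vars_path = os.path.join(integration_path, 'integration_config.yml')

    print(integration_targets_path)  # test/integration/targets
    print(integration_vars_path)     # test/integration/integration_config.yml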
@@ -24,7 +24,6 @@ from ..util import (
     display,
     find_python,
     parse_to_list_of_dict,
-    make_dirs,
     is_subdir,
     ANSIBLE_LIB_ROOT,
 )
@@ -32,6 +31,8 @@ from ..util import (
 from ..util_common import (
     intercept_command,
     run_command,
+    write_text_file,
+    ResultType,
 )
 
 from ..ansible_util import (
@@ -75,8 +76,10 @@ class ImportTest(SanityMultipleVersion):
 
         env = ansible_environment(args, color=False)
 
+        temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')
+
         # create a clean virtual environment to minimize the available imports beyond the python standard library
-        virtual_environment_path = os.path.abspath('test/runner/.tox/minimal-py%s' % python_version.replace('.', ''))
+        virtual_environment_path = os.path.join(temp_root, 'minimal-py%s' % python_version.replace('.', ''))
         virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')
 
         remove_tree(virtual_environment_path)
@@ -96,7 +99,7 @@ class ImportTest(SanityMultipleVersion):
         os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path)
 
         # create a minimal python library
-        python_path = os.path.abspath('test/runner/.tox/import/lib')
+        python_path = os.path.join(temp_root, 'lib')
         ansible_path = os.path.join(python_path, 'ansible')
         ansible_init = os.path.join(ansible_path, '__init__.py')
         ansible_link = os.path.join(ansible_path, 'module_utils')
@@ -104,10 +107,7 @@ class ImportTest(SanityMultipleVersion):
         if not args.explain:
             remove_tree(ansible_path)
 
-            make_dirs(ansible_path)
-
-            with open(ansible_init, 'w'):
-                pass
+            write_text_file(ansible_init, '', create_directories=True)
 
             os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'module_utils'), ansible_link)
 
@@ -116,21 +116,22 @@ class ImportTest(SanityMultipleVersion):
             # the __init__.py files are needed only for Python 2.x
             # the empty modules directory is required for the collection loader to generate the synthetic packages list
 
-            make_dirs(os.path.join(ansible_path, 'utils'))
-            with open(os.path.join(ansible_path, 'utils/__init__.py'), 'w'):
-                pass
+            write_text_file(os.path.join(ansible_path, 'utils/__init__.py'), '', create_directories=True)
 
             os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'utils', 'collection_loader.py'), os.path.join(ansible_path, 'utils', 'collection_loader.py'))
             os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'utils', 'singleton.py'), os.path.join(ansible_path, 'utils', 'singleton.py'))
 
-            make_dirs(os.path.join(ansible_path, 'modules'))
-            with open(os.path.join(ansible_path, 'modules/__init__.py'), 'w'):
-                pass
+            write_text_file(os.path.join(ansible_path, 'modules/__init__.py'), '', create_directories=True)
 
         # activate the virtual environment
         env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
         env['PYTHONPATH'] = python_path
 
+        env.update(
+            SANITY_IMPORT_DIR=os.path.relpath(temp_root, data_context().content.root) + os.path.sep,
+            SANITY_MINIMAL_DIR=os.path.relpath(virtual_environment_path, data_context().content.root) + os.path.sep,
+        )
+
         # make sure coverage is available in the virtual environment if needed
         if args.coverage:
             run_command(args, generate_pip_install(['pip'], 'sanity.import', packages=['setuptools']), env=env)
@@ -163,9 +164,11 @@ class ImportTest(SanityMultipleVersion):
 
             results = parse_to_list_of_dict(pattern, ex.stdout)
 
+            relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep
+
             results = [SanityMessage(
                 message=r['message'],
-                path=r['path'],
+                path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'],
                 line=int(r['line']),
                 column=int(r['column']),
             ) for r in results]
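Illustration (not part of the commit): both SANITY_IMPORT_DIR and SANITY_MINIMAL_DIR carry content-root-relative directory prefixes with a trailing separator, which lets consumers classify reported paths with a plain startswith() check. A sketch, assuming results resolve under test/results:

    import os

    content_root = '/src/ansible'  # assumed
    temp_root = os.path.join(content_root, 'test/results/.tmp/sanity/import')  # assumed ResultType.TMP layout

    sanity_import_dir = os.path.relpath(temp_root, content_root) + os.path.sep
    print(sanity_import_dir)  # test/results/.tmp/sanity/import/

    path = 'test/results/.tmp/sanity/import/lib/ansible/module_utils/basic.py'
    print(path.startswith(sanity_import_dir))  # True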
@@ -2,7 +2,6 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
-import json
 import textwrap
 import re
 import os
@@ -37,8 +36,9 @@ from ..util import (
     display,
 )
 
-from ..data import (
-    data_context,
+from ..util_common import (
+    write_json_test_results,
+    ResultType,
 )
 
 
@@ -180,8 +180,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
 
         self.check_changes(args, results)
 
-        with open(os.path.join(data_context().results, 'bot', 'data-sanity-ci.json'), 'w') as results_fd:
-            json.dump(results, results_fd, sort_keys=True, indent=4)
+        write_json_test_results(ResultType.BOT, 'data-sanity-ci.json', results)
 
        messages = []
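A usage note (not part of the commit): with ResultType.BOT naming the bot results subdirectory, the rewritten call resolves to the same file as before, but the helper now creates missing directories. A sketch assuming results live under test/results:

    import os

    results_root = 'test/results'  # assumed Ansible-layout results location
    print(os.path.join(results_root, 'bot', 'data-sanity-ci.json'))
    # test/results/bot/data-sanity-ci.json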
@@ -228,7 +228,7 @@ def walk_integration_targets():
     """
     :rtype: collections.Iterable[IntegrationTarget]
     """
-    path = 'test/integration/targets'
+    path = data_context().content.integration_targets_path
     modules = frozenset(target.module for target in walk_module_targets())
     paths = data_context().content.get_dirs(path)
     prefixes = load_integration_prefixes()
@@ -241,7 +241,7 @@ def load_integration_prefixes():
     """
     :rtype: dict[str, str]
     """
-    path = 'test/integration'
+    path = data_context().content.integration_path
     file_paths = sorted(f for f in data_context().content.get_files(path) if os.path.splitext(os.path.basename(f))[0] == 'target-prefixes')
     prefixes = {}
 
@@ -306,7 +306,7 @@ def analyze_integration_target_dependencies(integration_targets):
     :type integration_targets: list[IntegrationTarget]
     :rtype: dict[str,set[str]]
     """
-    real_target_root = os.path.realpath('test/integration/targets') + '/'
+    real_target_root = os.path.realpath(data_context().content.integration_targets_path) + '/'
 
     role_targets = [target for target in integration_targets if target.type == 'role']
     hidden_role_target_names = set(target.name for target in role_targets if 'hidden/' in target.aliases)
@@ -595,10 +595,12 @@ class IntegrationTarget(CompletionTarget):
         if self.type not in ('script', 'role'):
             groups.append('hidden')
 
+        targets_relative_path = data_context().content.integration_targets_path
+
         # Collect file paths before group expansion to avoid including the directories.
         # Ignore references to test targets, as those must be defined using `needs/target/*` or other target references.
         self.needs_file = tuple(sorted(set('/'.join(g.split('/')[2:]) for g in groups if
-                                           g.startswith('needs/file/') and not g.startswith('needs/file/test/integration/targets/'))))
+                                           g.startswith('needs/file/') and not g.startswith('needs/file/%s/' % targets_relative_path))))
 
         for group in itertools.islice(groups, 0, len(groups)):
             if '/' in group:
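Illustration (not part of the commit) of the rewritten needs/file filter, with targets_relative_path assumed to be 'test/integration/targets' and hypothetical group entries:

    targets_relative_path = 'test/integration/targets'  # assumed layout value

    groups = [
        'needs/file/hacking/env-setup',                      # hypothetical file reference
        'needs/file/test/integration/targets/setup_remote',  # hypothetical target reference, filtered out
    ]

    needs_file = tuple(sorted(set(
        '/'.join(g.split('/')[2:]) for g in groups
        if g.startswith('needs/file/') and not g.startswith('needs/file/%s/' % targets_relative_path)
    )))

    print(needs_file)  # ('hacking/env-setup',)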
@@ -3,25 +3,24 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
 import datetime
-import json
 import os
 
 from . import types as t
 
 from .util import (
     display,
-    make_dirs,
-    to_bytes,
+)
+
+from .util_common import (
+    write_text_test_results,
+    write_json_test_results,
+    ResultType,
 )
 
 from .config import (
     TestConfig,
 )
 
-from .data import (
-    data_context,
-)
-
 
 def calculate_best_confidence(choices, metadata):
     """
@@ -118,23 +117,22 @@ class TestResult:
     :type args: TestConfig
     """
 
-    def create_path(self, directory, extension):
+    def create_result_name(self, extension):
         """
-        :type directory: str
         :type extension: str
         :rtype: str
         """
-        path = os.path.join(data_context().results, directory, 'ansible-test-%s' % self.command)
+        name = 'ansible-test-%s' % self.command
 
         if self.test:
-            path += '-%s' % self.test
+            name += '-%s' % self.test
 
         if self.python_version:
-            path += '-python-%s' % self.python_version
+            name += '-python-%s' % self.python_version
 
-        path += extension
+        name += extension
 
-        return path
+        return name
 
     def save_junit(self, args, test_case, properties=None):
         """
@@ -143,8 +141,6 @@ class TestResult:
         :type properties: dict[str, str] | None
         :rtype: str | None
         """
-        path = self.create_path('junit', '.xml')
-
         test_suites = [
             self.junit.TestSuite(
                 name='ansible-test',
@@ -159,8 +155,7 @@ class TestResult:
         if args.explain:
             return
 
-        with open(path, 'wb') as xml:
-            xml.write(to_bytes(report))
+        write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), report)
 
 
 class TestTimeout(TestResult):
@@ -207,10 +202,7 @@ One or more of the following situations may be responsible:
 </testsuites>
 ''' % (timestamp, message, output)
 
-        path = self.create_path('junit', '.xml')
-
-        with open(path, 'w') as junit_fd:
-            junit_fd.write(xml.lstrip())
+        write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), xml.lstrip())
 
 
 class TestSuccess(TestResult):
@@ -335,16 +327,10 @@ class TestFailure(TestResult):
             ],
         )
 
-        path = self.create_path('bot', '.json')
-
         if args.explain:
             return
 
-        make_dirs(os.path.dirname(path))
-
-        with open(path, 'w') as bot_fd:
-            json.dump(bot_data, bot_fd, indent=4, sort_keys=True)
-            bot_fd.write('\n')
+        write_json_test_results(ResultType.BOT, self.create_result_name('.json'), bot_data)
 
     def populate_confidence(self, metadata):
         """
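Illustration (not part of the commit): create_result_name() now yields only a file name; the ResultType category supplies the directory. A sketch with hypothetical values for a sanity import test on Python 3.7:

    command = 'sanity'  # hypothetical values
    test = 'import'
    python_version = '3.7'

    name = 'ansible-test-%s' % command
    if test:
        name += '-%s' % test
    if python_version:
        name += '-python-%s' % python_version
    name += '.xml'

    print(name)  # ansible-test-sanity-import-python-3.7.xml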
@@ -17,6 +17,7 @@ try:
         Tuple,
         Type,
         TypeVar,
+        Union,
     )
 except ImportError:
     pass
@@ -15,6 +15,7 @@ from ..util import (
 
 from ..util_common import (
     intercept_command,
+    ResultType,
 )
 
 from ..ansible_util import (
@@ -98,7 +99,7 @@ def command_units(args):
         'yes' if args.color else 'no',
         '-p', 'no:cacheprovider',
         '-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest.ini'),
-        '--junit-xml', os.path.join(data_context().results, 'junit', 'python%s-units.xml' % version),
+        '--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-units.xml' % version),
     ]
 
     if not data_context().content.collection:
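Illustration (not part of the commit): the --junit-xml argument now composes from ResultType.JUNIT.path instead of a hand-built results path. Assuming results resolve to test/results:

    import os

    junit_path = 'test/results/junit'  # assumed value of ResultType.JUNIT.path
    version = '3.7'

    print(os.path.join(junit_path, 'python%s-units.xml' % version))
    # test/results/junit/python3.7-units.xml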
@@ -62,7 +62,6 @@ except AttributeError:
     MAXFD = -1
 
 COVERAGE_CONFIG_NAME = 'coveragerc'
-COVERAGE_OUTPUT_NAME = 'coverage'
 
 ANSIBLE_TEST_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 
@@ -82,9 +81,6 @@ if not os.path.exists(ANSIBLE_LIB_ROOT):
 ANSIBLE_TEST_DATA_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_data')
 ANSIBLE_TEST_CONFIG_ROOT = os.path.join(ANSIBLE_TEST_ROOT, 'config')
 
-INTEGRATION_DIR_RELATIVE = 'test/integration'
-INTEGRATION_VARS_FILE_RELATIVE = os.path.join(INTEGRATION_DIR_RELATIVE, 'integration_config.yml')
-
 # Modes are set to allow all users the same level of access.
 # This permits files to be used in tests that change users.
 # The only exception is write access to directories for the user creating them.
@@ -801,8 +797,8 @@ def get_available_port():
 
 def get_subclasses(class_type):  # type: (t.Type[C]) -> t.Set[t.Type[C]]
     """Returns the set of types that are concrete subclasses of the given type."""
-    subclasses = set()
-    queue = [class_type]
+    subclasses = set()  # type: t.Set[t.Type[C]]
+    queue = [class_type]  # type: t.List[t.Type[C]]
 
     while queue:
         parent = queue.pop()
@@ -4,15 +4,17 @@ __metaclass__ = type
 
 import atexit
 import contextlib
+import json
 import os
 import shutil
 import tempfile
 import textwrap
 
+from . import types as t
+
 from .util import (
     common_environment,
     COVERAGE_CONFIG_NAME,
-    COVERAGE_OUTPUT_NAME,
     display,
     find_python,
     is_shippable,
@@ -22,6 +24,7 @@ from .util import (
     raw_command,
     to_bytes,
     ANSIBLE_TEST_DATA_ROOT,
+    make_dirs,
 )
 
 from .data import (
@@ -29,6 +32,47 @@ from .data import (
 )
 
 
+class ResultType:
+    """Test result type."""
+    BOT = None  # type: ResultType
+    COVERAGE = None  # type: ResultType
+    DATA = None  # type: ResultType
+    JUNIT = None  # type: ResultType
+    LOGS = None  # type: ResultType
+    REPORTS = None  # type: ResultType
+    TMP = None  # type: ResultType
+
+    @staticmethod
+    def _populate():
+        ResultType.BOT = ResultType('bot')
+        ResultType.COVERAGE = ResultType('coverage')
+        ResultType.DATA = ResultType('data')
+        ResultType.JUNIT = ResultType('junit')
+        ResultType.LOGS = ResultType('logs')
+        ResultType.REPORTS = ResultType('reports')
+        ResultType.TMP = ResultType('.tmp')
+
+    def __init__(self, name):  # type: (str) -> None
+        self.name = name
+
+    @property
+    def relative_path(self):  # type: () -> str
+        """The content relative path to the results."""
+        return os.path.join(data_context().results_relative, self.name)
+
+    @property
+    def path(self):  # type: () -> str
+        """The absolute path to the results."""
+        return os.path.join(data_context().results, self.name)
+
+    def __str__(self):  # type: () -> str
+        return self.name
+
+
+# noinspection PyProtectedMember
+ResultType._populate()  # pylint: disable=protected-access
+
+
 class CommonConfig:
     """Configuration common to all commands."""
     def __init__(self, args, command):
@@ -75,6 +119,33 @@ def named_temporary_file(args, prefix, suffix, directory, content):
         yield tempfile_fd.name
 
 
+def write_json_test_results(category, name, content):  # type: (ResultType, str, t.Union[t.List[t.Any], t.Dict[str, t.Any]]) -> None
+    """Write the given json content to the specified test results path, creating directories as needed."""
+    path = os.path.join(category.path, name)
+    write_json_file(path, content, create_directories=True)
+
+
+def write_text_test_results(category, name, content):  # type: (ResultType, str, str) -> None
+    """Write the given text content to the specified test results path, creating directories as needed."""
+    path = os.path.join(category.path, name)
+    write_text_file(path, content, create_directories=True)
+
+
+def write_json_file(path, content, create_directories=False):  # type: (str, t.Union[t.List[t.Any], t.Dict[str, t.Any]], bool) -> None
+    """Write the given json content to the specified path, optionally creating missing directories."""
+    text_content = json.dumps(content, sort_keys=True, indent=4, ensure_ascii=False) + '\n'
+    write_text_file(path, text_content, create_directories=create_directories)
+
+
+def write_text_file(path, content, create_directories=False):  # type: (str, str, bool) -> None
+    """Write the given text content to the specified path, optionally creating missing directories."""
+    if create_directories:
+        make_dirs(os.path.dirname(path))
+
+    with open(to_bytes(path), 'wb') as file:
+        file.write(to_bytes(content))
+
+
 def get_python_path(args, interpreter):
     """
     :type args: TestConfig
@@ -126,8 +197,7 @@ def get_python_path(args, interpreter):
 execv(python, [python] + argv[1:])
 ''' % (interpreter, interpreter)).lstrip()
 
-    with open(injected_interpreter, 'w') as python_fd:
-        python_fd.write(code)
+    write_text_file(injected_interpreter, code)
 
     os.chmod(injected_interpreter, MODE_FILE_EXECUTE)
 
@@ -173,7 +243,7 @@ def get_coverage_environment(args, target_name, version, temp_path, module_cover
         raise Exception('No temp path and no coverage config base path. Check for missing coverage_context usage.')
 
     config_file = os.path.join(coverage_config_base_path, COVERAGE_CONFIG_NAME)
-    coverage_file = os.path.join(coverage_output_base_path, COVERAGE_OUTPUT_NAME, '%s=%s=%s=%s=coverage' % (
+    coverage_file = os.path.join(coverage_output_base_path, ResultType.COVERAGE.name, '%s=%s=%s=%s=coverage' % (
         args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version))
 
     if not args.explain and not os.path.exists(config_file):
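Illustration (not part of the commit): a standalone mimic of how the new helpers compose, with the results root assumed to be test/results; the real implementations resolve it through data_context():

    import json
    import os

    RESULTS_ROOT = 'test/results'  # assumed; really data_context().results

    def write_text_file(path, content, create_directories=False):
        # create missing parent directories, then write bytes
        if create_directories and not os.path.isdir(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))
        with open(path, 'wb') as file:
            file.write(content.encode('utf-8'))

    def write_json_test_results(category_name, name, content):
        # serialize deterministically and append a trailing newline
        path = os.path.join(RESULTS_ROOT, category_name, name)
        write_text_file(path, json.dumps(content, sort_keys=True, indent=4) + '\n', create_directories=True)

    write_json_test_results('bot', 'example.json', {'ok': True})
    # -> creates test/results/bot/ if needed and writes test/results/bot/example.json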
@@ -94,7 +94,13 @@ def fail(message, output):  # type: (str, str) -> NoReturn
 </testsuites>
 ''' % (timestamp, message, output)
 
-    with open('test/results/junit/check-matrix.xml', 'w') as junit_fd:
+    path = 'shippable/testresults/check-matrix.xml'
+    dir_path = os.path.dirname(path)
+
+    if not os.path.exists(dir_path):
+        os.makedirs(dir_path)
+
+    with open(path, 'w') as junit_fd:
         junit_fd.write(xml.lstrip())
 
     sys.stderr.write(message + '\n')
@@ -73,55 +73,64 @@ find lib/ansible/modules -type d -empty -print -delete
 
 function cleanup
 {
-    if find test/results/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
-        # for complete on-demand coverage generate a report for all files with no coverage on the "other" job so we only have one copy
-        if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/1" ]; then
-            stub="--stub"
-        else
-            stub=""
-        fi
+    if [ -d test/results/coverage/ ]; then
+        if find test/results/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
+            # for complete on-demand coverage generate a report for all files with no coverage on the "other" job so we only have one copy
+            if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/1" ]; then
+                stub="--stub"
+            else
+                stub=""
+            fi
 
-        # use python 3.7 for coverage to avoid running out of memory during coverage xml processing
-        # only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job
-        virtualenv --python /usr/bin/python3.7 ~/ansible-venv
-        set +ux
-        . ~/ansible-venv/bin/activate
-        set -ux
+            # use python 3.7 for coverage to avoid running out of memory during coverage xml processing
+            # only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job
+            virtualenv --python /usr/bin/python3.7 ~/ansible-venv
+            set +ux
+            . ~/ansible-venv/bin/activate
+            set -ux
 
-        # shellcheck disable=SC2086
-        ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
-        cp -a test/results/reports/coverage=*.xml shippable/codecoverage/
+            # shellcheck disable=SC2086
+            ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
+            cp -a test/results/reports/coverage=*.xml shippable/codecoverage/
 
-        # upload coverage report to codecov.io only when using complete on-demand coverage
-        if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then
-            for file in test/results/reports/coverage=*.xml; do
-                flags="${file##*/coverage=}"
-                flags="${flags%-powershell.xml}"
-                flags="${flags%.xml}"
-                # remove numbered component from stub files when converting to tags
-                flags="${flags//stub-[0-9]*/stub}"
-                flags="${flags//=/,}"
-                flags="${flags//[^a-zA-Z0-9_,]/_}"
+            # upload coverage report to codecov.io only when using complete on-demand coverage
+            if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then
+                for file in test/results/reports/coverage=*.xml; do
+                    flags="${file##*/coverage=}"
+                    flags="${flags%-powershell.xml}"
+                    flags="${flags%.xml}"
+                    # remove numbered component from stub files when converting to tags
+                    flags="${flags//stub-[0-9]*/stub}"
+                    flags="${flags//=/,}"
+                    flags="${flags//[^a-zA-Z0-9_,]/_}"
 
-                bash <(curl -s https://codecov.io/bash) \
-                    -f "${file}" \
-                    -F "${flags}" \
-                    -n "${test}" \
-                    -t 83cd8957-dc76-488c-9ada-210dcea51633 \
-                    -X coveragepy \
-                    -X gcov \
-                    -X fix \
-                    -X search \
-                    -X xcode \
-                    || echo "Failed to upload code coverage report to codecov.io: ${file}"
-            done
+                    bash <(curl -s https://codecov.io/bash) \
+                        -f "${file}" \
+                        -F "${flags}" \
+                        -n "${test}" \
+                        -t 83cd8957-dc76-488c-9ada-210dcea51633 \
+                        -X coveragepy \
+                        -X gcov \
+                        -X fix \
+                        -X search \
+                        -X xcode \
+                        || echo "Failed to upload code coverage report to codecov.io: ${file}"
+                done
+            fi
         fi
     fi
 
-    rmdir shippable/testresults/
-    cp -a test/results/junit/ shippable/testresults/
-    cp -a test/results/data/ shippable/testresults/
-    cp -aT test/results/bot/ shippable/testresults/
+    if [ -d test/results/junit/ ]; then
+        cp -a test/results/junit/ shippable/testresults/
+    fi
+
+    if [ -d test/results/data/ ]; then
+        cp -a test/results/data/ shippable/testresults/
+    fi
+
+    if [ -d test/results/bot/ ]; then
+        cp -aT test/results/bot/ shippable/testresults/
+    fi
 }
 
 trap cleanup EXIT