"""Combine code coverage files."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json

from ... import types as t

from ...target import (
    walk_compile_targets,
    walk_powershell_targets,
)

from ...io import (
    read_text_file,
)

from ...util import (
    ANSIBLE_TEST_DATA_ROOT,
    display,
    ApplicationError,
)

from ...util_common import (
    ResultType,
    run_command,
    write_json_file,
    write_json_test_results,
)

from ...executor import (
    Delegate,
)

from ...data import (
    data_context,
)

from . import (
    enumerate_python_arcs,
    enumerate_powershell_lines,
    get_collection_path_regexes,
    get_all_coverage_files,
    get_python_coverage_files,
    get_python_modules,
    get_powershell_coverage_files,
    initialize_coverage,
    COVERAGE_OUTPUT_FILE_NAME,
    COVERAGE_GROUPS,
    CoverageConfig,
    PathChecker,
)


def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageCombineConfig
    :rtype: list[str]
    """
    if args.delegate:
        if args.docker or args.remote:
            paths = get_all_coverage_files()
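            # exported coverage files are identified by name: the last '='-delimited field
            # must start with 'coverage.combined' (the suffix applied when exporting below)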
            exported_paths = [path for path in paths if os.path.basename(path).split('=')[-1].split('.')[:2] == ['coverage', 'combined']]

            if not exported_paths:
                raise ExportedCoverageDataNotFound()

            pairs = [(path, os.path.relpath(path, data_context().content.root)) for path in exported_paths]

            def coverage_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
                """Add the coverage files to the payload file list."""
                display.info('Including %d exported coverage file(s) in payload.' % len(pairs), verbosity=1)
                files.extend(pairs)

            data_context().register_payload_callback(coverage_callback)
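
        # delegation re-runs this command in the target environment; the callback above
        # ensures the exported coverage files are included in the delegation payload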
        raise Delegate()

    paths = _command_coverage_combine_powershell(args) + _command_coverage_combine_python(args)

    for path in paths:
        display.info('Generated combined output: %s' % path, verbosity=1)

    return paths


class ExportedCoverageDataNotFound(ApplicationError):
    """Exception raised when exported coverage data is required but none is present."""
    def __init__(self):
        super(ExportedCoverageDataNotFound, self).__init__(
            'Coverage data must be exported before processing with the `--docker` or `--remote` option.\n'
            'Export coverage with `ansible-test coverage combine` using the `--export` option.\n'
            'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path)


def _command_coverage_combine_python(args):
    """
    :type args: CoverageCombineConfig
    :rtype: list[str]
    """
    coverage = initialize_coverage(args)

    modules = get_python_modules()

    coverage_files = get_python_coverage_files()

    counter = 0
    sources = _get_coverage_targets(args, walk_compile_targets)
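    # python stub groups start each source file with an empty set of executed arcs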
|
2021-05-17 19:00:06 +02:00
|
|
|
groups = _build_stub_groups(args, sources, lambda s: dict((name, set()) for name in s))

    collection_search_re, collection_sub_re = get_collection_path_regexes()

    for coverage_file in coverage_files:
        counter += 1
        display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)

        group = get_coverage_group(args, coverage_file)

        if group is None:
            display.warning('Unexpected name for coverage file: %s' % coverage_file)
            continue

        for filename, arcs in enumerate_python_arcs(coverage_file, coverage, modules, collection_search_re, collection_sub_re):
            if args.export:
                filename = os.path.relpath(filename)  # exported paths must be relative since absolute paths may differ between systems

            if group not in groups:
                groups[group] = {}

            arc_data = groups[group]

            if filename not in arc_data:
                arc_data[filename] = set()

            arc_data[filename].update(arcs)

    output_files = []
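
    # when exporting, files are written per-group into the export directory with a
    # '=coverage.combined' suffix; otherwise the combined data goes to the standard results path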
    if args.export:
        coverage_file = os.path.join(args.export, '')
        suffix = '=coverage.combined'
    else:
        coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME)
        suffix = ''

    path_checker = PathChecker(args, collection_search_re)

    for group in sorted(groups):
        arc_data = groups[group]

        updated = coverage.CoverageData()

        for filename in arc_data:
            if not path_checker.check_path(filename):
                continue

            updated.add_arcs({filename: list(arc_data[filename])})
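
        # with --all, register every source file, including those with no collected
        # coverage, so they show up in reports with zero coverage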
        if args.all:
            updated.add_arcs(dict((source[0], []) for source in sources))

        if not args.explain:
            output_file = coverage_file + group + suffix
            updated.write_file(output_file)  # always write files to make sure stale files do not exist

            if updated:
                # only report files which are non-empty to prevent coverage from reporting errors
                output_files.append(output_file)

    path_checker.report()

    return sorted(output_files)


def _command_coverage_combine_powershell(args):
    """
    :type args: CoverageCombineConfig
    :rtype: list[str]
    """
    coverage_files = get_powershell_coverage_files()
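
    # the stub script reports, for each given source file, the lines to record;
    # each line is recorded with a hit count of zero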
    def _default_stub_value(source_paths):
        cmd = ['pwsh', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'coverage_stub.ps1')]
        cmd.extend(source_paths)

        stubs = json.loads(run_command(args, cmd, capture=True, always=True)[0])

        return dict((d['Path'], dict((line, 0) for line in d['Lines'])) for d in stubs)

    counter = 0
    sources = _get_coverage_targets(args, walk_powershell_targets)
    groups = _build_stub_groups(args, sources, _default_stub_value)

    collection_search_re, collection_sub_re = get_collection_path_regexes()

    for coverage_file in coverage_files:
        counter += 1
        display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)

        group = get_coverage_group(args, coverage_file)

        if group is None:
            display.warning('Unexpected name for coverage file: %s' % coverage_file)
            continue

        for filename, hits in enumerate_powershell_lines(coverage_file, collection_search_re, collection_sub_re):
            if args.export:
                filename = os.path.relpath(filename)  # exported paths must be relative since absolute paths may differ between systems

            if group not in groups:
                groups[group] = {}

            coverage_data = groups[group]

            if filename not in coverage_data:
                coverage_data[filename] = {}

            file_coverage = coverage_data[filename]

            for line_no, hit_count in hits.items():
                file_coverage[line_no] = file_coverage.get(line_no, 0) + hit_count

    output_files = []

    path_checker = PathChecker(args)

    for group in sorted(groups):
        coverage_data = dict((filename, data) for filename, data in groups[group].items() if path_checker.check_path(filename))
        if args.all:
            # add 0 line entries for files not already in coverage_data;
            # _default_stub_value expects a list of source paths, so collect the missing paths first
            missing_sources = [source for source, source_line_count in sources if source not in coverage_data]
            coverage_data.update(_default_stub_value(missing_sources))

        if not args.explain:
            if args.export:
                output_file = os.path.join(args.export, group + '=coverage.combined')
                write_json_file(output_file, coverage_data, formatted=False)
                output_files.append(output_file)
                continue

            output_file = COVERAGE_OUTPUT_FILE_NAME + group + '-powershell'

            write_json_test_results(ResultType.COVERAGE, output_file, coverage_data, formatted=False)

            output_files.append(os.path.join(ResultType.COVERAGE.path, output_file))

    path_checker.report()

    return sorted(output_files)


def _get_coverage_targets(args, walk_func):
    """
    :type args: CoverageCombineConfig
    :type walk_func: Func
    :rtype: list[tuple[str, int]]
    """
    sources = []

    if args.all or args.stub:
        # excludes symlinks of regular files to avoid reporting on the same file multiple times
        # in the future it would be nice to merge any coverage for symlinks into the real files
        for target in walk_func(include_symlinks=False):
            target_path = os.path.abspath(target.path)

            target_lines = len(read_text_file(target_path).splitlines())

            sources.append((target_path, target_lines))

        sources.sort()

    return sources


def _build_stub_groups(args, sources, default_stub_value):
    """
    :type args: CoverageCombineConfig
    :type sources: List[tuple[str, int]]
    :type default_stub_value: Func[List[str]]
    :rtype: dict
    """
    groups = {}

    if args.stub:
        stub_group = []
        stub_groups = [stub_group]
        stub_line_limit = 500000
        stub_line_count = 0
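
        # split the sources into chunks so that no single stub group covers more than stub_line_limit lines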
        for source, source_line_count in sources:
            stub_group.append(source)
            stub_line_count += source_line_count

            if stub_line_count > stub_line_limit:
                stub_line_count = 0
                stub_group = []
                stub_groups.append(stub_group)

        for stub_index, stub_group in enumerate(stub_groups):
            if not stub_group:
                continue

            groups['=stub-%02d' % (stub_index + 1)] = default_stub_value(stub_group)

    return groups


def get_coverage_group(args, coverage_file):
    """
    :type args: CoverageCombineConfig
    :type coverage_file: str
    :rtype: str | None
    """
    parts = os.path.basename(coverage_file).split('=', 4)

    # noinspection PyTypeChecker
    if len(parts) != 5 or not parts[4].startswith('coverage.'):
        return None

    names = dict(
        command=parts[0],
        target=parts[1],
        environment=parts[2],
        version=parts[3],
    )

    export_names = dict(
        version=parts[3],
    )
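
    # build a group key such as '=integration=python-3.6' from the parts selected by --group-by;
    # when exporting, parts other than the version collapse to 'various'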
    group = ''

    for part in COVERAGE_GROUPS:
        if part in args.group_by:
            group += '=%s' % names[part]
        elif args.export:
            group += '=%s' % export_names.get(part, 'various')

    if args.export:
        group = group.lstrip('=')

    return group


class CoverageCombineConfig(CoverageConfig):
    """Configuration for the coverage combine command."""
    def __init__(self, args):  # type: (t.Any) -> None
        super(CoverageCombineConfig, self).__init__(args)

        self.group_by = frozenset(args.group_by) if args.group_by else frozenset()  # type: t.FrozenSet[str]
        self.all = args.all  # type: bool
        self.stub = args.stub  # type: bool

        # only available to coverage combine
        self.export = args.export if 'export' in args else None  # type: t.Optional[str]