Fix ansible-test coverage stub memory usage. (#53652)
Split coverage stub files into multiple parts based on the number of lines in files included in each stub. This will keep memory usage down to avoid hanging on Shippable.
parent 887ab35656
commit 8fa294a33a
2 changed files with 24 additions and 1 deletion
@@ -55,7 +55,28 @@ def command_coverage_combine(args):
     sources = []
 
     if args.stub:
-        groups['=stub'] = dict((source, set()) for source in sources)
+        stub_group = []
+        stub_groups = [stub_group]
+        stub_line_limit = 500000
+        stub_line_count = 0
+
+        for source in sources:
+            with open(source, 'r') as source_fd:
+                source_line_count = len(source_fd.read().splitlines())
+
+            stub_group.append(source)
+            stub_line_count += source_line_count
+
+            if stub_line_count > stub_line_limit:
+                stub_line_count = 0
+                stub_group = []
+                stub_groups.append(stub_group)
+
+        for stub_index, stub_group in enumerate(stub_groups):
+            if not stub_group:
+                continue
+
+            groups['=stub-%02d' % (stub_index + 1)] = dict((source, set()) for source in stub_group)
 
     for coverage_file in coverage_files:
         counter += 1
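The new grouping is greedy: each source is appended to the current stub group
before the running line count is checked, so a group can overshoot the
500,000-line limit by at most one file, and a trailing empty group is skipped
when the numbered group names are assigned. A minimal standalone sketch of the
same strategy (hypothetical helper name, toy limit, and fake (path, line_count)
pairs instead of reading real source files):

    def split_into_stub_groups(line_counts, stub_line_limit=10):
        """Greedily pack sources into groups, starting a new group once the
        current group's running line count exceeds stub_line_limit."""
        stub_group = []
        stub_groups = [stub_group]
        stub_line_count = 0

        for source, source_line_count in line_counts:
            stub_group.append(source)
            stub_line_count += source_line_count

            if stub_line_count > stub_line_limit:
                stub_line_count = 0
                stub_group = []
                stub_groups.append(stub_group)

        # drop any trailing empty group, as the `if not stub_group: continue`
        # check in the commit does
        return [group for group in stub_groups if group]

    print(split_into_stub_groups([('a.py', 4), ('b.py', 8), ('c.py', 3), ('d.py', 9)]))
    # -> [['a.py', 'b.py'], ['c.py', 'd.py']]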
@@ -90,6 +90,8 @@ function cleanup
         for file in test/results/reports/coverage=*.xml; do
             flags="${file##*/coverage=}"
             flags="${flags%.xml}"
+            # remove numbered component from stub files when converting to tags
+            flags="${flags//stub-[0-9]*/stub}"
             flags="${flags//=/,}"
             flags="${flags//[^a-zA-Z0-9_,]/_}"
 
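For illustration (not part of the commit), the same tag derivation expressed in
Python, with regular expressions standing in for bash pattern substitution; the
report paths are hypothetical, but the stub parts follow the '=stub-%02d'
naming introduced above:

    import re

    def file_to_flags(path):
        flags = path.rsplit('/coverage=', 1)[-1]        # ${file##*/coverage=}
        if flags.endswith('.xml'):                      # ${flags%.xml}
            flags = flags[:-len('.xml')]
        flags = re.sub(r'stub-[0-9].*', 'stub', flags)  # ${flags//stub-[0-9]*/stub}
        flags = flags.replace('=', ',')                 # ${flags//=/,}
        flags = re.sub(r'[^a-zA-Z0-9_,]', '_', flags)   # ${flags//[^a-zA-Z0-9_,]/_}
        return flags

    print(file_to_flags('test/results/reports/coverage=stub-01.xml'))  # -> stub
    print(file_to_flags('test/results/reports/coverage=stub-02.xml'))  # -> stub

Every numbered stub report maps back to the single 'stub' tag, so splitting the
stub group into parts does not multiply the tags in the published reports.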