Allow passing a `skipPercent` option
Currently, the default is 5%. As a rough guide to the trade-off: a skipPercent of 0 gives you 0% time savings, 2.5 gives you 29%, 5 gives you 38%, 10 gives you 50%, and 20 gives you 65%.
This commit is contained in:
parent
a852f2feea
commit
3ef953a819
|
@ -429,6 +429,7 @@ task("runtests-parallel").flags = {
|
|||
" --workers=<number>": "The number of parallel workers to use.",
|
||||
" --timeout=<ms>": "Overrides the default test timeout.",
|
||||
" --built": "Compile using the built version of the compiler.",
|
||||
" --skipPercent=<number>": "Skip expensive tests with <percent> chance to miss an edit.",
|
||||
};
|
||||
|
||||
task("diff", () => exec(getDiffTool(), [refBaseline, localBaseline], { ignoreExitCode: true }));
|
||||
|
|
|
@ -31,6 +31,7 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode,
|
|||
const inspect = cmdLineOptions.inspect;
|
||||
const runners = cmdLineOptions.runners;
|
||||
const light = cmdLineOptions.light;
|
||||
const skipPercent = cmdLineOptions.skipPercent;
|
||||
const stackTraceLimit = cmdLineOptions.stackTraceLimit;
|
||||
const testConfigFile = "test.config";
|
||||
const failed = cmdLineOptions.failed;
|
||||
|
@ -62,8 +63,8 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode,
|
|||
testTimeout = 400000;
|
||||
}
|
||||
|
||||
if (tests || runners || light || testTimeout || taskConfigsFolder || keepFailed) {
|
||||
writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, testTimeout, keepFailed);
|
||||
if (tests || runners || light || testTimeout || taskConfigsFolder || keepFailed || skipPercent) {
|
||||
writeTestConfigFile(tests, runners, light, skipPercent, taskConfigsFolder, workerCount, stackTraceLimit, testTimeout, keepFailed);
|
||||
}
|
||||
|
||||
const colors = cmdLineOptions.colors;
|
||||
|
@ -158,17 +159,19 @@ exports.cleanTestDirs = cleanTestDirs;
|
|||
* @param {string} tests
|
||||
* @param {string} runners
|
||||
* @param {boolean} light
|
||||
* @param {string} skipPercent
|
||||
* @param {string} [taskConfigsFolder]
|
||||
* @param {string | number} [workerCount]
|
||||
* @param {string} [stackTraceLimit]
|
||||
* @param {string | number} [timeout]
|
||||
* @param {boolean} [keepFailed]
|
||||
*/
|
||||
function writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, timeout, keepFailed) {
|
||||
function writeTestConfigFile(tests, runners, light, skipPercent, taskConfigsFolder, workerCount, stackTraceLimit, timeout, keepFailed) {
|
||||
const testConfigContents = JSON.stringify({
|
||||
test: tests ? [tests] : undefined,
|
||||
runners: runners ? runners.split(",") : undefined,
|
||||
light,
|
||||
skipPercent,
|
||||
workerCount,
|
||||
stackTraceLimit,
|
||||
taskConfigsFolder,
|
||||
|
|
|
@ -16,8 +16,6 @@ namespace Harness.Parallel.Host {
|
|||
const { fork } = require("child_process") as typeof import("child_process");
|
||||
const { statSync, readFileSync } = require("fs") as typeof import("fs");
|
||||
|
||||
const editSkipRate = 0.05
|
||||
|
||||
// NOTE: paths for module and types for FailedTestReporter _do not_ line up due to our use of --outFile for run.js
|
||||
// tslint:disable-next-line:variable-name
|
||||
const FailedTestReporter = require(path.resolve(__dirname, "../../scripts/failed-tests")) as typeof import("../../../scripts/failed-tests");
|
||||
|
@ -194,7 +192,7 @@ namespace Harness.Parallel.Host {
|
|||
return `tsrunner-${runner}://${test}`;
|
||||
}
|
||||
|
||||
function skipCostlyTests(tasks: Task[], editSkipRate: number) {
|
||||
function skipCostlyTests(tasks: Task[]) {
|
||||
if (statSync('.test-cost.json')) {
|
||||
const costs = JSON.parse(readFileSync('.test-cost.json', 'utf8')) as {
|
||||
totalTime: number,
|
||||
|
@ -205,12 +203,12 @@ namespace Harness.Parallel.Host {
|
|||
let skippedTests = new Set<string>();
|
||||
let skippedTime = 0;
|
||||
let i = 0;
|
||||
for (; i < costs.data.length && (skippedEdits / costs.totalEdits) < editSkipRate; i++) {
|
||||
for (; i < costs.data.length && (skippedEdits / costs.totalEdits) < (skipPercent / 100); i++) {
|
||||
skippedEdits += costs.data[i].edits;
|
||||
skippedTime += costs.data[i].time;
|
||||
skippedTests.add(costs.data[i].name);
|
||||
}
|
||||
console.log(`Skipped ${i} expensive tests; estimated time savings of ${(skippedTime / costs.totalTime * 100).toFixed(2)}% with ${(editSkipRate * 100).toFixed(2)}% chance of missing a test.`)
|
||||
console.log(`Skipped ${i} expensive tests; estimated time savings of ${(skippedTime / costs.totalTime * 100).toFixed(2)}% with ${skipPercent.toFixed(2)}% chance of missing a test.`)
|
||||
return tasks.filter(t => !skippedTests.has(t.file));
|
||||
}
|
||||
else {
|
||||
|
@ -219,7 +217,7 @@ namespace Harness.Parallel.Host {
|
|||
}
|
||||
}
|
||||
|
||||
function startDelayed(perfData: { [testHash: string]: number } | undefined, totalCost: number, editSkipRate: number) {
|
||||
function startDelayed(perfData: { [testHash: string]: number } | undefined, totalCost: number) {
|
||||
console.log(`Discovered ${tasks.length} unittest suites` + (newTasks.length ? ` and ${newTasks.length} new suites.` : "."));
|
||||
console.log("Discovering runner-based tests...");
|
||||
const discoverStart = +(new Date());
|
||||
|
@ -258,7 +256,7 @@ namespace Harness.Parallel.Host {
|
|||
}
|
||||
tasks.sort((a, b) => a.size - b.size);
|
||||
tasks = tasks.concat(newTasks);
|
||||
tasks = skipCostlyTests(tasks, editSkipRate);
|
||||
tasks = skipCostlyTests(tasks);
|
||||
const batchCount = workerCount;
|
||||
const packfraction = 0.9;
|
||||
const chunkSize = 1000; // ~1KB or 1s for sending batches near the end of a test
|
||||
|
@ -653,6 +651,6 @@ namespace Harness.Parallel.Host {
|
|||
}
|
||||
|
||||
// tslint:disable-next-line:ban
|
||||
setTimeout(() => startDelayed(perfData, totalCost, editSkipRate), 0); // Do real startup on next tick, so all unit tests have been collected
|
||||
setTimeout(() => startDelayed(perfData, totalCost), 0); // Do real startup on next tick, so all unit tests have been collected
|
||||
}
|
||||
}
|
||||
|
|
|
@ -63,6 +63,7 @@ let runUnitTests: boolean | undefined;
|
|||
let stackTraceLimit: number | "full" | undefined;
|
||||
let noColors = false;
|
||||
let keepFailed = false;
|
||||
let skipPercent = 5;
|
||||
|
||||
interface TestConfig {
|
||||
light?: boolean;
|
||||
|
@ -76,6 +77,7 @@ interface TestConfig {
|
|||
noColors?: boolean;
|
||||
timeout?: number;
|
||||
keepFailed?: boolean;
|
||||
skipPercent?: number;
|
||||
}
|
||||
|
||||
interface TaskSet {
|
||||
|
@ -107,6 +109,9 @@ function handleTestConfig() {
|
|||
if (testConfig.keepFailed) {
|
||||
keepFailed = true;
|
||||
}
|
||||
if (testConfig.skipPercent !== undefined) {
|
||||
skipPercent = testConfig.skipPercent;
|
||||
}
|
||||
|
||||
if (testConfig.stackTraceLimit === "full") {
|
||||
(<any>Error).stackTraceLimit = Infinity;
|
||||
|
|
Loading…
Reference in a new issue