diff --git a/.circleci/config.yml b/.circleci/config.yml
index f3474cfcd3..38c0968f0e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -17,6 +17,10 @@ run_env_change: &run_env_change
sudo mkdir -p /tmp/core_dumps
sudo chmod a+rwx /tmp/core_dumps
+ # Make a place for JUnit tests to live.
+ sudo mkdir -p /tmp/results/junit
+ sudo chmod -R a+rwx /tmp/results/
+
# Set the pattern for core dumps, so we can find them.
echo kernel.core_pattern="/tmp/core_dumps/core.%e.%p.%h.%t" | \
sudo tee -a /etc/sysctl.conf
@@ -167,6 +171,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/0.xml \
--with-tag "custom-warehouse"
no_output_timeout: 20m
- run:
@@ -174,6 +179,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
@@ -195,6 +204,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/1.xml \
--file '^[a-b]|^c[a-n]|^co[a-l]|^compiler-plugins' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
@@ -203,6 +213,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
@@ -224,6 +238,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/2.xml \
--file "^co[n-z]|^c[p-z]|^[d-k]" \
--without-tag "custom-warehouse"
no_output_timeout: 20m
@@ -232,6 +247,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
@@ -253,6 +272,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/3.xml \
--file '^[l-o]' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
@@ -261,6 +281,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
@@ -282,6 +306,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/4.xml \
--file '^p' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
@@ -290,6 +315,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
@@ -311,6 +340,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/5.xml \
--file '^run' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
@@ -319,6 +349,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
@@ -340,6 +374,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/6.xml \
--file '^r(?!un)|^s' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
@@ -348,6 +383,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
@@ -369,6 +408,7 @@ jobs:
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
+ --junit /tmp/results/junit/7.xml \
--file '^[t-z]|^command-line' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
@@ -377,6 +417,10 @@ jobs:
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
+ - store_test_results:
+ path: /tmp/results
+ - store_artifacts:
+ path: /tmp/results
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
diff --git a/tools/cli/commands.js b/tools/cli/commands.js
index 09b4387dcb..4d76c2b420 100644
--- a/tools/cli/commands.js
+++ b/tools/cli/commands.js
@@ -2079,6 +2079,7 @@ main.registerCommand({
'without-tag': { type: String },
// Only run tests with this tag
'with-tag': { type: String },
+ junit: { type: String },
},
hidden: true,
catalogRefresh: new catalog.Refresh.Never()
@@ -2175,6 +2176,7 @@ main.registerCommand({
// other options
historyLines: options.history,
clients: clients,
+ junit: options.junit && files.pathResolve(options.junit),
'without-tag': options['without-tag'],
'with-tag': options['with-tag']
});
diff --git a/tools/tool-testing/selftest.js b/tools/tool-testing/selftest.js
index 1bf2a21a6b..4935d531d0 100644
--- a/tools/tool-testing/selftest.js
+++ b/tools/tool-testing/selftest.js
@@ -1,3 +1,4 @@
+import { inspect } from 'util';
import { makeFulfillablePromise } from '../utils/fiber-helpers.js';
import { spawn, execFile } from 'child_process';
import * as files from '../fs/files.js';
@@ -1605,6 +1606,7 @@ class Test {
this.fileHash = options.fileHash;
this.tags = options.tags || [];
this.f = options.func;
+ this.durationMs = null;
this.cleanupHandlers = [];
}
@@ -1797,6 +1799,16 @@ function getFilteredTests(options) {
return new TestList(allTests, tagsToSkip, tagsToMatch, testState);
};
+function groupTestsByFile(tests) {
+ const grouped = {};
+ tests.forEach(test => {
+ grouped[test.file] = grouped[test.file] || [];
+ grouped[test.file].push(test);
+ });
+
+ return grouped;
+}
+
// A TestList is the result of getFilteredTests. It holds the original
// list of all tests, the filtered list, and stats on how many tests
// were skipped (see generateSkipReport).
@@ -1856,8 +1868,120 @@ class TestList {
// Mark a test's file as having failures. This prevents
// saveTestState from saving its hash as a potentially
// "unchanged" file to be skipped in a future run.
- notifyFailed(test) {
+ notifyFailed(test, failureObject) {
+ // Mark the file that this test lives in as having failures.
this.fileInfo[test.file].hasFailures = true;
+
+ // Mark that the specific test failed.
+ test.failed = true;
+
+ // If there is a failure object, store that for potential output.
+ if (failureObject) {
+ test.failureObject = failureObject;
+ }
+ }
+
+ saveJUnitOutput(path) {
+ const grouped = groupTestsByFile(this.filteredTests);
+
+  // We'll form a collection of "testsuites"
+ const testSuites = [];
+
+  const attrSafe = attr => (attr || "").replace(/"/g, '&quot;');
+ const durationForOutput = durationMs => durationMs / 1000;
+
+ // Each file is a testsuite.
+ Object.keys(grouped).forEach((file) => {
+ const testCases = [];
+
+ let countError = 0;
+ let countFailure = 0;
+
+ // Each test is a "testcase".
+ grouped[file].forEach((test) => {
+ const testCaseAttrs = [
+ `name="${attrSafe(test.name)}"`,
+ ];
+
+ if (test.durationMs) {
+ testCaseAttrs.push(`time="${durationForOutput(test.durationMs)}"`);
+ }
+
+ const testCaseAttrsString = testCaseAttrs.join(' ');
+
+ if (test.failed) {
+ let failureElement = "";
+
+ if (test.failureObject instanceof TestFailure) {
+ countFailure++;
+
+ failureElement = [
+            `<failure message="${attrSafe(test.failureObject.reason)}">`,
+            `<![CDATA[${inspect(test.failureObject.details)}]]>`,
+            '</failure>',
+ ].join('\n');
+ } else if (test.failureObject && test.failureObject.stack) {
+ countError++;
+
+ failureElement = [
+            '<error>',
+            `<![CDATA[${test.failureObject.stack}]]>`,
+            '</error>',
+ ].join('\n');
+ } else {
+ countError++;
+
+          failureElement = '<error/>';
+ }
+
+ testCases.push(
+ [
+            `<testcase ${testCaseAttrsString}>`,
+            failureElement,
+            '</testcase>',
+ ].join('\n'),
+ );
+ } else {
+        testCases.push(`<testcase ${testCaseAttrsString}/>`);
+ }
+ });
+
+ const testSuiteAttrs = [
+ `name="${file}"`,
+ `tests="${testCases.length}"`,
+ `failures="${countFailure}"`,
+ `errors="${countError}"`,
+ `time="${durationForOutput(this.durationMs)}"`,
+ ];
+
+ const testSuiteAttrsString = testSuiteAttrs.join(' ');
+
+ testSuites.push(
+ [
+        `<testsuite ${testSuiteAttrsString}>`,
+        testCases.join('\n'),
+        '</testsuite>',
+ ].join('\n'),
+ );
+ });
+
+  const xmlHeader = '<?xml version="1.0" encoding="UTF-8"?>';
+
+ const testSuitesString = testSuites.join('\n');
+
+ files.writeFile(path,
+ [
+ xmlHeader,
+      '<testsuites>',
+      testSuitesString,
+      '</testsuites>',
+ ].join('\n'),
+ 'utf8',
+ );
}
// If this TestList was constructed with a testState,
@@ -1933,17 +2057,11 @@ export function listTests(options) {
return;
}
- const testsGroupedByFile = {};
- testList.filteredTests.forEach(filteredTest => {
- testsGroupedByFile[filteredTest.file] =
- testsGroupedByFile[filteredTest.file] || [];
+ const grouped = groupTestsByFile(testList.filteredTests);
- testsGroupedByFile[filteredTest.file].push(filteredTest);
- });
-
- Object.keys(testsGroupedByFile).forEach((file) => {
+ Object.keys(grouped).forEach((file) => {
Console.rawInfo(file + ':\n');
- testsGroupedByFile[file].forEach((test) => {
+ grouped[file].forEach((test) => {
Console.rawInfo(' - ' + test.name +
(test.tags.length ? ' [' + test.tags.join(' ') + ']'
: '') + '\n');
@@ -1971,6 +2089,8 @@ export function runTests(options) {
return 0;
}
+ testList.startTime = new Date;
+
let totalRun = 0;
const failedTests = [];
@@ -1980,6 +2100,9 @@ export function runTests(options) {
runTest(test);
});
+ testList.endTime = new Date;
+ testList.durationMs = testList.endTime - testList.startTime;
+
function runTest(test, tries = 3) {
let failure = null;
let startTime;
@@ -1997,6 +2120,8 @@ export function runTests(options) {
test.cleanup();
}
+ test.durationMs = +(new Date) - startTime;
+
if (failure) {
Console.error("... fail!", Console.options({ indent: 2 }));
@@ -2009,9 +2134,6 @@ export function runTests(options) {
return runTest(test, tries);
}
- failedTests.push(test);
- testList.notifyFailed(test);
-
if (failure instanceof TestFailure) {
const frames = parseStackParse(failure).outsideFiber;
const relpath = files.pathRelative(files.getCurrentToolsDir(),
@@ -2064,16 +2186,22 @@ export function runTests(options) {
} else {
Console.rawError(" => Test threw exception: " + failure.stack + "\n");
}
+
+ failedTests.push(test);
+ testList.notifyFailed(test, failure);
} else {
- const durationMs = +(new Date) - startTime;
Console.error(
- "... ok (" + durationMs + " ms)",
+ "... ok (" + test.durationMs + " ms)",
Console.options({ indent: 2 }));
}
}
testList.saveTestState();
+ if (options.junit) {
+ testList.saveJUnitOutput(options.junit);
+ }
+
if (totalRun > 0) {
Console.error();
}