mirror of
https://github.com/meteor/meteor.git
synced 2026-05-02 03:01:46 -04:00
Merge branch 'release-1.5.2' into release-1.6
This commit is contained in:
@@ -17,6 +17,10 @@ run_env_change: &run_env_change
|
||||
sudo mkdir -p /tmp/core_dumps
|
||||
sudo chmod a+rwx /tmp/core_dumps
|
||||
|
||||
# Make a place for JUnit tests to live.
|
||||
sudo mkdir -p /tmp/results/junit
|
||||
sudo chmod -R a+rwx /tmp/results/
|
||||
|
||||
# Set the pattern for core dumps, so we can find them.
|
||||
echo kernel.core_pattern="/tmp/core_dumps/core.%e.%p.%h.%t" | \
|
||||
sudo tee -a /etc/sysctl.conf
|
||||
@@ -167,6 +171,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/0.xml \
|
||||
--with-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
- run:
|
||||
@@ -174,6 +179,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
@@ -195,6 +204,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/1.xml \
|
||||
--file '^[a-b]|^c[a-n]|^co[a-l]|^compiler-plugins' \
|
||||
--without-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
@@ -203,6 +213,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
@@ -224,6 +238,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/2.xml \
|
||||
--file "^co[n-z]|^c[p-z]|^[d-k]" \
|
||||
--without-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
@@ -232,6 +247,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
@@ -253,6 +272,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/3.xml \
|
||||
--file '^[l-o]' \
|
||||
--without-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
@@ -261,6 +281,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
@@ -282,6 +306,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/4.xml \
|
||||
--file '^p' \
|
||||
--without-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
@@ -290,6 +315,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
@@ -311,6 +340,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/5.xml \
|
||||
--file '^run' \
|
||||
--without-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
@@ -319,6 +349,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
@@ -340,6 +374,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/6.xml \
|
||||
--file '^r(?!un)|^s' \
|
||||
--without-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
@@ -348,6 +383,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
@@ -369,6 +408,7 @@ jobs:
|
||||
./meteor self-test \
|
||||
--exclude "${SELF_TEST_EXCLUDE}" \
|
||||
--headless \
|
||||
--junit /tmp/results/junit/7.xml \
|
||||
--file '^[t-z]|^command-line' \
|
||||
--without-tag "custom-warehouse"
|
||||
no_output_timeout: 20m
|
||||
@@ -377,6 +417,10 @@ jobs:
|
||||
- save_cache:
|
||||
key: meteor-cache
|
||||
<<: *meteor_cache_dirs
|
||||
- store_test_results:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/results
|
||||
- store_artifacts:
|
||||
path: /tmp/core_dumps
|
||||
- store_artifacts:
|
||||
|
||||
@@ -13,5 +13,5 @@ the official `coffeescript` package can compile CoffeeScript code.
|
||||
Testing the `coffeescript` package also tests this one:
|
||||
|
||||
```bash
|
||||
./meteor test-packages coffeescript
|
||||
./meteor test-packages packages/non-core/coffeescript
|
||||
```
|
||||
|
||||
@@ -13,7 +13,7 @@ if (Error.METEOR_prepareStackTrace) {
|
||||
|
||||
|
||||
// The CompileResult for this CachingCompiler is a {source, sourceMap} object.
|
||||
CoffeeScriptCompiler = class CoffeeScriptCompiler {
|
||||
export class CoffeeScriptCompiler {
|
||||
constructor() {
|
||||
this.babelCompiler = new BabelCompiler({
|
||||
// Prevent Babel from importing helpers from babel-runtime, since
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
Package.describe({
|
||||
name: 'coffeescript-compiler',
|
||||
summary: 'Compiler for CoffeeScript code, supporting the coffeescript package',
|
||||
version: '1.12.7_1' // Tracks version of NPM `coffeescript` module, with _1, _2 etc.
|
||||
// This version of NPM `coffeescript` module, with _1, _2 etc.
|
||||
// If you change this, make sure to also update ../coffeescript/package.js to match.
|
||||
version: '1.12.7_1'
|
||||
});
|
||||
|
||||
Npm.depends({
|
||||
@@ -10,10 +12,10 @@ Npm.depends({
|
||||
});
|
||||
|
||||
Package.onUse(function (api) {
|
||||
api.use('babel-compiler');
|
||||
api.use('ecmascript');
|
||||
api.use('babel-compiler@6.19.4');
|
||||
api.use('ecmascript@0.8.2');
|
||||
|
||||
api.addFiles(['coffeescript-compiler.js'], 'server');
|
||||
api.mainModule('coffeescript-compiler.js', 'server');
|
||||
|
||||
api.export('CoffeeScriptCompiler', 'server');
|
||||
});
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
Package.describe({
|
||||
summary: "Used by the coffeescript package's tests",
|
||||
version: "1.0.8"
|
||||
version: "1.0.9"
|
||||
});
|
||||
|
||||
Package.onUse(function (api) {
|
||||
api.use('coffeescript', ['client', 'server']);
|
||||
api.use('coffeescript@1.12.7', ['client', 'server']);
|
||||
api.export('COFFEESCRIPT_EXPORTED');
|
||||
api.export('COFFEESCRIPT_EXPORTED_ONE_MORE');
|
||||
api.export('COFFEESCRIPT_EXPORTED_WITH_BACKTICKS');
|
||||
|
||||
@@ -55,5 +55,5 @@ to check out the Meteor repo and run `test-packages`.
|
||||
Once you can do that successfully, to test the `coffeescript` package run:
|
||||
|
||||
```bash
|
||||
./meteor test-packages coffeescript
|
||||
./meteor test-packages packages/non-core/coffeescript
|
||||
```
|
||||
|
||||
@@ -1,21 +1,18 @@
|
||||
Package.describe({
|
||||
name: 'coffeescript',
|
||||
summary: 'Javascript dialect with fewer braces and semicolons',
|
||||
// This package version used to track the version of the NPM `coffeescript`
|
||||
// module, but now the Meteor package `coffeescript-compiler` tracks that
|
||||
// version; so in order for this to appear newer than the previous package
|
||||
// version 1.12.6_1, we jump to 10+.
|
||||
version: '1.13.0'
|
||||
// This package version should track the version of the `coffeescript-compiler`
|
||||
// package, because people will likely only have this one added to their apps;
|
||||
// so bumping the version of this package will be how they get newer versions
|
||||
// of `coffeescript-compiler`. If you change this, make sure to also update
|
||||
// ../coffeescript-compiler/package.js to match.
|
||||
version: '1.12.7_1'
|
||||
});
|
||||
|
||||
Package.registerBuildPlugin({
|
||||
name: 'compile-coffeescript',
|
||||
use: ['caching-compiler', 'coffeescript-compiler', 'ecmascript'],
|
||||
sources: ['compile-coffeescript.js'],
|
||||
npmDependencies: {
|
||||
'coffeescript': '1.12.7',
|
||||
'source-map': '0.5.6'
|
||||
}
|
||||
use: ['caching-compiler@1.1.9', 'ecmascript@0.8.2', 'coffeescript-compiler@=1.12.7_1'],
|
||||
sources: ['compile-coffeescript.js']
|
||||
});
|
||||
|
||||
Package.onUse(function (api) {
|
||||
@@ -25,11 +22,11 @@ Package.onUse(function (api) {
|
||||
// BabelCompiler.prototype.processOneFileForTarget for any ES2015+
|
||||
// JavaScript or JavaScript enclosed by backticks, it must provide the
|
||||
// same runtime environment that the 'ecmascript' package provides.
|
||||
// The following api.imply calls should match those in ../ecmascript/package.js,
|
||||
// The following api.imply calls should match those in ../../ecmascript/package.js,
|
||||
// except that coffeescript does not api.imply('modules').
|
||||
api.imply('ecmascript-runtime', 'server');
|
||||
api.imply('babel-runtime');
|
||||
api.imply('promise');
|
||||
api.imply('ecmascript-runtime@0.4.1', 'server');
|
||||
api.imply('babel-runtime@1.0.1');
|
||||
api.imply('promise@0.8.9');
|
||||
});
|
||||
|
||||
Package.onTest(function (api) {
|
||||
|
||||
@@ -2136,6 +2136,7 @@ main.registerCommand({
|
||||
'without-tag': { type: String },
|
||||
// Only run tests with this tag
|
||||
'with-tag': { type: String },
|
||||
junit: { type: String },
|
||||
},
|
||||
hidden: true,
|
||||
catalogRefresh: new catalog.Refresh.Never()
|
||||
@@ -2232,6 +2233,7 @@ main.registerCommand({
|
||||
// other options
|
||||
historyLines: options.history,
|
||||
clients: clients,
|
||||
junit: options.junit && files.pathResolve(options.junit),
|
||||
'without-tag': options['without-tag'],
|
||||
'with-tag': options['with-tag']
|
||||
});
|
||||
|
||||
@@ -584,6 +584,10 @@ class Console extends ConsoleBase {
|
||||
});
|
||||
}
|
||||
|
||||
isInteractive() {
|
||||
return !this._headless;
|
||||
}
|
||||
|
||||
setPretty(pretty) {
|
||||
// If we're being forced, do nothing.
|
||||
if (FORCE_PRETTY !== undefined) {
|
||||
|
||||
@@ -179,6 +179,15 @@ function authedRpc(options) {
|
||||
delete rpcOptions.printDeployURL;
|
||||
|
||||
if (infoResult.statusCode === 401 && rpcOptions.promptIfAuthFails) {
|
||||
Console.error("Authentication failed or login token expired.");
|
||||
|
||||
if (!Console.isInteractive()) {
|
||||
return {
|
||||
statusCode: 401,
|
||||
errorMessage: "login failed."
|
||||
};
|
||||
}
|
||||
|
||||
// Our authentication didn't validate, so prompt the user to log in
|
||||
// again, and resend the RPC if the login succeeds.
|
||||
var username = Console.readLine({
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { inspect } from 'util';
|
||||
import { makeFulfillablePromise } from '../utils/fiber-helpers.js';
|
||||
import { spawn, execFile } from 'child_process';
|
||||
import * as files from '../fs/files.js';
|
||||
@@ -1611,6 +1612,7 @@ class Test {
|
||||
this.fileHash = options.fileHash;
|
||||
this.tags = options.tags || [];
|
||||
this.f = options.func;
|
||||
this.durationMs = null;
|
||||
this.cleanupHandlers = [];
|
||||
}
|
||||
|
||||
@@ -1803,6 +1805,16 @@ function getFilteredTests(options) {
|
||||
return new TestList(allTests, tagsToSkip, tagsToMatch, testState);
|
||||
};
|
||||
|
||||
function groupTestsByFile(tests) {
|
||||
const grouped = {};
|
||||
tests.forEach(test => {
|
||||
grouped[test.file] = grouped[test.file] || [];
|
||||
grouped[test.file].push(test);
|
||||
});
|
||||
|
||||
return grouped;
|
||||
}
|
||||
|
||||
// A TestList is the result of getFilteredTests. It holds the original
|
||||
// list of all tests, the filtered list, and stats on how many tests
|
||||
// were skipped (see generateSkipReport).
|
||||
@@ -1862,8 +1874,120 @@ class TestList {
|
||||
// Mark a test's file as having failures. This prevents
|
||||
// saveTestState from saving its hash as a potentially
|
||||
// "unchanged" file to be skipped in a future run.
|
||||
notifyFailed(test) {
|
||||
notifyFailed(test, failureObject) {
|
||||
// Mark the file that this test lives in as having failures.
|
||||
this.fileInfo[test.file].hasFailures = true;
|
||||
|
||||
// Mark that the specific test failed.
|
||||
test.failed = true;
|
||||
|
||||
// If there is a failure object, store that for potential output.
|
||||
if (failureObject) {
|
||||
test.failureObject = failureObject;
|
||||
}
|
||||
}
|
||||
|
||||
saveJUnitOutput(path) {
|
||||
const grouped = groupTestsByFile(this.filteredTests);
|
||||
|
||||
// We'll form a collection of "testsuites"
|
||||
const testSuites = [];
|
||||
|
||||
const attrSafe = attr => (attr || "").replace('"', "&quot;");
|
||||
const durationForOutput = durationMs => durationMs / 1000;
|
||||
|
||||
// Each file is a testsuite.
|
||||
Object.keys(grouped).forEach((file) => {
|
||||
const testCases = [];
|
||||
|
||||
let countError = 0;
|
||||
let countFailure = 0;
|
||||
|
||||
// Each test is a "testcase".
|
||||
grouped[file].forEach((test) => {
|
||||
const testCaseAttrs = [
|
||||
`name="${attrSafe(test.name)}"`,
|
||||
];
|
||||
|
||||
if (test.durationMs) {
|
||||
testCaseAttrs.push(`time="${durationForOutput(test.durationMs)}"`);
|
||||
}
|
||||
|
||||
const testCaseAttrsString = testCaseAttrs.join(' ');
|
||||
|
||||
if (test.failed) {
|
||||
let failureElement = "";
|
||||
|
||||
if (test.failureObject instanceof TestFailure) {
|
||||
countFailure++;
|
||||
|
||||
failureElement = [
|
||||
`<error type="${test.failureObject.reason}">`,
|
||||
'<![CDATA[',
|
||||
inspect(test.failureObject.details, { depth: 4 }),
|
||||
']]>',
|
||||
'</error>',
|
||||
].join('\n');
|
||||
} else if (test.failureObject && test.failureObject.stack) {
|
||||
countError++;
|
||||
|
||||
failureElement = [
|
||||
'<failure>',
|
||||
'<![CDATA[',
|
||||
test.failureObject.stack,
|
||||
']]>',
|
||||
'</failure>',
|
||||
].join('\n');
|
||||
} else {
|
||||
countError++;
|
||||
|
||||
failureElement = '<failure />';
|
||||
}
|
||||
|
||||
testCases.push(
|
||||
[
|
||||
`<testcase ${testCaseAttrsString}>`,
|
||||
failureElement,
|
||||
'</testcase>',
|
||||
].join('\n'),
|
||||
);
|
||||
} else {
|
||||
testCases.push(`<testcase ${testCaseAttrsString}/>`);
|
||||
}
|
||||
});
|
||||
|
||||
const testSuiteAttrs = [
|
||||
`name="${file}"`,
|
||||
`tests="${testCases.length}"`,
|
||||
`failures="${countFailure}"`,
|
||||
`errors="${countError}"`,
|
||||
`time="${durationForOutput(this.durationMs)}"`,
|
||||
];
|
||||
|
||||
const testSuiteAttrsString = testSuiteAttrs.join(' ');
|
||||
|
||||
testSuites.push(
|
||||
[
|
||||
`<testsuite ${testSuiteAttrsString}>`,
|
||||
testCases.join('\n'),
|
||||
'</testsuite>',
|
||||
].join('\n'),
|
||||
);
|
||||
});
|
||||
|
||||
const xmlHeader = '<?xml version="1.0" encoding="UTF-8"?>';
|
||||
|
||||
const testSuitesString = testSuites.join('\n');
|
||||
|
||||
files.writeFile(path,
|
||||
[
|
||||
xmlHeader,
|
||||
`<testsuites>`,
|
||||
testSuitesString,
|
||||
`</testsuites>`,
|
||||
].join('\n'),
|
||||
'utf8',
|
||||
);
|
||||
}
|
||||
|
||||
// If this TestList was constructed with a testState,
|
||||
@@ -1939,17 +2063,11 @@ export function listTests(options) {
|
||||
return;
|
||||
}
|
||||
|
||||
const testsGroupedByFile = {};
|
||||
testList.filteredTests.forEach(filteredTest => {
|
||||
testsGroupedByFile[filteredTest.file] =
|
||||
testsGroupedByFile[filteredTest.file] || [];
|
||||
const grouped = groupTestsByFile(testList.filteredTests);
|
||||
|
||||
testsGroupedByFile[filteredTest.file].push(filteredTest);
|
||||
});
|
||||
|
||||
Object.keys(testsGroupedByFile).forEach((file) => {
|
||||
Object.keys(grouped).forEach((file) => {
|
||||
Console.rawInfo(file + ':\n');
|
||||
testsGroupedByFile[file].forEach((test) => {
|
||||
grouped[file].forEach((test) => {
|
||||
Console.rawInfo(' - ' + test.name +
|
||||
(test.tags.length ? ' [' + test.tags.join(' ') + ']'
|
||||
: '') + '\n');
|
||||
@@ -1977,6 +2095,8 @@ export function runTests(options) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
testList.startTime = new Date;
|
||||
|
||||
let totalRun = 0;
|
||||
const failedTests = [];
|
||||
|
||||
@@ -1986,6 +2106,9 @@ export function runTests(options) {
|
||||
runTest(test);
|
||||
});
|
||||
|
||||
testList.endTime = new Date;
|
||||
testList.durationMs = testList.endTime - testList.startTime;
|
||||
|
||||
function runTest(test, tries = 3) {
|
||||
let failure = null;
|
||||
let startTime;
|
||||
@@ -2003,6 +2126,8 @@ export function runTests(options) {
|
||||
test.cleanup();
|
||||
}
|
||||
|
||||
test.durationMs = +(new Date) - startTime;
|
||||
|
||||
if (failure) {
|
||||
Console.error("... fail!", Console.options({ indent: 2 }));
|
||||
|
||||
@@ -2015,9 +2140,6 @@ export function runTests(options) {
|
||||
return runTest(test, tries);
|
||||
}
|
||||
|
||||
failedTests.push(test);
|
||||
testList.notifyFailed(test);
|
||||
|
||||
if (failure instanceof TestFailure) {
|
||||
const frames = parseStackParse(failure).outsideFiber;
|
||||
const relpath = files.pathRelative(files.getCurrentToolsDir(),
|
||||
@@ -2070,16 +2192,22 @@ export function runTests(options) {
|
||||
} else {
|
||||
Console.rawError(" => Test threw exception: " + failure.stack + "\n");
|
||||
}
|
||||
|
||||
failedTests.push(test);
|
||||
testList.notifyFailed(test, failure);
|
||||
} else {
|
||||
const durationMs = +(new Date) - startTime;
|
||||
Console.error(
|
||||
"... ok (" + durationMs + " ms)",
|
||||
"... ok (" + test.durationMs + " ms)",
|
||||
Console.options({ indent: 2 }));
|
||||
}
|
||||
}
|
||||
|
||||
testList.saveTestState();
|
||||
|
||||
if (options.junit) {
|
||||
testList.saveJUnitOutput(options.junit);
|
||||
}
|
||||
|
||||
if (totalRun > 0) {
|
||||
Console.error();
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user