diff --git a/meteor b/meteor index dfd41d2759..9b24e5f97c 100755 --- a/meteor +++ b/meteor @@ -1,6 +1,6 @@ #!/usr/bin/env bash -BUNDLE_VERSION=4.1.0 +BUNDLE_VERSION=4.1.1 # OS Check. Put here because here is where we download the precompiled # bundles that are arch specific. diff --git a/scripts/dev-bundle-tool-package.js b/scripts/dev-bundle-tool-package.js index fd75a4d7a5..0649543971 100644 --- a/scripts/dev-bundle-tool-package.js +++ b/scripts/dev-bundle-tool-package.js @@ -54,7 +54,8 @@ var packageJson = { runas: "3.1.1", 'lru-cache': '2.6.4', 'cordova-lib': "6.0.0", - longjohn: '0.2.11' + longjohn: '0.2.11', + 'stream-buffers': '3.0.0' } }; diff --git a/tools/packaging/tropohouse.js b/tools/packaging/tropohouse.js index a30719a232..2dcb096c6d 100644 --- a/tools/packaging/tropohouse.js +++ b/tools/packaging/tropohouse.js @@ -304,7 +304,7 @@ _.extend(exports.Tropohouse.prototype, { // it relies on extractTarGz being fast and not reporting any progress. // Really, we should create two subtasks // (and, we should stream the download to the tar extractor) - var packageTarball = httpHelpers.getUrl({ + var packageTarball = httpHelpers.getUrlWithResuming({ url: url, encoding: null, progress: buildmessage.getCurrentProgressTracker(), diff --git a/tools/tests/utils-tests.js b/tools/tests/utils-tests.js index 729bca6116..b700cd19fe 100644 --- a/tools/tests/utils-tests.js +++ b/tools/tests/utils-tests.js @@ -1,6 +1,8 @@ var selftest = require('../tool-testing/selftest.js'); var utils = require('../utils/utils.js'); +import httpHelpers from '../utils/http-helpers'; + selftest.define('subset generator', function () { var out = []; utils.generateSubsetsOfIncreasingSize(['a', 'b', 'c'], function (x) { @@ -152,3 +154,36 @@ selftest.define("parse url", function () { protocol: "https" }); }); + +selftest.define("resume downloads", ['net', 'slow'], function () { + // A reasonably big file that (I think) should take more than 1s to download + // and that we know the size of + 
const url = 'http://warehouse.meteor.com/builds/Pr7L8f6PqXyqNJJn4/1443478653127/aRiirNrp4v/meteor-tool-1.1.9-os.osx.x86_64+web.browser+web.cordova.tgz'; + + setTimeout(() => { + httpHelpers._currentRequest.emit('error', 'pretend-http-error'); + httpHelpers._currentRequest.emit('end'); + }, 1000); + + const result = httpHelpers.getUrlWithResuming({ + // This doesn't affect the test, but if you remove the timeout above, + // you can kill the connection manually by shutting down your network. + // This makes it a bit faster + timeout: 1000, + url: url, + encoding: null, + wait: false, + progress: { + reportProgress({ current, end }) { + const percent = current / end * 100; + if (Math.random() < 0.01) { + // Uncomment this when manually testing I guess + // console.log(`${percent} %`); + } + }, + reportProgressDone() {} + } + }); + + selftest.expectEqual(result.toString().length, 65041076); +}); diff --git a/tools/utils/http-helpers.js b/tools/utils/http-helpers.js index 9505888b9f..a9a2f34e50 100644 --- a/tools/utils/http-helpers.js +++ b/tools/utils/http-helpers.js @@ -14,6 +14,8 @@ var release = require('../packaging/release.js'); var Console = require('../console/console.js').Console; var timeoutScaleFactor = require('./utils.js').timeoutScaleFactor; +import { WritableStreamBuffer } from 'stream-buffers'; +import fiberHelpers from '../utils/fiber-helpers.js'; // Helper that tracks bytes written to a writable var WritableWithProgress = function (writable, listener) { @@ -82,6 +84,10 @@ var getUserAgent = function () { var httpHelpers = exports; _.extend(exports, { + // For testing purposes, do not use (obviously it doesn't really make + // sense to have only one current request) + _currentRequest: null, + getUserAgent: getUserAgent, // A wrapper around request with the following improvements: @@ -128,6 +134,12 @@ _.extend(exports, { options = _.clone(urlOrOptions); } + var outputStream; + if (_.has(options, 'outputStream')) { + outputStream = options.outputStream; + 
delete options.outputStream; + } + var bodyStream; if (_.has(options, 'bodyStream')) { bodyStream = options.bodyStream; @@ -262,6 +274,8 @@ _.extend(exports, { var request = require('request'); var req = request(options, callback); + // A handle for testing + httpHelpers._currentRequest = req; var totalProgress = { current: 0, end: bodyStreamLength + responseLength, done: false }; @@ -278,6 +292,10 @@ _.extend(exports, { bodyStream.pipe(dest); } + if (outputStream) { + req.pipe(outputStream); + } + if (progress) { httpHelpers._addProgressEvents(req); req.on('progress', function (state) { @@ -358,6 +376,76 @@ _.extend(exports, { } else { return body; - } + }, + // More or less as above, except with support for multiple attempts per + // request and resuming on retries. This means if the connection is bad, + // we can sometimes complete a request, even if each individual attempt fails. + // We only use this for package downloads. In theory we could use it for + // all requests but that seems like overkill and it isn't well tested in + // other scenarios. + getUrlWithResuming(urlOrOptions) { + const options = _.isObject(urlOrOptions) ?
_.clone(urlOrOptions) : { + url: urlOrOptions, + }; + + const outputStream = new WritableStreamBuffer(); + + const MAX_ATTEMPTS = 10; + const RETRY_DELAY_SECS = 5; + const masterProgress = options.progress; + + let lastSize = 0; + function attempt(triesRemaining) { + if (lastSize > 0) { + options.headers = { + ...options.headers, + Range: `bytes=${outputStream.size()}-` + }; + } + + if (masterProgress) { + options.progress = masterProgress.addChildTask({ + title: masterProgress._title + }); + } + + try { + return httpHelpers.request({ + outputStream, + ...options, + }); + } catch (e) { + const size = outputStream.size(); + const useTry = size === lastSize; + const change = size - lastSize; + lastSize = outputStream.size(); + + if (!useTry || triesRemaining > 0) { + if (useTry) { + Console.debug(`Request failed, ${triesRemaining - 1} attempts left`); + } else { + Console.debug(`Request failed after ${change} bytes, retrying`); + } + + return new Promise(resolve => { + setTimeout(fiberHelpers.bindEnvironment(() => { + resolve(attempt(useTry ? triesRemaining - 1 : triesRemaining)); + }), RETRY_DELAY_SECS * 1000); + }).await(); + } else { + Console.debug(`Request failed ${MAX_ATTEMPTS} times: failing`); + throw new files.OfflineError(e); + } + } + } + + const response = attempt(MAX_ATTEMPTS).response; + if (response.statusCode >= 400 && response.statusCode < 600) { + const href = response.request.href; + throw new Error(`Could not get ${href}; server returned [${response.statusCode}]`); + } else { + return outputStream.getContents(); + } + } });