From 756fb597c63fde784af1288357fa721575376e36 Mon Sep 17 00:00:00 2001 From: Tom Coleman Date: Wed, 13 Jul 2016 17:50:41 +1000 Subject: [PATCH 1/6] First pass at request resuming via buffering the `range` header This is still a WIP, but I wanted to get feedback at this point. Will update the PR with outstanding TODOs. --- tools/packaging/tropohouse.js | 2 +- tools/tests/utils-tests.js | 27 ++++++++++++++++++ tools/utils/http-helpers.js | 54 ++++++++++++++++++++++++++++++++++- 3 files changed, 81 insertions(+), 2 deletions(-) diff --git a/tools/packaging/tropohouse.js b/tools/packaging/tropohouse.js index a30719a232..2dcb096c6d 100644 --- a/tools/packaging/tropohouse.js +++ b/tools/packaging/tropohouse.js @@ -304,7 +304,7 @@ _.extend(exports.Tropohouse.prototype, { // it relies on extractTarGz being fast and not reporting any progress. // Really, we should create two subtasks // (and, we should stream the download to the tar extractor) - var packageTarball = httpHelpers.getUrl({ + var packageTarball = httpHelpers.getUrlWithResuming({ url: url, encoding: null, progress: buildmessage.getCurrentProgressTracker(), diff --git a/tools/tests/utils-tests.js b/tools/tests/utils-tests.js index 729bca6116..d7272b0d97 100644 --- a/tools/tests/utils-tests.js +++ b/tools/tests/utils-tests.js @@ -152,3 +152,30 @@ selftest.define("parse url", function () { protocol: "https" }); }); + +// XXX: WIP -- I'm not sure how to properly test a failed connection. 
+// At the moment, I'm thinking I'll just mark this test as "don't run" +// and use it by hand for testing (which basically means run it, kill my +// internet, start my internet again, watch what happens) +selftest.define("resume downloads", function () { + const url = 'http://warehouse.meteor.com/builds/Pr7L8f6PqXyqNJJn4/1443478653127/aRiirNrp4v/meteor-tool-1.1.9-os.osx.x86_64+web.browser+web.cordova.tgz'; + + const result = require('../utils/http-helpers').getUrlWithResuming({ + timeout: 1000, + url: url, + encoding: null, + wait: false, + progress: { + reportProgress({ current, end }) { + const percent = current / end * 100; + if (Math.random() < 0.1) { + console.log(`${percent} %`); + } + }, + reportProgressDone() { + console.log('done'); + } + } + }); + console.log(result, result.toString().length) +}); diff --git a/tools/utils/http-helpers.js b/tools/utils/http-helpers.js index cf1e196932..6793fc0851 100644 --- a/tools/utils/http-helpers.js +++ b/tools/utils/http-helpers.js @@ -14,6 +14,7 @@ var release = require('../packaging/release.js'); var Console = require('../console/console.js').Console; var timeoutScaleFactor = require('./utils.js').timeoutScaleFactor; +import { WritableStreamBuffer } from 'stream-buffers'; // Helper that tracks bytes written to a writable var WritableWithProgress = function (writable, listener) { @@ -128,6 +129,12 @@ _.extend(exports, { options = _.clone(urlOrOptions); } + var outputStream; + if (_.has(options, 'outputStream')) { + outputStream = options.outputStream; + delete options.outputStream; + } + var bodyStream; if (_.has(options, 'bodyStream')) { bodyStream = options.bodyStream; @@ -278,6 +285,10 @@ _.extend(exports, { bodyStream.pipe(dest); } + if (outputStream) { + req.pipe(outputStream); + } + if (progress) { httpHelpers._addProgressEvents(req); req.on('progress', function (state) { @@ -358,6 +369,47 @@ _.extend(exports, { } else { return body; } - } + }, + getUrlWithResuming(urlOrOptions) { + const options = 
_.isObject(urlOrOptions) ? urlOrOptions : { + url: urlOrOptions, + }; + + const outputStream = new WritableStreamBuffer(); + + const MAX_ATTEMPTS = 5; + function attempt(triesRemaining) { + const headers = _.extend({}, urlOrOptions.headers, { + range: `bytes=${outputStream.size()}-`, + }); + try { + return httpHelpers.request({ + outputStream, + headers, + ...urlOrOptions, + }); + } catch (e) { + console.log(e); + if (triesRemaining > 0) { + console.log(`Request failed, retrying`); + return attempt(triesRemaining - 1); + } else { + console.log(`Request failed ${MAX_ATTEMPTS} times, failing`) + throw new files.OfflineError(e); + } + } + } + const response = attempt(MAX_ATTEMPTS).response; + const body = outputStream.getContents(); + + if (response.statusCode >= 400 && response.statusCode < 600) { + const href = response.request.href; + throw Error( + body || + `Could not get ${href}; server returned [${response.statusCode}]`); + } else { + return body; + } + } }); From eeba73eba03e16e25084221b6cff912b79eadd23 Mon Sep 17 00:00:00 2001 From: Tom Coleman Date: Thu, 14 Jul 2016 14:01:53 +1000 Subject: [PATCH 2/6] Made the test work in a sensible way --- tools/tests/utils-tests.js | 33 ++++++++++++++++--------- tools/utils/http-helpers.js | 49 +++++++++++++++++++++++++------------ 2 files changed, 54 insertions(+), 28 deletions(-) diff --git a/tools/tests/utils-tests.js b/tools/tests/utils-tests.js index d7272b0d97..2fb65f99aa 100644 --- a/tools/tests/utils-tests.js +++ b/tools/tests/utils-tests.js @@ -153,14 +153,23 @@ selftest.define("parse url", function () { }); }); -// XXX: WIP -- I'm not sure how to properly test a failed connection. 
-// At the moment, I'm thinking I'll just mark this test as "don't run" -// and use it by hand for testing (which basically means run it, kill my -// internet, start my internet again, watch what happens) -selftest.define("resume downloads", function () { +selftest.define("resume downloads", ['net', 'slow'], function () { + // A reasonably big file that (I think) should take more than 1s to download + // and that we know the size of const url = 'http://warehouse.meteor.com/builds/Pr7L8f6PqXyqNJJn4/1443478653127/aRiirNrp4v/meteor-tool-1.1.9-os.osx.x86_64+web.browser+web.cordova.tgz'; - const result = require('../utils/http-helpers').getUrlWithResuming({ + let firstPass = true; + const httpHelpers = require('../utils/http-helpers'); + + setTimeout(() => { + httpHelpers._currentRequest.emit('error', 'pretend-http-error'); + httpHelpers._currentRequest.emit('end'); + }, 1000); + + const result = httpHelpers.getUrlWithResuming({ + // This doesn't affect the test, but if you remove the timeout above, + // you can kill the connection manually by shutting down your network. 
+ // This makes it a bit faster timeout: 1000, url: url, encoding: null, @@ -168,14 +177,14 @@ selftest.define("resume downloads", function () { progress: { reportProgress({ current, end }) { const percent = current / end * 100; - if (Math.random() < 0.1) { - console.log(`${percent} %`); + if (Math.random() < 0.01) { + // Uncomment this when manually testing I guess + // console.log(`${percent} %`); } }, - reportProgressDone() { - console.log('done'); - } + reportProgressDone() {} } }); - console.log(result, result.toString().length) + + selftest.expectEqual(result.toString().length, 65041076); }); diff --git a/tools/utils/http-helpers.js b/tools/utils/http-helpers.js index 6793fc0851..66d483312a 100644 --- a/tools/utils/http-helpers.js +++ b/tools/utils/http-helpers.js @@ -15,6 +15,7 @@ var Console = require('../console/console.js').Console; var timeoutScaleFactor = require('./utils.js').timeoutScaleFactor; import { WritableStreamBuffer } from 'stream-buffers'; +import fiberHelpers from '../utils/fiber-helpers.js'; // Helper that tracks bytes written to a writable var WritableWithProgress = function (writable, listener) { @@ -83,6 +84,10 @@ var getUserAgent = function () { var httpHelpers = exports; _.extend(exports, { + // For testing purposes, do not use (obviously it doesn't really make + // sense to have only one current request) + _currentRequest: null, + getUserAgent: getUserAgent, // A wrapper around request with the following improvements: @@ -269,6 +274,8 @@ _.extend(exports, { var request = require('request'); var req = request(options, callback); + // A handle for testing + httpHelpers._currentRequest = req; var totalProgress = { current: 0, end: bodyStreamLength + responseLength, done: false }; @@ -372,44 +379,54 @@ _.extend(exports, { }, getUrlWithResuming(urlOrOptions) { - const options = _.isObject(urlOrOptions) ? urlOrOptions : { + const options = _.isObject(urlOrOptions) ? 
_.clone(urlOrOptions) : { url: urlOrOptions, }; const outputStream = new WritableStreamBuffer(); - const MAX_ATTEMPTS = 5; + const MAX_ATTEMPTS = 10; + const RETRY_DELAY_SECS = 5; + + let lastSize = 0; function attempt(triesRemaining) { - const headers = _.extend({}, urlOrOptions.headers, { - range: `bytes=${outputStream.size()}-`, - }); + if (lastSize > 0) { + options.headers = { + ...options.headers, + Range: `bytes=${outputStream.size()}-` + }; + } + try { return httpHelpers.request({ outputStream, - headers, - ...urlOrOptions, + ...options, }); } catch (e) { - console.log(e); - if (triesRemaining > 0) { + const size = outputStream.size(); + const useTry = size === lastSize; + lastSize = outputStream.size(); + + if (!useTry || triesRemaining > 0) { console.log(`Request failed, retrying`); - return attempt(triesRemaining - 1); + return new Promise(resolve => { + setTimeout(fiberHelpers.bindEnvironment(() => { + resolve(attempt(useTry ? triesRemaining - 1 : triesRemaining)); + }, RETRY_DELAY_SECS * 1000)); + }).await(); } else { console.log(`Request failed ${MAX_ATTEMPTS} times, failing`) throw new files.OfflineError(e); } } } - const response = attempt(MAX_ATTEMPTS).response; - const body = outputStream.getContents(); + const response = attempt(MAX_ATTEMPTS).response; if (response.statusCode >= 400 && response.statusCode < 600) { const href = response.request.href; - throw Error( - body || - `Could not get ${href}; server returned [${response.statusCode}]`); + throw Error(`Could not get ${href}; server returned [${response.statusCode}]`); } else { - return body; + return outputStream.getContents(); } } }); From 1ef12c7f43261e82fd15c7548aa46b23982c084f Mon Sep 17 00:00:00 2001 From: Tom Coleman Date: Thu, 14 Jul 2016 16:09:19 +1000 Subject: [PATCH 3/6] Better logging and progress Using Console.debug to register messages about retries. 
--- tools/utils/http-helpers.js | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/tools/utils/http-helpers.js b/tools/utils/http-helpers.js index 66d483312a..5659752121 100644 --- a/tools/utils/http-helpers.js +++ b/tools/utils/http-helpers.js @@ -387,6 +387,7 @@ _.extend(exports, { const MAX_ATTEMPTS = 10; const RETRY_DELAY_SECS = 5; + const masterProgress = options.progress; let lastSize = 0; function attempt(triesRemaining) { @@ -397,6 +398,12 @@ _.extend(exports, { }; } + if (masterProgress) { + options.progress = masterProgress.addChildTask({ + title: masterProgress._title + }); + } + try { return httpHelpers.request({ outputStream, @@ -405,17 +412,23 @@ _.extend(exports, { } catch (e) { const size = outputStream.size(); const useTry = size === lastSize; + const change = size - lastSize; lastSize = outputStream.size(); if (!useTry || triesRemaining > 0) { - console.log(`Request failed, retrying`); + if (useTry) { + Console.debug(`Request failed, ${triesRemaining - 1} attempts left`); + } else { + Console.debug(`Request failed after ${change} bytes, retrying`); + } + return new Promise(resolve => { setTimeout(fiberHelpers.bindEnvironment(() => { resolve(attempt(useTry ? 
triesRemaining - 1 : triesRemaining)); }, RETRY_DELAY_SECS * 1000)); }).await(); } else { - console.log(`Request failed ${MAX_ATTEMPTS} times, failing`) + Console.debug(`Request failed ${MAX_ATTEMPTS} times: failing`); throw new files.OfflineError(e); } } From c5cc1c7ddd3cc6c185f9a3259df1943317fff751 Mon Sep 17 00:00:00 2001 From: Tom Coleman Date: Thu, 14 Jul 2016 16:12:02 +1000 Subject: [PATCH 4/6] Added stream buffers to the dev bundle --- scripts/dev-bundle-tool-package.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/dev-bundle-tool-package.js b/scripts/dev-bundle-tool-package.js index fd75a4d7a5..0649543971 100644 --- a/scripts/dev-bundle-tool-package.js +++ b/scripts/dev-bundle-tool-package.js @@ -54,7 +54,8 @@ var packageJson = { runas: "3.1.1", 'lru-cache': '2.6.4', 'cordova-lib': "6.0.0", - longjohn: '0.2.11' + longjohn: '0.2.11', + 'stream-buffers': '3.0.0' } }; From 14210b29ecc7e586b4e32ec2e0d0537b6d2cbb89 Mon Sep 17 00:00:00 2001 From: Tom Coleman Date: Thu, 14 Jul 2016 16:13:31 +1000 Subject: [PATCH 5/6] Bumped dev bundle version --- meteor | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meteor b/meteor index dfd41d2759..9b24e5f97c 100755 --- a/meteor +++ b/meteor @@ -1,6 +1,6 @@ #!/usr/bin/env bash -BUNDLE_VERSION=4.1.0 +BUNDLE_VERSION=4.1.1 # OS Check. Put here because here is where we download the precompiled # bundles that are arch specific. 
From 6fe6875dd2e7af1b0d92d6547582af950b56e0ac Mon Sep 17 00:00:00 2001
From: Tom Coleman 
Date: Thu, 14 Jul 2016 16:30:18 +1000
Subject: [PATCH 6/6] Minor fixes + added a comment

---
 tools/tests/utils-tests.js  | 5 ++---
 tools/utils/http-helpers.js | 6 ++++++
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/tools/tests/utils-tests.js b/tools/tests/utils-tests.js
index 2fb65f99aa..b700cd19fe 100644
--- a/tools/tests/utils-tests.js
+++ b/tools/tests/utils-tests.js
@@ -1,6 +1,8 @@
 var selftest = require('../tool-testing/selftest.js');
 var utils = require('../utils/utils.js');
 
+import httpHelpers from '../utils/http-helpers';
+
 selftest.define('subset generator', function () {
   var out = [];
   utils.generateSubsetsOfIncreasingSize(['a', 'b', 'c'], function (x) {
@@ -158,9 +160,6 @@ selftest.define("resume downloads", ['net', 'slow'], function () {
   // and that we know the size of
   const url = 'http://warehouse.meteor.com/builds/Pr7L8f6PqXyqNJJn4/1443478653127/aRiirNrp4v/meteor-tool-1.1.9-os.osx.x86_64+web.browser+web.cordova.tgz';
 
-  let firstPass = true;
-  const httpHelpers = require('../utils/http-helpers');
-
   setTimeout(() => {
     httpHelpers._currentRequest.emit('error', 'pretend-http-error');
     httpHelpers._currentRequest.emit('end');
diff --git a/tools/utils/http-helpers.js b/tools/utils/http-helpers.js
index 5659752121..7d5825e52f 100644
--- a/tools/utils/http-helpers.js
+++ b/tools/utils/http-helpers.js
@@ -378,6 +378,12 @@ _.extend(exports, {
     }
   },
 
+  // More or less as above, except with support for multiple attempts per
+  // request and resuming on retries. This means if the connection is bad,
+  // we can sometimes complete a request, even if each individual attempt fails.
+  // We only use this for package downloads. In theory we could use it for
+  // all requests but that seems like overkill and it isn't well tested in
+  // other scenarios.
   getUrlWithResuming(urlOrOptions) {
     const options = _.isObject(urlOrOptions) ? 
_.clone(urlOrOptions) : { url: urlOrOptions,