Merge pull request #7399 from meteor/7267-request-resuming

Package download request resuming
This commit is contained in:
Ben Newman
2016-07-14 14:17:51 +00:00
committed by GitHub
5 changed files with 128 additions and 4 deletions

meteor
View File

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
-BUNDLE_VERSION=4.1.0
+BUNDLE_VERSION=4.1.1
 # OS Check. Put here because here is where we download the precompiled
 # bundles that are arch specific.

View File

@@ -54,7 +54,8 @@ var packageJson = {
     runas: "3.1.1",
     'lru-cache': '2.6.4',
     'cordova-lib': "6.0.0",
-    longjohn: '0.2.11'
+    longjohn: '0.2.11',
+    'stream-buffers': '3.0.0'
   }
 };
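For context, a minimal sketch (not Meteor code) of the stream-buffers API that the resuming logic in this commit relies on; size() and getContents() are the only calls it uses:

    // WritableStreamBuffer accumulates everything written to it in memory.
    const { WritableStreamBuffer } = require('stream-buffers');

    const sink = new WritableStreamBuffer();
    sink.write('hello ');
    sink.write('world');
    console.log(sink.size());                    // 11 -- bytes written so far
    console.log(sink.getContents().toString());  // 'hello world'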

View File

@@ -304,7 +304,7 @@ _.extend(exports.Tropohouse.prototype, {
     // it relies on extractTarGz being fast and not reporting any progress.
     // Really, we should create two subtasks
     // (and, we should stream the download to the tar extractor)
-    var packageTarball = httpHelpers.getUrl({
+    var packageTarball = httpHelpers.getUrlWithResuming({
       url: url,
       encoding: null,
       progress: buildmessage.getCurrentProgressTracker(),
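The parenthetical TODO above hints at a streaming design that the outputStream option added later in this commit could enable. A hedged sketch, where extractTarStream is a hypothetical streaming extractor (Meteor's real extractTarGz takes a complete buffer):

    // Sketch only: decompress on the fly instead of buffering the tarball.
    const zlib = require('zlib');

    const gunzip = zlib.createGunzip();
    gunzip.pipe(extractTarStream(targetDirectory)); // extractTarStream is hypothetical

    httpHelpers.request({
      url: url,
      encoding: null,
      outputStream: gunzip, // request() does req.pipe(outputStream) internally
    });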

View File

@@ -1,6 +1,8 @@
 var selftest = require('../tool-testing/selftest.js');
 var utils = require('../utils/utils.js');
+import httpHelpers from '../utils/http-helpers';
+
 selftest.define('subset generator', function () {
   var out = [];
   utils.generateSubsetsOfIncreasingSize(['a', 'b', 'c'], function (x) {
@@ -152,3 +154,36 @@ selftest.define("parse url", function () {
     protocol: "https"
   });
 });
+
+selftest.define("resume downloads", ['net', 'slow'], function () {
+  // A reasonably big file that (I think) should take more than 1s to download
+  // and that we know the size of
+  const url = 'http://warehouse.meteor.com/builds/Pr7L8f6PqXyqNJJn4/1443478653127/aRiirNrp4v/meteor-tool-1.1.9-os.osx.x86_64+web.browser+web.cordova.tgz';
+
+  setTimeout(() => {
+    httpHelpers._currentRequest.emit('error', 'pretend-http-error');
+    httpHelpers._currentRequest.emit('end');
+  }, 1000);
+
+  const result = httpHelpers.getUrlWithResuming({
+    // This doesn't affect the test, but if you remove the timeout above,
+    // you can kill the connection manually by shutting down your network.
+    // This makes it a bit faster
+    timeout: 1000,
+    url: url,
+    encoding: null,
+    wait: false,
+    progress: {
+      reportProgress({ current, end }) {
+        const percent = current / end * 100;
+        if (Math.random() < 0.01) {
+          // Uncomment this when manually testing I guess
+          // console.log(`${percent} %`);
+        }
+      },
+      reportProgressDone() {}
+    }
+  });
+
+  selftest.expectEqual(result.toString().length, 65041076);
+});
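The fault injection in this test works because the object returned by the request library is a readable stream, and therefore an EventEmitter, so emitting 'error' on it mid-flight looks to downstream consumers exactly like a dropped connection. A standalone sketch of the same trick (the URL is a placeholder):

    const request = require('request');

    const req = request('https://example.com/big-file.bin');
    req.on('error', (err) => console.log('saw:', err));

    // Pretend the network died one second into the transfer.
    setTimeout(() => {
      req.emit('error', new Error('pretend-http-error'));
    }, 1000);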

View File

@@ -14,6 +14,8 @@ var release = require('../packaging/release.js');
 var Console = require('../console/console.js').Console;
 var timeoutScaleFactor = require('./utils.js').timeoutScaleFactor;
+import { WritableStreamBuffer } from 'stream-buffers';
+import fiberHelpers from '../utils/fiber-helpers.js';

 // Helper that tracks bytes written to a writable
 var WritableWithProgress = function (writable, listener) {
@@ -82,6 +84,10 @@ var getUserAgent = function () {
 var httpHelpers = exports;

 _.extend(exports, {
+  // For testing purposes, do not use (obviously it doesn't really make
+  // sense to have only one current request)
+  _currentRequest: null,
+
   getUserAgent: getUserAgent,

   // A wrapper around request with the following improvements:
@@ -128,6 +134,12 @@ _.extend(exports, {
     options = _.clone(urlOrOptions);
   }

+  var outputStream;
+  if (_.has(options, 'outputStream')) {
+    outputStream = options.outputStream;
+    delete options.outputStream;
+  }
+
   var bodyStream;
   if (_.has(options, 'bodyStream')) {
     bodyStream = options.bodyStream;
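A hedged usage sketch of the new outputStream option: the caller hands request() a writable sink that receives the raw response body, so whatever bytes arrived before a failure remain available, which is exactly what resuming needs. (The URL is a placeholder; error handling is simplified.)

    const { WritableStreamBuffer } = require('stream-buffers');

    const sink = new WritableStreamBuffer();
    try {
      httpHelpers.request({
        url: 'https://example.com/file.bin',
        encoding: null,
        outputStream: sink,
      });
    } catch (e) {
      // The sink still holds everything received before the failure.
      console.log(`got ${sink.size()} bytes before the error`);
    }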
@@ -262,6 +274,8 @@ _.extend(exports, {
   var request = require('request');
   var req = request(options, callback);
+  // A handle for testing
+  httpHelpers._currentRequest = req;

   var totalProgress = { current: 0, end: bodyStreamLength + responseLength, done: false };
@@ -278,6 +292,10 @@ _.extend(exports, {
     bodyStream.pipe(dest);
   }

+  if (outputStream) {
+    req.pipe(outputStream);
+  }
+
   if (progress) {
     httpHelpers._addProgressEvents(req);
     req.on('progress', function (state) {
@@ -358,6 +376,76 @@ _.extend(exports, {
     } else {
       return body;
     }
-  }
+  },
+
+  // More or less as above, except with support for multiple attempts per
+  // request and resuming on retries. This means that if the connection is
+  // bad, we can sometimes still complete a request even if individual
+  // attempts fail. We only use this for package downloads; in theory we
+  // could use it for all requests, but that seems like overkill and it
+  // isn't well tested in other scenarios.
+  getUrlWithResuming(urlOrOptions) {
+    const options = _.isObject(urlOrOptions) ? _.clone(urlOrOptions) : {
+      url: urlOrOptions,
+    };
+
+    const outputStream = new WritableStreamBuffer();
+    const MAX_ATTEMPTS = 10;
+    const RETRY_DELAY_SECS = 5;
+    const masterProgress = options.progress;
+    let lastSize = 0;
+
+    function attempt(triesRemaining) {
+      if (lastSize > 0) {
+        // Ask the server for only the bytes we don't have yet.
+        options.headers = {
+          ...options.headers,
+          Range: `bytes=${outputStream.size()}-`
+        };
+      }
+
+      if (masterProgress) {
+        options.progress = masterProgress.addChildTask({
+          title: masterProgress._title
+        });
+      }
+
+      try {
+        return httpHelpers.request({
+          outputStream,
+          ...options,
+        });
+      } catch (e) {
+        const size = outputStream.size();
+        // Only consume a try if this attempt made no forward progress.
+        const useTry = size === lastSize;
+        const change = size - lastSize;
+        lastSize = outputStream.size();
+
+        if (!useTry || triesRemaining > 0) {
+          if (useTry) {
+            Console.debug(`Request failed, ${triesRemaining - 1} attempts left`);
+          } else {
+            Console.debug(`Request failed after ${change} bytes, retrying`);
+          }
+
+          return new Promise(resolve => {
+            setTimeout(fiberHelpers.bindEnvironment(() => {
+              resolve(attempt(useTry ? triesRemaining - 1 : triesRemaining));
+            }), RETRY_DELAY_SECS * 1000);
+          }).await();
+        } else {
+          Console.debug(`Request failed ${MAX_ATTEMPTS} times: failing`);
+          throw new files.OfflineError(e);
+        }
+      }
+    }
+
+    const response = attempt(MAX_ATTEMPTS).response;
+
+    if (response.statusCode >= 400 && response.statusCode < 600) {
+      const href = response.request.href;
+      throw Error(`Could not get ${href}; server returned [${response.statusCode}]`);
+    } else {
+      return outputStream.getContents();
+    }
+  }
 });
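At the HTTP level, the resuming above rests on standard range requests: once some bytes are buffered, the next attempt sends a Range header, and a range-aware server answers 206 Partial Content with only the remainder. A minimal illustration with Node's https module (host, path, and offset are placeholders); note that a server that ignores Range replies 200 with the full body, in which case appending to the buffer would duplicate bytes:

    const https = require('https');

    const alreadyHave = 131072; // bytes buffered before the last failure
    https.get({
      host: 'example.com',
      path: '/package.tgz',
      headers: { Range: `bytes=${alreadyHave}-` },
    }, (res) => {
      // 206 => the server honored the range and sends only the remainder;
      // 200 => it ignored Range and is resending the whole file.
      console.log(res.statusCode, res.headers['content-range']);
    });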