Profile more of the tool and improvements to Profile

Previously, we would generate reports on
"Selecting Package Versions" and "Rebuild App".

- Instead of just profiling constraint solving, profile the entire
  process of preparing the project via ProjectContext, by giving
  each public function that "advances the stage" a Profile.run
  (typically prepareProjectForBuild).
- Improve profiler output to better distinguish multiple runs
- Distinguish "Build App" and "Rebuild App"
- Instrument lots of calls that weren't instrumented before
This commit is contained in:
David Greenspan
2016-02-03 11:53:09 -08:00
parent bdf64da339
commit 2fca3a9599
8 changed files with 239 additions and 192 deletions

View File

@@ -875,126 +875,130 @@ files.runJavaScript = function (code, options) {
options = options || {};
var filename = options.filename || "<anonymous>";
var keys = [], values = [];
// don't assume that _.keys and _.values are guaranteed to
// enumerate in the same order
_.each(options.symbols, function (value, name) {
keys.push(name);
values.push(value);
});
var stackFilename = filename;
if (options.sourceMap) {
// We want to generate an arbitrary filename that we use to associate the
// file with its source map.
stackFilename = "<runJavaScript-" + nextStackFilenameCounter++ + ">";
}
return Profile.time('runJavaScript ' + filename, () => {
var chunks = [];
var header = "(function(" + keys.join(',') + "){";
chunks.push(header);
if (options.sourceMap) {
var consumer = new sourcemap.SourceMapConsumer(options.sourceMap);
chunks.push(sourcemap.SourceNode.fromStringWithSourceMap(
code, consumer));
} else {
chunks.push(code);
}
// \n is necessary in case final line is a //-comment
chunks.push("\n})");
var wrapped;
var parsedSourceMap = null;
if (options.sourceMap) {
var node = new sourcemap.SourceNode(null, null, null, chunks);
var results = node.toStringWithSourceMap({
file: stackFilename
var keys = [], values = [];
// don't assume that _.keys and _.values are guaranteed to
// enumerate in the same order
_.each(options.symbols, function (value, name) {
keys.push(name);
values.push(value);
});
wrapped = results.code;
parsedSourceMap = results.map.toJSON();
if (options.sourceMapRoot) {
// Add the specified root to any root that may be in the file.
parsedSourceMap.sourceRoot = files.pathJoin(
options.sourceMapRoot, parsedSourceMap.sourceRoot || '');
}
// source-map-support doesn't ever look at the sourcesContent field, so
// there's no point in keeping it in memory.
delete parsedSourceMap.sourcesContent;
parsedSourceMaps[stackFilename] = parsedSourceMap;
} else {
wrapped = chunks.join('');
};
try {
// See #runInThisContext
//
// XXX it'd be nice to runInNewContext so that the code can't mess
// with our globals, but objects that come out of runInNewContext
// have bizarro antimatter prototype chains and break 'instanceof
// Array'. for now, steer clear
//
// Pass 'true' as third argument if we want the parse error on
// stderr (which we don't).
var script = require('vm').createScript(wrapped, stackFilename);
} catch (nodeParseError) {
if (!(nodeParseError instanceof SyntaxError)) {
var stackFilename = filename;
if (options.sourceMap) {
// We want to generate an arbitrary filename that we use to associate the
// file with its source map.
stackFilename = "<runJavaScript-" + nextStackFilenameCounter++ + ">";
}
var chunks = [];
var header = "(function(" + keys.join(',') + "){";
chunks.push(header);
if (options.sourceMap) {
var consumer = new sourcemap.SourceMapConsumer(options.sourceMap);
chunks.push(sourcemap.SourceNode.fromStringWithSourceMap(
code, consumer));
} else {
chunks.push(code);
}
// \n is necessary in case final line is a //-comment
chunks.push("\n})");
var wrapped;
var parsedSourceMap = null;
if (options.sourceMap) {
var node = new sourcemap.SourceNode(null, null, null, chunks);
var results = node.toStringWithSourceMap({
file: stackFilename
});
wrapped = results.code;
parsedSourceMap = results.map.toJSON();
if (options.sourceMapRoot) {
// Add the specified root to any root that may be in the file.
parsedSourceMap.sourceRoot = files.pathJoin(
options.sourceMapRoot, parsedSourceMap.sourceRoot || '');
}
// source-map-support doesn't ever look at the sourcesContent field, so
// there's no point in keeping it in memory.
delete parsedSourceMap.sourcesContent;
parsedSourceMaps[stackFilename] = parsedSourceMap;
} else {
wrapped = chunks.join('');
};
try {
// See #runInThisContext
//
// XXX it'd be nice to runInNewContext so that the code can't mess
// with our globals, but objects that come out of runInNewContext
// have bizarro antimatter prototype chains and break 'instanceof
// Array'. for now, steer clear
//
// Pass 'true' as third argument if we want the parse error on
// stderr (which we don't).
var script = require('vm').createScript(wrapped, stackFilename);
} catch (nodeParseError) {
if (!(nodeParseError instanceof SyntaxError)) {
throw nodeParseError;
}
// Got a parse error. Unfortunately, we can't actually get the
// location of the parse error from the SyntaxError; Node has some
// hacky support for displaying it over stderr if you pass an
// undocumented third argument to stackFilename, but that's not
// what we want. See
// https://github.com/joyent/node/issues/3452
// for more information. One thing to try (and in fact, what an
// early version of this function did) is to actually fork a new
// node to run the code and parse its output. We instead run an
// entirely different JS parser, from the Babel project, but
// which at least has a nice API for reporting errors.
var parse = require('meteor-babel').parse;
try {
parse(wrapped, { strictMode: false });
} catch (parseError) {
if (typeof parseError.loc !== "object") {
throw parseError;
}
var err = new files.FancySyntaxError;
err.message = parseError.message;
if (parsedSourceMap) {
// XXX this duplicates code in computeGlobalReferences
var consumer2 = new sourcemap.SourceMapConsumer(parsedSourceMap);
var original = consumer2.originalPositionFor(parseError.loc);
if (original.source) {
err.file = original.source;
err.line = original.line;
err.column = original.column;
throw err;
}
}
err.file = filename; // *not* stackFilename
err.line = parseError.loc.line;
err.column = parseError.loc.column;
// adjust errors on line 1 to account for our header
if (err.line === 1) {
err.column -= header.length;
}
throw err;
}
// What? Node thought that this was a parse error and Babel didn't?
// Eh, just throw Node's error and don't care too much about the line
// numbers being right.
throw nodeParseError;
}
// Got a parse error. Unfortunately, we can't actually get the
// location of the parse error from the SyntaxError; Node has some
// hacky support for displaying it over stderr if you pass an
// undocumented third argument to stackFilename, but that's not
// what we want. See
// https://github.com/joyent/node/issues/3452
// for more information. One thing to try (and in fact, what an
// early version of this function did) is to actually fork a new
// node to run the code and parse its output. We instead run an
// entirely different JS parser, from the Babel project, but
// which at least has a nice API for reporting errors.
var parse = require('meteor-babel').parse;
try {
parse(wrapped, { strictMode: false });
} catch (parseError) {
if (typeof parseError.loc !== "object") {
throw parseError;
}
var err = new files.FancySyntaxError;
err.message = parseError.message;
var func = script.runInThisContext();
if (parsedSourceMap) {
// XXX this duplicates code in computeGlobalReferences
var consumer2 = new sourcemap.SourceMapConsumer(parsedSourceMap);
var original = consumer2.originalPositionFor(parseError.loc);
if (original.source) {
err.file = original.source;
err.line = original.line;
err.column = original.column;
throw err;
}
}
err.file = filename; // *not* stackFilename
err.line = parseError.loc.line;
err.column = parseError.loc.column;
// adjust errors on line 1 to account for our header
if (err.line === 1) {
err.column -= header.length;
}
throw err;
}
// What? Node thought that this was a parse error and Babel didn't?
// Eh, just throw Node's error and don't care too much about the line
// numbers being right.
throw nodeParseError;
}
var func = script.runInThisContext();
return (buildmessage.markBoundary(func)).apply(null, values);
return (buildmessage.markBoundary(func)).apply(null, values);
});
};
// - message: an error message from the parser

View File

@@ -700,18 +700,20 @@ export class Watcher {
// Given a WatchSet, returns true if it currently describes the state of the
// disk.
export function isUpToDate(watchSet) {
var upToDate = true;
var watcher = new Watcher({
watchSet: watchSet,
onChange: function () {
upToDate = false;
},
// internal flag which prevents us from starting watches and timers that
// we're about to cancel anyway
_justCheckOnce: true
return Profile.time('watch.isUpToDate', () => {
var upToDate = true;
var watcher = new Watcher({
watchSet: watchSet,
onChange: function () {
upToDate = false;
},
// internal flag which prevents us from starting watches and timers that
// we're about to cancel anyway
_justCheckOnce: true
});
watcher.stop();
return upToDate;
});
watcher.stop();
return upToDate;
}
// Options should have absPath/include/exclude/names.

View File

@@ -199,7 +199,9 @@ compiler.compile = Profile(function (packageSource, options) {
// - isopack
// - isopackCache
// - includeCordovaUnibuild
compiler.lint = function (packageSource, options) {
compiler.lint = Profile(function (packageSource, options) {
return `compiler.lint(${ packageSource.name || 'the app' })`;
}, function (packageSource, options) {
// Note: the buildmessage context of compiler.lint and lintUnibuild is a
// normal error message context (eg, there might be errors from initializing
// plugins in getLinterSourceProcessorSet). We return the linter warnings as
@@ -226,7 +228,7 @@ compiler.lint = function (packageSource, options) {
}
});
return {warnings, linted};
};
});
compiler.getMinifiers = function (packageSource, options) {
buildmessage.assertInJob();

View File

@@ -6,6 +6,7 @@ var files = require('../fs/files.js');
var isopackModule = require('./isopack.js');
var watch = require('../fs/watch.js');
var colonConverter = require('../utils/colon-converter.js');
var Profile = require('../tool-env/profile.js').Profile;
exports.IsopackCache = function (options) {
var self = this;
@@ -189,7 +190,9 @@ _.extend(exports.IsopackCache.prototype, {
if (buildmessage.jobHasMessages()) {
return;
}
self._loadLocalPackage(name, packageInfo, previousIsopack);
Profile.time('IsopackCache Build local isopack', () => {
self._loadLocalPackage(name, packageInfo, previousIsopack);
});
});
} else if (packageInfo.kind === 'versioned') {
// We don't have to build this package, and we don't have to build its
@@ -200,41 +203,44 @@ _.extend(exports.IsopackCache.prototype, {
}
var isopack = null, packagesToLoad = [];
if (previousIsopack) {
// We can always reuse a previous Isopack for a versioned package, since
// we assume that it never changes. (Admittedly, this means we won't
// notice if we download an additional build for the package.)
isopack = previousIsopack;
packagesToLoad = isopack.getStrongOrderedUsedAndImpliedPackages();
}
if (! isopack) {
// Load the isopack from disk.
buildmessage.enterJob(
"loading package " + name + "@" + packageInfo.version,
function () {
var pluginCacheDir;
if (self._pluginCacheDirRoot) {
pluginCacheDir = self._pluginCacheDirForVersion(
name, packageInfo.version);
files.mkdir_p(pluginCacheDir);
}
var isopackPath = self._tropohouse.packagePath(
name, packageInfo.version);
var Isopack = isopackModule.Isopack;
isopack = new Isopack();
isopack.initFromPath(name, isopackPath, {
pluginCacheDir: pluginCacheDir
Profile.time('IsopackCache Load local isopack', () => {
if (previousIsopack) {
// We can always reuse a previous Isopack for a versioned package, since
// we assume that it never changes. (Admittedly, this means we won't
// notice if we download an additional build for the package.)
isopack = previousIsopack;
packagesToLoad = isopack.getStrongOrderedUsedAndImpliedPackages();
}
if (! isopack) {
// Load the isopack from disk.
buildmessage.enterJob(
"loading package " + name + "@" + packageInfo.version,
function () {
var pluginCacheDir;
if (self._pluginCacheDirRoot) {
pluginCacheDir = self._pluginCacheDirForVersion(
name, packageInfo.version);
files.mkdir_p(pluginCacheDir);
}
var isopackPath = self._tropohouse.packagePath(
name, packageInfo.version);
var Isopack = isopackModule.Isopack;
isopack = new Isopack();
isopack.initFromPath(name, isopackPath, {
pluginCacheDir: pluginCacheDir
});
// If loading the isopack fails, then we don't need to look for more
// packages to load, but we should still recover by putting it in
// self._isopacks.
if (buildmessage.jobHasMessages()) {
return;
}
packagesToLoad = isopack.getStrongOrderedUsedAndImpliedPackages();
});
// If loading the isopack fails, then we don't need to look for more
// packages to load, but we should still recover by putting it in
// self._isopacks.
if (buildmessage.jobHasMessages()) {
return;
}
packagesToLoad = isopack.getStrongOrderedUsedAndImpliedPackages();
});
}
}
});
self._isopacks[name] = isopack;
// Also load its dependencies. This is so that if this package is being
@@ -280,8 +286,9 @@ _.extend(exports.IsopackCache.prototype, {
// _checkUpToDate already verified that
// isopackBuildInfoJson.pluginProviderPackageMap is a subset of
// self._packageMap, so this operation is correct. (It can't be done
// by isopack.initFromPath, because Isopack doesn't have access to the
// PackageMap, and specifically to the local catalog it knows about.)
// by isopack.initFromPath, because Isopack doesn't have access to
// the PackageMap, and specifically to the local catalog it knows
// about.)
isopack.setPluginProviderPackageMap(
self._packageMap.makeSubsetMap(
_.keys(isopackBuildInfoJson.pluginProviderPackageMap)));

View File

@@ -127,7 +127,7 @@ _.extend(Module.prototype, {
const node = file.getPrelinkedOutput({ preserveLineNumbers: true });
const results = Profile.time(
"getPrelinkedFiles toStringWithSourceMap (app)", () => {
"toStringWithSourceMap (app)", () => {
return node.toStringWithSourceMap({
file: file.servePath
}); // results has 'code' and 'map' attributes
@@ -191,7 +191,7 @@ _.extend(Module.prototype, {
var node = new sourcemap.SourceNode(null, null, null, chunks);
Profile.time(
'getPrelinkedFiles toStringWithSourceMap (packages)',
'getPrelinkedFiles toStringWithSourceMap',
function () {
const { code, map } = node.toStringWithSourceMap({
file: self.combinedServePath

View File

@@ -217,27 +217,41 @@ _.extend(ProjectContext.prototype, {
},
readProjectMetadata: function () {
// don't generate a profiling report for this stage (Profile.run),
// because all we do here is read a handful of files.
this._completeStagesThrough(STAGE.READ_PROJECT_METADATA);
},
initializeCatalog: function () {
this._completeStagesThrough(STAGE.INITIALIZE_CATALOG);
Profile.run('ProjectContext initializeCatalog', () => {
this._completeStagesThrough(STAGE.INITIALIZE_CATALOG);
});
},
resolveConstraints: function () {
this._completeStagesThrough(STAGE.RESOLVE_CONSTRAINTS);
Profile.run('ProjectContext resolveConstraints', () => {
this._completeStagesThrough(STAGE.RESOLVE_CONSTRAINTS);
});
},
downloadMissingPackages: function () {
this._completeStagesThrough(STAGE.DOWNLOAD_MISSING_PACKAGES);
Profile.run('ProjectContext downloadMissingPackages', () => {
this._completeStagesThrough(STAGE.DOWNLOAD_MISSING_PACKAGES);
});
},
buildLocalPackages: function () {
this._completeStagesThrough(STAGE.BUILD_LOCAL_PACKAGES);
Profile.run('ProjectContext buildLocalPackages', () => {
this._completeStagesThrough(STAGE.BUILD_LOCAL_PACKAGES);
});
},
saveChangedMetadata: function () {
this._completeStagesThrough(STAGE.SAVE_CHANGED_METADATA);
Profile.run('ProjectContext saveChangedMetadata', () => {
this._completeStagesThrough(STAGE.SAVE_CHANGED_METADATA);
});
},
prepareProjectForBuild: function () {
// This is the same as saveChangedMetadata, but if we insert stages after
// that one it will continue to mean "fully finished".
this.saveChangedMetadata();
Profile.run('ProjectContext prepareProjectForBuild', () => {
this._completeStagesThrough(STAGE.SAVE_CHANGED_METADATA);
});
},
_completeStagesThrough: function (targetStage) {
@@ -270,12 +284,13 @@ _.extend(ProjectContext.prototype, {
return this.getProjectLocalDirectory("shell");
},
// You can call this manually if you want to do some work before resolving
// constraints, or you can let prepareProjectForBuild do it for you.
// You can call this manually (that is, the public version without
// an `_`) if you want to do some work before resolving constraints,
// or you can let prepareProjectForBuild do it for you.
//
// This should be pretty fast --- for example, we shouldn't worry about
// needing to wait for it to be done before we open the runner proxy.
_readProjectMetadata: function () {
_readProjectMetadata: Profile('_readProjectMetadata', function () {
var self = this;
buildmessage.assertInCapture();
@@ -335,7 +350,7 @@ _.extend(ProjectContext.prototype, {
});
self._completedStage = STAGE.READ_PROJECT_METADATA;
},
}),
_ensureProjectDir: function () {
var self = this;
@@ -427,7 +442,7 @@ _.extend(ProjectContext.prototype, {
self.appIdentifier = appId;
},
_resolveConstraints: function () {
_resolveConstraints: Profile('_resolveConstraints', function () {
var self = this;
buildmessage.assertInJob();
@@ -490,7 +505,7 @@ _.extend(ProjectContext.prototype, {
var solution;
try {
Profile.run(
Profile.time(
"Select Package Versions" +
(resolverRunCount > 1 ? (" (Try " + resolverRunCount + ")") : ""),
function () {
@@ -530,7 +545,7 @@ _.extend(ProjectContext.prototype, {
self._completedStage = STAGE.RESOLVE_CONSTRAINTS;
});
});
},
}),
_localPackageSearchDirs: function () {
var self = this;
@@ -557,7 +572,7 @@ _.extend(ProjectContext.prototype, {
// but does not compile the packages.
//
// Must be run in a buildmessage context. On build error, returns null.
_initializeCatalog: function () {
_initializeCatalog: Profile('_initializeCatalog', function () {
var self = this;
buildmessage.assertInJob();
@@ -594,7 +609,7 @@ _.extend(ProjectContext.prototype, {
}
);
});
},
}),
_getRootDepsAndConstraints: function () {
var self = this;
@@ -691,7 +706,7 @@ _.extend(ProjectContext.prototype, {
return resolver;
},
_downloadMissingPackages: function () {
_downloadMissingPackages: Profile('_downloadMissingPackages', function () {
var self = this;
buildmessage.assertInJob();
if (!self.packageMap)
@@ -707,9 +722,9 @@ _.extend(ProjectContext.prototype, {
self._completedStage = STAGE.DOWNLOAD_MISSING_PACKAGES;
});
});
},
}),
_buildLocalPackages: function () {
_buildLocalPackages: Profile('_buildLocalPackages', function () {
var self = this;
buildmessage.assertInCapture();
@@ -735,9 +750,9 @@ _.extend(ProjectContext.prototype, {
self.isopackCache.buildLocalPackages();
});
self._completedStage = STAGE.BUILD_LOCAL_PACKAGES;
},
}),
_saveChangedMetadata: function () {
_saveChangedMetadata: Profile('_saveChangedMetadata', function () {
var self = this;
// Save any changes to .meteor/packages.
@@ -756,7 +771,7 @@ _.extend(ProjectContext.prototype, {
}
self._completedStage = STAGE.SAVE_CHANGED_METADATA;
}
})
});

View File

@@ -572,7 +572,7 @@ _.extend(AppRunner.prototype, {
});
}
var bundleResult = Profile.run("Rebuild App", function () {
var bundleResult = Profile.run((firstRun?"B":"Reb")+"uild App", () => {
var includeNodeModules = 'symlink';
// On Windows we cannot symlink node_modules. Copying them is too slow.

View File

@@ -223,6 +223,7 @@ var formatMs = function (n) {
var globalEntry = [];
var running = false;
var runningName;
var start = function () {
bucketStats = {};
@@ -416,7 +417,7 @@ var reportHotLeaves = function () {
return a.time === b.time ? 0 : a.time > b.time ? -1 : 1;
});
_.each(totals, function (total) {
if (total.time < filter) {
if (total.time < 100) { // hard-coded larger filter to qualify as "hot" here
return;
}
print(leftRightDots(total.name, formatMs(total.time), 65));
@@ -438,6 +439,8 @@ var setupReport = function () {
});
};
var reportNum = 1;
var report = function () {
if (! enabled) {
return;
@@ -449,16 +452,30 @@ var report = function () {
print('');
reportHotLeaves();
print('');
print('Total: ' + formatMs(getTopLevelTotal()));
print(`(#${reportNum}) Total: ${formatMs(getTopLevelTotal())}` +
` (${runningName})`);
print('');
};
var run = function (bucketName, f) {
start();
try {
if (! enabled) {
return f();
} else if (running) {
// We've kept the calls to Profile.run in the tool disjoint so far,
// and should probably keep doing so, but if we mess up, warn and continue.
console.log("Warning: Nested Profile.run at " + bucketName);
return time(bucketName, f);
}
finally {
report();
} else {
runningName = bucketName;
print(`(#${reportNum}) Profiling: ${runningName}`);
start();
try {
return time(bucketName, f);
}
finally {
report();
reportNum++;
}
}
};