Checkpoint for actually using WatchSets.

Have not yet touched initFromAppDir.
This commit is contained in:
David Glasser
2013-07-31 15:02:14 -07:00
parent 3f5edbfae9
commit ea22b3ab02
5 changed files with 135 additions and 201 deletions

View File

@@ -1,5 +1,6 @@
var path = require('path');
var files = require(path.join(__dirname, 'files.js'));
var watch = require('./watch.js');
var fs = require('fs');
var _ = require('underscore');
@@ -51,7 +52,7 @@ var Builder = function (options) {
files.rm_recursive(self.buildPath);
files.mkdir_p(self.buildPath, 0755);
self.dependencyInfo = { directories: {}, files: {} };
self.watchSet = new watch.WatchSet();
// XXX cleaner error handling. don't make the humans read an
// exception (and, make suitable for use in automated systems)
@@ -151,7 +152,7 @@ _.extend(Builder.prototype, {
//
// Returns the final canonicalize relPath that was written to.
//
// If `file` is used then a dependency will be added on that file.
// If `file` is used then it will be added to the builder's WatchSet.
write: function (relPath, options) {
var self = this;
options = options || {};
@@ -175,7 +176,7 @@ _.extend(Builder.prototype, {
} else if (options.file) {
var sourcePath = path.resolve(options.file);
data = fs.readFileSync(sourcePath);
self.dependencyInfo.files[sourcePath] = sha1(data);
self.watchSet.addFile(sourcePath, sha1(data));
}
self._ensureDirectory(path.dirname(relPath));
@@ -289,7 +290,7 @@ _.extend(Builder.prototype, {
// bundle. But if the symlink option was passed to the Builder
// constructor, then make a symlink instead, if possible.
//
// This does NOT add any dependencies.
// This does NOT add anything to the WatchSet.
//
// Options:
// - from: source path on local disk to copy from
@@ -436,11 +437,11 @@ _.extend(Builder.prototype, {
files.rm_recursive(self.buildPath);
},
// Return all dependency info that has accumulated, in the format
// expected by watch.Watcher.
getDependencyInfo: function () {
// Returns a WatchSet representing all files that were read from disk by the
// builder.
getWatchSet: function () {
var self = this;
return self.dependencyInfo;
return self.watchSet;
}
});

View File

@@ -167,6 +167,7 @@ var _ = require('underscore');
var project = require(path.join(__dirname, 'project.js'));
var builder = require(path.join(__dirname, 'builder.js'));
var unipackage = require(path.join(__dirname, 'unipackage.js'));
var watch = require('./watch.js');
var Fiber = require('fibers');
var Future = require(path.join('fibers', 'future'));
var sourcemap = require('source-map');
@@ -400,9 +401,8 @@ var Target = function (options) {
// the order given.
self.js = [];
// Files and paths used by this target, in the format used by
// watch.Watcher.
self.dependencyInfo = {files: {}, directories: {}};
// On-disk dependencies of this target.
self.watchSet = new watch.WatchSet();
// node_modules directories that we need to copy into the target (or
// otherwise make available at runtime.) A map from an absolute path
@@ -669,13 +669,8 @@ _.extend(Target.prototype, {
throw new Error("Unknown type " + resource.type);
});
// Depend on the source files that produced these
// resources. (Since the dependencyInfo.directories should be
// disjoint, it should be OK to merge them this way.)
_.extend(self.dependencyInfo.files,
slice.dependencyInfo.files);
_.extend(self.dependencyInfo.directories,
slice.dependencyInfo.directories);
// Depend on the source files that produced these resources.
self.watchSet.merge(slice.watchSet);
});
},
@@ -705,11 +700,10 @@ _.extend(Target.prototype, {
});
},
// Return all dependency info for this target, in the format
// expected by watch.Watcher.
getDependencyInfo: function () {
// Return the WatchSet for this target's dependency info.
getWatchSet: function () {
var self = this;
return self.dependencyInfo;
return self.watchSet;
},
// Return the most inclusive architecture with which this target is
@@ -765,7 +759,7 @@ _.extend(ClientTarget.prototype, {
var templatePath = path.join(__dirname, "app.html.in");
var template = fs.readFileSync(templatePath);
self.dependencyInfo.files[templatePath] = Builder.sha1(template);
self.watchSet.addFile(templatePath, Builder.sha1(template));
var f = require('handlebars').compile(template.toString());
return new Buffer(f({
@@ -1350,8 +1344,8 @@ var writeFile = function (file, builder) {
// path of a directory that should be created to contain the generated
// site archive.
//
// Returns dependencyInfo (in the format expected by watch.Watcher)
// for all files and directories that ultimately went into the bundle.
// Returns a watch.WatchSet for all files and directories that ultimately went
// into the bundle.
//
// options:
// - nodeModulesMode: "skip", "symlink", "copy"
@@ -1457,25 +1451,17 @@ var writeSiteArchive = function (targets, outputPath, options) {
// Control file
builder.writeJson('star.json', json);
// Merge the dependencyInfo of everything that went into the
// bundle. A naive merge like this doesn't work in general but
// should work in this case.
var fileDeps = {}, directoryDeps = {};
// Merge the WatchSet of everything that went into the bundle.
var watchSet = new watch.WatchSet();
var dependencySources = [builder].concat(_.values(targets));
_.each(dependencySources, function (s) {
var info = s.getDependencyInfo();
_.extend(fileDeps, info.files);
_.extend(directoryDeps, info.directories);
watchSet.merge(s.getWatchSet());
});
// We did it!
builder.complete();
return {
files: fileDeps,
directories: directoryDeps
};
return watchSet;
} catch (e) {
builder.abort();
throw e;
@@ -1495,10 +1481,9 @@ var writeSiteArchive = function (targets, outputPath, options) {
*
* Returns an object with keys:
* - errors: A buildmessage.MessageSet, or falsy if bundling succeeded.
* - dependencyInfo: Information about files and paths that were
* - watchSet: Information about files and paths that were
* inputs into the bundle and that we may wish to monitor for
* changes when developing interactively. It has two keys, 'files'
* and 'directories', in the format expected by watch.Watcher.
* changes when developing interactively, as a watch.WatchSet.
*
* On failure ('errors' is truthy), no bundle will be output (in fact,
* outputPath will have been removed if it existed.)
@@ -1545,7 +1530,7 @@ exports.bundle = function (appDir, outputPath, options) {
" " + options.releaseStamp : "");
var success = false;
var dependencyInfo = { files: {}, directories: {} };
var watchSet = new watch.WatchSet();
var messages = buildmessage.capture({
title: "building the application"
}, function () {
@@ -1749,7 +1734,7 @@ exports.bundle = function (appDir, outputPath, options) {
controlProgram = undefined;
// Write to disk
dependencyInfo = writeSiteArchive(targets, outputPath, {
watchSet = writeSiteArchive(targets, outputPath, {
nodeModulesMode: options.nodeModulesMode,
builtBy: builtBy,
controlProgram: controlProgram
@@ -1763,8 +1748,8 @@ exports.bundle = function (appDir, outputPath, options) {
return {
errors: success ? false : messages,
dependencyInfo: dependencyInfo
} ;
watchSet: watchSet
};
};
// Make a JsImage object (a complete, linked, ready-to-go JavaScript
@@ -1774,7 +1759,7 @@ exports.bundle = function (appDir, outputPath, options) {
//
// Returns an object with keys:
// - image: The created JsImage object.
// - dependencyInfo: Source file dependency info (see bundle().)
// - watchSet: Source file WatchSet (see bundle().)
//
// XXX return an 'errors' key for symmetry with bundle(), rather than
// letting exceptions escape?
@@ -1828,7 +1813,7 @@ exports.buildJsImage = function (options) {
return {
image: target.toJsImage(),
dependencyInfo: target.getDependencyInfo()
watchSet: target.getWatchSet()
};
};

View File

@@ -87,10 +87,10 @@ var rejectBadPath = function (p) {
// - implies
// - getSourcesFunc
// - exports
// - dependencyInfo
// - watchSet
// - nodeModulesPath
//
// Do not include the source files in dependencyInfo. They will be
// Do not include the source files in watchSet. They will be
// added at compile time when the sources are actually read.
var Slice = function (pkg, options) {
var self = this;
@@ -160,10 +160,8 @@ var Slice = function (pkg, options) {
self.declaredExports = options.declaredExports || null;
// Files and directories that we want to monitor for changes in
// development mode, such as source files and package.js, in the
// format accepted by watch.Watcher.
self.dependencyInfo = options.dependencyInfo ||
{ files: {}, directories: {} };
// development mode, such as source files and package.js, as a watch.WatchSet.
self.watchSet = options.watchSet || new watch.WatchSet();
// Has this slice been compiled?
self.isBuilt = false;
@@ -268,7 +266,7 @@ _.extend(Slice.prototype, {
var ext = path.extname(relPath).substr(1);
var handler = !fileOptions.isAsset && self._getSourceHandler(ext);
var contents = fs.readFileSync(absPath);
self.dependencyInfo.files[absPath] = Builder.sha1(contents);
self.watchSet.addFile(absPath, Builder.sha1(contents));
if (! handler) {
// If we don't have an extension handler, serve this file as a
@@ -497,20 +495,16 @@ _.extend(Slice.prototype, {
jsAnalyze: jsAnalyze
});
// Add dependencies on the source code to any plugins that we
// could have used (we need to depend even on plugins that we
// didn't use, because if they were changed they might become
// relevant to us)
//
// XXX I guess they're probably properly disjoint since plugins
// probably include only file dependencies? Anyway it would be a
// strange situation if plugin source directories overlapped with
// other parts of your app
// Add dependencies on the source code to any plugins that we could have
// used. We need to depend even on plugins that we didn't use, because if
// they were changed they might become relevant to us. This means that we
// end up depending on every source file contributing to all plugins in the
// packages we use (including source files from other packages that the
// plugin program itself uses), as well as the package.js file from every
// package we directly use (since changing the package.js may add or remove
// a plugin).
_.each(self._activePluginPackages(), function (otherPkg) {
_.extend(self.dependencyInfo.files,
otherPkg.pluginDependencyInfo.files);
_.extend(self.dependencyInfo.directories,
otherPkg.pluginDependencyInfo.directories);
self.watchSet.merge(otherPkg.pluginWatchSet);
});
self.prelinkFiles = results.files;
@@ -835,19 +829,17 @@ var Package = function (library) {
// pluginsBuilt is true.
self.plugins = {};
// Full transitive dependencies for all plugins in this package, as well as
// this package's package.js. If any of these dependencies change, not only
// may our plugins need to be rebuilt, but any package that directly uses this
// package needs to be rebuilt in case the change to plugins affected
// compilation.
// A WatchSet for the full transitive dependencies for all plugins in this
// package, as well as this package's package.js. If any of these dependencies
// change, our plugins need to be rebuilt... but also, any package that
// directly uses this package needs to be rebuilt in case the change to
// plugins affected compilation.
//
// Complete only when pluginsBuilt is true.
// XXX Refactor so that slice and plugin dependencies are handled by
// the same mechanism.
self.pluginDependencyInfo = { files: {}, directories: {} };
self.pluginWatchSet = new watch.WatchSet();
// True if plugins have been initialized (if
// _ensurePluginsInitialized has been called)
// True if plugins have been initialized (if _ensurePluginsInitialized has
// been called)
self._pluginsInitialized = false;
// Source file handlers registered by plugins. Map from extension
@@ -1031,16 +1023,10 @@ _.extend(Package.prototype, {
info.name))
});
if (buildResult.dependencyInfo) {
// Merge plugin dependencies
// XXX is naive merge sufficient here? should be, because
// plugins can't (for now) contain directory dependencies?
_.extend(self.pluginDependencyInfo.files,
buildResult.dependencyInfo.files);
_.extend(self.pluginDependencyInfo.directories,
buildResult.dependencyInfo.directories);
}
// Add this plugin's dependencies to our "plugin dependency" WatchSet.
self.pluginWatchSet.merge(buildResult.watchSet);
// Register the built plugin's code.
self.plugins[info.name] = buildResult.image;
});
});
@@ -1156,7 +1142,7 @@ _.extend(Package.prototype, {
// changes, because a change to package.js might add or remove a plugin,
// which could change a file from being handled by extension vs treated as
// an asset.
self.pluginDependencyInfo.files[packageJsPath] = packageJsHash;
self.pluginWatchSet.addFile(packageJsPath, packageJsHash);
// == 'Package' object visible in package.js ==
var Package = {
@@ -1728,10 +1714,12 @@ _.extend(Package.prototype, {
uses[role][where].unshift({ spec: "meteor" });
}
// We need to create a separate (non ===) copy of
// dependencyInfo for each slice.
var dependencyInfo = { files: {}, directories: {} };
dependencyInfo.files[packageJsPath] = packageJsHash;
// Each slice has its own separate WatchSet. This is so that, eg, a test
// slice's dependencies doesn't end up getting merged into the
// pluginWatchSet of a package that uses it: only the use slice's
// dependencies need to go there!
var watchSet = new watch.WatchSet();
watchSet.addFile(packageJsPath, packageJsHash);
self.slices.push(new Slice(self, {
name: ({ use: "main", test: "tests" })[role],
@@ -1741,7 +1729,7 @@ _.extend(Package.prototype, {
getSourcesFunc: function () { return sources[role][where]; },
noExports: role === "test",
declaredExports: role === "use" ? exports[where] : null,
dependencyInfo: dependencyInfo,
watchSet: watchSet,
nodeModulesPath: arch === osArch && nodeModulesPath || undefined
}));
});
@@ -1947,16 +1935,22 @@ _.extend(Package.prototype, {
// XXX should comprehensively sanitize (eg, typecheck) everything
// read from json files
// Read the dependency info (if present), and make the strings
// back into regexps
var sliceDependencies = buildInfoJson.sliceDependencies || {};
_.each(sliceDependencies, function (dependencyInfo, sliceTag) {
sliceDependencies[sliceTag] =
makeDependencyInfoIntoRegexps(dependencyInfo);
// Read the watch sets for each slice; keep them separate (for passing to
// the Slice constructor below) as well as merging them into one big
// WatchSet.
var mergedWatchSet = new watch.WatchSet();
var sliceWatchSets = {};
_.each(buildInfoJson.sliceDependencies, function (watchSetJSON, sliceTag) {
var watchSet = watch.WatchSet.fromJSON(watchSetJSON);
mergedWatchSet.merge(watchSet);
sliceWatchSets[sliceTag] = watchSet;
});
self.pluginDependencyInfo = makeDependencyInfoIntoRegexps(
self.pluginWatchSet = watch.WatchSet.fromJSON(
buildInfoJson.pluginDependencies);
// This might be redundant (since pluginWatchSet was probably merged into
// each slice watchSet when it was built) but shouldn't hurt.
mergedWatchSet.merge(self.pluginWatchSet);
// If we're supposed to check the dependencies, go ahead and do so
if (options.onlyIfUpToDate) {
@@ -1976,7 +1970,7 @@ _.extend(Package.prototype, {
return false;
}
if (! self.checkUpToDate(sliceDependencies))
if (! self.checkUpToDate(mergedWatchSet))
return false;
}
@@ -2034,7 +2028,7 @@ _.extend(Package.prototype, {
var slice = new Slice(self, {
name: sliceMeta.name,
arch: sliceMeta.arch,
dependencyInfo: sliceDependencies[sliceMeta.path],
watchSet: sliceWatchSets[sliceMeta.path],
nodeModulesPath: nodeModulesPath,
uses: _.map(sliceJson.uses, function (u) {
return {
@@ -2107,50 +2101,26 @@ _.extend(Package.prototype, {
return true;
},
// Try to check if this package is up-to-date (that is, whether its
// source files have been modified.) True if we have dependency info
// and it says that the package is up-to-date. False if a source
// file has changed.
// Try to check if this package is up-to-date (that is, whether its source
// files have been modified.) True if we have dependency info and it says that
// the package is up-to-date. False if a source file has changed.
//
// The argument _sliceDependencies is used when reading from disk when there
// are no slices yet; don't pass it from outside this file.
checkUpToDate: function (_sliceDependencies) {
// The argument _watchSet is used when reading from disk when there are no
// slices yet; don't pass it from outside this file.
checkUpToDate: function (_watchSet) {
var self = this;
// Compute the dependency info to use
var dependencyInfo = { files: {}, directories: {} };
var merge = function (di) {
// XXX is naive merge sufficient here?
_.extend(dependencyInfo.files, di.files);
_.extend(dependencyInfo.directories, di.directories);
};
if (_sliceDependencies) {
_.each(_sliceDependencies, function (dependencyInfo, sliceTag) {
merge(dependencyInfo);
});
} else {
if (!_watchSet) {
// This call was on an already-fully-loaded Package and we just want to
// see if it's changed. So we have some watchSets inside ourselves.
_watchSet = new watch.WatchSet();
_watchSet.merge(self.pluginWatchSet);
_.each(self.slices, function (slice) {
merge(slice.dependencyInfo);
_watchSet.merge(slice.watchSet);
});
}
// XXX There used to be a concept in this file of "packages loaded from disk
// without having dependencyInfo, but it was unclear when that would happen,
// so this was removed.
var isUpToDate = true;
var watcher = new watch.Watcher({
files: dependencyInfo.files,
directories: dependencyInfo.directories,
onChange: function () {
isUpToDate = false;
}
});
watcher.stop();
return isUpToDate;
return watch.isUpToDate(_watchSet);
},
// True if this package can be saved as a unipackage
@@ -2196,8 +2166,7 @@ _.extend(Package.prototype, {
var buildInfoJson = {
builtBy: exports.BUILT_BY,
sliceDependencies: { },
pluginDependencies: makeDependencyInfoSerializable(
self.pluginDependencyInfo),
pluginDependencies: self.pluginWatchSet.toJSON(),
source: options.buildOfPath || undefined
};
@@ -2249,7 +2218,7 @@ _.extend(Package.prototype, {
// Save slice dependencies. Keyed by the json path rather than thinking
// too hard about how to encode pair (name, arch).
buildInfoJson.sliceDependencies[sliceJsonFile] =
makeDependencyInfoSerializable(slice.dependencyInfo);
slice.watchSet.toJSON();
// Construct slice metadata
var sliceJson = {
@@ -2386,38 +2355,6 @@ _.extend(Package.prototype, {
}
});
// Convert regex to string.
var makeDependencyInfoSerializable = function (dependencyInfo) {
if (!dependencyInfo)
dependencyInfo = { files: {}, directories: {} };
var out = {files: dependencyInfo.files, directories: {}};
_.each(dependencyInfo.directories, function (d, path) {
var dirInfo = out.directories[path] = {};
_.each(["include", "exclude"], function (k) {
dirInfo[k] = _.map(d[k], function (r) {
return r.source;
});
});
});
return out;
};
// Convert string to regex.
var makeDependencyInfoIntoRegexps = function (dependencyInfo) {
if (!dependencyInfo)
dependencyInfo = { files: {}, directories: {} };
var out = {files: dependencyInfo.files, directories: {}};
_.each(dependencyInfo.directories, function (d, path) {
var dirInfo = out.directories[path] = {};
_.each(["include", "exclude"], function (k) {
dirInfo[k] = _.map(d[k], function (s) {
return new RegExp(s);
});
});
});
return out;
};
var packages = exports;
_.extend(exports, {
Package: Package

View File

@@ -471,7 +471,7 @@ exports.run = function (context, options) {
library: context.library
};
var startWatching = function (dependencyInfo) {
var startWatching = function (watchSet) {
if (!Status.shouldRestart)
return;
@@ -479,8 +479,7 @@ exports.run = function (context, options) {
watcher.stop();
watcher = new watch.Watcher({
files: dependencyInfo.files,
directories: dependencyInfo.directories,
watchSet: watchSet,
onChange: function () {
if (Status.crashing)
logToClients({'system': "=> Modified -- restarting."});
@@ -524,12 +523,12 @@ exports.run = function (context, options) {
// Bundle up the app
var bundleResult = bundler.bundle(context.appDir, bundlePath, bundleOpts);
var dependencyInfo = bundleResult.dependencyInfo;
var watchSet = bundleResult.watchSet;
if (bundleResult.errors) {
logToClients({stdout: "=> Errors prevented startup:\n\n" +
bundleResult.errors.formatMessages() + "\n"});
Status.hardCrashed("has errors");
startWatching(dependencyInfo);
startWatching(watchSet);
return;
}
@@ -546,32 +545,14 @@ exports.run = function (context, options) {
Builder.sha1(fs.readFileSync(options.settingsFile, "utf8"));
// Reload if the setting file changes
dependencyInfo.files[path.resolve(options.settingsFile)] =
settingsHash;
}
// If using a warehouse, don't do dependency monitoring on any of
// the files that are in the warehouse. You should not be editing
// those files directly.
if (files.usesWarehouse()) {
var warehouseDir = path.resolve(warehouse.getWarehouseDir());
var filterKeys = function (obj) {
_.each(_.keys(obj), function (k) {
k = path.resolve(k);
if (warehouseDir.length <= k.length &&
k.substr(0, warehouseDir.length) === warehouseDir)
delete obj[k];
});
};
filterKeys(dependencyInfo.files);
filterKeys(dependencyInfo.directories);
watchSet.addFile(path.resolve(options.settingsFile), settingsHash);
}
// Start watching for changes for files. There's no hurry to call
// this, since dependencyInfo contains a snapshot of the state of
// this, since watchSet contains a snapshot of the state of
// the world at the time of bundling, in the form of hashes and
// lists of matching files in each directory.
startWatching(dependencyInfo);
startWatching(watchSet);
// Start the server
Status.running = true;

View File

@@ -197,6 +197,10 @@ _.extend(WatchSet.prototype, {
WatchSet.fromJSON = function (json) {
var set = new WatchSet;
if (!json)
return set;
if (json.alwaysFire) {
set.alwaysFire = true;
return set;
@@ -265,6 +269,7 @@ var readDirectory = function (options) {
return filtered;
};
// All fields are private.
var Watcher = function (options) {
var self = this;
@@ -280,6 +285,7 @@ var Watcher = function (options) {
throw new Error("onChange option is required");
self.stopped = false;
self.justCheckOnce = !!options._justCheckOnce;
self.fileWatches = []; // array of paths
self.directoryWatches = []; // array of watch object
@@ -366,7 +372,7 @@ _.extend(Watcher.prototype, {
return true;
}
if (!isDoubleCheck) {
if (!isDoubleCheck && !self.justCheckOnce) {
// Whenever a directory changes, scan it soon as we notice,
// but then scan it again one second later just to make sure
// that we haven't missed any changes. See commentary at
@@ -396,6 +402,9 @@ _.extend(Watcher.prototype, {
if (self._fireIfFileChanged(absPath))
return;
if (self.justCheckOnce)
return;
// Intentionally not using fs.watch since it doesn't play well with
// vim (https://github.com/joyent/node/issues/3172)
// Note that we poll very frequently (500 ms)
@@ -407,7 +416,7 @@ _.extend(Watcher.prototype, {
self.fileWatches.push(absPath);
});
if (self.stopped)
if (self.stopped || self.justCheckOnce)
return;
// One second later, check the files again, because fs.watchFile
@@ -439,6 +448,9 @@ _.extend(Watcher.prototype, {
if (self._fireIfDirectoryChanged(info))
return;
if (self.stopped || self.justCheckOnce)
return;
// fs.watchFile doesn't work for directories (as tested on ubuntu)
// Notice that we poll very frequently (500 ms)
try {
@@ -493,8 +505,26 @@ _.extend(Watcher.prototype, {
}
});
// Given a WatchSet, returns true if it currently describes the state of the
// disk.
var isUpToDate = function (watchSet) {
var upToDate = true;
var watcher = new Watcher({
watchSet: watchSet,
onChange: function () {
upToDate = false;
},
// internal flag which prevents us from starting watches and timers that
// we're about to cancel anyway
_justCheckOnce: true
});
watcher.stop();
return upToDate;
};
_.extend(exports, {
WatchSet: WatchSet,
Watcher: Watcher,
readDirectory: readDirectory
readDirectory: readDirectory,
isUpToDate: isUpToDate
});