Merge branch 'dep-rewrite-v2' into devel

This commit is contained in:
David Glasser
2013-07-31 23:56:51 -07:00
9 changed files with 875 additions and 873 deletions

View File

@@ -1,3 +1,7 @@
// IF YOU MAKE ANY CHANGES TO THIS PACKAGE THAT COULD AFFECT ITS OUTPUT, YOU
// MUST UPDATE BUILT_BY IN tools/packages.js. Otherwise packages may not be
// rebuilt with the new changes.
Package.describe({
summary: "JavaScript code analysis for Meteor",
internal: true

View File

@@ -1,5 +1,6 @@
var path = require('path');
var files = require(path.join(__dirname, 'files.js'));
var watch = require('./watch.js');
var fs = require('fs');
var _ = require('underscore');
@@ -51,7 +52,7 @@ var Builder = function (options) {
files.rm_recursive(self.buildPath);
files.mkdir_p(self.buildPath, 0755);
self.dependencyInfo = { directories: {}, files: {} };
self.watchSet = new watch.WatchSet();
// XXX cleaner error handling. don't make the humans read an
// exception (and, make suitable for use in automated systems)
@@ -151,7 +152,7 @@ _.extend(Builder.prototype, {
//
// Returns the final canonicalize relPath that was written to.
//
// If `file` is used then a dependency will be added on that file.
// If `file` is used then it will be added to the builder's WatchSet.
write: function (relPath, options) {
var self = this;
options = options || {};
@@ -173,9 +174,7 @@ _.extend(Builder.prototype, {
throw new Error("May only pass one of data and file, not both");
data = options.data;
} else if (options.file) {
var sourcePath = path.resolve(options.file);
data = fs.readFileSync(sourcePath);
self.dependencyInfo.files[sourcePath] = sha1(data);
data = watch.readAndWatchFile(self.watchSet, path.resolve(options.file));
}
self._ensureDirectory(path.dirname(relPath));
@@ -289,9 +288,7 @@ _.extend(Builder.prototype, {
// bundle. But if the symlink option was passed to the Builder
// constructor, then make a symlink instead, if possible.
//
// Adds dependencies both on the files that were copied, and on the
// contents of the directory tree (respecting 'ignore'.) Disable
// this with depend: false.
// This does NOT add anything to the WatchSet.
//
// Options:
// - from: source path on local disk to copy from
@@ -299,15 +296,12 @@ _.extend(Builder.prototype, {
// receive the files
// - ignore: array of regexps of filenames (that is, basenames) to
// ignore (they may still be visible in the output bundle if
// symlinks are being used)
// - depend: Should dependencies be added? Defaults to true.
// symlinks are being used). Like with WatchSets, they match against
// entries that end with a slash if it's a directory.
copyDirectory: function (options) {
var self = this;
options = options || {};
var createDependencies =
('depend' in options) ? options.depend : true;
var normOptionsTo = options.to;
if (normOptionsTo.slice(-1) === path.sep)
normOptionsTo = normOptionsTo.slice(0, -1);
@@ -343,24 +337,24 @@ _.extend(Builder.prototype, {
}
var ignore = options.ignore || [];
if (createDependencies) {
self.dependencyInfo.directories[absPathTo] = {
include: [/.?/],
exclude: ignore
};
}
var walk = function (absFrom, relTo) {
self._ensureDirectory(relTo);
_.each(fs.readdirSync(absFrom), function (item) {
if (_.any(ignore, function (pattern) {
return item.match(pattern);
})) return; // skip excluded files
var thisAbsFrom = path.resolve(absFrom, item);
var thisRelTo = path.join(relTo, item);
if (fs.statSync(thisAbsFrom).isDirectory()) {
var isDir = fs.statSync(thisAbsFrom).isDirectory();
var itemForMatch = item;
if (isDir)
itemForMatch += '/';
if (_.any(ignore, function (pattern) {
return itemForMatch.match(pattern);
})) return; // skip excluded files
if (isDir) {
walk(thisAbsFrom, thisRelTo);
return;
}
@@ -368,9 +362,6 @@ _.extend(Builder.prototype, {
// XXX avoid reading whole file into memory
var data = fs.readFileSync(thisAbsFrom);
if (createDependencies)
self.dependencyInfo.files[thisAbsFrom] = sha1(data);
fs.writeFileSync(path.resolve(self.buildPath, thisRelTo), data);
self.usedAsFile[thisRelTo] = true;
});
@@ -451,11 +442,11 @@ _.extend(Builder.prototype, {
files.rm_recursive(self.buildPath);
},
// Return all dependency info that has accumulated, in the format
// expected by watch.Watcher.
getDependencyInfo: function () {
// Returns a WatchSet representing all files that were read from disk by the
// builder.
getWatchSet: function () {
var self = this;
return self.dependencyInfo;
return self.watchSet;
}
});

View File

@@ -167,6 +167,7 @@ var _ = require('underscore');
var project = require(path.join(__dirname, 'project.js'));
var builder = require(path.join(__dirname, 'builder.js'));
var unipackage = require(path.join(__dirname, 'unipackage.js'));
var watch = require('./watch.js');
var Fiber = require('fibers');
var Future = require(path.join('fibers', 'future'));
var sourcemap = require('source-map');
@@ -174,9 +175,9 @@ var sourcemap = require('source-map');
// files to ignore when bundling. node has no globs, so use regexps
var ignoreFiles = [
/~$/, /^\.#/, /^#.*#$/,
/^\.DS_Store$/, /^ehthumbs\.db$/, /^Icon.$/, /^Thumbs\.db$/,
/^\.meteor$/, /* avoids scanning N^2 files when bundling all packages */
/^\.git$/ /* often has too many files to watch */
/^\.DS_Store\/?$/, /^ehthumbs\.db$/, /^Icon.$/, /^Thumbs\.db$/,
/^\.meteor\/$/, /* avoids scanning N^2 files when bundling all packages */
/^\.git\/$/ /* often has too many files to watch */
];
// http://davidshariff.com/blog/javascript-inheritance-patterns/
@@ -400,9 +401,8 @@ var Target = function (options) {
// the order given.
self.js = [];
// Files and paths used by this target, in the format used by
// watch.Watcher.
self.dependencyInfo = {files: {}, directories: {}};
// On-disk dependencies of this target.
self.watchSet = new watch.WatchSet();
// node_modules directories that we need to copy into the target (or
// otherwise make available at runtime.) A map from an absolute path
@@ -669,13 +669,8 @@ _.extend(Target.prototype, {
throw new Error("Unknown type " + resource.type);
});
// Depend on the source files that produced these
// resources. (Since the dependencyInfo.directories should be
// disjoint, it should be OK to merge them this way.)
_.extend(self.dependencyInfo.files,
slice.dependencyInfo.files);
_.extend(self.dependencyInfo.directories,
slice.dependencyInfo.directories);
// Depend on the source files that produced these resources.
self.watchSet.merge(slice.watchSet);
});
},
@@ -705,11 +700,10 @@ _.extend(Target.prototype, {
});
},
// Return all dependency info for this target, in the format
// expected by watch.Watcher.
getDependencyInfo: function () {
// Return the WatchSet for this target's dependency info.
getWatchSet: function () {
var self = this;
return self.dependencyInfo;
return self.watchSet;
},
// Return the most inclusive architecture with which this target is
@@ -764,8 +758,7 @@ _.extend(ClientTarget.prototype, {
var self = this;
var templatePath = path.join(__dirname, "app.html.in");
var template = fs.readFileSync(templatePath);
self.dependencyInfo.files[templatePath] = Builder.sha1(template);
var template = watch.readAndWatchFile(self.watchSet, templatePath);
var f = require('handlebars').compile(template.toString());
return new Buffer(f({
@@ -1105,8 +1098,7 @@ _.extend(JsImage.prototype, {
_.each(nodeModulesDirectories, function (nmd) {
builder.copyDirectory({
from: nmd.sourcePath,
to: nmd.preferredBundlePath,
depend: false
to: nmd.preferredBundlePath
});
});
@@ -1321,8 +1313,7 @@ _.extend(ServerTarget.prototype, {
builder.copyDirectory({
from: path.join(files.get_dev_bundle(), 'lib', 'node_modules'),
to: 'node_modules',
ignore: ignoreFiles,
depend: false
ignore: ignoreFiles
});
}
@@ -1352,8 +1343,8 @@ var writeFile = function (file, builder) {
// path of a directory that should be created to contain the generated
// site archive.
//
// Returns dependencyInfo (in the format expected by watch.Watcher)
// for all files and directories that ultimately went into the bundle.
// Returns a watch.WatchSet for all files and directories that ultimately went
// into the bundle.
//
// options:
// - nodeModulesMode: "skip", "symlink", "copy"
@@ -1459,25 +1450,17 @@ var writeSiteArchive = function (targets, outputPath, options) {
// Control file
builder.writeJson('star.json', json);
// Merge the dependencyInfo of everything that went into the
// bundle. A naive merge like this doesn't work in general but
// should work in this case.
var fileDeps = {}, directoryDeps = {};
// Merge the WatchSet of everything that went into the bundle.
var watchSet = new watch.WatchSet();
var dependencySources = [builder].concat(_.values(targets));
_.each(dependencySources, function (s) {
var info = s.getDependencyInfo();
_.extend(fileDeps, info.files);
_.extend(directoryDeps, info.directories);
watchSet.merge(s.getWatchSet());
});
// We did it!
builder.complete();
return {
files: fileDeps,
directories: directoryDeps
};
return watchSet;
} catch (e) {
builder.abort();
throw e;
@@ -1497,10 +1480,9 @@ var writeSiteArchive = function (targets, outputPath, options) {
*
* Returns an object with keys:
* - errors: A buildmessage.MessageSet, or falsy if bundling succeeded.
* - dependencyInfo: Information about files and paths that were
* - watchSet: Information about files and paths that were
* inputs into the bundle and that we may wish to monitor for
* changes when developing interactively. It has two keys, 'files'
* and 'directories', in the format expected by watch.Watcher.
* changes when developing interactively, as a watch.WatchSet.
*
* On failure ('errors' is truthy), no bundle will be output (in fact,
* outputPath will have been removed if it existed.)
@@ -1547,7 +1529,7 @@ exports.bundle = function (appDir, outputPath, options) {
" " + options.releaseStamp : "");
var success = false;
var dependencyInfo = { files: {}, directories: {} };
var watchSet = new watch.WatchSet();
var messages = buildmessage.capture({
title: "building the application"
}, function () {
@@ -1604,9 +1586,13 @@ exports.bundle = function (appDir, outputPath, options) {
return server;
};
var includeDefaultTargets = true;
if (fs.existsSync(path.join(appDir, 'no-default-targets')))
includeDefaultTargets = false;
// Include default targets, unless there's a no-default-targets file in the
// top level of the app. (This is a very hacky interface which will
// change. Note, eg, that .meteor/packages is confusingly ignored in this
// case.)
var includeDefaultTargets = watch.readAndWatchFile(
watchSet, path.join(appDir, 'no-default-targets')) === null;
if (includeDefaultTargets) {
// Create a Package object that represents the app
@@ -1622,73 +1608,83 @@ exports.bundle = function (appDir, outputPath, options) {
}
// Pick up any additional targets in /programs
// Step 1: scan for targets and make a list
// Step 1: scan for targets and make a list. We will reload if you create a
// new subdir in 'programs', or create 'programs' itself.
var programsDir = path.join(appDir, 'programs');
var programs = [];
if (fs.existsSync(programsDir)) {
_.each(fs.readdirSync(programsDir), function (item) {
if (item.match(/^\./))
return; // ignore dotfiles
var itemPath = path.join(programsDir, item);
var programsSubdirs = watch.readAndWatchDirectory(watchSet, {
absPath: programsDir,
include: [/\/$/],
exclude: [/^\./]
});
if (! fs.statSync(itemPath).isDirectory())
return; // ignore non-directories
_.each(programsSubdirs, function (item) {
// Remove trailing slash.
item = item.substr(0, item.length - 1);
if (item in targets) {
buildmessage.error("duplicate programs named '" + item + "'");
// Recover by ignoring this program
return;
if (_.has(targets, item)) {
buildmessage.error("duplicate programs named '" + item + "'");
// Recover by ignoring this program
return;
}
targets[item] = true; // will be overwritten with actual target later
// Read attributes.json, if it exists
var attrsJsonAbsPath = path.join(programsDir, item, 'attributes.json');
var attrsJsonRelPath = path.join('programs', item, 'attributes.json');
var attrsJsonContents = watch.readAndWatchFile(
watchSet, attrsJsonAbsPath);
var attrsJson = {};
if (attrsJsonContents !== null) {
try {
attrsJson = JSON.parse(attrsJsonContents);
} catch (e) {
if (! (e instanceof SyntaxError))
throw e;
buildmessage.error(e.message, { file: attrsJsonRelPath });
// recover by ignoring attributes.json
}
}
// Read attributes.json, if it exists
var attrsJsonPath = path.join(itemPath, 'attributes.json');
var attrsJsonRelPath = path.join('programs', item, 'attributes.json');
var attrsJson = {};
if (fs.existsSync(attrsJsonPath)) {
try {
attrsJson = JSON.parse(fs.readFileSync(attrsJsonPath));
} catch (e) {
if (! (e instanceof SyntaxError))
throw e;
buildmessage.error(e.message, { file: attrsJsonRelPath });
// recover by ignoring attributes.json
}
}
var isControlProgram = !! attrsJson.isControlProgram;
if (isControlProgram) {
if (controlProgram !== null) {
buildmessage.error(
var isControlProgram = !! attrsJson.isControlProgram;
if (isControlProgram) {
if (controlProgram !== null) {
buildmessage.error(
"there can be only one control program ('" + controlProgram +
"' is also marked as the control program)",
{ file: attrsJsonRelPath });
// recover by ignoring that it wants to be the control
// program
} else {
controlProgram = item;
}
"' is also marked as the control program)",
{ file: attrsJsonRelPath });
// recover by ignoring that it wants to be the control
// program
} else {
controlProgram = item;
}
}
// Add to list
programs.push({
type: attrsJson.type || "server",
name: item,
path: itemPath,
client: attrsJson.client,
attrsJsonRelPath: attrsJsonRelPath
});
// Add to list
programs.push({
type: attrsJson.type || "server",
name: item,
path: path.join(programsDir, item),
client: attrsJson.client,
attrsJsonRelPath: attrsJsonRelPath
});
}
});
if (! controlProgram) {
var target = makeServerTarget("ctl");
targets["ctl"] = target;
controlProgram = "ctl";
if (_.has(targets, 'ctl')) {
buildmessage.error(
"A program named ctl exists but no program has isControlProgram set");
// recover by not making a control program
} else {
var target = makeServerTarget("ctl");
targets["ctl"] = target;
controlProgram = "ctl";
}
}
// Step 2: sort the list so that programs are built first (because
// when we build the servers we need to be able to reference the
// clients)
// Step 2: sort the list so that client programs are built first (because
// when we build the servers we need to be able to reference the clients)
programs.sort(function (a, b) {
a = (a.type === "client") ? 0 : 1;
b = (b.type === "client") ? 0 : 1;
@@ -1750,12 +1746,16 @@ exports.bundle = function (appDir, outputPath, options) {
if (! (controlProgram in targets))
controlProgram = undefined;
// Make sure we notice when somebody adds a package to the app packages dir
// that may override a warehouse package.
library.watchLocalPackageDirs(watchSet);
// Write to disk
dependencyInfo = writeSiteArchive(targets, outputPath, {
watchSet.merge(writeSiteArchive(targets, outputPath, {
nodeModulesMode: options.nodeModulesMode,
builtBy: builtBy,
controlProgram: controlProgram
});
}));
success = true;
});
@@ -1765,8 +1765,8 @@ exports.bundle = function (appDir, outputPath, options) {
return {
errors: success ? false : messages,
dependencyInfo: dependencyInfo
} ;
watchSet: watchSet
};
};
// Make a JsImage object (a complete, linked, ready-to-go JavaScript
@@ -1776,7 +1776,7 @@ exports.bundle = function (appDir, outputPath, options) {
//
// Returns an object with keys:
// - image: The created JsImage object.
// - dependencyInfo: Source file dependency info (see bundle().)
// - watchSet: Source file WatchSet (see bundle().)
//
// XXX return an 'errors' key for symmetry with bundle(), rather than
// letting exceptions escape?
@@ -1830,7 +1830,7 @@ exports.buildJsImage = function (options) {
return {
image: target.toJsImage(),
dependencyInfo: target.getDependencyInfo()
watchSet: target.getWatchSet()
};
};

View File

@@ -39,6 +39,20 @@ var files = exports;
_.extend(exports, {
// A sort comparator to order files into load order.
sort: function (a, b) {
// XXX HUGE HACK --
// push html (template) files ahead of everything else. this is
// important because the user wants to be able to say
// Template.foo.events = { ... }
//
// maybe all of the templates should go in one file? packages should
// probably have a way to request this treatment (load order dependency
// tags?) .. who knows.
var ishtml_a = path.extname(a) === '.html';
var ishtml_b = path.extname(b) === '.html';
if (ishtml_a !== ishtml_b) {
return (ishtml_a ? -1 : 1);
}
// main.* loaded last
var ismain_a = (path.basename(a).indexOf('main.') === 0);
var ismain_b = (path.basename(b).indexOf('main.') === 0);
@@ -47,8 +61,10 @@ _.extend(exports, {
}
// /lib/ loaded first
var islib_a = (a.indexOf(path.sep + 'lib' + path.sep) !== -1);
var islib_b = (b.indexOf(path.sep + 'lib' + path.sep) !== -1);
var islib_a = (a.indexOf(path.sep + 'lib' + path.sep) !== -1 ||
a.indexOf('lib' + path.sep) === 0);
var islib_b = (b.indexOf(path.sep + 'lib' + path.sep) !== -1 ||
b.indexOf('lib' + path.sep) === 0);
if (islib_a !== islib_b) {
return (islib_a ? -1 : 1);
}
@@ -64,84 +80,6 @@ _.extend(exports, {
return (a < b ? -1 : 1);
},
// Returns true if this is a file we should maybe care about (stat it,
// descend if it is a directory, etc).
pre_filter: function (filename) {
if (!filename) { return false; }
// no . files
var base = path.basename(filename);
if (base && base[0] === '.') { return false; }
// XXX
// first, we only want to exclude APP_ROOT/public, not some deeper public
// second, we don't really like this at all
// third, we don't update the app now if anything here changes
if (base === 'public' || base === 'private') { return false; }
return true;
},
// Returns true if this is a file we should monitor. Iterate over
// all the interesting files, applying 'func' to each file
// path. 'extensions' is an array of extensions to include, without
// leading dots (eg ['html', 'js'])
file_list_async: function (filepath, extensions, func) {
if (!files.pre_filter(filepath)) { return; }
fs.stat(filepath, function(err, stats) {
if (err) {
// XXX!
return;
}
if (stats.isDirectory()) {
fs.readdir(filepath, function(err, fileNames) {
if(err) {
// XXX!
return;
}
_.each(fileNames, function (fileName) {
files.file_list_async(path.join(filepath, fileName),
extensions, func);
});
});
} else if (files.findExtension(extensions, filepath)) {
func(filepath);
}
});
},
file_list_sync: function (filepath, extensions) {
var ret = [];
if (!files.pre_filter(filepath)) { return ret; }
var stats = fs.statSync(filepath);
if (stats.isDirectory()) {
var fileNames = fs.readdirSync(filepath);
_.each(fileNames, function (fileName) {
ret = ret.concat(files.file_list_sync(
path.join(filepath, fileName), extensions));
});
} else if (files.findExtension(extensions, filepath)) {
ret.push(filepath);
}
return ret;
},
// given a list of extensions (no leading dots) and a path, return
// the file extension provided in the list. If it doesn't find it,
// return null.
findExtension: function (extensions, filepath) {
var len = filepath.length;
for (var i = 0; i < extensions.length; ++i) {
var ext = "." + extensions[i];
if (filepath.indexOf(ext, len - ext.length) !== -1){
return ext;
}
}
return null;
},
// given a path, returns true if it is a meteor application (has a
// .meteor directory with a 'packages' file). false otherwise.
is_app_dir: function (filepath) {
@@ -161,14 +99,6 @@ _.extend(exports, {
}
},
// given a path, returns true if it is a meteor package (is a
// directory with a 'packages.js' file). false otherwise.
//
// Note that a directory can be both a package _and_ an application.
is_package_dir: function (filepath) {
return fs.existsSync(path.join(filepath, 'package.js'));
},
// given a predicate function and a starting path, traverse upwards
// from the path until we find a path that satisfies the predicate.
//

View File

@@ -1,6 +1,7 @@
var path = require('path');
var _ = require('underscore');
var files = require('./files.js');
var watch = require('./watch.js');
var packages = require('./packages.js');
var warehouse = require('./warehouse.js');
var bundler = require('./bundler.js');
@@ -127,7 +128,7 @@ _.extend(Library.prototype, {
if (! packageDir) {
for (var i = 0; i < self.localPackageDirs.length; ++i) {
var packageDir = path.join(self.localPackageDirs[i], name);
// XXX or unipackage.json?
// XXX or unipackage.json? see also watchLocalPackageDirs
if (fs.existsSync(path.join(packageDir, 'package.js')))
break;
packageDir = null;
@@ -274,6 +275,24 @@ _.extend(Library.prototype, {
}
},
// Register local package directories with a watchSet. We want to know if a
// package is created or deleted, which includes both its top-level source
// directory or its package.js file.
watchLocalPackageDirs: function (watchSet) {
var self = this;
_.each(self.localPackageDirs, function (packageDir) {
var packages = watch.readAndWatchDirectory(watchSet, {
absPath: packageDir,
include: [/\/$/]
});
_.each(packages, function (p) {
watch.readAndWatchFile(watchSet,
path.join(packageDir, p, 'package.js'));
// XXX unipackage.json too?
});
});
},
// Get all packages available. Returns a map from the package name
// to a Package object.
//

View File

@@ -20,54 +20,18 @@ var sourcemap = require('source-map');
// unipackage/slice changes, but this version (which is build-tool-specific) can
// change when the contents (not structure) of the built output changes. So
// eg, if we improve the linker's static analysis, this should be bumped.
exports.BUILT_BY = 'meteor/6';
//
// You should also update this whenever you update any of the packages used
// directly by the unipackage creation process (eg js-analyze) since they do not
// end up as watched dependencies. (At least for now, packages only used in
// target creation (eg minifiers and dev-bundle-fetcher) don't require you to
// update BUILT_BY, though you will need to quit and rerun "meteor run".)
exports.BUILT_BY = 'meteor/7';
// Find all files under `rootPath` that have an extension in
// `extensions` (an array of extensions without leading dot), and
// return them as a list of paths relative to rootPath. Ignore files
// that match a regexp in the ignoreFiles array, if given. As a
// special case (ugh), push all html files to the head of the list.
var scanForSources = function (rootPath, extensions, ignoreFiles) {
var self = this;
// find everything in tree, sorted depth-first alphabetically.
var fileList = files.file_list_sync(rootPath, extensions);
fileList = _.reject(fileList, function (file) {
return _.any(ignoreFiles || [], function (pattern) {
return file.match(pattern);
});
});
fileList.sort(files.sort);
// XXX HUGE HACK --
// push html (template) files ahead of everything else. this is
// important because the user wants to be able to say
// Template.foo.events = { ... }
//
// maybe all of the templates should go in one file? packages
// should probably have a way to request this treatment (load
// order dependency tags?) .. who knows.
var htmls = [];
_.each(fileList, function (filename) {
if (path.extname(filename) === '.html') {
htmls.push(filename);
fileList = _.reject(fileList, function (f) { return f === filename;});
}
});
fileList = htmls.concat(fileList);
// now make everything relative to rootPath
var prefix = rootPath;
if (prefix[prefix.length - 1] !== path.sep)
prefix += path.sep;
return fileList.map(function (abs) {
if (path.relative(prefix, abs).match(/\.\./))
// XXX audit to make sure it works in all possible symlink
// scenarios
throw new Error("internal error: source file outside of parent?");
return abs.substr(prefix.length);
});
// Like Perl's quotemeta: quotes all regexp metacharacters. See
// https://github.com/substack/quotemeta/blob/master/index.js
var quotemeta = function (str) {
return String(str).replace(/(\W)/g, '\\$1');
};
var rejectBadPath = function (p) {
@@ -87,10 +51,10 @@ var rejectBadPath = function (p) {
// - implies
// - getSourcesFunc
// - exports
// - dependencyInfo
// - watchSet
// - nodeModulesPath
//
// Do not include the source files in dependencyInfo. They will be
// Do not include the source files in watchSet. They will be
// added at compile time when the sources are actually read.
var Slice = function (pkg, options) {
var self = this;
@@ -160,10 +124,8 @@ var Slice = function (pkg, options) {
self.declaredExports = options.declaredExports || null;
// Files and directories that we want to monitor for changes in
// development mode, such as source files and package.js, in the
// format accepted by watch.Watcher.
self.dependencyInfo = options.dependencyInfo ||
{ files: {}, directories: {} };
// development mode, such as source files and package.js, as a watch.WatchSet.
self.watchSet = options.watchSet || new watch.WatchSet();
// Has this slice been compiled?
self.isBuilt = false;
@@ -267,8 +229,7 @@ _.extend(Slice.prototype, {
var absPath = path.resolve(self.pkg.sourceRoot, relPath);
var ext = path.extname(relPath).substr(1);
var handler = !fileOptions.isAsset && self._getSourceHandler(ext);
var contents = fs.readFileSync(absPath);
self.dependencyInfo.files[absPath] = Builder.sha1(contents);
var contents = watch.readAndWatchFile(self.watchSet, absPath);
if (! handler) {
// If we don't have an extension handler, serve this file as a
@@ -497,20 +458,16 @@ _.extend(Slice.prototype, {
jsAnalyze: jsAnalyze
});
// Add dependencies on the source code to any plugins that we
// could have used (we need to depend even on plugins that we
// didn't use, because if they were changed they might become
// relevant to us)
//
// XXX I guess they're probably properly disjoint since plugins
// probably include only file dependencies? Anyway it would be a
// strange situation if plugin source directories overlapped with
// other parts of your app
// Add dependencies on the source code to any plugins that we could have
// used. We need to depend even on plugins that we didn't use, because if
// they were changed they might become relevant to us. This means that we
// end up depending on every source file contributing to all plugins in the
// packages we use (including source files from other packages that the
// plugin program itself uses), as well as the package.js file from every
// package we directly use (since changing the package.js may add or remove
// a plugin).
_.each(self._activePluginPackages(), function (otherPkg) {
_.extend(self.dependencyInfo.files,
otherPkg.pluginDependencyInfo.files);
_.extend(self.dependencyInfo.directories,
otherPkg.pluginDependencyInfo.directories);
self.watchSet.merge(otherPkg.pluginWatchSet);
});
self.prelinkFiles = results.files;
@@ -835,19 +792,17 @@ var Package = function (library) {
// pluginsBuilt is true.
self.plugins = {};
// Full transitive dependencies for all plugins in this package, as well as
// this package's package.js. If any of these dependencies change, not only
// may our plugins need to be rebuilt, but any package that directly uses this
// package needs to be rebuilt in case the change to plugins affected
// compilation.
// A WatchSet for the full transitive dependencies for all plugins in this
// package, as well as this package's package.js. If any of these dependencies
// change, our plugins need to be rebuilt... but also, any package that
// directly uses this package needs to be rebuilt in case the change to
// plugins affected compilation.
//
// Complete only when pluginsBuilt is true.
// XXX Refactor so that slice and plugin dependencies are handled by
// the same mechanism.
self.pluginDependencyInfo = { files: {}, directories: {} };
self.pluginWatchSet = new watch.WatchSet();
// True if plugins have been initialized (if
// _ensurePluginsInitialized has been called)
// True if plugins have been initialized (if _ensurePluginsInitialized has
// been called)
self._pluginsInitialized = false;
// Source file handlers registered by plugins. Map from extension
@@ -1031,16 +986,10 @@ _.extend(Package.prototype, {
info.name))
});
if (buildResult.dependencyInfo) {
// Merge plugin dependencies
// XXX is naive merge sufficient here? should be, because
// plugins can't (for now) contain directory dependencies?
_.extend(self.pluginDependencyInfo.files,
buildResult.dependencyInfo.files);
_.extend(self.pluginDependencyInfo.directories,
buildResult.dependencyInfo.directories);
}
// Add this plugin's dependencies to our "plugin dependency" WatchSet.
self.pluginWatchSet.merge(buildResult.watchSet);
// Register the built plugin's code.
self.plugins[info.name] = buildResult.image;
});
});
@@ -1156,7 +1105,7 @@ _.extend(Package.prototype, {
// changes, because a change to package.js might add or remove a plugin,
// which could change a file from being handled by extension vs treated as
// an asset.
self.pluginDependencyInfo.files[packageJsPath] = packageJsHash;
self.pluginWatchSet.addFile(packageJsPath, packageJsHash);
// == 'Package' object visible in package.js ==
var Package = {
@@ -1728,10 +1677,12 @@ _.extend(Package.prototype, {
uses[role][where].unshift({ spec: "meteor" });
}
// We need to create a separate (non ===) copy of
// dependencyInfo for each slice.
var dependencyInfo = { files: {}, directories: {} };
dependencyInfo.files[packageJsPath] = packageJsHash;
// Each slice has its own separate WatchSet. This is so that, eg, a test
// slice's dependencies doesn't end up getting merged into the
// pluginWatchSet of a package that uses it: only the use slice's
// dependencies need to go there!
var watchSet = new watch.WatchSet();
watchSet.addFile(packageJsPath, packageJsHash);
self.slices.push(new Slice(self, {
name: ({ use: "main", test: "tests" })[role],
@@ -1741,7 +1692,7 @@ _.extend(Package.prototype, {
getSourcesFunc: function () { return sources[role][where]; },
noExports: role === "test",
declaredExports: role === "use" ? exports[where] : null,
dependencyInfo: dependencyInfo,
watchSet: watchSet,
nodeModulesPath: arch === osArch && nodeModulesPath || undefined
}));
});
@@ -1779,82 +1730,77 @@ _.extend(Package.prototype, {
self.slices.push(slice);
// Watch control files for changes
// XXX this read has a race with the actual read that is used
// XXX this read has a race with the actual reads that are used
_.each([path.join(appDir, '.meteor', 'packages'),
path.join(appDir, '.meteor', 'releases')], function (p) {
if (fs.existsSync(p)) {
slice.dependencyInfo.files[p] =
Builder.sha1(fs.readFileSync(p));
}
path.join(appDir, '.meteor', 'release')], function (p) {
watch.readAndWatchFile(slice.watchSet, p);
});
// Determine source files
slice.getSourcesFunc = function () {
var allSources = scanForSources(
self.sourceRoot, slice.registeredExtensions(),
ignoreFiles || []);
var sourceInclude = _.map(slice.registeredExtensions(), function (ext) {
return new RegExp('\\.' + quotemeta(ext) + '$');
});
var sourceExclude = [/^\./].concat(ignoreFiles);
var withoutAppPackages = _.reject(allSources, function (sourcePath) {
// Skip files that are in app packages; they'll get watched if they
// are actually listed in the .meteor/packages file. (Directories
// named "packages" lower in the tree are OK.)
return sourcePath.match(/^packages\//);
// Wrapper around watch.readAndWatchDirectory which takes in and returns
// sourceRoot-relative directories.
var readAndWatchDirectory = function (relDir, filters) {
filters = filters || {};
var absPath = path.join(self.sourceRoot, relDir);
var contents = watch.readAndWatchDirectory(slice.watchSet, {
absPath: absPath,
include: filters.include,
exclude: filters.exclude
});
return _.map(contents, function (x) {
return path.join(relDir, x);
});
};
// Read top-level source files.
var sources = readAndWatchDirectory('', {
include: sourceInclude,
exclude: sourceExclude
});
var otherSliceName = (sliceName === "server") ? "client" : "server";
var withoutOtherSlice =
_.reject(withoutAppPackages, function (sourcePath) {
return (path.sep + sourcePath + path.sep).indexOf(
path.sep + otherSliceName + path.sep) !== -1;
});
var otherSliceRegExp =
(sliceName === "server" ? /^client\/$/ : /^server\/$/);
var tests = false; /* for now */
var withoutOtherRole =
_.reject(withoutOtherSlice, function (sourcePath) {
var isTest =
((path.sep + sourcePath + path.sep).indexOf(
path.sep + 'tests' + path.sep) !== -1);
return isTest !== (!!tests);
});
// Read top-level subdirectories. Ignore subdirectories that have
// special handling.
var sourceDirectories = readAndWatchDirectory('', {
include: [/\/$/],
exclude: [/^packages\/$/, /^programs\/$/, /^tests\/$/,
/^public\/$/, /^private\/$/,
otherSliceRegExp].concat(sourceExclude)
});
var withoutOtherPrograms =
_.reject(withoutOtherRole, function (sourcePath) {
return !! sourcePath.match(/^programs\//);
});
// XXX avoid infinite recursion with bad symlinks
while (!_.isEmpty(sourceDirectories)) {
var dir = sourceDirectories.shift();
// remove trailing slash
dir = dir.substr(0, dir.length - 1);
// XXX Add directory dependencies to slice at the time that
// getSourcesFunc is called. This is kind of a hack but it'll
// do for the moment.
// Find source files in this directory.
Array.prototype.push.apply(sources, readAndWatchDirectory(dir, {
include: sourceInclude,
exclude: sourceExclude
}));
// XXX nothing here monitors for the no-default-targets file
// Find sub-sourceDirectories. Note that we DON'T need to ignore the
// directory names that are only special at the top level.
Array.prototype.push.apply(sourceDirectories, readAndWatchDirectory(dir, {
include: [/\/$/],
exclude: [/^tests\/$/, otherSliceRegExp].concat(sourceExclude)
}));
}
// Directories to monitor for new files
var appIgnores = _.clone(ignoreFiles);
slice.dependencyInfo.directories[appDir] = {
include: _.map(slice.registeredExtensions(), function (ext) {
return new RegExp('\\.' + ext + "$");
}),
// XXX This excludes watching under *ANY* packages or programs
// directory, but we should really only care about top-level ones.
// But watcher doesn't let you do that.
exclude: ignoreFiles.concat([/^packages$/, /^programs$/,
/^tests$/])
};
// Inside the programs directory, only look for new program (which we
// can detect by the appearance of a package.js file.) Other than that,
// programs explicitly call out the files they use.
slice.dependencyInfo.directories[path.resolve(appDir, 'programs')] = {
include: [ /^package\.js$/ ],
exclude: ignoreFiles
};
// Exclude .meteor/local and everything under it.
slice.dependencyInfo.directories[
path.resolve(appDir, '.meteor', 'local')] = { exclude: [/.?/] };
// We've found all the source files. Sort them!
sources.sort(files.sort);
// Convert into relPath/fileOptions objects.
var sources = _.map(withoutOtherPrograms, function (relPath) {
sources = _.map(sources, function (relPath) {
var sourceObj = {relPath: relPath};
// Special case: on the client, JavaScript files in a
@@ -1868,44 +1814,46 @@ _.extend(Package.prototype, {
return sourceObj;
});
// Now look for assets for this slice.
var assetDir = sliceName === "client" ? "public" : "private";
var absAssetDir = path.resolve(appDir, assetDir);
slice.dependencyInfo.directories[absAssetDir]
= { include: [/.?/], exclude: ignoreFiles};
var walkAssetDir = function (subdir) {
var dir = path.join(appDir, subdir);
try {
var items = fs.readdirSync(dir);
} catch (e) {
// OK if the directory (esp the top level asset dir) doesn't exist.
if (e.code === "ENOENT")
return;
throw e;
}
_.each(items, function (item) {
// Skip excluded files
var matchesAnExclude = _.any(ignoreFiles, function (pattern) {
return item.match(pattern);
var assetDirs = readAndWatchDirectory('', {
include: [new RegExp('^' + assetDir + '/$')]
});
// XXX avoid infinite recursion with bad symlinks
if (!_.isEmpty(assetDirs)) {
if (!_.isEqual(assetDirs, [assetDir + '/']))
throw new Error("Surprising assetDirs: " + JSON.stringify(assetDirs));
while (!_.isEmpty(assetDirs)) {
dir = assetDirs.shift();
// remove trailing slash
dir = dir.substr(0, dir.length - 1);
// Find asset files in this directory.
var assetsAndSubdirs = readAndWatchDirectory(dir, {
include: [/.?/],
// we DO look under dot directories here
exclude: ignoreFiles
});
if (matchesAnExclude)
return;
var assetAbsPath = path.join(dir, item);
var assetRelPath = path.join(subdir, item);
if (fs.statSync(assetAbsPath).isDirectory()) {
walkAssetDir(assetRelPath);
return;
}
sources.push({
relPath: assetRelPath,
fileOptions: {
isAsset: true
_.each(assetsAndSubdirs, function (item) {
if (item[item.length - 1] === '/') {
// Recurse on this directory.
assetDirs.push(item);
} else {
// This file is an asset.
sources.push({
relPath: item,
fileOptions: {
isAsset: true
}
});
}
});
});
};
walkAssetDir(assetDir);
}
}
return sources;
};
});
@@ -1947,16 +1895,24 @@ _.extend(Package.prototype, {
// XXX should comprehensively sanitize (eg, typecheck) everything
// read from json files
// Read the dependency info (if present), and make the strings
// back into regexps
var sliceDependencies = buildInfoJson.sliceDependencies || {};
_.each(sliceDependencies, function (dependencyInfo, sliceTag) {
sliceDependencies[sliceTag] =
makeDependencyInfoIntoRegexps(dependencyInfo);
// Read the watch sets for each slice; keep them separate (for passing to
// the Slice constructor below) as well as merging them into one big
// WatchSet.
var mergedWatchSet = new watch.WatchSet();
var sliceWatchSets = {};
_.each(buildInfoJson.sliceDependencies, function (watchSetJSON, sliceTag) {
var watchSet = watch.WatchSet.fromJSON(watchSetJSON);
mergedWatchSet.merge(watchSet);
sliceWatchSets[sliceTag] = watchSet;
});
self.pluginDependencyInfo = makeDependencyInfoIntoRegexps(
// We do NOT put this (or anything!) onto self until we've passed the
// onlyIfUpToDate check.
var pluginWatchSet = watch.WatchSet.fromJSON(
buildInfoJson.pluginDependencies);
// This might be redundant (since pluginWatchSet was probably merged into
// each slice watchSet when it was built) but shouldn't hurt.
mergedWatchSet.merge(pluginWatchSet);
// If we're supposed to check the dependencies, go ahead and do so
if (options.onlyIfUpToDate) {
@@ -1976,7 +1932,7 @@ _.extend(Package.prototype, {
return false;
}
if (! self.checkUpToDate(sliceDependencies))
if (! self.checkUpToDate(mergedWatchSet))
return false;
}
@@ -1987,6 +1943,7 @@ _.extend(Package.prototype, {
};
self.defaultSlices = mainJson.defaultSlices;
self.testSlices = mainJson.testSlices;
self.pluginWatchSet = pluginWatchSet;
_.each(mainJson.plugins, function (pluginMeta) {
rejectBadPath(pluginMeta.path);
@@ -2034,7 +1991,7 @@ _.extend(Package.prototype, {
var slice = new Slice(self, {
name: sliceMeta.name,
arch: sliceMeta.arch,
dependencyInfo: sliceDependencies[sliceMeta.path],
watchSet: sliceWatchSets[sliceMeta.path],
nodeModulesPath: nodeModulesPath,
uses: _.map(sliceJson.uses, function (u) {
return {
@@ -2107,50 +2064,26 @@ _.extend(Package.prototype, {
return true;
},
// Try to check if this package is up-to-date (that is, whether its
// source files have been modified.) True if we have dependency info
// and it says that the package is up-to-date. False if a source
// file has changed.
// Try to check if this package is up-to-date (that is, whether its source
// files have been modified.) True if we have dependency info and it says that
// the package is up-to-date. False if a source file has changed.
//
// The argument _sliceDependencies is used when reading from disk when there
// are no slices yet; don't pass it from outside this file.
checkUpToDate: function (_sliceDependencies) {
// The argument _watchSet is used when reading from disk when there are no
// slices yet; don't pass it from outside this file.
checkUpToDate: function (_watchSet) {
var self = this;
// Compute the dependency info to use
var dependencyInfo = { files: {}, directories: {} };
var merge = function (di) {
// XXX is naive merge sufficient here?
_.extend(dependencyInfo.files, di.files);
_.extend(dependencyInfo.directories, di.directories);
};
if (_sliceDependencies) {
_.each(_sliceDependencies, function (dependencyInfo, sliceTag) {
merge(dependencyInfo);
});
} else {
if (!_watchSet) {
// This call was on an already-fully-loaded Package and we just want to
// see if it's changed. So we have some watchSets inside ourselves.
_watchSet = new watch.WatchSet();
_watchSet.merge(self.pluginWatchSet);
_.each(self.slices, function (slice) {
merge(slice.dependencyInfo);
_watchSet.merge(slice.watchSet);
});
}
// XXX There used to be a concept in this file of "packages loaded from disk
// without having dependencyInfo, but it was unclear when that would happen,
// so this was removed.
var isUpToDate = true;
var watcher = new watch.Watcher({
files: dependencyInfo.files,
directories: dependencyInfo.directories,
onChange: function () {
isUpToDate = false;
}
});
watcher.stop();
return isUpToDate;
return watch.isUpToDate(_watchSet);
},
// True if this package can be saved as a unipackage
@@ -2196,8 +2129,7 @@ _.extend(Package.prototype, {
var buildInfoJson = {
builtBy: exports.BUILT_BY,
sliceDependencies: { },
pluginDependencies: makeDependencyInfoSerializable(
self.pluginDependencyInfo),
pluginDependencies: self.pluginWatchSet.toJSON(),
source: options.buildOfPath || undefined
};
@@ -2249,7 +2181,7 @@ _.extend(Package.prototype, {
// Save slice dependencies. Keyed by the json path rather than thinking
// too hard about how to encode pair (name, arch).
buildInfoJson.sliceDependencies[sliceJsonFile] =
makeDependencyInfoSerializable(slice.dependencyInfo);
slice.watchSet.toJSON();
// Construct slice metadata
var sliceJson = {
@@ -2355,8 +2287,7 @@ _.extend(Package.prototype, {
if (slice.nodeModulesPath) {
builder.copyDirectory({
from: slice.nodeModulesPath,
to: 'npm/node_modules',
depend: false
to: 'npm/node_modules'
});
}
@@ -2387,38 +2318,6 @@ _.extend(Package.prototype, {
}
});
// Convert regex to string.
var makeDependencyInfoSerializable = function (dependencyInfo) {
if (!dependencyInfo)
dependencyInfo = { files: {}, directories: {} };
var out = {files: dependencyInfo.files, directories: {}};
_.each(dependencyInfo.directories, function (d, path) {
var dirInfo = out.directories[path] = {};
_.each(["include", "exclude"], function (k) {
dirInfo[k] = _.map(d[k], function (r) {
return r.source;
});
});
});
return out;
};
// Convert string to regex.
var makeDependencyInfoIntoRegexps = function (dependencyInfo) {
if (!dependencyInfo)
dependencyInfo = { files: {}, directories: {} };
var out = {files: dependencyInfo.files, directories: {}};
_.each(dependencyInfo.directories, function (d, path) {
var dirInfo = out.directories[path] = {};
_.each(["include", "exclude"], function (k) {
dirInfo[k] = _.map(d[k], function (s) {
return new RegExp(s);
});
});
});
return out;
};
var packages = exports;
_.extend(exports, {
Package: Package

View File

@@ -471,7 +471,10 @@ exports.run = function (context, options) {
library: context.library
};
var startWatching = function (dependencyInfo) {
var startWatching = function (watchSet) {
if (process.env.METEOR_DEBUG_WATCHSET)
console.log(JSON.stringify(watchSet, null, 2));
if (!Status.shouldRestart)
return;
@@ -479,8 +482,7 @@ exports.run = function (context, options) {
watcher.stop();
watcher = new watch.Watcher({
files: dependencyInfo.files,
directories: dependencyInfo.directories,
watchSet: watchSet,
onChange: function () {
if (Status.crashing)
logToClients({'system': "=> Modified -- restarting."});
@@ -524,12 +526,12 @@ exports.run = function (context, options) {
// Bundle up the app
var bundleResult = bundler.bundle(context.appDir, bundlePath, bundleOpts);
var dependencyInfo = bundleResult.dependencyInfo;
var watchSet = bundleResult.watchSet;
if (bundleResult.errors) {
logToClients({stdout: "=> Errors prevented startup:\n\n" +
bundleResult.errors.formatMessages() + "\n"});
Status.hardCrashed("has errors");
startWatching(dependencyInfo);
startWatching(watchSet);
return;
}
@@ -546,32 +548,14 @@ exports.run = function (context, options) {
Builder.sha1(fs.readFileSync(options.settingsFile, "utf8"));
// Reload if the setting file changes
dependencyInfo.files[path.resolve(options.settingsFile)] =
settingsHash;
}
// If using a warehouse, don't do dependency monitoring on any of
// the files that are in the warehouse. You should not be editing
// those files directly.
if (files.usesWarehouse()) {
var warehouseDir = path.resolve(warehouse.getWarehouseDir());
var filterKeys = function (obj) {
_.each(_.keys(obj), function (k) {
k = path.resolve(k);
if (warehouseDir.length <= k.length &&
k.substr(0, warehouseDir.length) === warehouseDir)
delete obj[k];
});
};
filterKeys(dependencyInfo.files);
filterKeys(dependencyInfo.directories);
watchSet.addFile(path.resolve(options.settingsFile), settingsHash);
}
// Start watching for changes for files. There's no hurry to call
// this, since dependencyInfo contains a snapshot of the state of
// this, since watchSet contains a snapshot of the state of
// the world at the time of bundling, in the form of hashes and
// lists of matching files in each directory.
startWatching(dependencyInfo);
startWatching(watchSet);
// Start the server
Status.running = true;

View File

@@ -41,10 +41,11 @@ var go = function (options) {
}
fired = false;
var files = {};
var watchSet = new watch.WatchSet();
_.each(options.files, function (value, file) {
file = path.join(tmp, file);
if (typeof value !== "string") {
if (value !== null && typeof value !== "string") {
if (fs.existsSync(file)) {
var hash = crypto.createHash('sha1');
hash.update(fs.readFileSync(file));
@@ -53,18 +54,23 @@ var go = function (options) {
value = 'dummyhash';
}
}
files[file] = value;
watchSet.addFile(file, value);
});
var directories = {};
_.each(options.directories, function (options, dir) {
dir = path.join(tmp, dir);
directories[dir] = options;
_.each(options.directories, function (dir) {
// don't mutate options.directories, since we may reuse it with a no-arg
// go() call
var realDir = {
absPath: path.join(tmp, dir.absPath),
include: dir.include,
exclude: dir.exclude
};
realDir.contents = dir.contents || watch.readDirectory(realDir);
watchSet.addDirectory(realDir);
});
theWatcher = new watch.Watcher({
files: files,
directories: directories,
watchSet: watchSet,
onChange: function () {
fired = true;
if (firedFuture)
@@ -97,11 +103,7 @@ var waitForTopOfSecond = function () {
if (msPastSecond < 100) {
return;
}
var f = new Future;
setTimeout(function () {
f.return();
}, 25);
f.wait();
delay(25);
}
};
@@ -139,15 +141,30 @@ Fiber(function () {
files: { '/aa/b': true, '/aa/c': true }
});
assert(fires()); // look like /aa/c was removed
go({
files: { '/aa/b': true, '/aa/c': null }
});
assert(!fires()); // assert that /aa/c doesn't exist
console.log("... directories");
go({
files: {'/aa/b': true },
directories: {'/aa': {
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/]
}}
directories: [
{absPath: '/aa',
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/],
contents: []
},
{absPath: '/bb',
include: [/.?/],
contents: []
}
]
});
assert(fires()); // because /bb doesn't exist
touchDir('/bb');
go();
assert(!fires());
touchFile('/aa/c');
assert(!fires());
touchFile('/aa/maybe-not');
@@ -158,9 +175,10 @@ Fiber(function () {
assert(!fires());
touchFile('/aa/yes-for-sure');
assert(fires());
go();
touchFile('/aa/nope');
assert(fires()); // because yes-for-sure isn't in the file list
assert(fires()); // because yes-for-sure isn't in 'contents'
remove('/aa/yes-for-sure');
go();
assert(!fires());
@@ -169,11 +187,18 @@ Fiber(function () {
go();
assert(fires()); // maybe-this-time is still there
go({
files: {'/aa/b': true, '/aa/maybe-this-time': true },
directories: {'/aa': {
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/]
}}
files: {'/aa/b': true},
directories: [
{absPath: '/aa',
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/],
contents: ['maybe-this-time']
},
{absPath: '/bb',
include: [/.?/],
contents: []
}
]
});
go();
assert(!fires()); // maybe-this-time is now in the expected file list
@@ -183,84 +208,62 @@ Fiber(function () {
remove('/aa/maybe-this-time');
go();
assert(fires()); // maybe-this-time is missing
console.log("... recursive directories");
touchFile('/aa/b');
touchFile('/aa/maybe-this-time');
touchDir('/aa/yes-i-said-yes-i-will-yes');
go({
files: {'/aa/b': true },
directories: {'/aa': {
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/]
}}
});
touchDir('/aa/yess');
assert(!fires());
remove('/aa/yess');
assert(!fires());
touchFile('/aa/yess/kitten');
assert(!fires());
touchFile('/aa/yess/maybe');
assert(fires());
remove('/aa/yess');
go();
touchFile('/aa/whatever/kitten');
assert(!fires());
touchFile('/aa/whatever/maybe');
assert(fires());
remove('/aa/whatever');
go();
touchDir('/aa/i/love/subdirectories');
assert(!fires());
touchFile('/aa/i/love/subdirectories/yessir');
assert(fires());
remove('/aa/i/love/subdirectories/yessir');
go();
touchFile('/aa/i/love/subdirectories/every/day');
assert(!fires());
remove('/aa/i/love/subdirectories');
assert(!fires());
touchFile('/aa/i/love/not/nothing/yes');
assert(!fires());
touchFile('/aa/i/love/not/nothing/maybe/yes');
assert(!fires());
touchFile('/aa/i/love/maybe');
assert(fires());
remove('/aa/i');
remove('/aa/whatever');
remove('/aa');
touchFile('/aa/b');
console.log("... nested directories");
go({
files: {'/aa/b': true },
directories: {
'/aa': {
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/]
},
'/aa/x': {
include: [/kitten/],
exclude: [/puppy/]
directories: [
{absPath: '/aa',
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/],
contents: ['maybe-this-time']
}
}
]
});
assert(fires()); // yes-i-said-yes-i-will-yes/ is missing
go({
directories: [
{absPath: '/aa',
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/],
contents: ['maybe-this-time', 'yes-i-said-yes-i-will-yes']
}
]
});
assert(fires()); // yes-i-said-yes-i-will-yes is a dir, not a file
go({
directories: [
{absPath: '/aa',
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/],
contents: ['maybe-this-time', 'yes-i-said-yes-i-will-yes/']
}
]
});
touchFile('/aa/kitten');
assert(!fires());
touchFile('/aa/maybe.puppy');
assert(fires());
remove('/aa/maybe.puppy');
go();
touchFile('/aa/x/kitten');
assert(fires());
remove('/aa/x/kitten');
go();
touchFile('/aa/x/yes');
// same directory, different filters
go({
directories: [
// dirs
{absPath: '/aa',
include: [/\/$/],
contents: ['yes-i-said-yes-i-will-yes/']
},
// files
{absPath: '/aa',
include: [/.?/],
exclude: [/\/$/],
contents: ['b', 'c', 'maybe-not', 'maybe-this-time', 'never',
'never-yes', 'nope']
}
]
});
assert(!fires());
touchFile('/aa/x/kitten.not');
touchFile('/aa/bla');
assert(fires());
remove('/aa');
// nb: these are supposed to verify that the "wait a second and try again"
// logic works, but I couldn't get them to fail even when I turned that logic
// off.
console.log("... rapid changes to file");
touchFile('/aa/x');
waitForTopOfSecond();
@@ -268,52 +271,24 @@ Fiber(function () {
files: {'/aa/x': true }});
touchFile('/aa/x');
assert(fires(2000));
go({
directories: {
'/aa': {
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/]
directories: [
{absPath: '/aa',
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/]
}
}
]
});
assert(!fires());
waitForTopOfSecond();
touchFile('/aa/thing1/whatever');
delay(100);
touchFile('/aa/thing2/yes');
touchFile('/aa/wtf');
delay(600);
touchFile('/aa/yes-indeed');
assert(fires(2000));
remove('/aa');
console.log("... rapid changes to directory");
touchDir('/aa');
waitForTopOfSecond();
go({
directories: {'/aa': {
include: [/yes/, /maybe/, /aa/],
exclude: [/not/, /never/]
}}
});
touchFile('/aa/x/yes');
assert(fires(2000));
remove('/aa/x');
waitForTopOfSecond();
go();
delay(600);
touchFile('/aa/x/not');
delay(600);
touchFile('/aa/x/yes');
assert(fires(2000));
remove('/aa/x');
touchDir('/aa/x');
go();
delay(2000);
waitForTopOfSecond();
touchFile('/aa/x/no');
delay(600);
touchFile('/aa/x/yes');
assert(fires(2000));
console.log("Watcher test passed");
theWatcher.stop();

View File

@@ -6,86 +6,260 @@ var _ = require('underscore');
// the files change, call a user-provided callback. (If you want a
// second callback, you'll need to create a second Watcher.)
//
// You describe the structure you want to watch in a WatchSet; you then create a
// Watcher to watch it. Watcher does not mutate WatchSet, so you can create
// several Watchers from the same WatchSet. WatchSet can be easily converted to
// and from JSON for serialization.
//
// You can set up two kinds of watches, file and directory watches.
//
// In a file watch, you provide an absolute path to a file and a SHA1
// (encoded as hex) of the contents of that file. If the file ever
// changes so that its contents no longer match that SHA1, the
// callback triggers.
// In a file watch, you provide an absolute path to a file and a SHA1 (encoded
// as hex) of the contents of that file. If the file ever changes so that its
// contents no longer match that SHA1, the callback triggers. You can also
// provide `null` for the SHA1, which means the file should not exist.
//
// In a directory watch, you provide an absolute path to a directory
// and two lists of regular expressions specifying the files to
// include or exclude. If there is ever a file in the directory or its
// children that matches the criteria set up by the regular
// expressions, but that IS NOT present as a file watch, then the
// callback triggers.
// In a directory watch, you provide an absolute path to a directory,
// two lists of regular expressions specifying the entries to
// include and exclude, and an array of which entries to expect.
//
// For directory watches, the regular expressions work as follows. You
// provide two arrays of regular expressions, an include list and an
// exclude list. A file in the directory matches if it matches at
// least one regular expression in the include list, and doesn't match
// any regular expressions in the exclude list. Subdirectories are
// included recursively, as long as their names do not match any
// regular expression in the exclude list.
// For directory watches, the regular expressions work as follows. You provide
// two arrays of regular expressions, an include list and an exclude list. An
// entry in the directory matches if it matches at least one regular expression
// in the include list, and doesn't match any regular expressions in the exclude
// list. The string that is matched against the regular expression ends with a
// '/' if the entry is a directory. There is NO IMPLICIT RECURSION here: a
// directory watch ONLY watches the immediate children of the directory! If you
// want a recursive watch, you need to do the recursive walk while building the
// WatchSet and add a bunch of separate directory watches.
//
// When multiple directory watches are set up, say on a directory A
// and its subdirectory B, the most specific watch takes precedence in
// each directory. So only B's include/exclude lists will be checked
// in B.
// There can be multiple directory watches on the same directory. There is no
// relationship between the files found in directory watches and the files
// watched by file watches; they are parallel mechanisms.
//
// Regular expressions are checked only against individual path
// components (the actual name of the file or the subdirectory), not
// against the entire path.
// Regular expressions are checked only against individual path components (the
// actual name of the file or the subdirectory) plus the trailing '/' for
// directories, not against the entire path.
//
// You can call stop() to stop watching and tear down the
// watcher. Calling stop() guarantees that you will not receive a
// callback (if you have not already.) Calling stop() is unnecessary
// if you've received a callback.
//
// A limitation of the current implementation is that if you set up a
// directory watch on a directory A, and A does not exist at the time
// the Watcher is created but is then created later, then A will not
// be monitored. (Of course, this limitation only applies to the roots
// of the directory watches. If A exists at the time the watch is
// created, and a subdirectory B is later created, it will be properly
// detected. Likewise if A exists and is then deleted it will be
// detected.)
//
// To do a "one-shot" (to see if any files have been modified,
// compared to the dependencies, at a particular point in time, just
// create a Watcher and see if your onChange function was called
// before the Watcher constructor changed. (Then call stop() as
// usual.)
//
// XXX This should be reengineered so that dependency information from
// multiple sources can be easily merged in a generic way. Possibly in
// this new model subdirectories would be allowed in include/exclude
// patterns, and multiple directory rules would be OR'd rather than
// taking the most specific rule.
//
// Options may include
// - files: see self.files comment below
// - directories: see self.directories comment below
// - onChange: the function to call when the first change is detected.
// received one argument, the absolute path to a changed or removed
// file (potentially not the only one that changed or was removed)
// To do a "one-shot" (to see if any files have been modified, compared to the
// dependencies, at a particular point in time), use the isUpToDate function.
//
// XXX Symlinks are currently treated transparently: we treat them as the thing
// they point to (ie, as a directory if they point to a directory, as
// nonexistent if they point to something nonexistent, etc). Not sure if this is
// correct.
// A WatchSet records a set of filesystem expectations -- exact file
// contents (by sha1) and filtered directory listings -- that can later be
// checked once (isUpToDate) or watched continuously (Watcher). It is
// cheap to construct, mergeable, and JSON-serializable.
var WatchSet = function () {
  // If true, any Watcher built from this set must fire immediately (eg,
  // because two conflicting sha1s were recorded for the same file, making
  // the set unsatisfiable).
  this.alwaysFire = false;

  // Map from absolute file path to a hex sha1 of the expected contents,
  // or null to mean "this file must not exist". A Watcher fires when the
  // file stops matching that state: contents change, the file is deleted
  // (for a non-null sha), or the file appears (for null).
  this.files = {};

  // Array of directory expectations, each an object with keys:
  //  - absPath: absolute path to a directory
  //  - include, exclude: arrays of RegExps matched against immediate
  //    entry names (subdirectory names carry a trailing '/')
  //  - contents: sorted array of the entry names that pass the filters,
  //    or null if the directory itself must not exist
  //
  // An entry belongs in 'contents' when it matches at least one include
  // RegExp and no exclude RegExp. There is no recursion: files inside
  // subdirectories never appear here. The same directory may be listed
  // more than once, presumably with different filters.
  this.directories = [];
};
_.extend(WatchSet.prototype, {
  // Assert that the file at filePath has the given sha1 (hex), or does not
  // exist if hash is null. Recording two different hashes for the same path
  // makes the set unsatisfiable, so it switches to always-fire mode.
  addFile: function (filePath, hash) {
    var self = this;
    // No need to update if this is in always-fire mode already.
    if (self.alwaysFire)
      return;
    if (_.has(self.files, filePath)) {
      // Redundant?
      if (self.files[filePath] === hash)
        return;
      // Nope, inconsistent.
      self.alwaysFire = true;
      return;
    }
    self.files[filePath] = hash;
  },
  // Takes options absPath, include, exclude, and contents, as described
  // above. contents does not need to be pre-sorted.
  addDirectory: function (options) {
    var self = this;
    if (self.alwaysFire)
      return;
    // A watch with no include patterns can never match anything, so it is
    // dropped rather than stored.
    if (_.isEmpty(options.include))
      return;
    // Clone before sorting so the caller's array is not mutated; contents
    // may be null (meaning "directory must not exist"), hence the guard.
    var contents = _.clone(options.contents);
    if (contents)
      contents.sort();
    self.directories.push({
      absPath: options.absPath,
      include: options.include,
      exclude: options.exclude,
      contents: contents
    });
  },
  // Merges another WatchSet into this one. This one will now fire if either
  // WatchSet would have fired.
  merge: function (other) {
    var self = this;
    if (self.alwaysFire)
      return;
    if (other.alwaysFire) {
      self.alwaysFire = true;
      return;
    }
    // addFile handles conflicting hashes by flipping alwaysFire.
    _.each(other.files, function (hash, name) {
      self.addFile(name, hash);
    });
    _.each(other.directories, function (dir) {
      // XXX this doesn't deep-clone the directory, but I think these objects
      // are never mutated
      self.directories.push(dir);
    });
  },
  // Serialize to a plain-JSON structure (the inverse of WatchSet.fromJSON).
  // This format is persisted (eg, in package buildinfo files), so it must
  // stay stable across versions.
  toJSON: function () {
    var self = this;
    if (self.alwaysFire)
      return {alwaysFire: true};
    var ret = {files: self.files};
    // RegExps are encoded as their source string, or -- when flags are set --
    // as {$regex: source, $options: flags} (MongoDB-style wire format).
    var reToJSON = function (r) {
      var options = '';
      if (r.ignoreCase)
        options += 'i';
      if (r.multiline)
        options += 'm';
      if (r.global)
        options += 'g';
      if (options)
        return {$regex: r.source, $options: options};
      return r.source;
    };
    ret.directories = _.map(self.directories, function (d) {
      return {
        absPath: d.absPath,
        include: _.map(d.include, reToJSON),
        exclude: _.map(d.exclude, reToJSON),
        contents: d.contents
      };
    });
    return ret;
  }
});
// Deserialize a WatchSet from the plain-JSON structure produced by
// WatchSet.prototype.toJSON. A falsy argument yields an empty set.
WatchSet.fromJSON = function (json) {
  var set = new WatchSet;
  if (!json)
    return set;
  if (json.alwaysFire) {
    set.alwaysFire = true;
    return set;
  }
  // NOTE(review): assumes json.files is present whenever alwaysFire is not
  // set (toJSON always emits it); _.clone(undefined) would leave set.files
  // undefined otherwise -- confirm against callers.
  set.files = _.clone(json.files);
  // Inverse of reToJSON in toJSON above: either a bare source string or a
  // {$regex, $options} object carrying the flags.
  var reFromJSON = function (j) {
    if (j.$regex)
      return new RegExp(j.$regex, j.$options);
    return new RegExp(j);
  };
  set.directories = _.map(json.directories, function (d) {
    return {
      absPath: d.absPath,
      include: _.map(d.include, reFromJSON),
      exclude: _.map(d.exclude, reFromJSON),
      contents: d.contents
    };
  });
  return set;
};
// Produce the filtered, sorted listing of a directory in the form stored in
// WatchSet directory entries: immediate children only, subdirectory names
// suffixed with '/', kept only if they match at least one RegExp in
// options.include and none in options.exclude. Returns null when
// options.absPath does not exist or is not a directory.
var readDirectory = function (options) {
  var entries;
  try {
    entries = fs.readdirSync(options.absPath);
  } catch (e) {
    // Missing path or not a directory means "no listing"; anything else
    // (eg, permissions) is a real error and propagates.
    if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR'))
      return null;
    throw e;
  }

  // Tag subdirectories with a trailing slash. We stat (not lstat), so a
  // symlink to a directory counts as a directory.
  // XXX Does the treatment of symlinks make sense?
  var listing = [];
  entries.forEach(function (entry) {
    var stats;
    try {
      stats = fs.statSync(path.join(options.absPath, entry));
    } catch (e) {
      // Vanished after readdirSync (or a dangling symlink)? Eh, pretend it
      // was never there in the first place.
      return;
    }
    // XXX if we're on windows, I guess it's possible for files to end with '/'.
    listing.push(stats.isDirectory() ? entry + '/' : entry);
  });

  // Apply the include/exclude filters. Either list may be absent, which
  // behaves like an empty list (nothing matches it).
  var includes = options.include || [];
  var excludes = options.exclude || [];
  var anyMatch = function (regexps, entry) {
    for (var i = 0; i < regexps.length; i++) {
      if (regexps[i].test(entry))
        return true;
    }
    return false;
  };
  var result = listing.filter(function (entry) {
    return anyMatch(includes, entry) && !anyMatch(excludes, entry);
  });

  result.sort();
  return result;
};
// All fields are private.
// Watches the files and directories described by a WatchSet, and calls
// `onChange` (at most once) as soon as the disk no longer matches it.
//
// Options:
// - watchSet: the watch.WatchSet to monitor (required)
// - onChange: callback invoked once when a change is detected (required)
// - _justCheckOnce: internal; compare against disk once without
//   installing long-lived watches or timers (used by isUpToDate)
//
// All fields are private.
var Watcher = function (options) {
  var self = this;

  // The set to watch.
  self.watchSet = options.watchSet;
  if (! self.watchSet)
    throw new Error("watchSet option is required");

  // Function to call when a change is detected according to one of
  // the above.
  self.onChange = options.onChange;
  if (! self.onChange)
    throw new Error("onChange option is required");

  self.stopped = false;
  self.justCheckOnce = !!options._justCheckOnce;

  self.fileWatches = []; // array of paths
  self.directoryWatches = []; // array of watch objects

  // We track all of the currently active timers so that we can cancel
  // them at stop() time. This stops the process from hanging at
  // shutdown until all of the timers have fired. An alternate
  // approach would be to use the unref() timer handle method present
  // in modern node.
  // NOTE(review): the counter lives on the instance because the methods
  // increment `self.nextTimerId`; a constructor-local `var nextTimerId`
  // would never be read.
  self.nextTimerId = 1;
  self.timers = {}; // map from arbitrary number (nextTimerId) to timer handle

  // Were we given an inconsistent WatchSet? Fire now and be done with it.
  if (self.watchSet.alwaysFire) {
    self._fire();
    return;
  }

  self._startFileWatches();
  self._startDirectoryWatches();
};
_.extend(Watcher.prototype, {
  // Compare one watched file against the hash recorded in the WatchSet.
  // Returns true if the watcher fired (or was already stopped), false if
  // the file still matches its recorded state. A recorded hash of null
  // means "expected to be absent".
  _fireIfFileChanged: function (absPath) {
    var self = this;

    if (self.stopped)
      return true;

    var oldHash = self.watchSet.files[absPath];

    if (oldHash === undefined)
      throw new Error("Checking unknown file " + absPath);

    var contents = readFile(absPath);

    if (contents === null) {
      // File does not exist (or is a directory).
      // Is this what we expected?
      if (oldHash === null)
        return false;
      // Nope, not what we expected.
      self._fire();
      return true;
    }

    // File exists! Is that what we expected?
    if (oldHash === null) {
      self._fire();
      return true;
    }

    var newHash = sha1(contents);

    // Unchanged?
    if (newHash === oldHash)
      return false;

    self._fire();
    return true;
  }
});
_.extend(Watcher.prototype, {
  // Re-scan one watched directory and fire if its filtered listing no
  // longer matches `info.contents` as recorded in the WatchSet (including
  // the directory being created or deleted). Returns true iff we fired
  // (or were already stopped).
  _fireIfDirectoryChanged: function (info, isDoubleCheck) {
    var self = this;

    if (self.stopped)
      return true;

    var newContents = exports.readDirectory({
      absPath: info.absPath,
      include: info.include,
      exclude: info.exclude
    });

    // If the directory has changed (including being deleted or created).
    if (!_.isEqual(info.contents, newContents)) {
      self._fire();
      return true;
    }

    if (!isDoubleCheck && !self.justCheckOnce) {
      // Whenever a directory changes, scan it soon as we notice,
      // but then scan it again one second later just to make sure
      // that we haven't missed any changes. See commentary at
      // #WorkAroundLowPrecisionMtimes
      // XXX not sure why this uses a different strategy than files
      var timerId = self.nextTimerId++;
      self.timers[timerId] = setTimeout(function () {
        delete self.timers[timerId];
        if (! self.stopped)
          self._fireIfDirectoryChanged(info, true);
      }, 1000);
    }

    return false;
  }
});
_.extend(Watcher.prototype, {
  // Install an fs.watchFile poller for every file in the WatchSet, firing
  // immediately if any file already differs from its recorded hash. In
  // justCheckOnce mode only the immediate checks run; no watches or
  // timers are installed.
  _startFileWatches: function () {
    var self = this;

    // Set up a watch for each file
    _.each(self.watchSet.files, function (hash, absPath) {
      if (self.stopped)
        return;

      // Check for the case where by the time we created the watch,
      // the file had already changed from the sha we were provided.
      if (self._fireIfFileChanged(absPath))
        return;

      if (self.justCheckOnce)
        return;

      // Intentionally not using fs.watch since it doesn't play well with
      // vim (https://github.com/joyent/node/issues/3172)
      // Note that we poll very frequently (500 ms)
      fs.watchFile(absPath, {interval: 500}, function () {
        // Fire only if the contents of the file actually changed (eg,
        // maybe just its atime changed)
        self._fireIfFileChanged(absPath);
      });
      self.fileWatches.push(absPath);
    });

    if (self.stopped || self.justCheckOnce)
      return;

    // One second later, check the files again, because fs.watchFile
    // is actually implemented by polling the file's mtime, and some
    // filesystems (OSX HFS+) only keep mtimes to a resolution of one
    // second. #WorkAroundLowPrecisionMtimes
    // NOTE(review): the tail of this comment was hidden behind a diff hunk
    // header; confirm wording against the merged file.
    var timerId = self.nextTimerId++;
    self.timers[timerId] = setTimeout(function () {
      delete self.timers[timerId];
      if (self.stopped)
        return;
      _.each(self.watchSet.files, function (hash, absPath) {
        self._fireIfFileChanged(absPath);
      });
    }, 1000);
  }
});
_.extend(Watcher.prototype, {
  // Install an fs.watch handle for every directory in the WatchSet,
  // firing immediately if any directory's filtered contents already
  // differ from what the WatchSet recorded. In justCheckOnce mode only
  // the immediate checks run.
  _startDirectoryWatches: function () {
    var self = this;

    // Set up a watch for each directory
    _.each(self.watchSet.directories, function (info) {
      if (self.stopped)
        return;

      // Check for the case where by the time we created the watch, the
      // directory has already changed.
      if (self._fireIfDirectoryChanged(info))
        return;

      if (self.stopped || self.justCheckOnce)
        return;

      // fs.watchFile doesn't work for directories (as tested on ubuntu)
      // Notice that we poll very frequently (500 ms)
      try {
        self.directoryWatches.push(
          fs.watch(info.absPath, {interval: 500}, function () {
            self._fireIfDirectoryChanged(info);
          })
        );
      } catch (e) {
        // Can happen if the directory doesn't exist, in which case we should
        // fire if it should be there. (info.contents === null records
        // "expected absent", so ENOENT is only a change otherwise.)
        if (e && e.code === "ENOENT") {
          if (info.contents !== null)
            self._fire();
          return;
        }
        throw e;
      }
    });
  }
});
_.extend(Watcher.prototype, {
  // Invoke the onChange callback, tearing down all watches and timers
  // first so that a single Watcher never fires more than once.
  _fire: function () {
    var self = this;

    if (self.stopped)
      return;

    self.stop();
    self.onChange();
  }
});
_.extend(Watcher.prototype, {
  // Stop watching: cancel every pending timer and release every file and
  // directory watch. Safe to call more than once.
  // NOTE(review): parts of this body were hidden behind diff hunk
  // headers; the `self.stopped = true` and fs.unwatchFile cleanup are
  // reconstructed — confirm against the merged file.
  stop: function () {
    var self = this;
    self.stopped = true;

    // Clean up timers
    _.each(self.timers, function (timer, id) {
      clearTimeout(timer);
    });
    self.timers = {};

    // Clean up file watches
    _.each(self.fileWatches, function (absPath) {
      fs.unwatchFile(absPath);
    });
    self.fileWatches = [];

    // Clean up directory watches
    _.each(self.directoryWatches, function (watch) {
      watch.close();
    });
    self.directoryWatches = [];
  }
});
exports.Watcher = Watcher;
// Given a WatchSet, returns true if it currently describes the state of the
// disk.
var isUpToDate = function (watchSet) {
  var stillCurrent = true;
  var checker = new Watcher({
    watchSet: watchSet,
    // With _justCheckOnce set, this can only run synchronously inside the
    // constructor: no watches or timers are ever installed.
    onChange: function () {
      stillCurrent = false;
    },
    // internal flag which prevents us from starting watches and timers that
    // we're about to cancel anyway
    _justCheckOnce: true
  });
  checker.stop();
  return stillCurrent;
};
// Options should have absPath/include/exclude. Reads the (filtered)
// directory listing, records it in the WatchSet, and returns it.
var readAndWatchDirectory = function (watchSet, options) {
  var listing = readDirectory(options);
  watchSet.addDirectory(_.extend({contents: listing}, options));
  return listing;
};
// Reads the file at absPath, records its sha1 (or null if it is absent or
// a directory) in the WatchSet, and returns its contents (Buffer or null).
var readAndWatchFile = function (watchSet, absPath) {
  var data = readFile(absPath);
  watchSet.addFile(absPath, data === null ? null : sha1(data));
  return data;
};
// Returns the contents of absPath as a Buffer, or null if the path does
// not exist or is a directory. Any other filesystem error is rethrown.
var readFile = function (absPath) {
  var result = null;
  try {
    result = fs.readFileSync(absPath);
  } catch (e) {
    // ENOENT (missing) and EISDIR (directory) both mean "no file
    // contents here" to our callers; everything else is unexpected.
    if (!e || (e.code !== 'ENOENT' && e.code !== 'EISDIR'))
      throw e;
  }
  return result;
};
// Returns the hex-encoded SHA-1 digest of `contents` (string or Buffer).
var sha1 = function (contents) {
  return require('crypto')
    .createHash('sha1')
    .update(contents)
    .digest('hex');
};
// Public API of this module. (WatchSet and readDirectory are defined
// earlier in this file.)
_.extend(exports, {
  WatchSet: WatchSet,
  Watcher: Watcher,
  readDirectory: readDirectory,
  isUpToDate: isUpToDate,
  readAndWatchDirectory: readAndWatchDirectory,
  readAndWatchFile: readAndWatchFile
});