diff --git a/packages/underscore/package.js b/packages/underscore/package.js index 3de18aa94a..07604287bc 100644 --- a/packages/underscore/package.js +++ b/packages/underscore/package.js @@ -28,3 +28,9 @@ Package.on_use(function (api) { // objects, not as arrays. Search for looksLikeArray. api.add_files(['pre.js', 'underscore.js', 'post.js']); }); + + +Package.on_test(function (api) { + // Also turn off the strong 'meteor' dependency in the test slice + api.use('meteor', {unordered: true}); +}); \ No newline at end of file diff --git a/tools/bundler.js b/tools/bundler.js index 5d0a9ca0de..73a23e4ef4 100644 --- a/tools/bundler.js +++ b/tools/bundler.js @@ -541,6 +541,8 @@ _.extend(Target.prototype, { // depends on (uses) Y, and that relationship is not marked as unordered, Y // appears before X in the ordering. Raises an exception iff there is no // such ordering (due to circular dependency). + // + // XXX The topological sort code here is duplicated in catalog.js. // What slices have not yet been added to self.slices? var needed = _.clone(getsUsed); // Map from slice.id to Slice. 
@@ -2004,7 +2006,7 @@ exports.buildJsImage = function (options) { npmDir: options.npmDir }); - var unipackage = complier.compile(packageSource).unipackage; + var unipackage = compiler.compile(packageSource).unipackage; var target = new JsImageTarget({ packageLoader: options.packageLoader, diff --git a/tools/catalog.js b/tools/catalog.js index a5d6a777ab..f5d406dea2 100644 --- a/tools/catalog.js +++ b/tools/catalog.js @@ -5,6 +5,10 @@ var _ = require('underscore'); var packageClient = require('./package-client.js'); var archinfo = require('./archinfo.js'); var packageCache = require('./package-cache.js'); +var PackageSource = require('./package-source.js'); +var Unipackage = require('./unipackage.js'); +var compiler = require('./compiler.js'); +var buildmessage = require('./buildmessage.js'); var tropohouse = require('./tropohouse.js'); var isDirectory = function (dir) { @@ -35,14 +39,14 @@ var Catalog = function () { self.versions = null; self.builds = null; - // Local directories to search for packages + // Local directories to search for package source trees self.localPackageDirs = null; // Packages specified by addLocalPackage - self.localPackages = {}; // package name to package directory + self.localPackages = {}; // package name to source directory // All packages found either by localPackageDirs or localPackages - self.effectiveLocalPackages = {}; // package name to package directory + self.effectiveLocalPackages = {}; // package name to source directory }; _.extend(Catalog.prototype, { @@ -54,12 +58,12 @@ _.extend(Catalog.prototype, { // // options: // - localPackageDirs: an array of paths on local disk, that - // contain subdirectories, that each contain a package that - // should override the packages on the package server. For - // example, if there is a package 'foo' that we find through - // localPackageDirs, then we will ignore all versions of 'foo' - // that we find through the package server. 
Directories that - // don't exist (or paths that aren't directories) will be + // contain subdirectories, that each contain a source tree for a + // package that should override the packages on the package + // server. For example, if there is a package 'foo' that we find + // through localPackageDirs, then we will ignore all versions of + // 'foo' that we find through the package server. Directories + // that don't exist (or paths that aren't directories) will be // silently ignored. initialize: function (options) { var self = this; @@ -79,8 +83,7 @@ _.extend(Catalog.prototype, { self.packages = []; self.versions = []; self.builds = []; - self._addLocalPackageOverrides(); - self.initialized = true; + self._addLocalPackageOverrides(true /* setInitialized */); // OK, now initialize the catalog for real, with both local and // package server packages. @@ -99,8 +102,7 @@ _.extend(Catalog.prototype, { self.versions = []; self.builds = []; self._insertServerPackages(serverPackageData); - self._addLocalPackageOverrides(); - self.initialized = true; + self._addLocalPackageOverrides(true /* setInitialized */); }, // Compute self.effectiveLocalPackages from self.localPackageDirs @@ -141,7 +143,13 @@ _.extend(Catalog.prototype, { // Add all packages in self.effectiveLocalPackages to the catalog, // first removing any existing packages that have the same name. - _addLocalPackageOverrides: function () { + // + // If _setInitialized is provided and true, then as soon as the + // metadata for the local packages has been loaded into the catalog, + // mark the catalog as initialized. This is a bit of a hack. + // + // XXX emits buildmessages. are callers expecting that? + _addLocalPackageOverrides: function (_setInitialized) { var self = this; // Remove all packages from the catalog that have the same name as @@ -164,13 +172,16 @@ _.extend(Catalog.prototype, { return ! _.has(self.effectiveLocalPackages, pkg.name); }); - // Now add our local packages to the catalog. 
+ // Phase 1: Load the source code and create Package and Version + // entries from them. We have to do this before we can run the + // constraint solver. + var packageSources = {}; // name to PackageSource + var versionIds = {}; // name to _id of the created Version record _.each(self.effectiveLocalPackages, function (packageDir, name) { - // Load the package. - var pkg = packageCache.loadPackageAtPath(name, packageDir); + var packageSource = new PackageSource(packageDir); + packageSource.initFromPackageDir(name, packageDir); + packageSources[name] = packageSource; - // Synthesize records based on it and insert them in the - // catalog. self.packages.push({ name: name, maintainers: null, @@ -185,31 +196,179 @@ // problem, we either will have made tools into a star, or we'll // have made Catalog be backed by a real database. var versionId = "local-" + Math.floor(Math.random() * 1000000000); + versionIds[name] = versionId; + + // Accurate version numbers are of supreme importance, because + // we use version numbers (of build-time dependencies such as + // the coffeescript plugin), together with source file hashes + // and the notion of a repeatable build, to decide when a + // package build is out of date and trigger a rebuild of the + // package. + // + // The package we have just loaded may declare its version to be + // 1.2.3, but that doesn't mean it's really the official version + // 1.2.3 of the package. It only gets that version number + // officially when it's published to the package server. So what + // we'd like to do here is give it a version number like + // '1.2.3+<buildid>', where <buildid> is a hash of everything + // that's necessary to repeat the build exactly: all of the + // package's source files, all of the package's build-time + // dependencies, and the version of the Meteor build tool used + // to build it.
+ // + // Unfortunately we can't actually compute such a buildid yet + // since it depends on knowing the build-time dependencies of + // the package, which requires that we run the constraint + // solver, which can only be done once we've populated the + // catalog, which is what we're trying to do right now. + // + // So we have a workaround. For local packages we will fake the + // version in the catalog by setting the buildid to 'local', as + // in '1.2.3+local'. This is enough for the constraint solver to + // run, but any code that actually relies on accurate versions + // (for example, code that checks if a build is up to date) + // needs to be careful to get the versions not from the catalog + // but from the actual built Unipackage objects, which will have + // accurate versions (with precise buildids) even for local + // packages. + var version = packageSource.version; + if (version.indexOf('+') !== -1) + throw new Error("version already has a buildid?"); + version = version + "+local"; self.versions.push({ _id: versionId, packageName: name, - version: pkg.version, + version: version, publishedBy: null, - earliestCompatibleVersion: pkg.earliestCompatibleVersion, + earliestCompatibleVersion: packageSource.earliestCompatibleVersion, changelog: null, // XXX get actual changelog when we have it? - description: pkg.metadata.summary, - dependencies: pkg.getDependencyMetadata(), + description: packageSource.metadata.summary, + dependencies: packageSource.getDependencyMetadata(), source: null, lastUpdated: null, published: null }); + }); + if (_setInitialized) + self.initialized = true; + + // XXX XXX in the next version, don't go build all local packages + // at startup just because! instead, do the following on an + // ongoing, as-needed basis: when we want to load a build of + // package X, and it's a local package, work out its dependencies + // (and, lazily, the dependencies of its dependencies) and build + // only what is needed. 
(XXX does this mean that we have to create + // build records lazily too, or is there a way that we can create + // them upfront?) + + // Phase 2: Figure out which local packages need to be built + // before which other local packages because of build-time + // dependencies. + var packageBuildDeps = {}; // map from name to array of name + _.each(self.effectiveLocalPackages, function (packageDir, name) { + packageBuildDeps[name] = []; + var deps = compiler.getBuildTimeDependencies(packageSources[name]); + _.each(deps, function (d) { + if (! _.has(self.effectiveLocalPackages, d.name)) + return; // not a local package -- may assume it's already built + if (d.version !== packageSources[d.name].version + "+local") + throw new Error("unknown version for local package?"); + packageBuildDeps[name].push(d.name); + }); + }); + + // Phase 3: Do a topological sort and build the local packages in + // an order that respects their build-time dependencies. + // + // XXX topological sort duplicated from bundler.js. + var remaining = _.clone(self.effectiveLocalPackages); + var onStack = {}; // map from name to true + + var build = function (name) { + if (! _.has(remaining, name)) + return; + + // First build things that have to build before us (if not built yet) + _.each(packageBuildDeps[name], function (otherName) { + if (_.has(onStack, otherName)) { + buildmessage.error("circular dependency between packages " + + name + " and " + otherName); + // recover by not enforcing one of the dependencies + return; + } + + onStack[otherName] = true; + build(otherName); + delete onStack[otherName]; + }); + + // Now build this package if it needs building + var unipackage = null; + var sourcePath = self.effectiveLocalPackages[name]; + var buildDir = path.join(sourcePath, '.build'); + + if (fs.existsSync(buildDir)) { + // Looks like we have an existing build. See if it's up to date.
+ unipackage = new Unipackage(sourcePath); + unipackage.initFromPath(name, buildDir, { buildOfPath: sourcePath }); + + if (! compiler.checkUpToDate(packageSources[name], unipackage)) + unipackage = null; + } + + if (! unipackage) { + // Didn't have a build or it wasn't up to date. Build it. + buildmessage.enterJob({ + title: "building package `" + name + "`", + rootPath: sourcePath + }, function () { + unipackage = compiler.compile(packageSources[name]).unipackage; + + if (! buildmessage.jobHasMessages()) { + // Save the build, for a fast load next time + try { + files.addToGitignore(sourcePath, '.build*'); + unipackage.saveToPath(buildDir, { buildOfPath: sourcePath }); + } catch (e) { + // If we can't write to this directory, we don't get to cache our + // output, but otherwise life is good. + if (!(e && (e.code === 'EACCES' || e.code === 'EPERM'))) + throw e; + } + } + }); + } + + // And put a build record for it in the catalog self.builds.push({ packageName: name, - architecture: pkg.architectures().join('+'), + architecture: unipackage.architectures().join('+'), builtBy: null, build: null, // this would be the URL and hash - versionId: versionId, + versionId: versionIds[name], lastUpdated: null, buildPublished: null }); - }); + + // XXX XXX maybe you actually want to, like, save the unipackage + // in memory into a cache? rather than leaving packageCache to + // reload it? or maybe packageCache is unified into catalog + // somehow? sleep on it + + // Done + delete remaining[name]; + }; + + while (true) { + // Go build an arbitrary local package from among those remaining. + var first = null; + for (first in remaining) break; + if (! first) + break; + build(first); + } }, // serverPackageData is a description of the packages available from @@ -232,8 +391,8 @@ _.extend(Catalog.prototype, { }, // Add a local package to the catalog. 
`name` is the name to use for - // the package and `directory` is the directory that contains either - // its source or an unpacked unipackage. + // the package and `directory` is the directory that contains the + // source tree for the package. // // If a package named `name` exists on the package server, it will // be overridden (it will be as if that package doesn't exist on the @@ -390,7 +549,8 @@ _.extend(Catalog.prototype, { var self = this; self._requireInitialized(); - var ret = _.where(self.versions, { packageName: name }).pluck('version'); + var ret = _.pluck(_.where(self.versions, { packageName: name }), + 'version'); ret.sort(semver.compare); return ret; }, @@ -399,7 +559,7 @@ _.extend(Catalog.prototype, { // null if there is no such package or version. getVersion: function (name, version) { var self = this; - self._ensureLoaded(); + self._requireInitialized(); return _.findWhere(self.versions, { packageName: name, version: version }); }, diff --git a/tools/commands.js b/tools/commands.js index f768fa7002..98f0eed6d1 100644 --- a/tools/commands.js +++ b/tools/commands.js @@ -18,8 +18,8 @@ var utils = require('./utils.js'); var httpHelpers = require('./http-helpers.js'); var archinfo = require('./archinfo.js'); var tropohouse = require('./tropohouse.js'); -var packageLoader = require('./package-loader.js').packageLoader; var packageCache = require('./package-cache.js'); +var PackageLoader = require('./package-loader.js'); var PackageSource = require('./package-source.js'); var compiler = require('./compiler.js'); var catalog = require('./catalog.js'); diff --git a/tools/compiler.js b/tools/compiler.js index a06e797678..5a85ace4ce 100644 --- a/tools/compiler.js +++ b/tools/compiler.js @@ -5,6 +5,7 @@ var buildmessage = require('./buildmessage.js'); var archinfo = require(path.join(__dirname, 'archinfo.js')); var linker = require('./linker.js'); var Unipackage = require('./unipackage.js'); +var PackageLoader = require('./package-loader.js'); var uniload = 
require('./uniload.js'); var bundler = require('./bundler.js'); @@ -92,6 +93,14 @@ compiler.eachUsedSlice = function (dependencies, arch, packageLoader, options, // 'meteor update' for package build-time dependencies? // // XXX deal with _makeBuildTimePackageLoader callsites +// +// XXX this function is probably going to get called a huge number of +// times. For example, the Catalog calls it on every local package +// every time the local package list changes. We could memoize the +// result on packageSource (and presumably make this a method on +// PackageSource), or we could have some kind of cache (the ideal place +// for such a cache might be inside the constraint solver, since it +// will know how/when to invalidate it). var determineBuildTimeDependencies = function (packageSource) { var ret = {}; @@ -136,6 +145,7 @@ var determineBuildTimeDependencies = function (packageSource) { var constraintSolver = require('./constraint-solver.js'); var resolver = new constraintSolver.Resolver; + ret.directDependencies = {}; _.each(resolver.resolve(constraints), function (version, packageName) { // Take only direct dependencies if (_.has(constraints, packageName)) @@ -180,8 +190,7 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) { var resources = []; var js = []; var sources = []; - // XXX: Provide a clone method on watchset. - var watchSet = watch.WatchSet.fromJSON(inputSlice.watchSet.toJSON()); + var watchSet = inputSlice.watchSet.clone(); // *** Determine and load active plugins @@ -203,7 +212,7 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) { // skip unordered dependencies, because it's not going to work to // have circular build-time dependencies. // - // We pass archinfo.host here, not self.arch, because it may be more + // We pass archinfo.host here, not inputSlice.arch, because it may be more // specific, and because plugins always have to run on the host // architecture.
compiler.eachUsedSlice( @@ -237,7 +246,7 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) { if (ext in allHandlers && allHandlers[ext] !== handler) { buildmessage.error( "conflict: two packages included in " + - (self.pkg.name || "the app") + ", " + + (inputSlice.pkg.name || "the app") + ", " + (allHandlers[ext].pkg.name || "the app") + " and " + (otherPkg.name || "the app") + ", " + "are both trying to handle ." + ext); @@ -268,7 +277,7 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) { type: "asset", data: contents, path: relPath, - servePath: path.join(self.pkg.serveRoot, relPath), + servePath: path.join(inputSlice.pkg.serveRoot, relPath), hash: hash }); @@ -538,7 +547,7 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) { // *** Determine captured variables var packageVariables = []; var packageVariableNames = {}; - _.each(self.declaredExports, function (symbol) { + _.each(inputSlice.declaredExports, function (symbol) { if (_.has(packageVariableNames, symbol.name)) return; packageVariables.push({ @@ -582,9 +591,9 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) { // - sources: array of source files (identified by their path on local // disk) that were used by the build (the source files you'd have to // ship to a different machine to replicate the build there) -complier.compile = function (packageSource) { +compiler.compile = function (packageSource) { var sources = []; - var pluginWatchSet = new watch.WatchSet(); + var pluginWatchSet = packageSource.pluginWatchSet.clone(); var plugins = {}; // Determine versions of build-time dependencies @@ -675,16 +684,47 @@ complier.compile = function (packageSource) { }); _.each(packageSource.slices, function (slice) { - var sources = compileSlice(unipackage, slice, packageLoader); - sources.push.apply(sources, result.sources); + var sliceSources = compileSlice(unipackage, slice, packageLoader); + sources.push.apply(sources, 
sliceSources); }); return { - sources: _.uniq(self.sources), + sources: _.uniq(sources), unipackage: unipackage }; }; +// Figure out what packages have to be compiled and available in the +// catalog before 'packageSource' can be compiled. Returns an array of +// objects with keys 'name', 'version' (the latter a version +// string). Yes, it is possible that multiple versions of some other +// package might be build-time dependencies (because of plugins). +compiler.getBuildTimeDependencies = function (packageSource) { + var versions = {}; // map from package name to version to true + var addVersion = function (version, name) { + if (! _.has(versions, name)) + versions[name] = {}; + versions[name][version] = true; + }; + + var buildTimeDeps = determineBuildTimeDependencies(packageSource); + _.each(buildTimeDeps.directDependencies, addVersion); + _.each(buildTimeDeps.pluginDependencies, function (versions, pluginName) { + _.each(versions, addVersion); + }); + + delete versions[packageSource.name]; + + var ret = []; + _.each(versions, function (versionArray, name) { + _.each(_.keys(versionArray), function (version) { + ret.push({ name: name, version: version }); + }); + }); + + return ret; +}; + // Check to see if a particular build of a package is up to date (that // is, if the source files haven't changed and the build-time // dependencies haven't changed, and if we're a sufficiently similar @@ -703,7 +743,7 @@ compiler.checkUpToDate = function (packageSource, unipackage) { // Do we think we'd generate different contents than the tool that // built this package? 
- if (unipackage.builtBy !== complier.BUILT_BY) + if (unipackage.builtBy !== compiler.BUILT_BY) return false; // XXX XXX XXX diff --git a/tools/constraint-solver.js b/tools/constraint-solver.js index 0bc524a0eb..ebe14066fb 100644 --- a/tools/constraint-solver.js +++ b/tools/constraint-solver.js @@ -1,6 +1,7 @@ var semver = require('semver'); var _ = require('underscore'); var catalog = require('./catalog.js'); +var utils = require('./utils.js'); var constraintSolver = exports; @@ -15,6 +16,32 @@ var Dependency = { }; */ +// XXX copied (with simplifications) from EJSON.clone +var deepClone = function (v) { + var ret; + if (typeof v !== "object") + return v; + if (v === null) + return null; // null has typeof "object" + // XXX: Use something better than underscore's isArray + if (_.isArray(v) || _.isArguments(v)) { + // For some reason, _.map doesn't work in this context on Opera (weird test + // failures). + ret = []; + for (var i = 0; i < v.length; i++) + ret[i] = deepClone(v[i]); + return ret; + } + // handle other objects + ret = {}; + _.each(v, function (value, key) { + ret[key] = deepClone(value); + }); + return ret; +}; + + + // main class constraintSolver.Resolver = function (options) { var self = this; @@ -46,7 +73,7 @@ constraintSolver.Resolver = function (options) { packageDep.earliestCompatibleVersion = versionDef.earliestCompatibleVersion; packageDep.dependencies = _.map(versionDef.dependencies, function (dep, packageName) { return _.extend({packageName: packageName}, - PackageVersion.parseVersionConstraint(dep.constraint)); + utils.parseVersionConstraint(dep.constraint)); }); self.packageDeps[packageDef.name][versionDef.version] = packageDep; @@ -162,7 +189,7 @@ constraintSolver.Resolver.prototype._resolve = function (dependencies, state) { for (var i = 0; i < satisfyingVersions.length; i++) { var version = satisfyingVersions[i]; - var newState = EJSON.clone(state); + var newState = deepClone(state); newState.picks[candidatePackageName] = version;
newState.exactDepsStack.push({ packageName: candidatePackageName, @@ -222,7 +249,7 @@ var toStructuredDeps = function (dependencies) { if (typeof details === "string" || details === null) { structuredDeps.push(_.extend( { packageName: packageName }, - PackageVersion.parseVersionConstraint(details))); + utils.parseVersionConstraint(details))); } else { structuredDeps.push(_.extend({ packageName: packageName }, details)); } diff --git a/tools/package-cache.js b/tools/package-cache.js index 2ad68cd540..eb92b3dbeb 100644 --- a/tools/package-cache.js +++ b/tools/package-cache.js @@ -1,8 +1,8 @@ var fs = require("fs"); var path = require("path"); -var packageLoader = require("./package-loader.js"); -var packages = require("./packages.js"); var archinfo = require("./archinfo.js"); +var compiler = require("./compiler.js"); +var PackageSource = require("./package-source.js"); var _ = require('underscore'); // both map from package load path to: @@ -73,78 +73,81 @@ _.extend(PackageCache.prototype, { } // Load package from disk - var pkg = new packages.Package(loadPath); + + // Does loadPath point directly at a unipackage (rather than a + // source tree?) if (fs.existsSync(path.join(loadPath, 'unipackage.json'))) { - // It's an already-built package if (options.forceRebuild) { throw new Error('Cannot rebuild from a unipackage directory.'); } - pkg.initFromUnipackage(name, loadPath); - self.loadedPackages[loadPath] = {pkg: pkg, packageDir: loadPath}; - } else { - // It's a source tree. Does it have a built unipackage inside it? - var buildDir = path.join(loadPath, '.build'); - // XXX XXX onlyIfUpToDate flag was removed. call - // compiler.checkUpToDate instead - if (! options.forceRebuild && - fs.existsSync(buildDir) && - pkg.initFromUnipackage(name, buildDir, - { onlyIfUpToDate: true, - buildOfPath: loadPath })) { - // We already had a build and it was up to date. 
- self.loadedPackages[loadPath] = {pkg: pkg, packageDir: loadPath}; - } else { - // Either we didn't have a build, or it was out of date, or the - // caller wanted us to rebuild no matter what. Build the - // package. - buildmessage.enterJob({ - title: "building package `" + name + "`", - rootPath: loadPath - }, function () { - // This has to be done in the right sequence: initialize - // (which loads the dependency list but does not get() those - // packages), then put the package into the package list, - // then call build() to get() the dependencies and finish - // the build. If you called build() before putting the - // package in the package list then you'd recurse - // forever. (build() needs the dependencies because it needs - // to look at the handlers registered by any plugins in the - // packages that we use.) - pkg.initFromPackageDir(name, loadPath); - self.loadedPackages[loadPath] = {pkg: pkg, packageDir: loadPath}; - pkg.build(); + var unipackage = new Unipackage(loadPath); + unipackage.initFromPath(name, loadPath); + self.loadedPackages[loadPath] = { pkg: unipackage, packageDir: loadPath }; + return unipackage; + }; - if (! buildmessage.jobHasMessages()) { - // Save it, for a fast load next time - try { - files.addToGitignore(loadPath, '.build*'); - pkg.saveAsUnipackage(buildDir, { buildOfPath: loadPath }); - } catch (e) { - // If we can't write to this directory, we don't get to cache our - // output, but otherwise life is good. - if (!(e && (e.code === 'EACCES' || e.code === 'EPERM'))) - throw e; - } - } - }); + // It's a source tree. Load it. + var packageSource = new PackageSource(loadPath); + packageSource.initFromPackageDir(name, loadPath); + + // Does it have an up-to-date build? + var buildDir = path.join(loadPath, '.build'); + if (! 
options.forceRebuild && fs.existsSync(buildDir)) { + var unipackage = new Unipackage(loadPath); + unipackage.initFromPath(name, buildDir); + if (compiler.checkUpToDate(packageSource, unipackage)) { + self.loadedPackages[loadPath] = { pkg: unipackage, + packageDir: loadPath }; + return unipackage; } } - return pkg; + // Either we didn't have a build, or it was out of date, or the + // caller wanted us to rebuild no matter what. Build the package. + return buildmessage.enterJob({ + title: "building package `" + name + "`", + rootPath: loadPath + }, function () { + // We used to take great care to first put a + // loaded-but-not-built package object (the equivalent of a + // PackageSource) into self.loadedPackages before calling + // build() as a hacky way of dealing with build-time + // dependencies. + // + // We don't do that anymore and .. + // XXX at the moment, rely on catalog to initialize ahead of us + // and swoop in and build all of the local packages informed by + // a topological sort + var unipackage = compiler.compile(packageSource).unipackage; + self.loadedPackages[loadPath] = { pkg: unipackage, packageDir: loadPath }; + + if (! buildmessage.jobHasMessages()) { + // Save it, for a fast load next time + try { + files.addToGitignore(loadPath, '.build*'); + unipackage.saveToPath(buildDir, { buildOfPath: loadPath }); + } catch (e) { + // If we can't write to this directory, we don't get to cache our + // output, but otherwise life is good. + if (!(e && (e.code === 'EACCES' || e.code === 'EPERM'))) + throw e; + } + } + + return unipackage; + }); }, // Get a package that represents an app. (ignoreFiles is optional // and if given, it should be an array of regexps for filenames to // ignore when scanning for source files.)
- // XXX formerly called getForApp loadAppAtPath: function (appDir, ignoreFiles) { var self = this; - var pkg = new packages.Package; - pkg.initFromAppDir(appDir, ignoreFiles || []); - pkg.build(); - return pkg; + var packageSource = new PackageSource; + packageSource.initFromAppDir(appDir, ignoreFiles); + return compiler.compile(packageSource).unipackage; } }); -module.exports = new PackageCache(); +module.exports = new PackageCache; diff --git a/tools/package-loader.js b/tools/package-loader.js index 24da28ed8b..e565d4fc6d 100644 --- a/tools/package-loader.js +++ b/tools/package-loader.js @@ -2,17 +2,15 @@ var _ = require('underscore'); var packageCache = require('./package-cache.js'); var catalog = require('./catalog.js'); -var packageLoader = exports; - // options: // - versions: a map from package name to the version to use. or null to only // use local packages and ignore the package versions. -packageLoader.PackageLoader = function (options) { +var PackageLoader = function (options) { var self = this; self.versions = options.versions; }; -_.extend(packageLoader.PackageLoader.prototype, { +_.extend(PackageLoader.prototype, { // Given the name of a package, return a Package object, or throw an // error if the package wasn't included in the 'versions' passed on // initalization or isn't available (for example, hasn't been @@ -97,3 +95,5 @@ _.extend(packageLoader.PackageLoader.prototype, { return pkg.getDefaultSlices(arch); } }); + +module.exports = PackageLoader; \ No newline at end of file diff --git a/tools/package-source.js b/tools/package-source.js index 75109cb245..dabb5d6aeb 100644 --- a/tools/package-source.js +++ b/tools/package-source.js @@ -9,7 +9,8 @@ var watch = require('./watch.js'); var project = require('./project.js'); var buildmessage = require('./buildmessage.js'); var meteorNpm = require('./meteor-npm.js'); -var archinfo = require(path.join(__dirname, 'archinfo.js')); +var Builder = require('./builder.js'); +var archinfo = 
require('./archinfo.js'); // Like Perl's quotemeta: quotes all regexp metacharacters. See // https://github.com/substack/quotemeta/blob/master/index.js @@ -126,6 +127,7 @@ var SourceSlice = function (pkg, options) { // In most places, instead of using 'uses' directly, you want to use // something like compiler.eachUsedSlice so you also take into // account implied packages. + self.uses = options.uses || []; // Packages which are "implied" by using this package. If a slice X // uses this slice Y, and Y implies Z, then X will effectively use Z @@ -162,7 +164,7 @@ var SourceSlice = function (pkg, options) { // of the code, this does not include source files or directories, // but only control files such as package.js and .meteor/packages, // since the rest are not determined until compile time. - self.watchSet = options.watchSet || new watch.WatchSet(); + self.watchSet = options.watchSet || new watch.WatchSet; // Absolute path to the node_modules directory to use at runtime to // resolve Npm.require() calls in this slice. null if this slice @@ -238,6 +240,10 @@ var PackageSource = function (packageDirectoryForBuildInfo) { // package. Map from plugin name to object with keys 'name', 'use', // 'sources', and 'npmDependencies'. self.pluginInfo = {}; + + // Analogous to watchSet in SourceSlice but for plugins. At this + // stage will typically contain just 'package.js'. + self.pluginWatchSet = new watch.WatchSet; }; @@ -322,10 +328,9 @@ _.extend(PackageSource.prototype, { // Initialize a PackageSource from a package.js-style package // directory. 
- initFromPackageDir: function (name, dir, options) { + initFromPackageDir: function (name, dir) { var self = this; var isPortable = true; - options = options || {}; self.name = name; self.sourceRoot = dir; self.serveRoot = path.join(path.sep, 'packages', name); @@ -361,7 +366,8 @@ _.extend(PackageSource.prototype, { if (key === "summary" || key === "internal") self.metadata[key] = value; else if (key === "version") - // XXX validate that version parses + // XXX validate that version parses -- and that it doesn't + // contain a +! self.version = value; else if (key === "earliestCompatibleVersion") self.earliestCompatibleVersion = value; @@ -885,9 +891,7 @@ _.extend(PackageSource.prototype, { }, // Initialize a package from an application directory (has .meteor/packages). - // - // XXX XXX make dependencies provide packageLoader - initFromAppDir: function (appDir, packageLoader, ignoreFiles) { + initFromAppDir: function (appDir, ignoreFiles) { var self = this; appDir = path.resolve(appDir); self.name = null; @@ -1114,6 +1118,8 @@ _.extend(PackageSource.prototype, { // For options, see getDependencyMetadata. _computeDependencyMetadata: function (options) { var self = this; + options = options || {}; + var dependencies = {}; var allConstraints = {}; // for error reporting. package name to array var failed = false; diff --git a/tools/uniload.js b/tools/uniload.js index 34c59279bd..fd44ba55b1 100644 --- a/tools/uniload.js +++ b/tools/uniload.js @@ -2,7 +2,7 @@ var _ = require('underscore'); var bundler = require('./bundler.js'); var buildmessage = require('./buildmessage.js'); var release = require('./release.js'); -var PackageLoader = require("./package-loader.js").PackageLoader; +var PackageLoader = require("./package-loader.js"); var packageCache = require("./package-cache.js"); // Load unipackages into the currently running node.js process. 
Use @@ -49,7 +49,7 @@ var packageCache = require("./package-cache.js"); // var reverse = DDP.connect('reverse.meteor.com'); // console.log(reverse.call('reverse', 'hello world')); -var cacheRelease = null; +var cacheRelease = undefined; var cache = null; // map from package names (joined with ',') to return value var load = function (options) { diff --git a/tools/unipackage.js b/tools/unipackage.js index 0787ce4a54..7e46e7a6b9 100644 --- a/tools/unipackage.js +++ b/tools/unipackage.js @@ -4,6 +4,7 @@ var _ = require('underscore'); var linker = require('./linker.js'); var buildmessage = require('./buildmessage.js'); var fs = require('fs'); +var path = require('path'); var Builder = require('./builder.js'); var bundler = require('./bundler.js'); var watch = require('./watch.js'); @@ -200,7 +201,7 @@ var Unipackage = function (packageDirectoryForBuildInfo) { // -- Information for up-to-date checks -- // Version number of the tool that built this unipackage - // (complier.BUILT_BY) or null if unknown + // (compiler.BUILT_BY) or null if unknown self.builtBy = null; // If true, force the checkUpToDate to return false for this unipackage. diff --git a/tools/watch.js b/tools/watch.js index a88e9c8742..04f86e2f48 100644 --- a/tools/watch.js +++ b/tools/watch.js @@ -143,11 +143,23 @@ _.extend(WatchSet.prototype, { // }); _.each(other.directories, function (dir) { // XXX this doesn't deep-clone the directory, but I think these objects - // are never mutated + // are never mutated #WatchSetShallowClone self.directories.push(dir); }); }, + clone: function () { + var self = this; + var ret = new WatchSet; + + // XXX doesn't bother to deep-clone the directory info + // #WatchSetShallowClone + ret.alwaysFire = self.alwaysFire; + ret.files = _.clone(self.files); + ret.directories = _.clone(self.directories); + return ret; + }, + toJSON: function () { var self = this; if (self.alwaysFire)