For direct dependencies, only plugins affect build

This commit is contained in:
ekatek
2014-03-14 17:25:40 -07:00
parent 6c993f1ea2
commit 4565c2d262
13 changed files with 82 additions and 44 deletions

View File

@@ -529,7 +529,7 @@ _.extend(Target.prototype, {
if (_.has(getsUsed, slice.id))
return;
getsUsed[slice.id] = slice;
slice.eachUsedSlice(self.arch, packageLoader,
compiler.eachUsedSlice(slice.uses, self.arch, packageLoader,
{skipWeak: true}, addToGetsUsed);
};
_.each(rootSlices, addToGetsUsed);
@@ -562,8 +562,8 @@ _.extend(Target.prototype, {
// will depend on `slice` and need to be added after it. So we ignore
those edges. Because we did follow those edges in Phase 1, any unordered
// slices were at some point in `needed` and will not be left out).
slice.eachUsedSlice(
self.arch, packageLoader, {skipUnordered: true},
compiler.eachUsedSlice(
slice.uses, self.arch, packageLoader, {skipUnordered: true},
function (usedSlice, useOptions) {
// If this is a weak dependency, and nothing else in the target had a
// strong dependency on it, then ignore this edge.
@@ -1770,7 +1770,7 @@ exports.bundle = function (options) {
if (includeDefaultTargets) {
// Create a Package object that represents the app
var app = packageCache.loadAppAtPath(appDir, ignoreFiles);
var app = packageCache.packageCache.loadAppAtPath(appDir, ignoreFiles);
// Client
var client = makeClientTarget(app);
@@ -1881,7 +1881,7 @@ exports.bundle = function (options) {
// Read this directory as a package and create a target from
// it
var pkg = packageCache.
var pkg = packageCache.packageCache.
loadPackageAtPath(p.name, p.loadPath);
var target;
switch (p.type) {

View File

@@ -11,6 +11,8 @@ var compiler = require('./compiler.js');
var buildmessage = require('./buildmessage.js');
var tropohouse = require('./tropohouse.js');
var catalog = exports;
var isDirectory = function (dir) {
try {
// use stat rather than lstat since symlink to dir is OK
@@ -247,7 +249,8 @@ _.extend(Catalog.prototype, {
dependencies: packageSource.getDependencyMetadata(),
source: null,
lastUpdated: null,
published: null
published: null,
containsPlugins: packageSource.containsPlugins()
});
});
@@ -269,7 +272,8 @@ _.extend(Catalog.prototype, {
var packageBuildDeps = {}; // map from name to array of name
_.each(self.effectiveLocalPackages, function (packageDir, name) {
packageBuildDeps[name] = [];
var deps = compiler.getBuildTimeDependencies(packageSources[name]);
var deps = compiler.getBuildOrderConstraints(packageSources[name]);
console.log("XXX deps", name, deps);
_.each(deps, function (d) {
if (! _.has(self.effectiveLocalPackages, d.name))
return; // not a local package -- may assume it's already built
@@ -278,7 +282,7 @@ _.extend(Catalog.prototype, {
packageBuildDeps[name].push(d.name);
});
});
console.log("XXX", packageBuildDeps);
// Phase 3: Do a topological sort and build the local packages in
// an order that respects their build-time dependencies.
//
@@ -476,7 +480,7 @@ _.extend(Catalog.prototype, {
self._requireInitialized();
// Clear any cached builds in the package cache.
packageCache.refresh();
packageCache.packageCache.refresh();
// Delete any that are source packages with builds.
var count = 0;
@@ -489,7 +493,8 @@ _.extend(Catalog.prototype, {
// passes because otherwise we might end up rebuilding a package
// and then immediately deleting it.
_.each(self.effectiveLocalPackages, function (loadPath, name) {
packageCache.loadPackageAtPath(name, loadPath, { throwOnError: false });
packageCache.packageCache.
loadPackageAtPath(name, loadPath, { throwOnError: false });
count ++;
});
@@ -632,4 +637,4 @@ _.extend(Catalog.prototype, {
}
});
module.exports = new Catalog();
catalog.catalog = new Catalog();

View File

@@ -22,7 +22,7 @@ var packageCache = require('./package-cache.js');
var PackageLoader = require('./package-loader.js');
var PackageSource = require('./package-source.js');
var compiler = require('./compiler.js');
var catalog = require('./catalog.js');
var catalog = require('./catalog.js').catalog;
// Given a site name passed on the command line (eg, 'mysite'), return
// a fully-qualified hostname ('mysite.meteor.com').
@@ -1494,7 +1494,8 @@ main.registerCommand({
// then we can't go through the build process to retrieve the
// sources that we used to build the package, and we need the
// source list to compile the source tarball.
pkg = packageCache.loadPackageAtPath(packageName, options.packageDir, {
pkg = packageCache.packageCache.
loadPackageAtPath(packageName, options.packageDir, {
forceRebuild: true
});
});

View File

@@ -8,6 +8,7 @@ var Unipackage = require('./unipackage.js');
var PackageLoader = require('./package-loader.js');
var uniload = require('./uniload.js');
var bundler = require('./bundler.js');
var catalog = require('./catalog.js');
var compiler = exports;
@@ -81,8 +82,8 @@ compiler.eachUsedSlice = function (dependencies, arch, packageLoader, options,
// dependencies. Emits buildmessages if this is impossible.
//
// Output is an object with keys:
// - directDependencies: map from package name to version string, for
// the package's direct, ordered, strong, non-implied dependencies
// - directDependencies: map from package name to version string, for the
// package's direct, ordered, strong, non-implied dependencies.
// - pluginDependencies: map from plugin name to complete (transitive)
// version information for all packages used to build the plugin, as
// a map from package name to version string.
@@ -147,9 +148,10 @@ var determineBuildTimeDependencies = function (packageSource) {
ret.directDependencies = {};
_.each(resolver.resolve(constraints), function (version, packageName) {
// Take only direct dependencies
if (_.has(constraints, packageName))
// Take only direct dependencies.
if (_.has(constraints, packageName)) {
ret.directDependencies[packageName] = version;
}
});
// -- Plugins --
@@ -211,17 +213,14 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) {
// not some unrelated package in the target has a dependency. And we
// skip unordered dependencies, because it's not going to work to
// have circular build-time dependencies.
//
// We pass archinfo.host here, not inputSlice.arch, because it may be more
// specific, and because plugins always have to run on the host
// architecture.
compiler.eachUsedSlice(
inputSlice.uses, archinfo.host(), packageLoader,
{ skipWeak: true, skipUnordered: true },
function (usedSlice) {
activePluginPackages.push(usedSlice.pkg);
_.each(inputSlice.uses, function (dependency) {
console.log(dependency);
if (! dependency.weak && ! dependency.unordered &&
packageLoader.containsPlugins(dependency.package)) {
activePluginPackages.push(
packageLoader.getPackage(dependency.package));
}
);
});
activePluginPackages = _.uniq(activePluginPackages);
@@ -567,7 +566,7 @@ var compileSlice = function (unipackage, inputSlice, packageLoader) {
// *** Output slice object
unipackage.addSlice({
sliceName: inputSlice.sliceName,
name: inputSlice.sliceName,
arch: inputSlice.arch, // XXX: arch?
uses: inputSlice.uses,
implies: inputSlice.implies,
@@ -699,7 +698,7 @@ compiler.compile = function (packageSource) {
// objects with keys 'name', 'version' (the latter a version
// string). Yes, it is possible that multiple versions of some other
// package might be build-time dependencies (because of plugins).
compiler.getBuildTimeDependencies = function (packageSource) {
compiler.getBuildOrderConstraints = function (packageSource) {
var versions = {}; // map from package name to version to true
var addVersion = function (version, name) {
if (! _.has(versions, name))
@@ -708,7 +707,13 @@ compiler.getBuildTimeDependencies = function (packageSource) {
};
var buildTimeDeps = determineBuildTimeDependencies(packageSource);
_.each(buildTimeDeps.directDependencies, addVersion);
_.each(buildTimeDeps.directDependencies, function (version, name) {
// Direct dependencies only create a build-order constraint if they contain
// a plugin.
if( catalog.catalog.getVersion(name, version).containsPlugins) {
addVersion(version, name);
}
});
_.each(buildTimeDeps.pluginDependencies, function (versions, pluginName) {
_.each(versions, addVersion);
});
@@ -784,4 +789,4 @@ compiler.checkUpToDate = function (packageSource, unipackage) {
return false;
return true;
};
};

View File

@@ -1,6 +1,6 @@
var semver = require('semver');
var _ = require('underscore');
var catalog = require('./catalog.js');
var catalog = require('./catalog.js').catalog;
var utils = require('./utils.js');
var constraintSolver = exports;
@@ -130,7 +130,7 @@ constraintSolver.Resolver.prototype._propagateExactDeps =
constraintSolver.Resolver.prototype._resolve = function (dependencies, state) {
// Comment this out until we have a way to get check() here
// check(dependencies, [Dependency]);
// check(dependencies, [Dependency]);
state = state || {};
state.picks = state.picks || {};

View File

@@ -10,7 +10,7 @@ var warehouse = require('./warehouse.js');
var release = require('./release.js');
var project = require('./project.js');
var fs = require('fs');
var catalog = require('./catalog.js');
var catalog = require('./catalog.js').catalog;
var main = exports;
// node (v8) defaults to only recording 10 lines of stack trace. This

View File

@@ -4,6 +4,9 @@ var archinfo = require("./archinfo.js");
var compiler = require("./compiler.js");
var PackageSource = require("./package-source.js");
var _ = require('underscore');
var Unipackage = require("./unipackage.js");
var packageCache = exports;
// both map from package load path to:
// - pkg: cached Package object
@@ -150,4 +153,4 @@ _.extend(PackageCache.prototype, {
}
});
module.exports = new PackageCache;
packageCache.packageCache = new PackageCache;

View File

@@ -48,11 +48,27 @@ _.extend(PackageLoader.prototype, {
return pkg;
}
return packageCache.loadPackageAtPath(name, loadPath, {
return packageCache.packageCache.loadPackageAtPath(name, loadPath, {
forceRebuild: options.forceRebuild
});
},
containsPlugins: function (name) {
var self = this;
var versionRecord;
if (self.versions === null) {
versionRecord = catalog.catalog.getLatestVersion(name);
} else if (_.has(self.versions, name)) {
versionRecord = catalog.catalog.getVersion(name, self.versions[name]);
} else {
console.log("Plugins:", self.versions);
throw new Error("no version specified for package " + name);
}
return versionRecord.containsPlugins;
},
// As getPackage, but returns the path of the package that would be
// loaded rather than loading the package, and does not take any
// options. Returns null if the package is not available.
@@ -65,8 +81,10 @@ _.extend(PackageLoader.prototype, {
var self = this;
console.log(name, self.versions);
if (self.versions && ! _.has(self.versions, name))
throw new Error("no version chosen for package?");
if (self.versions && ! _.has(self.versions, name)) {
console.log("VERSIONS:", self.versions);
throw new Error("no version chosen for package " + name + "?");
}
var version;
if (self.versions) {
@@ -75,7 +93,7 @@ _.extend(PackageLoader.prototype, {
version = null;
}
return catalog.getLoadPathForPackage(name, version);
return catalog.catalog.getLoadPathForPackage(name, version);
},
// Given a slice set spec -- either a package name like "ddp", or a particular
@@ -96,4 +114,4 @@ _.extend(PackageLoader.prototype, {
}
});
module.exports = PackageLoader;
module.exports = PackageLoader;

View File

@@ -1081,6 +1081,12 @@ _.extend(PackageSource.prototype, {
self.defaultSlices = { browser: ['client'], 'os': ['server'] };
},
// True if the package defines any plugins.
containsPlugins: function () {
var self = this;
return ! _.isEmpty(self.pluginInfo);
},
// Return dependency metadata for all slices, in the format needed
// by the package catalog.
//

View File

@@ -11,7 +11,7 @@ var release = require('./release.js');
var buildmessage = require('./buildmessage.js');
var inFiber = require('./fiber-helpers.js').inFiber;
var runLog = require('./run-log.js').runLog;
var catalog = require('./catalog.js');
var catalog = require('./catalog.js').catalog;
var packageCache = require('./package-cache.js');
// Parse out s as if it were a bash command line.
@@ -392,7 +392,7 @@ _.extend(AppRunner.prototype, {
// Bundle up the app
if (! self.firstRun)
packageCache.refresh(true); // pick up changes to packages
packageCache.packageCache.refresh(true); // pick up changes to packages
var bundlePath = path.join(self.appDir, '.meteor', 'local', 'build');
var bundleResult = bundler.bundle({

View File

@@ -10,7 +10,7 @@ var httpHelpers = require('./http-helpers.js');
var fiberHelpers = require('./fiber-helpers.js');
var release = require('./release.js');
var archinfo = require('./archinfo.js');
var catalog = require('./catalog.js');
var catalog = require('./catalog.js').catalog;
var Unipackage = require('./unipackage.js');
var tropohouse = exports;

View File

@@ -3,7 +3,6 @@ var bundler = require('./bundler.js');
var buildmessage = require('./buildmessage.js');
var release = require('./release.js');
var PackageLoader = require("./package-loader.js");
var packageCache = require("./package-cache.js");
// Load unipackages into the currently running node.js process. Use
// this to use unipackages (such as the DDP client) from command-line

View File

@@ -296,6 +296,7 @@ _.extend(Unipackage.prototype, {
message, not an exception, and all of this talk of slices and
// architectures is likely to be confusing/overkill in many
// contexts.
console.log("SLICES:", self.slices);
throw new Error((self.name || "this app") +
" does not have a slice named '" + name +
"' that runs on architecture '" + arch + "'");