mirror of
https://github.com/meteor/meteor.git
synced 2026-05-02 03:01:46 -04:00
Previously, if localPackageDirs contained an app package that used a troposphere package, the constraint solver would bomb out on that app package. This commit separates out the checkout packages that we need for bootstrapping catalog initialization from other local packages, which are allowed to use troposphere packages.
714 lines
27 KiB
JavaScript
714 lines
27 KiB
JavaScript
var fs = require('fs');
|
|
var path = require('path');
|
|
var semver = require('semver');
|
|
var _ = require('underscore');
|
|
var packageClient = require('./package-client.js');
|
|
var archinfo = require('./archinfo.js');
|
|
var packageCache = require('./package-cache.js');
|
|
var PackageSource = require('./package-source.js');
|
|
var Unipackage = require('./unipackage.js');
|
|
var compiler = require('./compiler.js');
|
|
var buildmessage = require('./buildmessage.js');
|
|
var tropohouse = require('./tropohouse.js');
|
|
var watch = require('./watch.js');
|
|
var files = require('./files.js');
|
|
|
|
// Alias the module's `exports` object so that the rest of this file can
// attach public names to `catalog` (e.g. the singleton at the bottom).
var catalog = exports;
|
|
|
|
// Return true if `dir` names an existing directory. Symlinks that point
// at directories count (we stat rather than lstat). Any stat failure --
// nonexistent path, permission error, etc. -- simply yields false.
var isDirectory = function (dir) {
  var stats;
  try {
    // use stat rather than lstat since symlink to dir is OK
    stats = fs.statSync(dir);
  } catch (e) {
    return false;
  }
  return stats.isDirectory();
};
|
|
|
|
// Use this class to query the metadata for all of the packages that
// we know about (including packages on the package server that we
// haven't actually downloaded yet).
//
var Catalog = function () {
  // The catalog cannot be used until it is initialized by calling
  // initialize(). We use this pattern, rather than expecting
  // arguments to the constructor, to make it easier to use catalog as
  // a singleton.
  this.initialized = false;

  // Package server data. Arrays of objects (null until initialized).
  this.packages = null;
  this.versions = null;
  this.builds = null;

  // Local directories to search for package source trees.
  this.localPackageDirs = null;

  // Packages specified by addLocalPackage: package name -> source
  // directory.
  this.localPackages = {};

  // All packages found either by localPackageDirs or localPackages:
  // package name -> source directory.
  this.effectiveLocalPackages = {};

  // Set this to true if we are not going to connect to the remote
  // package server, and will only use the cached data.json file for
  // our package information. This means that the catalog might be out
  // of date on the latest developments.
  this.offline = null;
};
|
|
|
|
_.extend(Catalog.prototype, {
|
|
  // Initialize the Catalog. This must be called before any other
  // Catalog function.
  //
  // It will try to talk to the network to synchronize our package
  // list with the package server.
  //
  // options:
  //  - localPackageDirs: an array of paths on local disk, that
  //    contain subdirectories, that each contain a source tree for a
  //    package that should override the packages on the package
  //    server. For example, if there is a package 'foo' that we find
  //    through localPackageDirs, then we will ignore all versions of
  //    'foo' that we find through the package server. Directories
  //    that don't exist (or paths that aren't directories) will be
  //    silently ignored.
  //  - bootstrapLocalPackageDirs: like 'localPackageDirs', but
  //    containing the packages that we can call 'unipackage.load' to
  //    load the packages that we need to talk to the server. Packages
  //    inside `bootstrapLocalPackageDirs` cannot use troposphere
  //    packages.
  //  - offline: if truthy, never contact the package server; rely
  //    solely on the locally cached server data.
  initialize: function (options) {
    var self = this;

    options = options || {};

    // Trim down local package dirs to just those that actually exist
    // (and that are actually directories).
    var trimPackageDirs = function (packageDirs) {
      return _.filter(packageDirs || [], isDirectory);
    };

    var bootstrapPackageDirs = trimPackageDirs(
      options.bootstrapLocalPackageDirs);
    var localPackageDirs = trimPackageDirs(
      options.localPackageDirs);
    // Bootstrap dirs come first so they win name conflicts with
    // ordinary local package dirs (earlier dirs override later ones).
    var allLocalPackageDirs = bootstrapPackageDirs.concat(localPackageDirs);

    self.localPackageDirs = bootstrapPackageDirs;
    self._recomputeEffectiveLocalPackages();

    // First, initialize the catalog with just the local packages for
    // bootstrapping. This is just enough (at least if we're running
    // from a checkout) that we're able to call unipackage.load to load
    // the packages that we need to talk to the server.
    self.packages = [];
    self.versions = [];
    self.builds = [];
    console.log("XXX Loading local packages for bootstrapping");
    self._addLocalPackageOverrides(true /* setInitialized */);

    // Now we can include options.localPackageDirs. We do this
    // separately from the bootstrapping packages because packages in
    // options.localPackageDirs (app packages, for example) are allowed
    // to use troposphere packages, so we have to be able to talk to the
    // server before we load them.
    self.localPackageDirs = allLocalPackageDirs;
    self._recomputeEffectiveLocalPackages();
    // We don't need to call _addLocalPackageOverrides here; that will
    // be called as part of catalog initialization, which is the next
    // step.

    // OK, now initialize the catalog for real, with both local and
    // package server packages.
    console.log("XXX Loading catalog for real");

    // Figure out whether we intend to connect to the package server.
    self.offline = options.offline ? options.offline : false;
    self._refresh(true /* load server packages */);
  },
|
|
|
|
  // Rebuild the catalog's package/version/build tables from the cached
  // server data (optionally synchronizing with the server first), then
  // re-apply local package overrides.
  //
  // If sync is false, this will not synchronize with the remote server,
  // even if the catalog is not in offline mode. This is an optimization
  // for loading local packages. (An offline catalog will not sync with
  // the server even if sync is true.)
  //
  // Prints a warning if `sync` is true and we can't contact the package
  // server.
  _refresh: function (sync) {
    var self = this;
    self._requireInitialized();

    var localData = packageClient.loadCachedServerData();
    var allPackageData;
    if (! self.offline && sync) {
      allPackageData = packageClient.updateServerPackageData(localData);
      if (! allPackageData) {
        // If we couldn't contact the package server, use our local data.
        allPackageData = localData.collections;
        // XXX should do some nicer error handling here (return error to
        // caller and let them handle it?)
        process.stderr.write("Warning: could not connect to package server\n");
      }
    } else {
      allPackageData = localData.collections;
    }

    // Rebuild the in-memory collections from scratch. The catalog is
    // temporarily marked uninitialized; _addLocalPackageOverrides flips
    // it back once the local packages are loaded in.
    self.initialized = false;
    self.packages = [];
    self.versions = [];
    self.builds = [];
    if (allPackageData) {
      self._insertServerPackages(allPackageData);
    }
    self._addLocalPackageOverrides(true /* setInitialized */);
  },
|
|
|
|
// Compute self.effectiveLocalPackages from self.localPackageDirs
|
|
// and self.localPackages.
|
|
_recomputeEffectiveLocalPackages: function () {
|
|
var self = this;
|
|
|
|
self.effectiveLocalPackages = {};
|
|
|
|
_.each(self.localPackageDirs, function (localPackageDir) {
|
|
if (! isDirectory(localPackageDir))
|
|
return;
|
|
var contents = fs.readdirSync(localPackageDir);
|
|
_.each(contents, function (item) {
|
|
var packageDir = path.resolve(path.join(localPackageDir, item));
|
|
if (! isDirectory(packageDir))
|
|
return;
|
|
|
|
// Consider a directory to be a package source tree if it
|
|
// contains 'package.js'. (We used to support unipackages in
|
|
// localPackageDirs, but no longer.)
|
|
if (fs.existsSync(path.join(packageDir, 'package.js'))) {
|
|
// Let earlier package directories override later package
|
|
// directories.
|
|
|
|
// XXX XXX for now, get the package name from the
|
|
// directory. in a future refactor, should instead build the
|
|
// package right here and get the name from the (not yet
|
|
// added) 'name' attribute in package.js.
|
|
if (! _.has(self.effectiveLocalPackages, item))
|
|
self.effectiveLocalPackages[item] = packageDir;
|
|
}
|
|
});
|
|
});
|
|
|
|
_.extend(self.effectiveLocalPackages, self.localPackages);
|
|
},
|
|
|
|
  // Add all packages in self.effectiveLocalPackages to the catalog,
  // first removing any existing packages that have the same name.
  // Then build (or load cached builds of) every local package, in an
  // order that respects build-time dependencies, and record a build
  // entry for each.
  //
  // If _setInitialized is provided and true, then as soon as the
  // metadata for the local packages has been loaded into the catalog,
  // mark the catalog as initialized. This is a bit of a hack.
  //
  // XXX emits buildmessages. are callers expecting that?
  _addLocalPackageOverrides: function (_setInitialized) {
    var self = this;

    // Remove all packages from the catalog that have the same name as
    // a local package, along with all of their versions and builds.
    var removedVersionIds = {};
    self.versions = _.filter(self.versions, function (version) {
      if (_.has(self.effectiveLocalPackages, version.packageName)) {
        // Remove this one
        removedVersionIds[version._id] = true;
        return false;
      }
      return true;
    });

    self.builds = _.filter(self.builds, function (build) {
      return ! _.has(removedVersionIds, build.versionId);
    });

    self.packages = _.filter(self.packages, function (pkg) {
      return ! _.has(self.effectiveLocalPackages, pkg.name);
    });

    // Phase 1: Load the source code and create Package and Version
    // entries from them. We have to do this before we can run the
    // constraint solver.
    var packageSources = {}; // name to PackageSource
    var versionIds = {}; // name to _id of the created Version record
    _.each(self.effectiveLocalPackages, function (packageDir, name) {
      var packageSource = new PackageSource;
      packageSource.initFromPackageDir(name, packageDir);
      packageSources[name] = packageSource;

      self.packages.push({
        name: name,
        maintainers: null,
        lastUpdated: null
      });

      // This doesn't have great birthday-paradox properties, but we
      // don't have Random.id() here (since it comes from a
      // unipackage), and making an index so we can see if a value is
      // already in use would complicate the code. Let's take the bet
      // that by the time we have enough local packages that this is a
      // problem, we either will have made tools into a star, or we'll
      // have made Catalog be backed by a real database.
      var versionId = "local-" + Math.floor(Math.random() * 1000000000);
      versionIds[name] = versionId;

      // Accurate version numbers are of supreme importance, because
      // we use version numbers (of build-time dependencies such as
      // the coffeescript plugin), together with source file hashes
      // and the notion of a repeatable build, to decide when a
      // package build is out of date and trigger a rebuild of the
      // package.
      //
      // The package we have just loaded may declare its version to be
      // 1.2.3, but that doesn't mean it's really the official version
      // 1.2.3 of the package. It only gets that version number
      // officially when it's published to the package server. So what
      // we'd like to do here is give it a version number like
      // '1.2.3+<buildid>', where <buildid> is a hash of everything
      // that's necessary to repeat the build exactly: all of the
      // package's source files, all of the package's build-time
      // dependencies, and the version of the Meteor build tool used
      // to build it.
      //
      // Unfortunately we can't actually compute such a buildid yet
      // since it depends on knowing the build-time dependencies of
      // the package, which requires that we run the constraint
      // solver, which can only be done once we've populated the
      // catalog, which is what we're trying to do right now.
      //
      // So we have a workaround. For local packages we will fake the
      // version in the catalog by setting the buildid to 'local', as
      // in '1.2.3+local'. This is enough for the constraint solver to
      // run, but any code that actually relies on accurate versions
      // (for example, code that checks if a build is up to date)
      // needs to be careful to get the versions not from the catalog
      // but from the actual built Unipackage objects, which will have
      // accurate versions (with precise buildids) even for local
      // packages.
      var version = packageSource.version;
      if (version.indexOf('+') !== -1)
        throw new Error("version already has a buildid?");
      version = version + "+local";

      self.versions.push({
        _id: versionId,
        packageName: name,
        version: version,
        publishedBy: null,
        earliestCompatibleVersion: packageSource.earliestCompatibleVersion,
        changelog: null, // XXX get actual changelog when we have it?
        description: packageSource.metadata.summary,
        dependencies: packageSource.getDependencyMetadata(),
        source: null,
        lastUpdated: null,
        published: null,
        containsPlugins: packageSource.containsPlugins()
      });
    });

    // Metadata for all local packages is now in place; that's all that
    // is needed for the catalog to answer queries.
    if (_setInitialized)
      self.initialized = true;

    // XXX XXX in the next version, don't go build all local packages
    // at startup just because! instead, do the following on an
    // ongoing, as-needed basis: when we want to load a build of
    // package X, and it's a local package, work out its dependencies
    // (and, lazily, the dependencies of its dependencies) and build
    // only what is needed. (XXX does this mean that we have to create
    // build records lazily too, or is there a way that we can create
    // them upfront?)

    // Phase 2: Figure out which local packages need to be built
    // before which other local packages because of build-time
    // dependencies.
    var packageBuildDeps = {}; // map from name to array of name
    _.each(self.effectiveLocalPackages, function (packageDir, name) {
      packageBuildDeps[name] = [];
      var deps = compiler.getBuildOrderConstraints(packageSources[name]);
      _.each(deps, function (d) {
        if (! _.has(self.effectiveLocalPackages, d.name))
          return; // not a local package -- may assume it's already built
        if (d.version !== packageSources[d.name].version + "+local")
          throw new Error("unknown version for local package?");
        packageBuildDeps[name].push(d.name);
      });
    });

    // Phase 3: Do a topological sort and build the local packages in
    // an order that respects their build-time dependencies.
    //
    // XXX topological sort duplicated from bundler.js.
    var remaining = _.clone(self.effectiveLocalPackages);
    var onStack = {}; // map from name to true

    // If `name` has a cached build on disk ('.build' beside its source
    // tree) that is still up to date, return the loaded Unipackage;
    // otherwise return null.
    var maybeGetUpToDateBuild = function (name) {
      var sourcePath = self.effectiveLocalPackages[name];
      var buildDir = path.join(sourcePath, '.build');
      if (fs.existsSync(buildDir)) {
        var unipackage = new Unipackage;
        unipackage.initFromPath(name, buildDir, { buildOfPath: sourcePath });
        if (compiler.checkUpToDate(packageSources[name], unipackage)) {
          return unipackage;
        }
      }
      return null;
    };

    // Build `name` (recursively building its build-time dependencies
    // first) and push a build record for it into self.builds. No-op if
    // it has already been handled on this pass.
    var build = function (name) {
      var unipackage = null;

      if (! _.has(remaining, name))
        return;

      // First build things that have to build before us (if not built yet)
      _.each(packageBuildDeps[name], function (otherName) {
        if (_.has(onStack, otherName)) {
          // Allow a circular dependency if the other thing is already
          // built and doesn't need to be rebuilt.
          unipackage = maybeGetUpToDateBuild(otherName);
          if (unipackage) {
            return;
          } else {
            buildmessage.error("circular dependency between packages " +
                               name + " and " + otherName);
            // recover by not enforcing one of the dependencies
            return;
          }
        }

        onStack[otherName] = true;
        build(otherName);
        delete onStack[otherName];
      });

      // Now build this package if it needs building
      var sourcePath = self.effectiveLocalPackages[name];
      unipackage = maybeGetUpToDateBuild(name);

      if (! unipackage) {
        // Didn't have a build or it wasn't up to date. Build it.
        buildmessage.enterJob({
          title: "building package `" + name + "`",
          rootPath: sourcePath
        }, function () {
          unipackage = compiler.compile(packageSources[name]).unipackage;

          if (! buildmessage.jobHasMessages()) {
            // Save the build, for a fast load next time
            try {
              var buildDir = path.join(sourcePath, '.build');
              files.addToGitignore(sourcePath, '.build*');
              unipackage.saveToPath(buildDir, { buildOfPath: sourcePath });
            } catch (e) {
              // If we can't write to this directory, we don't get to cache our
              // output, but otherwise life is good.
              if (!(e && (e.code === 'EACCES' || e.code === 'EPERM')))
                throw e;
            }
          }
        });
      }

      // And put a build record for it in the catalog
      self.builds.push({
        packageName: name,
        architecture: unipackage.architectures().join('+'),
        builtBy: null,
        build: null, // this would be the URL and hash
        versionId: versionIds[name],
        lastUpdated: null,
        buildPublished: null
      });

      // XXX XXX maybe you actually want to, like, save the unipackage
      // in memory into a cache? rather than leaving packageCache to
      // reload it? or maybe packageCache is unified into catalog
      // somehow? sleep on it

      // Done
      delete remaining[name];
    };

    // Drain `remaining`: repeatedly pick an arbitrary package that has
    // not been built yet and build it (build() recurses into its
    // dependencies and removes everything it finishes).
    while (true) {
      // Go build an arbitrary local package from among those remaining.
      var first = null;
      for (first in remaining) break;
      if (! first)
        break;
      build(first);
    }
  },
|
|
|
|
// serverPackageData is a description of the packages available from
|
|
// the package server, as returned by
|
|
// packageClient.loadPackageData. Add all of those packages to the
|
|
// catalog without checking for duplicates.
|
|
_insertServerPackages: function (serverPackageData) {
|
|
var self = this;
|
|
|
|
self.packages.push.apply(self.packages, serverPackageData.packages);
|
|
self.versions.push.apply(self.versions, serverPackageData.versions);
|
|
self.builds.push.apply(self.builds, serverPackageData.builds);
|
|
},
|
|
|
|
_requireInitialized: function () {
|
|
var self = this;
|
|
|
|
if (! self.initialized)
|
|
throw new Error("catalog not initialized yet?");
|
|
},
|
|
|
|
// Add a local package to the catalog. `name` is the name to use for
|
|
// the package and `directory` is the directory that contains the
|
|
// source tree for the package.
|
|
//
|
|
// If a package named `name` exists on the package server, it will
|
|
// be overridden (it will be as if that package doesn't exist on the
|
|
// package server at all). And for now, it's an error to call this
|
|
// function twice with the same `name`.
|
|
addLocalPackage: function (name, directory) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
var resolvedPath = path.resolve(directory);
|
|
if (_.has(self.localPackages, name) &&
|
|
self.localPackages[name] !== resolvedPath) {
|
|
throw new Error("Duplicate local package '" + name + "'");
|
|
}
|
|
self.localPackages[name] = resolvedPath;
|
|
|
|
// If we were making lots of calls to addLocalPackage, we would
|
|
// want to coalesce the calls to _refresh somehow, but I don't
|
|
// think we'll actually be doing that so this should be fine.
|
|
// #CallingRefreshEveryTimeLocalPackagesChange
|
|
self._recomputeEffectiveLocalPackages();
|
|
self._refresh(false /* sync */);
|
|
},
|
|
|
|
// Reverse the effect of addLocalPackage.
|
|
removeLocalPackage: function (name) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
if (! _.has(self.localPackages, name))
|
|
throw new Error("no such local package?");
|
|
delete self.localPackages[name];
|
|
|
|
// see #CallingRefreshEveryTimeLocalPackagesChange
|
|
self._recomputeEffectiveLocalPackages();
|
|
self._refresh(false /* sync */);
|
|
},
|
|
|
|
// True if `name` is a local package (is to be loaded via
|
|
// localPackageDirs or addLocalPackage rather than from the package
|
|
// server)
|
|
isLocalPackage: function (name) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
return _.has(self.effectiveLocalPackages, name);
|
|
},
|
|
|
|
// Register local package directories with a watchSet. We want to know if a
|
|
// package is created or deleted, which includes both its top-level source
|
|
// directory and its main package metadata file.
|
|
//
|
|
// This will watch the local package directories that are in effect when the
|
|
// function is called. (As set by the most recent call to
|
|
// setLocalPackageDirs.)
|
|
watchLocalPackageDirs: function (watchSet) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
_.each(self.localPackageDirs, function (packageDir) {
|
|
var packages = watch.readAndWatchDirectory(watchSet, {
|
|
absPath: packageDir,
|
|
include: [/\/$/]
|
|
});
|
|
_.each(packages, function (p) {
|
|
watch.readAndWatchFile(watchSet,
|
|
path.join(packageDir, p, 'package.js'));
|
|
watch.readAndWatchFile(watchSet,
|
|
path.join(packageDir, p, 'unipackage.json'));
|
|
});
|
|
});
|
|
},
|
|
|
|
// Rebuild all source packages in our search paths. If two packages
|
|
// have the same name only the one that we would load will get
|
|
// rebuilt.
|
|
//
|
|
// Returns a count of packages rebuilt.
|
|
rebuildLocalPackages: function () {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
// Clear any cached builds in the package cache.
|
|
packageCache.packageCache.refresh();
|
|
|
|
// Delete any that are source packages with builds.
|
|
var count = 0;
|
|
_.each(self.effectiveLocalPackages, function (loadPath, name) {
|
|
var buildDir = path.join(loadPath, '.build');
|
|
files.rm_recursive(buildDir);
|
|
});
|
|
|
|
// Now reload them, forcing a rebuild. We have to do this in two
|
|
// passes because otherwise we might end up rebuilding a package
|
|
// and then immediately deleting it.
|
|
_.each(self.effectiveLocalPackages, function (loadPath, name) {
|
|
packageCache.packageCache.loadPackageAtPath(name, loadPath);
|
|
count ++;
|
|
});
|
|
|
|
return count;
|
|
},
|
|
|
|
// Given a name and a version of a package, return a path on disk
|
|
// from which we can load it. If we don't have it on disk (we
|
|
// haven't downloaded it, or it just plain doesn't exist in the
|
|
// catalog) return null.
|
|
//
|
|
// Doesn't download packages. Downloading should be done at the time
|
|
// that .meteor/versions is updated.
|
|
//
|
|
// HACK: Version can be null if you are certain that the package is to be
|
|
// loaded from local packages. In the future, version should always be
|
|
// required and we should confirm that the version on disk is the version that
|
|
// we asked for. This is to support unipackage loader not having a version
|
|
// manifest.
|
|
getLoadPathForPackage: function (name, version) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
if (_.has(self.effectiveLocalPackages, name)) {
|
|
return self.effectiveLocalPackages[name];
|
|
}
|
|
|
|
if (! version)
|
|
throw new Error(name + " not a local package, and no version specified?");
|
|
|
|
var packageDir = tropohouse.packagePath(name, version);
|
|
if (fs.existsSync(packageDir)) {
|
|
return packageDir;
|
|
}
|
|
return null;
|
|
},
|
|
|
|
// Return an array with the names of all of the packages that we
|
|
// know about, in no particular order.
|
|
getAllPackageNames: function () {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
return _.pluck(self.packages, 'name');
|
|
},
|
|
|
|
// Returns general (non-version-specific) information about a
|
|
// package, or null if there is no such package.
|
|
getPackage: function (name) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
return _.findWhere(self.packages, { name: name });
|
|
},
|
|
|
|
// Given a package, returns an array of the versions available for
|
|
// this package (for any architecture), sorted from oldest to newest
|
|
// (according to the version string, not according to their
|
|
// publication date). Returns the empty array if the package doesn't
|
|
// exist or doesn't have any versions.
|
|
getSortedVersions: function (name) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
var ret = _.pluck(_.where(self.versions, { packageName: name }),
|
|
'version');
|
|
ret.sort(semver.compare);
|
|
return ret;
|
|
},
|
|
|
|
// Return information about a particular version of a package, or
|
|
// null if there is no such package or version.
|
|
getVersion: function (name, version) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
return _.findWhere(self.versions, { packageName: name,
|
|
version: version });
|
|
},
|
|
|
|
// As getVersion, but returns info on the latest version of the
|
|
// package, or null if the package doesn't exist or has no versions.
|
|
getLatestVersion: function (name) {
|
|
var self = this;
|
|
self._requireInitialized();
|
|
|
|
var versions = self.getSortedVersions(name);
|
|
if (versions.length === 0)
|
|
return null;
|
|
return self.getVersion(name, versions[versions.length - 1]);
|
|
},
|
|
|
|
  // If this package has any builds at this version, return an array of builds
  // which cover all of the required arches, or null if it is impossible to
  // cover them all (or if the version does not exist).
  getBuildsForArches: function (name, version, arches) {
    var self = this;
    self._requireInitialized();

    var versionInfo = self.getVersion(name, version);
    if (! versionInfo)
      return null;

    // XXX this uses a greedy algorithm that might decide, when we're looking
    // for ["browser", "os.mac"] that we should download browser+os.linux to
    // satisfy browser and browser+os.mac to satisfy os.mac. This is not
    // optimal, but on the other hand you might want the linux one later anyway
    // for deployment.
    // XXX if we have a choice between os and os.mac, this returns a random one.
    //     so in practice we don't really support "maybe-platform-specific"
    //     packages

    // The set of architectures we have not yet covered, as a map from
    // arch name to true.
    var neededArches = {};
    _.each(arches, function (arch) {
      neededArches[arch] = true;
    });

    // Greedily walk the builds for this version, taking any build that
    // satisfies at least one still-needed architecture; stop early once
    // everything is covered.
    var buildsToUse = [];
    var allBuilds = _.where(self.builds, { versionId: versionInfo._id });
    for (var i = 0; i < allBuilds.length && !_.isEmpty(neededArches); ++i) {
      var build = allBuilds[i];
      // XXX why isn't this a list in the DB? I guess because of the unique
      // index?
      var buildArches = build.architecture.split('+');
      var usingThisBuild = false;
      _.each(neededArches, function (ignored, neededArch) {
        if (archinfo.mostSpecificMatch(neededArch, buildArches)) {
          // This build gives us something we need! We don't need it any
          // more. (It is safe to delete keys of something you are each'ing over
          // because _.each internally is doing an iteration over _.keys.)
          delete neededArches[neededArch];
          if (! usingThisBuild) {
            usingThisBuild = true;
            buildsToUse.push(build);
            // XXX this should probably be denormalized in the DB
            build.version = version;
          }
        }
      });
    }

    if (_.isEmpty(neededArches))
      return buildsToUse;
    // We couldn't satisfy it!
    return null;
  }
|
|
});
|
|
|
|
// The singleton catalog instance shared by the whole tool. Callers must
// invoke catalog.catalog.initialize(...) before using it.
catalog.catalog = new Catalog();
|