mirror of
https://github.com/meteor/meteor.git
synced 2026-05-02 03:01:46 -04:00
342 lines
11 KiB
JavaScript
342 lines
11 KiB
JavaScript
const fs = Npm.require('fs');
const path = Npm.require('path');
const _ = Npm.require('underscore');
const sourcemap = Npm.require('source-map');
const LRU = Npm.require('lru-cache');
const createHash = Npm.require("crypto").createHash;
const assert = Npm.require("assert");

// The coffee-script compiler overrides Error.prepareStackTrace, mostly for the
// use of coffee.run which we don't use. This conflicts with the tool's use of
// Error.prepareStackTrace to properly show error messages in linked code. Save
// the tool's one and restore it after coffee-script clobbers it.
// (The require must happen between the save and the restore — don't reorder.)
const prepareStackTrace = Error.prepareStackTrace;
const coffee = Npm.require('coffee-script');
Error.prepareStackTrace = prepareStackTrace;

// Upper bound, in bytes, of the in-memory compilation cache (default 10 MiB);
// overridable via the METEOR_COFFEESCRIPT_CACHE_SIZE environment variable.
const CACHE_SIZE = process.env.METEOR_COFFEESCRIPT_CACHE_SIZE || 1024*1024*10;
// When truthy, log cache hits/misses to the console (used by the test suite).
const CACHE_DEBUG = !! process.env.METEOR_TEST_PRINT_CACHE_DEBUG;
|
|
|
|
// Remove package-exported symbols from the top-level `var` declaration that
// CoffeeScript generates, so that assignments to those names create
// package-scope bindings rather than file-local variables.
//
// source  - JavaScript string emitted by coffee.compile
// exports - array of exported variable names (may be null or empty)
//
// Returns the adjusted JavaScript. Every replaced line keeps its original
// length (padded with spaces) so existing source-map records continue to
// point at valid characters.
function stripExportedVars(source, exports) {
  if (!exports || !exports.length)
    return source;
  const lines = source.split("\n");

  // We make the following assumptions, based on the output of CoffeeScript
  // 1.7.1.
  //   - The var declaration in question is not indented and is the first such
  //     var declaration.  (CoffeeScript only produces one var line at each
  //     scope and there's only one top-level scope.)  All relevant variables
  //     are actually on this line.
  //   - The user hasn't used a ###-comment containing a line that looks like
  //     a var line, to produce something like
  //        /* bla
  //        var foo;
  //        */
  //     before an actual var line.  (ie, we do NOT attempt to figure out if
  //     we're inside a /**/ comment, which is produced by ### comments.)
  //   - The var in question is not assigned to in the declaration, nor are any
  //     other vars on this line.  (CoffeeScript does produce some assignments
  //     but only for internal helpers generated by CoffeeScript, and they end
  //     up on subsequent lines.)
  // XXX relax these assumptions by doing actual JS parsing (eg with jsparse).
  //     I'd do this now, but there's no easy way to "unparse" a jsparse AST.
  //     Or alternatively, hack the compiler to allow us to specify unbound
  //     symbols directly.

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const match = /^var (.+)([,;])$/.exec(line);
    if (!match)
      continue;

    // If there's an assignment on this line, we assume that there are ONLY
    // assignments and that the var we are looking for is not declared. (Part
    // of our strong assumption about the layout of this code.)
    if (match[1].indexOf('=') !== -1)
      continue;

    // Replace the line with something no shorter, so that all records in the
    // source map continue to point at valid characters.
    const originalLength = line.length;
    const replaceLine = (replacement) => {
      lines[i] = replacement.length >= originalLength
        ? replacement
        : replacement + ' '.repeat(originalLength - replacement.length);
    };

    // Keep only the names that are NOT exported.
    const vars = match[1].split(', ').filter(
      (v) => exports.indexOf(v) === -1);
    if (vars.length) {
      replaceLine("var " + vars.join(', ') + match[2]);
    } else {
      // We got rid of all the vars on this line. Drop the whole line if this
      // didn't continue to the next line, otherwise keep just the 'var '.
      if (match[2] === ';')
        replaceLine('');
      else
        replaceLine('var');
    }
    break;
  }

  return lines.join('\n');
}
|
|
|
|
// Prepend the "share" header to compiled output, updating the source map when
// the insertion adds a line. Mutates and returns the provided sourceMap.
function addSharedHeader(source, sourceMap) {
  // We want the symbol "share" to be visible to all CoffeeScript files in the
  // package (and shared between them), but not visible to JavaScript
  // files. (That's because we don't want to introduce two competing ways to
  // make package-local variables into JS ("share" vs assigning to non-var
  // variables).) The following hack accomplishes that: "__coffeescriptShare"
  // will be visible at the package level and "share" at the file level. This
  // should work both in "package" mode where __coffeescriptShare will be added
  // as a var in the package closure, and in "app" mode where it will end up as
  // a global.
  //
  // This ends in a newline to make the source map easier to adjust.
  const header = ("__coffeescriptShare = typeof __coffeescriptShare === 'object' " +
                  "? __coffeescriptShare : {}; " +
                  "var share = __coffeescriptShare;\n");

  // If the file begins with "use strict", we need to keep that as the first
  // statement.
  const useStrictPattern = /^(?:((['"])use strict\2;)\n)?/;
  const processedSource = source.replace(useStrictPattern, (matched, useStrict) => {
    if (!matched) {
      // No "use strict": insert the header as a brand-new first line. That
      // shifts every generated line down by one, so prepend one un-annotated
      // line (";") to the source map's mappings.
      sourceMap.mappings = ";" + sourceMap.mappings;
      return header;
    }
    // There's a "use strict"; keep it as the first statement and tack our
    // header onto the end of the same line. Line numbers and the annotated
    // portion of the line are unchanged, so the source map needs no update.
    return useStrict + " " + header;
  });

  return {
    source: processedSource,
    sourceMap
  };
}
|
|
|
|
// Meteor build-plugin compiler for .coffee / .litcoffee / .coffee.md files.
// Compiles each input file with coffee.compile, strips exported vars, adds
// the "share" header, and caches results both in memory (LRU, bounded by
// CACHE_SIZE bytes) and optionally on disk.
class CoffeeCompiler {
  constructor() {
    // Maps from a cache key (encoding the source hash, exports, and the other
    // options passed to coffee.compile) to a {source,sourceMap} object. Note
    // that this is the result of both coffee.compile and the post-processing
    // that we do.
    this._cache = new LRU({
      max: CACHE_SIZE,
      // Cache is measured in bytes.
      length: (value) => {
        return value.source.length + sourceMapLength(value.sourceMap);
      }
    });
    // Directory for the on-disk cache; set later via setDiskCacheDirectory.
    this._diskCache = null;
    // For testing.
    this._callCount = 0;
  }

  // Build-plugin entry point: compile every input file, consulting the
  // in-memory cache, then the disk cache, before calling coffee.compile.
  processFilesForTarget(inputFiles) {
    const cacheMisses = [];

    inputFiles.forEach((inputFile) => {
      const source = inputFile.getContentsAsString();
      const outputFilePath = inputFile.getPathInPackage() + ".js";
      const extension = inputFile.getExtension();
      // Any extension other than plain 'coffee' is treated as literate
      // CoffeeScript ('litcoffee', 'coffee.md').
      const literate = extension !== 'coffee';

      const options = {
        bare: true,
        filename: inputFile.getPathInPackage(),
        literate: literate,
        // Return a source map.
        sourceMap: true,
        // Include the original source in the source map (sourcesContent field).
        inline: true,
        // This becomes the "file" field of the source map.
        generatedFile: "/" + outputFilePath,
        // This becomes the "sources" field of the source map.
        sourceFiles: [inputFile.getDisplayPath()]
      };

      // The key covers everything that affects the output: source contents,
      // declared exports (they change stripExportedVars), and compile options.
      const cacheKey = deepHash([inputFile.getSourceHash(),
                                 inputFile.getDeclaredExports(),
                                 options]);
      let sourceWithMap = this._cache.get(cacheKey);
      if (! sourceWithMap) {
        // Memory miss: try the on-disk cache (also populates memory cache).
        sourceWithMap = this._readCache(cacheKey);
        if (sourceWithMap && CACHE_DEBUG) {
          console.log(
            `Loaded ${ inputFile.getDisplayPath() } from coffeescript cache`);
        }
      }
      if (! sourceWithMap) {
        cacheMisses.push(inputFile.getDisplayPath());
        let output;
        try {
          output = coffee.compile(source, options);
        } catch (e) {
          // CoffeeScript reports 0-based positions; the plugin API wants
          // 1-based. Report the error and skip emitting output for this file.
          inputFile.error({
            message: e.message,
            line: e.location && (e.location.first_line + 1),
            column: e.location && (e.location.first_column + 1)
          });

          return;
        }

        const stripped = stripExportedVars(
          output.js,
          _.pluck(inputFile.getDeclaredExports(), 'name'));
        sourceWithMap = addSharedHeader(
          stripped, JSON.parse(output.v3SourceMap));
        this._cache.set(cacheKey, sourceWithMap);
        // Fire-and-forget; compilation does not block on the disk write.
        this._writeCacheAsync(cacheKey, sourceWithMap);
      }

      inputFile.addJavaScript({
        path: outputFilePath,
        sourcePath: inputFile.getPathInPackage(),
        data: sourceWithMap.source,
        sourceMap: sourceWithMap.sourceMap,
        bare: inputFile.getFileOptions().bare
      });
    });

    if (CACHE_DEBUG) {
      cacheMisses.sort();
      console.log(
        `Ran coffee.compile (#${ ++this._callCount }) on: ` +
        JSON.stringify(cacheMisses));
    }
  }

  // Called by the build tool to enable the on-disk cache. Must only be
  // called once per compiler instance.
  setDiskCacheDirectory(diskCache) {
    if (this._diskCache)
      throw Error("setDiskCacheDirectory called twice?");
    this._diskCache = diskCache;
  }
  // Map a cache key to its on-disk JSON filename, validating the key first.
  _cacheFilename(cacheKey) {
    // We want cacheKeys to be hex so that they work on any FS and never end in
    // .json.
    if (!/^[a-f0-9]+$/.test(cacheKey)) {
      throw Error('bad cacheKey: ' + cacheKey);
    }
    return path.join(this._diskCache, cacheKey + '.json');
  }
  // Load a cache entry from disk. Returns the {source,sourceMap} object
  // and loads it into the in-memory cache too. Returns null when the disk
  // cache is disabled or the entry is missing/corrupt.
  _readCache(cacheKey) {
    if (! this._diskCache) {
      return null;
    }
    const cacheFilename = this._cacheFilename(cacheKey);
    const cacheJSON = readJSONOrNull(cacheFilename);
    if (! cacheJSON) {
      return null;
    }
    this._cache.set(cacheKey, cacheJSON);
    return cacheJSON;
  }
  // Persist a cache entry to disk without blocking compilation. Errors are
  // deliberately ignored — the disk cache is a best-effort optimization.
  _writeCacheAsync(cacheKey, cacheJSON) {
    if (! this._diskCache)
      return;
    const cacheFilename = this._cacheFilename(cacheKey);
    const cacheContents = JSON.stringify(cacheJSON);

    // We want to write the file atomically. But we also don't want to block
    // processing on the file write. Write to a uniquely-named temp file,
    // then rename it into place (rename is atomic on the same filesystem).
    const cacheTempFilename = cacheFilename + '.tmp.' + Random.id();
    fs.writeFile(cacheTempFilename, cacheContents, (err) => {
      // ignore errors, it's just a cache
      if (err) {
        return;
      }
      fs.rename(cacheTempFilename, cacheFilename, (err) => {
        // ignore this error too.
      });
    });
  }
}
|
|
|
|
// Register this compiler for every CoffeeScript flavor: plain (.coffee) and
// literate (.litcoffee, .coffee.md). A fresh compiler instance (with its own
// caches) is created per build.
Plugin.registerCompiler({
  extensions: ['coffee', 'litcoffee', 'coffee.md']
}, () => new CoffeeCompiler());
|
|
|
|
// Approximate byte size of a source map for LRU cache accounting: the
// mappings string plus any embedded original sources. Metadata fields are
// ignored, but they're small and the estimate doesn't need to be exact.
function sourceMapLength(sm) {
  if (! sm) {
    return 0;
  }
  let total = sm.mappings.length;
  for (const content of (sm.sourcesContent || [])) {
    if (content) {
      total += content.length;
    }
  }
  return total;
}
|
|
|
|
// Borrowed from another MIT-licensed project that benjamn wrote:
|
|
// https://github.com/reactjs/commoner/blob/235d54a12c/lib/util.js#L136-L168
|
|
// Borrowed from another MIT-licensed project that benjamn wrote:
// https://github.com/reactjs/commoner/blob/235d54a12c/lib/util.js#L136-L168
//
// Produce a stable SHA-1 hex digest of an arbitrary (non-function) value.
// Object keys are sorted so the hash is independent of property order;
// nested function-valued properties are silently skipped, but a function
// at the root is an error.
function deepHash(val) {
  const hash = createHash("sha1");
  const type = val === null ? "null" : typeof val;

  if (type === "function") {
    assert.ok(false, "cannot hash function objects");
  } else if (type === "object") {
    const keys = Object.keys(val);

    // Array keys will already be sorted.
    if (! Array.isArray(val)) {
      keys.sort();
    }

    for (const key of keys) {
      const child = val[key];
      // Silently ignore nested methods, but nevertheless complain above
      // if the root value is a function.
      if (typeof child === "function") {
        continue;
      }
      // "\0" separates key from value so "ab"+"c" can't collide with "a"+"bc".
      hash.update(key + "\0").update(deepHash(child));
    }
  } else {
    // Primitives (and undefined) hash via their string form.
    hash.update("" + val);
  }

  return hash.digest("hex");
}
|
|
|
|
// Returns null if the file does not exist or is invalid JSON, otherwise returns
|
|
// the parsed JSON in the file.
|
|
function readJSONOrNull(filename) {
|
|
let raw;
|
|
try {
|
|
raw = fs.readFileSync(filename, 'utf8');
|
|
} catch (e) {
|
|
if (e && e.code === 'ENOENT')
|
|
return null;
|
|
throw e;
|
|
}
|
|
try {
|
|
return JSON.parse(raw);
|
|
} catch (e) {
|
|
return null;
|
|
}
|
|
}
|