Prefer source-maps in memory in a parsed JSON form

This commit is contained in:
Slava Kim
2015-07-06 15:24:09 -07:00
parent ed822326aa
commit 9d610e8d26
7 changed files with 96 additions and 37 deletions

View File

@@ -84,8 +84,6 @@ var stripExportedVars = function (source, exports) {
};
var addSharedHeader = function (source, sourceMap) {
var sourceMapJSON = JSON.parse(sourceMap);
// We want the symbol "share" to be visible to all CoffeeScript files in the
// package (and shared between them), but not visible to JavaScript
// files. (That's because we don't want to introduce two competing ways to
@@ -114,13 +112,13 @@ var addSharedHeader = function (source, sourceMap) {
// There's no use strict, so we can just add the header at the very
// beginning. This adds a line to the file, so we update the source map to
// add a single un-annotated line to the beginning.
sourceMapJSON.mappings = ";" + sourceMapJSON.mappings;
sourceMap.mappings = ";" + sourceMap.mappings;
return header;
}
});
return {
source: source,
sourceMap: JSON.stringify(sourceMapJSON)
sourceMap: sourceMap
};
};
@@ -134,7 +132,7 @@ var CoffeeCompiler = function () {
max: CACHE_SIZE,
// Cache is measured in bytes.
length: function (value) {
return value.source.length + value.sourceMap.length;
return value.source.length + sourceMapLength(value.sourceMap);
}
});
self._diskCache = null;
@@ -188,6 +186,7 @@ _.extend(CoffeeCompiler.prototype, {
var stripped = stripExportedVars(
output.js,
_.pluck(inputFile.getDeclaredExports(), 'name'));
console.log('>>>> ', typeof output.sourceMap, typeof output.v3SourceMap);
sourceWithMap = addSharedHeader(stripped, output.v3SourceMap);
self._cache.set(cacheKey, sourceWithMap);
}
@@ -257,3 +256,12 @@ Plugin.registerCompiler({
return new CoffeeCompiler();
});
// Approximate the in-memory size (in characters) of a parsed source map
// object, used as the LRU cache `length` function.
// Only the `mappings` string and the `sourcesContent` entries are counted;
// the remaining metadata (version, sources, names, ...) is small, so
// ignoring it is not a big deal.
function sourceMapLength(sm) {
  // A missing map contributes nothing to the cache size.
  if (! sm) return 0;
  // Guard against maps without a `mappings` string so a malformed map
  // cannot crash the cache size computation.
  return (sm.mappings || '').length
    + (sm.sourcesContent || []).reduce(function (soFar, current) {
        return soFar + (current ? current.length : 0);
      }, 0);
}

View File

@@ -25,7 +25,7 @@ var LessCompiler = function () {
max: CACHE_SIZE,
// Cache is measured in bytes (not counting the hashes).
length: function (value) {
return value.css.length + value.sourceMap.length;
return value.css.length + sourceMapLength(value.sourceMap);
}
});
self._diskCache = null;
@@ -100,7 +100,7 @@ _.extend(LessCompiler.prototype, {
if (output.map) {
var map = JSON.parse(output.map);
map.sources = map.sources.map(decodeFilePath);
output.map = JSON.stringify(map);
output.map = map;
}
cacheEntry = {
hashes: {},
@@ -244,3 +244,12 @@ _.extend(MeteorImportLessFileManager.prototype, {
}
});
// Approximate the in-memory size (in characters) of a parsed source map
// object, used as the LRU cache `length` function.
// Only the `mappings` string and the `sourcesContent` entries are counted;
// the remaining metadata (version, sources, names, ...) is small, so
// ignoring it is not a big deal.
function sourceMapLength(sm) {
  // A missing map contributes nothing to the cache size.
  if (! sm) return 0;
  // Guard against maps without a `mappings` string so a malformed map
  // cannot crash the cache size computation.
  return (sm.mappings || '').length
    + (sm.sourcesContent || []).reduce(function (soFar, current) {
        return soFar + (current ? current.length : 0);
      }, 0);
}

View File

@@ -22,7 +22,7 @@ function StylusCompiler () {
max: CACHE_SIZE,
// Cache is measured in bytes (not counting the hashes).
length: function (value) {
return value.css.length + value.sourceMap.length;
return value.css.length + sourceMapLength(value.sourceMap);
}
});
@@ -156,7 +156,7 @@ StylusCompiler.prototype.processFilesForTarget = function (files) {
cacheEntry = {
hashes: {},
css: css,
sourceMap: JSON.stringify(sourcemap)
sourceMap: sourcemap
};
cacheEntry.hashes[absolutePath] = inputFile.getSourceHash();
currentlyProcessedImports.forEach(function (path) {
@@ -190,3 +190,12 @@ StylusCompiler.prototype._writeCache = function () {
// XXX BBP no on-disk caching yet
};
// Approximate the in-memory size (in characters) of a parsed source map
// object, used as the LRU cache `length` function.
// Only the `mappings` string and the `sourcesContent` entries are counted;
// the remaining metadata (version, sources, names, ...) is small, so
// ignoring it is not a big deal.
function sourceMapLength(sm) {
  // A missing map contributes nothing to the cache size.
  if (! sm) return 0;
  // Guard against maps without a `mappings` string so a malformed map
  // cannot crash the cache size computation.
  return (sm.mappings || '').length
    + (sm.sourcesContent || []).reduce(function (soFar, current) {
        return soFar + (current ? current.length : 0);
      }, 0);
}

View File

@@ -235,7 +235,8 @@ var NodeModulesDirectory = function (options) {
// - sourcePath: path to file on disk that will provide our contents
// - data: contents of the file as a Buffer
// - hash: optional, sha1 hash of the file contents, if known
// - sourceMap: if 'data' is given, can be given instead of sourcePath. a string
// - sourceMap: if 'data' is given, can be given instead of
// sourcePath. a string or a JS Object. Will be stored as Object.
// - cacheable
var File = function (options) {
var self = this;
@@ -390,8 +391,14 @@ _.extend(File.prototype, {
setSourceMap: function (sourceMap, root) {
var self = this;
if (typeof sourceMap !== "string")
throw new Error("sourceMap must be given as a string");
if (sourceMap === null || ['object', 'string'].indexOf(typeof sourceMap) === -1) {
throw new Error("sourceMap must be given as a string or an object");
}
if (typeof sourceMap === 'string') {
sourceMap = JSON.parse(sourceMap);
}
self.sourceMap = sourceMap;
self.sourceMapRoot = root;
},
@@ -863,18 +870,20 @@ _.extend(Target.prototype, {
// For every source file we process, sets the domain name to
// 'meteor://[emoji]app/', so there is a separate category in Chrome DevTools
// with the original sources.
rewriteSourceMaps: function () {
rewriteSourceMaps: Profile("Target#rewriteSourceMaps", function () {
var self = this;
function rewriteSourceMap (sm) {
var smPlain = JSON.parse(sm);
smPlain.sources = smPlain.sources.map(function (path) {
sm.sources = sm.sources.map(function (path) {
const prefix = 'meteor://\u{1f4bb}app';
if (path.slice(0, prefix.length) === prefix) return path;
// This emoji makes sure the category is always last. The character
// is PERSONAL COMPUTER (yay ES6 unicode escapes):
// http://www.fileformat.info/info/unicode/char/1f4bb/index.htm
return 'meteor://\u{1f4bb}app' + (path[0] === '/' ? '' : '/') + path;
return prefix + (path[0] === '/' ? '' : '/') + path;
});
return JSON.stringify(smPlain);
return sm;
}
if (self.js) {
@@ -890,7 +899,7 @@ _.extend(Target.prototype, {
css.sourceMap = rewriteSourceMap(css.sourceMap);
});
}
},
}),
// Add a Cordova plugin dependency to the target. If the same plugin
// has already been added at a different version and `override` is
@@ -1072,13 +1081,13 @@ _.extend(ClientTarget.prototype, {
});
if (file.sourceMap) {
let mapData = file.sourceMap;
let mapData = null;
// don't need to do this in devel mode
if (mode === 'production') {
mapData = antiXSSIPrepend(file.sourceMap);
mapData = antiXSSIPrepend(JSON.stringify(file.sourceMap));
} else {
mapData = new Buffer(file.sourceMap, 'utf8');
mapData = new Buffer(JSON.stringify(file.sourceMap), 'utf8');
}
manifestItem.sourceMap = builder.writeToGeneratedFilename(
@@ -1390,7 +1399,7 @@ _.extend(JsImage.prototype, {
// Write the source map.
loadItem.sourceMap = builder.writeToGeneratedFilename(
sourceMapBaseName,
{ data: new Buffer(item.sourceMap, 'utf8') }
{ data: new Buffer(JSON.stringify(item.sourceMap), 'utf8') }
);
var sourceMapFileName = files.pathBasename(loadItem.sourceMap);

View File

@@ -10,6 +10,7 @@ var _ = require('underscore');
var Profile = require('./profile.js').Profile;
import {sha1} from './watch.js';
import LRU from 'lru-cache';
import {sourceMapLength} from './utils.js';
const CACHE_SIZE = process.env.METEOR_LINKER_CACHE_SIZE || 1024*1024*100;
const CACHE_DEBUG = !! process.env.METEOR_TEST_PRINT_LINKER_CACHE_DEBUG;
@@ -22,8 +23,7 @@ const LINKER_CACHE = new LRU({
// Key is JSONification of all options plus all hashes.
length: function (files) {
return files.reduce((soFar, current) => {
return soFar + current.data.length +
(current.sourceMap ? current.sourceMap.length : 0);
return soFar + current.data.length + sourceMapLength(current.sourceMap);
}, 0);
}
});
@@ -175,14 +175,19 @@ _.extend(InputFile.prototype, {
* be satisfied if there are path conflicts.
* @param {String} options.data The content of the stylesheet that should be
* added.
* @param {String} options.sourceMap A stringified JSON sourcemap, in case the
* stylesheet was generated from a different file.
* @param {String|Object} options.sourceMap A stringified JSON
* sourcemap, in case the stylesheet was generated from a different
* file.
* @memberOf InputFile
* @instance
*/
addStylesheet: function (options) {
var self = this;
// XXX BBP validate input!!
if (options.sourceMap && typeof options.sourceMap === 'string') {
// XXX remove an anti-XSSI header? ")]}'\n"
options.sourceMap = JSON.parse(options.sourceMap);
}
self._resourceSlot.addStylesheet(options);
},
/**
@@ -195,13 +200,18 @@ _.extend(InputFile.prototype, {
* @param {String} options.path The path at which the JavaScript file
* should be inserted, may not be honored in case of path conflicts.
* @param {String} options.data The code to be added.
* @param {String} options.sourceMap A stringified JSON sourcemap, in case the
* JavaScript file was generated from a different file.
* @param {String|Object} options.sourceMap A stringified JSON
* sourcemap, in case the JavaScript file was generated from a
* different file.
* @memberOf InputFile
* @instance
*/
addJavaScript: function (options) {
var self = this;
if (options.sourceMap && typeof options.sourceMap === 'string') {
// XXX remove an anti-XSSI header? ")]}'\n"
options.sourceMap = JSON.parse(options.sourceMap);
}
self._resourceSlot.addJavaScript(options);
},
/**
@@ -578,11 +588,13 @@ _.extend(PackageSourceBatch.prototype, {
// Add each output as a resource
const ret = linkedFiles.map((file) => {
const sm = (typeof file.sourceMap === 'string')
? JSON.parse(file.sourceMap) : file.sourceMap;
return {
type: "js",
data: new Buffer(file.source, 'utf8'), // XXX encoding
servePath: file.servePath,
sourceMap: file.sourceMap
sourceMap: sm
// XXX BBP hash? needed for minifiers?
};
});

View File

@@ -5,6 +5,7 @@ var isopackets = require('./isopackets.js');
var watch = require('./watch.js');
var Profile = require('./profile.js').Profile;
import LRU from 'lru-cache';
import {sourceMapLength} from './utils.js';
// A rather small cache size, assuming only one module is being linked
// most of the time.
@@ -14,8 +15,7 @@ const CACHE_SIZE = process.env.METEOR_LINKER_PRELINK_CACHE_SIZE || 1024*1024*20;
const LINKER_PRELINK_CACHE = new LRU({
max: CACHE_SIZE,
length: function (prelinked) {
return prelinked.source.length +
prelinked.sourceMap ? prelinked.sourceMap.length : 0;
return prelinked.source.length + sourceMapLength(prelinked.sourceMap);
}
});
@@ -131,7 +131,7 @@ _.extend(Module.prototype, {
}); // results has 'code' and 'map' attributes
}
);
const sourceMap = JSON.stringify(results.map.toJSON());
const sourceMap = results.map.toJSON();
const prelinked = {
source: results.code,
@@ -175,7 +175,7 @@ _.extend(Module.prototype, {
return [{
source: results.code,
servePath: self.combinedServePath,
sourceMap: results.map.toString()
sourceMap: results.map.toJSON()
}];
})
});
@@ -252,6 +252,7 @@ var File = function (inputFile, module) {
self.bare = !!inputFile.bare;
// A source map (generated by something like CoffeeScript) for the input file.
// Is an Object, not a string.
self.sourceMap = inputFile.sourceMap;
// The Module containing this file.
@@ -765,13 +766,13 @@ var fullLink = Profile("linker.fullLink", function (inputFiles, {
if (header.charAt(header.length - 1) !== "\n")
header += "\n"; // make sure it's a whole number of lines
var headerLines = header.split('\n').length - 1;
var sourceMapJson = JSON.parse(file.sourceMap);
sourceMapJson.mappings = (new Array(headerLines + 1).join(';')) +
sourceMapJson.mappings;
var sourceMap = file.sourceMap;
sourceMap.mappings = (new Array(headerLines + 1).join(';')) +
sourceMap.mappings;
return {
source: header + file.source + footer,
servePath: file.servePath,
sourceMap: JSON.stringify(sourceMapJson)
sourceMap: sourceMap
};
} else {
return {

View File

@@ -726,3 +726,14 @@ exports.sha256 = function (contents) {
hash.update(contents);
return hash.digest('base64');
};
exports.sourceMapLength = function (sm) {
if (! sm) return 0;
// sum the length of sources and the mappings, the size of
// metadata is ignored, but it is not a big deal
return sm.mappings.length
+ (sm.sourcesContent || []).reduce((soFar, current) => {
return soFar + (current ? current.length : 0);
}, 0);
};