Add hot module reload for web.browser architecture

This commit is contained in:
zodern
2020-06-07 19:10:46 -05:00
parent 71f67d9dba
commit d85b28ebed
17 changed files with 1375 additions and 1 deletions

View File

@@ -1,3 +1,55 @@
### Hot Module Reload
As an alternative to Hot Code Push in development, Hot Module Reload replaces modules in the bundle that were modified without reloading the page.
Use:
```
meteor run --release zodern:COMET --include-packages zodern:hot
```
#### API
`module.hot.accept()` - The module will be rerun whenever it or any of its dependencies are modified
`module.hot.decline()` - If the module or any of its dependencies are modified, hot code push will be used instead of HMR.
These should be wrapped by
```
if (module.hot) {
// Use module.hot here
}
```
Minifiers are able to remove the if statement for production, though none of the common minifiers for Meteor currently do.
#### Implementation details
For now, the linker emits an event that packages, such as zodern:hot, can listen to for an up-to-date list of files in a unibuild. Eventually this will be replaced by moving the HMR code from a build plugin into the meteor-tool.
The zodern:hot package compares the list of files between builds to identify what has changed. When a client tries to do a hot code push, the client will request changes over a websocket from the build plugin. If possible, it will use HMR to update the modules. Otherwise, it will let the page be reloaded by hot code push.
The client uses a modified version of [install](https://www.npmjs.com/package/install) to allow replacing modules. To force it to be loaded at the correct time, the meteor-tool adds the `zodern:modules-runtime-hot` dependency to `modules`.
#### Remaining tasks
This is still an early version, and there is still a long list of items to implement.
- HMR is enabled starting with the second build after Meteor is started, so there are two builds to compare. This should be fixed by storing the necessary information in the linker cache.
- Support full webpack API
- Look into allowing packages to replace the HMR api to allow experimentation
- Look into an api to allow packages to run code before and after each module is run. This could be used to implement react fast reload and for packages to automatically clean up after a file is modified. For example, this could be used to remove methods and publications previously added by a modified file.
HMR is only enabled for the `web.browser` architecture. Support for these architectures remains to be added:
- web.browser.legacy
- web.cordova
- server architectures
HMR is not supported in these situations. Hot code push will be used instead:
- A package is modified
- A file is added
- A file is removed
- Files were modified that do not have a module id, are bare, are json data, or do not have meteorInstallOptions
# <a href='https://www.meteor.com'><img src='https://user-images.githubusercontent.com/841294/26841702-0902bbee-4af3-11e7-9805-0618da66a246.png' height='60' alt='Meteor'></a>
[![TravisCI Status](https://travis-ci.org/meteor/meteor.svg?branch=devel)](https://travis-ci.org/meteor/meteor)

View File

@@ -0,0 +1 @@
node_modules

View File

@@ -0,0 +1,7 @@
This directory and the files immediately inside it are automatically generated
when you change this package's NPM dependencies. Commit the files in this
directory (npm-shrinkwrap.json, .gitignore, and this README) to source control
so that others run the same versions of sub-dependencies.
You should NOT check in the node_modules directory that Meteor automatically
creates; if you are using git, the .gitignore file tells git to ignore it.

View File

@@ -0,0 +1,10 @@
{
"lockfileVersion": 1,
"dependencies": {
"ws": {
"version": "7.2.5",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.2.5.tgz",
"integrity": "sha512-C34cIU4+DB2vMyAbmEKossWq2ZQDr6QEyuuCzWrM9zfw1sGc0mYiJ0UnG9zzNykt49C2Fi34hvr2vssFQRS6EA=="
}
}
}

101
packages/hot/changesets.js Normal file
View File

@@ -0,0 +1,101 @@
// Builds a change set for one unibuild by comparing the linker's latest
// prelink result against the hashes recorded the previous time it was
// linked. Returns an object that both records the new hashes (for the
// next comparison) and, when reloadable, carries the serialized content
// of added/changed files to send to clients.
function comparePrelinkResult(previousResult, { name, module }) {
  const previous = previousResult || {};
  const previousFileHashes = previous.fileHashes;
  const previousUnreloadable = previous.unreloadableHashes;

  const comparison = compareFiles(
    previousFileHashes,
    previousUnreloadable,
    module.files
  );

  // We can only produce a usable change set when there is something to
  // compare against, and only when no unreloadable file changed.
  const canCompare = !!previousFileHashes;
  const reloadable = canCompare && !comparison.unreloadableChanged;

  // Serializes a file's prelinked output so it can be shipped to clients.
  const fileDetailsToSave = (file) => ({
    content: file.getPrelinkedOutput({}).toStringWithSourceMap({}),
    path: file.absModuleId
  });

  return {
    fileHashes: comparison.fileHashes,
    unreloadableHashes: comparison.unreloadable,
    removedFilePaths: comparison.removedFilePaths,
    reloadable,
    addedFiles: reloadable ? comparison.addedFiles.map(fileDetailsToSave) : [],
    changedFiles: reloadable ? comparison.changedFiles.map(fileDetailsToSave) : [],
    linkedAt: Date.now(),
    name
  };
}
// Returns a truthy value when the file is a proper module that can be
// swapped at runtime: it has a module id, is not bare, is not json data,
// and was compiled with meteorInstallOptions.
function checkReloadable(file) {
  return file.absModuleId &&
    !file.bare &&
    !file.jsonData &&
    file.meteorInstallOptions
}
// Compares the files of the current build against the hashes recorded
// from the previous build. Returns the new hashes plus the lists of
// added/changed file objects, removed module paths, and whether the set
// of unreloadable files changed.
function compareFiles(previousHashes = new Map(), previousUnreloadable = [], currentFiles) {
  const unreloadable = [];
  const currentHashes = new Map();
  // Start with every previously seen module; whatever is left after the
  // loop are modules that were removed in this build.
  const unseenModules = new Map(previousHashes);
  const changedFiles = [];
  const addedFiles = [];
  currentFiles.forEach(file => {
    if (
      !checkReloadable(file)
    ) {
      // TODO: we should be using more than just the hash
      unreloadable.push(file._inputHash);
      return;
    }
    // TODO: we should be using more than just the hash
    currentHashes.set(file.absModuleId, file._inputHash);
    const previousHash = previousHashes.get(file.absModuleId);
    if (!previousHash) {
      addedFiles.push(file);
    } else if (previousHash !== file._inputHash) {
      changedFiles.push(file);
    }
    unseenModules.delete(file.absModuleId);
  });
  // BUG FIX: Map.prototype.keys() returns an iterator, which has no
  // .length and serializes to {} in JSON, so consumers checking
  // removedFilePaths.length (and the websocket payload) silently broke.
  // Materialize it as an array instead.
  const removedFilePaths = Array.from(unseenModules.keys());
  // The unreloadable set changed when the count differs or any hash
  // differs at the same position.
  const unreloadableChanged = unreloadable.length !== previousUnreloadable.length ||
    unreloadable.some((hash, i) => hash !== previousUnreloadable[i]);
  return {
    fileHashes: currentHashes,
    addedFiles,
    changedFiles,
    removedFilePaths,
    unreloadable,
    unreloadableChanged,
  };
};
module.exports = {
comparePrelinkResult
}

261
packages/hot/client.js Normal file
View File

@@ -0,0 +1,261 @@
// TODO: add an api to Reify to update cached exports for a module
const ReifyEntry = require('/node_modules/meteor/modules/node_modules/reify/lib/runtime/entry.js')
// Timestamp used when requesting changes so the server only sends change
// sets linked after this moment.
// Due to the bundler and proxy running in the same node process
// this could possibly be run after the next build finished
// TODO: the builder should inject the build time in the bundle
let lastUpdated = Date.now();
// Only the modern (web.browser) bundle supports HMR for now.
let arch = __meteor_runtime_config__.isModern ? 'web.browser' : 'web.browser.legacy';
let enabled = arch === 'web.browser';
if (!enabled) {
  console.log(`HMR is not supported in ${arch}`);
}
// Callback handed to us by Reload._onMigrate; calling it completes a
// normal hot code push.
let pendingReload = null;
// Once true, stop attempting HMR and always allow hot code push.
let mustReload = false;
// TODO: handle disconnects
const socket = new WebSocket('ws://localhost:3124');
// Asks the build plugin for change sets for this arch newer than
// lastUpdated.
function requestChanges() {
  socket.send(JSON.stringify({
    type: 'request-changes',
    arch,
    after: lastUpdated
  }));
}
socket.addEventListener('open', function () {
  console.log('HMR: connected');
});
// Handles change sets pushed back by the build plugin.
socket.addEventListener('message', function (event) {
  const message = JSON.parse(event.data);
  switch (message.type) {
    case 'changes': {
      // TODO: support removed or added files
      // A change set can only be applied with HMR when the linker marked
      // it reloadable and it neither added nor removed files.
      const hasUnreloadable = message.changeSets.some(changeSet => {
        return !changeSet.reloadable ||
          changeSet.removedFilePaths.length > 0 ||
          changeSet.addedFiles.length > 0;
      });
      if (hasUnreloadable || message.changeSets.length === 0) {
        console.log('HMR: Unable to do HMR. Falling back to hot code push.')
        // Complete hot code push if we can not do hot module reload.
        // BUG FIX: the old condition (pendingReload && hasUnreloadable ||
        // length === 0) silently applied unreloadable change sets when
        // pendingReload was not set yet, and called pendingReload() while
        // it was still null when the server sent no change sets.
        mustReload = true;
        if (pendingReload) {
          return pendingReload();
        }
        return;
      }
      // In case the user changed how a module works with HMR
      // in one of the earlier change sets, we want to apply each
      // change set one at a time.
      message.changeSets.forEach(changeSet => {
        applyChangeset(changeSet);
      });
      // Remember the newest change set applied so the next request only
      // asks for newer ones.
      lastUpdated = message.changeSets[message.changeSets.length - 1].linkedAt;
    }
  }
});
// Follows pathParts down the installed module tree, consuming the parts
// array as it goes, and returns the entry found at the final segment.
// Throws Error('not-exist') when a segment is missing.
function walkTree(pathParts, tree) {
  let node = tree;
  for (;;) {
    const segment = pathParts.shift();
    const child = node.contents[segment];
    if (!child) {
      console.log(segment, pathParts, child, node);
      throw new Error('not-exist');
    }
    if (pathParts.length === 0) {
      return child;
    }
    node = child;
  }
}
// Encodes a source map object as an inline base64 data-URI comment that
// can be appended to evaluated module code.
function createInlineSourceMap(map) {
  const encoded = btoa(JSON.stringify(map));
  return `//# sourceMappingURL=data:application/json;base64,${encoded}`;
}
// Swaps a module's factory function for one compiled from the new
// `contents` ({ code, map }) sent by the build plugin. The new factory
// is not run here; it executes the next time the module is evaluated.
function replaceFileContent(file, contents) {
  console.log('HMR: replacing module:', file.module.id);
  // TODO: to replace content in packages, we need an eval function that runs
  // within the package scope, like dynamic imports does.
  const moduleFunction = function () {
    // TODO: Use same sourceURL as the sourcemap for the main bundle does
    return eval(
      // Wrap the function(require,exports,module){...} expression in
      // parentheses to force it to be parsed as an expression.
      "(" + contents.code + ")\n//# sourceURL=" + file.module.id +
      "\n" + createInlineSourceMap(contents.map)
    ).apply(this, arguments);
  }
  file.contents = moduleFunction;
}
// Discards a module's cached result and requires it again so its
// (possibly replaced) factory function runs once more.
function rerunFile(file) {
  // Clearing .exports and .loaded makes the module runtime treat the
  // module as never evaluated.
  file.module.loaded = false;
  delete file.module.exports;
  console.log('HMR: rerunning', file.module.id);
  // re-evaluate the file
  require(file.module.id);
}
// Wrap Module.prototype.link (used by ES module imports) so the module
// runtime records which modules import which files; this importedBy
// information is what findReloadableParents walks.
const oldLink = module.constructor.prototype.link;
module.constructor.prototype.link = function (path) {
  if (this._recordImport) {
    this._recordImport(path);
  };
  return oldLink.apply(this, arguments);
}
// Walks up the importedBy graph and returns a flat array of ancestor
// files that can accept an update. Entries are `false` where a parent
// chain can not accept, so callers must check for falsy values.
function findReloadableParents(importedBy) {
  return Object.values(importedBy).map(parentFile => {
    // Force module to be rerun when we complete applying the changeset
    parentFile.module.replaceModule();
    const canAccept = parentFile.module.hot && parentFile.module.hot._canAcceptUpdate();
    if (canAccept === true) {
      return parentFile;
    } else if (
      canAccept === null && Object.keys(parentFile.importedBy).length > 0
    ) {
      // When canAccept is null, whether it is reloadable or not depends on
      // if its parents can accept changes.
      return findReloadableParents(parentFile.importedBy);
    } else {
      // Either the module declined updates, or it has no parents that
      // could absorb the change.
      return false;
    }
  }).flat(Infinity);
}
// Resets a module's cached state (and optionally replaces its factory
// with new contents) so it will be fully re-evaluated on next require.
// Returns the File object, or null when the module does not exist or is
// a dynamic import that was never loaded.
module.constructor.prototype.replaceModule = function (id, contents) {
  // When called with no arguments, operate on this module itself.
  const moduleId = id || this.id;
  const root = this._getRoot();
  let file;
  try {
    file = walkTree(moduleId.split('/').slice(1), root);
  } catch (e) {
    if (e.message === 'not-exist') {
      return null;
    }
    throw e;
  }
  if (!file.contents) {
    // File is a dynamic import that hasn't been loaded
    return null;
  }
  if (contents) {
    replaceFileContent(file, contents);
  }
  // Clear cached exports
  // TODO: check how this affects live bindings for ecmascript modules
  delete file.module.exports;
  // BUG FIX: use moduleId (which falls back to this.id) instead of the
  // raw `id` argument — `id` is undefined when replaceModule() is called
  // with no arguments (as findReloadableParents does), so the wrong
  // Reify entry was being reset.
  const entry = ReifyEntry.getOrCreate(moduleId);
  entry.getters = {};
  entry.setters = {};
  entry.module = null;
  Object.keys(entry.namespace).forEach(key => {
    if (key !== '__esModule') {
      delete entry.namespace[key];
    }
  });
  return file;
}
// Applies a single change set: replaces each changed module's content,
// then reruns the nearest parents that accept updates. Falls back to
// hot code push when a replaced module has no reloadable parent chain.
function applyChangeset({
  changedFiles
}) {
  // TODO: prevent requiring removed files
  // TODO: install added files
  const reloadableParents = [];
  let hasImportedModules = false;
  changedFiles.forEach(({ content, path }) => {
    const file = module.replaceModule(path, content);
    // file will be null for dynamic imports that haven't been
    // imported
    if (file) {
      hasImportedModules = true;
      reloadableParents.push(...findReloadableParents({ self: file }));
    } else {
      console.log(`Unable to replace module ${path}. It is probably a dynamic file that hasn't been imported`);
    }
  });
  // Check if some of the module's parents are not reloadable
  // In that case, we have to do a full reload
  // TODO: record which parents cause this
  // NOTE: `&&` binds tighter than `||` here — reload when modules were
  // replaced but no reloadable parent was found, or when any entry in
  // reloadableParents is the `false` sentinel from findReloadableParents.
  if (
    hasImportedModules &&
    reloadableParents.length === 0 ||
    reloadableParents.some(parent => !parent)
  ) {
    if (pendingReload) {
      return pendingReload();
    }
  }
  // TODO: deduplicate
  reloadableParents.forEach(parent => {
    rerunFile(parent);
  });
}
// Version of the non-refreshable (js) assets this client is running.
// BUG FIX: guard against the 'web.browser' entry being absent from the
// autoupdate versions so loading this file can not throw.
let nonRefreshableVersion = ((__meteor_runtime_config__.autoupdate.versions || {})['web.browser'] || {}).versionNonRefreshable;
Meteor.startup(() => {
  if (!enabled) {
    return;
  }
  // Watch the server's advertised client versions; a changed
  // versionNonRefreshable means the js bundle changed.
  Package['autoupdate'].Autoupdate._clientVersions.watch((doc) => {
    if (doc._id !== 'web.browser') {
      return;
    }
    // We can't do anything here until Reload._onMigrate
    // has been called
    if (!pendingReload) {
      return;
    }
    if (doc.versionNonRefreshable !== nonRefreshableVersion) {
      requestChanges();
      nonRefreshableVersion = doc.versionNonRefreshable;
    }
  });
  // We disable hot code push for js until there were
  // changes that can not be applied through HMR.
  Package['reload'].Reload._onMigrate((tryReload) => {
    if (mustReload) {
      return [true];
    }
    pendingReload = tryReload;
    requestChanges();
    return [false];
  });
});

35
packages/hot/package.js Normal file
View File

@@ -0,0 +1,35 @@
// Package manifest for zodern:hot, which adds hot module reloading.
Package.describe({
  name: 'zodern:hot',
  version: '0.1.1',
  summary: 'Adds Hot Module Reloading to Meteor',
  documentation: 'README.md'
});
// The build plugin runs inside the meteor-tool process; it records link
// results and serves change sets to clients over a websocket (plugin.js).
Package.registerBuildPlugin({
  name: 'hot-core',
  sources: ['plugin.js'],
  use: ['ecmascript'],
  npmDependencies: {
    ws: '7.2.5'
  },
});
Package.onUse(function (api) {
  api.use('isobuild:compiler-plugin@1.0.0');
  api.use('babel-compiler');
  api.use('modules');
  // Pull in the HMR-aware module runtime for apps using this package.
  api.imply('zodern:modules-runtime-hot');
  // client.js connects to the build plugin's websocket and applies updates.
  api.addFiles('./client.js', 'client');
});
Package.onTest(function (api) {
  api.use(["tinytest", "underscore"]);
  api.use(["es5-shim", "ecmascript", "babel-compiler"]);
  api.addFiles("runtime-tests.js");
  api.addFiles("transpilation-tests.js", "server");
  api.addFiles("bare-test.js");
  api.addFiles("bare-test-file.js", ["client", "server"], {
    bare: true
  });
});

117
packages/hot/plugin.js Normal file
View File

@@ -0,0 +1,117 @@
const {
comparePrelinkResult
} = require('./changesets');
// Meteor can load the plugin multiple times
// when it is a local package
// Any state that should be preserved is stored in
// this object
const sharedState = global.__hotState || {
  initialized: false,          // whether init() has run in this process
  wsServer: null,              // websocket server accepting client connections
  wsMessageHandler: null,      // latest message handler (replaced on rebuild)
  prelinkResultHandler: null,  // latest prelink handler (replaced on rebuild)
  previousPrelinkResults: {},  // recorded change sets, keyed by bundle arch
};
global.__hotState = sharedState;
// Returns the last element of `array` for which `compare` returns a
// truthy value, or undefined when no element matches.
function findLast(array, compare) {
  // BUG FIX: the loop previously stopped at i > 0, which skipped the
  // first element (index 0), so a match at the start of the array —
  // including the only element of a one-element array — was never found.
  for (let i = array.length - 1; i >= 0; i--) {
    if (compare(array[i])) {
      return array[i]
    }
  }
}
// Records a change set for every prelink result the linker emits, so the
// websocket handler can later answer "what changed after time T?".
function prelinkResultHandler(prelinkResult) {
  const {
    isApp,
    name,
    bundleArch
  } = prelinkResult;
  // TODO: we should limit the number of change sets that are saved
  // to avoid a memory leak
  sharedState.previousPrelinkResults[bundleArch] =
    sharedState.previousPrelinkResults[bundleArch] || [];
  // TODO: support HMR in packages
  // TODO: support HMR in legacy bundle
  if (!isApp || bundleArch !== 'web.browser') {
    // Require a full reload whenever a package is modified
    sharedState.previousPrelinkResults[bundleArch].push({
      name,
      reloadable: false,
      linkedAt: Date.now()
    })
    return;
  }
  // TODO: Meteor should cache some of this data with the linker cache
  // so we have something to compare with when linking the first time
  // after meteor is started
  const changeSets = sharedState.previousPrelinkResults[bundleArch];
  // Compare against the most recent change set recorded for the same
  // unibuild name.
  let previousChangeset = findLast(changeSets, (changeSet) => {
    return changeSet.name === name;
  });
  const changeset = comparePrelinkResult(previousChangeset, prelinkResult);
  sharedState.previousPrelinkResults[bundleArch].push(changeset);
}
// Handles one websocket message from a client. Currently the only known
// message is 'request-changes', which is answered with every recorded
// change set for the requested arch linked after the given timestamp.
function wsMessageHandler(conn, _message) {
  const message = JSON.parse(_message);
  if (message.type === 'request-changes') {
    const { after, arch } = message;
    const recorded = sharedState.previousPrelinkResults[arch] || [];
    const newChanges = recorded.filter(({ linkedAt }) => linkedAt > after);
    conn.send(JSON.stringify({
      type: 'changes',
      changeSets: newChanges
    }));
  } else {
    console.warn('Unknown HMR message:', message.type);
  }
}
// Update handlers so event listeners added during initialization can
// use the latest handlers if this package was modified and rebuilt
// (init() deliberately dispatches through sharedState for this reason).
sharedState.prelinkResultHandler = prelinkResultHandler;
sharedState.wsMessageHandler = wsMessageHandler;
// Starts the websocket server that clients connect to and subscribes to
// the linker's prelink results. Runs at most once per meteor-tool
// process, even when the plugin itself is rebuilt (see the initialized
// guard below).
function init() {
  sharedState.initialized = true;
  // TODO: port should be based on port app is using
  // TODO: look into using sockjs instead
  const WebSocket = require('ws');
  sharedState.wsServer = new WebSocket.Server({ port: 3124 });
  // TODO: should require connections to send a secret before
  // being able to receive changes
  sharedState.wsServer.on('connection', function (ws) {
    ws.on('message', (message) => {
      // Dispatch through sharedState so a rebuilt plugin's newer handler
      // is used for connections opened before the rebuild.
      sharedState.wsMessageHandler(ws, message);
    });
  });
  Plugin._onPreLinked(function (prelinkResult) {
    sharedState.prelinkResultHandler(prelinkResult);
  });
}
// Only initialize once per process, even if Meteor reloads the plugin.
if (!sharedState.initialized) {
  init();
}

View File

@@ -0,0 +1,570 @@
makeInstaller = function (options) {
"use strict";
options = options || {};
// These file extensions will be appended to required module identifiers
// if they do not exactly match an installed module.
var defaultExtensions = options.extensions || [".js", ".json"];
// If defined, the options.fallback function will be called when no
// installed module is found for a required module identifier. Often
// options.fallback will be implemented in terms of the native Node
// require function, which has the ability to load binary modules.
var fallback = options.fallback;
// List of fields to look for in package.json files to determine the
// main entry module of the package. The first field listed here whose
// value is a string will be used to resolve the entry module.
var mainFields = options.mainFields ||
// If options.mainFields is absent and options.browser is truthy,
// package resolution will prefer the "browser" field of package.json
// files to the "main" field. Note that this only supports
// string-valued "browser" fields for now, though in the future it
// might make sense to support the object version, a la browserify.
(options.browser ? ["browser", "main"] : ["main"]);
var hasOwn = {}.hasOwnProperty;
function strictHasOwn(obj, key) {
return isObject(obj) && isString(key) && hasOwn.call(obj, key);
}
// Cache for looking up File objects given absolute module identifiers.
// Invariants:
// filesByModuleId[module.id] === fileAppendId(root, module.id)
// filesByModuleId[module.id].module === module
var filesByModuleId = {};
// The file object representing the root directory of the installed
// module tree.
var root = new File("/", new File("/.."));
var rootRequire = makeRequire(root);
// Merges the given tree of directories and module factory functions
// into the tree of installed modules and returns a require function
// that behaves as if called from a module in the root directory.
function install(tree, options) {
if (isObject(tree)) {
fileMergeContents(root, tree, options);
}
return rootRequire;
}
// Replace this function to enable Module.prototype.prefetch.
install.fetch = function (ids) {
throw new Error("fetch not implemented");
};
// This constructor will be used to instantiate the module objects
// passed to module factory functions (i.e. the third argument after
// require and exports), and is exposed as install.Module in case the
// caller of makeInstaller wishes to modify Module.prototype.
function Module(id) {
this.id = id;
// The Node implementation of module.children unfortunately includes
// only those child modules that were imported for the first time by
// this parent module (i.e., child.parent === this).
this.children = [];
// This object is an install.js extension that includes all child
// modules imported by this module, even if this module is not the
// first to import them.
this.childrenById = {};
}
// Used to keep module.prefetch promise resolutions well-ordered.
var lastPrefetchPromise;
// May be shared by multiple sequential calls to module.prefetch.
// Initialized to {} only when necessary.
var missing;
Module.prototype.prefetch = function (id) {
var module = this;
var parentFile = getOwn(filesByModuleId, module.id);
lastPrefetchPromise = lastPrefetchPromise || Promise.resolve();
var previousPromise = lastPrefetchPromise;
function walk(module) {
var file = getOwn(filesByModuleId, module.id);
if (fileIsDynamic(file) && !file.pending) {
file.pending = true;
missing = missing || {};
// These are the data that will be exposed to the install.fetch
// callback, so it's worth documenting each item with a comment.
missing[module.id] = {
// The CommonJS module object that will be exposed to this
// dynamic module when it is evaluated. Note that install.fetch
// could decide to populate module.exports directly, instead of
// fetching anything. In that case, install.fetch should omit
// this module from the tree that it produces.
module: file.module,
// List of module identifier strings imported by this module.
// Note that the missing object already contains all available
// dependencies (including transitive dependencies), so
// install.fetch should not need to traverse these dependencies
// in most cases; however, they may be useful for other reasons.
// Though the strings are unique, note that two different
// strings could resolve to the same module.
deps: Object.keys(file.deps),
// The options (if any) that were passed as the second argument
// to the install(tree, options) function when this stub was
// first registered. Typically contains options.extensions, but
// could contain any information appropriate for the entire tree
// as originally installed. These options will be automatically
// inherited by the newly fetched modules, so install.fetch
// should not need to modify them.
options: file.options,
// Any stub data included in the array notation from the
// original entry for this dynamic module. Typically contains
// "main" and/or "browser" fields for package.json files, and is
// otherwise undefined.
stub: file.stub
};
each(file.deps, function (parentId, id) {
fileResolve(file, id);
});
each(module.childrenById, walk);
}
}
return lastPrefetchPromise = new Promise(function (resolve) {
var absChildId = module.resolve(id);
each(module.childrenById, walk);
resolve(absChildId);
}).then(function (absChildId) {
// Grab the current missing object and fetch its contents.
var toBeFetched = missing;
missing = null;
function clearPending() {
if (toBeFetched) {
Object.keys(toBeFetched).forEach(function (id) {
getOwn(filesByModuleId, id).pending = false;
});
}
}
return new Promise(function (resolve) {
// The install.fetch function takes an object mapping missing
// dynamic module identifiers to options objects, and should
// return a Promise that resolves to a module tree that can be
// installed. As an optimization, if there were no missing dynamic
// modules, then we can skip calling install.fetch entirely.
resolve(toBeFetched && install.fetch(toBeFetched));
}).then(function (tree) {
function both() {
install(tree);
clearPending();
return absChildId;
}
// Although we want multiple install.fetch calls to run in
// parallel, it is important that the promises returned by
// module.prefetch are resolved in the same order as the original
// calls to module.prefetch, because previous fetches may include
// modules assumed to exist by more recent module.prefetch calls.
// Whether previousPromise was resolved or rejected, carry on with
// the installation regardless.
return previousPromise.then(both, both);
}, function (error) {
// Fixes https://github.com/meteor/meteor/issues/10182.
clearPending();
throw error;
});
});
};
install.Module = Module;
function getOwn(obj, key) {
return strictHasOwn(obj, key) && obj[key];
}
function isObject(value) {
return value !== null && typeof value === "object";
}
function isFunction(value) {
return typeof value === "function";
}
function isString(value) {
return typeof value === "string";
}
function makeMissingError(id) {
return new Error("Cannot find module '" + id + "'");
}
Module.prototype.resolve = function (id) {
var file = fileResolve(filesByModuleId[this.id], id);
if (file) return file.module.id;
var error = makeMissingError(id);
if (fallback && isFunction(fallback.resolve)) {
return fallback.resolve(id, this.id, error);
}
throw error;
};
Module.prototype.require = function require(id) {
var result = fileResolve(filesByModuleId[this.id], id);
if (result) {
result.importedBy[this.id] = filesByModuleId[this.id];
return fileEvaluate(result, this);
}
var error = makeMissingError(id);
if (isFunction(fallback)) {
return fallback(
id, // The missing module identifier.
this.id, // ID of the parent module.
error // The error we would have thrown.
);
}
throw error;
};
Module.prototype._getRoot = function () {
return root;
}
Module.prototype._recordImport = function (id) {
var result = fileResolve(filesByModuleId[this.id], id);
result.importedBy[this.id] = filesByModuleId[this.id];
}
function makeRequire(file) {
var module = file.module;
function require(id) {
return module.require(id);
}
require.extensions = fileGetExtensions(file).slice(0);
require.resolve = function resolve(id) {
return module.resolve(id);
};
return require;
}
// File objects represent either directories or modules that have been
// installed. When a `File` respresents a directory, its `.contents`
// property is an object containing the names of the files (or
// directories) that it contains. When a `File` represents a module, its
// `.contents` property is a function that can be invoked with the
// appropriate `(require, exports, module)` arguments to evaluate the
// module. If the `.contents` property is a string, that string will be
// resolved as a module identifier, and the exports of the resulting
// module will provide the exports of the original file. The `.parent`
// property of a File is either a directory `File` or `null`. Note that
// a child may claim another `File` as its parent even if the parent
// does not have an entry for that child in its `.contents` object.
// This is important for implementing anonymous files, and preventing
// child modules from using `../relative/identifier` syntax to examine
// unrelated modules.
function File(moduleId, parent) {
var file = this;
// Link to the parent file.
file.parent = parent = parent || null;
// The module object for this File, which will eventually boast an
// .exports property when/if the file is evaluated.
file.module = new Module(moduleId);
filesByModuleId[moduleId] = file;
// The .contents of the file can be either (1) an object, if the file
// represents a directory containing other files; (2) a factory
// function, if the file represents a module that can be imported; (3)
// a string, if the file is an alias for another file; or (4) null, if
// the file's contents are not (yet) available.
file.contents = null;
// Set of module identifiers imported by this module. Note that this
// set is not necessarily complete, so don't rely on it unless you
// know what you're doing.
file.deps = {};
// Files that imported this module.
file.importedBy = {};
}
function fileEvaluate(file, parentModule) {
var module = file.module;
if (!strictHasOwn(module, "exports")) {
var contents = file.contents;
if (!contents) {
// If this file was installed with array notation, and the array
// contained one or more objects but no functions, then the combined
// properties of the objects are treated as a temporary stub for
// file.module.exports. This is particularly important for partial
// package.json modules, so that the resolution logic can know the
// value of the "main" and/or "browser" fields, at least, even if
// the rest of the package.json file is not (yet) available.
if (file.stub) {
return file.stub;
}
throw makeMissingError(module.id);
}
if (parentModule) {
module.parent = parentModule;
var children = parentModule.children;
if (Array.isArray(children)) {
children.push(module);
}
}
contents(
makeRequire(file),
// If the file had a .stub, reuse the same object for exports.
module.exports = file.stub || {},
module,
file.module.id,
file.parent.module.id
);
module.loaded = true;
}
// The module.runModuleSetters method will be deprecated in favor of
// just module.runSetters: https://github.com/benjamn/reify/pull/160
var runSetters = module.runSetters || module.runModuleSetters;
if (isFunction(runSetters)) {
runSetters.call(module);
}
return module.exports;
}
function fileIsDirectory(file) {
return file && isObject(file.contents);
}
function fileIsDynamic(file) {
return file && file.contents === null;
}
function fileMergeContents(file, contents, options) {
if (Array.isArray(contents)) {
contents.forEach(function (item) {
if (isString(item)) {
file.deps[item] = file.module.id;
} else if (isFunction(item)) {
contents = item;
} else if (isObject(item)) {
file.stub = file.stub || {};
each(item, function (value, key) {
file.stub[key] = value;
});
}
});
if (!isFunction(contents)) {
// If the array did not contain a function, merge nothing.
contents = null;
}
} else if (!isFunction(contents) &&
!isString(contents) &&
!isObject(contents)) {
// If contents is neither an array nor a function nor a string nor
// an object, just give up and merge nothing.
contents = null;
}
if (contents) {
file.contents = file.contents || (isObject(contents) ? {} : contents);
if (isObject(contents) && fileIsDirectory(file)) {
each(contents, function (value, key) {
if (key === "..") {
child = file.parent;
} else {
var child = getOwn(file.contents, key);
if (!child) {
child = file.contents[key] = new File(
file.module.id.replace(/\/*$/, "/") + key,
file
);
child.options = options;
}
}
fileMergeContents(child, value, options);
});
}
}
}
function each(obj, callback, context) {
Object.keys(obj).forEach(function (key) {
callback.call(this, obj[key], key);
}, context);
}
function fileGetExtensions(file) {
return file.options
&& file.options.extensions
|| defaultExtensions;
}
function fileAppendIdPart(file, part, extensions) {
// Always append relative to a directory.
while (file && !fileIsDirectory(file)) {
file = file.parent;
}
if (!file || !part || part === ".") {
return file;
}
if (part === "..") {
return file.parent;
}
var exactChild = getOwn(file.contents, part);
// Only consider multiple file extensions if this part is the last
// part of a module identifier and not equal to `.` or `..`, and there
// was no exact match or the exact match was a directory.
if (extensions && (!exactChild || fileIsDirectory(exactChild))) {
for (var e = 0; e < extensions.length; ++e) {
var child = getOwn(file.contents, part + extensions[e]);
if (child && !fileIsDirectory(child)) {
return child;
}
}
}
return exactChild;
}
function fileAppendId(file, id, extensions) {
var parts = id.split("/");
// Use `Array.prototype.every` to terminate iteration early if
// `fileAppendIdPart` returns a falsy value.
parts.every(function (part, i) {
return file = i < parts.length - 1
? fileAppendIdPart(file, part)
: fileAppendIdPart(file, part, extensions);
});
return file;
}
function recordChild(parentModule, childFile) {
var childModule = childFile && childFile.module;
if (parentModule && childModule) {
parentModule.childrenById[childModule.id] = childModule;
}
}
// Resolve a module identifier `id` to a File object, mirroring Node's
// algorithm: absolute ids ("/...") resolve against the bundle root,
// relative ids ("./", "../") against `file`, and bare ids through
// node_modules lookup. Directories are resolved through package.json
// fields (`mainFields`) or an index file. Returns a falsy value when
// resolution fails.
//
// @param file File to resolve relative identifiers against.
// @param id Module identifier being resolved.
// @param parentModule Module on which to record the resolved child;
//   defaults to `file.module`.
// @param seenDirFiles Internal accumulator of directory Files whose
//   package.json has already been evaluated, preventing infinite
//   loops when a "main" field points back at a seen directory.
function fileResolve(file, id, parentModule, seenDirFiles) {
  var parentModule = parentModule || file.module;
  var extensions = fileGetExtensions(file);

  file =
    // Absolute module identifiers (i.e. those that begin with a `/`
    // character) are interpreted relative to the root directory, which
    // is a slight deviation from Node, which has access to the entire
    // file system.
    id.charAt(0) === "/" ? fileAppendId(root, id, extensions) :
    // Relative module identifiers are interpreted relative to the
    // current file, naturally.
    id.charAt(0) === "." ? fileAppendId(file, id, extensions) :
    // Top-level module identifiers are interpreted as referring to
    // packages in `node_modules` directories.
    nodeModulesLookup(file, id, extensions);

  // If the identifier resolves to a directory, we use the same logic as
  // Node to find an `index.js` or `package.json` file to evaluate.
  while (fileIsDirectory(file)) {
    seenDirFiles = seenDirFiles || [];

    // If the "main" field of a `package.json` file resolves to a
    // directory we've already considered, then we should not attempt to
    // read the same `package.json` file again. Using an array as a set
    // is acceptable here because the number of directories to consider
    // is rarely greater than 1 or 2. Also, using indexOf allows us to
    // store File objects instead of strings.
    if (seenDirFiles.indexOf(file) < 0) {
      seenDirFiles.push(file);

      var pkgJsonFile = fileAppendIdPart(file, "package.json");
      var pkg = pkgJsonFile && fileEvaluate(pkgJsonFile, parentModule);
      // Try each configured package.json field (e.g. "browser",
      // "module", "main") in priority order; `some` stops at the first
      // string-valued field that resolves.
      var mainFile, resolved = pkg && mainFields.some(function (name) {
        var main = pkg[name];
        if (isString(main)) {
          // The "main" field of package.json does not have to begin
          // with ./ to be considered relative, so first we try
          // simply appending it to the directory path before
          // falling back to a full fileResolve, which might return
          // a package from a node_modules directory.
          return mainFile = fileAppendId(file, main, extensions) ||
            fileResolve(file, main, parentModule, seenDirFiles);
        }
      });

      if (resolved && mainFile) {
        file = mainFile;
        recordChild(parentModule, pkgJsonFile);
        // The fileAppendId call above may have returned a directory,
        // so continue the loop to make sure we resolve it to a
        // non-directory file.
        continue;
      }
    }

    // If we didn't find a `package.json` file, or it didn't have a
    // resolvable `.main` property, the only possibility left to
    // consider is that this directory contains an `index.js` module.
    // This assignment almost always terminates the while loop, because
    // there's very little chance `fileIsDirectory(file)` will be true
    // for `fileAppendIdPart(file, "index", extensions)`. However, in
    // principle it is remotely possible that a file called `index.js`
    // could be a directory instead of a file.
    file = fileAppendIdPart(file, "index", extensions);
  }

  // A File whose contents is a string acts as an alias: the string is
  // itself a module identifier, so resolve it in turn.
  if (file && isString(file.contents)) {
    file = fileResolve(file, file.contents, parentModule, seenDirFiles);
  }

  recordChild(parentModule, file);

  return file;
};
// Walk up the directory tree from `file`, trying
// `<dir>/node_modules/<id>` at each ancestor until one resolves —
// the same lookup order Node uses for bare module identifiers.
// Returns a falsy value if no ancestor provides the package.
function nodeModulesLookup(file, id, extensions) {
  var resolved;
  while (file && !resolved) {
    resolved = fileIsDirectory(file) &&
      fileAppendId(file, "node_modules/" + id, extensions);
    file = file.parent;
  }
  return resolved;
}
return install;
};
// When loaded in a CommonJS environment, expose makeInstaller on
// exports; in plain <script> contexts `exports` is undefined and this
// block is skipped.
if (typeof exports === "object") {
  exports.makeInstaller = makeInstaller;
}

View File

@@ -0,0 +1,61 @@
// Installer for modern (web.browser) clients.
meteorInstall = makeInstaller({
  // On the client, make package resolution prefer the "browser" field of
  // package.json over the "module" field over the "main" field.
  browser: true,
  mainFields: ["browser", "module", "main"],
  // Invoked when a module identifier that hasn't been installed is
  // required. For missing Meteor packages we throw a hint suggesting
  // `meteor add`; anything else rethrows the original resolution error.
  fallback: function (id, parentId, error) {
    if (!id || !id.startsWith('meteor/')) {
      throw error;
    }
    var packageName = id.split('/', 2)[1];
    throw new Error(
      'Cannot find package "' + packageName + '". ' +
      'Try "meteor add ' + packageName + '".'
    );
  }
});
// Copy the `link` method from the stock modules-runtime installer's
// Module prototype onto ours, so modules installed through this
// patched installer still support `module.link(...)`.
let Module = Package['modules-runtime'].meteorInstall.Module;
meteorInstall.Module.prototype.link = Module.prototype.link;

// Expose a webpack-style `module.hot` API on every module. The getter
// lazily creates per-module HMR state on first access. Note that a
// fresh wrapper object is returned from every property access, but
// accept()/decline() mutate the shared `_hotState`, so calls persist.
Object.defineProperty(meteorInstall.Module.prototype, "hot", {
  get: function () {
    if (!this._hotState) {
      this._hotState = {
        // if null, whether it accepts depends on all of the modules that
        // required it
        _hotAccepts: null
      };
    }

    let hotState = this._hotState;

    return {
      // Mark this module as self-accepting: it will be rerun when it
      // or any of its dependencies are modified.
      accept() {
        if (arguments.length > 0) {
          // TODO: support same options as webpack
          throw new Error('hot.accept does not support any arguments.');
        }
        hotState._hotAccepts = true;
      },
      // Opt this module out of HMR: modifications fall back to a full
      // hot code push (page reload) instead.
      decline() {
        if (arguments.length > 0) {
          throw new Error('hot.decline does not support any arguments.');
        }
        hotState._hotAccepts = false;
      },
      // Internal: true/false once decided by accept()/decline();
      // null means undecided (depends on the requiring modules).
      _canAcceptUpdate() {
        return hotState._hotAccepts;
      }
    }
  },
  // Silently ignore assignments to module.hot.
  set() {}
});

// Due to changes in the comet meteor-tool, this package should be running
// after modules-runtime but before modules. We want modules to use
// our patched meteorInstall
Package['modules-runtime'].meteorInstall = meteorInstall;

View File

@@ -0,0 +1,29 @@
// Meteor package definition for the HMR-capable fork of the module
// installer (based on benjamn/install).
Package.describe({
  name: "zodern:modules-runtime-hot",
  version: "0.12.0",
  summary: "CommonJS module system with modifications to support HMR",
  git: "https://github.com/benjamn/install",
  documentation: "README.md"
});

Package.onUse(function (api) {
  // bare: true — presumably loads installer.js without the usual module
  // wrapper so meteorInstall exists before any modules run; confirm
  // against Meteor's api.addFiles docs.
  api.addFiles("installer.js", [
    "client",
    // "server"
  ], {
    bare: true
  });

  // Only the modern (web.browser) client is patched for now; legacy and
  // server variants are still disabled below.
  api.addFiles("modern.js", "modern");
  // api.addFiles("legacy.js", "legacy");
  // api.addFiles("server.js", "server");
  // api.addFiles("profile.js");

  api.export("meteorInstall", "client");
});

Package.onTest(function (api) {
  api.use("tinytest");
  api.use("modules"); // Test modules-runtime via modules.
  api.addFiles("modules-runtime-tests.js");
});

View File

@@ -0,0 +1,93 @@
// Options that will be populated below and then passed to makeInstaller.
var makeInstallerOptions = {};

// RegExp matching strings that don't start with a `.` or a `/`.
var topLevelIdPattern = /^[^./]/;

// This function will be called whenever a module identifier that hasn't
// been installed is required. For backwards compatibility, and so that we
// can require binary dependencies on the server, we implement the
// fallback in terms of Npm.require.
//
// @param {String} id - module identifier being required
// @param {String} parentId - identifier of the requiring module (unused)
// @param {Error} error - original resolution error; rethrown when no
//   fallback applies
makeInstallerOptions.fallback = function (id, parentId, error) {
  // For simplicity, we honor only top-level module identifiers here.
  // We could try to honor relative and absolute module identifiers by
  // somehow combining `id` with `dir`, but we'd have to be really careful
  // that the resulting modules were located in a known directory (not
  // some arbitrary location on the file system), and we only really need
  // the fallback for dependencies installed in node_modules directories.
  if (topLevelIdPattern.test(id)) {
    if (id && id.startsWith('meteor/')) {
      // Missing `meteor/<package>` imports get a hint to run
      // `meteor add` rather than a raw module-not-found error.
      // (Take index 1 directly; the "meteor" prefix itself is unused.)
      const packageName = id.split('/', 2)[1];
      throw new Error(
        `Cannot find package "${packageName}". ` +
        `Try "meteor add ${packageName}".`
      );
    }
    if (typeof Npm === "object" &&
        typeof Npm.require === "function") {
      return Npm.require(id, error);
    }
  }

  throw error;
};

// Resolution counterpart of the fallback above.
//
// @returns {String} the identifier itself for top-level ids, so the
//   runtime fallback gets a chance to Npm.require it later
// @throws the original resolution error for relative/absolute ids
makeInstallerOptions.fallback.resolve = function (id, parentId, error) {
  if (topLevelIdPattern.test(id)) {
    // Allow any top-level identifier to resolve to itself on the server,
    // so that makeInstallerOptions.fallback has a chance to handle it.
    return id;
  }

  throw error;
};
meteorInstall = makeInstaller(makeInstallerOptions);

var Module;
// If the "modules" package has already run, it set up reify on the
// Module prototype in modules-runtime; borrow its `link` implementation
// so modules installed through this installer can still link imports.
// (Leftover console.log debug statements removed.)
if (Package['modules-runtime']) {
  Module = Package['modules-runtime'].meteorInstall.Module;
  meteorInstall.Module.prototype.link = Module.prototype.link;
} else {
  Module = meteorInstall.Module;
}
// Attempt to evaluate this module with Node's own require instead of
// the bundled copy. Returns true (and sets this.exports) on success,
// or false when Node evaluation isn't possible or appropriate.
Module.prototype.useNode = function () {
  // Can't use Node if npmRequire is not defined.
  if (typeof npmRequire !== "function") {
    return false;
  }

  var segments = this.id.split("/");
  var start = 0;

  // Skip the empty leading segment produced by an absolute id ("/...").
  if (segments[start] === "") {
    ++start;
  }

  // Skip the "node_modules/meteor" prefix used by Meteor package ids.
  if (segments[start] === "node_modules" &&
      segments[start + 1] === "meteor") {
    start += 2;
  }

  // Don't try to use Node for modules that aren't in node_modules
  // directories.
  if (segments.indexOf("node_modules", start) < 0) {
    return false;
  }

  // Bail out quietly if Node itself can't resolve the id.
  try {
    npmRequire.resolve(this.id);
  } catch (e) {
    return false;
  }

  // See tools/static-assets/server/npm-require.js for the implementation
  // of npmRequire. Note that this strategy fails when importing ESM
  // modules (typically, a .js file in a package with "type": "module" in
  // its package.json), as of Node 12.16.0 (Meteor 1.9.1).
  this.exports = npmRequire(this.id);

  return true;
};

View File

@@ -0,0 +1,3 @@
const EventEmitter = require('events');
export default new EventEmitter();

View File

@@ -1026,8 +1026,14 @@ class Target {
add(usedUnibuild);
delete onStack[usedUnibuild.id];
};
let uses = [...unibuild.uses]
if (unibuild.id.startsWith('modules.main')) {
uses.push({
package: 'zodern:modules-runtime-hot'
});
}
compiler.eachUsedUnibuild({
dependencies: unibuild.uses,
dependencies: uses,
arch: this.arch,
isopackCache: isopackCache,
skipUnordered: true,

View File

@@ -20,6 +20,7 @@ var Console = require('../console/console.js').Console;
var Profile = require('../tool-env/profile').Profile;
import { requestGarbageCollection } from "../utils/gc.js";
import { Unibuild } from "./unibuild.js";
import buildEvents from './build-events.js';
var rejectBadPath = function (p) {
if (p.match(/\.\./)) {
@@ -790,6 +791,10 @@ _.extend(Isopack.prototype, {
});
},
_onPreLinked: function (handler) {
buildEvents.on('prelinked', handler);
},
nudge: function () {
Console.nudge(true);
},

View File

@@ -9,6 +9,7 @@ import { sourceMapLength } from '../utils/utils.js';
import files from '../fs/files';
import { findAssignedGlobals } from './js-analyze.js';
import { convert as convertColons } from '../utils/colon-converter.js';
import buildEvents from './build-events.js';
// A rather small cache size, assuming only one module is being linked
// most of the time.
@@ -1103,6 +1104,17 @@ export var fullLink = Profile("linker.fullLink", function (inputFiles, {
var prelinkedFiles = module.getPrelinkedFiles();
Profile.time('prelinkedEvent', () => {
buildEvents.emit('prelinked', {
isApp,
bundleArch,
name,
imports,
module,
prelinkedFiles
});
});
// If we're in the app, then we just add the import code as its own file in
// the front.
if (isApp) {

View File

@@ -665,6 +665,17 @@ _.extend(PackageSource.prototype, {
_.each(api.implies[label], doNotDepOnSelf);
});
if (self.name === 'modules') {
// Since we can't use a forked version of modules
// we add a dependency to patch it
api.uses['web.browser'].push({
package: 'zodern:modules-runtime-hot',
constraint: '',
unordered: false,
weak: false
});
}
// Cause packages that use `prodOnly` to automatically depend on the
// `isobuild:prod-only` feature package, which will cause an error
// when a package using `prodOnly` is run by a version of the tool