diff --git a/packages/spark/spark.js b/packages/spark/spark.js index 20020f5827..ae998498f3 100644 --- a/packages/spark/spark.js +++ b/packages/spark/spark.js @@ -15,11 +15,14 @@ // XXX s/render/rendered/ (etc) in landmarks? -// XXX consider how new patching semantics have changed, eg, -// leaderboard. must create a landmark (with legacyLabels) inside each -// item for patching to work, because the outer landmark with -// legacyLabels isn't getting rerendered. (if that's actually what's -// going on with the test failure) +// XXX in computePreservations, also need to generate preservations +// based on enclosing landmarks that haven't changed! + +// XXX david's idea: DomUtils.find takes two node arguments, the +// search root but also the virtual root for the selector + +// XXX specify flush order someday (context dependencies? is this in +// the domain of spark -- overdraw concerns?) (function() { @@ -477,6 +480,34 @@ Spark.isolate = function (htmlFunc) { /* Lists */ /******************************************************************************/ +// Run 'f' at flush()-time. If atFlushTime is called multiple times, +// we guarantee that the 'f's will run in the order of their +// respective atFlushTime calls. +// +// XXX either break this out into a separate package or fold it into +// deps +var atFlushQueue = []; +var atFlushContext = null; +var atFlushTime = function (f) { + atFlushQueue.push(f); + + if (! atFlushContext) { + atFlushContext = new Meteor.deps.Context; + atFlushContext.on_invalidate(function () { + var f; + while (f = atFlushQueue.shift()) { + // Since atFlushContext is truthy, if f() calls atFlushTime + // reentrantly, it's guaranteed to append to atFlushQueue and + // not construct a new atFlushContext. 
+ f(); + } + atFlushContext = null; + }); + + atFlushContext.invalidate(); + } +}; + Spark.list = function (cursor, itemFunc, elseFunc) { elseFunc = elseFunc || function () { return ''; }; @@ -508,8 +539,24 @@ Spark.list = function (cursor, itemFunc, elseFunc) { Spark.finalize(outerRange.replace_contents(frag)); }; + // Decorator. If we're rendering the list for the first time, call + // the function immediately. Otherwise, defer it to flush time. + var maybeDefer = function (func) { + return function (/* arguments */) { + var args = _.toArray(arguments); + var callFunc = function () { + func.apply(null, args); + }; + + if (! handle) + callFunc(); + else + atFlushTime(callFunc); + }; + }; + var handle = cursor.observe({ - added: function (item, beforeIndex) { + added: maybeDefer(function (item, beforeIndex) { var frag = Spark.render(_.bind(itemFunc, null, item)); DomUtils.wrapFragmentForContainer(frag, outerRange.containerNode()); var range = new LiveRange(Spark._TAG, frag); @@ -523,16 +570,16 @@ Spark.list = function (cursor, itemFunc, elseFunc) { } itemRanges.splice(beforeIndex, 0, range); - }, - removed: function (item, atIndex) { + }), + removed: maybeDefer(function (item, atIndex) { if (itemRanges.length === 1) replaceWithElse(); else Spark.finalize(itemRanges[atIndex].extract()); itemRanges.splice(atIndex, 1); - }, - moved: function (item, oldIndex, newIndex) { + }), + moved: maybeDefer(function (item, oldIndex, newIndex) { if (oldIndex === newIndex) return; @@ -544,12 +591,12 @@ Spark.list = function (cursor, itemFunc, elseFunc) { itemRanges[newIndex].insert_before(frag); itemRanges.splice(newIndex, 0, range); - }, - changed: function (item, atIndex) { + }), + changed: maybeDefer(function (item, atIndex) { var frag = Spark.render(_.bind(itemFunc, null, item)); DomUtils.wrapFragmentForContainer(frag, outerRange.containerNode()); replaceContentsPreservingLandmarks(itemRanges[atIndex], frag); - } + }) }); if (! 
itemRanges.length) diff --git a/packages/spark/spark_tests.js b/packages/spark/spark_tests.js index 14c4e58068..9bd1e65a5b 100644 --- a/packages/spark/spark_tests.js +++ b/packages/spark/spark_tests.js @@ -1721,21 +1721,21 @@ Tinytest.add("spark - leaderboard", function(test) { var html = Spark.list( players.find({}, {sort: {score: -1}}), function(player) { - var html = Spark.isolate(function () { + return Spark.isolate(function () { var style; if (selected_player.get() === player._id) style = "player selected"; else style = "player"; - return '
' + + var html = '
' + '
' + player.name + '
' + '
' + player.score + '
'; + html = Spark.setDataContext(player, html); + html = Spark.createLandmark({preserve: legacyLabels}, html); + html = Spark.labelBranch(player._id, html); + return html; }); - html = Spark.setDataContext(player, html); - html = Spark.createLandmark({preserve: legacyLabels}, html); - html = Spark.labelBranch(player._id, html); - return html; }); html = Spark.attachEvents({ "click": function () { @@ -1872,13 +1872,13 @@ Tinytest.add("spark - list table", function(test) { buf.push(Spark.list( c.find({}, {sort: ['order']}), function(doc) { - var html = Spark.isolate(function () { - return ""+doc.value + (doc.reactive ? R.get() : '')+ + return Spark.isolate(function () { + var html = ""+doc.value + (doc.reactive ? R.get() : '')+ ""; + html = Spark.createLandmark({preserve: legacyLabels}, html); + html = Spark.labelBranch(doc._id, html); + return html; }); - html = Spark.createLandmark({preserve: legacyLabels}, html); - html = Spark.labelBranch(doc._id, html); - return html; }, function() { return "(nothing)"; @@ -2650,9 +2650,9 @@ Tinytest.add("spark - oldschool landmark matching", function(test) { R = ReactiveVar("A"); div = OnscreenDiv(Meteor.render(function() { R.get(); - var html = Spark.createLandmark(testCallbacks(1), html); + var html = Spark.createLandmark(testCallbacks(1), "HI"); html = Spark.labelBranch("foo", html); - html = "
" + "
"; + html = "
" + html + "
"; html = Spark.createLandmark(testCallbacks(0), html); return html; })); @@ -2661,12 +2661,12 @@ Tinytest.add("spark - oldschool landmark matching", function(test) { Meteor.flush(); // what order of chunks {0,1} is preferable?? // should be consistent but I'm not sure what makes most sense. - test.equal(buf, "c1,on1,c0,on0".split(',')); + test.equal(buf, "c0,on0,c1,on1".split(',')); buf.length = 0; R.set("B"); Meteor.flush(); - test.equal(buf, "on1,on0".split(',')); + test.equal(buf, "on0,on1".split(',')); buf.length = 0; div.kill(); @@ -2687,7 +2687,7 @@ Tinytest.add("spark - oldschool branch keys", function(test) { div = OnscreenDiv(Meteor.render(function() { var html = R.get(); html = Spark.createLandmark({ - rendered: function () { objs.push(true); } + render: function () { objs.push(true); } }, html); return html; })); @@ -2712,7 +2712,7 @@ Tinytest.add("spark - oldschool branch keys", function(test) { var testCallbacks = function(theNum /*, extend opts*/) { return _.extend.apply(_, [{ - created: function() { + create: function() { this.num = String(theNum); var howManyBefore = counts[this.num] || 0; counts[this.num] = howManyBefore + 1; @@ -2720,16 +2720,18 @@ Tinytest.add("spark - oldschool branch keys", function(test) { this.num += "*"; // add stars buf.push("c"+this.num); }, - onscreen: function(start, end, range) { + render: function(start, end, range) { buf.push("on"+this.num); }, - offscreen: function() { + destroy: function() { buf.push("off"+this.num); } }].concat(_.toArray(arguments).slice(1))); }; + var counter = 1; var chunk = function(contents, num, branch) { + var html; if (typeof contents === "string") html = contents; else if (_.isArray(contents)) @@ -2741,8 +2743,12 @@ Tinytest.add("spark - oldschool branch keys", function(test) { else html = contents(); + if (branch === null) + branch = "unique_branch_" + (counter++); + html = Spark.createLandmark(testCallbacks(num), html); html = Spark.labelBranch(branch, html); + return html; }; ///// 
Chunk 1 contains 2,3,4, all should be matched @@ -2792,7 +2798,7 @@ Tinytest.add("spark - oldschool branch keys", function(test) { return "no chunk!"; else return chunk([['apple', 2, 'x'], - ['banana', 3, ''], + ['banana', 3, null], ['kiwi', 4, 'z'] ], 1, 'fruit'); }));