Merge branch 'minimongo-separation' into devel

This commit is contained in:
Slava Kim
2015-06-12 11:33:17 -07:00
26 changed files with 623 additions and 551 deletions

View File

@@ -0,0 +1,6 @@
// An IdMap specialized to Mongo-style document ids (strings and
// MongoID.ObjectID instances): keys are canonicalized through
// MongoID's stringify/parse pair so equivalent ids collide correctly.
MongoIDMap = function () {
  IdMap.call(this, MongoID.idStringify, MongoID.idParse);
};
Meteor._inherits(MongoIDMap, IdMap);

View File

@@ -936,7 +936,7 @@ _.extend(Connection.prototype, {
originals.forEach(function (doc, id) {
docsWritten.push({collection: collection, id: id});
if (!_.has(self._serverDocuments, collection))
self._serverDocuments[collection] = new LocalCollection._IdMap;
self._serverDocuments[collection] = new MongoIDMap;
var serverDoc = self._serverDocuments[collection].setDefault(id, {});
if (serverDoc.writtenByStubs) {
// We're not the first stub to write this doc. Just add our method ID
@@ -1274,7 +1274,7 @@ _.extend(Connection.prototype, {
_process_added: function (msg, updates) {
var self = this;
var id = LocalCollection._idParse(msg.id);
var id = MongoID.idParse(msg.id);
var serverDoc = self._getServerDoc(msg.collection, id);
if (serverDoc) {
// Some outstanding stub wrote here.
@@ -1290,11 +1290,11 @@ _.extend(Connection.prototype, {
_process_changed: function (msg, updates) {
var self = this;
var serverDoc = self._getServerDoc(
msg.collection, LocalCollection._idParse(msg.id));
msg.collection, MongoID.idParse(msg.id));
if (serverDoc) {
if (serverDoc.document === undefined)
throw new Error("Server sent changed for nonexisting id: " + msg.id);
LocalCollection._applyChanges(serverDoc.document, msg.fields);
DiffSequence.applyChanges(serverDoc.document, msg.fields);
} else {
self._pushUpdate(updates, msg.collection, msg);
}
@@ -1303,7 +1303,7 @@ _.extend(Connection.prototype, {
_process_removed: function (msg, updates) {
var self = this;
var serverDoc = self._getServerDoc(
msg.collection, LocalCollection._idParse(msg.id));
msg.collection, MongoID.idParse(msg.id));
if (serverDoc) {
// Some outstanding stub wrote here.
if (serverDoc.document === undefined)
@@ -1341,7 +1341,7 @@ _.extend(Connection.prototype, {
// the ID because it's supposed to look like a wire message.)
self._pushUpdate(updates, written.collection, {
msg: 'replace',
id: LocalCollection._idStringify(written.id),
id: MongoID.idStringify(written.id),
replace: serverDoc.document
});
// Call all flush callbacks.

View File

@@ -437,7 +437,7 @@ if (Meteor.isClient) {
test.equal(counts, {added: 1, removed: 0, changed: 0, moved: 0});
// data methods do not show up (not quiescent yet)
stream.receive({msg: 'added', collection: collName, id: LocalCollection._idStringify(docId),
stream.receive({msg: 'added', collection: collName, id: MongoID.idStringify(docId),
fields: {value: 'tuesday'}});
test.equal(coll.find({}).count(), 1);
test.equal(coll.find({value: 'friday!'}).count(), 1);
@@ -569,7 +569,7 @@ if (Meteor.isClient) {
// get data from the method. data from this doc does not show up yet, but data
// from another doc does.
stream.receive({msg: 'added', collection: coll_name, id: LocalCollection._idStringify(docId),
stream.receive({msg: 'added', collection: coll_name, id: MongoID.idStringify(docId),
fields: {value: 'tuesday'}});
o.expectCallbacks();
test.equal(coll.findOne(docId), {_id: docId, a: 1});
@@ -785,7 +785,7 @@ if (Meteor.isClient) {
// Get some data.
stream.receive({msg: 'added', collection: collName,
id: LocalCollection._idStringify(stubWrittenId), fields: {baz: 42}});
id: MongoID.idStringify(stubWrittenId), fields: {baz: 42}});
// It doesn't show up yet.
test.equal(coll.find().count(), 1);
test.equal(coll.findOne(stubWrittenId), {_id: stubWrittenId, foo: 'bar'});
@@ -820,7 +820,7 @@ if (Meteor.isClient) {
test.equal(callbackOutput, ['bla']);
test.equal(onResultReceivedOutput, ['bla']);
stream.receive({msg: 'added', collection: collName,
id: LocalCollection._idStringify(stubWrittenId), fields: {baz: 42}});
id: MongoID.idStringify(stubWrittenId), fields: {baz: 42}});
test.equal(coll.findOne(stubWrittenId), {_id: stubWrittenId, baz: 42});
o.expectCallbacks({added: 1});
@@ -852,7 +852,7 @@ if (Meteor.isClient) {
// Get some data.
stream.receive({msg: 'added', collection: collName,
id: LocalCollection._idStringify(stubWrittenId2), fields: {baz: 42}});
id: MongoID.idStringify(stubWrittenId2), fields: {baz: 42}});
// It doesn't show up yet.
test.equal(coll.find().count(), 2);
test.equal(coll.findOne(stubWrittenId2), {_id: stubWrittenId2, foo: 'bar'});
@@ -896,7 +896,7 @@ if (Meteor.isClient) {
// Receive data matching our stub. It doesn't take effect yet.
stream.receive({msg: 'added', collection: collName,
id: LocalCollection._idStringify(stubWrittenId2), fields: {foo: 'bar'}});
id: MongoID.idStringify(stubWrittenId2), fields: {foo: 'bar'}});
o.expectCallbacks();
// slowMethod is done writing, so we get full reconnect quiescence (but no
@@ -1051,7 +1051,7 @@ if (Meteor.isClient) {
// Get some data... slightly different than what we wrote.
stream.receive({msg: 'added', collection: collName,
id: LocalCollection._idStringify(stubWrittenId), fields: {foo: 'barb', other: 'field',
id: MongoID.idStringify(stubWrittenId), fields: {foo: 'barb', other: 'field',
other2: 'bla'}});
// It doesn't show up yet.
test.equal(coll.find().count(), 1);
@@ -1069,7 +1069,7 @@ if (Meteor.isClient) {
// More data. Not quite what we wrote. Also ignored for now.
stream.receive({msg: 'changed', collection: collName,
id: LocalCollection._idStringify(stubWrittenId), fields: {baz: 43}, cleared: ['other']});
id: MongoID.idStringify(stubWrittenId), fields: {baz: 43}, cleared: ['other']});
test.equal(coll.find().count(), 1);
test.equal(coll.findOne(stubWrittenId),
{_id: stubWrittenId, foo: 'bar', baz: 42});

View File

@@ -10,7 +10,7 @@ Npm.depends({
Package.onUse(function (api) {
api.use(['check', 'random', 'ejson', 'json', 'underscore', 'tracker',
'logging', 'retry'],
'logging', 'retry', 'id-map'],
['client', 'server']);
// common functionality
@@ -18,12 +18,14 @@ Package.onUse(function (api) {
api.use('reload', 'client', {weak: true});
// we depend on LocalCollection._diffObjects, _applyChanges,
// we depend on _diffObjects, _applyChanges,
api.use('diff-sequence', ['client', 'server']);
// _idParse, _idStringify.
api.use('minimongo', ['client', 'server']);
api.use('mongo-id', ['client', 'server']);
api.addFiles('namespace.js', ['client', 'server']);
api.addFiles('id_map.js', ['client', 'server']);
api.addFiles(['sockjs-0.3.4.js', 'stream_client_sockjs.js'], 'client');
api.addFiles('stream_client_nodejs.js', 'server');
api.addFiles('stream_client_common.js', ['client', 'server']);
@@ -44,7 +46,7 @@ Package.onTest(function (api) {
api.use('livedata', ['client', 'server']);
api.use('mongo', ['client', 'server']);
api.use('test-helpers', ['client', 'server']);
api.use(['underscore', 'tinytest', 'random', 'tracker', 'minimongo', 'reactive-var']);
api.use(['underscore', 'tinytest', 'random', 'tracker', 'reactive-var', 'mongo-id', 'diff-sequence']);
api.addFiles('stub_stream.js');
api.addFiles('livedata_connection_tests.js', ['client', 'server']);

View File

@@ -139,8 +139,8 @@ _.extend(DDPServer._Crossbar.prototype, {
notification.id !== trigger.id) {
return false;
}
if (notification.id instanceof LocalCollection._ObjectID &&
trigger.id instanceof LocalCollection._ObjectID &&
if (notification.id instanceof MongoID.ObjectID &&
trigger.id instanceof MongoID.ObjectID &&
! notification.id.equals(trigger.id)) {
return false;
}

View File

@@ -125,7 +125,7 @@ _.extend(SessionCollectionView.prototype, {
diff: function (previous) {
var self = this;
LocalCollection._diffObjects(previous.documents, self.documents, {
DiffSequence.diffObjects(previous.documents, self.documents, {
both: _.bind(self.diffDocument, self),
rightOnly: function (id, nowDV) {
@@ -141,7 +141,7 @@ _.extend(SessionCollectionView.prototype, {
diffDocument: function (id, prevDV, nowDV) {
var self = this;
var fields = {};
LocalCollection._diffObjects(prevDV.getFields(), nowDV.getFields(), {
DiffSequence.diffObjects(prevDV.getFields(), nowDV.getFields(), {
both: function (key, prev, now) {
if (!EJSON.equals(prev, now))
fields[key] = now;
@@ -675,7 +675,7 @@ _.extend(Session.prototype, {
_diffCollectionViews: function (beforeCVs) {
var self = this;
LocalCollection._diffObjects(beforeCVs, self.collectionViews, {
DiffSequence.diffObjects(beforeCVs, self.collectionViews, {
both: function (collectionName, leftValue, rightValue) {
rightValue.diff(leftValue);
},
@@ -919,7 +919,7 @@ var Subscription = function (
self.userId = session.userId;
// For now, the id filter is going to default to
// the to/from DDP methods on LocalCollection, to
// the to/from DDP methods on MongoID, to
// specifically deal with mongo/minimongo ObjectIds.
// Later, you will be able to make this be "raw"
@@ -928,8 +928,8 @@ var Subscription = function (
// a ddp consumer that isn't minimongo
self._idFilter = {
idStringify: LocalCollection._idStringify,
idParse: LocalCollection._idParse
idStringify: MongoID.idStringify,
idParse: MongoID.idParse
};
Package.facts && Package.facts.Facts.incrementServerFact(

View File

@@ -10,7 +10,7 @@ Npm.depends({
Package.onUse(function (api) {
api.use(['check', 'random', 'ejson', 'json', 'underscore', 'tracker',
'logging', 'retry'],
'logging', 'retry', 'mongo-id', 'diff-sequence'],
['client', 'server']);
// common functionality

View File

@@ -0,0 +1,251 @@
DiffSequence = {};

// Diff two query result sets, dispatching to the ordered or unordered
// routine.
//
// ordered: bool.
// oldResults and newResults: collections of documents.
//    if ordered, they are arrays.
//    if unordered, they are IdMaps.
// observer: object of optional callbacks (added/changed/removed/...).
// options: optional; may supply a projectionFn.
DiffSequence.diffQueryChanges = function (ordered, oldResults, newResults,
                                          observer, options) {
  var diffFn = ordered ?
        DiffSequence.diffQueryOrderedChanges :
        DiffSequence.diffQueryUnorderedChanges;
  diffFn(oldResults, newResults, observer, options);
};
// Diff two unordered result sets (IdMaps of id -> document) and invoke
// observer callbacks describing how to get from oldResults to
// newResults.
//
// oldResults, newResults: IdMaps mapping id -> document.
// observer: object with optional added/changed/removed callbacks.
//   movedBefore is meaningless for unordered results and is rejected.
// options.projectionFn: applied to each doc before computing changed
//   fields; defaults to EJSON.clone (the identity projection).
DiffSequence.diffQueryUnorderedChanges = function (oldResults, newResults,
                                                   observer, options) {
  options = options || {};
  var projectionFn = options.projectionFn || EJSON.clone;

  if (observer.movedBefore) {
    // Fix: the old message referenced the pre-extraction name
    // "_diffQueryUnordered"; report the actual function name.
    throw new Error(
      "diffQueryUnorderedChanges called with a movedBefore observer!");
  }

  newResults.forEach(function (newDoc, id) {
    var oldDoc = oldResults.get(id);
    if (oldDoc) {
      // Present in both: fire changed only if the projected documents
      // actually differ.
      if (observer.changed && !EJSON.equals(oldDoc, newDoc)) {
        var projectedNew = projectionFn(newDoc);
        var projectedOld = projectionFn(oldDoc);
        var changedFields =
              DiffSequence.makeChangedFields(projectedNew, projectedOld);
        if (! _.isEmpty(changedFields)) {
          observer.changed(id, changedFields);
        }
      }
    } else if (observer.added) {
      // Only in newResults: report as added, with _id stripped from the
      // fields object (the id travels as a separate argument).
      var fields = projectionFn(newDoc);
      delete fields._id;
      observer.added(newDoc._id, fields);
    }
  });

  if (observer.removed) {
    // Anything in oldResults that no longer appears was removed.
    oldResults.forEach(function (oldDoc, id) {
      if (!newResults.has(id))
        observer.removed(id);
    });
  }
};
// Diff two ordered result sets (arrays of documents) and invoke observer
// callbacks (removed / addedBefore / added / changed / movedBefore)
// describing how to transform old_results into new_results.
//
// old_results, new_results: arrays of documents, each with an _id.
// observer: object of optional callbacks; each is invoked only if present.
// options.projectionFn: applied to docs before computing changed fields;
//   defaults to EJSON.clone (identity projection).
DiffSequence.diffQueryOrderedChanges = function (old_results, new_results,
                                                 observer, options) {
  options = options || {};
  var projectionFn = options.projectionFn || EJSON.clone;

  // Set of ids present in new_results (object keyed by stringified _id).
  var new_presence_of_id = {};
  _.each(new_results, function (doc) {
    if (new_presence_of_id[doc._id])
      Meteor._debug("Duplicate _id in new_results");
    new_presence_of_id[doc._id] = true;
  });

  // Map from _id to that document's index in old_results.
  var old_index_of_id = {};
  _.each(old_results, function (doc, i) {
    if (doc._id in old_index_of_id)
      Meteor._debug("Duplicate _id in old_results");
    old_index_of_id[doc._id] = i;
  });

  // ALGORITHM:
  //
  // To determine which docs should be considered "moved" (and which
  // merely change position because of other docs moving) we run
  // a "longest common subsequence" (LCS) algorithm.  The LCS of the
  // old doc IDs and the new doc IDs gives the docs that should NOT be
  // considered moved.

  // To actually call the appropriate callbacks to get from the old state to the
  // new state:

  // First, we call removed() on all the items that only appear in the old
  // state.

  // Then, once we have the items that should not move, we walk through the new
  // results array group-by-group, where a "group" is a set of items that have
  // moved, anchored on the end by an item that should not move.  One by one, we
  // move each of those elements into place "before" the anchoring end-of-group
  // item, and fire changed events on them if necessary.  Then we fire a changed
  // event on the anchor, and move on to the next group.  There is always at
  // least one group; the last group is anchored by a virtual "null" id at the
  // end.

  // Asymptotically: O(N k) where k is number of ops, or potentially
  // O(N log N) if inner loop of LCS were made to be binary search.

  //////// LCS (longest common sequence, with respect to _id)
  // (see Wikipedia article on Longest Increasing Subsequence,
  // where the LIS is taken of the sequence of old indices of the
  // docs in new_results)
  //
  // unmoved: the output of the algorithm; members of the LCS,
  // in the form of indices into new_results
  var unmoved = [];
  // max_seq_len: length of LCS found so far
  var max_seq_len = 0;
  // seq_ends[i]: the index into new_results of the last doc in a
  // common subsequence of length of i+1 <= max_seq_len
  var N = new_results.length;
  var seq_ends = new Array(N);
  // ptrs: the common subsequence ending with new_results[n] extends
  // a common subsequence ending with new_results[ptr[n]], unless
  // ptr[n] is -1.
  var ptrs = new Array(N);
  // virtual sequence of old indices of new results
  var old_idx_seq = function(i_new) {
    return old_index_of_id[new_results[i_new]._id];
  };
  // for each item in new_results, use it to extend a common subsequence
  // of length j <= max_seq_len
  for(var i=0; i<N; i++) {
    if (old_index_of_id[new_results[i]._id] !== undefined) {
      var j = max_seq_len;
      // this inner loop would traditionally be a binary search,
      // but scanning backwards we will likely find a subseq to extend
      // pretty soon, bounded for example by the total number of ops.
      // If this were to be changed to a binary search, we'd still want
      // to scan backwards a bit as an optimization.
      while (j > 0) {
        if (old_idx_seq(seq_ends[j-1]) < old_idx_seq(i))
          break;
        j--;
      }
      ptrs[i] = (j === 0 ? -1 : seq_ends[j-1]);
      seq_ends[j] = i;
      if (j+1 > max_seq_len)
        max_seq_len = j+1;
    }
  }

  // pull out the LCS/LIS into unmoved
  var idx = (max_seq_len === 0 ? -1 : seq_ends[max_seq_len-1]);
  while (idx >= 0) {
    unmoved.push(idx);
    idx = ptrs[idx];
  }
  // the unmoved item list is built backwards, so fix that
  unmoved.reverse();

  // the last group is always anchored by the end of the result list, which is
  // an id of "null"
  unmoved.push(new_results.length);

  // Fire removed() for everything that only appears in the old results.
  _.each(old_results, function (doc) {
    if (!new_presence_of_id[doc._id])
      observer.removed && observer.removed(doc._id);
  });

  // for each group of things in the new_results that is anchored by an unmoved
  // element, iterate through the things before it.
  var startOfGroup = 0;
  _.each(unmoved, function (endOfGroup) {
    // groupId is the anchor's id, or null for the virtual end-of-list anchor.
    var groupId = new_results[endOfGroup] ? new_results[endOfGroup]._id : null;
    var oldDoc, newDoc, fields, projectedNew, projectedOld;
    for (var i = startOfGroup; i < endOfGroup; i++) {
      newDoc = new_results[i];
      if (!_.has(old_index_of_id, newDoc._id)) {
        // Brand-new document: announce it before the group's anchor.
        fields = projectionFn(newDoc);
        delete fields._id;
        observer.addedBefore && observer.addedBefore(newDoc._id, fields, groupId);
        observer.added && observer.added(newDoc._id, fields);
      } else {
        // moved
        oldDoc = old_results[old_index_of_id[newDoc._id]];
        projectedNew = projectionFn(newDoc);
        projectedOld = projectionFn(oldDoc);
        fields = DiffSequence.makeChangedFields(projectedNew, projectedOld);
        if (!_.isEmpty(fields)) {
          observer.changed && observer.changed(newDoc._id, fields);
        }
        observer.movedBefore && observer.movedBefore(newDoc._id, groupId);
      }
    }
    // Finally, fire changed (if needed) on the unmoved anchor itself.
    if (groupId) {
      newDoc = new_results[endOfGroup];
      oldDoc = old_results[old_index_of_id[newDoc._id]];
      projectedNew = projectionFn(newDoc);
      projectedOld = projectionFn(oldDoc);
      fields = DiffSequence.makeChangedFields(projectedNew, projectedOld);
      if (!_.isEmpty(fields)) {
        observer.changed && observer.changed(newDoc._id, fields);
      }
    }
    startOfGroup = endOfGroup+1;
  });
};
// General helper for diff-ing two objects.
// callbacks is an object like so:
// { leftOnly: function (key, leftValue) {...},
// rightOnly: function (key, rightValue) {...},
// both: function (key, leftValue, rightValue) {...},
// }
// General helper for diff-ing two objects.
// callbacks is an object like so:
// { leftOnly: function (key, leftValue) {...},
//   rightOnly: function (key, rightValue) {...},
//   both: function (key, leftValue, rightValue) {...},
// }
// Each callback is optional and only invoked if supplied.
DiffSequence.diffObjects = function (left, right, callbacks) {
  _.each(left, function (leftValue, key) {
    if (! _.has(right, key)) {
      if (callbacks.leftOnly)
        callbacks.leftOnly(key, leftValue);
    } else if (callbacks.both) {
      callbacks.both(key, leftValue, right[key]);
    }
  });
  if (! callbacks.rightOnly)
    return;
  _.each(right, function (rightValue, key) {
    if (! _.has(left, key))
      callbacks.rightOnly(key, rightValue);
  });
};
// Compute the DDP-style "changed fields" object that transforms oldDoc
// into newDoc: keys present only in oldDoc map to undefined (meaning
// "cleared"); keys that were added, or whose values differ by EJSON
// equality, map to their new value.
DiffSequence.makeChangedFields = function (newDoc, oldDoc) {
  var changed = {};
  var clearField = function (key) {
    changed[key] = undefined;
  };
  var setField = function (key, value) {
    changed[key] = value;
  };
  DiffSequence.diffObjects(oldDoc, newDoc, {
    leftOnly: clearField,
    rightOnly: setField,
    both: function (key, oldValue, newValue) {
      if (! EJSON.equals(oldValue, newValue))
        setField(key, newValue);
    }
  });
  return changed;
};
// Mutate doc in place according to changeFields (as produced by
// makeChangedFields): a value of undefined deletes the key; any other
// value overwrites or adds it.
DiffSequence.applyChanges = function (doc, changeFields) {
  _.each(changeFields, function (value, key) {
    if (value !== undefined) {
      doc[key] = value;
    } else {
      delete doc[key];
    }
  });
};

View File

@@ -0,0 +1,22 @@
// Package manifest for diff-sequence: the array/object diff algorithms
// extracted from minimongo.
Package.describe({
  summary: "An implementation of a diff algorithm on arrays and objects.",
  version: '1.0.0'
});

Package.onUse(function (api) {
  // DiffSequence is the package's single public symbol.
  api.export('DiffSequence');
  // diff.js relies on _ (underscore) and on EJSON.clone / EJSON.equals.
  api.use(['underscore', 'ejson']);
  api.addFiles([
    'diff.js'
  ]);
});

Package.onTest(function (api) {
  api.use('tinytest');
  api.use('diff-sequence');
  api.addFiles([
    'tests.js'
  ]);
});

View File

@@ -0,0 +1,160 @@
// Verifies diffQueryOrderedChanges by replaying its callbacks: apply the
// emitted addedBefore/movedBefore/removed operations to a copy of the old
// array and check that the copy ends up equal to the new array.
Tinytest.add("diff-sequence - diff changes ordering", function (test) {
  // Turn an array of ids into an array of minimal documents.
  var makeDocs = function (ids) {
    return _.map(ids, function (id) { return {_id: id};});
  };
  var testMutation = function (a, b) {
    var aa = makeDocs(a);
    var bb = makeDocs(b);
    var aaCopy = EJSON.clone(aa);
    DiffSequence.diffQueryOrderedChanges(aa, bb, {
      // Insert the new doc before the doc with id `before`
      // (or append when before is null, meaning end-of-list).
      addedBefore: function (id, doc, before) {
        if (before === null) {
          aaCopy.push( _.extend({_id: id}, doc));
          return;
        }
        for (var i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === before) {
            aaCopy.splice(i, 0, _.extend({_id: id}, doc));
            return;
          }
        }
      },
      // Remove the doc from its old position, then re-insert it before
      // `before` (or append when before is null).
      movedBefore: function (id, before) {
        var found;
        for (var i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === id) {
            found = aaCopy[i];
            aaCopy.splice(i, 1);
          }
        }
        if (before === null) {
          aaCopy.push( _.extend({_id: id}, found));
          return;
        }
        for (i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === before) {
            aaCopy.splice(i, 0, _.extend({_id: id}, found));
            return;
          }
        }
      },
      // Delete the doc with the given id from the copy.
      removed: function (id) {
        var found;
        for (var i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === id) {
            found = aaCopy[i];
            aaCopy.splice(i, 1);
          }
        }
      }
    });
    // Replaying the diff over the old array must reproduce the new one.
    test.equal(aaCopy, bb);
  };
  // Exercise the diff in both directions for symmetry.
  var testBothWays = function (a, b) {
    testMutation(a, b);
    testMutation(b, a);
  };

  testBothWays(["a", "b", "c"], ["c", "b", "a"]);
  testBothWays(["a", "b", "c"], []);
  testBothWays(["a", "b", "c"], ["e","f"]);
  testBothWays(["a", "b", "c", "d"], ["c", "b", "a"]);
  testBothWays(['A','B','C','D','E','F','G','H','I'],
               ['A','B','F','G','C','D','I','L','M','N','H']);
  testBothWays(['A','B','C','D','E','F','G','H','I'],['A','B','C','D','F','G','H','E','I']);
});
// Verifies diffQueryOrderedChanges on generated fixtures: old results are
// docs with ids 1..origLen; newOldIdx encodes the new ordering, where a
// negative entry means "same id (absolute value) but with a changed field".
// An observer replays every callback against a working copy and the test
// asserts the copy converges to the new results.
Tinytest.add("diff-sequence - diff", function (test) {

  // test correctness

  var diffTest = function(origLen, newOldIdx) {
    var oldResults = new Array(origLen);
    for (var i = 1; i <= origLen; i++)
      oldResults[i-1] = {_id: i};

    var newResults = _.map(newOldIdx, function(n) {
      var doc = {_id: Math.abs(n)};
      if (n < 0)
        doc.changed = true;   // negative index marks a "changed" doc
      return doc;
    });
    // Linear scan for the index of the doc with the given id; -1 if absent.
    var find = function (arr, id) {
      for (var i = 0; i < arr.length; i++) {
        if (EJSON.equals(arr[i]._id, id))
          return i;
      }
      return -1;
    };

    var results = _.clone(oldResults);
    var observer = {
      addedBefore: function(id, fields, before) {
        var before_idx;
        if (before === null)
          before_idx = results.length;  // null anchor means append
        else
          before_idx = find (results, before);
        var doc = _.extend({_id: id}, fields);
        test.isFalse(before_idx < 0 || before_idx > results.length);
        results.splice(before_idx, 0, doc);
      },
      removed: function(id) {
        var at_idx = find (results, id);
        test.isFalse(at_idx < 0 || at_idx >= results.length);
        results.splice(at_idx, 1);
      },
      changed: function(id, fields) {
        var at_idx = find (results, id);
        var oldDoc = results[at_idx];
        var doc = EJSON.clone(oldDoc);
        DiffSequence.applyChanges(doc, fields);
        test.isFalse(at_idx < 0 || at_idx >= results.length);
        // changed must never alter the _id.
        test.equal(doc._id, oldDoc._id);
        results[at_idx] = doc;
      },
      movedBefore: function(id, before) {
        var old_idx = find(results, id);
        var new_idx;
        if (before === null)
          new_idx = results.length;  // null anchor means move to end
        else
          new_idx = find (results, before);
        if (new_idx > old_idx)
          new_idx--;   // account for the removal shifting later indices
        test.isFalse(old_idx < 0 || old_idx >= results.length);
        test.isFalse(new_idx < 0 || new_idx >= results.length);
        results.splice(new_idx, 0, results.splice(old_idx, 1)[0]);
      }
    };

    DiffSequence.diffQueryOrderedChanges(oldResults, newResults, observer);
    // Replaying every callback must reconstruct the new results exactly.
    test.equal(results, newResults);
  };

  // edge cases and cases run into during debugging
  diffTest(5, [5, 1, 2, 3, 4]);
  diffTest(0, [1, 2, 3, 4]);
  diffTest(4, []);
  diffTest(7, [4, 5, 6, 7, 1, 2, 3]);
  diffTest(7, [5, 6, 7, 1, 2, 3, 4]);
  diffTest(10, [7, 4, 11, 6, 12, 1, 5]);
  diffTest(3, [3, 2, 1]);
  diffTest(10, [2, 7, 4, 6, 11, 3, 8, 9]);
  diffTest(0, []);
  diffTest(1, []);
  diffTest(0, [1]);
  diffTest(1, [1]);
  diffTest(5, [1, 2, 3, 4, 5]);

  // interaction between "changed" and other ops
  diffTest(5, [-5, -1, 2, -3, 4]);
  diffTest(7, [-4, -5, 6, 7, -1, 2, 3]);
  diffTest(7, [5, 6, -7, 1, 2, -3, 4]);
  diffTest(10, [7, -4, 11, 6, 12, -1, 5]);
  diffTest(3, [-3, -2, -1]);
  diffTest(10, [-2, 7, 4, 6, 11, -3, -8, 9]);
});

View File

@@ -2,220 +2,20 @@
// old_results and new_results: collections of documents.
// if ordered, they are arrays.
// if unordered, they are IdMaps
LocalCollection._diffQueryChanges = function (ordered, oldResults, newResults,
observer, options) {
if (ordered)
LocalCollection._diffQueryOrderedChanges(
oldResults, newResults, observer, options);
else
LocalCollection._diffQueryUnorderedChanges(
oldResults, newResults, observer, options);
LocalCollection._diffQueryChanges = function (ordered, oldResults, newResults, observer, options) {
return DiffSequence.diffQueryChanges(ordered, oldResults, newResults, observer, options);
};
LocalCollection._diffQueryUnorderedChanges = function (oldResults, newResults,
observer, options) {
options = options || {};
var projectionFn = options.projectionFn || EJSON.clone;
if (observer.movedBefore) {
throw new Error("_diffQueryUnordered called with a movedBefore observer!");
}
newResults.forEach(function (newDoc, id) {
var oldDoc = oldResults.get(id);
if (oldDoc) {
if (observer.changed && !EJSON.equals(oldDoc, newDoc)) {
var projectedNew = projectionFn(newDoc);
var projectedOld = projectionFn(oldDoc);
var changedFields =
LocalCollection._makeChangedFields(projectedNew, projectedOld);
if (! _.isEmpty(changedFields)) {
observer.changed(id, changedFields);
}
}
} else if (observer.added) {
var fields = projectionFn(newDoc);
delete fields._id;
observer.added(newDoc._id, fields);
}
});
if (observer.removed) {
oldResults.forEach(function (oldDoc, id) {
if (!newResults.has(id))
observer.removed(id);
});
}
LocalCollection._diffQueryUnorderedChanges = function (oldResults, newResults, observer, options) {
return DiffSequence.diffQueryUnorderedChanges(oldResults, newResults, observer, options);
};
LocalCollection._diffQueryOrderedChanges = function (old_results, new_results,
observer, options) {
options = options || {};
var projectionFn = options.projectionFn || EJSON.clone;
var new_presence_of_id = {};
_.each(new_results, function (doc) {
if (new_presence_of_id[doc._id])
Meteor._debug("Duplicate _id in new_results");
new_presence_of_id[doc._id] = true;
});
var old_index_of_id = {};
_.each(old_results, function (doc, i) {
if (doc._id in old_index_of_id)
Meteor._debug("Duplicate _id in old_results");
old_index_of_id[doc._id] = i;
});
// ALGORITHM:
//
// To determine which docs should be considered "moved" (and which
// merely change position because of other docs moving) we run
// a "longest common subsequence" (LCS) algorithm. The LCS of the
// old doc IDs and the new doc IDs gives the docs that should NOT be
// considered moved.
// To actually call the appropriate callbacks to get from the old state to the
// new state:
// First, we call removed() on all the items that only appear in the old
// state.
// Then, once we have the items that should not move, we walk through the new
// results array group-by-group, where a "group" is a set of items that have
// moved, anchored on the end by an item that should not move. One by one, we
// move each of those elements into place "before" the anchoring end-of-group
// item, and fire changed events on them if necessary. Then we fire a changed
// event on the anchor, and move on to the next group. There is always at
// least one group; the last group is anchored by a virtual "null" id at the
// end.
// Asymptotically: O(N k) where k is number of ops, or potentially
// O(N log N) if inner loop of LCS were made to be binary search.
//////// LCS (longest common sequence, with respect to _id)
// (see Wikipedia article on Longest Increasing Subsequence,
// where the LIS is taken of the sequence of old indices of the
// docs in new_results)
//
// unmoved: the output of the algorithm; members of the LCS,
// in the form of indices into new_results
var unmoved = [];
// max_seq_len: length of LCS found so far
var max_seq_len = 0;
// seq_ends[i]: the index into new_results of the last doc in a
// common subsequence of length of i+1 <= max_seq_len
var N = new_results.length;
var seq_ends = new Array(N);
// ptrs: the common subsequence ending with new_results[n] extends
// a common subsequence ending with new_results[ptr[n]], unless
// ptr[n] is -1.
var ptrs = new Array(N);
// virtual sequence of old indices of new results
var old_idx_seq = function(i_new) {
return old_index_of_id[new_results[i_new]._id];
};
// for each item in new_results, use it to extend a common subsequence
// of length j <= max_seq_len
for(var i=0; i<N; i++) {
if (old_index_of_id[new_results[i]._id] !== undefined) {
var j = max_seq_len;
// this inner loop would traditionally be a binary search,
// but scanning backwards we will likely find a subseq to extend
// pretty soon, bounded for example by the total number of ops.
// If this were to be changed to a binary search, we'd still want
// to scan backwards a bit as an optimization.
while (j > 0) {
if (old_idx_seq(seq_ends[j-1]) < old_idx_seq(i))
break;
j--;
}
ptrs[i] = (j === 0 ? -1 : seq_ends[j-1]);
seq_ends[j] = i;
if (j+1 > max_seq_len)
max_seq_len = j+1;
}
}
// pull out the LCS/LIS into unmoved
var idx = (max_seq_len === 0 ? -1 : seq_ends[max_seq_len-1]);
while (idx >= 0) {
unmoved.push(idx);
idx = ptrs[idx];
}
// the unmoved item list is built backwards, so fix that
unmoved.reverse();
// the last group is always anchored by the end of the result list, which is
// an id of "null"
unmoved.push(new_results.length);
_.each(old_results, function (doc) {
if (!new_presence_of_id[doc._id])
observer.removed && observer.removed(doc._id);
});
// for each group of things in the new_results that is anchored by an unmoved
// element, iterate through the things before it.
var startOfGroup = 0;
_.each(unmoved, function (endOfGroup) {
var groupId = new_results[endOfGroup] ? new_results[endOfGroup]._id : null;
var oldDoc, newDoc, fields, projectedNew, projectedOld;
for (var i = startOfGroup; i < endOfGroup; i++) {
newDoc = new_results[i];
if (!_.has(old_index_of_id, newDoc._id)) {
fields = projectionFn(newDoc);
delete fields._id;
observer.addedBefore && observer.addedBefore(newDoc._id, fields, groupId);
observer.added && observer.added(newDoc._id, fields);
} else {
// moved
oldDoc = old_results[old_index_of_id[newDoc._id]];
projectedNew = projectionFn(newDoc);
projectedOld = projectionFn(oldDoc);
fields = LocalCollection._makeChangedFields(projectedNew, projectedOld);
if (!_.isEmpty(fields)) {
observer.changed && observer.changed(newDoc._id, fields);
}
observer.movedBefore && observer.movedBefore(newDoc._id, groupId);
}
}
if (groupId) {
newDoc = new_results[endOfGroup];
oldDoc = old_results[old_index_of_id[newDoc._id]];
projectedNew = projectionFn(newDoc);
projectedOld = projectionFn(oldDoc);
fields = LocalCollection._makeChangedFields(projectedNew, projectedOld);
if (!_.isEmpty(fields)) {
observer.changed && observer.changed(newDoc._id, fields);
}
}
startOfGroup = endOfGroup+1;
});
LocalCollection._diffQueryOrderedChanges =
function (oldResults, newResults, observer, options) {
return DiffSequence.diffQueryOrderedChanges(oldResults, newResults, observer, options);
};
// General helper for diff-ing two objects.
// callbacks is an object like so:
// { leftOnly: function (key, leftValue) {...},
// rightOnly: function (key, rightValue) {...},
// both: function (key, leftValue, rightValue) {...},
// }
LocalCollection._diffObjects = function (left, right, callbacks) {
_.each(left, function (leftValue, key) {
if (_.has(right, key))
callbacks.both && callbacks.both(key, leftValue, right[key]);
else
callbacks.leftOnly && callbacks.leftOnly(key, leftValue);
});
if (callbacks.rightOnly) {
_.each(right, function(rightValue, key) {
if (!_.has(left, key))
callbacks.rightOnly(key, rightValue);
});
}
return DiffSequence.diffObjects(left, right, callbacks);
};

View File

@@ -1,6 +1,6 @@
LocalCollection._IdMap = function () {
var self = this;
IdMap.call(self, LocalCollection._idStringify, LocalCollection._idParse);
IdMap.call(self, MongoID.idStringify, MongoID.idParse);
};
Meteor._inherits(LocalCollection._IdMap, IdMap);

View File

@@ -40,15 +40,6 @@ Minimongo = {};
// Use it to export private functions to test in Tinytest.
MinimongoTest = {};
LocalCollection._applyChanges = function (doc, changeFields) {
_.each(changeFields, function (value, key) {
if (value === undefined)
delete doc[key];
else
doc[key] = value;
});
};
MinimongoError = function (message) {
var e = new Error(message);
e.name = "MinimongoError";
@@ -548,7 +539,7 @@ LocalCollection.prototype.insert = function (doc, callback) {
if (!_.has(doc, '_id')) {
// if you really want to use ObjectIDs, set this global.
// Mongo.Collection specifies its own ids and does not use this code.
doc._id = LocalCollection._useOID ? new LocalCollection._ObjectID()
doc._id = LocalCollection._useOID ? new MongoID.ObjectID()
: Random.id();
}
var id = doc._id;
@@ -873,7 +864,7 @@ LocalCollection._updateInResults = function (query, doc, old_doc) {
if (!EJSON.equals(doc._id, old_doc._id))
throw new Error("Can't change a doc's _id while updating");
var projectionFn = query.projectionFn;
var changedFields = LocalCollection._makeChangedFields(
var changedFields = DiffSequence.makeChangedFields(
projectionFn(doc), projectionFn(old_doc));
if (!query.ordered) {
@@ -1051,62 +1042,3 @@ LocalCollection.prototype.resumeObservers = function () {
self._observeQueue.drain();
};
// NB: used by livedata
LocalCollection._idStringify = function (id) {
if (id instanceof LocalCollection._ObjectID) {
return id.valueOf();
} else if (typeof id === 'string') {
if (id === "") {
return id;
} else if (id.substr(0, 1) === "-" || // escape previously dashed strings
id.substr(0, 1) === "~" || // escape escaped numbers, true, false
LocalCollection._looksLikeObjectID(id) || // escape object-id-form strings
id.substr(0, 1) === '{') { // escape object-form strings, for maybe implementing later
return "-" + id;
} else {
return id; // other strings go through unchanged.
}
} else if (id === undefined) {
return '-';
} else if (typeof id === 'object' && id !== null) {
throw new Error("Meteor does not currently support objects other than ObjectID as ids");
} else { // Numbers, true, false, null
return "~" + JSON.stringify(id);
}
};
// NB: used by livedata
LocalCollection._idParse = function (id) {
if (id === "") {
return id;
} else if (id === '-') {
return undefined;
} else if (id.substr(0, 1) === '-') {
return id.substr(1);
} else if (id.substr(0, 1) === '~') {
return JSON.parse(id.substr(1));
} else if (LocalCollection._looksLikeObjectID(id)) {
return new LocalCollection._ObjectID(id);
} else {
return id;
}
};
// Compute the "changed fields" object taking oldDoc to newDoc: a map from
// field name to its new value, with removed fields mapped to undefined.
// Fields whose value is EJSON-equal in both documents are omitted.
LocalCollection._makeChangedFields = function (newDoc, oldDoc) {
  var changed = {};
  LocalCollection._diffObjects(oldDoc, newDoc, {
    leftOnly: function (key, value) {
      // Only in oldDoc: the field was removed.
      changed[key] = undefined;
    },
    rightOnly: function (key, value) {
      // Only in newDoc: the field was added.
      changed[key] = value;
    },
    both: function (key, leftValue, rightValue) {
      // In both: record only if the value actually changed.
      if (!EJSON.equals(leftValue, rightValue))
        changed[key] = rightValue;
    }
  });
  return changed;
};

View File

@@ -628,7 +628,7 @@ Tinytest.add("minimongo - selector_compiler", function (test) {
match({a: {$type: 5}}, {a: EJSON.newBinary(4)});
nomatch({a: {$type: 5}}, {a: []});
nomatch({a: {$type: 5}}, {a: [42]});
match({a: {$type: 7}}, {a: new LocalCollection._ObjectID()});
match({a: {$type: 7}}, {a: new MongoID.ObjectID()});
nomatch({a: {$type: 7}}, {a: "1234567890abcd1234567890"});
match({a: {$type: 8}}, {a: true});
match({a: {$type: 8}}, {a: false});
@@ -1580,8 +1580,8 @@ Tinytest.add("minimongo - ordering", function (test) {
{b: {}}, {b: [1, 2, 3]}, {b: [1, 2, 4]},
[], [1, 2], [1, 2, 3], [1, 2, 4], [1, 2, "4"], [1, 2, [4]],
shortBinary, longBinary1, longBinary2,
new LocalCollection._ObjectID("1234567890abcd1234567890"),
new LocalCollection._ObjectID("abcd1234567890abcd123456"),
new MongoID.ObjectID("1234567890abcd1234567890"),
new MongoID.ObjectID("abcd1234567890abcd123456"),
false, true,
date1, date2
]);
@@ -2546,167 +2546,6 @@ _.each([true, false], function (ordered) {
});
// Verify the ordered diff algorithm: replay each emitted change callback
// against a copy of the old id sequence and check the copy ends up equal
// to the new sequence.
Tinytest.add("minimongo - diff changes ordering", function (test) {
  // Build an array of minimal documents from an array of ids.
  var makeDocs = function (ids) {
    return _.map(ids, function (id) { return {_id: id};});
  };
  // Diff `a` against `b`, applying every callback to a clone of `a`;
  // the clone must converge to `b`.
  var testMutation = function (a, b) {
    var aa = makeDocs(a);
    var bb = makeDocs(b);
    var aaCopy = EJSON.clone(aa);
    LocalCollection._diffQueryOrderedChanges(aa, bb, {
      addedBefore: function (id, doc, before) {
        // `before === null` means "append at the end".
        if (before === null) {
          aaCopy.push( _.extend({_id: id}, doc));
          return;
        }
        // Otherwise insert immediately before the doc whose _id is `before`.
        for (var i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === before) {
            aaCopy.splice(i, 0, _.extend({_id: id}, doc));
            return;
          }
        }
      },
      movedBefore: function (id, before) {
        // Remove the doc from its current position...
        var found;
        for (var i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === id) {
            found = aaCopy[i];
            aaCopy.splice(i, 1);
          }
        }
        // ...then re-insert it at the end, or before `before`.
        if (before === null) {
          aaCopy.push( _.extend({_id: id}, found));
          return;
        }
        for (i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === before) {
            aaCopy.splice(i, 0, _.extend({_id: id}, found));
            return;
          }
        }
      },
      removed: function (id) {
        // Delete the doc with the given id from the copy.
        var found;
        for (var i = 0; i < aaCopy.length; i++) {
          if (aaCopy[i]._id === id) {
            found = aaCopy[i];
            aaCopy.splice(i, 1);
          }
        }
      }
    });
    test.equal(aaCopy, bb);
  };
  // Exercise the diff in both directions.
  var testBothWays = function (a, b) {
    testMutation(a, b);
    testMutation(b, a);
  };
  testBothWays(["a", "b", "c"], ["c", "b", "a"]);
  testBothWays(["a", "b", "c"], []);
  testBothWays(["a", "b", "c"], ["e","f"]);
  testBothWays(["a", "b", "c", "d"], ["c", "b", "a"]);
  testBothWays(['A','B','C','D','E','F','G','H','I'],
               ['A','B','F','G','C','D','I','L','M','N','H']);
  testBothWays(['A','B','C','D','E','F','G','H','I'],['A','B','C','D','F','G','H','E','I']);
});
// Stress-test the ordered diff algorithm: replay the emitted observer
// callbacks against a copy of the old result set and assert it converges
// to the new result set, with index validity checked at every step.
Tinytest.add("minimongo - diff", function (test) {
  // test correctness
  //
  // origLen: size of the old result set (docs with _id 1..origLen).
  // newOldIdx: _ids of the new result set; a negative entry -n means doc n
  // is present and additionally marked `changed: true`.
  var diffTest = function(origLen, newOldIdx) {
    var oldResults = new Array(origLen);
    for (var i = 1; i <= origLen; i++)
      oldResults[i-1] = {_id: i};
    var newResults = _.map(newOldIdx, function(n) {
      var doc = {_id: Math.abs(n)};
      if (n < 0)
        doc.changed = true;
      return doc;
    });
    // Linear scan: index of the doc with the given _id, or -1 if absent.
    var find = function (arr, id) {
      for (var i = 0; i < arr.length; i++) {
        if (EJSON.equals(arr[i]._id, id))
          return i;
      }
      return -1;
    };
    var results = _.clone(oldResults);
    // Observer that replays each callback against `results`.
    var observer = {
      addedBefore: function(id, fields, before) {
        var before_idx;
        if (before === null)
          before_idx = results.length;   // null means "append at the end"
        else
          before_idx = find (results, before);
        var doc = _.extend({_id: id}, fields);
        test.isFalse(before_idx < 0 || before_idx > results.length);
        results.splice(before_idx, 0, doc);
      },
      removed: function(id) {
        var at_idx = find (results, id);
        test.isFalse(at_idx < 0 || at_idx >= results.length);
        results.splice(at_idx, 1);
      },
      changed: function(id, fields) {
        var at_idx = find (results, id);
        var oldDoc = results[at_idx];
        var doc = EJSON.clone(oldDoc);
        LocalCollection._applyChanges(doc, fields);
        test.isFalse(at_idx < 0 || at_idx >= results.length);
        // A `changed` callback must never alter the _id.
        test.equal(doc._id, oldDoc._id);
        results[at_idx] = doc;
      },
      movedBefore: function(id, before) {
        var old_idx = find(results, id);
        var new_idx;
        if (before === null)
          new_idx = results.length;
        else
          new_idx = find (results, before);
        // Removing the doc first shifts later indices left by one.
        if (new_idx > old_idx)
          new_idx--;
        test.isFalse(old_idx < 0 || old_idx >= results.length);
        test.isFalse(new_idx < 0 || new_idx >= results.length);
        results.splice(new_idx, 0, results.splice(old_idx, 1)[0]);
      }
    };
    LocalCollection._diffQueryOrderedChanges(oldResults, newResults, observer);
    test.equal(results, newResults);
  };
  // edge cases and cases run into during debugging
  diffTest(5, [5, 1, 2, 3, 4]);
  diffTest(0, [1, 2, 3, 4]);
  diffTest(4, []);
  diffTest(7, [4, 5, 6, 7, 1, 2, 3]);
  diffTest(7, [5, 6, 7, 1, 2, 3, 4]);
  diffTest(10, [7, 4, 11, 6, 12, 1, 5]);
  diffTest(3, [3, 2, 1]);
  diffTest(10, [2, 7, 4, 6, 11, 3, 8, 9]);
  diffTest(0, []);
  diffTest(1, []);
  diffTest(0, [1]);
  diffTest(1, [1]);
  diffTest(5, [1, 2, 3, 4, 5]);
  // interaction between "changed" and other ops
  diffTest(5, [-5, -1, 2, -3, 4]);
  diffTest(7, [-4, -5, 6, 7, -1, 2, 3]);
  diffTest(7, [5, 6, -7, 1, 2, -3, 4]);
  diffTest(10, [7, -4, 11, 6, 12, -1, 5]);
  diffTest(3, [-3, -2, -1]);
  diffTest(10, [-2, 7, 4, 6, 11, -3, -8, 9]);
});
Tinytest.add("minimongo - saveOriginals", function (test) {
// set up some data
var c = new LocalCollection(),
@@ -2774,27 +2613,27 @@ Tinytest.add("minimongo - saveOriginals errors", function (test) {
Tinytest.add("minimongo - objectid transformation", function (test) {
var testId = function (item) {
test.equal(item, LocalCollection._idParse(LocalCollection._idStringify(item)));
test.equal(item, MongoID.idParse(MongoID.idStringify(item)));
};
var randomOid = new LocalCollection._ObjectID();
var randomOid = new MongoID.ObjectID();
testId(randomOid);
testId("FOO");
testId("ffffffffffff");
testId("0987654321abcdef09876543");
testId(new LocalCollection._ObjectID());
testId(new MongoID.ObjectID());
testId("--a string");
test.equal("ffffffffffff", LocalCollection._idParse(LocalCollection._idStringify("ffffffffffff")));
test.equal("ffffffffffff", MongoID.idParse(MongoID.idStringify("ffffffffffff")));
});
Tinytest.add("minimongo - objectid", function (test) {
var randomOid = new LocalCollection._ObjectID();
var anotherRandomOid = new LocalCollection._ObjectID();
var randomOid = new MongoID.ObjectID();
var anotherRandomOid = new MongoID.ObjectID();
test.notEqual(randomOid, anotherRandomOid);
test.throws(function() { new LocalCollection._ObjectID("qqqqqqqqqqqqqqqqqqqqqqqq");});
test.throws(function() { new LocalCollection._ObjectID("ABCDEF"); });
test.equal(randomOid, new LocalCollection._ObjectID(randomOid.valueOf()));
test.throws(function() { new MongoID.ObjectID("qqqqqqqqqqqqqqqqqqqqqqqq");});
test.throws(function() { new MongoID.ObjectID("ABCDEF"); });
test.equal(randomOid, new MongoID.ObjectID(randomOid.valueOf()));
});
Tinytest.add("minimongo - pause", function (test) {
@@ -2848,7 +2687,7 @@ Tinytest.add("minimongo - ids matched by selector", function (test) {
};
check("foo", ["foo"]);
check({_id: "foo"}, ["foo"]);
var oid1 = new LocalCollection._ObjectID();
var oid1 = new MongoID.ObjectID();
check(oid1, [oid1]);
check({_id: oid1}, [oid1]);
check({_id: "foo", x: 42}, ["foo"]);

View File

@@ -1,57 +1,8 @@
// Does `str` look like a Mongo ObjectID, i.e. exactly 24 lowercase hex
// characters? (Returns a falsy/truthy match result, not a strict boolean.)
LocalCollection._looksLikeObjectID = function (str) {
  if (str.length !== 24)
    return false;
  return str.match(/^[0-9a-f]*$/);
};
// random-based impl of Mongo ObjectID
//
// Wraps a 24-character lowercase hex string. Constructed without an
// argument it generates a random hex string; otherwise the supplied
// string is lowercased, validated, and stored.
LocalCollection._ObjectID = function (hexString) {
  var self = this;
  if (!hexString) {
    self._str = Random.hexString(24);
    return;
  }
  hexString = hexString.toLowerCase();
  if (!LocalCollection._looksLikeObjectID(hexString))
    throw new Error("Invalid hexadecimal string for creating an ObjectID");
  // meant to work with _.isEqual(), which relies on structural equality
  self._str = hexString;
};

LocalCollection._ObjectID.prototype.toString = function () {
  return "ObjectID(\"" + this._str + "\")";
};

// Structural equality: same class and same hex string.
LocalCollection._ObjectID.prototype.equals = function (other) {
  if (!(other instanceof LocalCollection._ObjectID))
    return false;
  return this.valueOf() === other.valueOf();
};

LocalCollection._ObjectID.prototype.clone = function () {
  return new LocalCollection._ObjectID(this._str);
};

// EJSON custom-type name.
LocalCollection._ObjectID.prototype.typeName = function () {
  return "oid";
};

// First 8 hex chars interpreted as a timestamp, matching Mongo's ObjectID
// API (for the random-based ids generated here the value carries no real
// creation time).
LocalCollection._ObjectID.prototype.getTimestamp = function () {
  return parseInt(this._str.substr(0, 8), 16);
};

// All three accessors return the raw 24-char hex string.
LocalCollection._ObjectID.prototype.valueOf =
  LocalCollection._ObjectID.prototype.toJSONValue =
  LocalCollection._ObjectID.prototype.toHexString =
    function () { return this._str; };
// Is this selector just shorthand for lookup by _id?
LocalCollection._selectorIsId = function (selector) {
return (typeof selector === "string") ||
(typeof selector === "number") ||
selector instanceof LocalCollection._ObjectID;
selector instanceof MongoID.ObjectID;
};
// Is the selector just lookup by _id (shorthand or not)?
@@ -103,6 +54,4 @@ LocalCollection._idsMatchedBySelector = function (selector) {
return null;
};
EJSON.addType("oid", function (str) {
return new LocalCollection._ObjectID(str);
});

View File

@@ -25,7 +25,7 @@ LocalCollection._CachingChangeObserver = function (options) {
var callbacks = options.callbacks || {};
if (self.ordered) {
self.docs = new OrderedDict(LocalCollection._idStringify);
self.docs = new OrderedDict(MongoID.idStringify);
self.applyChange = {
addedBefore: function (id, fields, before) {
var doc = EJSON.clone(fields);
@@ -65,7 +65,7 @@ LocalCollection._CachingChangeObserver = function (options) {
throw new Error("Unknown id for changed: " + id);
callbacks.changed && callbacks.changed.call(
self, id, EJSON.clone(fields));
LocalCollection._applyChanges(doc, fields);
DiffSequence.applyChanges(doc, fields);
};
self.applyChange.removed = function (id) {
callbacks.removed && callbacks.removed.call(self, id);
@@ -106,7 +106,7 @@ LocalCollection._observeFromObserveChanges = function (cursor, observeCallbacks)
if (!doc)
throw new Error("Unknown id for changed: " + id);
var oldDoc = transform(EJSON.clone(doc));
LocalCollection._applyChanges(doc, fields);
DiffSequence.applyChanges(doc, fields);
doc = transform(doc);
if (observeCallbacks.changedAt) {
var index = indices ? self.docs.indexOf(id) : -1;
@@ -158,7 +158,7 @@ LocalCollection._observeFromObserveChanges = function (cursor, observeCallbacks)
if (observeCallbacks.changed) {
var oldDoc = self.docs.get(id);
var doc = EJSON.clone(oldDoc);
LocalCollection._applyChanges(doc, fields);
DiffSequence.applyChanges(doc, fields);
observeCallbacks.changed(transform(doc),
transform(EJSON.clone(oldDoc)));
}

View File

@@ -8,9 +8,11 @@ Package.onUse(function (api) {
api.export('Minimongo');
api.export('MinimongoTest', { testOnly: true });
api.use(['underscore', 'json', 'ejson', 'id-map', 'ordered-dict', 'tracker',
'random', 'ordered-dict']);
'mongo-id', 'random', 'diff-sequence']);
// This package is used for geo-location queries such as $near
api.use('geojson-utils');
// This package is used to get diff results on arrays and objects
api.use('diff-sequence');
api.addFiles([
'minimongo.js',
'wrap_transform.js',
@@ -37,7 +39,7 @@ Package.onTest(function (api) {
api.use('minimongo', ['client', 'server']);
api.use('test-helpers', 'client');
api.use(['tinytest', 'underscore', 'ejson', 'ordered-dict',
'random', 'tracker', 'reactive-var']);
'random', 'tracker', 'reactive-var', 'mongo-id']);
api.addFiles('minimongo_tests.js', 'client');
api.addFiles('wrap_transform_tests.js');
api.addFiles('minimongo_server_tests.js', 'server');

View File

@@ -990,7 +990,7 @@ LocalCollection._f = {
return 9;
if (EJSON.isBinary(v))
return 5;
if (v instanceof LocalCollection._ObjectID)
if (v instanceof MongoID.ObjectID)
return 7;
return 3; // object

View File

@@ -18,14 +18,14 @@ Tinytest.add("minimongo - wrapTransform", function (test) {
test.equal(transformed.z(), 43);
// Ensure that ObjectIDs work (even if the _ids in question are not ===-equal)
var oid1 = new LocalCollection._ObjectID();
var oid2 = new LocalCollection._ObjectID(oid1.toHexString());
var oid1 = new MongoID.ObjectID();
var oid2 = new MongoID.ObjectID(oid1.toHexString());
test.equal(wrap(function () {return {_id: oid2};})({_id: oid1}),
{_id: oid2});
// transform functions must return objects
var invalidObjects = [
"asdf", new LocalCollection._ObjectID(), false, null, true,
"asdf", new MongoID.ObjectID(), false, null, true,
27, [123], /adsf/, new Date, function () {}, undefined
];
_.each(invalidObjects, function (invalidObject) {

95
packages/mongo-id/id.js Normal file
View File

@@ -0,0 +1,95 @@
// Namespace for Mongo-style id helpers: the ObjectID class plus the
// id stringify/parse functions shared with livedata.
MongoID = {};

// Does `str` look like a Mongo ObjectID, i.e. exactly 24 lowercase hex
// characters? (Returns a falsy/truthy match result, not a strict boolean.)
MongoID._looksLikeObjectID = function (str) {
  if (str.length !== 24)
    return false;
  return str.match(/^[0-9a-f]*$/);
};
// random-based impl of Mongo ObjectID
//
// Wraps a 24-character lowercase hex string. Constructed without an
// argument it generates a random hex string; otherwise the supplied
// string is lowercased, validated, and stored.
MongoID.ObjectID = function (hexString) {
  var self = this;
  if (!hexString) {
    self._str = Random.hexString(24);
    return;
  }
  hexString = hexString.toLowerCase();
  if (!MongoID._looksLikeObjectID(hexString))
    throw new Error("Invalid hexadecimal string for creating an ObjectID");
  // meant to work with _.isEqual(), which relies on structural equality
  self._str = hexString;
};

MongoID.ObjectID.prototype.toString = function () {
  return "ObjectID(\"" + this._str + "\")";
};

// Structural equality: same class and same hex string.
MongoID.ObjectID.prototype.equals = function (other) {
  if (!(other instanceof MongoID.ObjectID))
    return false;
  return this.valueOf() === other.valueOf();
};

MongoID.ObjectID.prototype.clone = function () {
  return new MongoID.ObjectID(this._str);
};

// EJSON custom-type name.
MongoID.ObjectID.prototype.typeName = function () {
  return "oid";
};

// First 8 hex chars interpreted as a timestamp, matching Mongo's ObjectID
// API (for the random-based ids generated here the value carries no real
// creation time).
MongoID.ObjectID.prototype.getTimestamp = function () {
  return parseInt(this._str.substr(0, 8), 16);
};

// All three accessors return the raw 24-char hex string.
MongoID.ObjectID.prototype.valueOf =
  MongoID.ObjectID.prototype.toJSONValue =
  MongoID.ObjectID.prototype.toHexString =
    function () { return this._str; };
// Register the "oid" EJSON type so ObjectIDs survive serialization
// round-trips (toJSONValue on the instance produces the hex string).
EJSON.addType("oid", function (hexString) {
  return new MongoID.ObjectID(hexString);
});
// Encode an id (string, ObjectID, number, boolean, null, or undefined)
// as a string key. Reversible via MongoID.idParse.
MongoID.idStringify = function (id) {
  if (id instanceof MongoID.ObjectID)
    return id.valueOf();

  if (typeof id === 'string') {
    // The empty string round-trips unchanged.
    if (id === "")
      return id;
    var first = id.substr(0, 1);
    // Strings that could collide with an escaped form get a "-" prefix:
    // previously dashed strings, escaped primitives ("~"), object-id-form
    // strings, and "{"-prefixed strings (reserved for possible later use).
    var needsEscape = first === "-" ||
          first === "~" ||
          MongoID._looksLikeObjectID(id) ||
          first === '{';
    return needsEscape ? "-" + id : id;
  }

  if (id === undefined)
    return '-';

  // Objects other than ObjectID are not supported as ids.
  if (typeof id === 'object' && id !== null)
    throw new Error("Meteor does not currently support objects other than ObjectID as ids");

  // Numbers, true, false, null: "~" plus their JSON form.
  return "~" + JSON.stringify(id);
};
// Inverse of MongoID.idStringify: recover the original id value from its
// encoded string form.
MongoID.idParse = function (id) {
  if (id === "")
    return id;                         // empty string round-trips unchanged
  if (id === '-')
    return undefined;                  // a bare "-" encoded `undefined`
  var first = id.substr(0, 1);
  if (first === '-')
    return id.substr(1);               // strip the escape prefix
  if (first === '~')
    return JSON.parse(id.substr(1));   // numbers, true, false, null
  if (MongoID._looksLikeObjectID(id))
    return new MongoID.ObjectID(id);
  return id;                           // plain, unescaped string
};

View File

@@ -0,0 +1,13 @@
Package.describe({
summary: "JS simulation of MongoDB ObjectIDs",
version: '1.0.0'
});
Package.onUse(function (api) {
api.export('MongoID');
api.use(['ejson', 'id-map', 'random']);
api.addFiles([
'id.js'
]);
});

View File

@@ -136,7 +136,7 @@ Mongo.Collection = function (name, options) {
// Apply an update.
// XXX better specify this interface (not in terms of a wire message)?
update: function (msg) {
var mongoId = LocalCollection._idParse(msg.id);
var mongoId = MongoID.idParse(msg.id);
var doc = self._collection.findOne(mongoId);
// Is this a "replace the whole doc" message coming from the quiescence
@@ -669,7 +669,7 @@ Mongo.Collection.prototype.rawDatabase = function () {
* @class
* @param {String} hexString Optional. The 24-character hexadecimal contents of the ObjectID to create
*/
Mongo.ObjectID = LocalCollection._ObjectID;
Mongo.ObjectID = MongoID.ObjectID;
/**
* @summary To create a cursor, use find. To access the documents in a cursor, use forEach, map, or fetch.

View File

@@ -264,7 +264,7 @@ _.extend(OplogObserveDriver.prototype, {
self._published.set(id, self._sharedProjectionFn(newDoc));
var projectedNew = self._projectionFn(newDoc);
var projectedOld = self._projectionFn(oldDoc);
var changed = LocalCollection._makeChangedFields(
var changed = DiffSequence.makeChangedFields(
projectedNew, projectedOld);
if (!_.isEmpty(changed))
self._multiplexer.changed(id, changed);

View File

@@ -24,7 +24,7 @@ Package.onUse(function (api) {
api.use('npm-mongo', 'server');
api.use(['random', 'ejson', 'json', 'underscore', 'minimongo', 'logging',
'ddp', 'tracker'],
'ddp', 'tracker', 'diff-sequence', 'mongo-id'],
['client', 'server']);
api.use('check', ['client', 'server']);

View File

@@ -9,8 +9,8 @@ var warn = function () {
}
};
var idStringify = LocalCollection._idStringify;
var idParse = LocalCollection._idParse;
var idStringify = MongoID.idStringify;
var idParse = MongoID.idParse;
ObserveSequence = {
_suppressWarnings: 0,
@@ -150,7 +150,7 @@ var isStoreCursor = function (cursor) {
// `seqArray` and calls appropriate functions from `callbacks`.
// Reuses Minimongo's diff algorithm implementation.
var diffArray = function (lastSeqArray, seqArray, callbacks) {
var diffFn = Package.minimongo.LocalCollection._diffQueryOrderedChanges;
var diffFn = Package['diff-sequence'].DiffSequence.diffQueryOrderedChanges;
var oldIdObjects = [];
var newIdObjects = [];
var posOld = {}; // maps from idStringify'd ids

View File

@@ -5,7 +5,8 @@ Package.describe({
Package.onUse(function (api) {
api.use('tracker');
api.use('minimongo'); // for idStringify
api.use('mongo-id'); // for idStringify
api.use('diff-sequence');
api.use('underscore');
api.use('random');
api.export('ObserveSequence');