Merge branch 'release-1.5.2' into release-1.6

This commit is contained in:
Ben Newman
2017-08-08 18:40:46 -04:00
83 changed files with 2144 additions and 1585 deletions

413
.circleci/config.yml Normal file
View File

@@ -0,0 +1,413 @@
version: 2
# These directories are cached across all builds, currently with no
# hashing mechanism, but we should consider doing it off dev_bundle.
meteor_cache_dirs: &meteor_cache_dirs
paths:
- "dev_bundle"
- ".babel-cache"
- ".meteor"
# A reusable "run" snippet which is run before each test to set up the
# environment for user-limits, core-dumps, etc.
run_env_change: &run_env_change
name: Environment Changes
command: |
# Make a place for core dumps to live.
sudo mkdir -p /tmp/core_dumps
sudo chmod a+rwx /tmp/core_dumps
# Set the pattern for core dumps, so we can find them.
echo kernel.core_pattern="/tmp/core_dumps/core.%e.%p.%h.%t" | \
sudo tee -a /etc/sysctl.conf
# Note that since every "run" command starts its own shell, and I wasn't
# able to set this at a system wide level for all users, it's necessary to
# run "ulimit -c unlimited" before each command which you want to (possibly)
# output a core dump.
# Raise inotify user watches up higher.
echo fs.inotify.max_user_watches=524288 | \
sudo tee -a /etc/sysctl.conf
# Reload sysctl so these are in effect.
sudo sysctl -p
# A reusable "run" snippet which enables the continued logging of memory usage
# to a file on disk which can be saved to build artifacts for later analysis.
run_log_mem_use: &run_log_mem_use
background: true
name: Setup Memory Logging
command: |
# Log memory usage throughout entire build.
MEMUSELOG=/tmp/memuse.txt /bin/bash -c '\
while true; do\
ps -u $USER eo pid,%cpu,%mem,rss:10,vsz:10,args:20 --sort=-%mem >> $MEMUSELOG; \
echo "----------" >> $MEMUSELOG; \
sleep 1; \
done'
# A reusable "run" snippet for saving the Node binary if a core dump is present.
run_save_node_bin: &run_save_node_bin
name: Save Node Binary
when: on_fail
command: |
if compgen -G "/tmp/core_dumps/core.*" > /dev/null; then
echo "Saving Node binary since Core dump is present..."
cp dev_bundle/bin/node /tmp/core_dumps/node
fi
# This environment is set to every job (and the initial build).
build_machine_environment: &build_machine_environment
# Specify that we want an actual machine (ala Circle 1.0), not a Docker image.
machine: true
environment:
# This multiplier scales the waitSecs for selftests.
TIMEOUT_SCALE_FACTOR: 4
# These, mostly overlapping, flags ensure that CircleCI is as pretty as
# possible for a non-interactive environment. See also: --headless.
EMACS: t
METEOR_HEADLESS: true
METEOR_PRETTY_OUTPUT: 0
# In an effort to stop SIGSEGV, this just doesn't bother cleaning up
# the mess of temp directories that Meteor makes.
METEOR_SAVE_TMPDIRS: 1
# Disable the optimistic caching of file watchers, which incurs a slight
# polling delay which is less than ideal in a CI environment where file
# watchers should be plentiful.
METEOR_DISABLE_OPTIMISTIC_CACHING: 1
# Skip these tests on every test run.
# For readability, this is a regex wrapped across multiple lines in quotes.
SELF_TEST_EXCLUDE: "\
^old cli tests|\
^minifiers can't register non-js|\
^minifiers: apps can't use|\
^compiler plugins - addAssets\
"
# These will be evaled before each command.
PRE_TEST_COMMANDS: |-
ulimit -c unlimited; # Set core dump size as Ubuntu 14.04 lacks prlimit.
ulimit -n 4096; # CircleCI default is soft 1024, hard 4096. Take it all.
# Enable the Garbage Collection `gc` object to be exposed so we can try
# to use our own, hopefully more graceful, technique.
TOOL_NODE_FLAGS: --expose-gc
# This is only to make Meteor self-test not remind us that we can set
# this argument for self-tests.
SELF_TEST_TOOL_NODE_FLAGS: " "
jobs:
Get Ready:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- checkout
- run:
# https://discuss.circleci.com/t/git-submodule-url-isnt-playing-nice-with-the-cache/549/3
name: Git Submodules.
command: (git submodule sync && git submodule update --init --recursive) || (rm -fr .git/config .git/modules && git submodule deinit -f . && git submodule update --init --recursive)
- restore_cache:
key: meteor-cache
- run:
name: Get Ready
command: |
eval $PRE_TEST_COMMANDS;
./meteor --help
# shouldn't take longer than 5 minutes
no_output_timeout: 5m
# Clear dev_bundle/.npm to ensure consistent test runs.
- run:
name: Clear npm cache
command: ./meteor npm cache clear
# Since PhantomJS has been removed from dev_bundle/lib/node_modules
# (#6905), but self-test still needs it, install it now.
- run:
name: Test Prereqs
command: ./meteor npm install -g phantomjs-prebuilt browserstack-webdriver
- run:
<<: *run_save_node_bin
- persist_to_workspace:
root: .
paths: .
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
save_caches:
<<: *build_machine_environment
steps:
- attach_workspace:
at: .
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
Group 0:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running warehouse self-tests"
command: |
eval $PRE_TEST_COMMANDS;
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--with-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
Group 1:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running self-test (1): A-Com"
command: |
eval $PRE_TEST_COMMANDS;
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--file '^[a-b]|^c[a-n]|^co[a-l]|^compiler-plugins' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
Group 2:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running self-test (2): Con-K"
command: |
eval $PRE_TEST_COMMANDS;
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--file "^co[n-z]|^c[p-z]|^[d-k]" \
--without-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
Group 3:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running self-test (3): L-O"
command: |
eval $PRE_TEST_COMMANDS;
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--file '^[l-o]' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
Group 4:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running self-test (4): P"
command: |
eval $PRE_TEST_COMMANDS;
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--file '^p' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
Group 5:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running self-test (5): Run"
command: |
eval $PRE_TEST_COMMANDS;
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--file '^run' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
Group 6:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running self-test (6): R-S"
command: |
eval "$PRE_TEST_COMMANDS";
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--file '^r(?!un)|^s' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
Group 7:
<<: *build_machine_environment
steps:
- run:
<<: *run_log_mem_use
- run:
<<: *run_env_change
- attach_workspace:
at: .
- run:
name: "Running self-test (7): T-Z"
command: |
eval $PRE_TEST_COMMANDS;
./meteor self-test \
--exclude "${SELF_TEST_EXCLUDE}" \
--headless \
--file '^[t-z]|^command-line' \
--without-tag "custom-warehouse"
no_output_timeout: 20m
- run:
<<: *run_save_node_bin
- save_cache:
key: meteor-cache
<<: *meteor_cache_dirs
- store_artifacts:
path: /tmp/core_dumps
- store_artifacts:
path: /tmp/memuse.txt
workflows:
version: 2
Build and Test:
jobs:
- Get Ready
- Group 0:
requires:
- Get Ready
- Group 1:
requires:
- Get Ready
- Group 2:
requires:
- Get Ready
- Group 3:
requires:
- Get Ready
- Group 4:
requires:
- Get Ready
- Group 5:
requires:
- Get Ready
- Group 6:
requires:
- Get Ready
- Group 7:
requires:
- Get Ready

1
.gitignore vendored
View File

@@ -14,6 +14,7 @@
*.iml
*.sublime-project
*.sublime-workspace
/.vscode/
TAGS
*.log
*.out

View File

@@ -1,2 +0,0 @@
REVIEWBOARD_URL = 'https://rbcommons.com/s/meteor/'
REPOSITORY = 'Meteor framework'

View File

@@ -58,17 +58,18 @@
* The `semver` npm package has been upgraded to version 5.3.0.
[PR #8859](https://github.com/meteor/meteor/pull/8859)
* A new package called `mongo-dev-server` has been created and wired into
`mongo` as a dependency. As long as this package is included in a Meteor
application (which it is by default since all new Meteor apps have `mongo`
as a dependency), a local development MongoDB server is started alongside
the application. This package was created to provide a way to disable the
local development Mongo server, when `mongo` isn't needed (e.g. when using
Meteor as a build system only). If an application has no dependency on
`mongo`, the `mongo-dev-server` package is not added, which means no local
development Mongo server is started.
[Feature Request #31](https://github.com/meteor/meteor-feature-requests/issues/31)
[PR #8853](https://github.com/meteor/meteor/pull/8853)
* The `faye-websocket` npm package has been upgraded to version 0.11.1,
and its dependency `websocket-driver` has been upgraded to a version
containing [this fix](https://github.com/faye/websocket-driver-node/issues/21),
thanks to [@sdarnell](https://github.com/sdarnell).
[meteor-feature-requests#160](https://github.com/meteor/meteor-feature-requests/issues/160)
* The `star.json` manifest created within the root of a `meteor build` bundle
will now contain `nodeVersion` and `npmVersion` which will specify the exact
versions of Node.js and npm (respectively) which the Meteor release was
bundled with. The `.node_version.txt` file will still be written into the
root of the bundle, but it may be deprecated in a future version of Meteor.
[PR #8956](https://github.com/meteor/meteor/pull/8956)
* `Accounts.config` no longer mistakenly allows tokens to expire when
the `loginExpirationInDays` option is set to `null`.
@@ -84,6 +85,15 @@
[#8424](https://github.com/meteor/meteor/issues/8424), and
[#8464](https://github.com/meteor/meteor/issues/8464).
* The `"env"` field is now supported in `.babelrc` files.
[PR #8963](https://github.com/meteor/meteor/pull/8963)
* Files contained by `client/compatibility/` directories or added with
`api.addFiles(files, ..., { bare: true })` are now evaluated before
importing modules with `require`, which may be a breaking change if you
depend on the interleaving of `bare` files with eager module evaluation.
[PR #8972](https://github.com/meteor/meteor/pull/8972)
## v1.5.1, 2017-07-12
* Node has been upgraded to version 4.8.4.

View File

@@ -1,28 +0,0 @@
checkout:
post:
# https://discuss.circleci.com/t/git-submodule-url-isnt-playing-nice-with-the-cache/549/3
- git submodule sync
- git submodule update --init --recursive || (rm -fr .git/config .git/modules && git submodule deinit -f . && git submodule update --init --recursive)
dependencies:
pre:
# https://github.com/meteor/docs/blob/version-NEXT/long-form/file-change-watcher-efficiency.md
- echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p
cache_directories:
- "dev_bundle"
- ".meteor"
- ".babel-cache"
override:
# shouldn't take longer than 5 minutes
- ./meteor --help:
timeout: 300
environment:
METEOR_PRETTY_OUTPUT: 0
METEOR_DISABLE_OPTIMISTIC_CACHING: 1
TOOL_NODE_FLAGS: --expose-gc
test:
override:
- ./scripts/ci.sh :
parallel: true
timeout: 1200

View File

@@ -306,6 +306,16 @@ BCp._inferHelper = function (
merge(babelOptions, babelrc, "presets");
merge(babelOptions, babelrc, "plugins");
const babelEnv = (process.env.BABEL_ENV ||
process.env.NODE_ENV ||
"development");
if (babelrc && babelrc.env && babelrc.env[babelEnv]) {
const env = babelrc.env[babelEnv];
walkBabelRC(env);
merge(babelOptions, env, "presets");
merge(babelOptions, env, "plugins");
}
return !! (babelrc.presets ||
babelrc.plugins);
};

View File

@@ -186,6 +186,10 @@ CS.isConstraintSatisfied = function (pkg, vConstraint, version) {
var cVersion = simpleConstraint.versionString;
return (cVersion === version);
} else if (type === 'compatible-with') {
if (typeof simpleConstraint.test === "function") {
return simpleConstraint.test(version);
}
var cv = PV.parse(simpleConstraint.versionString);
var v = PV.parse(version);

View File

@@ -2,24 +2,22 @@
"lockfileVersion": 1,
"dependencies": {
"faye-websocket": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.0.tgz",
"integrity": "sha1-2czw54nn23JddLxId9I6pClyrFA=",
"version": "0.11.1",
"resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.1.tgz",
"integrity": "sha1-8O/hjE9W5PQK/H4Gxxn9XuYYjzg=",
"dependencies": {
"websocket-driver": {
"version": "0.6.4",
"resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.6.4.tgz",
"integrity": "sha1-ZbhNAhE0gNP8BeY+gJMiBCvclAs=",
"dependencies": {
"websocket-extensions": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.1.tgz",
"integrity": "sha1-domUmcGEtu91Q3fC27DNbLVdKec="
}
}
"version": "0.6.5",
"resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.6.5.tgz",
"integrity": "sha1-XLJVbOuF9Dc8bYI4qmkchFThOjY="
}
}
},
"http-parser-js": {
"version": "0.4.5",
"resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.4.5.tgz",
"integrity": "sha512-sYaqbMBf8hoS6OZBwMygxdLD3TsWgzheP55nkQ7GiR7gsn8x+2oTMCoJSAQmNm3obzOjJYT6tdTz1XcYjKyUqg=="
},
"lolex": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/lolex/-/lolex-1.4.0.tgz",
@@ -29,6 +27,15 @@
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/permessage-deflate/-/permessage-deflate-0.1.3.tgz",
"integrity": "sha1-VnVbIrzkUKLuVoauXy7zJRNlk3k="
},
"websocket-driver": {
"version": "https://github.com/faye/websocket-driver-node/tarball/1325828a9e8b5e29c7b4758995efdb84703919ad",
"integrity": "sha1-R3ysWk21bTKDcB2nUGcV4rZHgIY="
},
"websocket-extensions": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.1.tgz",
"integrity": "sha1-domUmcGEtu91Q3fC27DNbLVdKec="
}
}
}

View File

@@ -1,11 +1,16 @@
Package.describe({
summary: "Meteor's latency-compensated distributed data client",
version: '2.1.0-beta.15',
version: '2.2.0-beta.15',
documentation: null
});
Npm.depends({
"faye-websocket": "0.11.0",
"faye-websocket": "0.11.1",
// TODO Remove this direct websocket-driver dependency when a new
// version gets published, though that may not happen very soon:
// https://github.com/faye/websocket-driver-node/issues/21
"websocket-driver": "https://github.com/faye/websocket-driver-node/" +
"tarball/1325828a9e8b5e29c7b4758995efdb84703919ad",
"lolex": "1.4.0",
"permessage-deflate": "0.1.3"
});

View File

@@ -1,87 +1,74 @@
function Address (city, state) {
this.city = city;
this.state = state;
}
import { EJSON } from './ejson';
Address.prototype = {
constructor: Address,
class Address {
constructor(city, state) {
this.city = city;
this.state = state;
}
typeName: function () {
return "Address";
},
typeName() {
return 'Address';
}
toJSONValue: function () {
toJSONValue() {
return {
city: this.city,
state: this.state
state: this.state,
};
}
}
EJSON.addType("Address", function fromJSONValue(value) {
return new Address(value.city, value.state);
});
EJSON.addType('Address', value => new Address(value.city, value.state));
function Person (name, dob, address) {
this.name = name;
this.dob = dob;
this.address = address;
}
class Person {
constructor(name, dob, address) {
this.name = name;
this.dob = dob;
this.address = address;
}
Person.prototype = {
constructor: Person,
typeName() {
return 'Person';
}
typeName: function () {
return "Person";
},
toJSONValue: function () {
toJSONValue() {
return {
name: this.name,
dob: EJSON.toJSONValue(this.dob),
address: EJSON.toJSONValue(this.address)
address: EJSON.toJSONValue(this.address),
};
}
}
_.extend(Person, {
fromJSONValue: function(value) {
return new Person(
value.name,
EJSON.fromJSONValue(value.dob),
EJSON.fromJSONValue(value.address)
);
EJSON.addType(
'Person',
value => new Person(
value.name,
EJSON.fromJSONValue(value.dob),
EJSON.fromJSONValue(value.address)
)
);
class Holder {
constructor(content) {
this.content = content;
}
});
EJSON.addType("Person", Person.fromJSONValue);
typeName() {
return 'Holder';
}
function Holder (content) {
this.content = content;
}
Holder.prototype = {
constructor: Holder,
typeName: function () {
return "Holder";
},
toJSONValue: function () {
toJSONValue() {
return this.content;
}
}
_.extend(Holder, {
fromJSONValue: function(value) {
return new Holder(value);
}
});
EJSON.addType('Holder', value => new Holder(value));
EJSON.addType("Holder", Holder.fromJSONValue);
const EJSONTest = {
Address,
Person,
Holder,
};
_.extend(EJSONTest, {
Address: Address,
Person: Person,
Holder: Holder
});
export default EJSONTest;

View File

@@ -2,10 +2,7 @@
* @namespace
* @summary Namespace for EJSON functions
*/
EJSON = {};
EJSONTest = {};
const EJSON = {};
// Custom type interface definition
/**
@@ -19,7 +16,9 @@ EJSONTest = {};
/**
* @function typeName
* @memberOf EJSON.CustomType
* @summary Return the tag used to identify this type. This must match the tag used to register this type with [`EJSON.addType`](#ejson_add_type).
* @summary Return the tag used to identify this type. This must match the
* tag used to register this type with
* [`EJSON.addType`](#ejson_add_type).
* @locus Anywhere
* @instance
*/
@@ -35,7 +34,8 @@ EJSONTest = {};
/**
* @function clone
* @memberOf EJSON.CustomType
* @summary Return a value `r` such that `this.equals(r)` is true, and modifications to `r` do not affect `this` and vice versa.
* @summary Return a value `r` such that `this.equals(r)` is true, and
* modifications to `r` do not affect `this` and vice versa.
* @locus Anywhere
* @instance
*/
@@ -43,14 +43,22 @@ EJSONTest = {};
/**
* @function equals
* @memberOf EJSON.CustomType
* @summary Return `true` if `other` has a value equal to `this`; `false` otherwise.
* @summary Return `true` if `other` has a value equal to `this`; `false`
* otherwise.
* @locus Anywhere
* @param {Object} other Another object to compare this to.
* @instance
*/
const customTypes = {};
const hasOwn = (obj, prop) => ({}).hasOwnProperty.call(obj, prop);
const isArguments = obj => obj != null && hasOwn(obj, 'callee');
const isInfOrNan =
obj => Number.isNaN(obj) || obj === Infinity || obj === -Infinity;
var customTypes = {};
// Add a custom type, using a method of your choice to get to and
// from a basic JSON-able representation. The factory argument
// is a function of JSON-able --> your object
@@ -66,172 +74,201 @@ var customTypes = {};
/**
* @summary Add a custom datatype to EJSON.
* @locus Anywhere
* @param {String} name A tag for your custom type; must be unique among custom data types defined in your project, and must match the result of your type's `typeName` method.
* @param {Function} factory A function that deserializes a JSON-compatible value into an instance of your type. This should match the serialization performed by your type's `toJSONValue` method.
* @param {String} name A tag for your custom type; must be unique among
* custom data types defined in your project, and must
* match the result of your type's `typeName` method.
* @param {Function} factory A function that deserializes a JSON-compatible
* value into an instance of your type. This should
* match the serialization performed by your
* type's `toJSONValue` method.
*/
EJSON.addType = function (name, factory) {
if (_.has(customTypes, name))
throw new Error("Type " + name + " already present");
EJSON.addType = (name, factory) => {
if (hasOwn(customTypes, name)) {
throw new Error(`Type ${name} already present`);
}
customTypes[name] = factory;
};
var isInfOrNan = function (obj) {
return _.isNaN(obj) || obj === Infinity || obj === -Infinity;
};
var builtinConverters = [
const builtinConverters = [
{ // Date
matchJSONValue: function (obj) {
return _.has(obj, '$date') && _.size(obj) === 1;
matchJSONValue(obj) {
return hasOwn(obj, '$date') && Object.keys(obj).length === 1;
},
matchObject: function (obj) {
matchObject(obj) {
return obj instanceof Date;
},
toJSONValue: function (obj) {
toJSONValue(obj) {
return {$date: obj.getTime()};
},
fromJSONValue: function (obj) {
fromJSONValue(obj) {
return new Date(obj.$date);
}
},
},
{ // RegExp
matchJSONValue: function (obj) {
return _.has(obj, '$regexp') && _.has(obj, '$flags') && _.size(obj) === 2;
matchJSONValue(obj) {
return hasOwn(obj, '$regexp')
&& hasOwn(obj, '$flags')
&& Object.keys(obj).length === 2;
},
matchObject: function (obj) {
matchObject(obj) {
return obj instanceof RegExp;
},
toJSONValue: function (regexp) {
return { $regexp: regexp.source, $flags: regexp.flags };
toJSONValue(regexp) {
return {
$regexp: regexp.source,
$flags: regexp.flags
};
},
fromJSONValue(obj) {
// Replaces duplicate / invalid flags.
return new RegExp(
obj.$regexp,
obj.$flags
// Cut off flags at 50 chars to avoid abusing RegExp for DOS.
.slice(0, 50)
.replace(/[^gimuy]/g,'')
.replace(/(.)(?=.*\1)/g, '')
);
},
fromJSONValue: function (obj) {
//replaces duplicate / invalid flags
return new RegExp(obj.$regexp, obj.$flags.replace(/[^gimuy]/g,'').replace(/(.)(?=.*\1)/g, ''));
}
},
{ // NaN, Inf, -Inf. (These are the only objects with typeof !== 'object'
// which we match.)
matchJSONValue: function (obj) {
return _.has(obj, '$InfNaN') && _.size(obj) === 1;
matchJSONValue(obj) {
return hasOwn(obj, '$InfNaN') && Object.keys(obj).length === 1;
},
matchObject: isInfOrNan,
toJSONValue: function (obj) {
var sign;
if (_.isNaN(obj))
toJSONValue(obj) {
let sign;
if (Number.isNaN(obj)) {
sign = 0;
else if (obj === Infinity)
} else if (obj === Infinity) {
sign = 1;
else
} else {
sign = -1;
}
return {$InfNaN: sign};
},
fromJSONValue: function (obj) {
return obj.$InfNaN/0;
}
fromJSONValue(obj) {
return obj.$InfNaN / 0;
},
},
{ // Binary
matchJSONValue: function (obj) {
return _.has(obj, '$binary') && _.size(obj) === 1;
matchJSONValue(obj) {
return hasOwn(obj, '$binary') && Object.keys(obj).length === 1;
},
matchObject: function (obj) {
matchObject(obj) {
return typeof Uint8Array !== 'undefined' && obj instanceof Uint8Array
|| (obj && _.has(obj, '$Uint8ArrayPolyfill'));
|| (obj && hasOwn(obj, '$Uint8ArrayPolyfill'));
},
toJSONValue: function (obj) {
toJSONValue(obj) {
return {$binary: Base64.encode(obj)};
},
fromJSONValue: function (obj) {
fromJSONValue(obj) {
return Base64.decode(obj.$binary);
}
},
},
{ // Escaping one level
matchJSONValue: function (obj) {
return _.has(obj, '$escape') && _.size(obj) === 1;
matchJSONValue(obj) {
return hasOwn(obj, '$escape') && Object.keys(obj).length === 1;
},
matchObject: function (obj) {
if (_.isEmpty(obj) || _.size(obj) > 2) {
return false;
matchObject(obj) {
let match = false;
if (obj) {
const keyCount = Object.keys(obj).length;
if (keyCount === 1 || keyCount === 2) {
match =
builtinConverters.some(converter => converter.matchJSONValue(obj));
}
}
return _.any(builtinConverters, function (converter) {
return converter.matchJSONValue(obj);
});
return match;
},
toJSONValue: function (obj) {
var newObj = {};
_.each(obj, function (value, key) {
newObj[key] = EJSON.toJSONValue(value);
toJSONValue(obj) {
const newObj = {};
Object.keys(obj).forEach(key => {
newObj[key] = EJSON.toJSONValue(obj[key]);
});
return {$escape: newObj};
},
fromJSONValue: function (obj) {
var newObj = {};
_.each(obj.$escape, function (value, key) {
newObj[key] = EJSON.fromJSONValue(value);
fromJSONValue(obj) {
const newObj = {};
Object.keys(obj.$escape).forEach(key => {
newObj[key] = EJSON.fromJSONValue(obj.$escape[key]);
});
return newObj;
}
},
},
{ // Custom
matchJSONValue: function (obj) {
return _.has(obj, '$type') && _.has(obj, '$value') && _.size(obj) === 2;
matchJSONValue(obj) {
return hasOwn(obj, '$type')
&& hasOwn(obj, '$value') && Object.keys(obj).length === 2;
},
matchObject: function (obj) {
matchObject(obj) {
return EJSON._isCustomType(obj);
},
toJSONValue: function (obj) {
var jsonValue = Meteor._noYieldsAllowed(function () {
return obj.toJSONValue();
});
toJSONValue(obj) {
const jsonValue = Meteor._noYieldsAllowed(() => obj.toJSONValue());
return {$type: obj.typeName(), $value: jsonValue};
},
fromJSONValue: function (obj) {
var typeName = obj.$type;
if (!_.has(customTypes, typeName))
throw new Error("Custom EJSON type " + typeName + " is not defined");
var converter = customTypes[typeName];
return Meteor._noYieldsAllowed(function () {
return converter(obj.$value);
});
}
}
fromJSONValue(obj) {
const typeName = obj.$type;
if (!hasOwn(customTypes, typeName)) {
throw new Error(`Custom EJSON type ${typeName} is not defined`);
}
const converter = customTypes[typeName];
return Meteor._noYieldsAllowed(() => converter(obj.$value));
},
},
];
EJSON._isCustomType = function (obj) {
return obj &&
typeof obj.toJSONValue === 'function' &&
typeof obj.typeName === 'function' &&
_.has(customTypes, obj.typeName());
};
EJSON._isCustomType = (obj) => (
obj &&
typeof obj.toJSONValue === 'function' &&
typeof obj.typeName === 'function' &&
hasOwn(customTypes, obj.typeName())
);
EJSON._getTypes = function () {
return customTypes;
};
EJSON._getTypes = () => customTypes;
EJSON._getConverters = function () {
return builtinConverters;
EJSON._getConverters = () => builtinConverters;
// Either return the JSON-compatible version of the argument, or undefined (if
// the item isn't itself replaceable, but maybe some fields in it are)
const toJSONValueHelper = item => {
for (let i = 0; i < builtinConverters.length; i++) {
const converter = builtinConverters[i];
if (converter.matchObject(item)) {
return converter.toJSONValue(item);
}
}
return undefined;
};
// for both arrays and objects, in-place modification.
var adjustTypesToJSONValue =
EJSON._adjustTypesToJSONValue = function (obj) {
const adjustTypesToJSONValue = obj => {
// Is it an atom that we need to adjust?
if (obj === null)
if (obj === null) {
return null;
var maybeChanged = toJSONValueHelper(obj);
if (maybeChanged !== undefined)
}
const maybeChanged = toJSONValueHelper(obj);
if (maybeChanged !== undefined) {
return maybeChanged;
}
// Other atoms are unchanged.
if (typeof obj !== 'object')
if (typeof obj !== 'object') {
return obj;
}
// Iterate over array or object structure.
_.each(obj, function (value, key) {
Object.keys(obj).forEach(key => {
const value = obj[key];
if (typeof value !== 'object' && value !== undefined &&
!isInfOrNan(value))
!isInfOrNan(value)) {
return; // continue
}
var changed = toJSONValueHelper(value);
const changed = toJSONValueHelper(value);
if (changed) {
obj[key] = changed;
return; // on to the next key
@@ -243,53 +280,70 @@ EJSON._adjustTypesToJSONValue = function (obj) {
return obj;
};
// Either return the JSON-compatible version of the argument, or undefined (if
// the item isn't itself replaceable, but maybe some fields in it are)
var toJSONValueHelper = function (item) {
for (var i = 0; i < builtinConverters.length; i++) {
var converter = builtinConverters[i];
if (converter.matchObject(item)) {
return converter.toJSONValue(item);
}
}
return undefined;
};
EJSON._adjustTypesToJSONValue = adjustTypesToJSONValue;
/**
* @summary Serialize an EJSON-compatible value into its plain JSON representation.
* @summary Serialize an EJSON-compatible value into its plain JSON
* representation.
* @locus Anywhere
* @param {EJSON} val A value to serialize to plain JSON.
*/
EJSON.toJSONValue = function (item) {
var changed = toJSONValueHelper(item);
if (changed !== undefined)
EJSON.toJSONValue = item => {
const changed = toJSONValueHelper(item);
if (changed !== undefined) {
return changed;
if (typeof item === 'object') {
item = EJSON.clone(item);
adjustTypesToJSONValue(item);
}
return item;
let newItem = item;
if (typeof item === 'object') {
newItem = EJSON.clone(item);
adjustTypesToJSONValue(newItem);
}
return newItem;
};
// Either return the argument changed to have the non-json
// rep of itself (the Object version) or the argument itself.
// DOES NOT RECURSE. For actually getting the fully-changed value, use
// EJSON.fromJSONValue
const fromJSONValueHelper = value => {
if (typeof value === 'object' && value !== null) {
const keys = Object.keys(value);
if (keys.length <= 2
&& keys.every(k => typeof k === 'string' && k.substr(0, 1) === '$')) {
for (let i = 0; i < builtinConverters.length; i++) {
const converter = builtinConverters[i];
if (converter.matchJSONValue(value)) {
return converter.fromJSONValue(value);
}
}
}
}
return value;
};
// for both arrays and objects. Tries its best to just
// use the object you hand it, but may return something
// different if the object you hand it itself needs changing.
//
var adjustTypesFromJSONValue =
EJSON._adjustTypesFromJSONValue = function (obj) {
if (obj === null)
const adjustTypesFromJSONValue = obj => {
if (obj === null) {
return null;
var maybeChanged = fromJSONValueHelper(obj);
if (maybeChanged !== obj)
}
const maybeChanged = fromJSONValueHelper(obj);
if (maybeChanged !== obj) {
return maybeChanged;
}
// Other atoms are unchanged.
if (typeof obj !== 'object')
if (typeof obj !== 'object') {
return obj;
}
_.each(obj, function (value, key) {
Object.keys(obj).forEach(key => {
const value = obj[key];
if (typeof value === 'object') {
var changed = fromJSONValueHelper(value);
const changed = fromJSONValueHelper(value);
if (value !== changed) {
obj[key] = changed;
return;
@@ -302,171 +356,186 @@ EJSON._adjustTypesFromJSONValue = function (obj) {
return obj;
};
// Either return the argument changed to have the non-json
// rep of itself (the Object version) or the argument itself.
// DOES NOT RECURSE. For actually getting the fully-changed value, use
// EJSON.fromJSONValue
var fromJSONValueHelper = function (value) {
if (typeof value === 'object' && value !== null) {
if (_.size(value) <= 2
&& _.all(value, function (v, k) {
return typeof k === 'string' && k.substr(0, 1) === '$';
})) {
for (var i = 0; i < builtinConverters.length; i++) {
var converter = builtinConverters[i];
if (converter.matchJSONValue(value)) {
return converter.fromJSONValue(value);
}
}
}
}
return value;
};
EJSON._adjustTypesFromJSONValue = adjustTypesFromJSONValue;
/**
* @summary Deserialize an EJSON value from its plain JSON representation.
* @locus Anywhere
* @param {JSONCompatible} val A value to deserialize into EJSON.
*/
EJSON.fromJSONValue = function (item) {
var changed = fromJSONValueHelper(item);
EJSON.fromJSONValue = item => {
let changed = fromJSONValueHelper(item);
if (changed === item && typeof item === 'object') {
item = EJSON.clone(item);
adjustTypesFromJSONValue(item);
return item;
} else {
return changed;
changed = EJSON.clone(item);
adjustTypesFromJSONValue(changed);
}
return changed;
};
/**
* @summary Serialize a value to a string.
For EJSON values, the serialization fully represents the value. For non-EJSON values, serializes the same way as `JSON.stringify`.
* @summary Serialize a value to a string. For EJSON values, the serialization
* fully represents the value. For non-EJSON values, serializes the
* same way as `JSON.stringify`.
* @locus Anywhere
* @param {EJSON} val A value to stringify.
* @param {Object} [options]
* @param {Boolean | Integer | String} options.indent Indents objects and arrays for easy readability. When `true`, indents by 2 spaces; when an integer, indents by that number of spaces; and when a string, uses the string as the indentation pattern.
* @param {Boolean} options.canonical When `true`, stringifies keys in an object in sorted order.
* @param {Boolean | Integer | String} options.indent Indents objects and
* arrays for easy readability. When `true`, indents by 2 spaces; when an
* integer, indents by that number of spaces; and when a string, uses the
* string as the indentation pattern.
* @param {Boolean} options.canonical When `true`, stringifies keys in an
* object in sorted order.
*/
EJSON.stringify = function (item, options) {
var json = EJSON.toJSONValue(item);
EJSON.stringify = (item, options) => {
let serialized;
const json = EJSON.toJSONValue(item);
if (options && (options.canonical || options.indent)) {
return EJSON._canonicalStringify(json, options);
import canonicalStringify from './stringify';
serialized = canonicalStringify(json, options);
} else {
return JSON.stringify(json);
serialized = JSON.stringify(json);
}
return serialized;
};
/**
* @summary Parse a string into an EJSON value. Throws an error if the string is not valid EJSON.
* @summary Parse a string into an EJSON value. Throws an error if the string
* is not valid EJSON.
* @locus Anywhere
* @param {String} str A string to parse into an EJSON value.
*/
EJSON.parse = function (item) {
if (typeof item !== 'string')
throw new Error("EJSON.parse argument should be a string");
EJSON.parse = item => {
if (typeof item !== 'string') {
throw new Error('EJSON.parse argument should be a string');
}
return EJSON.fromJSONValue(JSON.parse(item));
};
/**
* @summary Returns true if `x` is a buffer of binary data, as returned from [`EJSON.newBinary`](#ejson_new_binary).
* @summary Returns true if `x` is a buffer of binary data, as returned from
* [`EJSON.newBinary`](#ejson_new_binary).
* @param {Object} x The variable to check.
* @locus Anywhere
*/
EJSON.isBinary = function (obj) {
EJSON.isBinary = obj => {
return !!((typeof Uint8Array !== 'undefined' && obj instanceof Uint8Array) ||
(obj && obj.$Uint8ArrayPolyfill));
};
/**
* @summary Return true if `a` and `b` are equal to each other. Return false otherwise. Uses the `equals` method on `a` if present, otherwise performs a deep comparison.
* @summary Return true if `a` and `b` are equal to each other. Return false
* otherwise. Uses the `equals` method on `a` if present, otherwise
* performs a deep comparison.
* @locus Anywhere
* @param {EJSON} a
* @param {EJSON} b
* @param {Object} [options]
* @param {Boolean} options.keyOrderSensitive Compare in key sensitive order, if supported by the JavaScript implementation. For example, `{a: 1, b: 2}` is equal to `{b: 2, a: 1}` only when `keyOrderSensitive` is `false`. The default is `false`.
* @param {Boolean} options.keyOrderSensitive Compare in key sensitive order,
* if supported by the JavaScript implementation. For example, `{a: 1, b: 2}`
* is equal to `{b: 2, a: 1}` only when `keyOrderSensitive` is `false`. The
* default is `false`.
*/
EJSON.equals = function (a, b, options) {
var i;
var keyOrderSensitive = !!(options && options.keyOrderSensitive);
if (a === b)
EJSON.equals = (a, b, options) => {
let i;
const keyOrderSensitive = !!(options && options.keyOrderSensitive);
if (a === b) {
return true;
if (_.isNaN(a) && _.isNaN(b))
return true; // This differs from the IEEE spec for NaN equality, b/c we don't want
// anything ever with a NaN to be poisoned from becoming equal to anything.
if (!a || !b) // if either one is falsy, they'd have to be === to be equal
}
// This differs from the IEEE spec for NaN equality, b/c we don't want
// anything ever with a NaN to be poisoned from becoming equal to anything.
if (Number.isNaN(a) && Number.isNaN(b)) {
return true;
}
// if either one is falsy, they'd have to be === to be equal
if (!a || !b) {
return false;
if (!(typeof a === 'object' && typeof b === 'object'))
}
if (!(typeof a === 'object' && typeof b === 'object')) {
return false;
if (a instanceof Date && b instanceof Date)
}
if (a instanceof Date && b instanceof Date) {
return a.valueOf() === b.valueOf();
}
if (EJSON.isBinary(a) && EJSON.isBinary(b)) {
if (a.length !== b.length)
if (a.length !== b.length) {
return false;
}
for (i = 0; i < a.length; i++) {
if (a[i] !== b[i])
if (a[i] !== b[i]) {
return false;
}
}
return true;
}
if (typeof (a.equals) === 'function')
if (typeof (a.equals) === 'function') {
return a.equals(b, options);
if (typeof (b.equals) === 'function')
}
if (typeof (b.equals) === 'function') {
return b.equals(a, options);
}
if (a instanceof Array) {
if (!(b instanceof Array))
if (!(b instanceof Array)) {
return false;
if (a.length !== b.length)
}
if (a.length !== b.length) {
return false;
}
for (i = 0; i < a.length; i++) {
if (!EJSON.equals(a[i], b[i], options))
if (!EJSON.equals(a[i], b[i], options)) {
return false;
}
}
return true;
}
// fallback for custom types that don't implement their own equals
switch (EJSON._isCustomType(a) + EJSON._isCustomType(b)) {
case 1: return false;
case 2: return EJSON.equals(EJSON.toJSONValue(a), EJSON.toJSONValue(b));
default: // Do nothing
}
// fall back to structural equality of objects
var ret;
let ret;
const aKeys = Object.keys(a);
const bKeys = Object.keys(b);
if (keyOrderSensitive) {
var bKeys = [];
_.each(b, function (val, x) {
bKeys.push(x);
});
i = 0;
ret = _.all(a, function (val, x) {
ret = aKeys.every(key => {
if (i >= bKeys.length) {
return false;
}
if (x !== bKeys[i]) {
if (key !== bKeys[i]) {
return false;
}
if (!EJSON.equals(val, b[bKeys[i]], options)) {
if (!EJSON.equals(a[key], b[bKeys[i]], options)) {
return false;
}
i++;
return true;
});
return ret && i === bKeys.length;
} else {
i = 0;
ret = _.all(a, function (val, key) {
if (!_.has(b, key)) {
ret = aKeys.every(key => {
if (!hasOwn(b, key)) {
return false;
}
if (!EJSON.equals(val, b[key], options)) {
if (!EJSON.equals(a[key], b[key], options)) {
return false;
}
i++;
return true;
});
return ret && _.size(b) === i;
}
return ret && i === bKeys.length;
};
/**
@@ -474,46 +543,56 @@ EJSON.equals = function (a, b, options) {
* @locus Anywhere
* @param {EJSON} val A value to copy.
*/
EJSON.clone = function (v) {
var ret;
if (typeof v !== "object")
EJSON.clone = v => {
let ret;
if (typeof v !== 'object') {
return v;
if (v === null)
}
if (v === null) {
return null; // null has typeof "object"
if (v instanceof Date)
}
if (v instanceof Date) {
return new Date(v.getTime());
}
// RegExps are not really EJSON elements (eg we don't define a serialization
// for them), but they're immutable anyway, so we can support them in clone.
if (v instanceof RegExp)
if (v instanceof RegExp) {
return v;
}
if (EJSON.isBinary(v)) {
ret = EJSON.newBinary(v.length);
for (var i = 0; i < v.length; i++) {
for (let i = 0; i < v.length; i++) {
ret[i] = v[i];
}
return ret;
}
// XXX: Use something better than underscore's isArray
if (_.isArray(v) || _.isArguments(v)) {
// For some reason, _.map doesn't work in this context on Opera (weird test
// failures).
ret = [];
for (i = 0; i < v.length; i++)
ret[i] = EJSON.clone(v[i]);
return ret;
if (Array.isArray(v)) {
return v.map(value => EJSON.clone(value));
}
if (isArguments(v)) {
return Array.from(v).map(value => EJSON.clone(value));
}
// handle general user-defined typed Objects if they have a clone method
if (typeof v.clone === 'function') {
return v.clone();
}
// handle other custom types
if (EJSON._isCustomType(v)) {
return EJSON.fromJSONValue(EJSON.clone(EJSON.toJSONValue(v)), true);
}
// handle other objects
ret = {};
_.each(v, function (value, key) {
ret[key] = EJSON.clone(value);
Object.keys(v).forEach((key) => {
ret[key] = EJSON.clone(v[key]);
});
return ret;
};
@@ -529,3 +608,5 @@ EJSON.clone = function (v) {
// then 'base64' would have to use EJSON.newBinary, and 'ejson' would
// also have to use 'base64'.)
EJSON.newBinary = Base64.newBinary;
export { EJSON };

View File

@@ -1,242 +0,0 @@
Tinytest.add("ejson - keyOrderSensitive", function (test) {
test.isTrue(EJSON.equals({
a: {b: 1, c: 2},
d: {e: 3, f: 4}
}, {
d: {f: 4, e: 3},
a: {c: 2, b: 1}
}));
test.isFalse(EJSON.equals({
a: {b: 1, c: 2},
d: {e: 3, f: 4}
}, {
d: {f: 4, e: 3},
a: {c: 2, b: 1}
}, {keyOrderSensitive: true}));
test.isFalse(EJSON.equals({
a: {b: 1, c: 2},
d: {e: 3, f: 4}
}, {
a: {c: 2, b: 1},
d: {f: 4, e: 3}
}, {keyOrderSensitive: true}));
test.isFalse(EJSON.equals({a: {}}, {a: {b:2}}, {keyOrderSensitive: true}));
test.isFalse(EJSON.equals({a: {b:2}}, {a: {}}, {keyOrderSensitive: true}));
});
Tinytest.add("ejson - nesting and literal", function (test) {
var d = new Date;
var obj = {$date: d};
var eObj = EJSON.toJSONValue(obj);
var roundTrip = EJSON.fromJSONValue(eObj);
test.equal(obj, roundTrip);
});
Tinytest.add("ejson - some equality tests", function (test) {
test.isTrue(EJSON.equals({a: 1, b: 2, c: 3}, {a: 1, c: 3, b: 2}));
test.isFalse(EJSON.equals({a: 1, b: 2}, {a: 1, c: 3, b: 2}));
test.isFalse(EJSON.equals({a: 1, b: 2, c: 3}, {a: 1, b: 2}));
test.isFalse(EJSON.equals({a: 1, b: 2, c: 3}, {a: 1, c: 3, b: 4}));
test.isFalse(EJSON.equals({a: {}}, {a: {b:2}}));
test.isFalse(EJSON.equals({a: {b:2}}, {a: {}}));
});
Tinytest.add("ejson - equality and falsiness", function (test) {
test.isTrue(EJSON.equals(null, null));
test.isTrue(EJSON.equals(undefined, undefined));
test.isFalse(EJSON.equals({foo: "foo"}, null));
test.isFalse(EJSON.equals(null, {foo: "foo"}));
test.isFalse(EJSON.equals(undefined, {foo: "foo"}));
test.isFalse(EJSON.equals({foo: "foo"}, undefined));
});
Tinytest.add("ejson - NaN and Inf", function (test) {
test.equal(EJSON.parse("{\"$InfNaN\": 1}"), Infinity);
test.equal(EJSON.parse("{\"$InfNaN\": -1}"), -Infinity);
test.isTrue(_.isNaN(EJSON.parse("{\"$InfNaN\": 0}")));
test.equal(EJSON.parse(EJSON.stringify(Infinity)), Infinity);
test.equal(EJSON.parse(EJSON.stringify(-Infinity)), -Infinity);
test.isTrue(_.isNaN(EJSON.parse(EJSON.stringify(NaN))));
test.isTrue(EJSON.equals(NaN, NaN));
test.isTrue(EJSON.equals(Infinity, Infinity));
test.isTrue(EJSON.equals(-Infinity, -Infinity));
test.isFalse(EJSON.equals(Infinity, -Infinity));
test.isFalse(EJSON.equals(Infinity, NaN));
test.isFalse(EJSON.equals(Infinity, 0));
test.isFalse(EJSON.equals(NaN, 0));
test.isTrue(EJSON.equals(
EJSON.parse("{\"a\": {\"$InfNaN\": 1}}"),
{a: Infinity}
));
test.isTrue(EJSON.equals(
EJSON.parse("{\"a\": {\"$InfNaN\": 0}}"),
{a: NaN}
));
});
Tinytest.add("ejson - clone", function (test) {
var cloneTest = function (x, identical) {
var y = EJSON.clone(x);
test.isTrue(EJSON.equals(x, y));
test.equal(x === y, !!identical);
};
cloneTest(null, true);
cloneTest(undefined, true);
cloneTest(42, true);
cloneTest("asdf", true);
cloneTest([1, 2, 3]);
cloneTest([1, "fasdf", {foo: 42}]);
cloneTest({x: 42, y: "asdf"});
var testCloneArgs = function (/*arguments*/) {
var clonedArgs = EJSON.clone(arguments);
test.equal(clonedArgs, [1, 2, "foo", [4]]);
};
testCloneArgs(1, 2, "foo", [4]);
});
Tinytest.add("ejson - stringify", function (test) {
test.equal(EJSON.stringify(null), "null");
test.equal(EJSON.stringify(true), "true");
test.equal(EJSON.stringify(false), "false");
test.equal(EJSON.stringify(123), "123");
test.equal(EJSON.stringify("abc"), "\"abc\"");
test.equal(EJSON.stringify([1, 2, 3]),
"[1,2,3]"
);
test.equal(EJSON.stringify([1, 2, 3], {indent: true}),
"[\n 1,\n 2,\n 3\n]"
);
test.equal(EJSON.stringify([1, 2, 3], {canonical: false}),
"[1,2,3]"
);
test.equal(EJSON.stringify([1, 2, 3], {indent: true, canonical: false}),
"[\n 1,\n 2,\n 3\n]"
);
test.equal(EJSON.stringify([1, 2, 3], {indent: 4}),
"[\n 1,\n 2,\n 3\n]"
);
test.equal(EJSON.stringify([1, 2, 3], {indent: '--'}),
"[\n--1,\n--2,\n--3\n]"
);
test.equal(
EJSON.stringify(
{b: [2, {d: 4, c: 3}], a: 1},
{canonical: true}
),
"{\"a\":1,\"b\":[2,{\"c\":3,\"d\":4}]}"
);
test.equal(
EJSON.stringify(
{b: [2, {d: 4, c: 3}], a: 1},
{
indent: true,
canonical: true
}
),
"{\n" +
" \"a\": 1,\n" +
" \"b\": [\n" +
" 2,\n" +
" {\n" +
" \"c\": 3,\n" +
" \"d\": 4\n" +
" }\n" +
" ]\n" +
"}"
);
test.equal(
EJSON.stringify(
{b: [2, {d: 4, c: 3}], a: 1},
{canonical: false}
),
"{\"b\":[2,{\"d\":4,\"c\":3}],\"a\":1}"
);
test.equal(
EJSON.stringify(
{b: [2, {d: 4, c: 3}], a: 1},
{indent: true, canonical: false}
),
"{\n" +
" \"b\": [\n" +
" 2,\n" +
" {\n" +
" \"d\": 4,\n" +
" \"c\": 3\n" +
" }\n" +
" ],\n" +
" \"a\": 1\n" +
"}"
);
});
Tinytest.add("ejson - parse", function (test) {
test.equal(EJSON.parse("[1,2,3]"), [1,2,3]);
test.throws(
function () { EJSON.parse(null) },
/argument should be a string/
);
});
Tinytest.add("ejson - regexp", function (test) {
test.equal(EJSON.stringify(/foo/gi), "{\"$regexp\":\"foo\",\"$flags\":\"gi\"}");
var d = new RegExp("foo", "gi");
var obj = { $regexp: "foo", $flags: "gi" };
var eObj = EJSON.toJSONValue(obj);
var roundTrip = EJSON.fromJSONValue(eObj);
test.equal(obj, roundTrip);
});
Tinytest.add("ejson - custom types", function (test) {
var testSameConstructors = function (obj, compareWith) {
test.equal(obj.constructor, compareWith.constructor);
if (typeof obj === 'object') {
_.each(obj, function(value, key) {
testSameConstructors(value, compareWith[key]);
});
}
}
var testReallyEqual = function (obj, compareWith) {
test.equal(obj, compareWith);
testSameConstructors(obj, compareWith);
}
var testRoundTrip = function (obj) {
var str = EJSON.stringify(obj);
var roundTrip = EJSON.parse(str);
testReallyEqual(obj, roundTrip);
}
var testCustomObject = function (obj) {
testRoundTrip(obj);
testReallyEqual(obj, EJSON.clone(obj));
}
var a = new EJSONTest.Address('Montreal', 'Quebec');
testCustomObject( {address: a} );
// Test that difference is detected even if they
// have similar toJSONValue results:
var nakedA = {city: 'Montreal', state: 'Quebec'};
test.notEqual(nakedA, a);
test.notEqual(a, nakedA);
var holder = new EJSONTest.Holder(nakedA);
test.equal(holder.toJSONValue(), a.toJSONValue()); // sanity check
test.notEqual(holder, a);
test.notEqual(a, holder);
var d = new Date;
var obj = new EJSONTest.Person("John Doe", d, a);
testCustomObject( obj );
// Test clone is deep:
var clone = EJSON.clone(obj);
clone.address.city = 'Sherbrooke';
test.notEqual( obj, clone );
});

View File

@@ -0,0 +1,278 @@
import { EJSON } from './ejson';
import EJSONTest from './custom_models_for_tests';
// keyOrderSensitive: objects whose keys appear in different orders are
// equal under the default options, but unequal when
// {keyOrderSensitive: true} is passed.
Tinytest.add('ejson - keyOrderSensitive', test => {
  const ordered = {
    a: {b: 1, c: 2},
    d: {e: 3, f: 4},
  };
  const reordered = {
    d: {f: 4, e: 3},
    a: {c: 2, b: 1},
  };
  // Same contents, different key order: equal unless order-sensitive.
  test.isTrue(EJSON.equals(ordered, reordered));
  test.isFalse(EJSON.equals(ordered, reordered, {keyOrderSensitive: true}));
  // Even when top-level order matches, nested key order still differs.
  test.isFalse(EJSON.equals(ordered, {
    a: {c: 2, b: 1},
    d: {f: 4, e: 3},
  }, {keyOrderSensitive: true}));
  // A missing key is never equal to a present one, in either direction.
  test.isFalse(EJSON.equals({a: {}}, {a: {b: 2}}, {keyOrderSensitive: true}));
  test.isFalse(EJSON.equals({a: {b: 2}}, {a: {}}, {keyOrderSensitive: true}));
});
// A literal {$date: ...} object round-trips unchanged through
// toJSONValue/fromJSONValue.
Tinytest.add('ejson - nesting and literal', test => {
  const now = new Date();
  const original = {$date: now};
  const serialized = EJSON.toJSONValue(original);
  test.equal(original, EJSON.fromJSONValue(serialized));
});
Tinytest.add('ejson - some equality tests', test => {
  // Key order does not matter for default equality.
  test.isTrue(EJSON.equals({a: 1, b: 2, c: 3}, {a: 1, c: 3, b: 2}));
  // Missing keys, extra keys, or differing values all break equality.
  const unequalPairs = [
    [{a: 1, b: 2}, {a: 1, c: 3, b: 2}],
    [{a: 1, b: 2, c: 3}, {a: 1, b: 2}],
    [{a: 1, b: 2, c: 3}, {a: 1, c: 3, b: 4}],
    [{a: {}}, {a: {b: 2}}],
    [{a: {b: 2}}, {a: {}}],
  ];
  unequalPairs.forEach(([x, y]) => test.isFalse(EJSON.equals(x, y)));
});
Tinytest.add('ejson - equality and falsiness', test => {
  // null and undefined are each equal to themselves...
  test.isTrue(EJSON.equals(null, null));
  test.isTrue(EJSON.equals(undefined, undefined));
  // ...but never to an object, in either argument position.
  const unequalPairs = [
    [{foo: 'foo'}, null],
    [null, {foo: 'foo'}],
    [undefined, {foo: 'foo'}],
    [{foo: 'foo'}, undefined],
  ];
  unequalPairs.forEach(([x, y]) => test.isFalse(EJSON.equals(x, y)));
});
Tinytest.add('ejson - NaN and Inf', test => {
  // The $InfNaN literal encodes Infinity as 1, -Infinity as -1, NaN as 0.
  test.equal(EJSON.parse('{"$InfNaN": 1}'), Infinity);
  test.equal(EJSON.parse('{"$InfNaN": -1}'), -Infinity);
  test.isTrue(Number.isNaN(EJSON.parse('{"$InfNaN": 0}')));
  // Non-finite numbers survive a stringify/parse round trip.
  test.equal(EJSON.parse(EJSON.stringify(Infinity)), Infinity);
  test.equal(EJSON.parse(EJSON.stringify(-Infinity)), -Infinity);
  test.isTrue(Number.isNaN(EJSON.parse(EJSON.stringify(NaN))));
  // Unlike IEEE 754, EJSON treats NaN as equal to itself -- but never
  // equal to anything else.
  test.isTrue(EJSON.equals(NaN, NaN));
  test.isTrue(EJSON.equals(Infinity, Infinity));
  test.isTrue(EJSON.equals(-Infinity, -Infinity));
  const unequalPairs = [
    [Infinity, -Infinity],
    [Infinity, NaN],
    [Infinity, 0],
    [NaN, 0],
  ];
  unequalPairs.forEach(([x, y]) => test.isFalse(EJSON.equals(x, y)));
  // The encoding also applies to nested values.
  test.isTrue(EJSON.equals(
    EJSON.parse('{"a": {"$InfNaN": 1}}'),
    {a: Infinity}
  ));
  test.isTrue(EJSON.equals(
    EJSON.parse('{"a": {"$InfNaN": 0}}'),
    {a: NaN}
  ));
});
Tinytest.add('ejson - clone', test => {
  // EJSON.clone returns primitives (and null/undefined) by reference, but
  // must produce a structurally-equal *copy* of arrays and objects.
  const checkClone = (value, expectSameReference) => {
    const copy = EJSON.clone(value);
    test.isTrue(EJSON.equals(value, copy));
    test.equal(value === copy, !!expectSameReference);
  };
  checkClone(null, true);
  checkClone(undefined, true);
  checkClone(42, true);
  checkClone('asdf', true);
  checkClone([1, 2, 3]);
  checkClone([1, 'fasdf', {foo: 42}]);
  checkClone({x: 42, y: 'asdf'});
  // Cloning an `arguments` object yields a plain array.
  function checkCloneArgs(/* arguments */) {
    test.equal(EJSON.clone(arguments), [1, 2, 'foo', [4]]);
  }
  checkCloneArgs(1, 2, 'foo', [4]);
});
Tinytest.add('ejson - stringify', test => {
  // Primitives serialize exactly as JSON.stringify would.
  test.equal(EJSON.stringify(null), 'null');
  test.equal(EJSON.stringify(true), 'true');
  test.equal(EJSON.stringify(false), 'false');
  test.equal(EJSON.stringify(123), '123');
  test.equal(EJSON.stringify('abc'), '"abc"');
  // Array indentation: `indent: true` indents by two spaces, a number
  // indents by that many spaces, and a string is used verbatim as the
  // per-level indent unit. `canonical` has no visible effect on arrays.
  test.equal(EJSON.stringify([1, 2, 3]),
    '[1,2,3]'
  );
  test.equal(EJSON.stringify([1, 2, 3], {indent: true}),
    '[\n  1,\n  2,\n  3\n]'
  );
  test.equal(EJSON.stringify([1, 2, 3], {canonical: false}),
    '[1,2,3]'
  );
  test.equal(EJSON.stringify([1, 2, 3], {indent: true, canonical: false}),
    '[\n  1,\n  2,\n  3\n]'
  );
  test.equal(EJSON.stringify([1, 2, 3], {indent: 4}),
    '[\n    1,\n    2,\n    3\n]'
  );
  test.equal(EJSON.stringify([1, 2, 3], {indent: '--'}),
    '[\n--1,\n--2,\n--3\n]'
  );
  // `canonical: true` sorts object keys at every nesting level;
  // `canonical: false` preserves insertion order.
  const nested = () => ({b: [2, {d: 4, c: 3}], a: 1});
  test.equal(
    EJSON.stringify(nested(), {canonical: true}),
    '{"a":1,"b":[2,{"c":3,"d":4}]}'
  );
  test.equal(
    EJSON.stringify(nested(), {indent: true, canonical: true}),
    '{\n' +
    '  "a": 1,\n' +
    '  "b": [\n' +
    '    2,\n' +
    '    {\n' +
    '      "c": 3,\n' +
    '      "d": 4\n' +
    '    }\n' +
    '  ]\n' +
    '}'
  );
  test.equal(
    EJSON.stringify(nested(), {canonical: false}),
    '{"b":[2,{"d":4,"c":3}],"a":1}'
  );
  test.equal(
    EJSON.stringify(nested(), {indent: true, canonical: false}),
    '{\n' +
    '  "b": [\n' +
    '    2,\n' +
    '    {\n' +
    '      "d": 4,\n' +
    '      "c": 3\n' +
    '    }\n' +
    '  ],\n' +
    '  "a": 1\n' +
    '}'
  );
});
Tinytest.add('ejson - parse', test => {
  // Plain JSON parses normally; non-string input must throw.
  const parsed = EJSON.parse('[1,2,3]');
  test.equal(parsed, [1, 2, 3]);
  test.throws(
    () => EJSON.parse(null),
    /argument should be a string/
  );
});
// RegExp values stringify to a {$regexp, $flags} literal, and such a
// literal round-trips unchanged through toJSONValue/fromJSONValue.
Tinytest.add('ejson - regexp', test => {
  test.equal(
    EJSON.stringify(/foo/gi),
    '{"$regexp":"foo","$flags":"gi"}'
  );
  // Cleanup: the original declared `var d = new RegExp('foo', 'gi')` and
  // never used it; dropped, along with the var/double-quote style that
  // was inconsistent with the rest of this file.
  const obj = { $regexp: 'foo', $flags: 'gi' };
  const eObj = EJSON.toJSONValue(obj);
  const roundTrip = EJSON.fromJSONValue(eObj);
  test.equal(obj, roundTrip);
});
Tinytest.add('ejson - custom types', test => {
  // Compare constructors recursively so we notice when a custom type has
  // silently been replaced by a plain object with identical fields.
  const assertSameConstructors = (actual, expected) => {
    test.equal(actual.constructor, expected.constructor);
    if (typeof actual === 'object') {
      Object.keys(actual).forEach(key => {
        assertSameConstructors(actual[key], expected[key]);
      });
    }
  };
  // Structural equality plus constructor identity at every level.
  const assertReallyEqual = (actual, expected) => {
    test.equal(actual, expected);
    assertSameConstructors(actual, expected);
  };
  // A custom object must survive stringify/parse...
  const assertRoundTrip = (value) => {
    const roundTrip = EJSON.parse(EJSON.stringify(value));
    assertReallyEqual(value, roundTrip);
  };
  // ...and cloning, with its type intact.
  const assertCustomObject = (value) => {
    assertRoundTrip(value);
    assertReallyEqual(value, EJSON.clone(value));
  };
  const address = new EJSONTest.Address('Montreal', 'Quebec');
  assertCustomObject({address: address});
  // A plain object with the same fields must NOT compare equal to the
  // custom type, even though their toJSONValue results look alike.
  const nakedAddress = {city: 'Montreal', state: 'Quebec'};
  test.notEqual(nakedAddress, address);
  test.notEqual(address, nakedAddress);
  const holder = new EJSONTest.Holder(nakedAddress);
  test.equal(holder.toJSONValue(), address.toJSONValue()); // sanity check
  test.notEqual(holder, address);
  test.notEqual(address, holder);
  const person = new EJSONTest.Person('John Doe', new Date(), address);
  assertCustomObject(person);
  // Clones must be deep: mutating the clone leaves the original alone.
  const personClone = EJSON.clone(person);
  personClone.address.city = 'Sherbrooke';
  test.notEqual(person, personClone);
});
// Verify objects with a property named "length" can be handled by the EJSON
// API properly (see https://github.com/meteor/meteor/issues/5175). Such
// objects look "array-like", so naive code could misinterpret them.
Tinytest.add('ejson - handle objects with properties named "length"', test => {
  class Widget {
    constructor() {
      this.length = 10;
    }
  }
  const widget = new Widget();
  // toJSONValue/fromJSONValue must pass the object through unchanged
  // rather than treating it as an array.
  const toJsonWidget = EJSON.toJSONValue(widget);
  test.equal(widget, toJsonWidget);
  const fromJsonWidget = EJSON.fromJSONValue(widget);
  test.equal(widget, fromJsonWidget);
  const stringifiedWidget = EJSON.stringify(widget);
  test.equal(stringifiedWidget, '{"length":10}');
  const parsedWidget = EJSON.parse('{"length":10}');
  test.equal({ length: 10 }, parsedWidget);
  test.isFalse(EJSON.isBinary(widget));
  const widget2 = new Widget();
  // BUG FIX: the original called test.isTrue(widget, widget2), which only
  // asserts that `widget` is truthy -- widget2 was silently consumed as
  // the failure *message*. The intent is clearly an equality check.
  test.equal(widget, widget2);
  const clonedWidget = EJSON.clone(widget);
  test.equal(widget, clonedWidget);
});

View File

@@ -1,20 +1,16 @@
Package.describe({
summary: "Extended and Extensible JSON library",
version: '1.0.13'
summary: 'Extended and Extensible JSON library',
version: '1.0.14-beta152.8'
});
Package.onUse(function (api) {
api.use(['underscore', 'base64']);
Package.onUse(function onUse(api) {
api.use(['ecmascript', 'base64']);
api.mainModule('ejson.js');
api.export('EJSON');
api.export('EJSONTest', {testOnly: true});
api.addFiles('ejson.js', ['client', 'server']);
api.addFiles('stringify.js', ['client', 'server']);
});
Package.onTest(function (api) {
api.use('ejson', ['client', 'server']);
api.use(['tinytest', 'underscore']);
api.addFiles('custom_models_for_tests.js', ['client', 'server']);
api.addFiles('ejson_test.js', ['client', 'server']);
Package.onTest(function onTest(api) {
api.use(['ecmascript', 'tinytest']);
api.use('ejson');
api.mainModule('ejson_tests.js');
});

View File

@@ -11,20 +11,10 @@ function quote(string) {
return JSON.stringify(string);
}
var str = function (key, holder, singleIndent, outerIndent, canonical) {
// Produce a string from holder[key].
var i; // The loop counter.
var k; // The member key.
var v; // The member value.
var length;
var innerIndent = outerIndent;
var partial;
var value = holder[key];
const str = (key, holder, singleIndent, outerIndent, canonical) => {
const value = holder[key];
// What happens next depends on the value's type.
switch (typeof value) {
case 'string':
return quote(value);
@@ -41,78 +31,91 @@ var str = function (key, holder, singleIndent, outerIndent, canonical) {
if (!value) {
return 'null';
}
// Make an array to hold the partial results of stringifying this object value.
innerIndent = outerIndent + singleIndent;
partial = [];
// Make an array to hold the partial results of stringifying this object
// value.
const innerIndent = outerIndent + singleIndent;
const partial = [];
// Is the value an array?
if (_.isArray(value) || _.isArguments(value)) {
// The value is an array. Stringify every element. Use null as a placeholder
// for non-JSON values.
length = value.length;
for (i = 0; i < length; i += 1) {
partial[i] = str(i, value, singleIndent, innerIndent, canonical) || 'null';
if (Array.isArray(value) || ({}).hasOwnProperty.call(value, 'callee')) {
// The value is an array. Stringify every element. Use null as a
// placeholder for non-JSON values.
const length = value.length;
for (let i = 0; i < length; i += 1) {
partial[i] =
str(i, value, singleIndent, innerIndent, canonical) || 'null';
}
// Join all of the elements together, separated with commas, and wrap them in
// brackets.
// Join all of the elements together, separated with commas, and wrap
// them in brackets.
let v;
if (partial.length === 0) {
v = '[]';
} else if (innerIndent) {
v = '[\n' + innerIndent + partial.join(',\n' + innerIndent) + '\n' + outerIndent + ']';
v = '[\n' +
innerIndent +
partial.join(',\n' +
innerIndent) +
'\n' +
outerIndent +
']';
} else {
v = '[' + partial.join(',') + ']';
}
return v;
}
// Iterate through all of the keys in the object.
var keys = _.keys(value);
if (canonical)
let keys = Object.keys(value);
if (canonical) {
keys = keys.sort();
_.each(keys, function (k) {
}
keys.forEach(k => {
v = str(k, value, singleIndent, innerIndent, canonical);
if (v) {
partial.push(quote(k) + (innerIndent ? ': ' : ':') + v);
}
});
// Join all of the member texts together, separated with commas,
// and wrap them in braces.
if (partial.length === 0) {
v = '{}';
} else if (innerIndent) {
v = '{\n' + innerIndent + partial.join(',\n' + innerIndent) + '\n' + outerIndent + '}';
v = '{\n' +
innerIndent +
partial.join(',\n' +
innerIndent) +
'\n' +
outerIndent +
'}';
} else {
v = '{' + partial.join(',') + '}';
}
return v;
default: // Do nothing
}
}
};
// If the JSON object does not yet have a stringify method, give it one.
EJSON._canonicalStringify = function (value, options) {
const canonicalStringify = (value, options) => {
// Make a fake root object containing our value under the key of ''.
// Return the result of stringifying the value.
options = _.extend({
indent: "",
canonical: false
const allOptions = Object.assign({
indent: '',
canonical: false,
}, options);
if (options.indent === true) {
options.indent = " ";
} else if (typeof options.indent === 'number') {
var newIndent = "";
for (var i = 0; i < options.indent; i++) {
if (allOptions.indent === true) {
allOptions.indent = ' ';
} else if (typeof allOptions.indent === 'number') {
let newIndent = '';
for (let i = 0; i < allOptions.indent; i++) {
newIndent += ' ';
}
options.indent = newIndent;
allOptions.indent = newIndent;
}
return str('', {'': value}, options.indent, "", options.canonical);
return str('', {'': value}, allOptions.indent, '', allOptions.canonical);
};
export default canonicalStringify;

View File

@@ -7,32 +7,32 @@ Meteor.EnvironmentVariable = function () {
this.slot = nextSlot++;
};
_.extend(Meteor.EnvironmentVariable.prototype, {
get: function () {
return currentValues[this.slot];
},
var EVp = Meteor.EnvironmentVariable.prototype;
getOrNullIfOutsideFiber: function () {
return this.get();
},
EVp.get = function () {
return currentValues[this.slot];
};
withValue: function (value, func) {
var saved = currentValues[this.slot];
try {
currentValues[this.slot] = value;
var ret = func();
} finally {
currentValues[this.slot] = saved;
}
return ret;
EVp.getOrNullIfOutsideFiber = function () {
return this.get();
};
EVp.withValue = function (value, func) {
var saved = currentValues[this.slot];
try {
currentValues[this.slot] = value;
var ret = func();
} finally {
currentValues[this.slot] = saved;
}
});
return ret;
};
Meteor.bindEnvironment = function (func, onException, _this) {
// needed in order to be able to create closures inside func and
// have the closed variables not change back to their original
// values
var boundValues = _.clone(currentValues);
var boundValues = currentValues.slice();
if (!onException || typeof(onException) === 'string') {
var description = onException || "callback of async function";
@@ -48,7 +48,7 @@ Meteor.bindEnvironment = function (func, onException, _this) {
var savedValues = currentValues;
try {
currentValues = boundValues;
var ret = func.apply(_this, _.toArray(arguments));
var ret = func.apply(_this, arguments);
} catch (e) {
// note: callback-hook currently relies on the fact that if onException
// throws in the browser, the wrapped call throws.

View File

@@ -16,51 +16,51 @@ Meteor.EnvironmentVariable = function () {
this.slot = nextSlot++;
};
_.extend(Meteor.EnvironmentVariable.prototype, {
get: function () {
Meteor._nodeCodeMustBeInFiber();
var EVp = Meteor.EnvironmentVariable.prototype;
return Fiber.current._meteor_dynamics &&
Fiber.current._meteor_dynamics[this.slot];
},
EVp.get = function () {
Meteor._nodeCodeMustBeInFiber();
// Most Meteor code ought to run inside a fiber, and the
// _nodeCodeMustBeInFiber assertion helps you remember to include appropriate
// bindEnvironment calls (which will get you the *right value* for your
// environment variables, on the server).
//
// In some very special cases, it's more important to run Meteor code on the
// server in non-Fiber contexts rather than to strongly enforce the safeguard
// against forgetting to use bindEnvironment. For example, using `check` in
// some top-level constructs like connect handlers without needing unnecessary
// Fibers on every request is more important that possibly failing to find the
// correct argumentChecker. So this function is just like get(), but it
// returns null rather than throwing when called from outside a Fiber. (On the
// client, it is identical to get().)
getOrNullIfOutsideFiber: function () {
if (!Fiber.current)
return null;
return this.get();
},
return Fiber.current._meteor_dynamics &&
Fiber.current._meteor_dynamics[this.slot];
};
withValue: function (value, func) {
Meteor._nodeCodeMustBeInFiber();
// Most Meteor code ought to run inside a fiber, and the
// _nodeCodeMustBeInFiber assertion helps you remember to include appropriate
// bindEnvironment calls (which will get you the *right value* for your
// environment variables, on the server).
//
// In some very special cases, it's more important to run Meteor code on the
// server in non-Fiber contexts rather than to strongly enforce the safeguard
// against forgetting to use bindEnvironment. For example, using `check` in
// some top-level constructs like connect handlers without needing unnecessary
// Fibers on every request is more important that possibly failing to find the
// correct argumentChecker. So this function is just like get(), but it
// returns null rather than throwing when called from outside a Fiber. (On the
// client, it is identical to get().)
EVp.getOrNullIfOutsideFiber = function () {
if (!Fiber.current)
return null;
return this.get();
};
if (!Fiber.current._meteor_dynamics)
Fiber.current._meteor_dynamics = [];
var currentValues = Fiber.current._meteor_dynamics;
EVp.withValue = function (value, func) {
Meteor._nodeCodeMustBeInFiber();
var saved = currentValues[this.slot];
try {
currentValues[this.slot] = value;
var ret = func();
} finally {
currentValues[this.slot] = saved;
}
if (!Fiber.current._meteor_dynamics)
Fiber.current._meteor_dynamics = [];
var currentValues = Fiber.current._meteor_dynamics;
return ret;
var saved = currentValues[this.slot];
try {
currentValues[this.slot] = value;
var ret = func();
} finally {
currentValues[this.slot] = saved;
}
});
return ret;
};
// Meteor application code is always supposed to be run inside a
// fiber. bindEnvironment ensures that the function it wraps is run from
@@ -84,7 +84,8 @@ _.extend(Meteor.EnvironmentVariable.prototype, {
Meteor.bindEnvironment = function (func, onException, _this) {
Meteor._nodeCodeMustBeInFiber();
var boundValues = _.clone(Fiber.current._meteor_dynamics || []);
var dynamics = Fiber.current._meteor_dynamics;
var boundValues = dynamics ? dynamics.slice() : [];
if (!onException || typeof(onException) === 'string') {
var description = onException || "callback of async function";
@@ -99,14 +100,14 @@ Meteor.bindEnvironment = function (func, onException, _this) {
}
return function (/* arguments */) {
var args = _.toArray(arguments);
var args = Array.prototype.slice.call(arguments);
var runWithEnvironment = function () {
var savedValues = Fiber.current._meteor_dynamics;
try {
// Need to clone boundValues in case two fibers invoke this
// function at the same time
Fiber.current._meteor_dynamics = _.clone(boundValues);
Fiber.current._meteor_dynamics = boundValues.slice();
var ret = func.apply(_this, args);
} catch (e) {
// note: callback-hook currently relies on the fact that if onException

View File

@@ -54,121 +54,123 @@ Meteor._SynchronousQueue = function () {
self._draining = false;
};
_.extend(Meteor._SynchronousQueue.prototype, {
runTask: function (task) {
var self = this;
var SQp = Meteor._SynchronousQueue.prototype;
if (!self.safeToRunTask()) {
if (Fiber.current)
throw new Error("Can't runTask from another task in the same fiber");
else
throw new Error("Can only call runTask in a Fiber");
}
SQp.runTask = function (task) {
var self = this;
var fut = new Future;
var handle = {
task: Meteor.bindEnvironment(task, function (e) {
Meteor._debug("Exception from task:", e && e.stack || e);
throw e;
}),
future: fut,
name: task.name
};
self._taskHandles.push(handle);
self._scheduleRun();
// Yield. We'll get back here after the task is run (and will throw if the
// task throws).
fut.wait();
},
queueTask: function (task) {
var self = this;
self._taskHandles.push({
task: task,
name: task.name
});
self._scheduleRun();
// No need to block.
},
if (!self.safeToRunTask()) {
if (Fiber.current)
throw new Error("Can't runTask from another task in the same fiber");
else
throw new Error("Can only call runTask in a Fiber");
}
flush: function () {
var self = this;
self.runTask(function () {});
},
var fut = new Future;
var handle = {
task: Meteor.bindEnvironment(task, function (e) {
Meteor._debug("Exception from task:", e && e.stack || e);
throw e;
}),
future: fut,
name: task.name
};
self._taskHandles.push(handle);
self._scheduleRun();
// Yield. We'll get back here after the task is run (and will throw if the
// task throws).
fut.wait();
};
safeToRunTask: function () {
var self = this;
return Fiber.current && self._currentTaskFiber !== Fiber.current;
},
SQp.queueTask = function (task) {
var self = this;
self._taskHandles.push({
task: task,
name: task.name
});
self._scheduleRun();
// No need to block.
};
drain: function () {
var self = this;
if (self._draining)
return;
if (!self.safeToRunTask())
return;
self._draining = true;
while (! self._taskHandles.isEmpty()) {
self.flush();
}
self._draining = false;
},
SQp.flush = function () {
var self = this;
self.runTask(function () {});
};
_scheduleRun: function () {
var self = this;
// Already running or scheduled? Do nothing.
if (self._runningOrRunScheduled)
return;
SQp.safeToRunTask = function () {
var self = this;
return Fiber.current && self._currentTaskFiber !== Fiber.current;
};
self._runningOrRunScheduled = true;
setImmediate(function () {
Fiber(function () {
self._run();
}).run();
});
},
_run: function () {
var self = this;
SQp.drain = function () {
var self = this;
if (self._draining)
return;
if (!self.safeToRunTask())
return;
self._draining = true;
while (! self._taskHandles.isEmpty()) {
self.flush();
}
self._draining = false;
};
if (!self._runningOrRunScheduled)
throw new Error("expected to be _runningOrRunScheduled");
SQp._scheduleRun = function () {
var self = this;
// Already running or scheduled? Do nothing.
if (self._runningOrRunScheduled)
return;
if (self._taskHandles.isEmpty()) {
// Done running tasks! Don't immediately schedule another run, but
// allow future tasks to do so.
self._runningOrRunScheduled = false;
return;
}
var taskHandle = self._taskHandles.shift();
self._runningOrRunScheduled = true;
setImmediate(function () {
Fiber(function () {
self._run();
}).run();
});
};
// Run the task.
self._currentTaskFiber = Fiber.current;
var exception = undefined;
try {
taskHandle.task();
} catch (err) {
if (taskHandle.future) {
// We'll throw this exception through runTask.
exception = err;
} else {
Meteor._debug("Exception in queued task: " + (err.stack || err));
}
}
self._currentTaskFiber = undefined;
SQp._run = function () {
var self = this;
// Soon, run the next task, if there is any.
if (!self._runningOrRunScheduled)
throw new Error("expected to be _runningOrRunScheduled");
if (self._taskHandles.isEmpty()) {
// Done running tasks! Don't immediately schedule another run, but
// allow future tasks to do so.
self._runningOrRunScheduled = false;
self._scheduleRun();
return;
}
var taskHandle = self._taskHandles.shift();
// If this was queued with runTask, let the runTask call return (throwing if
// the task threw).
// Run the task.
self._currentTaskFiber = Fiber.current;
var exception = undefined;
try {
taskHandle.task();
} catch (err) {
if (taskHandle.future) {
if (exception)
taskHandle.future['throw'](exception);
else
taskHandle.future['return']();
// We'll throw this exception through runTask.
exception = err;
} else {
Meteor._debug("Exception in queued task: " + (err.stack || err));
}
}
});
self._currentTaskFiber = undefined;
// Soon, run the next task, if there is any.
self._runningOrRunScheduled = false;
self._scheduleRun();
// If this was queued with runTask, let the runTask call return (throwing if
// the task threw).
if (taskHandle.future) {
if (exception)
taskHandle.future['throw'](exception);
else
taskHandle.future['return']();
}
};
// Sleep. Mostly used for debugging (eg, inserting latency into server
// methods).

View File

@@ -9,7 +9,11 @@ Tinytest.add("fibers - synchronous queue", function (test) {
};
};
var outputIsUpTo = function (n) {
test.equal(output, _.range(1, n+1));
var range = [];
for (var i = 1; i <= n; ++i) {
range.push(i);
}
test.equal(output, range);
};
// Queue a task. It cannot run until we yield.

View File

@@ -17,70 +17,72 @@ Meteor._SynchronousQueue = function () {
self._runTimeout = null;
};
_.extend(Meteor._SynchronousQueue.prototype, {
runTask: function (task) {
var self = this;
if (!self.safeToRunTask())
throw new Error("Could not synchronously run a task from a running task");
self._tasks.push(task);
var tasks = self._tasks;
self._tasks = [];
self._running = true;
var SQp = Meteor._SynchronousQueue.prototype;
if (self._runTimeout) {
// Since we're going to drain the queue, we can forget about the timeout
// which tries to run it. (But if one of our tasks queues something else,
// the timeout will be correctly re-created.)
clearTimeout(self._runTimeout);
self._runTimeout = null;
}
SQp.runTask = function (task) {
var self = this;
if (!self.safeToRunTask())
throw new Error("Could not synchronously run a task from a running task");
self._tasks.push(task);
var tasks = self._tasks;
self._tasks = [];
self._running = true;
try {
while (!_.isEmpty(tasks)) {
var t = tasks.shift();
try {
t();
} catch (e) {
if (_.isEmpty(tasks)) {
// this was the last task, that is, the one we're calling runTask
// for.
throw e;
} else {
Meteor._debug("Exception in queued task: " + (e.stack || e));
}
}
}
} finally {
self._running = false;
}
},
queueTask: function (task) {
var self = this;
self._tasks.push(task);
// Intentionally not using Meteor.setTimeout, because it doesn't like running
// in stubs for now.
if (!self._runTimeout) {
self._runTimeout = setTimeout(_.bind(self.flush, self), 0);
}
},
flush: function () {
var self = this;
self.runTask(function () {});
},
drain: function () {
var self = this;
if (!self.safeToRunTask())
return;
while (!_.isEmpty(self._tasks)) {
self.flush();
}
},
safeToRunTask: function () {
var self = this;
return !self._running;
if (self._runTimeout) {
// Since we're going to drain the queue, we can forget about the timeout
// which tries to run it. (But if one of our tasks queues something else,
// the timeout will be correctly re-created.)
clearTimeout(self._runTimeout);
self._runTimeout = null;
}
});
try {
while (tasks.length > 0) {
var t = tasks.shift();
try {
t();
} catch (e) {
if (tasks.length === 0) {
// this was the last task, that is, the one we're calling runTask
// for.
throw e;
}
Meteor._debug("Exception in queued task: " + (e.stack || e));
}
}
} finally {
self._running = false;
}
};
SQp.queueTask = function (task) {
var self = this;
self._tasks.push(task);
// Intentionally not using Meteor.setTimeout, because it doesn't like running
// in stubs for now.
if (!self._runTimeout) {
self._runTimeout = setTimeout(function () {
return self.flush.apply(self, arguments);
}, 0);
}
};
SQp.flush = function () {
var self = this;
self.runTask(function () {});
};
SQp.drain = function () {
var self = this;
if (!self.safeToRunTask()) {
return;
}
while (self._tasks.length > 0) {
self.flush();
}
};
SQp.safeToRunTask = function () {
var self = this;
return !self._running;
};

View File

@@ -14,136 +14,136 @@ if (typeof __meteor_runtime_config__ === 'object' &&
// XXX find a better home for these? Ideally they would be _.get,
// _.ensure, _.delete..
_.extend(Meteor, {
// _get(a,b,c,d) returns a[b][c][d], or else undefined if a[b] or
// a[b][c] doesn't exist.
//
_get: function (obj /*, arguments */) {
for (var i = 1; i < arguments.length; i++) {
if (!(arguments[i] in obj))
return undefined;
obj = obj[arguments[i]];
}
return obj;
},
// _ensure(a,b,c,d) ensures that a[b][c][d] exists. If it does not,
// it is created and set to {}. Either way, it is returned.
//
_ensure: function (obj /*, arguments */) {
for (var i = 1; i < arguments.length; i++) {
var key = arguments[i];
if (!(key in obj))
obj[key] = {};
obj = obj[key];
}
return obj;
},
// _delete(a, b, c, d) deletes a[b][c][d], then a[b][c] unless it
// isn't empty, then a[b] unless it isn't empty.
//
_delete: function (obj /*, arguments */) {
var stack = [obj];
var leaf = true;
for (var i = 1; i < arguments.length - 1; i++) {
var key = arguments[i];
if (!(key in obj)) {
leaf = false;
break;
}
obj = obj[key];
if (typeof obj !== "object")
break;
stack.push(obj);
}
for (var i = stack.length - 1; i >= 0; i--) {
var key = arguments[i+1];
if (leaf)
leaf = false;
else
for (var other in stack[i][key])
return; // not empty -- we're done
delete stack[i][key];
}
},
// wrapAsync can wrap any function that takes some number of arguments that
// can't be undefined, followed by some optional arguments, where the callback
// is the last optional argument.
// e.g. fs.readFile(pathname, [callback]),
// fs.open(pathname, flags, [mode], [callback])
// For maximum effectiveness and least confusion, wrapAsync should be used on
// functions where the callback is the only argument of type Function.
/**
* @memberOf Meteor
* @summary Wrap a function that takes a callback function as its final parameter. The signature of the callback of the wrapped function should be `function(error, result){}`. On the server, the wrapped function can be used either synchronously (without passing a callback) or asynchronously (when a callback is passed). On the client, a callback is always required; errors will be logged if there is no callback. If a callback is provided, the environment captured when the original function was called will be restored in the callback.
* @locus Anywhere
* @param {Function} func A function that takes a callback as its final parameter
* @param {Object} [context] Optional `this` object against which the original function will be invoked
*/
wrapAsync: function (fn, context) {
return function (/* arguments */) {
var self = context || this;
var newArgs = _.toArray(arguments);
var callback;
for (var i = newArgs.length - 1; i >= 0; --i) {
var arg = newArgs[i];
var type = typeof arg;
if (type !== "undefined") {
if (type === "function") {
callback = arg;
}
break;
}
}
if (! callback) {
if (Meteor.isClient) {
callback = logErr;
} else {
var fut = new Future();
callback = fut.resolver();
}
++i; // Insert the callback just after arg.
}
newArgs[i] = Meteor.bindEnvironment(callback);
var result = fn.apply(self, newArgs);
return fut ? fut.wait() : result;
};
},
// Sets child's prototype to a new object whose prototype is parent's
// prototype. Used as:
// Meteor._inherits(ClassB, ClassA).
// _.extend(ClassB.prototype, { ... })
// Inspired by CoffeeScript's `extend` and Google Closure's `goog.inherits`.
_inherits: function (Child, Parent) {
// copy Parent static properties
for (var key in Parent) {
// make sure we only copy hasOwnProperty properties vs. prototype
// properties
if (_.has(Parent, key))
Child[key] = Parent[key];
}
// a middle member of prototype chain: takes the prototype from the Parent
var Middle = function () {
this.constructor = Child;
};
Middle.prototype = Parent.prototype;
Child.prototype = new Middle();
Child.__super__ = Parent.prototype;
return Child;
// _get(a,b,c,d) returns a[b][c][d], or else undefined if a[b] or
// a[b][c] doesn't exist.
//
Meteor._get = function (obj /*, arguments */) {
for (var i = 1; i < arguments.length; i++) {
if (!(arguments[i] in obj))
return undefined;
obj = obj[arguments[i]];
}
});
return obj;
};
// _ensure(a,b,c,d) ensures that a[b][c][d] exists. If it does not,
// it is created and set to {}. Either way, it is returned.
//
Meteor._ensure = function (obj /*, arguments */) {
for (var i = 1; i < arguments.length; i++) {
var key = arguments[i];
if (!(key in obj))
obj[key] = {};
obj = obj[key];
}
return obj;
};
// _delete(a, b, c, d) deletes a[b][c][d], then a[b][c] unless it
// isn't empty, then a[b] unless it isn't empty.
//
Meteor._delete = function (obj /*, arguments */) {
var stack = [obj];
var leaf = true;
for (var i = 1; i < arguments.length - 1; i++) {
var key = arguments[i];
if (!(key in obj)) {
leaf = false;
break;
}
obj = obj[key];
if (typeof obj !== "object")
break;
stack.push(obj);
}
for (var i = stack.length - 1; i >= 0; i--) {
var key = arguments[i+1];
if (leaf)
leaf = false;
else
for (var other in stack[i][key])
return; // not empty -- we're done
delete stack[i][key];
}
};
// wrapAsync can wrap any function that takes some number of arguments that
// can't be undefined, followed by some optional arguments, where the callback
// is the last optional argument.
// e.g. fs.readFile(pathname, [callback]),
// fs.open(pathname, flags, [mode], [callback])
// For maximum effectiveness and least confusion, wrapAsync should be used on
// functions where the callback is the only argument of type Function.
/**
* @memberOf Meteor
* @summary Wrap a function that takes a callback function as its final parameter. The signature of the callback of the wrapped function should be `function(error, result){}`. On the server, the wrapped function can be used either synchronously (without passing a callback) or asynchronously (when a callback is passed). On the client, a callback is always required; errors will be logged if there is no callback. If a callback is provided, the environment captured when the original function was called will be restored in the callback.
* @locus Anywhere
* @param {Function} func A function that takes a callback as its final parameter
* @param {Object} [context] Optional `this` object against which the original function will be invoked
*/
Meteor.wrapAsync = function (fn, context) {
return function (/* arguments */) {
var self = context || this;
var newArgs = Array.prototype.slice.call(arguments);
var callback;
for (var i = newArgs.length - 1; i >= 0; --i) {
var arg = newArgs[i];
var type = typeof arg;
if (type !== "undefined") {
if (type === "function") {
callback = arg;
}
break;
}
}
if (! callback) {
if (Meteor.isClient) {
callback = logErr;
} else {
var fut = new Future();
callback = fut.resolver();
}
++i; // Insert the callback just after arg.
}
newArgs[i] = Meteor.bindEnvironment(callback);
var result = fn.apply(self, newArgs);
return fut ? fut.wait() : result;
};
};
// Sets child's prototype to a new object whose prototype is parent's
// prototype. Used as:
// Meteor._inherits(ClassB, ClassA).
// _.extend(ClassB.prototype, { ... })
// Inspired by CoffeeScript's `extend` and Google Closure's `goog.inherits`.
var hasOwn = Object.prototype.hasOwnProperty;
Meteor._inherits = function (Child, Parent) {
// copy Parent static properties
for (var key in Parent) {
// make sure we only copy hasOwnProperty properties vs. prototype
// properties
if (hasOwn.call(Parent, key)) {
Child[key] = Parent[key];
}
}
// a middle member of prototype chain: takes the prototype from the Parent
var Middle = function () {
this.constructor = Child;
};
Middle.prototype = Parent.prototype;
Child.prototype = new Middle();
Child.__super__ = Parent.prototype;
return Child;
};
var warnedAboutWrapAsync = false;

View File

@@ -15,8 +15,6 @@ Npm.depends({
});
Package.onUse(function (api) {
api.use('underscore', ['client', 'server']);
api.use('isobuild:compiler-plugin@1.0.0');
api.export('Meteor');

View File

@@ -1,77 +1,86 @@
var withoutInvocation = function (f) {
function withoutInvocation(f) {
if (Package.ddp) {
var _CurrentMethodInvocation = Package.ddp.DDP._CurrentMethodInvocation;
if (_CurrentMethodInvocation.get() && _CurrentMethodInvocation.get().isSimulation)
var DDP = Package.ddp.DDP;
var CurrentInvocation =
DDP._CurrentMethodInvocation ||
// For backwards compatibility, as explained in this issue:
// https://github.com/meteor/meteor/issues/8947
DDP._CurrentInvocation;
var invocation = CurrentInvocation.get();
if (invocation && invocation.isSimulation) {
throw new Error("Can't set timers inside simulations");
return function () { _CurrentMethodInvocation.withValue(null, f); };
}
else
}
return function () {
CurrentInvocation.withValue(null, f);
};
} else {
return f;
};
var bindAndCatch = function (context, f) {
return Meteor.bindEnvironment(withoutInvocation(f), context);
};
_.extend(Meteor, {
// Meteor.setTimeout and Meteor.setInterval callbacks scheduled
// inside a server method are not part of the method invocation and
// should clear out the CurrentMethodInvocation environment variable.
/**
* @memberOf Meteor
* @summary Call a function in the future after waiting for a specified delay.
* @locus Anywhere
* @param {Function} func The function to run
* @param {Number} delay Number of milliseconds to wait before calling function
*/
setTimeout: function (f, duration) {
return setTimeout(bindAndCatch("setTimeout callback", f), duration);
},
/**
* @memberOf Meteor
* @summary Call a function repeatedly, with a time delay between calls.
* @locus Anywhere
* @param {Function} func The function to run
* @param {Number} delay Number of milliseconds to wait between each function call.
*/
setInterval: function (f, duration) {
return setInterval(bindAndCatch("setInterval callback", f), duration);
},
/**
* @memberOf Meteor
* @summary Cancel a repeating function call scheduled by `Meteor.setInterval`.
* @locus Anywhere
* @param {Object} id The handle returned by `Meteor.setInterval`
*/
clearInterval: function(x) {
return clearInterval(x);
},
/**
* @memberOf Meteor
* @summary Cancel a function call scheduled by `Meteor.setTimeout`.
* @locus Anywhere
* @param {Object} id The handle returned by `Meteor.setTimeout`
*/
clearTimeout: function(x) {
return clearTimeout(x);
},
// XXX consider making this guarantee ordering of defer'd callbacks, like
// Tracker.afterFlush or Node's nextTick (in practice). Then tests can do:
// callSomethingThatDefersSomeWork();
// Meteor.defer(expect(somethingThatValidatesThatTheWorkHappened));
/**
* @memberOf Meteor
* @summary Defer execution of a function to run asynchronously in the background (similar to `Meteor.setTimeout(func, 0)`).
* @locus Anywhere
* @param {Function} func The function to run
*/
defer: function (f) {
Meteor._setImmediate(bindAndCatch("defer callback", f));
}
});
}
function bindAndCatch(context, f) {
return Meteor.bindEnvironment(withoutInvocation(f), context);
}
// Meteor.setTimeout and Meteor.setInterval callbacks scheduled
// inside a server method are not part of the method invocation and
// should clear out the CurrentMethodInvocation environment variable.
/**
* @memberOf Meteor
* @summary Call a function in the future after waiting for a specified delay.
* @locus Anywhere
* @param {Function} func The function to run
* @param {Number} delay Number of milliseconds to wait before calling function
*/
Meteor.setTimeout = function (f, duration) {
return setTimeout(bindAndCatch("setTimeout callback", f), duration);
};
/**
* @memberOf Meteor
* @summary Call a function repeatedly, with a time delay between calls.
* @locus Anywhere
* @param {Function} func The function to run
* @param {Number} delay Number of milliseconds to wait between each function call.
*/
Meteor.setInterval = function (f, duration) {
return setInterval(bindAndCatch("setInterval callback", f), duration);
};
/**
* @memberOf Meteor
* @summary Cancel a repeating function call scheduled by `Meteor.setInterval`.
* @locus Anywhere
* @param {Object} id The handle returned by `Meteor.setInterval`
*/
Meteor.clearInterval = function(x) {
return clearInterval(x);
};
/**
* @memberOf Meteor
* @summary Cancel a function call scheduled by `Meteor.setTimeout`.
* @locus Anywhere
* @param {Object} id The handle returned by `Meteor.setTimeout`
*/
Meteor.clearTimeout = function(x) {
return clearTimeout(x);
};
// XXX consider making this guarantee ordering of defer'd callbacks, like
// Tracker.afterFlush or Node's nextTick (in practice). Then tests can do:
// callSomethingThatDefersSomeWork();
// Meteor.defer(expect(somethingThatValidatesThatTheWorkHappened));
/**
* @memberOf Meteor
 * @summary Defer execution of a function to run asynchronously in the background (similar to `Meteor.setTimeout(func, 0)`).
* @locus Anywhere
* @param {Function} func The function to run
*/
Meteor.defer = function (f) {
Meteor._setImmediate(bindAndCatch("defer callback", f));
};

View File

@@ -14,7 +14,7 @@ Meteor.absoluteUrl = function (path, options) {
path = undefined;
}
// merge options with defaults
options = _.extend({}, Meteor.absoluteUrl.defaultOptions, options || {});
options = Object.assign({}, Meteor.absoluteUrl.defaultOptions, options || {});
var url = options.rootUrl;
if (!url)

View File

@@ -1,6 +1,5 @@
Tinytest.add("absolute-url - basics", function(test) {
_.each(['', 'http://'], function (prefix) {
['', 'http://'].forEach(function (prefix) {
test.equal(Meteor.absoluteUrl({rootUrl: prefix + 'asdf.com'}),
'http://asdf.com/');

View File

@@ -161,6 +161,14 @@ Tinytest.add("minimongo - basics", function (test) {
test.equal(c.find({foo: {bam: 'baz'}}).count(), 0);
test.equal(c.find({foo: {bar: 'baz'}}).count(), 1);
// Regression test for #5301
c.remove({});
c.insert({ a: 'a', b: 'b' });
const noop = () => null;
test.equal(c.find({ a: noop }).count(), 1);
test.equal(c.find({ a: 'a', b: noop }).count(), 1);
test.equal(c.find({ c: noop }).count(), 1);
test.equal(c.find({ a: noop, c: 'c' }).count(), 0);
});
Tinytest.add("minimongo - error - no options", function (test) {

View File

@@ -142,10 +142,15 @@ var compileDocumentSelector = function (docSelector, matcher, options) {
var lookUpByIndex = makeLookupFunction(key);
var valueMatcher =
compileValueSelector(subSelector, matcher, options.isRoot);
docMatchers.push(function (doc) {
var branchValues = lookUpByIndex(doc);
return valueMatcher(branchValues);
});
// Don't add a matcher if subSelector is a function -- this is to match
// the behavior of Meteor on the server (inherited from the node mongodb
// driver), which is to ignore any part of a selector which is a function.
if (typeof subSelector !== 'function') {
docMatchers.push(function (doc) {
var branchValues = lookUpByIndex(doc);
return valueMatcher(branchValues);
});
}
}
});

View File

@@ -1,18 +0,0 @@
# mongo-dev-server
[Source code of released version](https://github.com/meteor/meteor/tree/master/packages/mongo-dev-server) | [Source code of development version](https://github.com/meteor/meteor/tree/devel/packages/mongo-dev-server)
***
When the `mongo-dev-server` package is included in a Meteor application, a
local development MongoDB server is started alongside the application. This
package is mostly used internally, as it is included by default with any
application that has a dependency on `mongo` (which is most Meteor
applications). In some cases however, people might be interested in
using the Meteor Tool without having to start a local development Mongo
instance (e.g. when using Meteor as a build system). If an application has no
dependency on `mongo`, the `mongo-dev-server` package will be removed
(since it is a direct dependency of the `mongo` package), and no local
development Mongo server will be started.
Note this is a `debugOnly` package, meaning it will not be included in any
production bundles.

View File

@@ -1,12 +0,0 @@
Package.describe({
debugOnly: true,
documentation: 'README.md',
name: 'mongo-dev-server',
summary: 'Start MongoDB alongside Meteor, in development mode.',
version: '1.1.0-beta.15',
});
Package.onUse(function (api) {
api.use('modules');
api.mainModule('server.js', 'server');
});

View File

@@ -1,3 +0,0 @@
if (process.env.MONGO_URL === 'no-mongo-server') {
Meteor._debug('Note: Restart Meteor to start the MongoDB server.');
}

View File

@@ -34,8 +34,7 @@ Package.onUse(function (api) {
'diff-sequence',
'mongo-id',
'check',
'ecmascript',
'mongo-dev-server',
'ecmascript'
]);
// Binary Heap data structure is used to optimize oplog observe driver

View File

@@ -35,13 +35,13 @@ this package must be used in conjunction with the `--production` flag to the
`meteor` tool to simulate production bundling and enable minification.
> **IMPORTANT:** Since this package is active in production mode, it is critical
> to remove this package prior to bundling or deploying the application.
> to only add this package temporarily. This can be easily accomplished using
> the `--extra-packages` option to `meteor`.
### Enabling
```sh
$ cd app/
$ meteor add bundle-visualizer
$ meteor --production
$ meteor --extra-packages bundle-visualizer --production
```
### Viewing
@@ -52,9 +52,9 @@ application.
### Disabling
> It is important to remove this package prior to bundling or deploying to
> production.
If you used `--extra-packages`, simply remove `bundle-visualizer` from the list
of included packages and run `meteor` as normal.
```sh
$ meteor remove bundle-visualizer
```
> If you've added `bundle-visualizer` permanently with `meteor add`, it is
> important to remove this package prior to bundling or deploying to
> production with `meteor remove bundle-visualizer`.

View File

@@ -244,20 +244,41 @@ var parseSimpleConstraint = function (constraintString) {
throw new Error("Non-empty string required");
}
var type, versionString;
var result = {};
var needToCheckValidity = true;
if (constraintString.charAt(0) === '=') {
type = "exactly";
versionString = constraintString.substr(1);
result.type = "exactly";
result.versionString = constraintString.slice(1);
} else {
type = "compatible-with";
versionString = constraintString;
result.type = "compatible-with";
if (constraintString.charAt(0) === "~") {
var semversion = PV.parse(
result.versionString = constraintString.slice(1)
).semver;
var range = new semver.Range("~" + semversion);
result.test = function (version) {
return range.test(PV.parse(version).semver);
};
// Already checked by calling PV.parse above.
needToCheckValidity = false;
} else {
result.versionString = constraintString;
}
}
// This will throw if the version string is invalid.
PV.getValidServerVersion(versionString);
if (needToCheckValidity) {
// This will throw if the version string is invalid.
PV.getValidServerVersion(result.versionString);
}
return { type: type, versionString: versionString };
return result;
};

View File

@@ -5,7 +5,7 @@ set -u
UNAME=$(uname)
ARCH=$(uname -m)
MONGO_VERSION=3.2.12
MONGO_VERSION=3.2.15
NODE_VERSION=8.2.1
NPM_VERSION=5.3.0

View File

@@ -1,126 +0,0 @@
#!/bin/sh
#
# Optional Environment Variables for Configuration
#
# - TIMEOUT_SCALE_FACTOR: (default: 15)
# A multiplication factor that can be used to raise the wait-time on
# various longer-running tests. Useful for slower (or faster!) hardware.
# - ADDL_SELF_TEST_EXCLUDE: (optional)
# A regex or list of additional regexes to skip.
# Export this one so it's available in the node environment.
export TIMEOUT_SCALE_FACTOR=${TIMEOUT_SCALE_FACTOR:-4}
# Skip these tests always. Add other tests with ADDL_SELF_TEST_EXCLUDE.
SELF_TEST_EXCLUDE="^old cli tests|^minifiers can't register non-js|^minifiers: apps can't use|^compiler plugins - addAssets"
# If no SELF_TEST_EXCLUDE is defined, use those defined here by default
if ! [ -z "$ADDL_SELF_TEST_EXCLUDE" ]; then
SELF_TEST_EXCLUDE="${SELF_TEST_EXCLUDE}|${ADDL_SELF_TEST_EXCLUDE}"
fi
# Don't print as many progress indicators
export EMACS=t
export METEOR_HEADLESS=true
if [ -z "$CIRCLE_NODE_TOTAL" ] || [ -z "$CIRCLE_NODE_INDEX" ]; then
# In the case where these aren't set, just pretend like we're a single node.
# This is also handy if the user is using another CI service besides CircleCI
CIRCLE_NODE_TOTAL=1
CIRCLE_NODE_INDEX=0
echo "[warn] CIRCLE_NODE_TOTAL or CIRCLE_NODE_INDEX was not defined. \c"
echo "Running all tests!"
fi
# Make sure we have initialized and updated submodules such as
# packages/non-core/blaze.
git submodule update --init --recursive
# run different jobs based on CicleCI parallel container index
should_run_test () {
test $(($1 % $CIRCLE_NODE_TOTAL)) -eq $CIRCLE_NODE_INDEX
}
# Keep track of errors, but let the tests all finish. This is necessary since
# more than one of the following tests may be executed from a single run if
# parallelism is lower than the number of tests.
exit_code=0
# Also, if any uncaught errors slip through, fail the build.
set -e
if should_run_test 0; then
echo "Running warehouse self-tests"
./meteor self-test --headless \
--with-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
if should_run_test 1; then
echo "Running self-test (1): A-Com"
./meteor self-test --headless \
--file "^[a-b]|^c[a-n]|^co[a-l]|^compiler-plugins" \
--without-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
if should_run_test 2; then
echo "Running self-test (2): Con-K"
./meteor self-test --headless \
--file "^co[n-z]|^c[p-z]|^[d-k]" \
--without-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
if should_run_test 3; then
echo "Running self-test (3): L-O"
./meteor self-test --headless \
--file "^[l-o]" \
--without-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
if should_run_test 4; then
echo "Running self-test (4): P"
./meteor self-test --headless \
--file "^p" \
--without-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
if should_run_test 5; then
echo "Running self-test (5): Run"
./meteor self-test --headless \
--file "^run" \
--without-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
if should_run_test 6; then
echo "Running self-test (6): R-S"
./meteor self-test --headless \
--file "^r(?!un)|^s" \
--without-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
if should_run_test 7; then
echo "Running self-test (7): Sp-Z"
./meteor self-test --headless \
--file "^[t-z]|^command-line" \
--without-tag "custom-warehouse" \
--exclude "$SELF_TEST_EXCLUDE" \
|| exit_code=$?
fi
exit $exit_code

View File

@@ -52,7 +52,7 @@ var packageJson = {
pathwatcher: "7.1.0",
optimism: "0.3.3",
'lru-cache': '4.0.1',
longjohn: '0.2.11'
longjohn: '0.2.12'
}
};

View File

@@ -1138,6 +1138,8 @@ main.registerCommand({
name: 'list',
requiresApp: true,
options: {
'tree': { type: Boolean },
'weak': { type: Boolean },
'allow-incompatible-update': { type: Boolean }
},
catalogRefresh: new catalog.Refresh.OnceAtStart({ ignoreErrors: true })
@@ -1152,6 +1154,86 @@ main.registerCommand({
// No need to display the PackageMapDelta here, since we're about to list all
// of the packages anyway!
if (options['tree']) {
const showWeak = !!options['weak'];
// Load package details of all used packages (inc. dependencies)
const packageDetails = new Map;
projectContext.packageMap.eachPackage(function (name, info) {
packageDetails.set(name, projectContext.projectCatalog.getVersion(name, info.version));
});
// Build a set of top level package names
const topLevelSet = new Set;
projectContext.projectConstraintsFile.eachConstraint(function (constraint) {
topLevelSet.add(constraint.package);
});
// Packages that should not be expanded (top level or already expanded)
const dontExpand = new Set(topLevelSet.values());
// Recursive function that outputs each package
const printPackage = function (packageToPrint, isWeak, indent1, indent2) {
const packageName = packageToPrint.packageName;
const depsObj = packageToPrint.dependencies || {};
let deps = Object.keys(depsObj).sort();
// Ignore references to a meteor version or isobuild marker packages
deps = deps.filter(dep => {
return dep !== 'meteor' && !compiler.isIsobuildFeaturePackage(dep);
});
if (!showWeak) {
// Filter out any weakly referenced dependencies
deps = deps.filter(dep => {
let references = depsObj[dep].references || [];
let weakRef = references.length > 0 && references.every(r => r.weak);
return !weakRef;
});
}
const expandedAlready = (deps.length > 0 && dontExpand.has(packageName));
const shouldExpand = (deps.length > 0 && !expandedAlready && !isWeak);
if (indent1 !== '') {
indent1 += (shouldExpand ? '┬' : '─') + ' ';
}
let suffix = (isWeak ? '[weak]' : '');
if (expandedAlready) {
suffix += topLevelSet.has(packageName) ? ' (top level)' : ' (expanded above)';
}
Console.info(indent1 + packageName + '@' + packageToPrint.version + suffix);
if (shouldExpand) {
dontExpand.add(packageName);
deps.forEach((dep, index) => {
const references = depsObj[dep].references || [];
const weakRef = references.length > 0 && references.every(r => r.weak);
const last = ((index + 1) === deps.length);
const child = packageDetails.get(dep);
const newIndent1 = indent2 + (last ? '└─' : '├─');
const newIndent2 = indent2 + (last ? ' ' : '│ ');
if (child) {
printPackage(child, weakRef, newIndent1, newIndent2);
} else if (weakRef) {
Console.info(newIndent1 + '─ ' + dep + '[weak] package skipped');
} else {
Console.info(newIndent1 + '─ ' + dep + ' missing?');
}
});
}
};
const topLevelNames = Array.from(topLevelSet.values()).sort();
topLevelNames.forEach((dep, index) => {
const topLevelPackage = packageDetails.get(dep);
if (topLevelPackage) {
// Force top level packages to be expanded
dontExpand.delete(topLevelPackage.packageName);
printPackage(topLevelPackage, false, '', '');
}
});
return 0;
}
var items = [];
var newVersionsAvailable = false;

View File

@@ -1326,8 +1326,7 @@ main.registerCommand({
"Setting passwords on apps is no longer supported. Now there are " +
"user accounts and your apps are associated with your account so " +
"that only you (and people you designate) can access them. See the " +
Console.command("'meteor claim'") + " and " +
Console.command("'meteor authorized'") + " commands.");
Console.command("'meteor authorized'") + " command.");
return 1;
}
@@ -1454,33 +1453,6 @@ main.registerCommand({
}
});
///////////////////////////////////////////////////////////////////////////////
// claim
///////////////////////////////////////////////////////////////////////////////
main.registerCommand({
name: 'claim',
minArgs: 1,
maxArgs: 1,
catalogRefresh: new catalog.Refresh.Never()
}, function (options) {
auth.pollForRegistrationCompletion();
var site = qualifySitename(options.args[0]);
if (! auth.isLoggedIn()) {
Console.error(
"You must be logged in to claim sites. Use " +
Console.command("'meteor login'") + " to log in. If you don't have a " +
"Meteor developer account yet, create one by clicking " +
Console.command("'Sign in'") + " and then " +
Console.command("'Create account'") + " at www.meteor.com.");
Console.error();
return 1;
}
return deploy.claim(site);
});
///////////////////////////////////////////////////////////////////////////////
// test and test-packages
///////////////////////////////////////////////////////////////////////////////

View File

@@ -248,9 +248,14 @@ Options:
>>> list
List the packages explicitly used by your project.
Usage: meteor list
meteor list --tree [--weak]
Lists the packages that you have explicitly added to your project.
This will not list transitive dependencies.
Transitive dependencies are not listed unless you use the --tree option,
which outputs a tree showing how packages are referenced.
Options:
--weak Show weakly referenced dependencies in the tree.
>>> add-platform
Add a platform to this project.
@@ -506,16 +511,6 @@ Options:
--list list authorized users and organizations (the default)
>>> claim
Claim a site deployed with an old Meteor version.
Usage: meteor claim <site>
If you deployed a site with an old version of Meteor that did not have
support for developer accounts, you can use this command to claim that
site into your account. If you had set a password on the site you will
be prompted for it one last time.
>>> login
Log in to your Meteor developer account.
Usage: meteor login [--email]

View File

@@ -78,7 +78,10 @@ function startNewWatcher(absPath) {
const stat = statOrNull(absPath);
const ino = stat && stat.ino;
if (ino > 0 && entriesByIno.has(ino)) {
return entriesByIno.get(ino);
const entry = entriesByIno.get(ino);
if (entries[absPath] === entry) {
return entry;
}
}
function safeUnwatch() {

View File

@@ -2628,7 +2628,9 @@ var writeSiteArchive = Profile("bundler writeSiteArchive", function (
format: "site-archive-pre1",
builtBy,
programs: [],
meteorRelease: releaseName
meteorRelease: releaseName,
nodeVersion: process.versions.node,
npmVersion: meteorNpm.npmVersion,
};
var nodePath = [];

View File

@@ -233,8 +233,8 @@ _.extend(Module.prototype, {
_.each(this.files, file => {
if (file.bare) {
// Bare files will be added in between the synchronous require
// calls in _chunkifyEagerRequires.
// Bare files will be added before the synchronous require calls
// in _chunkifyEagerRequires.
return;
}
@@ -384,10 +384,9 @@ _.extend(Module.prototype, {
},
// Adds require calls to the chunks array for all modules that should be
// eagerly evaluated, and also includes bare files in the appropriate
// order with respect to the require calls. Returns the name of the
// variable that holds the main exports object, if api.mainModule was
// used to define a main module.
// eagerly evaluated, and also includes any bare files before the
// require calls. Returns the name of the variable that holds the main
// exports object, if api.mainModule was used to define a main module.
_chunkifyEagerRequires(chunks, moduleCount, sourceWidth) {
assert.ok(_.isArray(chunks));
assert.ok(_.isNumber(moduleCount));
@@ -396,8 +395,11 @@ _.extend(Module.prototype, {
let exportsName;
// Now that we have installed everything in this package or
// application, immediately require the non-lazy modules and
// evaluate the bare files.
// application, first evaluate the bare files, then require the
// non-lazy (eager) modules.
const eagerModuleFiles = [];
_.each(this.files, file => {
if (file.bare) {
chunks.push("\n", file.getPrelinkedOutput({
@@ -405,6 +407,12 @@ _.extend(Module.prototype, {
noLineNumbers: this.noLineNumbers
}));
} else if (moduleCount > 0 && ! file.lazy) {
eagerModuleFiles.push(file);
}
});
if (eagerModuleFiles.length > 0) {
_.each(eagerModuleFiles, file => {
if (file.mainModule) {
exportsName = "exports";
}
@@ -415,8 +423,8 @@ _.extend(Module.prototype, {
JSON.stringify("./" + file.installPath),
");"
);
}
});
});
}
return exportsName;
}

View File

@@ -14,6 +14,7 @@ var buildmessage = require('../utils/buildmessage.js');
var utils = require('../utils/utils.js');
var runLog = require('../runners/run-log.js');
var Profile = require('../tool-env/profile.js').Profile;
import { version as npmVersion } from 'npm';
import { execFileAsync } from "../utils/processes.js";
import {
get as getRebuildArgs
@@ -33,6 +34,9 @@ import {
var meteorNpm = exports;
// Expose the version of npm in use from the dev bundle.
meteorNpm.npmVersion = npmVersion;
// if a user exits meteor while we're trying to create a .npm
// directory, we will have temporary directories that we clean up
var tmpDirs = [];

View File

@@ -508,6 +508,7 @@ var doInteractivePasswordLogin = function (options) {
} else {
loginFailed();
if (options.retry) {
delete loginData.password;
Console.error();
continue;
} else {

View File

@@ -4,14 +4,25 @@
// prompt for password
// send RPC with or without password as required
var files = require('../fs/files.js');
var httpHelpers = require('../utils/http-helpers.js');
var buildmessage = require('../utils/buildmessage.js');
var config = require('./config.js');
var auth = require('./auth.js');
var _ = require('underscore');
var stats = require('./stats.js');
var Console = require('../console/console.js').Console;
import {
pathJoin,
createTarGzStream,
getSettings,
mkdtemp,
} from '../fs/files.js';
import { request } from '../utils/http-helpers.js';
import buildmessage from '../utils/buildmessage.js';
import {
pollForRegistrationCompletion,
doInteractivePasswordLogin,
loggedInUsername,
isLoggedIn,
maybePrintRegistrationLink,
} from './auth.js';
import { recordPackages } from './stats.js';
import { Console } from '../console/console.js';
const hasOwn = Object.prototype.hasOwnProperty;
const CAPABILITIES = ['showDeployMessages', 'canTransferAuthorization'];
@@ -58,13 +69,13 @@ const CAPABILITIES = ['showDeployMessages', 'canTransferAuthorization'];
// derived from either a transport-level exception, the response
// body, or a generic 'try again later' message, as appropriate
var deployRpc = function (options) {
options = _.clone(options);
options.headers = _.clone(options.headers || {});
function deployRpc(options) {
options = Object.assign({}, options);
options.headers = Object.assign({}, options.headers || {});
if (options.headers.cookie) {
throw new Error("sorry, can't combine cookie headers yet");
}
options.qs = _.extend({}, options.qs, {capabilities: CAPABILITIES});
options.qs = Object.assign({}, options.qs, {capabilities: CAPABILITIES});
const deployURLBase = getDeployURL(options.site).await();
@@ -74,7 +85,7 @@ var deployRpc = function (options) {
// XXX: Reintroduce progress for upload
try {
var result = httpHelpers.request(_.extend(options, {
var result = request(Object.assign(options, {
url: deployURLBase + '/' + options.operation +
(options.site ? ('/' + options.site) : ''),
method: options.method || 'GET',
@@ -117,13 +128,13 @@ var deployRpc = function (options) {
ret.message = body;
}
var hasAllExpectedKeys = _.all(_.map(
options.expectPayload || [], function (key) {
return ret.payload && _.has(ret.payload, key);
}));
const hasAllExpectedKeys =
(options.expectPayload || [])
.map(key => ret.payload && hasOwn.call(ret.payload, key))
.every(x => x);
if ((options.expectPayload && ! _.has(ret, 'payload')) ||
(options.expectMessage && ! _.has(ret, 'message')) ||
if ((options.expectPayload && ! hasOwn.call(ret, 'payload')) ||
(options.expectMessage && ! hasOwn.call(ret, 'message')) ||
! hasAllExpectedKeys) {
delete ret.payload;
delete ret.message;
@@ -152,8 +163,8 @@ var deployRpc = function (options) {
// accounts server but our authentication actually fails, then prompt
// the user to log in with a username and password and then resend the
// RPC.
var authedRpc = function (options) {
var rpcOptions = _.clone(options);
function authedRpc(options) {
var rpcOptions = Object.assign({}, options);
var preflight = rpcOptions.preflight;
delete rpcOptions.preflight;
@@ -178,7 +189,7 @@ var authedRpc = function (options) {
username: username,
suppressErrorMessage: true
};
if (auth.doInteractivePasswordLogin(loginOptions)) {
if (doInteractivePasswordLogin(loginOptions)) {
return authedRpc(options);
} else {
return {
@@ -198,45 +209,15 @@ var authedRpc = function (options) {
}
var info = infoResult.payload;
if (! _.has(info, 'protection')) {
if (! hasOwn.call(info, 'protection')) {
// Not protected.
//
// XXX should prompt the user to claim the app (only if deploying?)
return preflight ? { } : deployRpc(rpcOptions);
}
if (info.protection === "password") {
if (preflight) {
return { protection: info.protection };
}
// Password protected. Read a password, hash it, and include the
// hashed password as a query parameter when doing the RPC.
var password;
password = Console.readLine({
echo: false,
prompt: "Password: ",
stream: process.stderr
});
// Hash the password so we never send plaintext over the
// wire. Doesn't actually make us more secure, but it means we
// won't leak a user's password, which they might use on other
// sites too.
var crypto = require('crypto');
var hash = crypto.createHash('sha1');
hash.update('S3krit Salt!');
hash.update(password);
password = hash.digest('hex');
rpcOptions = _.clone(rpcOptions);
rpcOptions.qs = _.clone(rpcOptions.qs || {});
rpcOptions.qs.password = password;
return deployRpc(rpcOptions);
}
if (info.protection === "account") {
if (! _.has(info, 'authorized')) {
if (! hasOwn.call(info, 'authorized')) {
// Absence of this implies that we are not an authorized user on
// this app
if (preflight) {
@@ -244,7 +225,7 @@ var authedRpc = function (options) {
} else {
return {
statusCode: null,
errorMessage: auth.isLoggedIn() ?
errorMessage: isLoggedIn() ?
// XXX better error message (probably need to break out of
// the 'errorMessage printed with brief prefix' pattern)
"Not an authorized user on this site" :
@@ -270,26 +251,11 @@ var authedRpc = function (options) {
};
};
// When the user is trying to do something with a legacy
// password-protected app, instruct them to claim it with 'meteor
// claim'.
var printLegacyPasswordMessage = function (site) {
Console.error(
"\nThis site was deployed with an old version of Meteor that used " +
"site passwords instead of user accounts. Now we have a much better " +
"system, Meteor developer accounts.");
Console.error();
Console.error("If this is your site, please claim it into your account with");
Console.error(
Console.command("meteor claim " + site),
Console.options({ indent: 2 }));
};
// When the user is trying to do something with an app that they are not
// authorized for, instruct them to get added via 'meteor authorized
// --add' or switch accounts.
var printUnauthorizedMessage = function () {
var username = auth.loggedInUsername();
function printUnauthorizedMessage() {
var username = loggedInUsername();
Console.error("Sorry, that site belongs to a different user.");
if (username) {
Console.error("You are currently logged in as " + username + ".");
@@ -306,7 +272,7 @@ var printUnauthorizedMessage = function () {
// syntactically good, canonicalize it (this essentially means
// stripping 'http://' or a trailing '/' if present) and return it. If
// not, print an error message to stderr and return null.
var canonicalizeSite = function (site) {
function canonicalizeSite(site) {
// There are actually two different bugs here. One is that the meteor deploy
// server does not support apps whose total site length is greater than 63
// (because of how it generates Mongo database names); that can be fixed on
@@ -359,7 +325,7 @@ var canonicalizeSite = function (site) {
// stats server.
// - buildOptions: the 'buildOptions' argument to the bundler
// - rawOptions: any unknown options that were passed to the command line tool
var bundleAndDeploy = function (options) {
export function bundleAndDeploy(options) {
if (options.recordPackageUsage === undefined) {
options.recordPackageUsage = true;
}
@@ -379,10 +345,10 @@ var bundleAndDeploy = function (options) {
// they'll get an email prompt instead of a username prompt because
// the command-line tool didn't have time to learn about their
// username before the credential was expired.
auth.pollForRegistrationCompletion({
pollForRegistrationCompletion({
noLogout: true
});
var promptIfAuthFails = (auth.loggedInUsername() !== null);
var promptIfAuthFails = (loggedInUsername() !== null);
// Check auth up front, rather than after the (potentially lengthy)
// bundling process.
@@ -399,19 +365,14 @@ var bundleAndDeploy = function (options) {
return 1;
}
if (preflight.protection === "password") {
printLegacyPasswordMessage(site);
Console.error("If it's not your site, please try a different name!");
return 1;
} else if (preflight.protection === "account" &&
if (preflight.protection === "account" &&
! preflight.authorized) {
printUnauthorizedMessage();
return 1;
}
var buildDir = files.mkdtemp('build_tar');
var bundlePath = files.pathJoin(buildDir, 'bundle');
var buildDir = mkdtemp('build_tar');
var bundlePath = pathJoin(buildDir, 'bundle');
Console.info('Deploying your app...');
@@ -421,7 +382,7 @@ var bundleAndDeploy = function (options) {
rootPath: process.cwd()
}, function () {
if (options.settingsFile) {
settings = files.getSettings(options.settingsFile);
settings = getSettings(options.settingsFile);
}
});
@@ -446,7 +407,7 @@ var bundleAndDeploy = function (options) {
}
if (options.recordPackageUsage) {
stats.recordPackages({
recordPackages({
what: "sdk.deploy",
projectContext: options.projectContext,
site: site
@@ -458,8 +419,8 @@ var bundleAndDeploy = function (options) {
method: 'POST',
operation: 'deploy',
site: site,
qs: _.extend({}, options.rawOptions, settings !== null ? {settings: settings} : {}),
bodyStream: files.createTarGzStream(files.pathJoin(buildDir, 'bundle')),
qs: Object.assign({}, options.rawOptions, settings !== null ? {settings: settings} : {}),
bodyStream: createTarGzStream(pathJoin(buildDir, 'bundle')),
expectPayload: ['url'],
preflightPassword: preflight.preflightPassword,
// Disable the HTTP timeout for this POST request.
@@ -502,7 +463,7 @@ var bundleAndDeploy = function (options) {
return 0;
};
var deleteApp = function (site) {
export function deleteApp(site) {
site = canonicalizeSite(site);
if (! site) {
return 1;
@@ -533,7 +494,7 @@ var deleteApp = function (site) {
// messages. Returns the result of the RPC if successful, or null
// otherwise (including if auth failed or if the user is not authorized
// for this site).
var checkAuthThenSendRpc = function (site, operation, what) {
function checkAuthThenSendRpc(site, operation, what) {
var preflight = authedRpc({
operation: operation,
site: site,
@@ -547,15 +508,12 @@ var checkAuthThenSendRpc = function (site, operation, what) {
return null;
}
if (preflight.protection === "password") {
printLegacyPasswordMessage(site);
return null;
} else if (preflight.protection === "account" &&
if (preflight.protection === "account" &&
! preflight.authorized) {
if (! auth.isLoggedIn()) {
if (! isLoggedIn()) {
// Maybe the user is authorized for this app but not logged in
// yet, so give them a login prompt.
var loginResult = auth.doUsernamePasswordLogin({ retry: true });
var loginResult = doUsernamePasswordLogin({ retry: true });
if (loginResult) {
// Once we've logged in, retry the whole operation. We need to
// do the preflight request again instead of immediately moving
@@ -603,7 +561,7 @@ var checkAuthThenSendRpc = function (site, operation, what) {
// On failure, prints a message to stderr and returns null. Otherwise,
// returns a temporary authenticated Mongo URL allowing access to this
// site's database.
var temporaryMongoUrl = function (site) {
export function temporaryMongoUrl(site) {
site = canonicalizeSite(site);
if (! site) {
// canonicalizeSite printed an error
@@ -619,7 +577,7 @@ var temporaryMongoUrl = function (site) {
}
};
var logs = function (site) {
export function logs(site) {
site = canonicalizeSite(site);
if (! site) {
return 1;
@@ -631,12 +589,12 @@ var logs = function (site) {
return 1;
} else {
Console.info(result.message);
auth.maybePrintRegistrationLink({ leadingNewline: true });
maybePrintRegistrationLink({ leadingNewline: true });
return 0;
}
};
var listAuthorized = function (site) {
export function listAuthorized(site) {
site = canonicalizeSite(site);
if (! site) {
return 1;
@@ -654,25 +612,20 @@ var listAuthorized = function (site) {
}
var info = result.payload;
if (! _.has(info, 'protection')) {
if (! hasOwn.call(info, 'protection')) {
Console.info("<anyone>");
return 0;
}
if (info.protection === "password") {
Console.info("<password>");
return 0;
}
if (info.protection === "account") {
if (! _.has(info, 'authorized')) {
if (! hasOwn.call(info, 'authorized')) {
Console.error("Couldn't get authorized users list: " +
"You are not authorized");
return 1;
}
Console.info((auth.loggedInUsername() || "<you>"));
_.each(info.authorized, function (username) {
Console.info((loggedInUsername() || "<you>"));
info.authorized.forEach(username => {
if (username) {
// Current username rules don't let you register anything that we might
// want to split over multiple lines (ex: containing a space), but we
@@ -685,7 +638,7 @@ var listAuthorized = function (site) {
};
// action is "add", "transfer" or "remove"
var changeAuthorized = function (site, action, username) {
export function changeAuthorized(site, action, username) {
site = canonicalizeSite(site);
if (! site) {
// canonicalizeSite will have already printed an error
@@ -715,89 +668,7 @@ var changeAuthorized = function (site, action, username) {
return 0;
};
var claim = function (site) {
site = canonicalizeSite(site);
if (! site) {
// canonicalizeSite will have already printed an error
return 1;
}
// Check to see if it's even a claimable site, so that we can print
// a more appropriate message than we'd get if we called authedRpc
// straight away (at a cost of an extra REST call)
var infoResult = deployRpc({
operation: 'info',
site: site,
printDeployURL: true
});
if (infoResult.statusCode === 404) {
Console.error(
"There isn't a site deployed at that address. Use " +
Console.command("'meteor deploy'") + " " +
"if you'd like to deploy your app here.");
return 1;
}
if (infoResult.payload && infoResult.payload.protection === "account") {
if (infoResult.payload.authorized) {
Console.error("That site already belongs to you.\n");
} else {
Console.error("Sorry, that site belongs to someone else.\n");
}
return 1;
}
if (infoResult.payload &&
infoResult.payload.protection === "password") {
Console.info(
"To claim this site and transfer it to your account, enter the",
"site password one last time.");
Console.info();
}
var result = authedRpc({
method: 'POST',
operation: 'claim',
site: site,
promptIfAuthFails: true
});
if (result.errorMessage) {
auth.pollForRegistrationCompletion();
if (! auth.loggedInUsername() &&
auth.registrationUrl()) {
Console.error(
"You need to set a password on your Meteor developer account before",
"you can claim sites. You can do that here in under a minute:");
Console.error(Console.url(auth.registrationUrl()));
Console.error();
} else {
Console.error("Couldn't claim site: " + result.errorMessage);
}
return 1;
}
Console.info(site + ": " + "successfully transferred to your account.");
Console.info();
Console.info("Show authorized users with:");
Console.info(
Console.command("meteor authorized " + site),
Console.options({ indent: 2 }));
Console.info();
Console.info("Add authorized users with:");
Console.info(
Console.command("meteor authorized " + site + " --add <username>"),
Console.options({ indent: 2 }));
Console.info();
Console.info("Remove authorized users with:");
Console.info(
Console.command("meteor authorized " + site + " --remove <username>"),
Console.options({ indent: 2 }));
Console.info();
return 0;
};
var listSites = function () {
export function listSites() {
var result = deployRpc({
method: "GET",
operation: "authorized-apps",
@@ -815,10 +686,9 @@ var listSites = function () {
! result.payload.sites.length) {
Console.info("You don't have any sites yet.");
} else {
result.payload.sites.sort();
_.each(result.payload.sites, function (site) {
Console.info(site);
});
result.payload.sites
.sort()
.forEach(site => Console.info(site));
}
return 0;
};
@@ -876,7 +746,7 @@ async function discoverGalaxy(site, scheme) {
scheme + "://" + site + "/.well-known/meteor/deploy-url";
// If httpHelpers.request throws, the returned Promise will reject, which is
// fine.
const { response, body } = httpHelpers.request({
const { response, body } = request({
url: discoveryURL,
json: true,
strictSSL: true,
@@ -894,17 +764,8 @@ async function discoverGalaxy(site, scheme) {
throw new Error(
"unexpected galaxyDiscoveryVersion: " + body.galaxyDiscoveryVersion);
}
if (!_.has(body, "deployURL")) {
if (! hasOwn.call(body, "deployURL")) {
throw new Error("no deployURL");
}
return body.deployURL;
}
exports.bundleAndDeploy = bundleAndDeploy;
exports.deleteApp = deleteApp;
exports.temporaryMongoUrl = temporaryMongoUrl;
exports.logs = logs;
exports.listAuthorized = listAuthorized;
exports.changeAuthorized = changeAuthorized;
exports.claim = claim;
exports.listSites = listSites;

View File

@@ -654,7 +654,11 @@ _.extend(ProjectContext.prototype, {
files.pathJoin(files.getCurrentToolsDir(), 'packages');
searchDirs.push(
// Include packages like packages/ecmascript.
packagesDir,
// Include packages like packages/non-core/coffeescript.
files.pathJoin(packagesDir, "non-core"),
// Include packages like packages/non-core/blaze/packages/blaze.
files.pathJoin(packagesDir, "non-core", "*", "packages"),
);
}
@@ -747,8 +751,9 @@ _.extend(ProjectContext.prototype, {
var constraint = utils.parsePackageConstraint(
// Note that this used to be an exact name@=version constraint,
// before #7084 eliminated these constraints completely. They
// were reinstated in Meteor 1.4.3 as name@version constraints.
packageName + "@" + version);
// were reinstated in Meteor 1.4.3 as name@version constraints,
// and further refined to name@~version constraints in 1.5.2.
packageName + "@~" + version);
// Add a constraint but no dependency (we don't automatically use
// all local packages!):
depsAndConstraints.constraints.push(constraint);

View File

@@ -71,20 +71,10 @@ class Runner {
onFailure
});
buildmessage.capture(function () {
self.projectContext.resolveConstraints();
});
const packageMap = self.projectContext.packageMap;
const hasMongoDevServerPackage =
packageMap && packageMap.getInfo('mongo-dev-server') != null;
self.mongoRunner = null;
if (mongoUrl) {
oplogUrl = disableOplog ? null : oplogUrl;
} else if (hasMongoDevServerPackage
|| process.env.METEOR_TEST_FAKE_MONGOD_CONTROL_PORT) {
// The mongo-dev-server package is required to start Mongo, but
// tests using fake-mongod are exempted.
} else {
self.mongoRunner = new MongoRunner({
projectLocalDir: self.projectContext.projectLocalDir,
port: mongoPort,
@@ -96,13 +86,6 @@ class Runner {
mongoUrl = self.mongoRunner.mongoUrl();
oplogUrl = disableOplog ? null : self.mongoRunner.oplogUrl();
} else {
// Don't start a mongodb server.
// Set monogUrl to a specific value to prevent MongoDB connections
// and to allow a check for printing a message if `mongo-dev-server`
// is added while the app is running.
// The check and message is printed by the `mongo-dev-server` package.
mongoUrl = 'no-mongo-server';
}
self.updater = new Updater();

View File

@@ -593,7 +593,7 @@ var launchMongo = function (options) {
'meteor',
new mongoNpmModule.Server('127.0.0.1', options.port, {
poolSize: 1,
socketOptions: {connectTimeoutMS: 30000},
socketOptions: {connectTimeoutMS: 60000},
}),
{safe: true});

View File

@@ -0,0 +1,10 @@
{
"env": {
"development": {
"plugins": ["transform-do-expressions"]
},
"production": {
"plugins": ["transform-do-expressions"]
}
}
}

View File

@@ -0,0 +1,18 @@
// Test fixture: proves that the `env` subkeys ("development"/"production") of
// a .babelrc are honored, by using syntax that only compiles when the
// babel-plugin-transform-do-expressions listed under those subkeys is active.
function babeltest() {
// use transform-do-expressions plugin to prove babel `env` subkey was loaded
// NOTE: `do { ... }` is a non-standard do-expression; compiling this file
// succeeds only if the plugin configured in .babelrc's env subkeys is loaded.
let x = do {
1;
};
console.log(x)
}
/*
If the plugin is loaded correctly there will be no errors during the compilation of this file.
Without this plugin you will get the error:
W20170803-17:58:17.054(-7)? (STDERR) var x = do {
W20170803-17:58:17.055(-7)? (STDERR) ^^
W20170803-17:58:17.055(-7)? (STDERR)
W20170803-17:58:17.055(-7)? (STDERR) SyntaxError: Unexpected token do
*/

View File

@@ -19,5 +19,8 @@
"test": "METEOR_PROFILE=100 ../../../../meteor test --full-app --driver-package dispatch:mocha-phantomjs",
"browser": "METEOR_PROFILE=100 ../../../../meteor test --full-app --driver-package dispatch:mocha-browser",
"test-packages": "../../../../meteor test-packages --driver-package dispatch:mocha-phantomjs packages/modules-test-package"
},
"devDependencies": {
"babel-plugin-transform-do-expressions": "^6.22.0"
}
}

View File

@@ -0,0 +1,11 @@
// Local test package used by the "tilde version constraints" self-test to
// exercise resolution of @~ (tilde) version constraints against version 0.4.2.
Package.describe({
name: "tilde-constraints",
version: "0.4.2",
summary: "Package for testing @~ version constraints",
documentation: "README.md"
});
Package.onUse(function(api) {
api.use("ecmascript");
api.mainModule("tilde-constraints.js");
});

View File

@@ -0,0 +1 @@
// Main module of the tilde-constraints test package. Logs its module id so
// the self-test can match "tilde-constraints.js" in the app's output and
// confirm the package was loaded (and reloaded on restart).
console.log(module.id);

View File

@@ -0,0 +1,12 @@
// Local test package that depends on tilde-constraints, used to verify that
// @~ (tilde) constraints between packages resolve correctly.
Package.describe({
name: "tilde-dependent",
version: "0.1.0",
summary: "Package for testing inter-package @~ constraints",
documentation: "README.md"
});
Package.onUse(function(api) {
api.use("ecmascript");
api.use("tilde-constraints");
api.mainModule("tilde-dependent.js");
});

View File

@@ -0,0 +1 @@
// Main module of the tilde-dependent test package. Logs its module id so the
// self-test can observe that the package (and its tilde-constrained
// dependency) was loaded.
console.log(module.id);

View File

@@ -122,3 +122,34 @@ selftest.define("login", ['net'], function () {
run.matchErr("Login failed");
run.expectExit(1);
});
// This is a Galaxy-related command (deploy), but still pretty auth-y.
selftest.define("login on deploy", ['net'], function () {
  // Fresh sandbox with a standard app to deploy.
  const sandbox = new Sandbox;
  const siteName = testUtils.randomAppName();
  sandbox.createApp(siteName, "standard-app");
  sandbox.cd(siteName);

  const run = sandbox.run("deploy", siteName);

  // Deploying while logged out should trigger an interactive login prompt.
  run.matchErr(/You must be logged in to deploy/);
  run.matchErr("Email:");
  run.write("test@test.com\n");
  run.matchErr("Logging in as test.");

  // First attempt: wrong password — login must fail and prompt again.
  run.matchErr("Password:");
  run.write("SoVeryWrong\n");
  run.waitSecs(commandTimeoutSecs);
  run.matchErr("Login failed");

  // Second attempt: correct password — deploy proceeds to Galaxy.
  run.matchErr("Password:");
  run.write("testtest\n");
  run.waitSecs(commandTimeoutSecs);
  run.match("Talking to Galaxy servers");

  // "test" user can't actually deploy, so it will still fail.
  run.expectExit(1);
});

View File

@@ -183,6 +183,51 @@ selftest.define("change packages during hot code push", [], function () {
run.stop();
});
// Verify that a debugOnly package loads only in development runs and a
// prodOnly package loads only in --production runs. The sandboxed app exits
// with code 234 when the package under test was loaded and 235 when it was
// not (see the server/exit-test.js sentinel written below).
selftest.define("add debugOnly and prodOnly packages", [], function () {
var s = new Sandbox();
var run;
// Starting a run
s.createApp("myapp", "package-tests");
s.cd("myapp");
s.set("METEOR_OFFLINE_CATALOG", "t");
// Add a debugOnly package. It should work during a normal run, but print
// nothing in production mode.
run = s.run("add", "debug-only");
run.match("debug-only");
run.expectExit(0);
s.mkdir("server");
s.write("server/exit-test.js",
"process.exit(global.DEBUG_ONLY_LOADED ? 234 : 235)");
// Development run: debug-only should have been loaded (exit 234) …
run = s.run("--once");
run.waitSecs(15);
run.expectExit(234);
// … but a production run must skip it (exit 235).
run = s.run("--once", "--production");
run.waitSecs(15);
run.expectExit(235);
// Add prod-only package, which sets GLOBAL.PROD_ONLY_LOADED.
run = s.run("add", "prod-only");
run.match("prod-only");
run.expectExit(0);
s.mkdir("server");
s.write("server/exit-test.js", // overwrite
"process.exit(global.PROD_ONLY_LOADED ? 234 : 235)");
// Development run: prod-only must NOT load (exit 235) …
run = s.run("--once");
run.waitSecs(15);
run.expectExit(235);
// … while a production run loads it (exit 234).
run = s.run("--once", "--production");
run.waitSecs(15);
run.expectExit(234);
});
// Add packages through the command line. Make sure that the correct set of
// changes is reflected in .meteor/packages, .meteor/versions and list.
selftest.define("add packages to app", [], function () {
@@ -274,51 +319,6 @@ selftest.define("add packages to app", [], function () {
run.expectExit(0);
});
selftest.define("add debugOnly and prodOnly packages", [], function () {
  var sandbox = new Sandbox();
  var cliRun;

  // Prepare an offline test app.
  sandbox.createApp("myapp", "package-tests");
  sandbox.cd("myapp");
  sandbox.set("METEOR_OFFLINE_CATALOG", "t");

  // debugOnly packages load in development runs only. Exit code 234 means
  // the package's global flag was visible to the server; 235 means not.
  cliRun = sandbox.run("add", "debug-only");
  cliRun.match("debug-only");
  cliRun.expectExit(0);

  sandbox.mkdir("server");
  sandbox.write("server/exit-test.js",
                "process.exit(global.DEBUG_ONLY_LOADED ? 234 : 235)");

  cliRun = sandbox.run("--once");
  cliRun.waitSecs(15);
  cliRun.expectExit(234);

  cliRun = sandbox.run("--once", "--production");
  cliRun.waitSecs(15);
  cliRun.expectExit(235);

  // prodOnly packages (prod-only sets global.PROD_ONLY_LOADED) load in
  // production runs only, so the expected exit codes flip.
  cliRun = sandbox.run("add", "prod-only");
  cliRun.match("prod-only");
  cliRun.expectExit(0);

  sandbox.mkdir("server");
  sandbox.write("server/exit-test.js", // overwrite
                "process.exit(global.PROD_ONLY_LOADED ? 234 : 235)");

  cliRun = sandbox.run("--once");
  cliRun.waitSecs(15);
  cliRun.expectExit(235);

  cliRun = sandbox.run("--once", "--production");
  cliRun.waitSecs(15);
  cliRun.expectExit(234);
});
selftest.define("add package with both debugOnly and prodOnly", [], function () {
var s = new Sandbox();
var run;
@@ -955,3 +955,149 @@ selftest.define("show readme excerpt", function () {
run.matchErr("Documentation not found");
run.expectExit(1);
});
// Exercise the tilde (~) version-constraint operator, both as a top-level
// constraint in .meteor/packages and as an api.use constraint inside a
// dependent local package. The local tilde-constraints package publishes
// version 0.4.2 (see the match assertions below), so ~0.4.x forms should
// resolve while ~0.3.0 / ~0.5.0 should not.
selftest.define("tilde version constraints", [], function () {
  var s = new Sandbox();
  s.set("METEOR_WATCH_PRIORITIZE_CHANGED", "false");

  s.createApp("tilde-app", "package-tests");
  s.cd("tilde-app");

  var run = s.run();

  // Wait for the app to finish booting before touching .meteor/packages.
  run.match("tilde-app");
  run.match("proxy");
  run.waitSecs(10);
  run.match("MongoDB");
  run.waitSecs(10);
  run.match("your app");
  run.waitSecs(10);
  run.match("running at");
  run.waitSecs(60);

  // Baseline contents of .meteor/packages, normalized to end with exactly
  // one newline so entries can be appended cleanly below.
  var packages = s.read(".meteor/packages")
    .replace(/\n*$/m, "\n");

  // Rewrite .meteor/packages as the baseline plus one tilde-constraints
  // entry, optionally version-constrained ("" means no constraint).
  function setTopLevelConstraint(constraint) {
    s.write(
      ".meteor/packages",
      packages + "tilde-constraints" + (
        constraint ? "@" + constraint : ""
      ) + "\n"
    );
  }

  // Unconstrained: resolves to the available version, 0.4.2.
  setTopLevelConstraint("");
  run.match(/tilde-constraints.*added, version 0\.4\.2/);
  run.match("tilde-constraints.js");
  run.waitSecs(10);

  setTopLevelConstraint("0.4.0");
  run.match("tilde-constraints.js");
  run.match("server restarted");
  run.waitSecs(10);

  setTopLevelConstraint("~0.4.0");
  run.match("tilde-constraints.js");
  run.match("server restarted");
  run.waitSecs(10);

  // 0.4.3 is newer than the available 0.4.2, so both the plain and tilde
  // forms must fail to resolve.
  setTopLevelConstraint("0.4.3");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);

  setTopLevelConstraint("~0.4.3");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);

  // A plain 0.3.0 constraint is satisfied by the later 0.4.2, but ~0.3.0
  // restricts the minor version to 0.3.x and must fail.
  setTopLevelConstraint("0.3.0");
  run.match("tilde-constraints.js");
  run.match("server restarted");
  run.waitSecs(10);

  setTopLevelConstraint("~0.3.0");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);

  setTopLevelConstraint("0.5.0");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);

  setTopLevelConstraint("~0.5.0");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);

  // Restore the baseline; the package should be reported as removed.
  s.write(
    ".meteor/packages",
    packages
  );
  run.match(/tilde-constraints.*removed/);
  run.waitSecs(10);

  // tilde-dependent depends on tilde-constraints, so adding it pulls in both.
  s.write(
    ".meteor/packages",
    packages + "tilde-dependent\n"
  );
  run.match(/tilde-constraints.*added, version 0\.4\.2/);
  run.match(/tilde-dependent.*added, version 0\.1\.0/);
  run.match("tilde-constraints.js");
  run.match("tilde-dependent.js");
  run.waitSecs(10);

  var depPackageJsPath = "packages/tilde-dependent/package.js";
  var depPackageJs = s.read(depPackageJsPath);

  // Rewrite tilde-dependent's package.js, replacing every tilde-constraints
  // reference with the given constraint ("" means unconstrained).
  function setDepConstraint(constraint) {
    s.write(
      depPackageJsPath,
      depPackageJs.replace(
        /tilde-constraints[^"]*/g, // Syntax highlighting hack: "
        "tilde-constraints" + (
          constraint ? "@" + constraint : ""
        )
      )
    );
  }

  setDepConstraint("0.4.0");
  run.match("tilde-constraints.js");
  run.match("tilde-dependent.js");
  run.match("server restarted");
  run.waitSecs(10);

  setDepConstraint("~0.4.0");
  run.match("tilde-constraints.js");
  run.match("tilde-dependent.js");
  run.match("server restarted");
  run.waitSecs(10);

  setDepConstraint("0.3.0");
  run.match("tilde-constraints.js");
  run.match("tilde-dependent.js");
  run.match("server restarted");
  run.waitSecs(10);

  // TODO The rest of these tests should cause version conflicts, but it
  // seems like version constraints between local packages are ignored,
  // which is a larger (preexisting) problem we should investigate.
  /*
  setDepConstraint("=0.4.0");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);
  setDepConstraint("~0.3.0");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);
  setDepConstraint("0.4.3");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);
  setDepConstraint("~0.4.3");
  run.match("error: No version of tilde-constraints satisfies all constraints");
  run.waitSecs(10);
  */

  run.stop();
});

View File

@@ -54,7 +54,8 @@ selftest.define("static-html - throws error", () => {
s.cd('myapp');
const run = startRun(s);
run.matchBeforeExit("Attributes on <head> not supported");
run.match("Attributes on <head> not supported");
run.waitSecs(90);
run.stop();
});

View File

@@ -36,9 +36,6 @@ const DEV_DEPENDENCY_VERSIONS = {
require("../tool-env/install-runtime.js");
// To allow long stack traces that cross async boundaries
require('longjohn');
// Exception representing a test failure
var TestFailure = function (reason, details) {
var self = this;