diff --git a/.circleci/config.yml b/.circleci/config.yml index 9fb85d6669..a4e3e5944b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,13 +1,5 @@ version: 2 -# These directories are cached across all builds, currently with no -# hashing mechanism, but we should consider doing it off dev_bundle. -meteor_cache_dirs: &meteor_cache_dirs - paths: - - "dev_bundle" - - ".babel-cache" - - ".meteor" - # A reusable "run" snippet which is ran before each test to setup the # environment for user-limits, core-dumps, etc. run_env_change: &run_env_change @@ -17,13 +9,16 @@ run_env_change: &run_env_change sudo mkdir -p /tmp/core_dumps sudo chmod a+rwx /tmp/core_dumps - # Make a place for JUnit tests to live. - sudo mkdir -p /tmp/results/junit - sudo chmod -R a+rwx /tmp/results/ + # Bake the locale we expect into the image. + echo "en_US.UTF-8 UTF-8" | sudo tee /etc/locale.gen + sudo locale-gen + + # The commands below don't work in Docker images, but might be worth + # reenabling if we switch back to machine:true instead of Docker. # Set the pattern for core dumps, so we can find them. - echo kernel.core_pattern="/tmp/core_dumps/core.%e.%p.%h.%t" | \ - sudo tee -a /etc/sysctl.conf + # echo kernel.core_pattern="/tmp/core_dumps/core.%e.%p.%h.%t" | \ + # sudo tee -a /etc/sysctl.conf # Note that since every "run" command starts its own shell, and I wasn't # able to set this at a system wide level for all users, it's necessary to @@ -31,11 +26,11 @@ run_env_change: &run_env_change # output a core dump. # Raise inotify user watches up higher. - echo fs.inotify.max_user_watches=524288 | \ - sudo tee -a /etc/sysctl.conf + # echo fs.inotify.max_user_watches=524288 | \ + # sudo tee -a /etc/sysctl.conf # Reload sysctl so these are in effect. - sudo sysctl -p + # sudo sysctl -p # A reusable "run" snippet which enables the continued logging of memoryusage # to a file on disk which can be saved to build artifacts for later analysis. 
@@ -46,7 +41,8 @@ run_log_mem_use: &run_log_mem_use # Log memory usage throughout entire build. MEMUSELOG=/tmp/memuse.txt /bin/bash -c '\ while true; do\ - ps -u $USER eo pid,%cpu,%mem,rss:10,vsz:10,args:20 --sort=-%mem >> $MEMUSELOG; \ + ps -e -o user,%cpu,%mem,rss:10,vsz:10,command:20 \ + --sort=-%mem >> $MEMUSELOG; \ echo "----------" >> $MEMUSELOG; \ sleep 1; \ done' @@ -64,14 +60,17 @@ run_save_node_bin: &run_save_node_bin # This environment is set to every job (and the initial build). build_machine_environment: &build_machine_environment # Specify that we want an actual machine (ala Circle 1.0), not a Docker image. - machine: true + docker: + - image: meteor/circleci environment: # This multiplier scales the waitSecs for selftests. TIMEOUT_SCALE_FACTOR: 8 + # Retry failed tests additional times. + METEOR_SELF_TEST_RETRIES: 2 + # These, mostly overlapping, flags ensure that CircleCI is as pretty as # possible for a non-interactive environment. See also: --headless. - EMACS: t METEOR_HEADLESS: true METEOR_PRETTY_OUTPUT: 0 @@ -80,26 +79,23 @@ build_machine_environment: &build_machine_environment METEOR_SAVE_TMPDIRS: 1 # Skip these tests on every test run. - # For readability, this is a regex wrapped across multiple lines in quotes. - SELF_TEST_EXCLUDE: "\ - ^old cli tests|\ - ^minifiers can't register non-js|\ - ^minifiers: apps can't use|\ - ^compiler plugins - addAssets\ - " + # If needed, for readability this should be a regex wrapped across + # multiple lines in quotes. + SELF_TEST_EXCLUDE: "add debugOnly and prodOnly packages" + # These will be evaled before each command. PRE_TEST_COMMANDS: |- ulimit -c unlimited; # Set core dump size as Ubuntu 14.04 lacks prlimit. - ulimit -n 4096; # CircleCI default is soft 1024, hard 4096. Take it all. - - # Enable the Garbage Collection `gc` object to be exposed so we can try - # to our own, hopefully more graceful, technique. - TOOL_NODE_FLAGS: --expose-gc + ulimit -a # Display all ulimit settings for transparency. 
# This is only to make Meteor self-test not remind us that we can set # this argument for self-tests. SELF_TEST_TOOL_NODE_FLAGS: " " + # Variables for load-balancing + NUM_GROUPS: 11 + RUNNING_AVG_LENGTH: 5 + jobs: Get Ready: <<: *build_machine_environment @@ -114,7 +110,39 @@ jobs: name: Git Submodules. command: (git submodule sync && git submodule update --init --recursive) || (rm -fr .git/config .git/modules && git submodule deinit -f . && git submodule update --init --recursive) - restore_cache: - key: meteor-cache + keys: + - v1-dev-bundle-cache-{{ checksum "meteor" }} + - v1-dev-bundle-cache- + - run: + name: Combine NPM Shrinkwrap Files + command: | + for d in packages/*/.npm/package; do cat $d/npm-shrinkwrap.json >> shrinkwraps.txt; done + for d in packages/*/.npm/plugin/*; do cat $d/npm-shrinkwrap.json >> shrinkwraps.txt; done + - restore_cache: + keys: + - package-npm-deps-cache-group1-v1-{{ checksum "shrinkwraps.txt" }} + - package-npm-deps-cache-group1-v1- + - restore_cache: + keys: + - package-npm-deps-cache-group2-v3-{{ checksum "shrinkwraps.txt" }} + - package-npm-deps-cache-group2-v3- + - restore_cache: + keys: + - v2-other-deps-cache-{{ .Branch }}-{{ .Revision }} + - v2-other-deps-cache-{{ .Branch }}- + - restore_cache: + keys: + - v1-test-groups-{{ .Branch }} + - v1-test-groups- + - run: + name: Create Test Results Directory + command: | + sudo mkdir -p ./tmp/results/junit + sudo chmod a+rwx ./tmp/results/junit + # Clear dev_bundle/.npm to ensure consistent test runs. + - run: + name: Clear npm cache + command: ./meteor npm cache clear --force - run: name: Get Ready command: | @@ -122,15 +150,6 @@ jobs: ./meteor --get-ready # shouldn't take longer than 20 minutes no_output_timeout: 20m - # Clear dev_bundle/.npm to ensure consistent test runs. - - run: - name: Clear npm cache - command: ./meteor npm cache clear --force - # Since PhantomJS has been removed from dev_bundle/lib/node_modules - # (#6905), but self-test still needs it, install it now. 
- - run: - name: Test Prereqs - command: ./meteor npm install -g phantomjs-prebuilt browserstack-webdriver - run: <<: *run_save_node_bin - persist_to_workspace: @@ -141,16 +160,7 @@ jobs: - store_artifacts: path: /tmp/memuse.txt - save_caches: - <<: *build_machine_environment - steps: - - attach_workspace: - at: . - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - - Group 0: + Isolated Tests: <<: *build_machine_environment steps: - run: @@ -163,30 +173,38 @@ jobs: name: "Print environment" command: printenv - run: - name: "Running warehouse self-tests" + name: "Running self-test ('package-tests: add debugOnly and prodOnly packages')" command: | eval $PRE_TEST_COMMANDS; ./meteor self-test \ + 'add debugOnly and prodOnly packages' \ + --retries ${METEOR_SELF_TEST_RETRIES} \ + --headless \ + --phantom + no_output_timeout: 20m + - run: + name: "Running self-test (Custom Warehouse Tests)" + command: | + eval $PRE_TEST_COMMANDS; + ./meteor self-test \ + --retries ${METEOR_SELF_TEST_RETRIES} \ --exclude "${SELF_TEST_EXCLUDE}" \ --headless \ - --junit /tmp/results/junit/0.xml \ + --phantom \ --with-tag "custom-warehouse" no_output_timeout: 20m - run: <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - store_test_results: - path: /tmp/results + path: ./tmp/results - store_artifacts: - path: /tmp/results + path: ./tmp/results - store_artifacts: path: /tmp/core_dumps - store_artifacts: path: /tmp/memuse.txt - Group 1: + Test Group 0: <<: *build_machine_environment steps: - run: @@ -199,31 +217,35 @@ jobs: name: "Print environment" command: printenv - run: - name: "Running self-test (1): A-Com" + name: "Running self-test (Test Group 0)" command: | + if [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=$(<./tmp/test-groups/0.txt); else TEST_GROUP='^[a-b]|^c[a-n]|^co[a-l]|^comm'; fi + echo $TEST_GROUP; eval $PRE_TEST_COMMANDS; ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ --exclude 
"${SELF_TEST_EXCLUDE}" \ --headless \ - --junit /tmp/results/junit/1.xml \ - --file '^[a-b]|^c[a-n]|^co[a-l]|^compiler-plugins' \ + --phantom \ + --junit ./tmp/results/junit/0.xml \ --without-tag "custom-warehouse" no_output_timeout: 20m - run: <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - store_test_results: - path: /tmp/results + path: ./tmp/results + - persist_to_workspace: + root: . + paths: ./tmp/results/junit - store_artifacts: - path: /tmp/results + path: ./tmp/results - store_artifacts: path: /tmp/core_dumps - store_artifacts: path: /tmp/memuse.txt - Group 2: + Test Group 1: <<: *build_machine_environment steps: - run: @@ -236,31 +258,35 @@ jobs: name: "Print environment" command: printenv - run: - name: "Running self-test (2): Con-K" + name: "Running self-test (Test Group 1)" command: | + if [ -f ./tmp/test-groups/1.txt ]; then TEST_GROUP=$(<./tmp/test-groups/1.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^com[n-z]'; fi + echo $TEST_GROUP; eval $PRE_TEST_COMMANDS; ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ --exclude "${SELF_TEST_EXCLUDE}" \ --headless \ - --junit /tmp/results/junit/2.xml \ - --file "^co[n-z]|^c[p-z]|^[d-k]" \ + --phantom \ + --junit ./tmp/results/junit/1.xml \ --without-tag "custom-warehouse" no_output_timeout: 20m - run: <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - store_test_results: - path: /tmp/results + path: ./tmp/results + - persist_to_workspace: + root: . 
+ paths: ./tmp/results/junit - store_artifacts: - path: /tmp/results + path: ./tmp/results - store_artifacts: path: /tmp/core_dumps - store_artifacts: path: /tmp/memuse.txt - Group 3: + Test Group 2: <<: *build_machine_environment steps: - run: @@ -273,31 +299,35 @@ jobs: name: "Print environment" command: printenv - run: - name: "Running self-test (3): L-O" + name: "Running self-test (Test Group 2)" command: | + if [ -f ./tmp/test-groups/2.txt ]; then TEST_GROUP=$(<./tmp/test-groups/2.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^co[n-z]'; fi + echo $TEST_GROUP; eval $PRE_TEST_COMMANDS; ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ --exclude "${SELF_TEST_EXCLUDE}" \ --headless \ - --junit /tmp/results/junit/3.xml \ - --file '^[l-o]' \ + --phantom \ + --junit ./tmp/results/junit/2.xml \ --without-tag "custom-warehouse" no_output_timeout: 20m - run: <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - store_test_results: - path: /tmp/results + path: ./tmp/results + - persist_to_workspace: + root: . 
+ paths: ./tmp/results/junit - store_artifacts: - path: /tmp/results + path: ./tmp/results - store_artifacts: path: /tmp/core_dumps - store_artifacts: path: /tmp/memuse.txt - Group 4: + Test Group 3: <<: *build_machine_environment steps: - run: @@ -310,68 +340,35 @@ jobs: name: "Print environment" command: printenv - run: - name: "Running self-test (4): P" - command: | - eval $PRE_TEST_COMMANDS; - ./meteor self-test \ - --exclude "${SELF_TEST_EXCLUDE}" \ - --headless \ - --junit /tmp/results/junit/4.xml \ - --file '^p' \ - --without-tag "custom-warehouse" - no_output_timeout: 20m - - run: - <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - - store_test_results: - path: /tmp/results - - store_artifacts: - path: /tmp/results - - store_artifacts: - path: /tmp/core_dumps - - store_artifacts: - path: /tmp/memuse.txt - - Group 5: - <<: *build_machine_environment - steps: - - run: - <<: *run_log_mem_use - - run: - <<: *run_env_change - - attach_workspace: - at: . - - run: - name: "Print environment" - command: printenv - - run: - name: "Running self-test (5): Run" + name: "Running self-test (Test Group 3)" command: | + if [ -f ./tmp/test-groups/3.txt ]; then TEST_GROUP=$(<./tmp/test-groups/3.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^c[p-z]|^[d-g]|^h[a-e]'; fi + echo $TEST_GROUP; eval $PRE_TEST_COMMANDS; ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ --exclude "${SELF_TEST_EXCLUDE}" \ --headless \ - --junit /tmp/results/junit/5.xml \ - --file '^run' \ + --phantom \ + --junit ./tmp/results/junit/3.xml \ --without-tag "custom-warehouse" no_output_timeout: 20m - run: <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - store_test_results: - path: /tmp/results + path: ./tmp/results + - persist_to_workspace: + root: . 
+ paths: ./tmp/results/junit - store_artifacts: - path: /tmp/results + path: ./tmp/results - store_artifacts: path: /tmp/core_dumps - store_artifacts: path: /tmp/memuse.txt - Group 6: + Test Group 4: <<: *build_machine_environment steps: - run: @@ -384,31 +381,35 @@ jobs: name: "Print environment" command: printenv - run: - name: "Running self-test (6): R-S" + name: "Running self-test (Test Group 4)" command: | + if [ -f ./tmp/test-groups/4.txt ]; then TEST_GROUP=$(<./tmp/test-groups/4.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^h[f-z]|^[i-l]'; fi + echo $TEST_GROUP; eval $PRE_TEST_COMMANDS; ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ --exclude "${SELF_TEST_EXCLUDE}" \ --headless \ - --junit /tmp/results/junit/6.xml \ - --file '^r(?!un)|^s' \ + --phantom \ + --junit ./tmp/results/junit/4.xml \ --without-tag "custom-warehouse" no_output_timeout: 20m - run: <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - store_test_results: - path: /tmp/results + path: ./tmp/results + - persist_to_workspace: + root: . 
+ paths: ./tmp/results/junit - store_artifacts: - path: /tmp/results + path: ./tmp/results - store_artifacts: path: /tmp/core_dumps - store_artifacts: path: /tmp/memuse.txt - Group 7: + Test Group 5: <<: *build_machine_environment steps: - run: @@ -421,56 +422,406 @@ jobs: name: "Print environment" command: printenv - run: - name: "Running self-test (7): Sp-Z" + name: "Running self-test (Test Group 5)" command: | + if [ -f ./tmp/test-groups/5.txt ]; then TEST_GROUP=$(<./tmp/test-groups/5.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^m[a-n]|^mo[a-d]'; fi + echo $TEST_GROUP; eval $PRE_TEST_COMMANDS; ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ --exclude "${SELF_TEST_EXCLUDE}" \ --headless \ - --junit /tmp/results/junit/7.xml \ - --file '^[t-z]|^command-line' \ + --phantom \ + --junit ./tmp/results/junit/5.xml \ --without-tag "custom-warehouse" no_output_timeout: 20m - run: <<: *run_save_node_bin - - save_cache: - key: meteor-cache - <<: *meteor_cache_dirs - store_test_results: - path: /tmp/results + path: ./tmp/results + - persist_to_workspace: + root: . + paths: ./tmp/results/junit - store_artifacts: - path: /tmp/results + path: ./tmp/results - store_artifacts: path: /tmp/core_dumps - store_artifacts: path: /tmp/memuse.txt + Test Group 6: + <<: *build_machine_environment + steps: + - run: + <<: *run_log_mem_use + - run: + <<: *run_env_change + - attach_workspace: + at: . 
+ - run: + name: "Print environment" + command: printenv + - run: + name: "Running self-test (Test Group 6)" + command: | + if [ -f ./tmp/test-groups/6.txt ]; then TEST_GROUP=$(<./tmp/test-groups/6.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^mo[e-z]|^m[p-z]|^[n-o]'; fi + echo $TEST_GROUP; + eval $PRE_TEST_COMMANDS; + ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ + --exclude "${SELF_TEST_EXCLUDE}" \ + --headless \ + --phantom \ + --junit ./tmp/results/junit/6.xml \ + --without-tag "custom-warehouse" + no_output_timeout: 20m + - run: + <<: *run_save_node_bin + - store_test_results: + path: ./tmp/results + - persist_to_workspace: + root: . + paths: ./tmp/results/junit + - store_artifacts: + path: ./tmp/results + - store_artifacts: + path: /tmp/core_dumps + - store_artifacts: + path: /tmp/memuse.txt + + Test Group 7: + <<: *build_machine_environment + steps: + - run: + <<: *run_log_mem_use + - run: + <<: *run_env_change + - attach_workspace: + at: . + - run: + name: "Print environment" + command: printenv + - run: + name: "Running self-test (Test Group 7)" + command: | + if [ -f ./tmp/test-groups/7.txt ]; then TEST_GROUP=$(<./tmp/test-groups/7.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^[p-q]|^r[a-e]'; fi + echo $TEST_GROUP; + eval $PRE_TEST_COMMANDS; + ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ + --exclude "${SELF_TEST_EXCLUDE}" \ + --headless \ + --phantom \ + --junit ./tmp/results/junit/7.xml \ + --without-tag "custom-warehouse" + no_output_timeout: 20m + - run: + <<: *run_save_node_bin + - store_test_results: + path: ./tmp/results + - persist_to_workspace: + root: . 
+ paths: ./tmp/results/junit + - store_artifacts: + path: ./tmp/results + - store_artifacts: + path: /tmp/core_dumps + - store_artifacts: + path: /tmp/memuse.txt + + Test Group 8: + <<: *build_machine_environment + steps: + - run: + <<: *run_log_mem_use + - run: + <<: *run_env_change + - attach_workspace: + at: . + - run: + name: "Print environment" + command: printenv + - run: + name: "Running self-test (Test Group 8)" + command: | + if [ -f ./tmp/test-groups/8.txt ]; then TEST_GROUP=$(<./tmp/test-groups/8.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^r[f-z]'; fi + echo $TEST_GROUP; + eval $PRE_TEST_COMMANDS; + ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ + --exclude "${SELF_TEST_EXCLUDE}" \ + --headless \ + --phantom \ + --junit ./tmp/results/junit/8.xml \ + --without-tag "custom-warehouse" + no_output_timeout: 20m + - run: + <<: *run_save_node_bin + - store_test_results: + path: ./tmp/results + - persist_to_workspace: + root: . + paths: ./tmp/results/junit + - store_artifacts: + path: ./tmp/results + - store_artifacts: + path: /tmp/core_dumps + - store_artifacts: + path: /tmp/memuse.txt + + Test Group 9: + <<: *build_machine_environment + steps: + - run: + <<: *run_log_mem_use + - run: + <<: *run_env_change + - attach_workspace: + at: . 
+ - run: + name: "Print environment" + command: printenv + - run: + name: "Running self-test (Test Group 9)" + command: | + if [ -f ./tmp/test-groups/9.txt ]; then TEST_GROUP=$(<./tmp/test-groups/9.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^s'; fi + echo $TEST_GROUP; + eval $PRE_TEST_COMMANDS; + ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ + --exclude "${SELF_TEST_EXCLUDE}" \ + --headless \ + --phantom \ + --junit ./tmp/results/junit/9.xml \ + --without-tag "custom-warehouse" + no_output_timeout: 20m + - run: + <<: *run_save_node_bin + - store_test_results: + path: ./tmp/results + - persist_to_workspace: + root: . + paths: ./tmp/results/junit + - store_artifacts: + path: ./tmp/results + - store_artifacts: + path: /tmp/core_dumps + - store_artifacts: + path: /tmp/memuse.txt + + Test Group 10: + <<: *build_machine_environment + steps: + - run: + <<: *run_log_mem_use + - run: + <<: *run_env_change + - attach_workspace: + at: . + - run: + name: "Print environment" + command: printenv + - run: + name: "Running self-test (Test Group 10)" + command: | + if [ -f ./tmp/test-groups/10.txt ]; then TEST_GROUP=$(<./tmp/test-groups/10.txt); elif [ -f ./tmp/test-groups/0.txt ]; then TEST_GROUP=XXXXX; else TEST_GROUP='^[t-z]'; fi + echo $TEST_GROUP; + eval $PRE_TEST_COMMANDS; + ./meteor self-test \ + "$TEST_GROUP" \ + --retries ${METEOR_SELF_TEST_RETRIES} \ + --exclude "${SELF_TEST_EXCLUDE}" \ + --headless \ + --phantom \ + --junit ./tmp/results/junit/10.xml \ + --without-tag "custom-warehouse" + no_output_timeout: 20m + - run: + <<: *run_save_node_bin + - store_test_results: + path: ./tmp/results + - persist_to_workspace: + root: . 
+ paths: ./tmp/results/junit + - store_artifacts: + path: ./tmp/results + - store_artifacts: + path: /tmp/core_dumps + - store_artifacts: + path: /tmp/memuse.txt + + # Test the JSDoc declarations which live within this codebase against the + # Meteor Docs (https://github.com/meteor/docs) repository, where they'll + # eventually be consumed. This test aims to provide an early warning of + # potentially breaking changes, so they aren't discovered when the docs are + # next updated, which generally occurs during major Meteor version releases + # (for example, 1.4 to 1.5, 1.5 to 1.6). + Docs: + docker: + # This Node version should match that in the meteor/docs CircleCI config. + - image: circleci/node:8 + environment: + CHECKOUT_METEOR_DOCS: /home/circleci/test_docs + steps: + - run: + name: Cloning "meteor/docs" Repository's "master" branch + command: | + git clone https://github.com/meteor/docs.git ${CHECKOUT_METEOR_DOCS} + # The "docs" repository normally brings in the Meteor code as a Git + # submodule checked out into the "code" directory. As the goal of this + # test is to run it against the _current_ repository's code, we'll move + # the "code" directory out of the way and move the checkout (of meteor) + # into that directory, rather than the default $CIRCLE_WORKING_DIRECTORY. + - checkout + - run: + name: Move Meteor checkout into docs repository's "code" directory + command: | + rmdir "${CHECKOUT_METEOR_DOCS}/code" + # $CIRCLE_WORKING_DIRECTORY uses a tilde, so expand it to $HOME. + mv "${CIRCLE_WORKING_DIRECTORY/#\~/$HOME}" \ + "${CHECKOUT_METEOR_DOCS}/code" + # Run almost the same steps the meteor/docs repository runs, minus deploy. + - run: + name: Generating Meteor documentation for JSDoc testing + command: | + cd ${CHECKOUT_METEOR_DOCS} + npm install + npm test + + Clean Up: + <<: *build_machine_environment + steps: + - attach_workspace: + at: . 
+ - run: + name: Create Test Groups Directory + command: | + sudo mkdir -p ./tmp/test-groups + sudo chmod a+rwx ./tmp/test-groups + - run: + name: Calculate Balanced Test Groups + command: | + npm install --prefix ./scripts/test-balancer + npm start --prefix ./scripts/test-balancer --num-groups ${NUM_GROUPS} --running-avg-length ${RUNNING_AVG_LENGTH} + - save_cache: + key: v1-test-groups-{{ .Branch }}-{{ .BuildNum }} + paths: + - ./tmp/test-groups + when: on_success + - save_cache: + key: v1-dev-bundle-cache-{{ checksum "meteor" }} + paths: + - "dev_bundle" + # The package npm dependencies are split into two caches to avoid an AWS + # `MetadataTooLarge` error that consistently appears if we put all of + # these folders in the same cache + - save_cache: + key: package-npm-deps-cache-group1-v1-{{ checksum "shrinkwraps.txt" }} + paths: + - packages/meteor/.npm/package/node_modules + - packages/modules-runtime/.npm/package/node_modules + - packages/modules/.npm/package/node_modules + - packages/ecmascript-runtime-server/.npm/package/node_modules + - packages/promise/.npm/package/node_modules + - packages/babel-compiler/.npm/package/node_modules + - packages/babel-runtime/.npm/package/node_modules + - packages/http/.npm/package/node_modules + - packages/socket-stream-client/.npm/package/node_modules + - packages/ddp-client/.npm/package/node_modules + - packages/npm-mongo/.npm/package/node_modules + - packages/package-version-parser/.npm/package/node_modules + - packages/boilerplate-generator/.npm/package/node_modules + - save_cache: + key: package-npm-deps-cache-group2-v3-{{ checksum "shrinkwraps.txt" }} + paths: + - packages/xmlbuilder/.npm/package/node_modules + - packages/logging/.npm/package/node_modules + - packages/webapp/.npm/package/node_modules + - packages/ddp-server/.npm/package/node_modules + - packages/mongo/.npm/package/node_modules + - packages/npm-bcrypt/.npm/package/node_modules + - packages/email/.npm/package/node_modules + - 
packages/caching-compiler/.npm/package/node_modules + - packages/less/.npm/plugin/compileLessBatch/node_modules + - packages/non-core/blaze/packages/spacebars-compiler/.npm/package/node_modules + - packages/boilerplate-generator-tests/.npm/package/node_modules + - packages/non-core/bundle-visualizer/.npm/package/node_modules + - packages/d3-hierarchy/.npm/package/node_modules + - packages/non-core/coffeescript-compiler/.npm/package/node_modules + - packages/server-render/.npm/package/node_modules + - packages/es5-shim/.npm/package/node_modules + - packages/force-ssl-common/.npm/package/node_modules + - packages/jshint/.npm/plugin/lintJshint/node_modules + - packages/minifier-css/.npm/package/node_modules + - packages/minifier-js/.npm/package/node_modules + - packages/standard-minifier-css/.npm/plugin/minifyStdCSS/node_modules + - packages/inter-process-messaging/.npm/package/node_modules + - packages/fetch/.npm/package/node_modules + - packages/non-core/mongo-decimal/.npm/package/node_modules + - save_cache: + key: v2-other-deps-cache-{{ .Branch }}-{{ .Revision }} + paths: + - ".babel-cache" + - ".meteor" + workflows: version: 2 Build and Test: jobs: + - Docs - Get Ready - - Group 0: + - Isolated Tests: requires: - Get Ready - - Group 1: + - Test Group 0: requires: - Get Ready - - Group 2: + - Test Group 1: requires: - Get Ready - - Group 3: + - Test Group 2: requires: - Get Ready - - Group 4: + - Test Group 3: requires: - Get Ready - - Group 5: + - Test Group 4: requires: - Get Ready - - Group 6: + - Test Group 5: requires: - Get Ready - - Group 7: + - Test Group 6: requires: - Get Ready + - Test Group 7: + requires: + - Get Ready + - Test Group 8: + requires: + - Get Ready + - Test Group 9: + requires: + - Get Ready + - Test Group 10: + requires: + - Get Ready + - Clean Up: + requires: + - Isolated Tests + - Test Group 0 + - Test Group 1 + - Test Group 2 + - Test Group 3 + - Test Group 4 + - Test Group 5 + - Test Group 6 + - Test Group 7 + - Test Group 8 + - Test 
Group 9 + - Test Group 10 diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index f436b53cbc..758bfb59fe 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -12,9 +12,9 @@ first and read the instructions for filing a bug report: https://github.com/meteor/meteor/blob/devel/CONTRIBUTING.md#reporting-a-bug-in-meteor ### This bug report should include: -- [ ] A descriptive title +- [ ] A short, but descriptive title. The title doesn't need "Meteor" in it. - [ ] The version of Meteor showing the problem. -- [ ] The last version of Meteor where the problem did _not_ occur (if applicable) +- [ ] The last version of Meteor where the problem did _not_ occur, if applicable. - [ ] The operating system you're running Meteor on. - [ ] The expected behavior. - [ ] The actual behavior. diff --git a/.travis.yml b/.travis.yml index e943bf20c2..0cf166baac 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,11 +1,11 @@ language: node_js node_js: - - "4.0" + - "8.11.1" cache: directories: - ".meteor" - ".babel-cache" -script: TEST_PACKAGES_EXCLUDE="less" ./packages/test-in-console/run.sh +script: TEST_PACKAGES_EXCLUDE="less" phantom=false ./packages/test-in-console/run.sh sudo: false env: - CXX=g++-4.8 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 33ca5cfa03..37c9d5b073 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,7 +11,7 @@ Before we jump into detailed guidelines for opening and triaging issues and subm There are many ways to contribute to the Meteor Project. Here’s a list of technical contributions with increasing levels of involvement and required knowledge of Meteor’s code and operations. 
- [Reporting a bug](CONTRIBUTING.md#reporting-a-bug-in-meteor) - [Triaging issues](ISSUE_TRIAGE.md) -- [Contributing to documentation](https://github.com/meteor/docs/blob/master/CONTRIBUTING.md) +- [Contributing to documentation](CONTRIBUTING.md#documentation) - [Finding work](CONTRIBUTING.md#finding-work) - [Submitting pull requests](CONTRIBUTING.md#making-changes-to-meteor-core) - [Reviewing pull requests](CONTRIBUTING.md#reviewer) @@ -29,6 +29,8 @@ Issues which *also* have the `confirmed` label ([bugs](https://github.com/meteor Any issue which does not have the `confirmed` label still requires discussion on implementation details but input and positive commentary is welcome! Any pull request opened on an issue which is not `confirmed` is still welcome, however the pull-request is more likely to be sent back for reworking than a `confirmed` issue. If in doubt about the best way to implement something, please create additional conversation on the issue. +Please note that `pull-requests-encouraged` issues with low activity will often be closed without being implemented. These issues are tagged with an additional [`not-implemented`](https://github.com/meteor/meteor/issues?utf8=✓&q=label%3Apull-requests-encouraged+label%3Anot-implemented) label, and can still be considered good candidates to work on. If you're interested in working on a closed and `not-implemented` issue, please let us know by posting on that issue. + ### Project roles We’ve just begun to create more defined project roles for Meteor. Here are descriptions of the existing project roles, along with the current contributors taking on those roles today. @@ -168,7 +170,7 @@ A great way to contribute to Meteor is by helping keep the issues in the reposit ## Documentation -If you'd like to contribution to Meteor's documentation, head over to https://github.com/meteor/docs and create issues or pull requests there. 
+If you'd like to contribute to Meteor's documentation, head over to https://github.com/meteor/docs and create issues or pull requests there. ## Blaze diff --git a/History.md b/History.md index 70304ff5e2..53e80785fc 100644 --- a/History.md +++ b/History.md @@ -1,14 +1,1103 @@ ## v.NEXT -* [`cordova-lib`](https://github.com/apache/cordova-cli) has been updated to - version 7.1.0, [`cordova-android`](https://github.com/apache/cordova-android/) - has been updated to version 6.3.0, and [`cordova-ios`](https://github.com/apache/cordova-ios/) - has been updated to version 4.5.3. The cordova-plugins `cordova-plugin-console`, - `cordova-plugin-device-motion`, and `cordova-plugin-device-orientation` have been - [deprecated](https://cordova.apache.org/news/2017/09/22/plugins-release.html) +## v1.8, 2018-10-08 + +### Breaking changes +N/A + +### Migration Steps + +* Update the `@babel/runtime` npm package to version 7.0.0 or later: + + ```sh + meteor npm install @babel/runtime@latest + ``` + +### Changes + +* Although Node 8.12.0 has been released, Meteor 1.8 still uses Node + 8.11.4, due to concerns about excessive garbage collection and CPU usage + in production. To enable Galaxy customers to use Node 8.12.0, we are + planning a quick follow-up Meteor 1.8.1 release, which can be obtained + by running the command + ```bash + meteor update --release 1.8.1 + ``` + [Issue #10216](https://github.com/meteor/meteor/issues/10216) + +* Meteor 1.7 introduced a new client bundle called `web.browser.legacy` in + addition to the `web.browser` (modern) and `web.cordova` bundles. + Naturally, this extra bundle increased client (re)build times. 
Since + developers spend most of their time testing the modern bundle in + development, and the legacy bundle mostly provides a safe fallback in + production, Meteor 1.8 cleverly postpones building the legacy bundle + until just after the development server restarts, so that development + can continue as soon as the modern bundle has finished building. Since + the legacy build happens during a time when the build process would + otherwise be completely idle, the impact of the legacy build on server + performance is minimal. Nevertheless, the legacy bundle still gets + rebuilt regularly, so any legacy build errors will be surfaced in a + timely fashion, and legacy clients can test the new legacy bundle by + waiting a bit longer than modern clients. Applications using the + `autoupdate` or `hot-code-push` packages will reload modern and legacy + clients independently, once each new bundle becomes available. + [Issue #9948](https://github.com/meteor/meteor/issues/9948) + [PR #10055](https://github.com/meteor/meteor/pull/10055) + +* Compiler plugins that call `inputFile.addJavaScript` or + `inputFile.addStylesheet` may now delay expensive compilation work by + passing partial options (`{ path, hash }`) as the first argument, + followed by a callback function as the second argument, which will be + called by the build system once it knows the module will actually be + included in the bundle. 
For example, here's the old implementation of + `BabelCompiler#processFilesForTarget`: + ```js + processFilesForTarget(inputFiles) { + inputFiles.forEach(inputFile => { + var toBeAdded = this.processOneFileForTarget(inputFile); + if (toBeAdded) { + inputFile.addJavaScript(toBeAdded); + } + }); + } + ``` + and here's the new version: + ```js + processFilesForTarget(inputFiles) { + inputFiles.forEach(inputFile => { + if (inputFile.supportsLazyCompilation) { + inputFile.addJavaScript({ + path: inputFile.getPathInPackage(), + hash: inputFile.getSourceHash(), + }, function () { + return this.processOneFileForTarget(inputFile); + }); + } else { + var toBeAdded = this.processOneFileForTarget(inputFile); + if (toBeAdded) { + inputFile.addJavaScript(toBeAdded); + } + } + }); + } + ``` + If you are an author of a compiler plugin, we strongly recommend using + this new API, since unnecessary compilation of files that are not + included in the bundle can be a major source of performance problems for + compiler plugins. Although this new API is only available in Meteor 1.8, + you can use `inputFile.supportsLazyCompilation` to determine dynamically + whether the new API is available, so you can support older versions of + Meteor without having to publish multiple versions of your package. [PR + #9983](https://github.com/meteor/meteor/pull/9983) + +* New [React](https://reactjs.org/)-based Meteor applications can now be + created using the command + ```bash + meteor create --react new-react-app + ``` + Though relatively simple, this application template reflects the ideas + of many contributors, especially [@dmihal](https://github.com/dmihal) + and [@alexsicart](https://github.com/alexsicart), and it will no doubt + continue to evolve in future Meteor releases. 
+ [Feature #182](https://github.com/meteor/meteor-feature-requests/issues/182) + [PR #10149](https://github.com/meteor/meteor/pull/10149) + +* The `.meteor/packages` file supports a new syntax for overriding + problematic version constraints from packages you do not control. + + If a package version constraint in `.meteor/packages` ends with a `!` + character, any other (non-`!`) constraints on that package elsewhere in + the application will be _weakened_ to allow any version greater than or + equal to the constraint, even if the major/minor versions do not match. + + For example, using both CoffeeScript 2 and `practicalmeteor:mocha` used + to be impossible (or at least very difficult) because of this + [`api.versionsFrom("1.3")`](https://github.com/practicalmeteor/meteor-mocha/blob/3a2658070a920f8846df48bb8d8c7b678b8c6870/package.js#L28) + statement, which unfortunately constrained the `coffeescript` package to + version 1.x. In Meteor 1.8, if you want to update `coffeescript` to + 2.x, you can relax the `practicalmeteor:mocha` constraint by putting + ``` + coffeescript@2.2.1_1! # note the ! + ``` + in your `.meteor/packages` file. The `coffeescript` version still needs + to be at least 1.x, so that `practicalmeteor:mocha` can count on that + minimum. However, `practicalmeteor:mocha` will no longer constrain the + major version of `coffeescript`, so `coffeescript@2.2.1_1` will work. + + [Feature #208](https://github.com/meteor/meteor-feature-requests/issues/208) + [Commit 4a70b12e](https://github.com/meteor/meteor/commit/4a70b12eddef00b6700f129e90018a6076cb1681) + [Commit 9872a3a7](https://github.com/meteor/meteor/commit/9872a3a71df033e4cf6290b75fea28f44427c0c2) + +* The `npm` package has been upgraded to version 6.4.1, and our + [fork](https://github.com/meteor/pacote/tree/v8.1.6-meteor) of its + `pacote` dependency has been rebased against version 8.1.6. 
+ +* The `node-gyp` npm package has been updated to version 3.7.0, and the + `node-pre-gyp` npm package has been updated to version 0.10.3. + +* Scripts run via `meteor npm ...` can now use the `meteor` command more + safely, since the `PATH` environment variable will now be set so that + `meteor` always refers to the same `meteor` used to run `meteor npm`. + [PR #9941](https://github.com/meteor/meteor/pull/9941) + +* Minimongo's behavior for sorting fields containing an array + is now compatible with the behavior of [Mongo 3.6+](https://docs.mongodb.com/manual/release-notes/3.6-compatibility/#array-sort-behavior). + Note that this means it is now incompatible with the behavior of earlier MongoDB versions. + [PR #10214](https://github.com/meteor/meteor/pull/10214) + +* Meteor's `self-test` has been updated to use "headless" Chrome rather + than PhantomJS for browser tests. PhantomJS can still be forced by + passing the `--phantom` flag to the `meteor self-test` command. + [PR #9814](https://github.com/meteor/meteor/pull/9814) + +* Importing a directory containing an `index.*` file now works for + non-`.js` file extensions. As before, the list of possible extensions is + defined by which compiler plugins you have enabled. + [PR #10027](https://github.com/meteor/meteor/pull/10027) + +* Any client (modern or legacy) may now request any static JS or CSS + `web.browser` or `web.browser.legacy` resource, even if it was built for + a different architecture, which greatly simplifies CDN setup if your CDN + does not forward the `User-Agent` header to the origin. + [Issue #9953](https://github.com/meteor/meteor/issues/9953) + [PR #9965](https://github.com/meteor/meteor/pull/9965) + +* Cross-origin dynamic `import()` requests will now succeed in more cases. 
+ [PR #9954](https://github.com/meteor/meteor/pull/9954) + +* Dynamic CSS modules (which are compiled to JS and handled like any other + JS module) will now be properly minified in production and source mapped + in development. [PR #9998](https://github.com/meteor/meteor/pull/9998) + +* While CSS is only minified in production, CSS files must be merged + together into a single stylesheet in both development and production. + This merging is [cached by `standard-minifier-css`](https://github.com/meteor/meteor/blob/183d5ff9500d908d537f58d35ce6cd6d780ab270/packages/standard-minifier-css/plugin/minify-css.js#L58-L62) + so that it does not happen on every rebuild in development, but not all + CSS minifier packages use the same caching techniques. Thanks to + [1ed095c36d](https://github.com/meteor/meteor/pull/9942/commits/1ed095c36d7b2915872eb0c943dae0c4f870d7e4), + this caching is now performed within the Meteor build tool, so it works + the same way for all CSS minifier packages, which may eliminate a few + seconds of rebuild time for projects with lots of CSS. + +* The `meteor-babel` npm package used by `babel-compiler` has been updated + to version 7.1.0. **Note:** This change _requires_ also updating the + `@babel/runtime` npm package to version 7.0.0-beta.56 or later: + ```sh + meteor npm install @babel/runtime@latest + ``` + [`meteor-babel` issue #22](https://github.com/meteor/babel/issues/22) + +* The `@babel/preset-env` and `@babel/preset-react` presets will be + ignored by Meteor if included in a `.babelrc` file, since Meteor already + provides equivalent/superior functionality without them. However, you + should feel free to leave these plugins in your `.babelrc` file if they + are needed by external tools. + +* The `install` npm package used by `modules-runtime` has been updated to + version 0.12.0. 
+ +* The `reify` npm package has been updated to version 0.17.3, which + introduces the `module.link(id, {...})` runtime method as a replacement + for `module.watch(require(id), {...})`. Note: in future versions of + `reify` and Meteor, the `module.watch` runtime API will be removed, but + for now it still exists (and is used to implement `module.link`), so + that existing code will continue to work without recompilation. + +* The `uglify-es` npm package used by `minifier-js` has been replaced with + [`terser@3.9.2`](https://www.npmjs.com/package/terser), a fork of + `uglify-es` that appears to be (more actively) maintained. + [Issue #10042](https://github.com/meteor/meteor/issues/10042) + +* Mongo has been updated to version 4.0.2 and the `mongodb` npm package + used by `npm-mongo` has been updated to version 3.1.6. + [PR #10058](https://github.com/meteor/meteor/pull/10058) + [Feature Request #269](https://github.com/meteor/meteor-feature-requests/issues/269) + +* When a Meteor application uses a compiler plugin to process files with a + particular file extension (other than `.js` or `.json`), those file + extensions should be automatically appended to imports that do not + resolve as written. However, this behavior was not previously enabled + for modules inside `node_modules`. Thanks to + [8b04c25390](https://github.com/meteor/meteor/pull/9942/commits/8b04c253900e4ca2a194d2fcaf6fc2ce9a9085e7), + the same file extensions that are applied to modules outside the + `node_modules` directory will now be applied to those within it, though + `.js` and `.json` will always be tried first. 
+ +* As foreshadowed in this [talk](https://youtu.be/vpCotlPieIY?t=29m18s) + about Meteor 1.7's modern/legacy bundling system + ([slides](https://slides.com/benjamn/meteor-night-may-2018#/46)), Meteor + now provides an isomorphic implementation of the [WHATWG `fetch()` + API](https://fetch.spec.whatwg.org/), which can be installed by running + ```sh + meteor add fetch + ``` + This package is a great demonstration of the modern/legacy bundling + system, since it has very different implementations in modern + browsers, legacy browsers, and Node. + [PR #10029](https://github.com/meteor/meteor/pull/10029) + +* The [`bundle-visualizer` + package](https://github.com/meteor/meteor/tree/release-1.7.1/packages/non-core/bundle-visualizer) + has received a number of UI improvements thanks to work by + [@jamesmillerburgess](https://github.com/jamesmillerburgess) in + [PR #10025](https://github.com/meteor/meteor/pull/10025). + [Feature #310](https://github.com/meteor/meteor-feature-requests/issues/310) + +* Sub-resource integrity hashes (sha512) can now be enabled for static CSS + and JS assets by calling `WebAppInternals.enableSubresourceIntegrity()`. + [PR #9933](https://github.com/meteor/meteor/pull/9933) + [PR #10050](https://github.com/meteor/meteor/pull/10050) + +* The environment variable `METEOR_PROFILE=milliseconds` now works for the + build portion of the `meteor build` and `meteor deploy` commands. + [Feature #239](https://github.com/meteor/meteor-feature-requests/issues/239) + +* Babel compiler plugins will now receive a `caller` option of the + following form: + ```js + { name: "meteor", arch } + ``` + where `arch` is the target architecture, e.g. `os.*`, `web.browser`, + `web.cordova`, or `web.browser.legacy`. 
+ [PR #10211](https://github.com/meteor/meteor/pull/10211) + +## v1.7.0.5, 2018-08-16 + +### Breaking changes +N/A + +### Migration Steps +N/A + +### Changes + +* Node has been updated to version + [8.11.4](https://nodejs.org/en/blog/release/v8.11.4/), an important + [security release](https://nodejs.org/en/blog/vulnerability/august-2018-security-releases/). + +## v1.7.0.4, 2018-08-07 + +### Breaking changes +N/A + +### Migration Steps +N/A + +### Changes + +* The npm package `@babel/runtime`, which is depended on by most Meteor + apps, introduced a breaking change in version `7.0.0-beta.56` with the + removal of the `@babel/runtime/helpers/builtin` directory. While this + change has clear benefits in the long term, in the short term it has + been disruptive for Meteor 1.7.0.x applications that accidentally + updated to the latest version of `@babel/runtime`. Meteor 1.7.0.4 is a + patch release that provides better warnings about this problem, and + ensures newly created Meteor applications do not use `7.0.0-beta.56`. + [PR #10134](https://github.com/meteor/meteor/pull/10134) + +* The `npm` package has been upgraded to version 6.3.0, and our + [fork](https://github.com/meteor/pacote/tree/v8.1.6-meteor) of its + `pacote` dependency has been rebased against version 8.1.6. + [Issue #9940](https://github.com/meteor/meteor/issues/9940) + +* The `reify` npm package has been updated to version 0.16.4. + +## v1.7.0.3, 2018-06-13 + +### Breaking changes +N/A + +### Migration Steps +N/A + +### Changes + +* Fixed [Issue #9991](https://github.com/meteor/meteor/issues/9991), + introduced in + [Meteor 1.7.0.2](https://github.com/meteor/meteor/pull/9990) + by [PR #9977](https://github.com/meteor/meteor/pull/9977). 
+ +## v1.7.0.2, 2018-06-13 + +### Breaking changes +N/A + +### Migration Steps +N/A + +### Changes + +* Node has been updated to version + [8.11.3](https://nodejs.org/en/blog/release/v8.11.3/), an important + [security release](https://nodejs.org/en/blog/vulnerability/june-2018-security-releases/). + +* The `meteor-babel` npm package has been updated to version + [7.0.0-beta.51](https://github.com/babel/babel/releases/tag/v7.0.0-beta.51). + +* Meteor apps created with `meteor create` or `meteor create --minimal` + will now have a directory called `tests/` rather than `test/`, so that + test code will not be eagerly loaded if you decide to remove the + `meteor.mainModule` configuration from `package.json`, thanks to + [PR #9977](https://github.com/meteor/meteor/pull/9977) by + [@robfallows](https://github.com/robfallows). + [Issue #9961](https://github.com/meteor/meteor/issues/9961) + +## v1.7.0.1, 2018-05-29 + +### Breaking changes + +* The `aggregate` method of raw Mongo collections now returns an + `AggregationCursor` rather than returning the aggregation result + directly. To obtain an array of aggregation results, you will need to + call the `.toArray()` method of the cursor: + ```js + // With MongoDB 2.x, callback style: + rawCollection.aggregate( + pipeline, + (error, results) => {...} + ); + + // With MongoDB 2.x, wrapAsync style: + const results = Meteor.wrapAsync( + rawCollection.aggregate, + rawCollection + )(pipeline); + + // With MongoDB 3.x, callback style: + rawCollection.aggregate( + pipeline, + (error, aggregationCursor) => { + ... + const results = aggregationCursor.toArray(); + ... 
+ } + ); + + // With MongoDB 3.x, wrapAsync style: + const results = Meteor.wrapAsync( + rawCollection.aggregate, + rawCollection + )(pipeline).toArray(); + ``` + [Issue #9936](https://github.com/meteor/meteor/issues/9936) + +### Migration Steps + +* Update `@babel/runtime` (as well as other Babel-related packages) and + `meteor-node-stubs` to their latest versions: + ```sh + meteor npm install @babel/runtime@latest meteor-node-stubs@latest + ``` + +### Changes + +* Reverted an [optimization](https://github.com/meteor/meteor/pull/9825) + introduced in Meteor 1.7 to stop scanning `node_modules` for files that + might be of interest to compiler plugins, since the intended workarounds + (creating symlinks) did not satisfy all existing use cases. We will + revisit this optimization in Meteor 1.8. + [mozfet/meteor-autoform-materialize#43](https://github.com/mozfet/meteor-autoform-materialize/issues/43) + +* After updating to Meteor 1.7 or 1.7.0.1, you should update the + `@babel/runtime` npm package (as well as other Babel-related packages) + to their latest versions, along with the `meteor-node-stubs` package, + by running the following command: + ```sh + meteor npm install @babel/runtime@latest meteor-node-stubs@latest + ``` + +## v1.7, 2018-05-28 + +### Breaking changes +N/A + +### Migration Steps +N/A + +### Changes + +* More than 80% of internet users worldwide have access to a web browser + that natively supports the latest ECMAScript features and keeps itself + updated automatically, which means new features become available almost + as soon as they ship. In other words, the future we envisioned when we + first began [compiling code with + Babel](https://blog.meteor.com/how-much-does-ecmascript-2015-cost-2ded41d70914) + is finally here, yet most web frameworks and applications still compile + a single client-side JavaScript bundle that must function simultaneously + in the oldest and the newest browsers the application developer wishes + to support. 
+ + That choice is understandable, because the alternative is daunting: not + only must you build multiple JavaScript and CSS bundles for different + browsers, with different dependency graphs and compilation rules and + webpack configurations, but your server must also be able to detect the + capabilities of each visiting client, so that it can deliver the + appropriate assets at runtime. Testing a matrix of different browsers + and application versions gets cumbersome quickly, so it's no surprise + that responsible web developers would rather ship a single, well-tested + bundle, and forget about taking advantage of modern features until + legacy browsers have disappeared completely. + + With Meteor 1.7, this awkward balancing act is no longer necessary, + because Meteor now automatically builds two sets of client-side assets, + one tailored to the capabilities of modern browsers, and the other + designed to work in all supported browsers, thus keeping legacy browsers + working exactly as they did before. Best of all, the entire Meteor + community relies on the same system, so any bugs or differences in + behavior can be identified and fixed quickly. + + In this system, a "modern" browser can be loosely defined as one with + full native support for `async` functions and `await` expressions, which + includes more than 80% of the world market, and 85% of the US market + ([source](https://caniuse.com/#feat=async-functions)). This standard may + seem extremely strict, since `async`/`await` was [just finalized in + ECMAScript 2017](http://2ality.com/2016/10/async-function-tips.html), + but the statistics clearly justify it. As another example, any modern + browser can handle native `class` syntax, though newer syntax like class + fields may still need to be compiled for now, whereas a legacy browser + will need compilation for both advanced and basic `class` syntax. 
And of + course you can safely assume that any modern browser has a native + `Promise` implementation, because `async` functions must return + `Promise`s. The list goes on and on. + + This boundary between modern and legacy browsers is designed to be tuned + over time, not only by the Meteor framework itself but also by each + individual Meteor application. For example, here's how the minimum + versions for native ECMAScript `class` support might be expressed: + + ```js + import { setMinimumBrowserVersions } from "meteor/modern-browsers"; + + setMinimumBrowserVersions({ + chrome: 49, + firefox: 45, + edge: 12, + ie: Infinity, // Sorry, IE11. + mobile_safari: [9, 2], // 9.2.0+ + opera: 36, + safari: 9, + electron: 1, + }, "classes"); + ``` + + The minimum modern version for each browser is simply the maximum of all + versions passed to `setMinimumBrowserVersions` for that browser. The + Meteor development server decides which assets to deliver to each client + based on the `User-Agent` string of the HTTP request. In production, + different bundles are named with unique hashes, which prevents cache + collisions, though Meteor also sets the `Vary: User-Agent` HTTP response + header to let well-behaved clients know they should cache modern and + legacy resources separately. + + For the most part, the modern/legacy system will transparently determine + how your code is compiled, bundled, and delivered—and yes, it + works with every existing part of Meteor, including dynamic `import()` + and even [the old `appcache` + package](https://github.com/meteor/meteor/pull/9776). However, if you're + writing dynamic code that depends on modern features, you can use the + boolean `Meteor.isModern` flag to detect the status of the current + environment (Node 8 is modern, too, of course). 
If you're writing a + Meteor package, you can call `api.addFiles(files, "legacy")` in your + `package.js` configuration file to add extra files to the legacy bundle, + or `api.addFiles(files, "client")` to add files to all client bundles, + or `api.addFiles(files, "web.browser")` to add files only to the modern + bundle, and the same rules apply to `api.mainModule`. Just be sure to + call `setMinimumBrowserVersions` (in server startup code) to enforce + your assumptions about ECMAScript feature support. + + We think this modern/legacy system is one of the most powerful features + we've added since we first introduced the `ecmascript` package in Meteor + 1.2, and we look forward to other frameworks attempting to catch up. + + [PR #9439](https://github.com/meteor/meteor/pull/9439) + +* Although Meteor does not recompile packages installed in `node_modules` + by default, compilation of specific npm packages (for example, to + support older browsers that the package author neglected) can now be + enabled in one of two ways: + + * Clone the package repository into your application's `imports` + directory, make any modifications necessary, then use `npm install` to + link `the-package` into `node_modules`: + ```sh + meteor npm install imports/the-package + ``` + Meteor will compile the contents of the package exposed via + `imports/the-package`, and this compiled code will be used when you + import `the-package` in any of the usual ways: + ```js + import stuff from "the-package" + require("the-package") === require("/imports/the-package") + import("the-package").then(...) + ``` + This reuse of compiled code is the critical new feature that was added + in Meteor 1.7. + + * Install the package normally with `meteor npm install the-package`, + then create a symbolic link *to* the installed package elsewhere in + your application, outside of `node_modules`: + ```sh + meteor npm install the-package + cd imports + ln -s ../node_modules/the-package . 
+ ``` + Again, Meteor will compile the contents of the package because they + are exposed outside of `node_modules`, and the compiled code will be + used whenever `the-package` is imported from `node_modules`. + + > Note: this technique also works if you create symbolic links to + individual files, rather than linking the entire package directory. + + In both cases, Meteor will compile the exposed code as if it was part of + your application, using whatever compiler plugins you have installed. + You can influence this compilation using `.babelrc` files or any other + techniques you would normally use to configure compilation of + application code. [PR #9771](https://github.com/meteor/meteor/pull/9771) + [Feature #6](https://github.com/meteor/meteor-feature-requests/issues/6) + + > ~Note: since compilation of npm packages can now be enabled using the + techniques described above, Meteor will no longer automatically scan + `node_modules` directories for modules that can be compiled by + compiler plugins. If you have been using that functionality to import + compiled-to-JS modules from `node_modules`, you should start using the + symlinking strategy instead.~ **Follow-up note: this optimization was + reverted in Meteor 1.7.0.1 (see [above](#v1701-2018-05-29)).** + +* Node has been updated to version + [8.11.2](https://nodejs.org/en/blog/release/v8.11.2/), officially fixing + a [cause](https://github.com/nodejs/node/issues/19274) of frequent + segmentation faults in Meteor applications that was introduced in Node + 8.10.0. Meteor 1.6.1.1 shipped with a custom build of Node that patched + this problem, but that approach was never intended to be permanent. + +* The `npm` package has been upgraded to version 5.10.0, and our + [fork](https://github.com/meteor/pacote/tree/v7.6.1-meteor) of its + `pacote` dependency has been rebased against version 7.6.1. 
+ +* Applications may now specify client and server entry point modules in a + newly-supported `"meteor"` section of `package.json`: + ```js + "meteor": { + "mainModule": { + "client": "client/main.js", + "server": "server/main.js" + } + } + ``` + When specified, these entry points override Meteor's default module + loading semantics, rendering `imports` directories unnecessary. If + `mainModule` is left unspecified for either client or server, the + default rules will apply for that architecture, as before. To disable + eager loading of modules on a given architecture, simply provide a + `mainModule` value of `false`: + ```js + "meteor": { + "mainModule": { + "client": false, + "server": "server/main.js" + } + } + ``` + [Feature #135](https://github.com/meteor/meteor-feature-requests/issues/135) + [PR #9690](https://github.com/meteor/meteor/pull/9690) + +* In addition to `meteor.mainModule`, the `"meteor"` section of + `package.json` may also specify `meteor.testModule` to control which + test modules are loaded by `meteor test` or `meteor test --full-app`: + ```js + "meteor": { + "mainModule": {...}, + "testModule": "tests.js" + } + ``` + If your client and server test files are different, you can expand the + `testModule` configuration using the same syntax as `mainModule`: + ```js + "meteor": { + "testModule": { + "client": "client/tests.js", + "server": "server/tests.js" + } + } + ``` + The same test module will be loaded whether or not you use the + `--full-app` option. Any tests that need to detect `--full-app` should + check `Meteor.isAppTest`. The module(s) specified by `meteor.testModule` + can import other test modules at runtime, so you can still distribute + test files across your codebase; just make sure you import the ones you + want to run. 
[PR #9714](https://github.com/meteor/meteor/pull/9714) + +* The `meteor create` command now supports a `--minimal` option, which + creates an app with as few Meteor packages as possible, in order to + minimize client bundle size while still demonstrating advanced features + such as server-side rendering. This starter application is a solid + foundation for any application that doesn't need Mongo or DDP. + +* The `meteor-babel` npm package has been updated to version + 7.0.0-beta.49-1. Note: while Babel has recently implemented support for + a new kind of `babel.config.js` configuration file (see [this + PR](https://github.com/babel/babel/pull/7358)), and future versions of + Meteor will no doubt embrace this functionality, Meteor 1.7 supports + only `.babelrc` files as a means of customizing the default Babel + configuration provided by Meteor. In other words, if your project + contains a `babel.config.js` file, it will be ignored by Meteor 1.7. + +* The `reify` npm package has been updated to version 0.16.2. + +* The `meteor-node-stubs` package, which provides stub implementations for + any Node built-in modules used by the client (such as `path` and + `http`), has a new minor version (0.4.1) that may help with Windows + installation problems. To install the new version, run + ```sh + meteor npm install meteor-node-stubs@latest + ``` + +* The `optimism` npm package has been updated to version 0.6.3. + +* The `minifier-js` package has been updated to use `uglify-es` 3.3.9. + +* Individual Meteor `self-test`'s can now be skipped by adjusting their + `define` call to be prefixed by `skip`. For example, + `selftest.skip.define('some test', ...` will skip running "some test". + [PR #9579](https://github.com/meteor/meteor/pull/9579) + +* Mongo has been upgraded to version 3.6.4 for 64-bit systems, and 3.2.19 + for 32-bit systems. 
[PR #9632](https://github.com/meteor/meteor/pull/9632)
+
+  **NOTE:** After upgrading an application to use Mongo 3.6.4, it has been
+  observed ([#9591](https://github.com/meteor/meteor/issues/9591))
+  that attempting to run that application with an older version of
+  Meteor (via `meteor --release X`), that uses an older version of Mongo, can
+  prevent the application from starting. This can be fixed by either
+  running `meteor reset`, or by repairing the Mongo database. To repair the
+  database, find the `mongod` binary on your system that lines up with the
+  Meteor release you're jumping back to, and run
+  `mongod --dbpath your-apps-db --repair`. For example:
+  ```sh
+  ~/.meteor/packages/meteor-tool/1.6.0_1/mt-os.osx.x86_64/dev_bundle/mongodb/bin/mongod --dbpath /my-app/.meteor/local/db --repair
+  ```
+  [PR #9632](https://github.com/meteor/meteor/pull/9632)
+
+* The `mongodb` driver package has been updated from version 2.2.34 to
+  version 3.0.7. [PR #9790](https://github.com/meteor/meteor/pull/9790)
+  [PR #9831](https://github.com/meteor/meteor/pull/9831)
+  [Feature #268](https://github.com/meteor/meteor-feature-requests/issues/268)
+
+* The `cordova-plugin-meteor-webapp` package depended on by the Meteor
+  `webapp` package has been updated to version 1.6.0.
+  [PR #9761](https://github.com/meteor/meteor/pull/9761)
+
+* Any settings read from a JSON file passed with the `--settings` option
+  during Cordova run/build/deploy will be exposed in `mobile-config.js`
+  via the `App.settings` property, similar to `Meteor.settings`.
+  [PR #9873](https://github.com/meteor/meteor/pull/9873)
+
+* The `@babel/plugin-proposal-class-properties` plugin provided by
+  `meteor-babel` now runs with the `loose:true` option, as required by
+  other (optional) plugins like `@babel/plugin-proposal-decorators`.
+  [Issue #9628](https://github.com/meteor/meteor/issues/9628)
+
+* The `underscore` package has been removed as a dependency from `meteor-base`.
+  This opens up the possibility of removing 14.4 kb from production bundles.
+  Since this would be a breaking change for any apps that may have been
+  using `_` without having any packages that depend on `underscore`
+  besides `meteor-base`, we have added an upgrader that will automatically
+  add `underscore` to the `.meteor/packages` file of any project which
+  lists `meteor-base`, but not `underscore`. Apps which do not require this
+  package can safely remove it using `meteor remove underscore`.
+  [PR #9596](https://github.com/meteor/meteor/pull/9596)
+
+* Meteor's `promise` package has been updated to support
+  [`Promise.prototype.finally`](https://github.com/tc39/proposal-promise-finally).
+  [Issue 9639](https://github.com/meteor/meteor/issues/9639)
+  [PR #9663](https://github.com/meteor/meteor/pull/9663)
+
+* Assets made available via symlinks in the `public` and `private` directories
+  of an application are now copied into Meteor application bundles when
+  using `meteor build`. This means npm package assets that need to be made
+  available publicly can now be symlinked from their `node_modules` location,
+  in the `public` directory, and remain available in production bundles.
+  [Issue #7013](https://github.com/meteor/meteor/issues/7013)
+  [PR #9666](https://github.com/meteor/meteor/pull/9666)
+
+* The `facts` package has been split into `facts-base` and `facts-ui`. The
+  original `facts` package has been deprecated.
+  [PR #9629](https://github.com/meteor/meteor/pull/9629)
+
+* If the new pseudo tag `<meteor-bundled-css />` is used anywhere in the
+  `<head>` of an app, it will be replaced by the `link` to Meteor's bundled
+  CSS. If the new tag isn't used, the bundle will be placed at the top of
+  the `<head>` section as before (for backwards compatibility).
+ [Feature #24](https://github.com/meteor/meteor-feature-requests/issues/24) + [PR #9657](https://github.com/meteor/meteor/pull/9657) + +## v1.6.1.3, 2018-06-16 + +### Breaking changes +N/A + +### Migration Steps +N/A + +### Changes + +* Node has been updated to version + [8.11.3](https://nodejs.org/en/blog/release/v8.11.3/), an important + [security release](https://nodejs.org/en/blog/vulnerability/june-2018-security-releases/). + +## v1.6.1.2, 2018-05-28 + +### Breaking changes +N/A + +### Migration Steps +N/A + +### Changes + +* Meteor 1.6.1.2 is a very small release intended to fix + [#9863](https://github.com/meteor/meteor/issues/9863) by making + [#9887](https://github.com/meteor/meteor/pull/9887) available to Windows + users without forcing them to update to Meteor 1.7 (yet). Thanks very + much to [@zodern](https://github.com/zodern) for identifying a solution + to this problem. [PR #9910](https://github.com/meteor/meteor/pull/9910) + +## v1.6.1.1, 2018-04-02 + +### Breaking changes +N/A + +### Migration Steps +* Update `@babel/runtime` npm package and any custom Babel plugin enabled in +`.babelrc` + ```sh + meteor npm install @babel/runtime@latest + ``` + +### Changes + +* Node has been updated to version + [8.11.1](https://nodejs.org/en/blog/release/v8.11.1/), an important + [security release](https://nodejs.org/en/blog/vulnerability/march-2018-security-releases/), + with a critical [patch](https://github.com/nodejs/node/pull/19477) + [applied](https://github.com/meteor/node/commits/v8.11.1-meteor) to + solve a segmentation fault + [problem](https://github.com/nodejs/node/issues/19274) that was + introduced in Node 8.10.0. 
+
+* The `meteor-babel` npm package has been updated to version
+  7.0.0-beta.42, which may require updating any custom Babel plugins
+  you've enabled in a `.babelrc` file, and/or running the following
+  command to update `@babel/runtime`:
+  ```sh
+  meteor npm install @babel/runtime@latest
+  ```
+
+## v1.6.1, 2018-01-19
+
+### Breaking changes
+
+* Meteor's Node Mongo driver is now configured with the
+  [`ignoreUndefined`](http://mongodb.github.io/node-mongodb-native/2.2/api/MongoClient.html#connect)
+  connection option set to `true`, to make sure fields with `undefined`
+  values are not first converted to `null`, when inserted/updated. `undefined`
+  values are now removed from all Mongo queries and insert/update documents.
+
+  This is a potentially breaking change if you are upgrading an existing app
+  from an earlier version of Meteor.
+
+  For example:
+  ```js
+  // return data pertaining to the current user
+  db.privateUserData.find({
+    userId: currentUser._id // undefined
+  });
+  ```
+  Assuming there are no documents in the `privateUserData` collection with
+  `userId: null`, in Meteor versions prior to 1.6.1 this query will return
+  zero documents. From Meteor 1.6.1 onwards, this query will now return
+  _every_ document in the collection. It is highly recommended you review all
+  your existing queries to ensure that any potential usage of `undefined` in
+  query objects won't lead to problems.
+
+### Migration Steps
+N/A
+
+### Changes
+
+* Node has been updated to version
+  [8.9.4](https://nodejs.org/en/blog/release/v8.9.4/).
+
+* The `meteor-babel` npm package (along with its Babel-related
+  dependencies) has been updated to version 7.0.0-beta.38, a major
+  update from Babel 6.
Thanks to the strong abstraction of the + `meteor-babel` package, the most noticeable consequence of the Babel 7 + upgrade is that the `babel-runtime` npm package has been replaced by + `@babel/runtime`, which can be installed by running + ```js + meteor npm install @babel/runtime + ``` + in your application directory. There's a good chance that the old + `babel-runtime` package can be removed from your `package.json` + dependencies, though there's no harm in leaving it there. Please see + [this blog post](https://babeljs.io/blog/2017/09/12/planning-for-7.0) + for general information about updating to Babel 7 (note especially any + changes to plugins you've been using in any `.babelrc` files). + [PR #9440](https://github.com/meteor/meteor/pull/9440) + +* Because `babel-compiler@7.0.0` is a major version bump for a core + package, any package that explicitly depends on `babel-compiler` with + `api.use` or `api.imply` will need to be updated and republished in + order to remain compatible with Meteor 1.6.1. One notable example is the + `practicalmeteor:mocha` package. If you have been using this test-driver + package, we strongly recommend switching to `meteortesting:mocha` + instead. If you are the author of a package that depends on + `babel-compiler`, we recommend publishing your updated version using a + new major or minor version, so that you can continue releasing patch + updates compatible with older versions of Meteor, if necessary. + +* Meteor's Node Mongo driver is now configured with the + [`ignoreUndefined`](http://mongodb.github.io/node-mongodb-native/2.2/api/MongoClient.html#connect) + connection option set to `true`, to make sure fields with `undefined` + values are not first converted to `null`, when inserted/updated. `undefined` + values are now removed from all Mongo queries and insert/update documents. 
+ [Issue #6051](https://github.com/meteor/meteor/issues/6051) + [PR #9444](https://github.com/meteor/meteor/pull/9444) + +* The `server-render` package now supports passing a `Stream` object to + `ServerSink` methods that previously expected a string, which enables + [streaming server-side rendering with React + 16](https://hackernoon.com/whats-new-with-server-side-rendering-in-react-16-9b0d78585d67): + ```js + import React from "react"; + import { renderToNodeStream } from "react-dom/server"; + import { onPageLoad } from "meteor/server-render"; + import App from "/imports/Server.js"; + + onPageLoad(sink => { + sink.renderIntoElementById("app", renderToNodeStream( + + )); + }); + ``` + [PR #9343](https://github.com/meteor/meteor/pull/9343) + +* The [`cordova-lib`](https://github.com/apache/cordova-cli) package has + been updated to version 7.1.0, + [`cordova-android`](https://github.com/apache/cordova-android/) has been + updated to version 6.4.0 (plus one additional + [commit](https://github.com/meteor/cordova-android/commit/317db7df0f7a054444197bc6d28453cf4ab23280)), + and [`cordova-ios`](https://github.com/apache/cordova-ios/) has been + updated to version 4.5.4. The cordova plugins `cordova-plugin-console`, + `cordova-plugin-device-motion`, and `cordova-plugin-device-orientation` + have been [deprecated](https://cordova.apache.org/news/2017/09/22/plugins-release.html) and will likely be removed in a future Meteor release. [Feature Request #196](https://github.com/meteor/meteor-feature-requests/issues/196) [PR #9213](https://github.com/meteor/meteor/pull/9213) + [Issue #9447](https://github.com/meteor/meteor/issues/9447) + [PR #9448](https://github.com/meteor/meteor/pull/9448) + +* The previously-served `/manifest.json` application metadata file is now + served from `/__browser/manifest.json` for web browsers, to avoid + confusion with other kinds of `manifest.json` files. 
Cordova clients + will continue to load the manifest file from `/__cordova/manifest.json`, + as before. [Issue #6674](https://github.com/meteor/meteor/issues/6674) + [PR #9424](https://github.com/meteor/meteor/pull/9424) + +* The bundled version of MongoDB used by `meteor run` in development + on 64-bit architectures has been updated to 3.4.10. 32-bit architectures + will continue to use MongoDB 3.2.x versions since MongoDB is no longer + producing 32-bit versions of MongoDB for newer release tracks. + [PR #9396](https://github.com/meteor/meteor/pull/9396) + +* Meteor's internal `minifier-css` package has been updated to use `postcss` + for CSS parsing and minifying, instead of the abandoned `css-parse` and + `css-stringify` packages. Changes made to the `CssTools` API exposed by the + `minifier-css` package are mostly backwards compatible (the + `standard-minifier-css` package that uses it didn't have to change for + example), but now that we're using `postcss` the AST accepted and returned + from certain functions is different. This could impact developers who are + tying into Meteor's internal `minifier-css` package directly. The AST based + function changes are: + + * `CssTools.parseCss` now returns a PostCSS + [`Root`](http://api.postcss.org/Root.html) object. + * `CssTools.stringifyCss` expects a PostCSS `Root` object as its first + parameter. + * `CssTools.mergeCssAsts` expects an array of PostCSS `Root` objects as its + first parameter. + * `CssTools.rewriteCssUrls` expects a PostCSS `Root` object as its first + parameter. + + [PR #9263](https://github.com/meteor/meteor/pull/9263) + +* The `_` variable will once again remain bound to `underscore` (if + installed) in `meteor shell`, fixing a regression introduced by Node 8. + [PR #9406](https://github.com/meteor/meteor/pull/9406) + +* Dynamically `import()`ed modules will now be fetched from the + application server using an HTTP POST request, rather than a WebSocket + message. 
This strategy has all the benefits of the previous strategy, + except that it does not require establishing a WebSocket connection + before fetching dynamic modules, in exchange for slightly higher latency + per request. [PR #9384](https://github.com/meteor/meteor/pull/9384) + +* To reduce the total number of HTTP requests for dynamic modules, rapid + sequences of `import()` calls within the same tick of the event loop + will now be automatically batched into a single HTTP request. In other + words, the following code will result in only one HTTP request: + ```js + const [ + React, + ReactDOM + ] = await Promise.all([ + import("react"), + import("react-dom") + ]); + ``` + +* Thanks to a feature request and pull request from + [@CaptainN](https://github.com/CaptainN), all available dynamic modules + will be automatically prefetched after page load and permanently cached + in IndexedDB when the `appcache` package is in use, ensuring that + dynamic `import()` will work for offline apps. Although the HTML5 + Application Cache was deliberately *not* used for this prefetching, the + new behavior matches the spirit/intention of the `appcache` package. + [Feature Request #236](https://github.com/meteor/meteor-feature-requests/issues/236) + [PR #9482](https://github.com/meteor/meteor/pull/9482) + [PR #9434](https://github.com/meteor/meteor/pull/9434) + +* The `es5-shim` library is no longer included in the initial JavaScript + bundle, but is instead injected using a `')({ + conf: meteorRuntimeConfig, + }) + : template(' ')({ + src: rootUrlPathPrefix, + }), + '', - (css || []).map(({ url }) => - template(' ')({ - href: bundledJsCssUrlRewriteHook(url) + ...(js || []).map(file => + template(' ')({ + src: bundledJsCssUrlRewriteHook(file.url), + sri: sri(file.sri, sriMode), + }) + ), + + ...(additionalStaticJs || []).map(({ contents, pathname }) => ( + inlineScriptsAllowed + ? 
template(' ')({ + contents, }) - ), - - [ - head, - dynamicHead, - '', - '', - body, - dynamicBody, - '', - (inlineScriptsAllowed - ? template(' ')({ - conf: meteorRuntimeConfig - }) - : template(' ')({ - src: rootUrlPathPrefix - }) - ) , - '' - ], - - (js || []).map(({ url }) => - template(' ')({ - src: bundledJsCssUrlRewriteHook(url) + : template(' ')({ + src: rootUrlPathPrefix + pathname, }) - ), + )), - (additionalStaticJs || []).map(({ contents, pathname }) => ( - (inlineScriptsAllowed - ? template(' ')({ - contents: contents - }) - : template(' ')({ - src: rootUrlPathPrefix + pathname - })) - )), - - [ - '', '', - '', - '' - ], - ).join('\n'); -} + '', + '', + '', + '' +].join('\n'); diff --git a/packages/boilerplate-generator/template-web.cordova.js b/packages/boilerplate-generator/template-web.cordova.js index 12f4bdde36..236fb2e60f 100644 --- a/packages/boilerplate-generator/template-web.cordova.js +++ b/packages/boilerplate-generator/template-web.cordova.js @@ -1,7 +1,7 @@ import template from './template'; // Template function for rendering the boilerplate html for cordova -export default function({ +export const headTemplate = ({ meteorRuntimeConfig, rootUrlPathPrefix, inlineScriptsAllowed, @@ -11,69 +11,69 @@ export default function({ htmlAttributes, bundledJsCssUrlRewriteHook, head, - body, dynamicHead, - dynamicBody, -}) { - return [].concat( - [ - '', - '', - ' ', - ' ', - ' ', - ' ', - ' ', - ], +}) => { + var headSections = head.split(/]*>/, 2); + var cssBundle = [ // We are explicitly not using bundledJsCssUrlRewriteHook: in cordova we serve assets up directly from disk, so rewriting the URL does not make sense - (css || []).map(({ url }) => + ...(css || []).map(file => template(' ')({ - href: url + href: file.url, }) - ), - [ - ' ', - '', - ' ' - ], - (js || []).map(({ url }) => + )].join('\n'); + + return [ + '', + '', + ' ', + ' ', + ' ', + ' ', + ' ', + + (headSections.length === 1) + ? 
[cssBundle, headSections[0]].join('\n') + : [headSections[0], cssBundle, headSections[1]].join('\n'), + + ' ', + '', + ' ', + + ...(js || []).map(file => template(' ')({ - src: url + src: file.url, }) ), - (additionalStaticJs || []).map(({ contents, pathname }) => ( - (inlineScriptsAllowed + ...(additionalStaticJs || []).map(({ contents, pathname }) => ( + inlineScriptsAllowed ? template(' ')({ - contents: contents + contents, }) : template(' ')({ src: rootUrlPathPrefix + pathname - })) + }) )), + '', + '', + '', + '', + ].join('\n'); +}; - [ - '', - head, - '', - '', - '', - body, - '', - '' - ], - ).join('\n'); +export function closeTemplate() { + return "\n"; } diff --git a/packages/caching-compiler/.npm/package/npm-shrinkwrap.json b/packages/caching-compiler/.npm/package/npm-shrinkwrap.json deleted file mode 100644 index f0ac52d443..0000000000 --- a/packages/caching-compiler/.npm/package/npm-shrinkwrap.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "lockfileVersion": 1, - "dependencies": { - "async": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/async/-/async-1.4.0.tgz", - "integrity": "sha1-Nfhvg8WeBCHQmc2akdgnj7V4wA0=" - }, - "lru-cache": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.6.4.tgz", - "integrity": "sha1-JnUZDM0bBwHsL2UqTQ09QA12wN0=" - } - } -} diff --git a/packages/caching-compiler/caching-compiler.js b/packages/caching-compiler/caching-compiler.js index c200944091..ab1cea5cae 100644 --- a/packages/caching-compiler/caching-compiler.js +++ b/packages/caching-compiler/caching-compiler.js @@ -2,9 +2,7 @@ const fs = Plugin.fs; const path = Plugin.path; const createHash = Npm.require('crypto').createHash; const assert = Npm.require('assert'); -const Future = Npm.require('fibers/future'); const LRU = Npm.require('lru-cache'); -const async = Npm.require('async'); // Base class for CachingCompiler and MultiFileCachingCompiler. 
CachingCompilerBase = class CachingCompilerBase { @@ -172,10 +170,12 @@ CachingCompilerBase = class CachingCompilerBase { // ignore errors, it's just a cache } } else { - fs.writeFile(tempFilename, contents, err => { - // ignore errors, it's just a cache - if (! err) { - fs.rename(tempFilename, filename, err => {}); + fs.writeFile(tempFilename, contents, writeError => { + if (writeError) return; + try { + fs.renameSync(tempFilename, filename); + } catch (renameError) { + // ignore errors, it's just a cache } }); } @@ -277,13 +277,16 @@ CachingCompiler = class CachingCompiler extends CachingCompilerBase { // you have processing you want to perform at the beginning or end of a // processing phase, you may want to override this method and call the // superclass implementation from within your method. - processFilesForTarget(inputFiles) { + async processFilesForTarget(inputFiles) { const cacheMisses = []; + const arches = this._cacheDebugEnabled && Object.create(null); - const future = new Future; - async.eachLimit(inputFiles, this._maxParallelism, (inputFile, cb) => { - let error = null; - try { + inputFiles.forEach(inputFile => { + if (arches) { + arches[inputFile.getArch()] = 1; + } + + const getResult = () => { const cacheKey = this._deepHash(this.getCacheKey(inputFile)); let compileResult = this._cache.get(cacheKey); @@ -296,7 +299,7 @@ CachingCompiler = class CachingCompiler extends CachingCompilerBase { if (! compileResult) { cacheMisses.push(inputFile.getDisplayPath()); - compileResult = this.compileOneFile(inputFile); + compileResult = Promise.await(this.compileOneFile(inputFile)); if (! compileResult) { // compileOneFile should have called inputFile.error. 
@@ -309,19 +312,32 @@ CachingCompiler = class CachingCompiler extends CachingCompilerBase { this._writeCacheAsync(cacheKey, compileResult); } - this.addCompileResult(inputFile, compileResult); - } catch (e) { - error = e; - } finally { - cb(error); + return compileResult; + }; + + if (this.compileOneFileLater && + inputFile.supportsLazyCompilation) { + this.compileOneFileLater(inputFile, getResult); + } else { + const result = getResult(); + if (result) { + this.addCompileResult(inputFile, result); + } } - }, future.resolver()); - future.wait(); + }); if (this._cacheDebugEnabled) { cacheMisses.sort(); + this._cacheDebug( - `Ran (#${ ++this._callCount }) on: ${ JSON.stringify(cacheMisses) }`); + `Ran (#${ + ++this._callCount + }) on: ${ + JSON.stringify(cacheMisses) + } ${ + JSON.stringify(Object.keys(arches).sort()) + }` + ); } } diff --git a/packages/caching-compiler/multi-file-caching-compiler.js b/packages/caching-compiler/multi-file-caching-compiler.js index 3cbee6e573..4add6b81a3 100644 --- a/packages/caching-compiler/multi-file-caching-compiler.js +++ b/packages/caching-compiler/multi-file-caching-compiler.js @@ -1,7 +1,5 @@ const path = Plugin.path; -const Future = Npm.require('fibers/future'); const LRU = Npm.require('lru-cache'); -const async = Npm.require('async'); // MultiFileCachingCompiler is like CachingCompiler, but for implementing // languages which allow files to reference each other, such as CSS @@ -22,7 +20,7 @@ extends CachingCompilerBase { }) { super({compilerName, defaultCacheSize, maxParallelism}); - // Maps from absolute import path to { compileResult, cacheKeys }, where + // Maps from cache key to { compileResult, cacheKeys }, where // cacheKeys is an object mapping from absolute import path to hashed // cacheKey for each file referenced by this file (including itself). this._cache = new LRU({ @@ -80,45 +78,50 @@ extends CachingCompilerBase { } // The processFilesForTarget method from the Plugin.registerCompiler API. 
- processFilesForTarget(inputFiles) { + async processFilesForTarget(inputFiles) { const allFiles = new Map; const cacheKeyMap = new Map; const cacheMisses = []; + const arches = this._cacheDebugEnabled && Object.create(null); inputFiles.forEach((inputFile) => { const importPath = this.getAbsoluteImportPath(inputFile); allFiles.set(importPath, inputFile); - cacheKeyMap.set(importPath, this._deepHash(this.getCacheKey(inputFile))); + cacheKeyMap.set(importPath, this._getCacheKeyWithPath(inputFile)); }); - const allProcessedFuture = new Future; - async.eachLimit(inputFiles, this._maxParallelism, (inputFile, cb) => { - let error = null; - try { - // If this isn't a root, skip it (and definitely don't waste time - // looking for a cache file that won't be there). - if (!this.isRoot(inputFile)) { - return; - } + inputFiles.forEach(inputFile => { + if (arches) { + arches[inputFile.getArch()] = 1; + } + const getResult = () => { const absoluteImportPath = this.getAbsoluteImportPath(inputFile); - let cacheEntry = this._cache.get(absoluteImportPath); + const cacheKey = cacheKeyMap.get(absoluteImportPath); + let cacheEntry = this._cache.get(cacheKey); if (! cacheEntry) { - cacheEntry = this._readCache(absoluteImportPath); + cacheEntry = this._readCache(cacheKey); if (cacheEntry) { this._cacheDebug(`Loaded ${ absoluteImportPath }`); } } + if (! (cacheEntry && this._cacheEntryValid(cacheEntry, cacheKeyMap))) { cacheMisses.push(inputFile.getDisplayPath()); - const compileOneFileReturn = this.compileOneFile(inputFile, allFiles); + const compileOneFileReturn = + Promise.await(this.compileOneFile(inputFile, allFiles)); + if (! compileOneFileReturn) { // compileOneFile should have called inputFile.error. - // We don't cache failures for now. + // We don't cache failures for now. 
return; } - const {compileResult, referencedImportPaths} = compileOneFileReturn; + + const { + compileResult, + referencedImportPaths, + } = compileOneFileReturn; cacheEntry = { compileResult, @@ -137,26 +140,62 @@ extends CachingCompilerBase { }); // Save the cache entry. - this._cache.set(absoluteImportPath, cacheEntry); - this._writeCacheAsync(absoluteImportPath, cacheEntry); + this._cache.set(cacheKey, cacheEntry); + this._writeCacheAsync(cacheKey, cacheEntry); } - this.addCompileResult(inputFile, cacheEntry.compileResult); - } catch (e) { - error = e; - } finally { - cb(error); + return cacheEntry.compileResult; + }; + + if (this.compileOneFileLater && + inputFile.supportsLazyCompilation) { + if (! this.isRoot(inputFile)) { + // If this inputFile is definitely not a root, then it must be + // lazy, and this is our last chance to mark it as such, so that + // the rest of the compiler plugin system can avoid worrying + // about the MultiFileCachingCompiler-specific concept of a + // "root." If this.isRoot(inputFile) returns true instead, that + // classification may not be trustworthy, since returning true + // used to be the only way to get the file to be compiled, so + // that it could be imported later by a JS module. Now that + // files can be compiled on-demand, it's safe to pass all files + // that might be roots to this.compileOneFileLater. 
+ inputFile.getFileOptions().lazy = true; + } + this.compileOneFileLater(inputFile, getResult); + } else if (this.isRoot(inputFile)) { + const result = getResult(); + if (result) { + this.addCompileResult(inputFile, result); + } } - }, allProcessedFuture.resolver()); - allProcessedFuture.wait(); + }); if (this._cacheDebugEnabled) { cacheMisses.sort(); + this._cacheDebug( - `Ran (#${ ++this._callCount }) on: ${ JSON.stringify(cacheMisses) }`); + `Ran (#${ + ++this._callCount + }) on: ${ + JSON.stringify(cacheMisses) + } ${ + JSON.stringify(Object.keys(arches).sort()) + }` + ); } } + // Returns a hash that incorporates both this.getCacheKey(inputFile) and + // this.getAbsoluteImportPath(inputFile), since the file path might be + // relevant to the compiled output when using MultiFileCachingCompiler. + _getCacheKeyWithPath(inputFile) { + return this._deepHash([ + this.getAbsoluteImportPath(inputFile), + this.getCacheKey(inputFile), + ]); + } + _cacheEntryValid(cacheEntry, cacheKeyMap) { return Object.keys(cacheEntry.cacheKeys).every( (path) => cacheEntry.cacheKeys[path] === cacheKeyMap.get(path) @@ -166,17 +205,17 @@ extends CachingCompilerBase { // The format of a cache file on disk is the JSON-stringified cacheKeys // object, a newline, followed by the CompileResult as returned from // this.stringifyCompileResult. - _cacheFilename(absoluteImportPath) { - return path.join(this._diskCache, - this._deepHash(absoluteImportPath) + '.cache'); + _cacheFilename(cacheKey) { + return path.join(this._diskCache, cacheKey + ".cache"); } + // Loads a {compileResult, cacheKeys} cache entry from disk. Returns the whole // cache entry and loads it into the in-memory cache too. - _readCache(absoluteImportPath) { + _readCache(cacheKey) { if (! 
this._diskCache) { return null; } - const cacheFilename = this._cacheFilename(absoluteImportPath); + const cacheFilename = this._cacheFilename(cacheKey); const raw = this._readFileOrNull(cacheFilename); if (!raw) { return null; @@ -200,17 +239,18 @@ extends CachingCompilerBase { } const cacheEntry = {compileResult, cacheKeys}; - this._cache.set(absoluteImportPath, cacheEntry); + this._cache.set(cacheKey, cacheEntry); return cacheEntry; } - _writeCacheAsync(absoluteImportPath, cacheEntry) { + + _writeCacheAsync(cacheKey, cacheEntry) { if (! this._diskCache) { return null; } - const cacheFilename = this._cacheFilename(absoluteImportPath); + const cacheFilename = this._cacheFilename(cacheKey); const cacheContents = - JSON.stringify(cacheEntry.cacheKeys) + '\n' - + this.stringifyCompileResult(cacheEntry.compileResult); + JSON.stringify(cacheEntry.cacheKeys) + '\n' + + this.stringifyCompileResult(cacheEntry.compileResult); this._writeFileAsync(cacheFilename, cacheContents); } } diff --git a/packages/caching-compiler/package.js b/packages/caching-compiler/package.js index 470e4d04be..987d9b7651 100644 --- a/packages/caching-compiler/package.js +++ b/packages/caching-compiler/package.js @@ -1,15 +1,10 @@ Package.describe({ name: 'caching-compiler', - version: '1.1.9', + version: '1.2.0', summary: 'An easy way to make compiler plugins cache', documentation: 'README.md' }); -Npm.depends({ - 'lru-cache': '2.6.4', - 'async': '1.4.0' -}); - Package.onUse(function(api) { api.use(['ecmascript', 'random']); api.addFiles(['caching-compiler.js'], 'server'); diff --git a/packages/callback-hook/hook.js b/packages/callback-hook/hook.js index 5d1212f1a7..d44ebd54df 100644 --- a/packages/callback-hook/hook.js +++ b/packages/callback-hook/hook.js @@ -36,50 +36,52 @@ // callback will propagate up to the iterator function, and will // terminate calling the remaining callbacks if not caught. 
-Hook = function (options) { - var self = this; - options = options || {}; - self.nextCallbackId = 0; - self.callbacks = {}; - // Whether to wrap callbacks with Meteor.bindEnvironment - self.bindEnvironment = true; - if (options.bindEnvironment === false) - self.bindEnvironment = false; +const hasOwn = Object.prototype.hasOwnProperty; - if (options.exceptionHandler) - self.exceptionHandler = options.exceptionHandler; - else if (options.debugPrintExceptions) { - if (! _.isString(options.debugPrintExceptions)) - throw new Error("Hook option debugPrintExceptions should be a string"); - self.exceptionHandler = options.debugPrintExceptions; +export class Hook { + constructor(options) { + options = options || {}; + this.nextCallbackId = 0; + this.callbacks = Object.create(null); + // Whether to wrap callbacks with Meteor.bindEnvironment + this.bindEnvironment = true; + if (options.bindEnvironment === false) { + this.bindEnvironment = false; + } + + if (options.exceptionHandler) { + this.exceptionHandler = options.exceptionHandler; + } else if (options.debugPrintExceptions) { + if (typeof options.debugPrintExceptions !== "string") { + throw new Error("Hook option debugPrintExceptions should be a string"); + } + this.exceptionHandler = options.debugPrintExceptions; + } } -}; -_.extend(Hook.prototype, { - register: function (callback) { - var self = this; - var exceptionHandler = self.exceptionHandler || function (exception) { + register(callback) { + var exceptionHandler = this.exceptionHandler || function (exception) { // Note: this relies on the undocumented fact that if bindEnvironment's // onException throws, and you are invoking the callback either in the // browser or from within a Fiber in Node, the exception is propagated. 
throw exception; }; - if (self.bindEnvironment) { + if (this.bindEnvironment) { callback = Meteor.bindEnvironment(callback, exceptionHandler); } else { callback = dontBindEnvironment(callback, exceptionHandler); } - var id = self.nextCallbackId++; - self.callbacks[id] = callback; + var id = this.nextCallbackId++; + this.callbacks[id] = callback; return { - stop: function () { - delete self.callbacks[id]; + stop: () => { + delete this.callbacks[id]; } }; - }, + } // For each registered callback, call the passed iterator function // with the callback. @@ -91,52 +93,44 @@ _.extend(Hook.prototype, { // The iteration is stopped if the iterator function returns a falsy // value or throws an exception. // - each: function (iterator) { - var self = this; - + each(iterator) { // Invoking bindEnvironment'd callbacks outside of a Fiber in Node doesn't // run them to completion (and exceptions thrown from onException are not // propagated), so we need to be in a Fiber. Meteor._nodeCodeMustBeInFiber(); - var ids = _.keys(self.callbacks); + var ids = Object.keys(this.callbacks); for (var i = 0; i < ids.length; ++i) { var id = ids[i]; // check to see if the callback was removed during iteration - if (_.has(self.callbacks, id)) { - var callback = self.callbacks[id]; - - if (! iterator(callback)) + if (hasOwn.call(this.callbacks, id)) { + var callback = this.callbacks[id]; + if (! iterator(callback)) { break; + } } } } -}); +} // Copied from Meteor.bindEnvironment and removed all the env stuff. 
-var dontBindEnvironment = function (func, onException, _this) { +function dontBindEnvironment(func, onException, _this) { if (!onException || typeof(onException) === 'string') { var description = onException || "callback of async function"; onException = function (error) { Meteor._debug( - "Exception in " + description + ":", - error && error.stack || error + "Exception in " + description, + error ); }; } - return function (/* arguments */) { - var args = _.toArray(arguments); - - var runAndHandleExceptions = function () { - try { - var ret = func.apply(_this, args); - } catch (e) { - onException(e); - } - return ret; - }; - - return runAndHandleExceptions(); + return function (...args) { + try { + var ret = func.apply(_this, args); + } catch (e) { + onException(e); + } + return ret; }; -}; +} diff --git a/packages/callback-hook/package.js b/packages/callback-hook/package.js index ece4b39e27..4e6711df0f 100644 --- a/packages/callback-hook/package.js +++ b/packages/callback-hook/package.js @@ -1,14 +1,12 @@ Package.describe({ summary: "Register callbacks on a hook", - version: '1.0.10' + version: '1.1.0' }); Package.onUse(function (api) { - api.use('underscore', ['client', 'server']); - + api.use('ecmascript'); + api.mainModule('hook.js'); api.export('Hook'); - - api.addFiles('hook.js', ['client', 'server']); }); Package.onTest(function (api) { diff --git a/packages/check/isPlainObject.js b/packages/check/isPlainObject.js index eb3c18498a..bf392c70af 100644 --- a/packages/check/isPlainObject.js +++ b/packages/check/isPlainObject.js @@ -1,24 +1,24 @@ // Copy of jQuery.isPlainObject for the server side from jQuery v3.1.1. 
-var class2type = {}; +const class2type = {}; -var toString = class2type.toString; +const toString = class2type.toString; -var hasOwn = class2type.hasOwnProperty; +const hasOwn = Object.prototype.hasOwnProperty; -var fnToString = hasOwn.toString; +const fnToString = hasOwn.toString; -var ObjectFunctionString = fnToString.call(Object); +const ObjectFunctionString = fnToString.call(Object); -var getProto = Object.getPrototypeOf; +const getProto = Object.getPrototypeOf; -exports.isPlainObject = function( obj ) { - var proto, - Ctor; +export const isPlainObject = obj => { + let proto; + let Ctor; // Detect obvious negatives // Use toString instead of jQuery.type to catch host objects - if (!obj || toString.call(obj) !== "[object Object]") { + if (!obj || toString.call(obj) !== '[object Object]') { return false; } @@ -30,6 +30,7 @@ exports.isPlainObject = function( obj ) { } // Objects with prototype are plain iff they were constructed by a global Object function - Ctor = hasOwn.call(proto, "constructor") && proto.constructor; - return typeof Ctor === "function" && fnToString.call(Ctor) === ObjectFunctionString; + Ctor = hasOwn.call(proto, 'constructor') && proto.constructor; + return typeof Ctor === 'function' && + fnToString.call(Ctor) === ObjectFunctionString; }; diff --git a/packages/check/match.js b/packages/check/match.js index e90ea56915..4c1e2bd94d 100644 --- a/packages/check/match.js +++ b/packages/check/match.js @@ -1,10 +1,11 @@ // XXX docs +import { isPlainObject } from './isPlainObject'; // Things we explicitly do NOT support: // - heterogenous arrays -var currentArgumentChecker = new Meteor.EnvironmentVariable; -var isPlainObject = require("./isPlainObject.js").isPlainObject; +const currentArgumentChecker = new Meteor.EnvironmentVariable; +const hasOwn = Object.prototype.hasOwnProperty; /** * @summary Check that a value matches a [pattern](#matchpatterns). 
@@ -14,10 +15,9 @@ var isPlainObject = require("./isPlainObject.js").isPlainObject; * types and structure. * @locus Anywhere * @param {Any} value The value to check - * @param {MatchPattern} pattern The pattern to match - * `value` against + * @param {MatchPattern} pattern The pattern to match `value` against */ -var check = exports.check = function (value, pattern) { +export function check(value, pattern) { // Record that check got called, if somebody cared. // // We use getOrNullIfOutsideFiber so that it's OK to call check() @@ -26,16 +26,19 @@ var check = exports.check = function (value, pattern) { // it might not find the argumentChecker and you'll get an error about // not checking an argument that it looks like you're checking (instead // of just getting a "Node code must run in a Fiber" error). - var argChecker = currentArgumentChecker.getOrNullIfOutsideFiber(); - if (argChecker) + const argChecker = currentArgumentChecker.getOrNullIfOutsideFiber(); + if (argChecker) { argChecker.checking(value); - var result = testSubtree(value, pattern); + } + + const result = testSubtree(value, pattern); if (result) { - var err = new Match.Error(result.message); + const err = new Match.Error(result.message); if (result.path) { - err.message += " in field " + result.path; + err.message += ` in field ${result.path}`; err.path = result.path; } + throw err; } }; @@ -44,40 +47,48 @@ var check = exports.check = function (value, pattern) { * @namespace Match * @summary The namespace for all Match types and methods. 
*/ -var Match = exports.Match = { - Optional: function (pattern) { +export const Match = { + Optional: function(pattern) { return new Optional(pattern); }, - Maybe: function (pattern) { + + Maybe: function(pattern) { return new Maybe(pattern); }, - OneOf: function (/*arguments*/) { - return new OneOf(_.toArray(arguments)); + + OneOf: function(...args) { + return new OneOf(args); }, + Any: ['__any__'], - Where: function (condition) { + Where: function(condition) { return new Where(condition); }, - ObjectIncluding: function (pattern) { - return new ObjectIncluding(pattern); + + ObjectIncluding: function(pattern) { + return new ObjectIncluding(pattern) }, - ObjectWithValues: function (pattern) { + + ObjectWithValues: function(pattern) { return new ObjectWithValues(pattern); }, + // Matches only signed 32-bit integers Integer: ['__integer__'], // XXX matchers should know how to describe themselves for errors - Error: Meteor.makeErrorType("Match.Error", function (msg) { - this.message = "Match error: " + msg; + Error: Meteor.makeErrorType('Match.Error', function (msg) { + this.message = `Match error: ${msg}`; + // The path of the value that failed to match. Initially empty, this gets // populated by catching and rethrowing the exception as it goes back up the // stack. // E.g.: "vals[3].entity.created" - this.path = ""; + this.path = ''; + // If this gets sent over DDP, don't give full internal details but at least // provide something better than 500 Internal server error. - this.sanitizedError = new Meteor.Error(400, "Match failed"); + this.sanitizedError = new Meteor.Error(400, 'Match failed'); }), // Tests to see if value matches pattern. 
Unlike check, it merely returns true @@ -93,71 +104,87 @@ var Match = exports.Match = { * @param {Any} value The value to check * @param {MatchPattern} pattern The pattern to match `value` against */ - test: function (value, pattern) { + test(value, pattern) { return !testSubtree(value, pattern); }, // Runs `f.apply(context, args)`. If check() is not called on every element of // `args` (either directly or in the first level of an array), throws an error // (using `description` in the message). - // - _failIfArgumentsAreNotAllChecked: function (f, context, args, description) { - var argChecker = new ArgumentChecker(args, description); - var result = currentArgumentChecker.withValue(argChecker, function () { - return f.apply(context, args); - }); + _failIfArgumentsAreNotAllChecked(f, context, args, description) { + const argChecker = new ArgumentChecker(args, description); + const result = currentArgumentChecker.withValue( + argChecker, + () => f.apply(context, args) + ); + // If f didn't itself throw, make sure it checked all of its arguments. 
argChecker.throwUnlessAllArgumentsHaveBeenChecked(); return result; } }; -var Optional = function (pattern) { - this.pattern = pattern; -}; +class Optional { + constructor(pattern) { + this.pattern = pattern; + } +} -var Maybe = function (pattern) { - this.pattern = pattern; -}; +class Maybe { + constructor(pattern) { + this.pattern = pattern; + } +} -var OneOf = function (choices) { - if (_.isEmpty(choices)) - throw new Error("Must provide at least one choice to Match.OneOf"); - this.choices = choices; -}; +class OneOf { + constructor(choices) { + if (!choices || choices.length === 0) { + throw new Error('Must provide at least one choice to Match.OneOf'); + } -var Where = function (condition) { - this.condition = condition; -}; + this.choices = choices; + } +} -var ObjectIncluding = function (pattern) { - this.pattern = pattern; -}; +class Where { + constructor(condition) { + this.condition = condition; + } +} -var ObjectWithValues = function (pattern) { - this.pattern = pattern; -}; +class ObjectIncluding { + constructor(pattern) { + this.pattern = pattern; + } +} -var stringForErrorMessage = function (value, options) { - options = options || {}; +class ObjectWithValues { + constructor(pattern) { + this.pattern = pattern; + } +} - if ( value === null ) return "null"; +const stringForErrorMessage = (value, options = {}) => { + if ( value === null ) { + return 'null'; + } if ( options.onlyShowType ) { return typeof value; } // Your average non-object things. Saves from doing the try/catch below for. - if ( typeof value !== "object" ) { + if ( typeof value !== 'object' ) { return EJSON.stringify(value) } try { + // Find objects with circular references since EJSON doesn't support them yet (Issue #4778 + Unaccepted PR) // If the native stringify is going to choke, EJSON.stringify is going to choke too. 
JSON.stringify(value); } catch (stringifyError) { - if ( stringifyError.name === "TypeError" ) { + if ( stringifyError.name === 'TypeError' ) { return typeof value; } } @@ -165,31 +192,36 @@ var stringForErrorMessage = function (value, options) { return EJSON.stringify(value); }; -var typeofChecks = [ - [String, "string"], - [Number, "number"], - [Boolean, "boolean"], +const typeofChecks = [ + [String, 'string'], + [Number, 'number'], + [Boolean, 'boolean'], + // While we don't allow undefined/function in EJSON, this is good for optional // arguments with OneOf. - [Function, "function"], - [undefined, "undefined"] + [Function, 'function'], + [undefined, 'undefined'], ]; // Return `false` if it matches. Otherwise, return an object with a `message` and a `path` field. -var testSubtree = function (value, pattern) { +const testSubtree = (value, pattern) => { + // Match anything! - if (pattern === Match.Any) + if (pattern === Match.Any) { return false; + } // Basic atomic types. // Do not match boxed objects (e.g. String, Boolean) - for (var i = 0; i < typeofChecks.length; ++i) { + for (let i = 0; i < typeofChecks.length; ++i) { if (pattern === typeofChecks[i][0]) { - if (typeof value === typeofChecks[i][1]) + if (typeof value === typeofChecks[i][1]) { return false; + } + return { - message: "Expected " + typeofChecks[i][1] + ", got " + stringForErrorMessage(value, { onlyShowType: true }), - path: "" + message: `Expected ${typeofChecks[i][1]}, got ${stringForErrorMessage(value, { onlyShowType: true })}`, + path: '', }; } } @@ -198,141 +230,159 @@ var testSubtree = function (value, pattern) { if (value === null) { return false; } + return { - message: "Expected null, got " + stringForErrorMessage(value), - path: "" + message: `Expected null, got ${stringForErrorMessage(value)}`, + path: '', }; } // Strings, numbers, and booleans match literally. Goes well with Match.OneOf. 
- if (typeof pattern === "string" || typeof pattern === "number" || typeof pattern === "boolean") { - if (value === pattern) + if (typeof pattern === 'string' || typeof pattern === 'number' || typeof pattern === 'boolean') { + if (value === pattern) { return false; + } + return { - message: "Expected " + pattern + ", got " + stringForErrorMessage(value), - path: "" + message: `Expected ${pattern}, got ${stringForErrorMessage(value)}`, + path: '', }; } // Match.Integer is special type encoded with array if (pattern === Match.Integer) { + // There is no consistent and reliable way to check if variable is a 64-bit // integer. One of the popular solutions is to get reminder of division by 1 // but this method fails on really large floats with big precision. // E.g.: 1.348192308491824e+23 % 1 === 0 in V8 // Bitwise operators work consistantly but always cast variable to 32-bit // signed integer according to JavaScript specs. - if (typeof value === "number" && (value | 0) === value) + if (typeof value === 'number' && (value | 0) === value) { return false; + } + return { - message: "Expected Integer, got " + stringForErrorMessage(value), - path: "" + message: `Expected Integer, got ${stringForErrorMessage(value)}`, + path: '', }; } - // "Object" is shorthand for Match.ObjectIncluding({}); - if (pattern === Object) + // 'Object' is shorthand for Match.ObjectIncluding({}); + if (pattern === Object) { pattern = Match.ObjectIncluding({}); + } // Array (checked AFTER Any, which is implemented as an Array). 
if (pattern instanceof Array) { if (pattern.length !== 1) { return { - message: "Bad pattern: arrays must have one type element" + stringForErrorMessage(pattern), - path: "" - }; - } - if (!_.isArray(value) && !_.isArguments(value)) { - return { - message: "Expected array, got " + stringForErrorMessage(value), - path: "" + message: `Bad pattern: arrays must have one type element ${stringForErrorMessage(pattern)}`, + path: '', }; } - for (var i = 0, length = value.length; i < length; i++) { - var result = testSubtree(value[i], pattern[0]); + if (!Array.isArray(value) && !isArguments(value)) { + return { + message: `Expected array, got ${stringForErrorMessage(value)}`, + path: '', + }; + } + + for (let i = 0, length = value.length; i < length; i++) { + const result = testSubtree(value[i], pattern[0]); if (result) { result.path = _prependPath(i, result.path); return result; } } + return false; } // Arbitrary validation checks. The condition can return false or throw a // Match.Error (ie, it can internally use check()) to fail. 
if (pattern instanceof Where) { - var result; + let result; try { result = pattern.condition(value); } catch (err) { - if (!(err instanceof Match.Error)) + if (!(err instanceof Match.Error)) { throw err; + } + return { message: err.message, path: err.path }; } - if (result) + + if (result) { return false; + } + // XXX this error is terrible return { - message: "Failed Match.Where validation", - path: "" + message: 'Failed Match.Where validation', + path: '', }; } - if (pattern instanceof Maybe) { pattern = Match.OneOf(undefined, null, pattern.pattern); - } - else if (pattern instanceof Optional) { + } else if (pattern instanceof Optional) { pattern = Match.OneOf(undefined, pattern.pattern); } if (pattern instanceof OneOf) { - for (var i = 0; i < pattern.choices.length; ++i) { - var result = testSubtree(value, pattern.choices[i]); + for (let i = 0; i < pattern.choices.length; ++i) { + const result = testSubtree(value, pattern.choices[i]); if (!result) { + // No error? Yay, return. return false; } + // Match errors just mean try another choice. } + // XXX this error is terrible return { - message: "Failed Match.OneOf, Match.Maybe or Match.Optional validation", - path: "" + message: 'Failed Match.OneOf, Match.Maybe or Match.Optional validation', + path: '', }; } // A function that isn't something we special-case is assumed to be a // constructor. 
if (pattern instanceof Function) { - if (value instanceof pattern) + if (value instanceof pattern) { return false; + } + return { - message: "Expected " + (pattern.name ||"particular constructor"), - path: "" + message: `Expected ${pattern.name || 'particular constructor'}`, + path: '', }; } - var unknownKeysAllowed = false; - var unknownKeyPattern; + let unknownKeysAllowed = false; + let unknownKeyPattern; if (pattern instanceof ObjectIncluding) { unknownKeysAllowed = true; pattern = pattern.pattern; } + if (pattern instanceof ObjectWithValues) { unknownKeysAllowed = true; unknownKeyPattern = [pattern.pattern]; pattern = {}; // no required keys } - if (typeof pattern !== "object") { + if (typeof pattern !== 'object') { return { - message: "Bad pattern: unknown pattern type", - path: "" + message: 'Bad pattern: unknown pattern type', + path: '', }; } @@ -341,66 +391,65 @@ var testSubtree = function (value, pattern) { // the pattern: this really needs to be a plain old {Object}! if (typeof value !== 'object') { return { - message: "Expected object, got " + typeof value, - path: "" + message: `Expected object, got ${typeof value}`, + path: '', }; } + if (value === null) { return { - message: "Expected object, got null", - path: "" + message: `Expected object, got null`, + path: '', }; } + if (! 
isPlainObject(value)) { return { - message: "Expected plain object", - path: "" + message: `Expected plain object`, + path: '', }; } - var requiredPatterns = {}; - var optionalPatterns = {}; - _.each(pattern, function (subPattern, key) { - if (subPattern instanceof Optional || subPattern instanceof Maybe) + const requiredPatterns = Object.create(null); + const optionalPatterns = Object.create(null); + + Object.keys(pattern).forEach(key => { + const subPattern = pattern[key]; + if (subPattern instanceof Optional || + subPattern instanceof Maybe) { optionalPatterns[key] = subPattern.pattern; - else + } else { requiredPatterns[key] = subPattern; + } }); - //XXX: replace with underscore's _.allKeys if Meteor updates underscore to 1.8+ (or lodash) - var allKeys = function(obj){ - var keys = []; - if (_.isObject(obj)){ - for (var key in obj) keys.push(key); - } - return keys; - } + for (let key in Object(value)) { + const subValue = value[key]; + if (hasOwn.call(requiredPatterns, key)) { + const result = testSubtree(subValue, requiredPatterns[key]); + if (result) { + result.path = _prependPath(key, result.path); + return result; + } - for (var keys = allKeys(value), i = 0, length = keys.length; i < length; i++) { - var key = keys[i]; - var subValue = value[key]; - if (_.has(requiredPatterns, key)) { - var result = testSubtree(subValue, requiredPatterns[key]); - if (result) { - result.path = _prependPath(key, result.path); - return result; - } delete requiredPatterns[key]; - } else if (_.has(optionalPatterns, key)) { - var result = testSubtree(subValue, optionalPatterns[key]); + } else if (hasOwn.call(optionalPatterns, key)) { + const result = testSubtree(subValue, optionalPatterns[key]); if (result) { result.path = _prependPath(key, result.path); return result; } + } else { if (!unknownKeysAllowed) { return { - message: "Unknown key", - path: key + message: 'Unknown key', + path: key, }; } + if (unknownKeyPattern) { - var result = testSubtree(subValue, 
unknownKeyPattern[0]); + const result = testSubtree(subValue, unknownKeyPattern[0]); if (result) { result.path = _prependPath(key, result.path); return result; @@ -409,79 +458,95 @@ var testSubtree = function (value, pattern) { } } - var keys = _.keys(requiredPatterns); + const keys = Object.keys(requiredPatterns); if (keys.length) { return { - message: "Missing key '" + keys[0] + "'", - path: "" + message: `Missing key '${keys[0]}'`, + path: '', }; } }; -var ArgumentChecker = function (args, description) { - var self = this; - // Make a SHALLOW copy of the arguments. (We'll be doing identity checks - // against its contents.) - self.args = _.clone(args); - // Since the common case will be to check arguments in order, and we splice - // out arguments when we check them, make it so we splice out from the end - // rather than the beginning. - self.args.reverse(); - self.description = description; -}; +class ArgumentChecker { + constructor (args, description) { -_.extend(ArgumentChecker.prototype, { - checking: function (value) { - var self = this; - if (self._checkingOneValue(value)) + // Make a SHALLOW copy of the arguments. (We'll be doing identity checks + // against its contents.) + this.args = [...args]; + + // Since the common case will be to check arguments in order, and we splice + // out arguments when we check them, make it so we splice out from the end + // rather than the beginning. + this.args.reverse(); + this.description = description; + } + + checking(value) { + if (this._checkingOneValue(value)) { return; + } + // Allow check(arguments, [String]) or check(arguments.slice(1), [String]) // or check([foo, bar], [String]) to count... but only if value wasn't // itself an argument. 
- if (_.isArray(value) || _.isArguments(value)) { - _.each(value, _.bind(self._checkingOneValue, self)); + if (Array.isArray(value) || isArguments(value)) { + Array.prototype.forEach.call(value, this._checkingOneValue.bind(this)); } - }, - _checkingOneValue: function (value) { - var self = this; - for (var i = 0; i < self.args.length; ++i) { + } + + _checkingOneValue(value) { + for (let i = 0; i < this.args.length; ++i) { + // Is this value one of the arguments? (This can have a false positive if // the argument is an interned primitive, but it's still a good enough // check.) // (NaN is not === to itself, so we have to check specially.) - if (value === self.args[i] || (_.isNaN(value) && _.isNaN(self.args[i]))) { - self.args.splice(i, 1); + if (value === this.args[i] || + (Number.isNaN(value) && Number.isNaN(this.args[i]))) { + this.args.splice(i, 1); return true; } } return false; - }, - throwUnlessAllArgumentsHaveBeenChecked: function () { - var self = this; - if (!_.isEmpty(self.args)) - throw new Error("Did not check() all arguments during " + - self.description); } -}); -var _jsKeywords = ["do", "if", "in", "for", "let", "new", "try", "var", "case", - "else", "enum", "eval", "false", "null", "this", "true", "void", "with", - "break", "catch", "class", "const", "super", "throw", "while", "yield", - "delete", "export", "import", "public", "return", "static", "switch", - "typeof", "default", "extends", "finally", "package", "private", "continue", - "debugger", "function", "arguments", "interface", "protected", "implements", - "instanceof"]; + throwUnlessAllArgumentsHaveBeenChecked() { + if (this.args.length > 0) + throw new Error(`Did not check() all arguments during ${this.description}`); + } +} + +const _jsKeywords = ['do', 'if', 'in', 'for', 'let', 'new', 'try', 'var', 'case', + 'else', 'enum', 'eval', 'false', 'null', 'this', 'true', 'void', 'with', + 'break', 'catch', 'class', 'const', 'super', 'throw', 'while', 'yield', + 'delete', 'export', 'import', 
'public', 'return', 'static', 'switch', + 'typeof', 'default', 'extends', 'finally', 'package', 'private', 'continue', + 'debugger', 'function', 'arguments', 'interface', 'protected', 'implements', + 'instanceof']; // Assumes the base of path is already escaped properly // returns key + base -var _prependPath = function (key, base) { - if ((typeof key) === "number" || key.match(/^[0-9]+$/)) - key = "[" + key + "]"; - else if (!key.match(/^[a-z_$][0-9a-z_$]*$/i) || _.contains(_jsKeywords, key)) +const _prependPath = (key, base) => { + if ((typeof key) === 'number' || key.match(/^[0-9]+$/)) { + key = `[${key}]`; + } else if (!key.match(/^[a-z_$][0-9a-z_$]*$/i) || + _jsKeywords.indexOf(key) >= 0) { key = JSON.stringify([key]); + } + + if (base && base[0] !== '[') { + return `${key}.${base}`; + } - if (base && base[0] !== "[") - return key + '.' + base; return key + base; -}; +} +const isObject = value => typeof value === 'object' && value !== null; + +const baseIsArguments = item => + isObject(item) && + Object.prototype.toString.call(item) === '[object Arguments]'; + +const isArguments = baseIsArguments(function() { return arguments; }()) ? 
+ baseIsArguments : + value => isObject(value) && typeof value.callee === 'function'; diff --git a/packages/check/match_test.js b/packages/check/match_test.js index be38ace09c..33f2a72440 100644 --- a/packages/check/match_test.js +++ b/packages/check/match_test.js @@ -1,30 +1,33 @@ -Tinytest.add("check - check", function (test) { - var matches = function (value, pattern) { - var error; +Tinytest.add('check - check', test => { + const matches = (value, pattern) => { + let error; try { check(value, pattern); } catch (e) { error = e; } + test.isFalse(error); test.isTrue(Match.test(value, pattern)); }; - var fails = function (value, pattern) { - var error; + + const fails = (value, pattern) => { + let error; try { check(value, pattern); } catch (e) { error = e; } + test.isTrue(error); test.instanceOf(error, Match.Error); test.isFalse(Match.test(value, pattern)); }; // Atoms. - var pairs = [ - ["foo", String], - ["", String], + const pairs = [ + ['foo', String], + ['', String], [0, Number], [42.59, Number], [NaN, Number], @@ -35,9 +38,9 @@ Tinytest.add("check - check", function (test) { [undefined, undefined], [null, null] ]; - _.each(pairs, function (pair) { + pairs.forEach(pair => { matches(pair[0], Match.Any); - _.each([String, Number, Boolean, undefined, null], function (type) { + [String, Number, Boolean, undefined, null].forEach(type => { if (type === pair[1]) { matches(pair[0], type); matches(pair[0], Match.Optional(type)); @@ -45,11 +48,11 @@ Tinytest.add("check - check", function (test) { matches(pair[0], Match.Maybe(type)); matches(undefined, Match.Maybe(type)); matches(null, Match.Maybe(type)); - matches(pair[0], Match.Where(function () { + matches(pair[0], Match.Where(() => { check(pair[0], type); return true; })); - matches(pair[0], Match.Where(function () { + matches(pair[0], Match.Where(() => { try { check(pair[0], type); return true; @@ -61,11 +64,11 @@ Tinytest.add("check - check", function (test) { fails(pair[0], type); matches(pair[0], 
Match.OneOf(type, pair[1])); matches(pair[0], Match.OneOf(pair[1], type)); - fails(pair[0], Match.Where(function () { + fails(pair[0], Match.Where(() => { check(pair[0], type); return true; })); - fails(pair[0], Match.Where(function () { + fails(pair[0], Match.Where(() => { try { check(pair[0], type); return true; @@ -74,21 +77,26 @@ Tinytest.add("check - check", function (test) { } })); } - if ( type !== null ) fails(null, Match.Optional(type)); // Optional doesn't allow null, but does match on null type + + if ( type !== null ) { + + // Optional doesn't allow null, but does match on null type + fails(null, Match.Optional(type)); + } fails(pair[0], [type]); fails(pair[0], Object); }); }); fails(true, Match.OneOf(String, Number, undefined, null, [Boolean])); - fails(new String("foo"), String); + fails(new String('foo'), String); fails(new Boolean(true), Boolean); fails(new Number(123), Number); matches([1, 2, 3], [Number]); matches([], [Number]); - fails([1, 2, 3, "4"], [Number]); + fails([1, 2, 3, '4'], [Number]); fails([1, 2, 3, [4]], [Number]); - matches([1, 2, 3, "4"], [Match.OneOf(Number, String)]); + matches([1, 2, 3, '4'], [Match.OneOf(Number, String)]); matches({}, Object); matches({}, {}); @@ -116,7 +124,7 @@ Tinytest.add("check - check", function (test) { matches(undefined, Match.Optional(null)); fails(true, Match.Optional(String)); // different should still fail - matches("String", Match.Optional(String)); // same should pass + matches('String', Match.Optional(String)); // same should pass matches({}, {a: Match.Optional(Number)}); matches({a: 1}, {a: Match.Optional(Number)}); @@ -133,38 +141,40 @@ Tinytest.add("check - check", function (test) { matches(undefined, Match.Maybe(null)); fails(true, Match.Maybe(String)); // different should still fail - matches("String", Match.Maybe(String)); // same should pass + matches('String', Match.Maybe(String)); // same should pass matches({}, {a: Match.Maybe(Number)}); matches({a: 1}, {a: Match.Maybe(Number)}); 
fails({a: true}, {a: Match.Maybe(Number)}); + // Match.Optional means "or undefined" at the top level but "or absent" in // objects. // Match.Maybe should behave the same as Match.Optional in objects // including handling nulls fails({a: undefined}, {a: Match.Maybe(Number)}); fails({a: null}, {a: Match.Maybe(Number)}); - var F = function () { + const F = function () { this.x = 123; }; + fails(new F, { x: 123 }); matches({}, Match.ObjectWithValues(Number)); matches({x: 1}, Match.ObjectWithValues(Number)); matches({x: 1, y: 2}, Match.ObjectWithValues(Number)); - fails({x: 1, y: "2"}, Match.ObjectWithValues(Number)); + fails({x: 1, y: '2'}, Match.ObjectWithValues(Number)); - matches("asdf", "asdf"); - fails("asdf", "monkey"); + matches('asdf', 'asdf'); + fails('asdf', 'monkey'); matches(123, 123); fails(123, 456); - fails("123", 123); - fails(123, "123"); + fails('123', 123); + fails(123, '123'); matches(true, true); matches(false, false); fails(true, false); - fails(true, "true"); - fails("false", false); + fails(true, 'true'); + fails('false', false); matches(/foo/, RegExp); fails(/foo/, String); @@ -173,18 +183,24 @@ Tinytest.add("check - check", function (test) { matches(EJSON.newBinary(42), Match.Where(EJSON.isBinary)); fails([], Match.Where(EJSON.isBinary)); - matches(42, Match.Where(function (x) { return x % 2 === 0; })); - fails(43, Match.Where(function (x) { return x % 2 === 0; })); + matches(42, Match.Where(x => x % 2 === 0)); + fails(43, Match.Where(x => x % 2 === 0)); matches({ - a: "something", + a: 'something', b: [ {x: 42, k: null}, - {x: 43, k: true, p: ["yay"]} - ] - }, {a: String, b: [Match.ObjectIncluding({ - x: Number, - k: Match.OneOf(null, Boolean)})]}); + {x: 43, k: true, p: ['yay']}, + ], + }, { + a: String, + b: [ + Match.ObjectIncluding({ + x: Number, + k: Match.OneOf(null, Boolean) + }), + ], + }); // Match.Integer @@ -206,8 +222,8 @@ Tinytest.add("check - check", function (test) { // Test non-plain objects. 
- var parentObj = {foo: "bar"}; - var childObj = Object.assign(Object.create(parentObj), {bar: "foo"}); + const parentObj = {foo: 'bar'}; + const childObj = Object.assign(Object.create(parentObj), {bar: 'foo'}); matches(parentObj, Object); fails(parentObj, {foo: String, bar: String}); fails(parentObj, {bar: String}); @@ -218,20 +234,20 @@ Tinytest.add("check - check", function (test) { fails(childObj, {foo: String}); // Functions - var testFunction = function () {}; + const testFunction = () => {}; matches(testFunction, Function); fails(5, Function); // Circular Reference "Classes" - var TestInstanceChild = function () {}; - var TestInstanceParent = function (child) { + const TestInstanceChild = function () {}; + const TestInstanceParent = function (child) { child._parent = this; this.child = child; }; - var testInstanceChild = new TestInstanceChild() - var testInstanceParent = new TestInstanceParent(testInstanceChild); + const testInstanceChild = new TestInstanceChild() + const testInstanceParent = new TestInstanceParent(testInstanceChild); matches(TestInstanceParent, Function); matches(testInstanceParent, TestInstanceParent); @@ -242,201 +258,199 @@ Tinytest.add("check - check", function (test) { // Circular Reference Objects - var circleFoo = {}; - var circleBar = {}; + const circleFoo = {}; + const circleBar = {}; circleFoo.bar = circleBar; circleBar.foo = circleFoo; fails(circleFoo, null); // Test that "arguments" is treated like an array. 
- var argumentsMatches = function () { + const argumentsMatches = function () { matches(arguments, [Number]); }; argumentsMatches(); argumentsMatches(1); argumentsMatches(1, 2); - var argumentsFails = function () { + const argumentsFails = function () { fails(arguments, [Number]); }; - argumentsFails("123"); - argumentsFails(1, "23"); + argumentsFails('123'); + argumentsFails(1, '23'); }); -Tinytest.add("check - argument checker", function (test) { - var checksAllArguments = function (f /*arguments*/) { - Match._failIfArgumentsAreNotAllChecked( - f, {}, _.toArray(arguments).slice(1), "test"); - }; - checksAllArguments(function () {}); - checksAllArguments(function (x) {check(x, Match.Any);}, undefined); - checksAllArguments(function (x) {check(x, Match.Any);}, null); - checksAllArguments(function (x) {check(x, Match.Any);}, false); - checksAllArguments(function (x) {check(x, Match.Any);}, true); - checksAllArguments(function (x) {check(x, Match.Any);}, 0); - checksAllArguments(function (a, b, c) { +Tinytest.add('check - argument checker', test => { + const checksAllArguments = (f, ...args) => + Match._failIfArgumentsAreNotAllChecked(f, {}, args, 'test'); + checksAllArguments(() => {}); + checksAllArguments(x => check(x, Match.Any), undefined); + checksAllArguments(x => check(x, Match.Any), null); + checksAllArguments(x => check(x, Match.Any), false); + checksAllArguments(x => check(x, Match.Any), true); + checksAllArguments(x => check(x, Match.Any), 0); + checksAllArguments((a, b, c) => { check(a, String); check(b, Boolean); check(c, Match.Optional(Number)); - }, "foo", true); - checksAllArguments(function () { - check(arguments, [Number]); - }, 1, 2, 4); - checksAllArguments(function(x) { + }, 'foo', true); + checksAllArguments((...args) => check(args, [Number]), 1, 2, 4); + checksAllArguments((x, ...args) => { check(x, Number); - check(_.toArray(arguments).slice(1), [String]); - }, 1, "foo", "bar", "baz"); - // NaN values - checksAllArguments(function (x) { - 
check(x, Number); - }, NaN); + check(args, [String]); + }, 1, 'foo', 'bar', 'baz'); - var doesntCheckAllArguments = function (f /*arguments*/) { + // NaN values + checksAllArguments(x => check(x, Number), NaN); + + const doesntCheckAllArguments = (f, ...args) => { try { - Match._failIfArgumentsAreNotAllChecked( - f, {}, _.toArray(arguments).slice(1), "test"); - test.fail({message: "expected _failIfArgumentsAreNotAllChecked to throw"}); + Match._failIfArgumentsAreNotAllChecked(f, {}, args, 'test'); + test.fail({message: 'expected _failIfArgumentsAreNotAllChecked to throw'}); } catch (e) { - test.equal(e.message, "Did not check() all arguments during test"); + test.equal(e.message, 'Did not check() all arguments during test'); } }; - doesntCheckAllArguments(function () {}, undefined); - doesntCheckAllArguments(function () {}, null); - doesntCheckAllArguments(function () {}, 1); - doesntCheckAllArguments(function () { - check(_.toArray(arguments).slice(1), [String]); - }, 1, "asdf", "foo"); - doesntCheckAllArguments(function (x, y) { - check(x, Boolean); - }, true, false); + doesntCheckAllArguments(() => {}, undefined); + doesntCheckAllArguments(() => {}, null); + doesntCheckAllArguments(() => {}, 1); + doesntCheckAllArguments((x, ...args) => check(args, [String]), 1, 'asdf', 'foo'); + doesntCheckAllArguments((x, y) => check(x, Boolean), true, false); + // One "true" check doesn't count for all. - doesntCheckAllArguments(function (x, y) { - check(x, Boolean); - }, true, true); + doesntCheckAllArguments((x, y) => check(x, Boolean), true, true); + // For non-primitives, we really do require that each arg gets checked. 
- doesntCheckAllArguments(function (x, y) { + doesntCheckAllArguments((x, y) => { check(x, [Boolean]); check(x, [Boolean]); }, [true], [true]); - // In an ideal world this test would fail, but we currently can't // differentiate between "two calls to check x, both of which are true" and // "check x and check y, both of which are true" (for any interned primitive // type). - checksAllArguments(function (x, y) { + checksAllArguments((x, y) => { check(x, Boolean); check(x, Boolean); }, true, true); }); -Tinytest.add("check - Match error path", function (test) { - var match = function (value, pattern, expectedPath) { +Tinytest.add('check - Match error path', test => { + const match = (value, pattern, expectedPath) => { try { check(value, pattern); } catch (err) { + // XXX just for FF 3.6, its JSON stringification prefers "\u000a" to "\n" - err.path = err.path.replace(/\\u000a/, "\\n"); - if (err.path != expectedPath) + err.path = err.path.replace(/\\u000a/, '\\n'); + if (err.path != expectedPath) { test.fail({ - type: "match-error-path", + type: 'match-error-path', message: "The path of Match.Error doesn't match.", pattern: JSON.stringify(pattern), value: JSON.stringify(value), path: err.path, - expectedPath: expectedPath + expectedPath, }); + } } }; - match({ foo: [ { bar: 3 }, {bar: "something"} ] }, { foo: [ { bar: Number } ] }, "foo[1].bar"); + match({ foo: [ { bar: 3 }, { bar: 'something' } ] }, { foo: [{ bar: Number }] }, 'foo[1].bar'); + // Complicated case with arrays, $, whitespace and quotes! 
match([{ $FoO: { "bar baz\n\"'": 3 } }], [{ $FoO: { "bar baz\n\"'": String } }], "[0].$FoO[\"bar baz\\n\\\"'\"]"); + // Numbers only, can be accessed w/o quotes - match({ "1231": 123 }, { "1231": String }, "[1231]"); - match({ "1234abcd": 123 }, { "1234abcd": String }, "[\"1234abcd\"]"); - match({ $set: { people: "nice" } }, { $set: { people: [String] } }, "$set.people"); - match({ _underscore: "should work" }, { _underscore: Number }, "_underscore"); + match({ '1231': 123 }, { '1231': String }, '[1231]'); + match({ '1234abcd': 123 }, { '1234abcd': String }, '[\"1234abcd\"]'); + match({ $set: { people: 'nice' } }, { $set: { people: [String] } }, '$set.people'); + match({ _underscore: 'should work' }, { _underscore: Number }, '_underscore'); + // Nested array looks nice - match([[["something", "here"], []], [["string", 123]]], [[[String]]], "[1][0][1]"); + match([[['something', 'here'], []], [['string', 123]]], [[[String]]], '[1][0][1]'); + // Object nested in arrays should look nice, too! 
- match([[[{ foo: "something" }, { foo: "here"}], - [{ foo: "asdf" }]], + match([[[{ foo: 'something' }, { foo: 'here'}], + [{ foo: 'asdf' }]], [[{ foo: 123 }]]], - [[[{ foo: String }]]], "[1][0][0].foo"); + [[[{ foo: String }]]], '[1][0][0].foo'); // JS keyword - match({ "return": 0 }, { "return": String }, "[\"return\"]"); + match({ 'return': 0 }, { 'return': String }, '[\"return\"]'); }); -Tinytest.add("check - Match error message", function (test) { - var match = function (value, pattern, expectedMessage) { +Tinytest.add('check - Match error message', test => { + const match = (value, pattern, expectedMessage) => { try { check(value, pattern); } catch (err) { - if (err.message !== "Match error: " + expectedMessage) + if (err.message !== `Match error: ${expectedMessage}`) { test.fail({ - type: "match-error-message", + type: 'match-error-message', message: "The message of Match.Error doesn't match.", pattern: JSON.stringify(pattern), value: JSON.stringify(value), errorMessage: err.message, - expectedErrorMessage: expectedMessage + expectedErrorMessage, }); + } } }; - match(2, String, "Expected string, got number"); - match({key: 0}, Number, "Expected number, got object"); - match(null, Boolean, "Expected boolean, got null"); - match("string", undefined, "Expected undefined, got string"); - match(true, null, "Expected null, got true"); + match(2, String, 'Expected string, got number'); + match({ key: 0 }, Number, 'Expected number, got object'); + match(null, Boolean, 'Expected boolean, got null'); + match('string', undefined, 'Expected undefined, got string'); + match(true, null, 'Expected null, got true'); match({}, Match.ObjectIncluding({ bar: String }), "Missing key 'bar'"); - match(null, Object, "Expected object, got null"); - match(null, Function, "Expected function, got null"); - match("bar", "foo", "Expected foo, got \"bar\""); - match(3.14, Match.Integer, "Expected Integer, got 3.14"); - match(false, [Boolean], "Expected array, got false"); - match([null, 
null], [String], "Expected string, got null in field [0]"); - match(2, {key: 2}, "Expected object, got number"); - match(null, {key: 2}, "Expected object, got null"); - match(new Date, {key: 2}, "Expected plain object"); + match(null, Object, 'Expected object, got null'); + match(null, Function, 'Expected function, got null'); + match('bar', 'foo', 'Expected foo, got \"bar\"'); + match(3.14, Match.Integer, 'Expected Integer, got 3.14'); + match(false, [Boolean], 'Expected array, got false'); + match([null, null], [String], 'Expected string, got null in field [0]'); + match(2, { key: 2 }, 'Expected object, got number'); + match(null, { key: 2 }, 'Expected object, got null'); + match(new Date, { key: 2 }, 'Expected plain object'); - var TestInstanceChild = function () {}; - var TestInstanceParent = function (child) { + const TestInstanceChild = function () {}; + const TestInstanceParent = function (child) { child._parent = this; this.child = child; }; - var testInstanceChild = new TestInstanceChild() - var testInstanceParent = new TestInstanceParent(testInstanceChild); - match(testInstanceChild, TestInstanceParent, "Expected " + (TestInstanceParent.name || "particular constructor")); + const testInstanceChild = new TestInstanceChild() + const testInstanceParent = new TestInstanceParent(testInstanceChild); + match(testInstanceChild, TestInstanceParent, `Expected ${(TestInstanceParent.name || 'particular constructor')}`); - var circleFoo = {}; - var circleBar = {}; + const circleFoo = {}; + const circleBar = {}; circleFoo.bar = circleBar; circleBar.foo = circleFoo; - match(circleFoo, null, "Expected null, got object"); + match(circleFoo, null, 'Expected null, got object'); }); // Regression test for https://github.com/meteor/meteor/issues/2136 -Meteor.isServer && Tinytest.addAsync("check - non-fiber check works", function (test, onComplete) { - var Fiber = Npm.require('fibers'); +Meteor.isServer && Tinytest.addAsync('check - non-fiber check works', (test, onComplete) 
=> { + const Fiber = Npm.require('fibers'); // We can only call test.isTrue inside normal Meteor Fibery code, so give us a // bindEnvironment way to get back. - var report = Meteor.bindEnvironment(function (success) { + const report = Meteor.bindEnvironment(success => { test.isTrue(success); onComplete(); }); // Get out of a fiber with process.nextTick and ensure that we can still use // check. - process.nextTick(function () { - var success = true; - if (Fiber.current) + process.nextTick(() => { + const success = true; + if (Fiber.current) { success = false; + } + if (success) { try { check(true, Boolean); @@ -444,6 +458,23 @@ Meteor.isServer && Tinytest.addAsync("check - non-fiber check works", function ( success = false; } } + report(success); }); }); + +Tinytest.add( + 'check - Match methods that return class instances can be called as ' + + 'constructors', + test => { + + // Existing code sometimes uses these properties as constructors, so we can't + // switch them to arrow functions or method shorthand. 
+ test.equal(new Match.Optional(), Match.Optional()); + test.equal(new Match.Maybe(), Match.Maybe()); + test.equal(new Match.OneOf([1]), Match.OneOf([1])); // Needs a non-empty array + test.equal(new Match.Where(), Match.Where()); + test.equal(new Match.ObjectIncluding(), Match.ObjectIncluding()); + test.equal(new Match.ObjectWithValues(), Match.ObjectWithValues()); + } +); diff --git a/packages/check/package.js b/packages/check/package.js index 2d3e057849..ee260f6f24 100644 --- a/packages/check/package.js +++ b/packages/check/package.js @@ -1,11 +1,10 @@ Package.describe({ - summary: "Check whether a value matches a pattern", - version: '1.2.5' + summary: 'Check whether a value matches a pattern', + version: '1.3.1', }); -Package.onUse(function (api) { - api.use('modules'); - api.use('underscore'); +Package.onUse(api => { + api.use('ecmascript'); api.use('ejson'); api.mainModule('match.js'); @@ -14,8 +13,8 @@ Package.onUse(function (api) { api.export('Match'); }); -Package.onTest(function (api) { - api.use(['check', 'tinytest', 'underscore', 'ejson', 'ecmascript'], ['client', 'server']); +Package.onTest(api => { + api.use(['check', 'tinytest', 'ejson', 'ecmascript'], ['client', 'server']); api.addFiles('match_test.js', ['client', 'server']); }); diff --git a/packages/constraint-solver/benchmark-tests.js b/packages/constraint-solver/benchmark-tests.js index 22d7717142..28def3db61 100644 --- a/packages/constraint-solver/benchmark-tests.js +++ b/packages/constraint-solver/benchmark-tests.js @@ -283,7 +283,7 @@ runBenchmarks && Tinytest.add("constraint solver - benchmark on gems - rails, gi // Given a set of gems definitions returns a Catalog-like object function getCatalogStub (gems) { return { - getSortedVersionRecords: function (name) { + getSortedVersionRecords(name) { var versions = _.chain(gems) .filter(function (pv) { return pv.name === name; }) .pluck('number') @@ -319,6 +319,17 @@ function getCatalogStub (gems) { return packageVersion; }); + }, + + 
getVersion(packageName, version) { + let result = null; + this.getSortedVersionRecords(packageName).some(pkgVersion => { + if (pkgVersion.version === version) { + result = pkgVersion; + return true; + } + }); + return result; } }; } diff --git a/packages/constraint-solver/constraint-solver-tests.js b/packages/constraint-solver/constraint-solver-tests.js index 52a980e348..660db6da78 100644 --- a/packages/constraint-solver/constraint-solver-tests.js +++ b/packages/constraint-solver/constraint-solver-tests.js @@ -25,12 +25,23 @@ var makeResolver = function (data) { }); var catalogStub = { - getSortedVersionRecords: function (name) { + getSortedVersionRecords(name) { var records = Versions.find({packageName: name}).fetch(); records.sort(function (a, b) { return PV.compare(a.version, b.version); }); return records; + }, + + getVersion(packageName, version) { + let result = null; + this.getSortedVersionRecords(packageName).some(pkgVersion => { + if (pkgVersion.version === version) { + result = pkgVersion; + return true; + } + }); + return result; } }; return new CS.PackagesResolver(catalogStub); diff --git a/packages/constraint-solver/constraint-solver.js b/packages/constraint-solver/constraint-solver.js index 435c312769..f185894d09 100644 --- a/packages/constraint-solver/constraint-solver.js +++ b/packages/constraint-solver/constraint-solver.js @@ -59,11 +59,27 @@ CS.PackagesResolver.prototype.resolve = function (dependencies, constraints, 'upgradeIndirectDepPatchVersions')); }); - var resultCache = self._options.resultCache; - if (resultCache && - resultCache.lastInput && - _.isEqual(resultCache.lastInput, - input.toJSONable(true))) { + // The constraint solver avoids re-solving everything from scratch on + // rebuilds if the current input of top-level constraints matches the + // previously solved input (also just top-level constraints). 
This is + // slightly unsound, because non-top-level dependency constraints might + // have changed, but it's important for performance, and relatively + // harmless in practice (if there's a version conflict, you'll find out + // about it the next time you do a full restart of the development + // server). The unsoundness can cause problems for tests, however, so it + // may be a good idea to set this environment variable to "true" to + // disable the caching entirely. + const disableCaching = !! JSON.parse( + process.env.METEOR_DISABLE_CONSTRAINT_SOLVER_CACHING || "false" + ); + + let resultCache = self._options.resultCache; + if (disableCaching) { + resultCache = null; + } else if (resultCache && + resultCache.lastInput && + _.isEqual(resultCache.lastInput, + input.toJSONable(true))) { return resultCache.lastOutput; } @@ -182,10 +198,26 @@ CS.isConstraintSatisfied = function (pkg, vConstraint, version) { if (type === "any-reasonable") { return true; - } else if (type === "exactly") { + } + + // If any top-level constraints use the @x.y.z! override syntax, all + // other constraints on the same package will be marked with the + // weakMinimum property, which means they constrain nothing other than + // the minimum version of the package. Look for weakMinimum in the + // CS.Solver#analyze method for related logic. + if (vConstraint.weakMinimum) { + return ! 
PV.lessThan( + PV.parse(version), + PV.parse(simpleConstraint.versionString) + ); + } + + if (type === "exactly") { var cVersion = simpleConstraint.versionString; return (cVersion === version); - } else if (type === 'compatible-with') { + } + + if (type === 'compatible-with') { if (typeof simpleConstraint.test === "function") { return simpleConstraint.test(version); } @@ -206,9 +238,9 @@ CS.isConstraintSatisfied = function (pkg, vConstraint, version) { } return true; - } else { - throw Error("Unknown constraint type: " + type); } + + throw Error("Unknown constraint type: " + type); }); }; diff --git a/packages/constraint-solver/package.js b/packages/constraint-solver/package.js index 5f8f368bc9..babea7efa6 100644 --- a/packages/constraint-solver/package.js +++ b/packages/constraint-solver/package.js @@ -1,6 +1,6 @@ Package.describe({ summary: "Given the set of the constraints, picks a satisfying configuration", - version: "1.1.1" + version: "1.2.0" }); Package.onUse(function (api) { diff --git a/packages/constraint-solver/solver.js b/packages/constraint-solver/solver.js index 1687f5a6f8..c92bb67843 100644 --- a/packages/constraint-solver/solver.js +++ b/packages/constraint-solver/solver.js @@ -207,11 +207,63 @@ CS.Solver.prototype.analyze = function () { analysis.topLevelEqualityConstrainedPackages = {}; Profile.time("analyze constraints", function () { + // Find package names with @x.y.z! overrides. We consider only + // top-level constraints here, which includes (1) .meteor/packages, + // (2) local package versions, and (3) Meteor release constraints. + // Since (2) and (3) are generated programmatically without any + // override syntax (in tools/project-context.js), the .meteor/packages + // file is effectively the only place where override syntax has any + // impact. This limitation is deliberate, since overriding package + // version constraints is a power-tool that should be used sparingly + // by application developers, and never abused by package authors. 
+ var overrides = new Set; + _.each(input.constraints, function (c) { + if (c.constraintString && + c.versionConstraint.override) { + overrides.add(c.package); + } + }); + + // Return c.versionConstraint unless it is overridden, in which case + // make a copy of it and set vConstraint.weakMinimum = true. + function getVersionConstraint(c) { + var vConstraint = c.versionConstraint; + + // The meteor-tool version can never be weakened/overridden. + if (c.package === "meteor-tool") { + return vConstraint; + } + + // Overrides cannot be weakened, so in theory they could conflict + // with each other, though that's unlikely to be a problem within a + // single .meteor/packages file. + if (vConstraint.override) { + return vConstraint; + } + + if (overrides.has(c.package)) { + // Make a defensive shallow copy of vConstraint with the same + // prototype (that is, PV.VersionConstraint.prototype). + vConstraint = Object.create( + Object.getPrototypeOf(vConstraint), + Object.getOwnPropertyDescriptors(vConstraint) + ); + + // This weakens the constraint so that it matches any version not + // less than the constraint, regardless of whether the major or + // minor versions are the same. See CS.isConstraintSatisfied in + // constraint-solver.js for the implementation of this behavior. 
+ vConstraint.weakMinimum = true; + } + + return vConstraint; + } + // top-level constraints _.each(input.constraints, function (c) { if (c.constraintString) { analysis.constraints.push(new CS.Solver.Constraint( - null, c.package, c.versionConstraint, + null, c.package, getVersionConstraint(c), "constraint#" + analysis.constraints.length)); if (c.versionConstraint.alternatives.length === 1 && @@ -231,7 +283,7 @@ CS.Solver.prototype.analyze = function () { if (input.isKnownPackage(p2) && dep.packageConstraint.constraintString) { analysis.constraints.push(new CS.Solver.Constraint( - pv, p2, dep.packageConstraint.versionConstraint, + pv, p2, getVersionConstraint(dep.packageConstraint), "constraint#" + analysis.constraints.length)); } }); diff --git a/packages/ddp-client/.npm/package/npm-shrinkwrap.json b/packages/ddp-client/.npm/package/npm-shrinkwrap.json index 46fc9fb495..83ff1746e4 100644 --- a/packages/ddp-client/.npm/package/npm-shrinkwrap.json +++ b/packages/ddp-client/.npm/package/npm-shrinkwrap.json @@ -1,35 +1,10 @@ { "lockfileVersion": 1, "dependencies": { - "faye-websocket": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.1.tgz", - "integrity": "sha1-8O/hjE9W5PQK/H4Gxxn9XuYYjzg=" - }, - "http-parser-js": { - "version": "0.4.6", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.4.6.tgz", - "integrity": "sha1-GVJz9YcExFLWcQdr4gEyndNB3FU=" - }, "lolex": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/lolex/-/lolex-1.4.0.tgz", - "integrity": "sha1-LycSsbwYDendzF06epbvPAuxYq0=" - }, - "permessage-deflate": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/permessage-deflate/-/permessage-deflate-0.1.6.tgz", - "integrity": "sha1-WB8c7fvUQPrEfQd3vohjM4a5kt4=" - }, - "websocket-driver": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.0.tgz", - "integrity": "sha1-DK+dLXVdk67gSdS90NP+LMoqJOs=" - 
}, - "websocket-extensions": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.2.tgz", - "integrity": "sha1-Dhh4HeYpoYMIzhSBZQ9n/6JpOl0=" + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-2.3.1.tgz", + "integrity": "sha512-mQuW55GhduF3ppo+ZRUTz1PRjEh1hS5BbqU7d8D0ez2OKxHDod7StPPeAVKisZR5aLkHZjdGWSL42LSONUJsZw==" } } } diff --git a/packages/ddp-client/client/client.js b/packages/ddp-client/client/client.js new file mode 100644 index 0000000000..fd9c746bbc --- /dev/null +++ b/packages/ddp-client/client/client.js @@ -0,0 +1,6 @@ +export { DDP } from '../common/namespace.js'; + +import '../common/livedata_connection'; + +// Initialize the default server connection and put it on Meteor.connection +import './client_convenience'; diff --git a/packages/ddp-client/client/client_convenience.js b/packages/ddp-client/client/client_convenience.js new file mode 100644 index 0000000000..e4a7b8319c --- /dev/null +++ b/packages/ddp-client/client/client_convenience.js @@ -0,0 +1,74 @@ +import { DDP } from '../common/namespace.js'; +import { Meteor } from 'meteor/meteor'; + +// Meteor.refresh can be called on the client (if you're in common code) but it +// only has an effect on the server. +Meteor.refresh = () => {}; + +// By default, try to connect back to the same endpoint as the page +// was served from. +// +// XXX We should be doing this a different way. Right now we don't +// include ROOT_URL_PATH_PREFIX when computing ddpUrl. (We don't +// include it on the server when computing +// DDP_DEFAULT_CONNECTION_URL, and we don't include it in our +// default, '/'.) We get by with this because DDP.connect then +// forces the URL passed to it to be interpreted relative to the +// app's deploy path, even if it is absolute. 
Instead, we should +// make DDP_DEFAULT_CONNECTION_URL, if set, include the path prefix; +// make the default ddpUrl be '' rather that '/'; and make +// _translateUrl in stream_client_common.js not force absolute paths +// to be treated like relative paths. See also +// stream_client_common.js #RationalizingRelativeDDPURLs +var ddpUrl = '/'; +if (typeof __meteor_runtime_config__ !== 'undefined') { + if (__meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL) + ddpUrl = __meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL; +} + +var retry = new Retry(); + +function onDDPVersionNegotiationFailure(description) { + Meteor._debug(description); + if (Package.reload) { + var migrationData = + Package.reload.Reload._migrationData('livedata') || + Object.create(null); + var failures = migrationData.DDPVersionNegotiationFailures || 0; + ++failures; + Package.reload.Reload._onMigrate('livedata', () => { + return [true, { DDPVersionNegotiationFailures: failures }]; + }); + retry.retryLater(failures, () => { + Package.reload.Reload._reload({ immediateMigration: true }); + }); + } +} + +Meteor.connection = DDP.connect(ddpUrl, { + onDDPVersionNegotiationFailure: onDDPVersionNegotiationFailure +}); + +// Proxy the public methods of Meteor.connection so they can +// be called directly on Meteor. +[ + 'subscribe', + 'methods', + 'call', + 'apply', + 'status', + 'reconnect', + 'disconnect' +].forEach(name => { + Meteor[name] = Meteor.connection[name].bind(Meteor.connection); +}); + +// Meteor.connection used to be called +// Meteor.default_connection. Provide backcompat as a courtesy even +// though it was never documented. +// XXX COMPAT WITH 0.6.4 +Meteor.default_connection = Meteor.connection; + +// We should transition from Meteor.connect to DDP.connect. 
+// XXX COMPAT WITH 0.6.4 +Meteor.connect = DDP.connect; diff --git a/packages/ddp-client/client_convenience.js b/packages/ddp-client/client_convenience.js deleted file mode 100644 index ece26a963f..0000000000 --- a/packages/ddp-client/client_convenience.js +++ /dev/null @@ -1,73 +0,0 @@ -import { DDP } from "./namespace.js"; - -// Meteor.refresh can be called on the client (if you're in common code) but it -// only has an effect on the server. -Meteor.refresh = function (notification) { -}; - -if (Meteor.isClient) { - // By default, try to connect back to the same endpoint as the page - // was served from. - // - // XXX We should be doing this a different way. Right now we don't - // include ROOT_URL_PATH_PREFIX when computing ddpUrl. (We don't - // include it on the server when computing - // DDP_DEFAULT_CONNECTION_URL, and we don't include it in our - // default, '/'.) We get by with this because DDP.connect then - // forces the URL passed to it to be interpreted relative to the - // app's deploy path, even if it is absolute. Instead, we should - // make DDP_DEFAULT_CONNECTION_URL, if set, include the path prefix; - // make the default ddpUrl be '' rather that '/'; and make - // _translateUrl in stream_client_common.js not force absolute paths - // to be treated like relative paths. 
See also - // stream_client_common.js #RationalizingRelativeDDPURLs - var ddpUrl = '/'; - if (typeof __meteor_runtime_config__ !== "undefined") { - if (__meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL) - ddpUrl = __meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL; - } - - var retry = new Retry(); - - var onDDPVersionNegotiationFailure = function (description) { - Meteor._debug(description); - if (Package.reload) { - var migrationData = Package.reload.Reload._migrationData('livedata') || {}; - var failures = migrationData.DDPVersionNegotiationFailures || 0; - ++failures; - Package.reload.Reload._onMigrate('livedata', function () { - return [true, {DDPVersionNegotiationFailures: failures}]; - }); - retry.retryLater(failures, function () { - Package.reload.Reload._reload(); - }); - } - }; - - Meteor.connection = - DDP.connect(ddpUrl, { - onDDPVersionNegotiationFailure: onDDPVersionNegotiationFailure - }); - - // Proxy the public methods of Meteor.connection so they can - // be called directly on Meteor. - _.each(['subscribe', 'methods', 'call', 'apply', 'status', 'reconnect', - 'disconnect'], - function (name) { - Meteor[name] = _.bind(Meteor.connection[name], Meteor.connection); - }); -} else { - // Never set up a default connection on the server. Don't even map - // subscribe/call/etc onto Meteor. - Meteor.connection = null; -} - -// Meteor.connection used to be called -// Meteor.default_connection. Provide backcompat as a courtesy even -// though it was never documented. -// XXX COMPAT WITH 0.6.4 -Meteor.default_connection = Meteor.connection; - -// We should transition from Meteor.connect to DDP.connect. 
-// XXX COMPAT WITH 0.6.4 -Meteor.connect = DDP.connect; diff --git a/packages/ddp-client/common/MethodInvoker.js b/packages/ddp-client/common/MethodInvoker.js new file mode 100644 index 0000000000..f2490b92f8 --- /dev/null +++ b/packages/ddp-client/common/MethodInvoker.js @@ -0,0 +1,85 @@ +// A MethodInvoker manages sending a method to the server and calling the user's +// callbacks. On construction, it registers itself in the connection's +// _methodInvokers map; it removes itself once the method is fully finished and +// the callback is invoked. This occurs when it has both received a result, +// and the data written by it is fully visible. +export default class MethodInvoker { + constructor(options) { + // Public (within this file) fields. + this.methodId = options.methodId; + this.sentMessage = false; + + this._callback = options.callback; + this._connection = options.connection; + this._message = options.message; + this._onResultReceived = options.onResultReceived || (() => {}); + this._wait = options.wait; + this.noRetry = options.noRetry; + this._methodResult = null; + this._dataVisible = false; + + // Register with the connection. + this._connection._methodInvokers[this.methodId] = this; + } + // Sends the method message to the server. May be called additional times if + // we lose the connection and reconnect before receiving a result. + sendMessage() { + // This function is called before sending a method (including resending on + // reconnect). We should only (re)send methods where we don't already have a + // result! + if (this.gotResult()) + throw new Error('sendingMethod is called on method with result'); + + // If we're re-sending it, it doesn't matter if data was written the first + // time. + this._dataVisible = false; + this.sentMessage = true; + + // If this is a wait method, make all data messages be buffered until it is + // done. 
+ if (this._wait) + this._connection._methodsBlockingQuiescence[this.methodId] = true; + + // Actually send the message. + this._connection._send(this._message); + } + // Invoke the callback, if we have both a result and know that all data has + // been written to the local cache. + _maybeInvokeCallback() { + if (this._methodResult && this._dataVisible) { + // Call the callback. (This won't throw: the callback was wrapped with + // bindEnvironment.) + this._callback(this._methodResult[0], this._methodResult[1]); + + // Forget about this method. + delete this._connection._methodInvokers[this.methodId]; + + // Let the connection know that this method is finished, so it can try to + // move on to the next block of methods. + this._connection._outstandingMethodFinished(); + } + } + // Call with the result of the method from the server. Only may be called + // once; once it is called, you should not call sendMessage again. + // If the user provided an onResultReceived callback, call it immediately. + // Then invoke the main callback if data is also visible. + receiveResult(err, result) { + if (this.gotResult()) + throw new Error('Methods should only receive results once'); + this._methodResult = [err, result]; + this._onResultReceived(err, result); + this._maybeInvokeCallback(); + } + // Call this when all data written by the method is visible. This means that + // the method has returns its "data is done" message *AND* all server + // documents that are buffered at that time have been written to the local + // cache. Invokes the main callback if the result has been received. + dataVisible() { + this._dataVisible = true; + this._maybeInvokeCallback(); + } + // True if receiveResult has been called. 
+ gotResult() { + return !!this._methodResult; + } +} diff --git a/packages/ddp-client/livedata_connection.js b/packages/ddp-client/common/livedata_connection.js similarity index 50% rename from packages/ddp-client/livedata_connection.js rename to packages/ddp-client/common/livedata_connection.js index 7e1f1e6f17..9ec5eee42e 100644 --- a/packages/ddp-client/livedata_connection.js +++ b/packages/ddp-client/common/livedata_connection.js @@ -1,11 +1,31 @@ -import { DDP, LivedataTest } from "./namespace.js"; -import { MongoIDMap } from "./id_map.js"; +import { Meteor } from 'meteor/meteor'; +import { DDPCommon } from 'meteor/ddp-common'; +import { Tracker } from 'meteor/tracker'; +import { EJSON } from 'meteor/ejson'; +import { Random } from 'meteor/random'; +import { Hook } from 'meteor/callback-hook'; +import { MongoID } from 'meteor/mongo-id'; +import { DDP } from './namespace.js'; +import MethodInvoker from './MethodInvoker.js'; +import { + hasOwn, + slice, + keys, + isEmpty, + last, +} from "meteor/ddp-common/utils.js"; if (Meteor.isServer) { var Fiber = Npm.require('fibers'); var Future = Npm.require('fibers/future'); } +class MongoIDMap extends IdMap { + constructor() { + super(MongoID.idStringify, MongoID.idParse); + } +} + // @param url {String|Object} URL to Meteor app, // or an object as a test hook (see code) // Options: @@ -26,484 +46,276 @@ if (Meteor.isServer) { // fails. We should have better usability in the latter case (while // still transparently reconnecting if it's just a transient failure // or the server migrating us). -var Connection = function (url, options) { - var self = this; - options = _.extend({ - onConnected: function () {}, - onDDPVersionNegotiationFailure: function (description) { - Meteor._debug(description); - }, - heartbeatInterval: 17500, - heartbeatTimeout: 15000, - npmFayeOptions: {}, - // These options are only for testing. 
- reloadWithOutstanding: false, - supportedDDPVersions: DDPCommon.SUPPORTED_DDP_VERSIONS, - retry: true, - respondToPings: true, - // When updates are coming within this ms interval, batch them together. - bufferedWritesInterval: 5, - // Flush buffers immediately if writes are happening continuously for more than this many ms. - bufferedWritesMaxAge: 500 - }, options); +export class Connection { + constructor(url, options) { + var self = this; - // If set, called when we reconnect, queuing method calls _before_ the - // existing outstanding ones. - // NOTE: This feature has been preserved for backwards compatibility. The - // preferred method of setting a callback on reconnect is to use - // DDP.onReconnect. - self.onReconnect = null; + this.options = options = { + onConnected() {}, + onDDPVersionNegotiationFailure(description) { + Meteor._debug(description); + }, + heartbeatInterval: 17500, + heartbeatTimeout: 15000, + npmFayeOptions: Object.create(null), + // These options are only for testing. + reloadWithOutstanding: false, + supportedDDPVersions: DDPCommon.SUPPORTED_DDP_VERSIONS, + retry: true, + respondToPings: true, + // When updates are coming within this ms interval, batch them together. + bufferedWritesInterval: 5, + // Flush buffers immediately if writes are happening continuously for more than this many ms. + bufferedWritesMaxAge: 500, - // as a test hook, allow passing a stream instead of a url. - if (typeof url === "object") { - self._stream = url; - } else { - self._stream = new LivedataTest.ClientStream(url, { - retry: options.retry, - headers: options.headers, - _sockjsOptions: options._sockjsOptions, - // Used to keep some tests quiet, or for other cases in which - // the right thing to do with connection errors is to silently - // fail (e.g. sending package usage stats). At some point we - // should have a real API for handling client-stream-level - // errors. 
- _dontPrintErrors: options._dontPrintErrors, - connectTimeoutMs: options.connectTimeoutMs, - npmFayeOptions: options.npmFayeOptions - }); - } + ...options + }; - self._lastSessionId = null; - self._versionSuggestion = null; // The last proposed DDP version. - self._version = null; // The DDP version agreed on by client and server. - self._stores = {}; // name -> object with methods - self._methodHandlers = {}; // name -> func - self._nextMethodId = 1; - self._supportedDDPVersions = options.supportedDDPVersions; + // If set, called when we reconnect, queuing method calls _before_ the + // existing outstanding ones. + // NOTE: This feature has been preserved for backwards compatibility. The + // preferred method of setting a callback on reconnect is to use + // DDP.onReconnect. + self.onReconnect = null; - self._heartbeatInterval = options.heartbeatInterval; - self._heartbeatTimeout = options.heartbeatTimeout; - - // Tracks methods which the user has tried to call but which have not yet - // called their user callback (ie, they are waiting on their result or for all - // of their writes to be written to the local cache). Map from method ID to - // MethodInvoker object. - self._methodInvokers = {}; - - // Tracks methods which the user has called but whose result messages have not - // arrived yet. - // - // _outstandingMethodBlocks is an array of blocks of methods. Each block - // represents a set of methods that can run at the same time. The first block - // represents the methods which are currently in flight; subsequent blocks - // must wait for previous blocks to be fully finished before they can be sent - // to the server. 
- // - // Each block is an object with the following fields: - // - methods: a list of MethodInvoker objects - // - wait: a boolean; if true, this block had a single method invoked with - // the "wait" option - // - // There will never be adjacent blocks with wait=false, because the only thing - // that makes methods need to be serialized is a wait method. - // - // Methods are removed from the first block when their "result" is - // received. The entire first block is only removed when all of the in-flight - // methods have received their results (so the "methods" list is empty) *AND* - // all of the data written by those methods are visible in the local cache. So - // it is possible for the first block's methods list to be empty, if we are - // still waiting for some objects to quiesce. - // - // Example: - // _outstandingMethodBlocks = [ - // {wait: false, methods: []}, - // {wait: true, methods: []}, - // {wait: false, methods: [, - // ]}] - // This means that there were some methods which were sent to the server and - // which have returned their results, but some of the data written by - // the methods may not be visible in the local cache. Once all that data is - // visible, we will send a 'login' method. Once the login method has returned - // and all the data is visible (including re-running subs if userId changes), - // we will send the 'foo' and 'bar' methods in parallel. - self._outstandingMethodBlocks = []; - - // method ID -> array of objects with keys 'collection' and 'id', listing - // documents written by a given method's stub. keys are associated with - // methods whose stub wrote at least one document, and whose data-done message - // has not yet been received. - self._documentsWrittenByStub = {}; - // collection -> IdMap of "server document" object. 
A "server document" has: - // - "document": the version of the document according the - // server (ie, the snapshot before a stub wrote it, amended by any changes - // received from the server) - // It is undefined if we think the document does not exist - // - "writtenByStubs": a set of method IDs whose stubs wrote to the document - // whose "data done" messages have not yet been processed - self._serverDocuments = {}; - - // Array of callbacks to be called after the next update of the local - // cache. Used for: - // - Calling methodInvoker.dataVisible and sub ready callbacks after - // the relevant data is flushed. - // - Invoking the callbacks of "half-finished" methods after reconnect - // quiescence. Specifically, methods whose result was received over the old - // connection (so we don't re-send it) but whose data had not been made - // visible. - self._afterUpdateCallbacks = []; - - // In two contexts, we buffer all incoming data messages and then process them - // all at once in a single update: - // - During reconnect, we buffer all data messages until all subs that had - // been ready before reconnect are ready again, and all methods that are - // active have returned their "data done message"; then - // - During the execution of a "wait" method, we buffer all data messages - // until the wait method gets its "data done" message. (If the wait method - // occurs during reconnect, it doesn't get any special handling.) - // all data messages are processed in one update. - // - // The following fields are used for this "quiescence" process. - - // This buffers the messages that aren't being processed yet. - self._messagesBufferedUntilQuiescence = []; - // Map from method ID -> true. Methods are removed from this when their - // "data done" message is received, and we will not quiesce until it is - // empty. 
- self._methodsBlockingQuiescence = {}; - // map from sub ID -> true for subs that were ready (ie, called the sub - // ready callback) before reconnect but haven't become ready again yet - self._subsBeingRevived = {}; // map from sub._id -> true - // if true, the next data update should reset all stores. (set during - // reconnect.) - self._resetStores = false; - - // name -> array of updates for (yet to be created) collections - self._updatesForUnknownStores = {}; - // if we're blocking a migration, the retry func - self._retryMigrate = null; - - self.__flushBufferedWrites = Meteor.bindEnvironment( - self._flushBufferedWrites, "flushing DDP buffered writes", self); - // Collection name -> array of messages. - self._bufferedWrites = {}; - // When current buffer of updates must be flushed at, in ms timestamp. - self._bufferedWritesFlushAt = null; - // Timeout handle for the next processing of all pending writes - self._bufferedWritesFlushHandle = null; - - self._bufferedWritesInterval = options.bufferedWritesInterval; - self._bufferedWritesMaxAge = options.bufferedWritesMaxAge; - - // metadata for subscriptions. Map from sub ID to object with keys: - // - id - // - name - // - params - // - inactive (if true, will be cleaned up if not reused in re-run) - // - ready (has the 'ready' message been received?) - // - readyCallback (an optional callback to call when ready) - // - errorCallback (an optional callback to call if the sub terminates with - // an error, XXX COMPAT WITH 1.0.3.1) - // - stopCallback (an optional callback to call when the sub terminates - // for any reason, with an error argument if an error triggered the stop) - self._subscriptions = {}; - - // Reactive userId. - self._userId = null; - self._userIdDeps = new Tracker.Dependency; - - // Block auto-reload while we're waiting for method responses. 
- if (Meteor.isClient && Package.reload && !options.reloadWithOutstanding) { - Package.reload.Reload._onMigrate(function (retry) { - if (!self._readyToMigrate()) { - if (self._retryMigrate) - throw new Error("Two migrations in progress?"); - self._retryMigrate = retry; - return false; - } else { - return [true]; - } - }); - } - - var onMessage = function (raw_msg) { - try { - var msg = DDPCommon.parseDDP(raw_msg); - } catch (e) { - Meteor._debug("Exception while parsing DDP", e); - return; + // as a test hook, allow passing a stream instead of a url. + if (typeof url === 'object') { + self._stream = url; + } else { + const { ClientStream } = require("meteor/socket-stream-client"); + self._stream = new ClientStream(url, { + retry: options.retry, + ConnectionError: DDP.ConnectionError, + headers: options.headers, + _sockjsOptions: options._sockjsOptions, + // Used to keep some tests quiet, or for other cases in which + // the right thing to do with connection errors is to silently + // fail (e.g. sending package usage stats). At some point we + // should have a real API for handling client-stream-level + // errors. + _dontPrintErrors: options._dontPrintErrors, + connectTimeoutMs: options.connectTimeoutMs, + npmFayeOptions: options.npmFayeOptions + }); } - // Any message counts as receiving a pong, as it demonstrates that - // the server is still alive. - if (self._heartbeat) { - self._heartbeat.messageReceived(); - } + self._lastSessionId = null; + self._versionSuggestion = null; // The last proposed DDP version. + self._version = null; // The DDP version agreed on by client and server. + self._stores = Object.create(null); // name -> object with methods + self._methodHandlers = Object.create(null); // name -> func + self._nextMethodId = 1; + self._supportedDDPVersions = options.supportedDDPVersions; - if (msg === null || !msg.msg) { - // XXX COMPAT WITH 0.6.6. ignore the old welcome message for back - // compat. 
Remove this 'if' once the server stops sending welcome - // messages (stream_server.js). - if (! (msg && msg.server_id)) - Meteor._debug("discarding invalid livedata message", msg); - return; - } + self._heartbeatInterval = options.heartbeatInterval; + self._heartbeatTimeout = options.heartbeatTimeout; - if (msg.msg === 'connected') { - self._version = self._versionSuggestion; - self._livedata_connected(msg); - options.onConnected(); - } - else if (msg.msg === 'failed') { - if (_.contains(self._supportedDDPVersions, msg.version)) { - self._versionSuggestion = msg.version; - self._stream.reconnect({_force: true}); - } else { - var description = - "DDP version negotiation failed; server requested version " + msg.version; - self._stream.disconnect({_permanent: true, _error: description}); - options.onDDPVersionNegotiationFailure(description); - } - } - else if (msg.msg === 'ping' && options.respondToPings) { - self._send({msg: "pong", id: msg.id}); - } - else if (msg.msg === 'pong') { - // noop, as we assume everything's a pong - } - else if (_.include(['added', 'changed', 'removed', 'ready', 'updated'], msg.msg)) - self._livedata_data(msg); - else if (msg.msg === 'nosub') - self._livedata_nosub(msg); - else if (msg.msg === 'result') - self._livedata_result(msg); - else if (msg.msg === 'error') - self._livedata_error(msg); - else - Meteor._debug("discarding unknown livedata message type", msg); - }; + // Tracks methods which the user has tried to call but which have not yet + // called their user callback (ie, they are waiting on their result or for all + // of their writes to be written to the local cache). Map from method ID to + // MethodInvoker object. + self._methodInvokers = Object.create(null); - var onReset = function () { - // Send a connect message at the beginning of the stream. - // NOTE: reset is called even on the first connection, so this is - // the only place we send this message. 
- var msg = {msg: 'connect'}; - if (self._lastSessionId) - msg.session = self._lastSessionId; - msg.version = self._versionSuggestion || self._supportedDDPVersions[0]; - self._versionSuggestion = msg.version; - msg.support = self._supportedDDPVersions; - self._send(msg); + // Tracks methods which the user has called but whose result messages have not + // arrived yet. + // + // _outstandingMethodBlocks is an array of blocks of methods. Each block + // represents a set of methods that can run at the same time. The first block + // represents the methods which are currently in flight; subsequent blocks + // must wait for previous blocks to be fully finished before they can be sent + // to the server. + // + // Each block is an object with the following fields: + // - methods: a list of MethodInvoker objects + // - wait: a boolean; if true, this block had a single method invoked with + // the "wait" option + // + // There will never be adjacent blocks with wait=false, because the only thing + // that makes methods need to be serialized is a wait method. + // + // Methods are removed from the first block when their "result" is + // received. The entire first block is only removed when all of the in-flight + // methods have received their results (so the "methods" list is empty) *AND* + // all of the data written by those methods are visible in the local cache. So + // it is possible for the first block's methods list to be empty, if we are + // still waiting for some objects to quiesce. + // + // Example: + // _outstandingMethodBlocks = [ + // {wait: false, methods: []}, + // {wait: true, methods: []}, + // {wait: false, methods: [, + // ]}] + // This means that there were some methods which were sent to the server and + // which have returned their results, but some of the data written by + // the methods may not be visible in the local cache. Once all that data is + // visible, we will send a 'login' method. 
Once the login method has returned + // and all the data is visible (including re-running subs if userId changes), + // we will send the 'foo' and 'bar' methods in parallel. + self._outstandingMethodBlocks = []; - // Mark non-retry calls as failed. This has to be done early as getting these methods out of the - // current block is pretty important to making sure that quiescence is properly calculated, as - // well as possibly moving on to another useful block. + // method ID -> array of objects with keys 'collection' and 'id', listing + // documents written by a given method's stub. keys are associated with + // methods whose stub wrote at least one document, and whose data-done message + // has not yet been received. + self._documentsWrittenByStub = Object.create(null); + // collection -> IdMap of "server document" object. A "server document" has: + // - "document": the version of the document according the + // server (ie, the snapshot before a stub wrote it, amended by any changes + // received from the server) + // It is undefined if we think the document does not exist + // - "writtenByStubs": a set of method IDs whose stubs wrote to the document + // whose "data done" messages have not yet been processed + self._serverDocuments = Object.create(null); - // Only bother testing if there is an outstandingMethodBlock (there might not be, especially if - // we are connecting for the first time. - if (self._outstandingMethodBlocks.length > 0) { - // If there is an outstanding method block, we only care about the first one as that is the - // one that could have already sent messages with no response, that are not allowed to retry. - const currentMethodBlock = self._outstandingMethodBlocks[0].methods; - self._outstandingMethodBlocks[0].methods = currentMethodBlock.filter((methodInvoker) => { + // Array of callbacks to be called after the next update of the local + // cache. 
Used for: + // - Calling methodInvoker.dataVisible and sub ready callbacks after + // the relevant data is flushed. + // - Invoking the callbacks of "half-finished" methods after reconnect + // quiescence. Specifically, methods whose result was received over the old + // connection (so we don't re-send it) but whose data had not been made + // visible. + self._afterUpdateCallbacks = []; - // Methods with 'noRetry' option set are not allowed to re-send after - // recovering dropped connection. - if (methodInvoker.sentMessage && methodInvoker.noRetry) { - // Make sure that the method is told that it failed. - methodInvoker.receiveResult(new Meteor.Error('invocation-failed', - 'Method invocation might have failed due to dropped connection. ' + - 'Failing because `noRetry` option was passed to Meteor.apply.')); + // In two contexts, we buffer all incoming data messages and then process them + // all at once in a single update: + // - During reconnect, we buffer all data messages until all subs that had + // been ready before reconnect are ready again, and all methods that are + // active have returned their "data done message"; then + // - During the execution of a "wait" method, we buffer all data messages + // until the wait method gets its "data done" message. (If the wait method + // occurs during reconnect, it doesn't get any special handling.) + // all data messages are processed in one update. + // + // The following fields are used for this "quiescence" process. + + // This buffers the messages that aren't being processed yet. + self._messagesBufferedUntilQuiescence = []; + // Map from method ID -> true. Methods are removed from this when their + // "data done" message is received, and we will not quiesce until it is + // empty. 
+ self._methodsBlockingQuiescence = Object.create(null); + // map from sub ID -> true for subs that were ready (ie, called the sub + // ready callback) before reconnect but haven't become ready again yet + self._subsBeingRevived = Object.create(null); // map from sub._id -> true + // if true, the next data update should reset all stores. (set during + // reconnect.) + self._resetStores = false; + + // name -> array of updates for (yet to be created) collections + self._updatesForUnknownStores = Object.create(null); + // if we're blocking a migration, the retry func + self._retryMigrate = null; + + self.__flushBufferedWrites = Meteor.bindEnvironment( + self._flushBufferedWrites, + 'flushing DDP buffered writes', + self + ); + // Collection name -> array of messages. + self._bufferedWrites = Object.create(null); + // When current buffer of updates must be flushed at, in ms timestamp. + self._bufferedWritesFlushAt = null; + // Timeout handle for the next processing of all pending writes + self._bufferedWritesFlushHandle = null; + + self._bufferedWritesInterval = options.bufferedWritesInterval; + self._bufferedWritesMaxAge = options.bufferedWritesMaxAge; + + // metadata for subscriptions. Map from sub ID to object with keys: + // - id + // - name + // - params + // - inactive (if true, will be cleaned up if not reused in re-run) + // - ready (has the 'ready' message been received?) + // - readyCallback (an optional callback to call when ready) + // - errorCallback (an optional callback to call if the sub terminates with + // an error, XXX COMPAT WITH 1.0.3.1) + // - stopCallback (an optional callback to call when the sub terminates + // for any reason, with an error argument if an error triggered the stop) + self._subscriptions = Object.create(null); + + // Reactive userId. + self._userId = null; + self._userIdDeps = new Tracker.Dependency(); + + // Block auto-reload while we're waiting for method responses. + if (Meteor.isClient && + Package.reload && + ! 
options.reloadWithOutstanding) { + Package.reload.Reload._onMigrate(retry => { + if (! self._readyToMigrate()) { + self._retryMigrate = retry; + return [false]; + } else { + return [true]; } - - // Only keep a method if it wasn't sent or it's allowed to retry. - // This may leave the block empty, but we don't move on to the next - // block until the callback has been delivered, in _outstandingMethodFinished. - return !(methodInvoker.sentMessage && methodInvoker.noRetry); }); } - // Now, to minimize setup latency, go ahead and blast out all of - // our pending methods ands subscriptions before we've even taken - // the necessary RTT to know if we successfully reconnected. (1) - // They're supposed to be idempotent, and where they are not, - // they can block retry in apply; (2) even if we did reconnect, - // we're not sure what messages might have gotten lost - // (in either direction) since we were disconnected (TCP being - // sloppy about that.) + var onDisconnect = () => { + if (self._heartbeat) { + self._heartbeat.stop(); + self._heartbeat = null; + } + }; - // If the current block of methods all got their results (but didn't all get - // their data visible), discard the empty block now. - if (! _.isEmpty(self._outstandingMethodBlocks) && - _.isEmpty(self._outstandingMethodBlocks[0].methods)) { - self._outstandingMethodBlocks.shift(); + if (Meteor.isServer) { + self._stream.on( + 'message', + Meteor.bindEnvironment( + this.onMessage.bind(this), + 'handling DDP message' + ) + ); + self._stream.on( + 'reset', + Meteor.bindEnvironment(this.onReset.bind(this), 'handling DDP reset') + ); + self._stream.on( + 'disconnect', + Meteor.bindEnvironment(onDisconnect, 'handling DDP disconnect') + ); + } else { + self._stream.on('message', this.onMessage.bind(this)); + self._stream.on('reset', this.onReset.bind(this)); + self._stream.on('disconnect', onDisconnect); } - - // Mark all messages as unsent, they have not yet been sent on this - // connection. 
- _.each(self._methodInvokers, function (m) { - m.sentMessage = false; - }); - - // If an `onReconnect` handler is set, call it first. Go through - // some hoops to ensure that methods that are called from within - // `onReconnect` get executed _before_ ones that were originally - // outstanding (since `onReconnect` is used to re-establish auth - // certificates) - self._callOnReconnectAndSendAppropriateOutstandingMethods(); - - // add new subscriptions at the end. this way they take effect after - // the handlers and we don't see flicker. - _.each(self._subscriptions, function (sub, id) { - self._send({ - msg: 'sub', - id: id, - name: sub.name, - params: sub.params - }); - }); - }; - - var onDisconnect = function () { - if (self._heartbeat) { - self._heartbeat.stop(); - self._heartbeat = null; - } - }; - - if (Meteor.isServer) { - self._stream.on('message', Meteor.bindEnvironment(onMessage, "handling DDP message")); - self._stream.on('reset', Meteor.bindEnvironment(onReset, "handling DDP reset")); - self._stream.on('disconnect', Meteor.bindEnvironment(onDisconnect, "handling DDP disconnect")); - } else { - self._stream.on('message', onMessage); - self._stream.on('reset', onReset); - self._stream.on('disconnect', onDisconnect); } -}; -// A MethodInvoker manages sending a method to the server and calling the user's -// callbacks. On construction, it registers itself in the connection's -// _methodInvokers map; it removes itself once the method is fully finished and -// the callback is invoked. This occurs when it has both received a result, -// and the data written by it is fully visible. -var MethodInvoker = function (options) { - var self = this; - - // Public (within this file) fields. 
- self.methodId = options.methodId; - self.sentMessage = false; - - self._callback = options.callback; - self._connection = options.connection; - self._message = options.message; - self._onResultReceived = options.onResultReceived || function () {}; - self._wait = options.wait; - self.noRetry = options.noRetry; - self._methodResult = null; - self._dataVisible = false; - - // Register with the connection. - self._connection._methodInvokers[self.methodId] = self; -}; -_.extend(MethodInvoker.prototype, { - // Sends the method message to the server. May be called additional times if - // we lose the connection and reconnect before receiving a result. - sendMessage: function () { - var self = this; - // This function is called before sending a method (including resending on - // reconnect). We should only (re)send methods where we don't already have a - // result! - if (self.gotResult()) - throw new Error("sendingMethod is called on method with result"); - - - // If we're re-sending it, it doesn't matter if data was written the first - // time. - self._dataVisible = false; - self.sentMessage = true; - - // If this is a wait method, make all data messages be buffered until it is - // done. - if (self._wait) - self._connection._methodsBlockingQuiescence[self.methodId] = true; - - // Actually send the message. - self._connection._send(self._message); - }, - // Invoke the callback, if we have both a result and know that all data has - // been written to the local cache. - _maybeInvokeCallback: function () { - var self = this; - if (self._methodResult && self._dataVisible) { - // Call the callback. (This won't throw: the callback was wrapped with - // bindEnvironment.) - self._callback(self._methodResult[0], self._methodResult[1]); - - // Forget about this method. - delete self._connection._methodInvokers[self.methodId]; - - // Let the connection know that this method is finished, so it can try to - // move on to the next block of methods. 
- self._connection._outstandingMethodFinished(); - } - }, - // Call with the result of the method from the server. Only may be called - // once; once it is called, you should not call sendMessage again. - // If the user provided an onResultReceived callback, call it immediately. - // Then invoke the main callback if data is also visible. - receiveResult: function (err, result) { - var self = this; - if (self.gotResult()) - throw new Error("Methods should only receive results once"); - self._methodResult = [err, result]; - self._onResultReceived(err, result); - self._maybeInvokeCallback(); - }, - // Call this when all data written by the method is visible. This means that - // the method has returns its "data is done" message *AND* all server - // documents that are buffered at that time have been written to the local - // cache. Invokes the main callback if the result has been received. - dataVisible: function () { - var self = this; - self._dataVisible = true; - self._maybeInvokeCallback(); - }, - // True if receiveResult has been called. - gotResult: function () { - var self = this; - return !!self._methodResult; - } -}); - -_.extend(Connection.prototype, { // 'name' is the name of the data on the wire that should go in the // store. 'wrappedStore' should be an object with methods beginUpdate, update, // endUpdate, saveOriginals, retrieveOriginals. see Collection for an example. - registerStore: function (name, wrappedStore) { + registerStore(name, wrappedStore) { var self = this; - if (name in self._stores) - return false; + if (name in self._stores) return false; // Wrap the input object in an object which makes any store method not // implemented by 'store' into a no-op. - var store = {}; - _.each(['update', 'beginUpdate', 'endUpdate', 'saveOriginals', - 'retrieveOriginals', 'getDoc', - '_getCollection'], function (method) { - store[method] = function () { - return (wrappedStore[method] - ? 
wrappedStore[method].apply(wrappedStore, arguments) - : undefined); - }; - }); + var store = Object.create(null); + [ 'update', + 'beginUpdate', + 'endUpdate', + 'saveOriginals', + 'retrieveOriginals', + 'getDoc', + '_getCollection' + ].forEach(method => { + store[method] = (...args) => { + if (wrappedStore[method]) { + return wrappedStore[method](...args); + } + }; + }); self._stores[name] = store; var queued = self._updatesForUnknownStores[name]; if (queued) { store.beginUpdate(queued.length, false); - _.each(queued, function (msg) { + queued.forEach(msg => { store.update(msg); }); store.endUpdate(); @@ -511,11 +323,12 @@ _.extend(Connection.prototype, { } return true; - }, + } /** * @memberOf Meteor * @importFromPackage meteor + * @alias Meteor.subscribe * @summary Subscribe to a record set. Returns a handle that provides * `stop()` and `ready()` methods. * @locus Client @@ -528,20 +341,22 @@ _.extend(Connection.prototype, { * argument to `onStop`. If a function is passed instead of an object, it * is interpreted as an `onReady` callback. */ - subscribe: function (name /* .. [arguments] .. (callback|callbacks) */) { + subscribe(name /* .. [arguments] .. (callback|callbacks) */) { var self = this; - var params = Array.prototype.slice.call(arguments, 1); - var callbacks = {}; + var params = slice.call(arguments, 1); + var callbacks = Object.create(null); if (params.length) { var lastParam = params[params.length - 1]; - if (_.isFunction(lastParam)) { + if (typeof lastParam === 'function') { callbacks.onReady = params.pop(); - } else if (lastParam && + } else if (lastParam && [ + lastParam.onReady, // XXX COMPAT WITH 1.0.3.1 onError used to exist, but now we use // onStop with an error callback instead. 
- _.any([lastParam.onReady, lastParam.onError, lastParam.onStop], - _.isFunction)) { + lastParam.onError, + lastParam.onStop + ].some(f => typeof f === "function")) { callbacks = params.pop(); } } @@ -564,9 +379,14 @@ _.extend(Connection.prototype, { // We only look for one such sub; if there are N apparently-identical subs // being invalidated, we will require N matching subscribe calls to keep // them all active. - var existing = _.find(self._subscriptions, function (sub) { - return sub.inactive && sub.name === name && - EJSON.equals(sub.params, params); + var existing; + keys(self._subscriptions).some(id => { + const sub = self._subscriptions[id]; + if (sub.inactive && + sub.name === name && + EJSON.equals(sub.params, params)) { + return existing = sub; + } }); var id; @@ -611,18 +431,18 @@ _.extend(Connection.prototype, { params: EJSON.clone(params), inactive: false, ready: false, - readyDeps: new Tracker.Dependency, + readyDeps: new Tracker.Dependency(), readyCallback: callbacks.onReady, // XXX COMPAT WITH 1.0.3.1 #errorCallback errorCallback: callbacks.onError, stopCallback: callbacks.onStop, connection: self, - remove: function() { + remove() { delete this.connection._subscriptions[this.id]; this.ready && this.readyDeps.changed(); }, - stop: function() { - this.connection._send({msg: 'unsub', id: id}); + stop() { + this.connection._send({ msg: 'unsub', id: id }); this.remove(); if (callbacks.onStop) { @@ -630,21 +450,22 @@ _.extend(Connection.prototype, { } } }; - self._send({msg: 'sub', id: id, name: name, params: params}); + self._send({ msg: 'sub', id: id, name: name, params: params }); } // return a handle to the application. var handle = { - stop: function () { - if (!_.has(self._subscriptions, id)) + stop() { + if (! hasOwn.call(self._subscriptions, id)) { return; - + } self._subscriptions[id].stop(); }, - ready: function () { + ready() { // return false if we've unsubscribed. - if (!_.has(self._subscriptions, id)) + if (! 
hasOwn.call(self._subscriptions, id)) { return false; + } var record = self._subscriptions[id]; record.readyDeps.depend(); return record.ready; @@ -659,101 +480,84 @@ _.extend(Connection.prototype, { // as a change to mark the subscription "inactive" so that it can // be reused from the rerun. If it isn't reused, it's killed from // an afterFlush. - Tracker.onInvalidate(function (c) { - if (_.has(self._subscriptions, id)) + Tracker.onInvalidate(c => { + if (hasOwn.call(self._subscriptions, id)) { self._subscriptions[id].inactive = true; + } - Tracker.afterFlush(function () { - if (_.has(self._subscriptions, id) && - self._subscriptions[id].inactive) + Tracker.afterFlush(() => { + if (hasOwn.call(self._subscriptions, id) && + self._subscriptions[id].inactive) { handle.stop(); + } }); }); } return handle; - }, + } // options: // - onLateError {Function(error)} called if an error was received after the ready event. // (errors received before ready cause an error to be thrown) - _subscribeAndWait: function (name, args, options) { + _subscribeAndWait(name, args, options) { var self = this; var f = new Future(); var ready = false; var handle; args = args || []; args.push({ - onReady: function () { + onReady() { ready = true; f['return'](); }, - onError: function (e) { - if (!ready) - f['throw'](e); - else - options && options.onLateError && options.onLateError(e); + onError(e) { + if (!ready) f['throw'](e); + else options && options.onLateError && options.onLateError(e); } }); handle = self.subscribe.apply(self, [name].concat(args)); f.wait(); return handle; - }, + } - methods: function (methods) { - var self = this; - _.each(methods, function (func, name) { - if (typeof func !== 'function') + methods(methods) { + keys(methods).forEach(name => { + const func = methods[name]; + if (typeof func !== 'function') { throw new Error("Method '" + name + "' must be a function"); - if (self._methodHandlers[name]) + } + if (this._methodHandlers[name]) { throw new Error("A method 
named '" + name + "' is already defined"); - self._methodHandlers[name] = func; + } + this._methodHandlers[name] = func; }); - }, + } /** * @memberOf Meteor * @importFromPackage meteor + * @alias Meteor.call * @summary Invokes a method passing any number of arguments. * @locus Anywhere * @param {String} name Name of method to invoke * @param {EJSONable} [arg1,arg2...] Optional method arguments * @param {Function} [asyncCallback] Optional callback, which is called asynchronously with the error or result after the method is complete. If not provided, the method runs synchronously if possible (see below). */ - call: function (name /* .. [arguments] .. callback */) { + call(name /* .. [arguments] .. callback */) { // if it's a function, the last argument is the result callback, // not a parameter to the remote method. - var args = Array.prototype.slice.call(arguments, 1); - if (args.length && typeof args[args.length - 1] === "function") + var args = slice.call(arguments, 1); + if (args.length && typeof args[args.length - 1] === 'function') var callback = args.pop(); return this.apply(name, args, callback); - }, - - // @param options {Optional Object} - // wait: Boolean - Should we wait to call this until all current methods - // are fully finished, and block subsequent method calls - // until this method is fully finished? - // (does not affect methods called from within this method) - // onResultReceived: Function - a callback to call as soon as the method - // result is received. the data written by - // the method may not yet be in the cache! - // returnStubValue: Boolean - If true then in cases where we would have - // otherwise discarded the stub's return value - // and returned undefined, instead we go ahead - // and return it. Specifically, this is any - // time other than when (a) we are already - // inside a stub or (b) we are in Node and no - // callback was provided. 
Currently we require - // this flag to be explicitly passed to reduce - // the likelihood that stub return values will - // be confused with server return values; we - // may improve this in future. - // @param callback {Optional Function} + } /** * @memberOf Meteor * @importFromPackage meteor + * @alias Meteor.apply * @summary Invoke a method passing an array of arguments. * @locus Anywhere * @param {String} name Name of method to invoke @@ -763,18 +567,19 @@ _.extend(Connection.prototype, { * @param {Function} options.onResultReceived (Client only) This callback is invoked with the error or result of the method (just like `asyncCallback`) as soon as the error or result is available. The local cache may not yet reflect the writes performed by the method. * @param {Boolean} options.noRetry (Client only) if true, don't send this method again on reload, simply call the callback an error with the error code 'invocation-failed'. * @param {Boolean} options.throwStubExceptions (Client only) If true, exceptions thrown by method stubs will be thrown instead of logged, and the method will not be invoked on the server. + * @param {Boolean} options.returnStubValue (Client only) If true then in cases where we would have otherwise discarded the stub's return value and returned undefined, instead we go ahead and return it. Specifically, this is any time other than when (a) we are already inside a stub or (b) we are in Node and no callback was provided. Currently we require this flag to be explicitly passed to reduce the likelihood that stub return values will be confused with server return values; we may improve this in future. * @param {Function} [asyncCallback] Optional callback; same semantics as in [`Meteor.call`](#meteor_call). */ - apply: function (name, args, options, callback) { + apply(name, args, options, callback) { var self = this; // We were passed 3 arguments. 
They may be either (name, args, options) // or (name, args, callback) if (!callback && typeof options === 'function') { callback = options; - options = {}; + options = Object.create(null); } - options = options || {}; + options = options || Object.create(null); if (callback) { // XXX would it be better form to do the binding in stream.on, @@ -790,16 +595,6 @@ _.extend(Connection.prototype, { // while because of a wait method). args = EJSON.clone(args); - // Lazily allocate method ID once we know that it'll be needed. - var methodId = (function () { - var id; - return function () { - if (id === undefined) - id = '' + (self._nextMethodId++); - return id; - }; - })(); - var enclosing = DDP._CurrentMethodInvocation.get(); var alreadyInSimulation = enclosing && enclosing.isSimulation; @@ -814,7 +609,7 @@ _.extend(Connection.prototype, { // randomSeed to save bandwidth, and we don't even generate it to save a // bit of CPU and to avoid consuming entropy. var randomSeed = null; - var randomSeedGenerator = function () { + var randomSeedGenerator = () => { if (randomSeed === null) { randomSeed = DDPCommon.makeRpcSeed(enclosing, name); } @@ -835,7 +630,7 @@ _.extend(Connection.prototype, { var stub = self._methodHandlers[name]; if (stub) { - var setUserId = function(userId) { + var setUserId = userId => { self.setUserId(userId); }; @@ -843,34 +638,34 @@ _.extend(Connection.prototype, { isSimulation: true, userId: self.userId(), setUserId: setUserId, - randomSeed: function () { return randomSeedGenerator(); } + randomSeed() { + return randomSeedGenerator(); + } }); - if (!alreadyInSimulation) - self._saveOriginals(); + if (!alreadyInSimulation) self._saveOriginals(); try { // Note that unlike in the corresponding server code, we never audit // that stubs check() their arguments. 
- var stubReturnValue = DDP._CurrentMethodInvocation.withValue(invocation, function () { - if (Meteor.isServer) { - // Because saveOriginals and retrieveOriginals aren't reentrant, - // don't allow stubs to yield. - return Meteor._noYieldsAllowed(function () { - // re-clone, so that the stub can't affect our caller's values + var stubReturnValue = DDP._CurrentMethodInvocation.withValue( + invocation, + () => { + if (Meteor.isServer) { + // Because saveOriginals and retrieveOriginals aren't reentrant, + // don't allow stubs to yield. + return Meteor._noYieldsAllowed(() => { + // re-clone, so that the stub can't affect our caller's values + return stub.apply(invocation, EJSON.clone(args)); + }); + } else { return stub.apply(invocation, EJSON.clone(args)); - }); - } else { - return stub.apply(invocation, EJSON.clone(args)); + } } - }); - } - catch (e) { + ); + } catch (e) { var exception = e; } - - if (!alreadyInSimulation) - self._retrieveAndStoreOriginals(methodId()); } // If we're in a simulation, stop and return the result we have, @@ -881,28 +676,46 @@ _.extend(Connection.prototype, { callback(exception, stubReturnValue); return undefined; } - if (exception) - throw exception; + if (exception) throw exception; return stubReturnValue; } + // We only create the methodId here because we don't actually need one if + // we're already in a simulation + const methodId = '' + self._nextMethodId++; + if (stub) { + self._retrieveAndStoreOriginals(methodId); + } + + // Generate the DDP message for the method call. Note that on the client, + // it is important that the stub have finished before we send the RPC, so + // that we know we have a complete list of which local documents the stub + // wrote. 
+ var message = { + msg: 'method', + method: name, + params: args, + id: methodId + }; + // If an exception occurred in a stub, and we're ignoring it // because we're doing an RPC and want to use what the server // returns instead, log it so the developer knows // (unless they explicitly ask to see the error). // - // Tests can set the 'expected' flag on an exception so it won't + // Tests can set the '_expectedByTest' flag on an exception so it won't // go to log. if (exception) { if (options.throwStubExceptions) { throw exception; - } else if (!exception.expected) { - Meteor._debug("Exception while simulating the effect of invoking '" + - name + "'", exception, exception.stack); + } else if (!exception._expectedByTest) { + Meteor._debug( + "Exception while simulating the effect of invoking '" + name + "'", + exception + ); } } - // At this point we're definitely doing an RPC, and we're going to // return the value of the RPC to the caller. @@ -913,26 +726,16 @@ _.extend(Connection.prototype, { // only thing we can do is to return undefined and discard the // result of the RPC. If an error occurred then print the error // to the console. - callback = function (err) { - err && Meteor._debug("Error invoking Method '" + name + "':", - err.message); + callback = err => { + err && Meteor._debug("Error invoking Method '" + name + "'", err); }; } else { // On the server, make the function synchronous. Throw on // errors, return on success. - var future = new Future; + var future = new Future(); callback = future.resolver(); } } - // Send the RPC. Note that on the client, it is important that the - // stub have finished before we send the RPC, so that we know we have - // a complete list of which local documents the stub wrote. 
- var message = { - msg: 'method', - method: name, - params: args, - id: methodId() - }; // Send the randomSeed only if we used it if (randomSeed !== null) { @@ -940,7 +743,7 @@ _.extend(Connection.prototype, { } var methodInvoker = new MethodInvoker({ - methodId: methodId(), + methodId, callback: callback, connection: self, onResultReceived: options.onResultReceived, @@ -951,20 +754,26 @@ _.extend(Connection.prototype, { if (options.wait) { // It's a wait method! Wait methods go in their own block. - self._outstandingMethodBlocks.push( - {wait: true, methods: [methodInvoker]}); + self._outstandingMethodBlocks.push({ + wait: true, + methods: [methodInvoker] + }); } else { // Not a wait method. Start a new block if the previous block was a wait // block, and add it to the last block of methods. - if (_.isEmpty(self._outstandingMethodBlocks) || - _.last(self._outstandingMethodBlocks).wait) - self._outstandingMethodBlocks.push({wait: false, methods: []}); - _.last(self._outstandingMethodBlocks).methods.push(methodInvoker); + if (isEmpty(self._outstandingMethodBlocks) || + last(self._outstandingMethodBlocks).wait) { + self._outstandingMethodBlocks.push({ + wait: false, + methods: [], + }); + } + + last(self._outstandingMethodBlocks).methods.push(methodInvoker); } // If we added it to the first block, send it out now. - if (self._outstandingMethodBlocks.length === 1) - methodInvoker.sendMessage(); + if (self._outstandingMethodBlocks.length === 1) methodInvoker.sendMessage(); // If we're using the default callback on the server, // block waiting for the result. @@ -972,38 +781,44 @@ _.extend(Connection.prototype, { return future.wait(); } return options.returnStubValue ? stubReturnValue : undefined; - }, + } // Before calling a method stub, prepare all stores to track changes and allow // _retrieveAndStoreOriginals to get the original versions of changed // documents. 
- _saveOriginals: function () { - var self = this; - if (!self._waitingForQuiescence()) - self._flushBufferedWrites(); - _.each(self._stores, function (s) { - s.saveOriginals(); + _saveOriginals() { + if (! this._waitingForQuiescence()) { + this._flushBufferedWrites(); + } + + keys(this._stores).forEach(storeName => { + this._stores[storeName].saveOriginals(); }); - }, + } + // Retrieves the original versions of all documents modified by the stub for // method 'methodId' from all stores and saves them to _serverDocuments (keyed // by document) and _documentsWrittenByStub (keyed by method ID). - _retrieveAndStoreOriginals: function (methodId) { + _retrieveAndStoreOriginals(methodId) { var self = this; if (self._documentsWrittenByStub[methodId]) - throw new Error("Duplicate methodId in _retrieveAndStoreOriginals"); + throw new Error('Duplicate methodId in _retrieveAndStoreOriginals'); var docsWritten = []; - _.each(self._stores, function (s, collection) { - var originals = s.retrieveOriginals(); + + keys(self._stores).forEach(collection => { + var originals = self._stores[collection].retrieveOriginals(); // not all stores define retrieveOriginals - if (!originals) - return; - originals.forEach(function (doc, id) { - docsWritten.push({collection: collection, id: id}); - if (!_.has(self._serverDocuments, collection)) - self._serverDocuments[collection] = new MongoIDMap; - var serverDoc = self._serverDocuments[collection].setDefault(id, {}); + if (! originals) return; + originals.forEach((doc, id) => { + docsWritten.push({ collection, id }); + if (! hasOwn.call(self._serverDocuments, collection)) { + self._serverDocuments[collection] = new MongoIDMap(); + } + var serverDoc = self._serverDocuments[collection].setDefault( + id, + Object.create(null) + ); if (serverDoc.writtenByStubs) { // We're not the first stub to write this doc. Just add our method ID // to the record. @@ -1012,21 +827,21 @@ _.extend(Connection.prototype, { // First stub! 
Save the original value and our method ID. serverDoc.document = doc; serverDoc.flushCallbacks = []; - serverDoc.writtenByStubs = {}; + serverDoc.writtenByStubs = Object.create(null); serverDoc.writtenByStubs[methodId] = true; } }); }); - if (!_.isEmpty(docsWritten)) { + if (! isEmpty(docsWritten)) { self._documentsWrittenByStub[methodId] = docsWritten; } - }, + } // This is very much a private function we use to make the tests // take up fewer server resources after they complete. - _unsubscribeAll: function () { - var self = this; - _.each(_.clone(self._subscriptions), function (sub, id) { + _unsubscribeAll() { + keys(this._subscriptions).forEach(id => { + const sub = this._subscriptions[id]; // Avoid killing the autoupdate subscription so that developers // still get hot code pushes when writing tests. // @@ -1034,125 +849,120 @@ _.extend(Connection.prototype, { // but it doesn't seem worth it yet to have a special API for // subscriptions to preserve after unit tests. if (sub.name !== 'meteor_autoupdate_clientVersions') { - self._subscriptions[id].stop(); + sub.stop(); } }); - }, + } // Sends the DDP stringification of the given message object - _send: function (obj) { - var self = this; - self._stream.send(DDPCommon.stringifyDDP(obj)); - }, + _send(obj) { + this._stream.send(DDPCommon.stringifyDDP(obj)); + } // We detected via DDP-level heartbeats that we've lost the // connection. Unlike `disconnect` or `close`, a lost connection // will be automatically retried. - _lostConnection: function (error) { - var self = this; - self._stream._lostConnection(error); - }, + _lostConnection(error) { + this._stream._lostConnection(error); + } /** - * @summary Get the current connection status. A reactive data source. - * @locus Client * @memberOf Meteor * @importFromPackage meteor + * @alias Meteor.status + * @summary Get the current connection status. A reactive data source. 
+ * @locus Client */ - status: function (/*passthrough args*/) { - var self = this; - return self._stream.status.apply(self._stream, arguments); - }, + status(...args) { + return this._stream.status(...args); + } /** * @summary Force an immediate reconnection attempt if the client is not connected to the server. This method does nothing if the client is already connected. - * @locus Client * @memberOf Meteor * @importFromPackage meteor + * @alias Meteor.reconnect + * @locus Client */ - reconnect: function (/*passthrough args*/) { - var self = this; - return self._stream.reconnect.apply(self._stream, arguments); - }, + reconnect(...args) { + return this._stream.reconnect(...args); + } /** - * @summary Disconnect the client from the server. - * @locus Client * @memberOf Meteor * @importFromPackage meteor + * @alias Meteor.disconnect + * @summary Disconnect the client from the server. + * @locus Client */ - disconnect: function (/*passthrough args*/) { - var self = this; - return self._stream.disconnect.apply(self._stream, arguments); - }, + disconnect(...args) { + return this._stream.disconnect(...args); + } - close: function () { - var self = this; - return self._stream.disconnect({_permanent: true}); - }, + close() { + return this._stream.disconnect({ _permanent: true }); + } /// /// Reactive user system /// - userId: function () { - var self = this; - if (self._userIdDeps) - self._userIdDeps.depend(); - return self._userId; - }, + userId() { + if (this._userIdDeps) this._userIdDeps.depend(); + return this._userId; + } - setUserId: function (userId) { - var self = this; + setUserId(userId) { // Avoid invalidating dependents if setUserId is called with current value. 
- if (self._userId === userId) - return; - self._userId = userId; - if (self._userIdDeps) - self._userIdDeps.changed(); - }, + if (this._userId === userId) return; + this._userId = userId; + if (this._userIdDeps) this._userIdDeps.changed(); + } // Returns true if we are in a state after reconnect of waiting for subs to be // revived or early methods to finish their data, or we are waiting for a // "wait" method to finish. - _waitingForQuiescence: function () { - var self = this; - return (! _.isEmpty(self._subsBeingRevived) || - ! _.isEmpty(self._methodsBlockingQuiescence)); - }, + _waitingForQuiescence() { + return ( + ! isEmpty(this._subsBeingRevived) || + ! isEmpty(this._methodsBlockingQuiescence) + ); + } // Returns true if any method whose message has been sent to the server has // not yet invoked its user callback. - _anyMethodsAreOutstanding: function () { - var self = this; - return _.any(_.pluck(self._methodInvokers, 'sentMessage')); - }, + _anyMethodsAreOutstanding() { + const invokers = this._methodInvokers; + return keys(invokers).some(id => { + return invokers[id].sentMessage; + }); + } - _livedata_connected: function (msg) { + _livedata_connected(msg) { var self = this; if (self._version !== 'pre1' && self._heartbeatInterval !== 0) { self._heartbeat = new DDPCommon.Heartbeat({ heartbeatInterval: self._heartbeatInterval, heartbeatTimeout: self._heartbeatTimeout, - onTimeout: function () { + onTimeout() { self._lostConnection( - new DDP.ConnectionError("DDP heartbeat timed out")); + new DDP.ConnectionError('DDP heartbeat timed out') + ); }, - sendPing: function () { - self._send({msg: 'ping'}); + sendPing() { + self._send({ msg: 'ping' }); } }); self._heartbeat.start(); } // If this is a reconnect, we'll have to reset all stores. 
- if (self._lastSessionId) - self._resetStores = true; + if (self._lastSessionId) self._resetStores = true; - if (typeof (msg.session) === "string") { - var reconnectedToPreviousSession = (self._lastSessionId === msg.session); + if (typeof msg.session === 'string') { + var reconnectedToPreviousSession = self._lastSessionId === msg.session; self._lastSessionId = msg.session; } @@ -1169,13 +979,13 @@ _.extend(Connection.prototype, { // Forget about messages we were buffering for unknown collections. They'll // be resent if still relevant. - self._updatesForUnknownStores = {}; + self._updatesForUnknownStores = Object.create(null); if (self._resetStores) { // Forget about the effects of stubs. We'll be resetting all collections // anyway. - self._documentsWrittenByStub = {}; - self._serverDocuments = {}; + self._documentsWrittenByStub = Object.create(null); + self._serverDocuments = Object.create(null); } // Clear _afterUpdateCallbacks. @@ -1185,10 +995,11 @@ _.extend(Connection.prototype, { // ready callback) as needing to be revived. // XXX We should also block reconnect quiescence until unnamed subscriptions // (eg, autopublish) are done re-publishing to avoid flicker! - self._subsBeingRevived = {}; - _.each(self._subscriptions, function (sub, id) { - if (sub.ready) + self._subsBeingRevived = Object.create(null); + keys(self._subscriptions).forEach(id => { + if (self._subscriptions[id].ready) { self._subsBeingRevived[id] = true; + } }); // Arrange for "half-finished" methods to have their callbacks run, and @@ -1198,15 +1009,19 @@ _.extend(Connection.prototype, { // Start by clearing _methodsBlockingQuiescence: methods sent before // reconnect don't matter, and any "wait" methods sent on the new connection // that we drop here will be restored by the loop below. 
- self._methodsBlockingQuiescence = {}; + self._methodsBlockingQuiescence = Object.create(null); if (self._resetStores) { - _.each(self._methodInvokers, function (invoker) { + const invokers = self._methodInvokers; + keys(invokers).forEach(id => { + const invoker = invokers[id]; if (invoker.gotResult()) { // This method already got its result, but it didn't call its callback // because its data didn't become visible. We did not resend the // method RPC. We'll call its callback when we get a full quiesce, // since that's as close as we'll get to "data must be visible". - self._afterUpdateCallbacks.push(_.bind(invoker.dataVisible, invoker)); + self._afterUpdateCallbacks.push( + (...args) => invoker.dataVisible(...args) + ); } else if (invoker.sentMessage) { // This method has been sent on this connection (maybe as a resend // from the last connection, maybe from onReconnect, maybe just very @@ -1226,9 +1041,10 @@ _.extend(Connection.prototype, { // If we're not waiting on any methods or subs, we can reset the stores and // call the callbacks immediately. - if (!self._waitingForQuiescence()) { + if (! self._waitingForQuiescence()) { if (self._resetStores) { - _.each(self._stores, function (s) { + keys(self._stores).forEach(storeName => { + const s = self._stores[storeName]; s.beginUpdate(0, true); s.endUpdate(); }); @@ -1236,42 +1052,69 @@ _.extend(Connection.prototype, { } self._runAfterUpdateCallbacks(); } - }, + } + _processOneDataMessage(msg, updates) { + const messageType = msg.msg; - _processOneDataMessage: function (msg, updates) { - var self = this; - // Using underscore here so as not to need to capitalize. 
- self['_process_' + msg.msg](msg, updates); - }, + // msg is one of ['added', 'changed', 'removed', 'ready', 'updated'] + if (messageType === 'added') { + this._process_added(msg, updates); + } else if (messageType === 'changed') { + this._process_changed(msg, updates); + } else if (messageType === 'removed') { + this._process_removed(msg, updates); + } else if (messageType === 'ready') { + this._process_ready(msg, updates); + } else if (messageType === 'updated') { + this._process_updated(msg, updates); + } else if (messageType === 'nosub') { + // ignore this + } else { + Meteor._debug('discarding unknown livedata data message type', msg); + } + } - - _livedata_data: function (msg) { + _livedata_data(msg) { var self = this; if (self._waitingForQuiescence()) { self._messagesBufferedUntilQuiescence.push(msg); - if (msg.msg === "nosub") + if (msg.msg === 'nosub') { delete self._subsBeingRevived[msg.id]; + } - _.each(msg.subs || [], function (subId) { - delete self._subsBeingRevived[subId]; - }); - _.each(msg.methods || [], function (methodId) { - delete self._methodsBlockingQuiescence[methodId]; - }); + if (msg.subs) { + msg.subs.forEach(subId => { + delete self._subsBeingRevived[subId]; + }); + } - if (self._waitingForQuiescence()) + if (msg.methods) { + msg.methods.forEach(methodId => { + delete self._methodsBlockingQuiescence[methodId]; + }); + } + + if (self._waitingForQuiescence()) { return; + } // No methods or subs are blocking quiescence! // We'll now process and all of our buffered messages, reset all stores, // and apply them all at once. 
- _.each(self._messagesBufferedUntilQuiescence, function (bufferedMsg) { - self._processOneDataMessage(bufferedMsg, self._bufferedWrites); + + const bufferedMessages = self._messagesBufferedUntilQuiescence; + keys(bufferedMessages).forEach(id => { + self._processOneDataMessage( + bufferedMessages[id], + self._bufferedWrites + ); }); + self._messagesBufferedUntilQuiescence = []; + } else { self._processOneDataMessage(msg, self._bufferedWrites); } @@ -1279,16 +1122,20 @@ _.extend(Connection.prototype, { // Immediately flush writes when: // 1. Buffering is disabled. Or; // 2. any non-(added/changed/removed) message arrives. - var standardWrite = _.include(['added', 'changed', 'removed'], msg.msg); - if (self._bufferedWritesInterval === 0 || !standardWrite) { + var standardWrite = + msg.msg === "added" || + msg.msg === "changed" || + msg.msg === "removed"; + + if (self._bufferedWritesInterval === 0 || ! standardWrite) { self._flushBufferedWrites(); return; } if (self._bufferedWritesFlushAt === null) { - self._bufferedWritesFlushAt = new Date().valueOf() + self._bufferedWritesMaxAge; - } - else if (self._bufferedWritesFlushAt < new Date().valueOf()) { + self._bufferedWritesFlushAt = + new Date().valueOf() + self._bufferedWritesMaxAge; + } else if (self._bufferedWritesFlushAt < new Date().valueOf()) { self._flushBufferedWrites(); return; } @@ -1296,11 +1143,13 @@ _.extend(Connection.prototype, { if (self._bufferedWritesFlushHandle) { clearTimeout(self._bufferedWritesFlushHandle); } - self._bufferedWritesFlushHandle = setTimeout(self.__flushBufferedWrites, - self._bufferedWritesInterval); - }, + self._bufferedWritesFlushHandle = setTimeout( + self.__flushBufferedWrites, + self._bufferedWritesInterval + ); + } - _flushBufferedWrites: function () { + _flushBufferedWrites() { var self = this; if (self._bufferedWritesFlushHandle) { clearTimeout(self._bufferedWritesFlushHandle); @@ -1312,25 +1161,32 @@ _.extend(Connection.prototype, { // performWrites. 
As there's no guarantee that it // will exit cleanly. var writes = self._bufferedWrites; - self._bufferedWrites = {}; + self._bufferedWrites = Object.create(null); self._performWrites(writes); - }, + } - _performWrites: function(updates){ + _performWrites(updates) { var self = this; - if (self._resetStores || !_.isEmpty(updates)) { + if (self._resetStores || ! isEmpty(updates)) { // Begin a transactional update of each store. - _.each(self._stores, function (s, storeName) { - s.beginUpdate(_.has(updates, storeName) ? updates[storeName].length : 0, - self._resetStores); + + keys(self._stores).forEach(storeName => { + self._stores[storeName].beginUpdate( + hasOwn.call(updates, storeName) + ? updates[storeName].length + : 0, + self._resetStores + ); }); + self._resetStores = false; - _.each(updates, function (updateMessages, storeName) { + keys(updates).forEach(storeName => { + const updateMessages = updates[storeName]; var store = self._stores[storeName]; if (store) { - _.each(updateMessages, function (updateMessage) { + updateMessages.forEach(updateMessage => { store.update(updateMessage); }); } else { @@ -1339,57 +1195,62 @@ _.extend(Connection.prototype, { // XXX memory use will grow without bound if you forget to // create a collection or just don't care about it... going // to have to do something about that. - if (!_.has(self._updatesForUnknownStores, storeName)) - self._updatesForUnknownStores[storeName] = []; - Array.prototype.push.apply(self._updatesForUnknownStores[storeName], - updateMessages); + const updates = self._updatesForUnknownStores; + + if (! hasOwn.call(updates, storeName)) { + updates[storeName] = []; + } + + updates[storeName].push(...updateMessages); } }); // End update transaction. 
- _.each(self._stores, function (s) { s.endUpdate(); }); + keys(self._stores).forEach(storeName => { + self._stores[storeName].endUpdate(); + }); } self._runAfterUpdateCallbacks(); - }, + } // Call any callbacks deferred with _runWhenAllServerDocsAreFlushed whose // relevant docs have been flushed, as well as dataVisible callbacks at // reconnect-quiescence time. - _runAfterUpdateCallbacks: function () { + _runAfterUpdateCallbacks() { var self = this; var callbacks = self._afterUpdateCallbacks; self._afterUpdateCallbacks = []; - _.each(callbacks, function (c) { + callbacks.forEach(c => { c(); }); - }, + } - _pushUpdate: function (updates, collection, msg) { - var self = this; - if (!_.has(updates, collection)) { + _pushUpdate(updates, collection, msg) { + if (! hasOwn.call(updates, collection)) { updates[collection] = []; } updates[collection].push(msg); - }, + } - _getServerDoc: function (collection, id) { + _getServerDoc(collection, id) { var self = this; - if (!_.has(self._serverDocuments, collection)) + if (! hasOwn.call(self._serverDocuments, collection)) { return null; + } var serverDocsForCollection = self._serverDocuments[collection]; return serverDocsForCollection.get(id) || null; - }, + } - _process_added: function (msg, updates) { + _process_added(msg, updates) { var self = this; var id = MongoID.idParse(msg.id); var serverDoc = self._getServerDoc(msg.collection, id); if (serverDoc) { // Some outstanding stub wrote here. - var isExisting = (serverDoc.document !== undefined); + var isExisting = serverDoc.document !== undefined; - serverDoc.document = msg.fields || {}; + serverDoc.document = msg.fields || Object.create(null); serverDoc.document._id = id; if (self._resetStores) { @@ -1398,39 +1259,36 @@ _.extend(Connection.prototype, { // reset. Use current version of the document for this update, so // that stub-written values are preserved. 
var currentDoc = self._stores[msg.collection].getDoc(msg.id); - if (currentDoc !== undefined) - msg.fields = currentDoc; + if (currentDoc !== undefined) msg.fields = currentDoc; self._pushUpdate(updates, msg.collection, msg); } else if (isExisting) { - throw new Error("Server sent add for existing id: " + msg.id); + throw new Error('Server sent add for existing id: ' + msg.id); } } else { self._pushUpdate(updates, msg.collection, msg); } - }, + } - _process_changed: function (msg, updates) { + _process_changed(msg, updates) { var self = this; - var serverDoc = self._getServerDoc( - msg.collection, MongoID.idParse(msg.id)); + var serverDoc = self._getServerDoc(msg.collection, MongoID.idParse(msg.id)); if (serverDoc) { if (serverDoc.document === undefined) - throw new Error("Server sent changed for nonexisting id: " + msg.id); + throw new Error('Server sent changed for nonexisting id: ' + msg.id); DiffSequence.applyChanges(serverDoc.document, msg.fields); } else { self._pushUpdate(updates, msg.collection, msg); } - }, + } - _process_removed: function (msg, updates) { + _process_removed(msg, updates) { var self = this; - var serverDoc = self._getServerDoc( - msg.collection, MongoID.idParse(msg.id)); + var serverDoc = self._getServerDoc(msg.collection, MongoID.idParse(msg.id)); if (serverDoc) { // Some outstanding stub wrote here. if (serverDoc.document === undefined) - throw new Error("Server sent removed for nonexisting id:" + msg.id); + throw new Error('Server sent removed for nonexisting id:' + msg.id); serverDoc.document = undefined; } else { self._pushUpdate(updates, msg.collection, { @@ -1439,21 +1297,30 @@ _.extend(Connection.prototype, { id: msg.id }); } - }, + } - _process_updated: function (msg, updates) { + _process_updated(msg, updates) { var self = this; // Process "method done" messages. 
- _.each(msg.methods, function (methodId) { - _.each(self._documentsWrittenByStub[methodId], function (written) { - var serverDoc = self._getServerDoc(written.collection, written.id); - if (!serverDoc) - throw new Error("Lost serverDoc for " + JSON.stringify(written)); - if (!serverDoc.writtenByStubs[methodId]) - throw new Error("Doc " + JSON.stringify(written) + - " not written by method " + methodId); + + msg.methods.forEach(methodId => { + const docs = self._documentsWrittenByStub[methodId]; + keys(docs).forEach(id => { + const written = docs[id]; + const serverDoc = self._getServerDoc(written.collection, written.id); + if (! serverDoc) { + throw new Error('Lost serverDoc for ' + JSON.stringify(written)); + } + if (! serverDoc.writtenByStubs[methodId]) { + throw new Error( + 'Doc ' + + JSON.stringify(written) + + ' not written by method ' + + methodId + ); + } delete serverDoc.writtenByStubs[methodId]; - if (_.isEmpty(serverDoc.writtenByStubs)) { + if (isEmpty(serverDoc.writtenByStubs)) { // All methods whose stubs wrote this method have completed! We can // now copy the saved document to the database (reverting the stub's // change if the server did not write to this object, or applying the @@ -1468,7 +1335,8 @@ _.extend(Connection.prototype, { replace: serverDoc.document }); // Call all flush callbacks. - _.each(serverDoc.flushCallbacks, function (c) { + + serverDoc.flushCallbacks.forEach(c => { c(); }); @@ -1482,45 +1350,47 @@ _.extend(Connection.prototype, { // We want to call the data-written callback, but we can't do so until all // currently buffered messages are flushed. - var callbackInvoker = self._methodInvokers[methodId]; - if (!callbackInvoker) - throw new Error("No callback invoker for method " + methodId); - self._runWhenAllServerDocsAreFlushed( - _.bind(callbackInvoker.dataVisible, callbackInvoker)); - }); - }, + const callbackInvoker = self._methodInvokers[methodId]; + if (! 
callbackInvoker) { + throw new Error('No callback invoker for method ' + methodId); + } - _process_ready: function (msg, updates) { + self._runWhenAllServerDocsAreFlushed( + (...args) => callbackInvoker.dataVisible(...args) + ); + }); + } + + _process_ready(msg, updates) { var self = this; // Process "sub ready" messages. "sub ready" messages don't take effect // until all current server documents have been flushed to the local // database. We can use a write fence to implement this. - _.each(msg.subs, function (subId) { - self._runWhenAllServerDocsAreFlushed(function () { + + msg.subs.forEach(subId => { + self._runWhenAllServerDocsAreFlushed(() => { var subRecord = self._subscriptions[subId]; // Did we already unsubscribe? - if (!subRecord) - return; + if (!subRecord) return; // Did we already receive a ready message? (Oops!) - if (subRecord.ready) - return; + if (subRecord.ready) return; subRecord.ready = true; subRecord.readyCallback && subRecord.readyCallback(); subRecord.readyDeps.changed(); }); }); - }, + } // Ensures that "f" will be called after all documents currently in // _serverDocuments have been written to the local cache. f will not be called // if the connection is lost before then! - _runWhenAllServerDocsAreFlushed: function (f) { + _runWhenAllServerDocsAreFlushed(f) { var self = this; - var runFAfterUpdates = function () { + var runFAfterUpdates = () => { self._afterUpdateCallbacks.push(f); }; var unflushedServerDocCount = 0; - var onServerDocFlush = function () { + var onServerDocFlush = () => { --unflushedServerDocCount; if (unflushedServerDocCount === 0) { // This was the last doc to flush! 
Arrange to run f after the updates @@ -1528,13 +1398,15 @@ _.extend(Connection.prototype, { runFAfterUpdates(); } }; - _.each(self._serverDocuments, function (collectionDocs) { - collectionDocs.forEach(function (serverDoc) { - var writtenByStubForAMethodWithSentMessage = _.any( - serverDoc.writtenByStubs, function (dummy, methodId) { + + keys(self._serverDocuments).forEach(collection => { + self._serverDocuments[collection].forEach(serverDoc => { + const writtenByStubForAMethodWithSentMessage = + keys(serverDoc.writtenByStubs).some(methodId => { var invoker = self._methodInvokers[methodId]; return invoker && invoker.sentMessage; }); + if (writtenByStubForAMethodWithSentMessage) { ++unflushedServerDocCount; serverDoc.flushCallbacks.push(onServerDocFlush); @@ -1546,9 +1418,9 @@ _.extend(Connection.prototype, { // round of updates is applied! runFAfterUpdates(); } - }, + } - _livedata_nosub: function (msg) { + _livedata_nosub(msg) { var self = this; // First pass it through _livedata_data, which only uses it to help get @@ -1559,8 +1431,9 @@ _.extend(Connection.prototype, { // buffering-until-quiescence. // we weren't subbed anyway, or we initiated the unsub. - if (!_.has(self._subscriptions, msg.id)) + if (! 
hasOwn.call(self._subscriptions, msg.id)) { return; + } // XXX COMPAT WITH 1.0.3.1 #errorCallback var errorCallback = self._subscriptions[msg.id].errorCallback; @@ -1568,10 +1441,17 @@ _.extend(Connection.prototype, { self._subscriptions[msg.id].remove(); - var meteorErrorFromMsg = function (msgArg) { - return msgArg && msgArg.error && new Meteor.Error( - msgArg.error.error, msgArg.error.reason, msgArg.error.details); - } + var meteorErrorFromMsg = msgArg => { + return ( + msgArg && + msgArg.error && + new Meteor.Error( + msgArg.error.error, + msgArg.error.reason, + msgArg.error.details + ) + ); + }; // XXX COMPAT WITH 1.0.3.1 #errorCallback if (errorCallback && msg.error) { @@ -1581,38 +1461,29 @@ _.extend(Connection.prototype, { if (stopCallback) { stopCallback(meteorErrorFromMsg(msg)); } - }, + } - _process_nosub: function () { - // This is called as part of the "buffer until quiescence" process, but - // nosub's effect is always immediate. It only goes in the buffer at all - // because it's possible for a nosub to be the thing that triggers - // quiescence, if we were waiting for a sub to be revived and it dies - // instead. - }, - - _livedata_result: function (msg) { + _livedata_result(msg) { // id, result or error. error has error (code), reason, details var self = this; // Lets make sure there are no buffered writes before returning result. - if (!_.isEmpty(self._bufferedWrites)) { + if (! 
isEmpty(self._bufferedWrites)) { self._flushBufferedWrites(); } // find the outstanding request // should be O(1) in nearly all realistic use cases - if (_.isEmpty(self._outstandingMethodBlocks)) { - Meteor._debug("Received method result but no methods outstanding"); + if (isEmpty(self._outstandingMethodBlocks)) { + Meteor._debug('Received method result but no methods outstanding'); return; } var currentMethodBlock = self._outstandingMethodBlocks[0].methods; var m; for (var i = 0; i < currentMethodBlock.length; i++) { m = currentMethodBlock[i]; - if (m.methodId === msg.id) - break; + if (m.methodId === msg.id) break; } if (!m) { @@ -1625,78 +1496,80 @@ _.extend(Connection.prototype, { // _outstandingMethodFinished. currentMethodBlock.splice(i, 1); - if (_.has(msg, 'error')) { - m.receiveResult(new Meteor.Error( - msg.error.error, msg.error.reason, - msg.error.details)); + if (hasOwn.call(msg, 'error')) { + m.receiveResult( + new Meteor.Error(msg.error.error, msg.error.reason, msg.error.details) + ); } else { // msg.result may be undefined if the method didn't return a // value m.receiveResult(undefined, msg.result); } - }, + } // Called by MethodInvoker after a method's callback is invoked. If this was // the last outstanding method in the current block, runs the next block. If // there are no more methods, consider accepting a hot code push. - _outstandingMethodFinished: function () { + _outstandingMethodFinished() { var self = this; - if (self._anyMethodsAreOutstanding()) - return; + if (self._anyMethodsAreOutstanding()) return; // No methods are outstanding. This should mean that the first block of // methods is empty. (Or it might not exist, if this was a method that // half-finished before disconnect/reconnect.) - if (! _.isEmpty(self._outstandingMethodBlocks)) { + if (! isEmpty(self._outstandingMethodBlocks)) { var firstBlock = self._outstandingMethodBlocks.shift(); - if (! 
_.isEmpty(firstBlock.methods)) - throw new Error("No methods outstanding but nonempty block: " + - JSON.stringify(firstBlock)); + if (! isEmpty(firstBlock.methods)) + throw new Error( + 'No methods outstanding but nonempty block: ' + + JSON.stringify(firstBlock) + ); // Send the outstanding methods now in the first block. - if (!_.isEmpty(self._outstandingMethodBlocks)) + if (! isEmpty(self._outstandingMethodBlocks)) self._sendOutstandingMethods(); } // Maybe accept a hot code push. self._maybeMigrate(); - }, + } // Sends messages for all the methods in the first block in // _outstandingMethodBlocks. - _sendOutstandingMethods: function() { + _sendOutstandingMethods() { var self = this; - if (_.isEmpty(self._outstandingMethodBlocks)) + + if (isEmpty(self._outstandingMethodBlocks)) { return; - _.each(self._outstandingMethodBlocks[0].methods, function (m) { + } + + self._outstandingMethodBlocks[0].methods.forEach(m => { m.sendMessage(); }); - }, + } - _livedata_error: function (msg) { - Meteor._debug("Received error from server: ", msg.reason); - if (msg.offendingMessage) - Meteor._debug("For: ", msg.offendingMessage); - }, + _livedata_error(msg) { + Meteor._debug('Received error from server: ', msg.reason); + if (msg.offendingMessage) Meteor._debug('For: ', msg.offendingMessage); + } - _callOnReconnectAndSendAppropriateOutstandingMethods: function() { + _callOnReconnectAndSendAppropriateOutstandingMethods() { var self = this; var oldOutstandingMethodBlocks = self._outstandingMethodBlocks; self._outstandingMethodBlocks = []; self.onReconnect && self.onReconnect(); - DDP._reconnectHook.each(function (callback) { + DDP._reconnectHook.each(callback => { callback(self); return true; }); - if (_.isEmpty(oldOutstandingMethodBlocks)) - return; + if (isEmpty(oldOutstandingMethodBlocks)) return; // We have at least one block worth of old outstanding methods to try // again. First: did onReconnect actually send anything? 
If not, we just // restore all outstanding methods and run the first block. - if (_.isEmpty(self._outstandingMethodBlocks)) { + if (isEmpty(self._outstandingMethodBlocks)) { self._outstandingMethodBlocks = oldOutstandingMethodBlocks; self._sendOutstandingMethods(); return; @@ -1705,85 +1578,183 @@ _.extend(Connection.prototype, { // OK, there are blocks on both sides. Special case: merge the last block of // the reconnect methods with the first block of the original methods, if // neither of them are "wait" blocks. - if (!_.last(self._outstandingMethodBlocks).wait && - !oldOutstandingMethodBlocks[0].wait) { - _.each(oldOutstandingMethodBlocks[0].methods, function (m) { - _.last(self._outstandingMethodBlocks).methods.push(m); + if (! last(self._outstandingMethodBlocks).wait && + ! oldOutstandingMethodBlocks[0].wait) { + oldOutstandingMethodBlocks[0].methods.forEach(m => { + last(self._outstandingMethodBlocks).methods.push(m); // If this "last block" is also the first block, send the message. - if (self._outstandingMethodBlocks.length === 1) + if (self._outstandingMethodBlocks.length === 1) { m.sendMessage(); + } }); oldOutstandingMethodBlocks.shift(); } // Now add the rest of the original blocks on. - _.each(oldOutstandingMethodBlocks, function (block) { + oldOutstandingMethodBlocks.forEach(block => { self._outstandingMethodBlocks.push(block); }); - }, + } // We can accept a hot code push if there are no methods in flight. - _readyToMigrate: function() { - var self = this; - return _.isEmpty(self._methodInvokers); - }, + _readyToMigrate() { + return isEmpty(this._methodInvokers); + } // If we were blocking a migration, see if it's now possible to continue. // Call whenever the set of outstanding/blocked methods shrinks. 
- _maybeMigrate: function () { + _maybeMigrate() { var self = this; if (self._retryMigrate && self._readyToMigrate()) { self._retryMigrate(); self._retryMigrate = null; } } -}); -LivedataTest.Connection = Connection; + onMessage(raw_msg) { + try { + var msg = DDPCommon.parseDDP(raw_msg); + } catch (e) { + Meteor._debug('Exception while parsing DDP', e); + return; + } -// @param url {String} URL to Meteor app, -// e.g.: -// "subdomain.meteor.com", -// "http://subdomain.meteor.com", -// "/", -// "ddp+sockjs://ddp--****-foo.meteor.com/sockjs" + // Any message counts as receiving a pong, as it demonstrates that + // the server is still alive. + if (this._heartbeat) { + this._heartbeat.messageReceived(); + } -/** - * @summary Connect to the server of a different Meteor application to subscribe to its document sets and invoke its remote methods. - * @locus Anywhere - * @param {String} url The URL of another Meteor application. - */ -DDP.connect = function (url, options) { - var ret = new Connection(url, options); - allConnections.push(ret); // hack. see below. - return ret; -}; + if (msg === null || !msg.msg) { + // XXX COMPAT WITH 0.6.6. ignore the old welcome message for back + // compat. Remove this 'if' once the server stops sending welcome + // messages (stream_server.js). 
+ if (!(msg && msg.server_id)) + Meteor._debug('discarding invalid livedata message', msg); + return; + } -DDP._reconnectHook = new Hook({ bindEnvironment: false }); + if (msg.msg === 'connected') { + this._version = this._versionSuggestion; + this._livedata_connected(msg); + this.options.onConnected(); + } else if (msg.msg === 'failed') { + if (this._supportedDDPVersions.indexOf(msg.version) >= 0) { + this._versionSuggestion = msg.version; + this._stream.reconnect({ _force: true }); + } else { + var description = + 'DDP version negotiation failed; server requested version ' + + msg.version; + this._stream.disconnect({ _permanent: true, _error: description }); + this.options.onDDPVersionNegotiationFailure(description); + } + } else if (msg.msg === 'ping' && this.options.respondToPings) { + this._send({ msg: 'pong', id: msg.id }); + } else if (msg.msg === 'pong') { + // noop, as we assume everything's a pong + } else if ( + ['added', 'changed', 'removed', 'ready', 'updated'].includes(msg.msg) + ) { + this._livedata_data(msg); + } else if (msg.msg === 'nosub') { + this._livedata_nosub(msg); + } else if (msg.msg === 'result') { + this._livedata_result(msg); + } else if (msg.msg === 'error') { + this._livedata_error(msg); + } else { + Meteor._debug('discarding unknown livedata message type', msg); + } + } -/** - * @summary Register a function to call as the first step of - * reconnecting. This function can call methods which will be executed before - * any other outstanding methods. For example, this can be used to re-establish - * the appropriate authentication context on the connection. - * @locus Anywhere - * @param {Function} callback The function to call. It will be called with a - * single argument, the [connection object](#ddp_connect) that is reconnecting. - */ -DDP.onReconnect = function (callback) { - return DDP._reconnectHook.register(callback); -}; + onReset() { + // Send a connect message at the beginning of the stream. 
+ // NOTE: reset is called even on the first connection, so this is + // the only place we send this message. + var msg = { msg: 'connect' }; + if (this._lastSessionId) msg.session = this._lastSessionId; + msg.version = this._versionSuggestion || this._supportedDDPVersions[0]; + this._versionSuggestion = msg.version; + msg.support = this._supportedDDPVersions; + this._send(msg); -// Hack for `spiderable` package: a way to see if the page is done -// loading all the data it needs. -// -allConnections = []; -DDP._allSubscriptionsReady = function () { - return _.all(allConnections, function (conn) { - return _.all(conn._subscriptions, function (sub) { - return sub.ready; + // Mark non-retry calls as failed. This has to be done early as getting these methods out of the + // current block is pretty important to making sure that quiescence is properly calculated, as + // well as possibly moving on to another useful block. + + // Only bother testing if there is an outstandingMethodBlock (there might not be, especially if + // we are connecting for the first time. + if (this._outstandingMethodBlocks.length > 0) { + // If there is an outstanding method block, we only care about the first one as that is the + // one that could have already sent messages with no response, that are not allowed to retry. + const currentMethodBlock = this._outstandingMethodBlocks[0].methods; + this._outstandingMethodBlocks[0].methods = currentMethodBlock.filter( + methodInvoker => { + // Methods with 'noRetry' option set are not allowed to re-send after + // recovering dropped connection. + if (methodInvoker.sentMessage && methodInvoker.noRetry) { + // Make sure that the method is told that it failed. + methodInvoker.receiveResult( + new Meteor.Error( + 'invocation-failed', + 'Method invocation might have failed due to dropped connection. ' + + 'Failing because `noRetry` option was passed to Meteor.apply.' + ) + ); + } + + // Only keep a method if it wasn't sent or it's allowed to retry. 
+ // This may leave the block empty, but we don't move on to the next + // block until the callback has been delivered, in _outstandingMethodFinished. + return !(methodInvoker.sentMessage && methodInvoker.noRetry); + } + ); + } + + // Now, to minimize setup latency, go ahead and blast out all of + // our pending methods ands subscriptions before we've even taken + // the necessary RTT to know if we successfully reconnected. (1) + // They're supposed to be idempotent, and where they are not, + // they can block retry in apply; (2) even if we did reconnect, + // we're not sure what messages might have gotten lost + // (in either direction) since we were disconnected (TCP being + // sloppy about that.) + + // If the current block of methods all got their results (but didn't all get + // their data visible), discard the empty block now. + if ( + this._outstandingMethodBlocks.length > 0 && + this._outstandingMethodBlocks[0].methods.length === 0 + ) { + this._outstandingMethodBlocks.shift(); + } + + // Mark all messages as unsent, they have not yet been sent on this + // connection. + keys(this._methodInvokers).forEach(id => { + this._methodInvokers[id].sentMessage = false; }); - }); -}; + + // If an `onReconnect` handler is set, call it first. Go through + // some hoops to ensure that methods that are called from within + // `onReconnect` get executed _before_ ones that were originally + // outstanding (since `onReconnect` is used to re-establish auth + // certificates) + this._callOnReconnectAndSendAppropriateOutstandingMethods(); + + // add new subscriptions at the end. this way they take effect after + // the handlers and we don't see flicker. 
+ keys(this._subscriptions).forEach(id => { + const sub = this._subscriptions[id]; + this._send({ + msg: 'sub', + id: id, + name: sub.name, + params: sub.params + }); + }); + } +} diff --git a/packages/ddp-client/common/namespace.js b/packages/ddp-client/common/namespace.js new file mode 100644 index 0000000000..60e79c8543 --- /dev/null +++ b/packages/ddp-client/common/namespace.js @@ -0,0 +1,93 @@ +import { DDPCommon } from 'meteor/ddp-common'; +import { Meteor } from 'meteor/meteor'; +import { keys } from "meteor/ddp-common/utils.js"; + +import { Connection } from './livedata_connection.js'; + +// This array allows the `_allSubscriptionsReady` method below, which +// is used by the `spiderable` package, to keep track of whether all +// data is ready. +const allConnections = []; + +/** + * @namespace DDP + * @summary Namespace for DDP-related methods/classes. + */ +export const DDP = {}; + +// This is private but it's used in a few places. accounts-base uses +// it to get the current user. Meteor.setTimeout and friends clear +// it. We can probably find a better way to factor this. +DDP._CurrentMethodInvocation = new Meteor.EnvironmentVariable(); +DDP._CurrentPublicationInvocation = new Meteor.EnvironmentVariable(); + +// XXX: Keep DDP._CurrentInvocation for backwards-compatibility. +DDP._CurrentInvocation = DDP._CurrentMethodInvocation; + +// This is passed into a weird `makeErrorType` function that expects its thing +// to be a constructor +function connectionErrorConstructor(message) { + this.message = message; +} + +DDP.ConnectionError = Meteor.makeErrorType( + 'DDP.ConnectionError', + connectionErrorConstructor +); + +DDP.ForcedReconnectError = Meteor.makeErrorType( + 'DDP.ForcedReconnectError', + () => {} +); + +// Returns the named sequence of pseudo-random values. +// The scope will be DDP._CurrentMethodInvocation.get(), so the stream will produce +// consistent values for method calls on the client and server. 
+DDP.randomStream = name => { + var scope = DDP._CurrentMethodInvocation.get(); + return DDPCommon.RandomStream.get(scope, name); +}; + +// @param url {String} URL to Meteor app, +// e.g.: +// "subdomain.meteor.com", +// "http://subdomain.meteor.com", +// "/", +// "ddp+sockjs://ddp--****-foo.meteor.com/sockjs" + +/** + * @summary Connect to the server of a different Meteor application to subscribe to its document sets and invoke its remote methods. + * @locus Anywhere + * @param {String} url The URL of another Meteor application. + */ +DDP.connect = (url, options) => { + var ret = new Connection(url, options); + allConnections.push(ret); // hack. see below. + return ret; +}; + +DDP._reconnectHook = new Hook({ bindEnvironment: false }); + +/** + * @summary Register a function to call as the first step of + * reconnecting. This function can call methods which will be executed before + * any other outstanding methods. For example, this can be used to re-establish + * the appropriate authentication context on the connection. + * @locus Anywhere + * @param {Function} callback The function to call. It will be called with a + * single argument, the [connection object](#ddp_connect) that is reconnecting. + */ +DDP.onReconnect = callback => { + return DDP._reconnectHook.register(callback); +}; + +// Hack for `spiderable` package: a way to see if the page is done +// loading all the data it needs. 
+// +DDP._allSubscriptionsReady = () => { + return allConnections.every(conn => { + return keys(conn._subscriptions).every(id => { + return conn._subscriptions[id].ready; + }); + }); +}; diff --git a/packages/ddp-client/id_map.js b/packages/ddp-client/id_map.js deleted file mode 100644 index abefefc5e4..0000000000 --- a/packages/ddp-client/id_map.js +++ /dev/null @@ -1,8 +0,0 @@ -export class MongoIDMap extends IdMap { - constructor() { - super( - MongoID.idStringify, - MongoID.idParse, - ); - } -} diff --git a/packages/ddp-client/livedata_common.js b/packages/ddp-client/livedata_common.js deleted file mode 100644 index 5e50aa0b23..0000000000 --- a/packages/ddp-client/livedata_common.js +++ /dev/null @@ -1,12 +0,0 @@ -import { DDP, LivedataTest } from "./namespace.js"; - -LivedataTest.SUPPORTED_DDP_VERSIONS = DDPCommon.SUPPORTED_DDP_VERSIONS; - -// This is private but it's used in a few places. accounts-base uses -// it to get the current user. Meteor.setTimeout and friends clear -// it. We can probably find a better way to factor this. -DDP._CurrentMethodInvocation = new Meteor.EnvironmentVariable; -DDP._CurrentPublicationInvocation = new Meteor.EnvironmentVariable; - -// XXX: Keep DDP._CurrentInvocation for backwards-compatibility. -DDP._CurrentInvocation = DDP._CurrentMethodInvocation; diff --git a/packages/ddp-client/livedata_connection_tests.js b/packages/ddp-client/livedata_connection_tests.js deleted file mode 100644 index 4b7c71e45e..0000000000 --- a/packages/ddp-client/livedata_connection_tests.js +++ /dev/null @@ -1,2121 +0,0 @@ -import lolex from 'lolex'; -import { DDP, LivedataTest } from "./namespace.js"; - -var newConnection = function (stream, options) { - // Some of these tests leave outstanding methods with no result yet - // returned. This should not block us from re-running tests when sources - // change. 
- return new LivedataTest.Connection(stream, _.extend({ - reloadWithOutstanding: true, - bufferedWritesInterval: 0 - }, options)); -}; - -var makeConnectMessage = function (session) { - var msg = { - msg: 'connect', - version: LivedataTest.SUPPORTED_DDP_VERSIONS[0], - support: LivedataTest.SUPPORTED_DDP_VERSIONS - }; - - if (session) - msg.session = session; - return msg; -}; - -// Tests that stream got a message that matches expected. -// Expected is normally an object, and allows a wildcard value of '*', -// which will then match any value. -// Returns the message (parsed as a JSON object if expected is an object); -// which is particularly handy if you want to extract a value that was -// matched as a wildcard. -var testGotMessage = function (test, stream, expected) { - if (stream.sent.length === 0) { - test.fail({error: 'no message received', expected: expected}); - return undefined; - } - - var got = stream.sent.shift(); - - if (typeof got === 'string' && typeof expected === 'object') - got = JSON.parse(got); - - // An expected value of '*' matches any value, and the matching value (or - // array of matching values, if there are multiple) is returned from this - // function. - if (typeof expected === 'object') { - var keysWithStarValues = []; - _.each(expected, function (v, k) { - if (v === '*') - keysWithStarValues.push(k); - }); - _.each(keysWithStarValues, function (k) { - expected[k] = got[k]; - }); - } - - test.equal(got, expected); - return got; -}; - -var startAndConnect = function(test, stream) { - stream.reset(); // initial connection start. 
- - testGotMessage(test, stream, makeConnectMessage()); - test.length(stream.sent, 0); - - stream.receive({msg: 'connected', session: SESSION_ID}); - test.length(stream.sent, 0); -}; - -var SESSION_ID = '17'; - -Tinytest.add("livedata stub - receive data", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - // data comes in for unknown collection. - var coll_name = Random.id(); - stream.receive({msg: 'added', collection: coll_name, id: '1234', - fields: {a: 1}}); - // break throught the black box and test internal state - test.length(conn._updatesForUnknownStores[coll_name], 1); - - // XXX: Test that the old signature of passing manager directly instead of in - // options works. - var coll = new Mongo.Collection(coll_name, conn); - - // queue has been emptied and doc is in db. - test.isUndefined(conn._updatesForUnknownStores[coll_name]); - test.equal(coll.find({}).fetch(), [{_id:'1234', a:1}]); - - // second message. applied directly to the db. - stream.receive({msg: 'changed', collection: coll_name, id: '1234', - fields: {a:2}}); - test.equal(coll.find({}).fetch(), [{_id:'1234', a:2}]); - test.isUndefined(conn._updatesForUnknownStores[coll_name]); -}); - -Tinytest.add("livedata stub - buffering data", function (test) { - // Install special setTimeout that allows tick-by-tick control in tests using sinonjs 'lolex' - // This needs to be before the connection is instantiated. 
- const clock = lolex.install(); - const tick = (timeout) => clock.tick(timeout); - - const stream = new StubStream(); - const conn = newConnection(stream, { - bufferedWritesInterval: 10, - bufferedWritesMaxAge: 40, - }); - - startAndConnect(test, stream); - - const coll_name = Random.id(); - const coll = new Mongo.Collection(coll_name, conn); - - const testDocCount = (count) => test.equal(coll.find({}).count(), count); - - const addDoc = () => { - stream.receive({ - msg: 'added', - collection: coll_name, - id: Random.id(), - fields: {} - }); - }; - - // Starting at 0 ticks. At this point we haven't advanced the fake clock at all. - - addDoc(); // 1st Doc - testDocCount(0); // No doc been recognized yet because it's buffered, waiting for more. - tick(6); // 6 total ticks - testDocCount(0); // Ensure that the doc still hasn't shown up, despite the clock moving forward. - tick(4) // 10 total ticks, 1st buffer interval - testDocCount(1); // No other docs have arrived, so we 'see' the 1st doc. - - addDoc(); // 2nd doc - tick(1); // 11 total ticks (1 since last flush) - testDocCount(1); // Again, second doc hasn't arrived because we're waiting for more... - tick(9); // 20 total ticks (10 ticks since last flush & the 2nd 10-tick interval) - testDocCount(2); // Now we're here and got the second document. - - // Add several docs, frequently enough that we buffer multiple times before the next flush. - addDoc(); // 3 docs - tick(6); // 26 ticks (6 since last flush) - addDoc(); // 4 docs - tick(6); // 32 ticks (12 since last flush) - addDoc(); // 5 docs - tick(6); // 38 ticks (18 since last flush) - addDoc(); // 6 docs - tick(6); // 44 ticks (24 since last flush) - addDoc(); // 7 docs - tick(9); // 53 ticks (33 since last flush) - addDoc(); // 8 docs - tick(9); // 62 ticks! (42 ticks since last flush, over max-age - next interval triggers flush) - testDocCount(2); // Still at 2 from before! 
(Just making sure) - tick(1); // Ok, 63 ticks (10 since last doc, so this should cause the flush of all the docs) - testDocCount(8); // See all the docs. - - // Put things back how they were. - clock.uninstall(); -}); - -Tinytest.add("livedata stub - subscribe", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - // subscribe - var callback_fired = false; - var sub = conn.subscribe('my_data', function () { - callback_fired = true; - }); - test.isFalse(callback_fired); - - test.length(stream.sent, 1); - var message = JSON.parse(stream.sent.shift()); - var id = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'my_data', params: []}); - - var reactivelyReady = false; - var autorunHandle = Tracker.autorun(function () { - reactivelyReady = sub.ready(); - }); - test.isFalse(reactivelyReady); - - // get the sub satisfied. callback fires. - stream.receive({msg: 'ready', 'subs': [id]}); - test.isTrue(callback_fired); - Tracker.flush(); - test.isTrue(reactivelyReady); - - // Unsubscribe. - sub.stop(); - test.length(stream.sent, 1); - message = JSON.parse(stream.sent.shift()); - test.equal(message, {msg: 'unsub', id: id}); - Tracker.flush(); - test.isFalse(reactivelyReady); - - // Resubscribe. 
- conn.subscribe('my_data'); - test.length(stream.sent, 1); - message = JSON.parse(stream.sent.shift()); - var id2 = message.id; - test.notEqual(id, id2); - delete message.id; - test.equal(message, {msg: 'sub', name: 'my_data', params: []}); -}); - - -Tinytest.add("livedata stub - reactive subscribe", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var rFoo = new ReactiveVar('foo1'); - var rBar = new ReactiveVar('bar1'); - - var onReadyCount = {}; - var onReady = function (tag) { - return function () { - if (_.has(onReadyCount, tag)) - ++onReadyCount[tag]; - else - onReadyCount[tag] = 1; - }; - }; - - // Subscribe to some subs. - var stopperHandle, completerHandle; - var autorunHandle = Tracker.autorun(function () { - conn.subscribe("foo", rFoo.get(), onReady(rFoo.get())); - conn.subscribe("bar", rBar.get(), onReady(rBar.get())); - completerHandle = conn.subscribe("completer", onReady("completer")); - stopperHandle = conn.subscribe("stopper", onReady("stopper")); - }); - - var completerReady; - var readyAutorunHandle = Tracker.autorun(function() { - completerReady = completerHandle.ready(); - }); - - // Check sub messages. (Assume they are sent in the order executed.) - test.length(stream.sent, 4); - var message = JSON.parse(stream.sent.shift()); - var idFoo1 = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo', params: ['foo1']}); - - message = JSON.parse(stream.sent.shift()); - var idBar1 = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'bar', params: ['bar1']}); - - message = JSON.parse(stream.sent.shift()); - var idCompleter = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'completer', params: []}); - - message = JSON.parse(stream.sent.shift()); - var idStopper = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'stopper', params: []}); - - // Haven't hit onReady yet. 
- test.equal(onReadyCount, {}); - Tracker.flush(); - test.isFalse(completerReady); - - // "completer" gets ready now. its callback should fire. - stream.receive({msg: 'ready', 'subs': [idCompleter]}); - test.equal(onReadyCount, {completer: 1}); - test.length(stream.sent, 0); - Tracker.flush(); - test.isTrue(completerReady); - - // Stop 'stopper'. - stopperHandle.stop(); - test.length(stream.sent, 1); - message = JSON.parse(stream.sent.shift()); - test.equal(message, {msg: 'unsub', id: idStopper}); - - test.equal(onReadyCount, {completer: 1}); - Tracker.flush(); - test.isTrue(completerReady); - - // Change the foo subscription and flush. We should sub to the new foo - // subscription, re-sub to the stopper subscription, and then unsub from the old - // foo subscription. The bar subscription should be unaffected. The completer - // subscription should call its new onReady callback, because we always - // call onReady for a given reactively-saved subscription. - // The completerHandle should have been reestablished to the ready handle. - rFoo.set("foo2"); - Tracker.flush(); - test.length(stream.sent, 3); - - message = JSON.parse(stream.sent.shift()); - var idFoo2 = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo', params: ['foo2']}); - - message = JSON.parse(stream.sent.shift()); - var idStopperAgain = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'stopper', params: []}); - - message = JSON.parse(stream.sent.shift()); - test.equal(message, {msg: 'unsub', id: idFoo1}); - - test.equal(onReadyCount, {completer: 2}); - test.isTrue(completerReady); - - // Ready the stopper and bar subs. 
Completing stopper should call only the - // onReady from the new subscription because they were separate subscriptions - // started at different times and the first one was explicitly torn down by - // the client; completing bar should call the onReady from the new - // subscription because we always call onReady for a given reactively-saved - // subscription. - stream.receive({msg: 'ready', 'subs': [idStopperAgain, idBar1]}); - test.equal(onReadyCount, {completer: 2, bar1: 1, stopper: 1}); - - // Shut down the autorun. This should unsub us from all current subs at flush - // time. - autorunHandle.stop(); - Tracker.flush(); - test.isFalse(completerReady); - readyAutorunHandle.stop(); - - test.length(stream.sent, 4); - // The order of unsubs here is not important. - var unsubMessages = _.map(stream.sent, JSON.parse); - stream.sent.length = 0; - test.equal(_.unique(_.pluck(unsubMessages, 'msg')), ['unsub']); - var actualIds = _.pluck(unsubMessages, 'id'); - var expectedIds = [idFoo2, idBar1, idCompleter, idStopperAgain]; - actualIds.sort(); - expectedIds.sort(); - test.equal(actualIds, expectedIds); -}); - -Tinytest.add("livedata stub - reactive subscribe handle correct", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var rFoo = new ReactiveVar('foo1'); - - // Subscribe to some subs. - var fooHandle, fooReady; - var autorunHandle = Tracker.autorun(function () { - fooHandle = conn.subscribe("foo", rFoo.get()); - Tracker.autorun(function() { - fooReady = fooHandle.ready(); - }); - }); - - var message = JSON.parse(stream.sent.shift()); - var idFoo1 = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo', params: ['foo1']}); - - // Not ready yet - Tracker.flush(); - test.isFalse(fooHandle.ready()); - test.isFalse(fooReady); - - // change the argument to foo. 
This will make a new handle, which isn't ready - // the ready autorun should invalidate, reading the new false value, and - // setting up a new dep which goes true soon - rFoo.set("foo2"); - Tracker.flush(); - test.length(stream.sent, 2); - - message = JSON.parse(stream.sent.shift()); - var idFoo2 = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo', params: ['foo2']}); - - message = JSON.parse(stream.sent.shift()); - test.equal(message, {msg: 'unsub', id: idFoo1}); - - Tracker.flush(); - test.isFalse(fooHandle.ready()); - test.isFalse(fooReady); - - // "foo" gets ready now. The handle should be ready and the autorun rerun - stream.receive({msg: 'ready', 'subs': [idFoo2]}); - test.length(stream.sent, 0); - Tracker.flush(); - test.isTrue(fooHandle.ready()); - test.isTrue(fooReady); - - // change the argument to foo. This will make a new handle, which isn't ready - // the ready autorun should invalidate, making fooReady false too - rFoo.set("foo3"); - Tracker.flush(); - test.length(stream.sent, 2); - - message = JSON.parse(stream.sent.shift()); - var idFoo3 = message.id; - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo', params: ['foo3']}); - - message = JSON.parse(stream.sent.shift()); - test.equal(message, {msg: 'unsub', id: idFoo2}); - - Tracker.flush(); - test.isFalse(fooHandle.ready()); - test.isFalse(fooReady); - - // "foo" gets ready again - stream.receive({msg: 'ready', 'subs': [idFoo3]}); - test.length(stream.sent, 0); - Tracker.flush(); - test.isTrue(fooHandle.ready()); - test.isTrue(fooReady); - - autorunHandle.stop(); -}); - -Tinytest.add("livedata stub - this", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - conn.methods({test_this: function() { - test.isTrue(this.isSimulation); - this.unblock(); // should be a no-op - }}); - - // should throw no exceptions - conn.call('test_this', _.identity); - // satisfy method, quiesce connection 
- var message = JSON.parse(stream.sent.shift()); - test.isUndefined(message.randomSeed); - test.equal(message, {msg: 'method', method: 'test_this', - params: [], id:message.id}); - test.length(stream.sent, 0); - - stream.receive({msg: 'result', id:message.id, result:null}); - stream.receive({msg: 'updated', 'methods': [message.id]}); - -}); - -if (Meteor.isClient) { - Tinytest.add("livedata stub - methods", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - - // setup method - conn.methods({do_something: function (x) { - coll.insert({value: x}); - }}); - - // setup observers - var counts = {added: 0, removed: 0, changed: 0, moved: 0}; - var handle = coll.find({}).observe( - { addedAt: function () { counts.added += 1; }, - removedAt: function () { counts.removed += 1; }, - changedAt: function () { counts.changed += 1; }, - movedTo: function () { counts.moved += 1; } - }); - - - // call method with results callback - var callback1Fired = false; - conn.call('do_something', 'friday!', function (err, res) { - test.isUndefined(err); - test.equal(res, '1234'); - callback1Fired = true; - }); - test.isFalse(callback1Fired); - - // observers saw the method run. - test.equal(counts, {added: 1, removed: 0, changed: 0, moved: 0}); - - // get response from server - var message = testGotMessage(test, stream, {msg: 'method', - method: 'do_something', - params: ['friday!'], - id: '*', - randomSeed: '*'}); - - test.equal(coll.find({}).count(), 1); - test.equal(coll.find({value: 'friday!'}).count(), 1); - var docId = coll.findOne({value: 'friday!'})._id; - - // results does not yet result in callback, because data is not - // ready. 
- stream.receive({msg: 'result', id:message.id, result: "1234"}); - test.isFalse(callback1Fired); - - // result message doesn't affect data - test.equal(coll.find({}).count(), 1); - test.equal(coll.find({value: 'friday!'}).count(), 1); - test.equal(counts, {added: 1, removed: 0, changed: 0, moved: 0}); - - // data methods do not show up (not quiescent yet) - stream.receive({msg: 'added', collection: collName, id: MongoID.idStringify(docId), - fields: {value: 'tuesday'}}); - test.equal(coll.find({}).count(), 1); - test.equal(coll.find({value: 'friday!'}).count(), 1); - test.equal(counts, {added: 1, removed: 0, changed: 0, moved: 0}); - - // send another methods (unknown on client) - var callback2Fired = false; - conn.call('do_something_else', 'monday', function (err, res) { - callback2Fired = true; - }); - test.isFalse(callback1Fired); - test.isFalse(callback2Fired); - - // test we still send a method request to server - var message2 = JSON.parse(stream.sent.shift()); - test.isUndefined(message2.randomSeed); - test.equal(message2, {msg: 'method', method: 'do_something_else', - params: ['monday'], id: message2.id}); - - // get the first data satisfied message. changes are applied to database even - // though another method is outstanding, because the other method didn't have - // a stub. and its callback is called. - stream.receive({msg: 'updated', 'methods': [message.id]}); - test.isTrue(callback1Fired); - test.isFalse(callback2Fired); - - test.equal(coll.find({}).count(), 1); - test.equal(coll.find({value: 'tuesday'}).count(), 1); - test.equal(counts, {added: 1, removed: 0, changed: 1, moved: 0}); - - // second result - stream.receive({msg: 'result', id:message2.id, result:"bupkis"}); - test.isFalse(callback2Fired); - - // get second satisfied; no new changes are applied. 
- stream.receive({msg: 'updated', 'methods': [message2.id]}); - test.isTrue(callback2Fired); - - test.equal(coll.find({}).count(), 1); - test.equal(coll.find({value: 'tuesday', _id: docId}).count(), 1); - test.equal(counts, {added: 1, removed: 0, changed: 1, moved: 0}); - - handle.stop(); - }); -} - -Tinytest.add("livedata stub - mutating method args", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - conn.methods({mutateArgs: function (arg) { - arg.foo = 42; - }}); - - conn.call('mutateArgs', {foo: 50}, _.identity); - - // Method should be called with original arg, not mutated arg. - var message = JSON.parse(stream.sent.shift()); - test.isUndefined(message.randomSeed); - test.equal(message, {msg: 'method', method: 'mutateArgs', - params: [{foo: 50}], id: message.id}); - test.length(stream.sent, 0); -}); - -var observeCursor = function (test, cursor) { - var counts = {added: 0, removed: 0, changed: 0, moved: 0}; - var expectedCounts = _.clone(counts); - var handle = cursor.observe( - { addedAt: function () { counts.added += 1; }, - removedAt: function () { counts.removed += 1; }, - changedAt: function () { counts.changed += 1; }, - movedTo: function () { counts.moved += 1; } - }); - return { - stop: _.bind(handle.stop, handle), - expectCallbacks: function (delta) { - _.each(delta, function (mod, field) { - expectedCounts[field] += mod; - }); - test.equal(counts, expectedCounts); - } - }; -}; - -// method calls another method in simulation. see not sent. 
-if (Meteor.isClient) { - Tinytest.add("livedata stub - methods calling methods", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var coll_name = Random.id(); - var coll = new Mongo.Collection(coll_name, {connection: conn}); - - // setup methods - conn.methods({ - do_something: function () { - conn.call('do_something_else'); - }, - do_something_else: function () { - coll.insert({a: 1}); - } - }); - - var o = observeCursor(test, coll.find()); - - // call method. - conn.call('do_something', _.identity); - - // see we only send message for outer methods - var message = testGotMessage(test, stream, {msg: 'method', - method: 'do_something', - params: [], - id: '*', - randomSeed: '*'}); - test.length(stream.sent, 0); - - // but inner method runs locally. - o.expectCallbacks({added: 1}); - test.equal(coll.find().count(), 1); - var docId = coll.findOne()._id; - test.equal(coll.findOne(), {_id: docId, a: 1}); - - // we get the results - stream.receive({msg: 'result', id:message.id, result:"1234"}); - - // get data from the method. data from this doc does not show up yet, but data - // from another doc does. - stream.receive({msg: 'added', collection: coll_name, id: MongoID.idStringify(docId), - fields: {value: 'tuesday'}}); - o.expectCallbacks(); - test.equal(coll.findOne(docId), {_id: docId, a: 1}); - stream.receive({msg: 'added', collection: coll_name, id: 'monkey', - fields: {value: 'bla'}}); - o.expectCallbacks({added: 1}); - test.equal(coll.findOne(docId), {_id: docId, a: 1}); - var newDoc = coll.findOne({value: 'bla'}); - test.isTrue(newDoc); - test.equal(newDoc, {_id: newDoc._id, value: 'bla'}); - - // get method satisfied. all data shows up. the 'a' field is reverted and - // 'value' field is set. 
- stream.receive({msg: 'updated', 'methods': [message.id]}); - o.expectCallbacks({changed: 1}); - test.equal(coll.findOne(docId), {_id: docId, value: 'tuesday'}); - test.equal(coll.findOne(newDoc._id), {_id: newDoc._id, value: 'bla'}); - - o.stop(); - }); -} -Tinytest.add("livedata stub - method call before connect", function (test) { - var stream = new StubStream; - var conn = newConnection(stream); - - var callbackOutput = []; - conn.call('someMethod', function (err, result) { - callbackOutput.push(result); - }); - test.equal(callbackOutput, []); - - // the real stream drops all output pre-connection - stream.sent.length = 0; - - // Now connect. - stream.reset(); - - testGotMessage(test, stream, makeConnectMessage()); - testGotMessage(test, stream, {msg: 'method', method: 'someMethod', - params: [], id: '*'}); -}); - -Tinytest.add("livedata stub - reconnect", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - - var o = observeCursor(test, coll.find()); - - // subscribe - var subCallbackFired = false; - var sub = conn.subscribe('my_data', function () { - subCallbackFired = true; - }); - test.isFalse(subCallbackFired); - - var subMessage = JSON.parse(stream.sent.shift()); - test.equal(subMessage, {msg: 'sub', name: 'my_data', params: [], - id: subMessage.id}); - - // get some data. it shows up. - stream.receive({msg: 'added', collection: collName, - id: '1234', fields: {a:1}}); - - test.equal(coll.find({}).count(), 1); - o.expectCallbacks({added: 1}); - test.isFalse(subCallbackFired); - - stream.receive({msg: 'changed', collection: collName, - id: '1234', fields: {b:2}}); - stream.receive({msg: 'ready', - subs: [subMessage.id] // satisfy sub - }); - test.isTrue(subCallbackFired); - subCallbackFired = false; // re-arm for test that it doesn't fire again. 
- - test.equal(coll.find({a:1, b:2}).count(), 1); - o.expectCallbacks({changed: 1}); - - // call method. - var methodCallbackFired = false; - conn.call('do_something', function () { - methodCallbackFired = true; - }); - - conn.apply('do_something_else', [], {wait: true}, _.identity); - conn.apply('do_something_later', [], _.identity); - - test.isFalse(methodCallbackFired); - - // The non-wait method should send, but not the wait method. - var methodMessage = JSON.parse(stream.sent.shift()); - test.isUndefined(methodMessage.randomSeed); - test.equal(methodMessage, {msg: 'method', method: 'do_something', - params: [], id:methodMessage.id}); - test.equal(stream.sent.length, 0); - - // more data. shows up immediately because there was no relevant method stub. - stream.receive({msg: 'changed', collection: collName, - id: '1234', fields: {c:3}}); - test.equal(coll.findOne('1234'), {_id: '1234', a: 1, b: 2, c: 3}); - o.expectCallbacks({changed: 1}); - - // stream reset. reconnect! we send a connect, our pending method, and our - // sub. The wait method still is blocked. - stream.reset(); - - testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); - testGotMessage(test, stream, methodMessage); - testGotMessage(test, stream, subMessage); - - // reconnect with different session id - stream.receive({msg: 'connected', session: SESSION_ID + 1}); - - // resend data. doesn't show up: we're in reconnect quiescence. 
- stream.receive({msg: 'added', collection: collName, - id: '1234', fields: {a:1, b:2, c:3, d: 4}}); - stream.receive({msg: 'added', collection: collName, - id: '2345', fields: {e: 5}}); - test.equal(coll.findOne('1234'), {_id: '1234', a: 1, b: 2, c: 3}); - test.isFalse(coll.findOne('2345')); - o.expectCallbacks(); - - // satisfy and return the method - stream.receive({msg: 'updated', - methods: [methodMessage.id]}); - test.isFalse(methodCallbackFired); - stream.receive({msg: 'result', id:methodMessage.id, result:"bupkis"}); - // The callback still doesn't fire (and we don't send the wait method): we're - // still in global quiescence - test.isFalse(methodCallbackFired); - test.equal(stream.sent.length, 0); - - // still no update. - test.equal(coll.findOne('1234'), {_id: '1234', a: 1, b: 2, c: 3}); - test.isFalse(coll.findOne('2345')); - o.expectCallbacks(); - - // re-satisfy sub - stream.receive({msg: 'ready', subs: [subMessage.id]}); - - // now the doc changes and method callback is called, and the wait method is - // sent. the sub callback isn't re-called. 
- test.isTrue(methodCallbackFired); - test.isFalse(subCallbackFired); - test.equal(coll.findOne('1234'), {_id: '1234', a: 1, b: 2, c: 3, d: 4}); - test.equal(coll.findOne('2345'), {_id: '2345', e: 5}); - o.expectCallbacks({added: 1, changed: 1}); - - var waitMethodMessage = JSON.parse(stream.sent.shift()); - test.isUndefined(waitMethodMessage.randomSeed); - test.equal(waitMethodMessage, {msg: 'method', method: 'do_something_else', - params: [], id: waitMethodMessage.id}); - test.equal(stream.sent.length, 0); - stream.receive({msg: 'result', id: waitMethodMessage.id, result: "bupkis"}); - test.equal(stream.sent.length, 0); - stream.receive({msg: 'updated', methods: [waitMethodMessage.id]}); - - // wait method done means we can send the third method - test.equal(stream.sent.length, 1); - var laterMethodMessage = JSON.parse(stream.sent.shift()); - test.isUndefined(laterMethodMessage.randomSeed); - test.equal(laterMethodMessage, {msg: 'method', method: 'do_something_later', - params: [], id: laterMethodMessage.id}); - - o.stop(); -}); - -if (Meteor.isClient) { - Tinytest.add("livedata stub - reconnect non-idempotent method", function(test) { - // This test is for https://github.com/meteor/meteor/issues/6108 - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var firstMethodCallbackFired = false; - var firstMethodCallbackErrored = false; - var secondMethodCallbackFired = false; - var secondMethodCallbackErrored = false; - - // call with noRetry true so that the method should fail to retry on reconnect. - conn.apply('do_something', [], {noRetry: true}, function(error) { - firstMethodCallbackFired = true; - // failure on reconnect should trigger an error. - if (error && error.error === 'invocation-failed') { - firstMethodCallbackErrored = true; - } - }); - conn.apply('do_something_else', [], {noRetry: true}, function(error) { - secondMethodCallbackFired = true; - // failure on reconnect should trigger an error. 
- if (error && error.error === 'invocation-failed') { - secondMethodCallbackErrored = true; - } - }); - - // The method has not succeeded yet - test.isFalse(firstMethodCallbackFired); - test.isFalse(secondMethodCallbackFired); - - // send the methods - stream.sent.shift(); - stream.sent.shift(); - // reconnect - stream.reset(); - - // verify that a reconnect message was sent. - testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); - // Make sure that the stream triggers connection. - stream.receive({msg: 'connected', session: SESSION_ID + 1}); - - //The method callback should fire even though the stream has not sent a response. - //the callback should have been fired with an error. - test.isTrue(firstMethodCallbackFired); - test.isTrue(firstMethodCallbackErrored); - test.isTrue(secondMethodCallbackFired); - test.isTrue(secondMethodCallbackErrored); - - // verify that the method message was not sent. - test.isUndefined(stream.sent.shift()); - }); -} - -function addReconnectTests(name, testFunc) { - Tinytest.add(name + " (deprecated)", function (test) { - function deprecatedSetOnReconnect(conn, handler) { - conn.onReconnect = handler; - } - testFunc.call(this, test, deprecatedSetOnReconnect); - }); - - Tinytest.add(name, function (test) { - var stopper; - function setOnReconnect(conn, handler) { - stopper && stopper.stop(); - stopper = DDP.onReconnect(function (reconnectingConn) { - if (reconnectingConn === conn) { - handler(); - } - }); - } - testFunc.call(this, test, setOnReconnect); - stopper && stopper.stop(); - }); -} - -if (Meteor.isClient) { - addReconnectTests("livedata stub - reconnect method which only got result", function (test, setOnReconnect) { - var stream = new StubStream; - var conn = newConnection(stream); - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - var o = observeCursor(test, coll.find()); - - conn.methods({writeSomething: function () { - // stub write - 
coll.insert({foo: 'bar'}); - }}); - - test.equal(coll.find({foo: 'bar'}).count(), 0); - - // Call a method. We'll get the result but not data-done before reconnect. - var callbackOutput = []; - var onResultReceivedOutput = []; - conn.apply('writeSomething', [], - {onResultReceived: function (err, result) { - onResultReceivedOutput.push(result); - }}, - function (err, result) { - callbackOutput.push(result); - }); - // Stub write is visible. - test.equal(coll.find({foo: 'bar'}).count(), 1); - var stubWrittenId = coll.findOne({foo: 'bar'})._id; - o.expectCallbacks({added: 1}); - // Callback not called. - test.equal(callbackOutput, []); - test.equal(onResultReceivedOutput, []); - // Method sent. - var methodId = testGotMessage( - test, stream, {msg: 'method', method: 'writeSomething', - params: [], id: '*', randomSeed: '*'}).id; - test.equal(stream.sent.length, 0); - - // Get some data. - stream.receive({msg: 'added', collection: collName, - id: MongoID.idStringify(stubWrittenId), fields: {baz: 42}}); - // It doesn't show up yet. - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), {_id: stubWrittenId, foo: 'bar'}); - o.expectCallbacks(); - - // Get the result. - stream.receive({msg: 'result', id: methodId, result: 'bla'}); - // Data unaffected. - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), {_id: stubWrittenId, foo: 'bar'}); - o.expectCallbacks(); - // Callback not called, but onResultReceived is. - test.equal(callbackOutput, []); - test.equal(onResultReceivedOutput, ['bla']); - - // Reset stream. Method does NOT get resent, because its result is already - // in. Reconnect quiescence happens as soon as 'connected' is received because - // there are no pending methods or subs in need of revival. - stream.reset(); - testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); - // Still holding out hope for session resumption, so nothing updated yet. 
- test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), {_id: stubWrittenId, foo: 'bar'}); - o.expectCallbacks(); - test.equal(callbackOutput, []); - - // Receive 'connected': time for reconnect quiescence! Data gets updated - // locally (ie, data is reset) and callback gets called. - stream.receive({msg: 'connected', session: SESSION_ID + 1}); - test.equal(coll.find().count(), 0); - o.expectCallbacks({removed: 1}); - test.equal(callbackOutput, ['bla']); - test.equal(onResultReceivedOutput, ['bla']); - stream.receive({msg: 'added', collection: collName, - id: MongoID.idStringify(stubWrittenId), fields: {baz: 42}}); - test.equal(coll.findOne(stubWrittenId), {_id: stubWrittenId, baz: 42}); - o.expectCallbacks({added: 1}); - - - - - // Run method again. We're going to do the same thing this time, except we're - // also going to use an onReconnect to insert another method at reconnect - // time, which will delay reconnect quiescence. - conn.apply('writeSomething', [], - {onResultReceived: function (err, result) { - onResultReceivedOutput.push(result); - }}, - function (err, result) { - callbackOutput.push(result); - }); - // Stub write is visible. - test.equal(coll.find({foo: 'bar'}).count(), 1); - var stubWrittenId2 = coll.findOne({foo: 'bar'})._id; - o.expectCallbacks({added: 1}); - // Callback not called. - test.equal(callbackOutput, ['bla']); - test.equal(onResultReceivedOutput, ['bla']); - // Method sent. - var methodId2 = testGotMessage( - test, stream, {msg: 'method', method: 'writeSomething', - params: [], id: '*', randomSeed: '*'}).id; - test.equal(stream.sent.length, 0); - - // Get some data. - stream.receive({msg: 'added', collection: collName, - id: MongoID.idStringify(stubWrittenId2), fields: {baz: 42}}); - // It doesn't show up yet. - test.equal(coll.find().count(), 2); - test.equal(coll.findOne(stubWrittenId2), {_id: stubWrittenId2, foo: 'bar'}); - o.expectCallbacks(); - - // Get the result. 
- stream.receive({msg: 'result', id: methodId2, result: 'blab'}); - // Data unaffected. - test.equal(coll.find().count(), 2); - test.equal(coll.findOne(stubWrittenId2), {_id: stubWrittenId2, foo: 'bar'}); - o.expectCallbacks(); - // Callback not called, but onResultReceived is. - test.equal(callbackOutput, ['bla']); - test.equal(onResultReceivedOutput, ['bla', 'blab']); - setOnReconnect(conn, function () { - conn.call('slowMethod', function (err, result) { - callbackOutput.push(result); - }); - }); - - // Reset stream. Method does NOT get resent, because its result is already in, - // but slowMethod gets called via onReconnect. Reconnect quiescence is now - // blocking on slowMethod. - stream.reset(); - testGotMessage(test, stream, makeConnectMessage(SESSION_ID + 1)); - var slowMethodId = testGotMessage( - test, stream, - {msg: 'method', method: 'slowMethod', params: [], id: '*'}).id; - // Still holding out hope for session resumption, so nothing updated yet. - test.equal(coll.find().count(), 2); - test.equal(coll.findOne(stubWrittenId2), {_id: stubWrittenId2, foo: 'bar'}); - o.expectCallbacks(); - test.equal(callbackOutput, ['bla']); - - // Receive 'connected'... but no reconnect quiescence yet due to slowMethod. - stream.receive({msg: 'connected', session: SESSION_ID + 2}); - test.equal(coll.find().count(), 2); - test.equal(coll.findOne(stubWrittenId2), {_id: stubWrittenId2, foo: 'bar'}); - o.expectCallbacks(); - test.equal(callbackOutput, ['bla']); - - // Receive data matching our stub. It doesn't take effect yet. - stream.receive({msg: 'added', collection: collName, - id: MongoID.idStringify(stubWrittenId2), fields: {foo: 'bar'}}); - o.expectCallbacks(); - - // slowMethod is done writing, so we get full reconnect quiescence (but no - // slowMethod callback)... ie, a reset followed by applying the data we just - // got, as well as calling the callback from the method that half-finished - // before reset. The net effect is deleting doc 'stubWrittenId'. 
- stream.receive({msg: 'updated', methods: [slowMethodId]}); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId2), {_id: stubWrittenId2, foo: 'bar'}); - o.expectCallbacks({removed: 1}); - test.equal(callbackOutput, ['bla', 'blab']); - - // slowMethod returns a value now. - stream.receive({msg: 'result', id: slowMethodId, result: 'slow'}); - o.expectCallbacks(); - test.equal(callbackOutput, ['bla', 'blab', 'slow']); - - o.stop(); - }); -} -Tinytest.add("livedata stub - reconnect method which only got data", function (test) { - var stream = new StubStream; - var conn = newConnection(stream); - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - var o = observeCursor(test, coll.find()); - - // Call a method. We'll get the data-done message but not the result before - // reconnect. - var callbackOutput = []; - var onResultReceivedOutput = []; - conn.apply('doLittle', [], - {onResultReceived: function (err, result) { - onResultReceivedOutput.push(result); - }}, - function (err, result) { - callbackOutput.push(result); - }); - // Callbacks not called. - test.equal(callbackOutput, []); - test.equal(onResultReceivedOutput, []); - // Method sent. - var methodId = testGotMessage( - test, stream, {msg: 'method', method: 'doLittle', - params: [], id: '*'}).id; - test.equal(stream.sent.length, 0); - - // Get some data. - stream.receive({msg: 'added', collection: collName, - id: 'photo', fields: {baz: 42}}); - // It shows up instantly because the stub didn't write anything. - test.equal(coll.find().count(), 1); - test.equal(coll.findOne('photo'), {_id: 'photo', baz: 42}); - o.expectCallbacks({added: 1}); - - // Get the data-done message. - stream.receive({msg: 'updated', methods: [methodId]}); - // Data still here. 
- test.equal(coll.find().count(), 1); - test.equal(coll.findOne('photo'), {_id: 'photo', baz: 42}); - o.expectCallbacks(); - // Method callback not called yet (no result yet). - test.equal(callbackOutput, []); - test.equal(onResultReceivedOutput, []); - - // Reset stream. Method gets resent (with same ID), and blocks reconnect - // quiescence. - stream.reset(); - testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); - testGotMessage( - test, stream, {msg: 'method', method: 'doLittle', - params: [], id: methodId}); - // Still holding out hope for session resumption, so nothing updated yet. - test.equal(coll.find().count(), 1); - test.equal(coll.findOne('photo'), {_id: 'photo', baz: 42}); - o.expectCallbacks(); - test.equal(callbackOutput, []); - test.equal(onResultReceivedOutput, []); - - // Receive 'connected'. Still blocking on reconnect quiescence. - stream.receive({msg: 'connected', session: SESSION_ID + 1}); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne('photo'), {_id: 'photo', baz: 42}); - o.expectCallbacks(); - test.equal(callbackOutput, []); - test.equal(onResultReceivedOutput, []); - - // Receive method result. onResultReceived is called but the main callback - // isn't (ie, we don't get confused by the fact that we got data-done the - // *FIRST* time through). - stream.receive({msg: 'result', id: methodId, result: 'res'}); - test.equal(callbackOutput, []); - test.equal(onResultReceivedOutput, ['res']); - - // Now we get data-done. Collection is reset and callback is called. 
- stream.receive({msg: 'updated', methods: [methodId]}); - test.equal(coll.find().count(), 0); - o.expectCallbacks({removed: 1}); - test.equal(callbackOutput, ['res']); - test.equal(onResultReceivedOutput, ['res']); - - o.stop(); -}); -if (Meteor.isClient) { - Tinytest.add("livedata stub - multiple stubs same doc", function (test) { - var stream = new StubStream; - var conn = newConnection(stream); - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - var o = observeCursor(test, coll.find()); - - conn.methods({ - insertSomething: function () { - // stub write - coll.insert({foo: 'bar'}); - }, - updateIt: function (id) { - coll.update(id, {$set: {baz: 42}}); - } - }); - - test.equal(coll.find().count(), 0); - - // Call the insert method. - conn.call('insertSomething', _.identity); - // Stub write is visible. - test.equal(coll.find({foo: 'bar'}).count(), 1); - var stubWrittenId = coll.findOne({foo: 'bar'})._id; - o.expectCallbacks({added: 1}); - // Method sent. - var insertMethodId = testGotMessage( - test, stream, {msg: 'method', method: 'insertSomething', - params: [], id: '*', randomSeed: '*'}).id; - test.equal(stream.sent.length, 0); - - // Call update method. - conn.call('updateIt', stubWrittenId, _.identity); - // This stub write is visible too. - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), - {_id: stubWrittenId, foo: 'bar', baz: 42}); - o.expectCallbacks({changed: 1}); - // Method sent. - var updateMethodId = testGotMessage( - test, stream, {msg: 'method', method: 'updateIt', - params: [stubWrittenId], id: '*'}).id; - test.equal(stream.sent.length, 0); - - // Get some data... slightly different than what we wrote. - stream.receive({msg: 'added', collection: collName, - id: MongoID.idStringify(stubWrittenId), fields: {foo: 'barb', other: 'field', - other2: 'bla'}}); - // It doesn't show up yet. 
- test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), - {_id: stubWrittenId, foo: 'bar', baz: 42}); - o.expectCallbacks(); - - // And get the first method-done. Still no updates to minimongo: we can't - // quiesce the doc until the second method is done. - stream.receive({msg: 'updated', methods: [insertMethodId]}); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), - {_id: stubWrittenId, foo: 'bar', baz: 42}); - o.expectCallbacks(); - - // More data. Not quite what we wrote. Also ignored for now. - stream.receive({msg: 'changed', collection: collName, - id: MongoID.idStringify(stubWrittenId), fields: {baz: 43}, cleared: ['other']}); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), - {_id: stubWrittenId, foo: 'bar', baz: 42}); - o.expectCallbacks(); - - // Second data-ready. Now everything takes effect! - stream.receive({msg: 'updated', methods: [updateMethodId]}); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(stubWrittenId), - {_id: stubWrittenId, foo: 'barb', other2: 'bla', - baz: 43}); - o.expectCallbacks({changed: 1}); - - o.stop(); - }); -} - -if (Meteor.isClient) { - Tinytest.add("livedata stub - unsent methods don't block quiescence", function (test) { - // This test is for https://github.com/meteor/meteor/issues/555 - - var stream = new StubStream; - var conn = newConnection(stream); - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - - conn.methods({ - insertSomething: function () { - // stub write - coll.insert({foo: 'bar'}); - } - }); - - test.equal(coll.find().count(), 0); - - // Call a random method (no-op) - conn.call('no-op', _.identity); - // Call a wait method - conn.apply('no-op', [], {wait: true}, _.identity); - // Call a method with a stub that writes. - conn.call('insertSomething', _.identity); - - - // Stub write is visible. 
- test.equal(coll.find({foo: 'bar'}).count(), 1); - var stubWrittenId = coll.findOne({foo: 'bar'})._id; - - // first method sent - var firstMethodId = testGotMessage( - test, stream, {msg: 'method', method: 'no-op', - params: [], id: '*'}).id; - test.equal(stream.sent.length, 0); - - // ack the first method - stream.receive({msg: 'updated', methods: [firstMethodId]}); - stream.receive({msg: 'result', id: firstMethodId}); - - // Wait method sent. - var waitMethodId = testGotMessage( - test, stream, {msg: 'method', method: 'no-op', - params: [], id: '*'}).id; - test.equal(stream.sent.length, 0); - - // ack the wait method - stream.receive({msg: 'updated', methods: [waitMethodId]}); - stream.receive({msg: 'result', id: waitMethodId}); - - // insert method sent. - var insertMethodId = testGotMessage( - test, stream, {msg: 'method', method: 'insertSomething', - params: [], id: '*', randomSeed: '*'}).id; - test.equal(stream.sent.length, 0); - - // ack the insert method - stream.receive({msg: 'updated', methods: [insertMethodId]}); - stream.receive({msg: 'result', id: insertMethodId}); - - // simulation reverted. - test.equal(coll.find({foo: 'bar'}).count(), 0); - - }); -} -Tinytest.add("livedata stub - reactive resub", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var readiedSubs = {}; - var markAllReady = function () { - // synthesize a "ready" message in response to any "sub" - // message with an id we haven't seen before - _.each(stream.sent, function (msg) { - msg = JSON.parse(msg); - if (msg.msg === 'sub' && ! 
_.has(readiedSubs, msg.id)) { - stream.receive({msg: 'ready', subs: [msg.id]}); - readiedSubs[msg.id] = true; - } - }); - }; - - var fooArg = new ReactiveVar('A'); - var fooReady = 0; - - var inner; - var outer = Tracker.autorun(function () { - inner = Tracker.autorun(function () { - conn.subscribe("foo-sub", fooArg.get(), - function () { fooReady++; }); - }); - }); - - markAllReady(); - var message = JSON.parse(stream.sent.shift()); - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo-sub', params: ['A']}); - test.equal(fooReady, 1); - - // Rerun the inner autorun with different subscription - // arguments. - fooArg.set('B'); - test.isTrue(inner.invalidated); - Tracker.flush(); - test.isFalse(inner.invalidated); - markAllReady(); - message = JSON.parse(stream.sent.shift()); - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo-sub', params: ['B']}); - message = JSON.parse(stream.sent.shift()); - delete message.id; - test.equal(message, {msg: 'unsub'}); - test.equal(fooReady, 2); - - // Rerun inner again with same args; should be no re-sub. - inner.invalidate(); - test.isTrue(inner.invalidated); - Tracker.flush(); - test.isFalse(inner.invalidated); - markAllReady(); - test.isUndefined(stream.sent.shift()); test.isUndefined(stream.sent.shift()); - test.equal(fooReady, 3); - - // Rerun outer! Should still be no re-sub even though - // the inner computation is stopped and a new one is - // started. - outer.invalidate(); - test.isTrue(inner.invalidated); - Tracker.flush(); - test.isFalse(inner.invalidated); - markAllReady(); - test.isUndefined(stream.sent.shift()); - test.equal(fooReady, 4); - - // Change the subscription. Now we should get an onReady. 
- fooArg.set('C'); - Tracker.flush(); - markAllReady(); - message = JSON.parse(stream.sent.shift()); - delete message.id; - test.equal(message, {msg: 'sub', name: 'foo-sub', params: ['C']}); - message = JSON.parse(stream.sent.shift()); - delete message.id; - test.equal(message, {msg: 'unsub'}); - test.equal(fooReady, 5); -}); - - - -Tinytest.add("livedata connection - reactive userId", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - test.equal(conn.userId(), null); - conn.setUserId(1337); - test.equal(conn.userId(), 1337); -}); - -Tinytest.add("livedata connection - two wait methods", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - - // setup method - conn.methods({do_something: function (x) {}}); - - var responses = []; - conn.apply('do_something', ['one!'], function() { responses.push('one'); }); - var one_message = JSON.parse(stream.sent.shift()); - test.equal(one_message.params, ['one!']); - - conn.apply('do_something', ['two!'], {wait: true}, function() { - responses.push('two'); - }); - // 'two!' isn't sent yet, because it's a wait method. - test.equal(stream.sent.length, 0); - - conn.apply('do_something', ['three!'], function() { - responses.push('three'); - }); - conn.apply('do_something', ['four!'], function() { - responses.push('four'); - }); - - conn.apply('do_something', ['five!'], {wait: true}, function() { - responses.push('five'); - }); - - conn.apply('do_something', ['six!'], function() { responses.push('six'); }); - - // Verify that we did not send any more methods since we are still waiting on - // 'one!'. - test.equal(stream.sent.length, 0); - - // Receive some data. "one" is not a wait method and there are no stubs, so it - // gets applied immediately. 
- test.equal(coll.find().count(), 0); - stream.receive({msg: 'added', collection: collName, - id: 'foo', fields: {x: 1}}); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne('foo'), {_id: 'foo', x: 1}); - - // Let "one!" finish. Both messages are required to fire the callback. - stream.receive({msg: 'result', id: one_message.id}); - test.equal(responses, []); - stream.receive({msg: 'updated', methods: [one_message.id]}); - test.equal(responses, ['one']); - - // Now we've send out "two!". - var two_message = JSON.parse(stream.sent.shift()); - test.equal(two_message.params, ['two!']); - - // But still haven't sent "three!". - test.equal(stream.sent.length, 0); - - // Receive more data. "two" is a wait method, so the data doesn't get applied - // yet. - stream.receive({msg: 'changed', collection: collName, - id: 'foo', fields: {y: 3}}); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne('foo'), {_id: 'foo', x: 1}); - - // Let "two!" finish, with its end messages in the opposite order to "one!". - stream.receive({msg: 'updated', methods: [two_message.id]}); - test.equal(responses, ['one']); - test.equal(stream.sent.length, 0); - // data-done message is enough to allow data to be written. - test.equal(coll.find().count(), 1); - test.equal(coll.findOne('foo'), {_id: 'foo', x: 1, y: 3}); - stream.receive({msg: 'result', id: two_message.id}); - test.equal(responses, ['one', 'two']); - - // Verify that we just sent "three!" and "four!" now that we got - // responses for "one!" and "two!" - test.equal(stream.sent.length, 2); - var three_message = JSON.parse(stream.sent.shift()); - test.equal(three_message.params, ['three!']); - var four_message = JSON.parse(stream.sent.shift()); - test.equal(four_message.params, ['four!']); - - // Out of order response is OK for non-wait methods. 
- stream.receive({msg: 'result', id: three_message.id}); - stream.receive({msg: 'result', id: four_message.id}); - stream.receive({msg: 'updated', methods: [four_message.id]}); - test.equal(responses, ['one', 'two', 'four']); - test.equal(stream.sent.length, 0); - - // Let three finish too. - stream.receive({msg: 'updated', methods: [three_message.id]}); - test.equal(responses, ['one', 'two', 'four', 'three']); - - // Verify that we just sent "five!" (the next wait method). - test.equal(stream.sent.length, 1); - var five_message = JSON.parse(stream.sent.shift()); - test.equal(five_message.params, ['five!']); - test.equal(responses, ['one', 'two', 'four', 'three']); - - // Let five finish. - stream.receive({msg: 'result', id: five_message.id}); - stream.receive({msg: 'updated', methods: [five_message.id]}); - test.equal(responses, ['one', 'two', 'four', 'three', 'five']); - - var six_message = JSON.parse(stream.sent.shift()); - test.equal(six_message.params, ['six!']); -}); - -addReconnectTests("livedata connection - onReconnect prepends messages correctly with a wait method", function(test, setOnReconnect) { - var stream = new StubStream(); - var conn = newConnection(stream); - startAndConnect(test, stream); - - // setup method - conn.methods({do_something: function (x) {}}); - - setOnReconnect(conn, function() { - conn.apply('do_something', ['reconnect zero'], _.identity); - conn.apply('do_something', ['reconnect one'], _.identity); - conn.apply('do_something', ['reconnect two'], {wait: true}, _.identity); - conn.apply('do_something', ['reconnect three'], _.identity); - }); - - conn.apply('do_something', ['one'], _.identity); - conn.apply('do_something', ['two'], {wait: true}, _.identity); - conn.apply('do_something', ['three'], _.identity); - - // reconnect - stream.sent = []; - stream.reset(); - testGotMessage(test, stream, makeConnectMessage(conn._lastSessionId)); - - // Test that we sent what we expect to send, and we're blocked on - // what we expect to be 
blocked. The subsequent logic to correctly - // read the wait flag is tested separately. - test.equal(_.map(stream.sent, function(msg) { - return JSON.parse(msg).params[0]; - }), ['reconnect zero', 'reconnect one']); - - // white-box test: - test.equal(_.map(conn._outstandingMethodBlocks, function (block) { - return [block.wait, _.map(block.methods, function (method) { - return method._message.params[0]; - })]; - }), [ - [false, ['reconnect zero', 'reconnect one']], - [true, ['reconnect two']], - [false, ['reconnect three', 'one']], - [true, ['two']], - [false, ['three']] - ]); -}); - -Tinytest.add("livedata connection - ping without id", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - startAndConnect(test, stream); - - stream.receive({msg: 'ping'}); - testGotMessage(test, stream, {msg: 'pong'}); -}); - -Tinytest.add("livedata connection - ping with id", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - startAndConnect(test, stream); - - var id = Random.id(); - stream.receive({msg: 'ping', id: id}); - testGotMessage(test, stream, {msg: 'pong', id: id}); -}); - -_.each(LivedataTest.SUPPORTED_DDP_VERSIONS, function (version) { - Tinytest.addAsync("livedata connection - ping from " + version, - function (test, onComplete) { - var connection = new LivedataTest.Connection(getSelfConnectionUrl(), { - reloadWithOutstanding: true, - supportedDDPVersions: [version], - onDDPVersionNegotiationFailure: function () { test.fail(); onComplete(); }, - onConnected: function () { - test.equal(connection._version, version); - // It's a little naughty to access _stream and _send, but it works... 
- connection._stream.on('message', function (json) { - var msg = JSON.parse(json); - var done = false; - if (msg.msg === 'pong') { - test.notEqual(version, "pre1"); - done = true; - } else if (msg.msg === 'error') { - // Version pre1 does not play ping-pong - test.equal(version, "pre1"); - done = true; - } else { - Meteor._debug("Got unexpected message: " + json); - } - if (done) { - connection._stream.disconnect({_permanent: true}); - onComplete(); - } - }); - connection._send({msg: 'ping'}); - } - }); - }); -}); - -var getSelfConnectionUrl = function () { - if (Meteor.isClient) { - var ddpUrl = Meteor._relativeToSiteRootUrl("/"); - if (typeof __meteor_runtime_config__ !== "undefined") { - if (__meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL) - ddpUrl = __meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL; - } - return ddpUrl; - } else { - return Meteor.absoluteUrl(); - } -}; - -if (Meteor.isServer) { - Meteor.methods({ - reverse: function (arg) { - // Return something notably different from reverse.meteor.com. 
- return arg.split("").reverse().join("") + " LOCAL"; - } - }); -} - -testAsyncMulti("livedata connection - reconnect to a different server", [ - function (test, expect) { - var self = this; - self.conn = DDP.connect("reverse.meteor.com"); - pollUntil(expect, function () { - return self.conn.status().connected; - }, 5000, 100, true); // poll until connected, but don't fail if we don't connect - }, - function (test, expect) { - var self = this; - self.doTest = self.conn.status().connected; - if (self.doTest) { - self.conn.call("reverse", "foo", expect(function (err, res) { - test.equal(res, "oof"); - })); - } - }, - function (test, expect) { - var self = this; - if (self.doTest) { - self.conn.reconnect({url: getSelfConnectionUrl()}); - self.conn.call("reverse", "bar", expect(function (err, res) { - test.equal(res, "rab LOCAL"); - })); - } - } -]); - -Tinytest.addAsync("livedata connection - version negotiation requires renegotiating", - function (test, onComplete) { - var connection = new LivedataTest.Connection(getSelfConnectionUrl(), { - reloadWithOutstanding: true, - supportedDDPVersions: ["garbled", LivedataTest.SUPPORTED_DDP_VERSIONS[0]], - onDDPVersionNegotiationFailure: function () { test.fail(); onComplete(); }, - onConnected: function () { - test.equal(connection._version, LivedataTest.SUPPORTED_DDP_VERSIONS[0]); - connection._stream.disconnect({_permanent: true}); - onComplete(); - } - }); -}); - -Tinytest.addAsync("livedata connection - version negotiation error", - function (test, onComplete) { - var connection = new LivedataTest.Connection(getSelfConnectionUrl(), { - reloadWithOutstanding: true, - supportedDDPVersions: ["garbled", "more garbled"], - onDDPVersionNegotiationFailure: function () { - test.equal(connection.status().status, "failed"); - test.matches(connection.status().reason, /DDP version negotiation failed/); - test.isFalse(connection.status().connected); - onComplete(); - }, - onConnected: function () { - test.fail(); - onComplete(); - } - 
}); -}); - -addReconnectTests("livedata connection - onReconnect prepends messages correctly without a wait method", function(test, setOnReconnect) { - var stream = new StubStream(); - var conn = newConnection(stream); - startAndConnect(test, stream); - - // setup method - conn.methods({do_something: function (x) {}}); - - setOnReconnect(conn, function() { - conn.apply('do_something', ['reconnect one'], _.identity); - conn.apply('do_something', ['reconnect two'], _.identity); - conn.apply('do_something', ['reconnect three'], _.identity); - }); - - conn.apply('do_something', ['one'], _.identity); - conn.apply('do_something', ['two'], {wait: true}, _.identity); - conn.apply('do_something', ['three'], {wait: true}, _.identity); - conn.apply('do_something', ['four'], _.identity); - - // reconnect - stream.sent = []; - stream.reset(); - testGotMessage(test, stream, makeConnectMessage(conn._lastSessionId)); - - // Test that we sent what we expect to send, and we're blocked on - // what we expect to be blocked. The subsequent logic to correctly - // read the wait flag is tested separately. 
- test.equal(_.map(stream.sent, function(msg) { - return JSON.parse(msg).params[0]; - }), ['reconnect one', 'reconnect two', 'reconnect three', 'one']); - - // white-box test: - test.equal(_.map(conn._outstandingMethodBlocks, function (block) { - return [block.wait, _.map(block.methods, function (method) { - return method._message.params[0]; - })]; - }), [ - [false, ['reconnect one', 'reconnect two', 'reconnect three', 'one']], - [true, ['two']], - [true, ['three']], - [false, ['four']] - ]); -}); - -addReconnectTests("livedata connection - onReconnect with sent messages", function(test, setOnReconnect) { - var stream = new StubStream(); - var conn = newConnection(stream); - startAndConnect(test, stream); - - // setup method - conn.methods({do_something: function (x) {}}); - - setOnReconnect(conn, function() { - conn.apply('do_something', ['login'], {wait: true}, _.identity); - }); - - conn.apply('do_something', ['one'], _.identity); - - // initial connect - stream.sent = []; - stream.reset(); - testGotMessage( - test, stream, makeConnectMessage(conn._lastSessionId)); - - // Test that we sent just the login message. - var loginId = testGotMessage( - test, stream, {msg: 'method', method: 'do_something', - params: ['login'], id: '*'}).id; - - // we connect. - stream.receive({msg: 'connected', session: Random.id()}); - test.length(stream.sent, 0); - - // login got result (but not yet data) - stream.receive({msg: 'result', id: loginId, result: 'foo'}); - test.length(stream.sent, 0); - - // login got data. now we send next method. 
- stream.receive({msg: 'updated', methods: [loginId]}); - - testGotMessage( - test, stream, {msg: 'method', method: 'do_something', - params: ['one'], id: '*'}).id; -}); - - - -addReconnectTests("livedata stub - reconnect double wait method", function (test, setOnReconnect) { - var stream = new StubStream; - var conn = newConnection(stream); - startAndConnect(test, stream); - - var output = []; - setOnReconnect(conn, function () { - conn.apply('reconnectMethod', [], {wait: true}, function (err, result) { - output.push('reconnect'); - }); - }); - - conn.apply('halfwayMethod', [], {wait: true}, function (err, result) { - output.push('halfway'); - }); - - test.equal(output, []); - // Method sent. - var halfwayId = testGotMessage( - test, stream, {msg: 'method', method: 'halfwayMethod', - params: [], id: '*'}).id; - test.equal(stream.sent.length, 0); - - // Get the result. This means it will not be resent. - stream.receive({msg: 'result', id: halfwayId, result: 'bla'}); - // Callback not called. - test.equal(output, []); - - // Reset stream. halfwayMethod does NOT get resent, but reconnectMethod does! - // Reconnect quiescence happens when reconnectMethod is done. - stream.reset(); - testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); - var reconnectId = testGotMessage( - test, stream, {msg: 'method', method: 'reconnectMethod', - params: [], id: '*'}).id; - test.length(stream.sent, 0); - // Still holding out hope for session resumption, so no callbacks yet. - test.equal(output, []); - - // Receive 'connected', but reconnect quiescence is blocking on - // reconnectMethod. - stream.receive({msg: 'connected', session: SESSION_ID + 1}); - test.equal(output, []); - - // Data-done for reconnectMethod. This gets us to reconnect quiescence, so - // halfwayMethod's callback fires. reconnectMethod's is still waiting on its - // result. 
- stream.receive({msg: 'updated', methods: [reconnectId]}); - test.equal(output.shift(), 'halfway'); - test.equal(output, []); - - // Get result of reconnectMethod. Its callback fires. - stream.receive({msg: 'result', id: reconnectId, result: 'foo'}); - test.equal(output.shift(), 'reconnect'); - test.equal(output, []); - - // Call another method. It should be delivered immediately. This is a - // regression test for a case where it never got delivered because there was - // an empty block in _outstandingMethodBlocks blocking it from being sent. - conn.call('lastMethod', _.identity); - testGotMessage(test, stream, - {msg: 'method', method: 'lastMethod', params: [], id: '*'}); -}); - -Tinytest.add("livedata stub - subscribe errors", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - // subscribe - var onReadyFired = false; - var subErrorInStopped = null; - var subErrorInError = null; - - conn.subscribe('unknownSub', { - onReady: function () { - onReadyFired = true; - }, - - // We now have two ways to get the error from a subscription: - // 1. onStop, which is called no matter what when the subscription is - // stopped (a lifecycle callback) - // 2. onError, which is deprecated and is called only if there is an - // error - onStop: function (error) { - subErrorInStopped = error; - }, - onError: function (error) { - subErrorInError = error; - } - }); - - test.isFalse(onReadyFired); - test.equal(subErrorInStopped, null); - - // XXX COMPAT WITH 1.0.3.1 #errorCallback - test.equal(subErrorInError, null); - - var subMessage = JSON.parse(stream.sent.shift()); - test.equal(subMessage, {msg: 'sub', name: 'unknownSub', params: [], - id: subMessage.id}); - - // Reject the sub. 
- stream.receive({msg: 'nosub', id: subMessage.id, - error: new Meteor.Error(404, "Subscription not found")}); - test.isFalse(onReadyFired); - - // Check the error passed to the stopped callback was correct - test.instanceOf(subErrorInStopped, Meteor.Error); - test.equal(subErrorInStopped.error, 404); - test.equal(subErrorInStopped.reason, "Subscription not found"); - - // Check the error passed to the error callback was correct - // XXX COMPAT WITH 1.0.3.1 #errorCallback - test.instanceOf(subErrorInError, Meteor.Error); - test.equal(subErrorInError.error, 404); - test.equal(subErrorInError.reason, "Subscription not found"); - - // stream reset: reconnect! - stream.reset(); - // We send a connect. - testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); - // We should NOT re-sub to the sub, because we processed the error. - test.length(stream.sent, 0); - test.isFalse(onReadyFired); -}); - -Tinytest.add("livedata stub - subscribe stop", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - // subscribe - var onReadyFired = false; - var onStopFired = false; - var subErrorInStopped = null; - - var sub = conn.subscribe('my_data', { - onStop: function (error) { - onStopFired = true; - subErrorInStopped = error; - } - }); - - test.equal(subErrorInStopped, null); - - sub.stop(); - - test.isTrue(onStopFired); - test.equal(subErrorInStopped, undefined); -}); - -if (Meteor.isClient) { - Tinytest.add("livedata stub - stubs before connected", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - - // Start and send "connect", but DON'T get 'connected' quite yet. - stream.reset(); // initial connection start. - - testGotMessage(test, stream, makeConnectMessage()); - test.length(stream.sent, 0); - - // Insert a document. The stub updates "conn" directly. 
- coll.insert({_id: "foo", bar: 42}, _.identity); - test.equal(coll.find().count(), 1); - test.equal(coll.findOne(), {_id: "foo", bar: 42}); - // It also sends the method message. - var methodMessage = JSON.parse(stream.sent.shift()); - test.isUndefined(methodMessage.randomSeed); - test.equal(methodMessage, {msg: 'method', method: '/' + collName + '/insert', - params: [{_id: "foo", bar: 42}], - id: methodMessage.id}); - test.length(stream.sent, 0); - - // Now receive a connected message. This should not clear the - // _documentsWrittenByStub state! - stream.receive({msg: 'connected', session: SESSION_ID}); - test.length(stream.sent, 0); - test.equal(coll.find().count(), 1); - - // Now receive the "updated" message for the method. This should revert the - // insert. - stream.receive({msg: 'updated', methods: [methodMessage.id]}); - test.length(stream.sent, 0); - test.equal(coll.find().count(), 0); - }); -} - -if (Meteor.isClient) { - Tinytest.add("livedata stub - method call between reset and quiescence", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream); - - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - - conn.methods({ - update_value: function () { - coll.update('aaa', {value: 222}); - } - }); - - // Set up test subscription. - var sub = conn.subscribe('test_data'); - var subMessage = JSON.parse(stream.sent.shift()); - test.equal(subMessage, {msg: 'sub', name: 'test_data', - params: [], id:subMessage.id}); - test.length(stream.sent, 0); - - var subDocMessage = {msg: 'added', collection: collName, - id: 'aaa', fields: {value: 111}}; - - var subReadyMessage = {msg: 'ready', 'subs': [subMessage.id]}; - - stream.receive(subDocMessage); - stream.receive(subReadyMessage); - test.isTrue(coll.findOne('aaa').value == 111); - - // Initiate reconnect. 
- stream.reset(); - testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); - testGotMessage(test, stream, subMessage); - stream.receive({msg: 'connected', session: SESSION_ID + 1}); - - // Now in reconnect, can still see the document. - test.isTrue(coll.findOne('aaa').value == 111); - - conn.call('update_value'); - - // Observe the stub-written value. - test.isTrue(coll.findOne('aaa').value == 222); - - var methodMessage = JSON.parse(stream.sent.shift()); - test.equal(methodMessage, {msg: 'method', method: 'update_value', - params: [], id:methodMessage.id}); - test.length(stream.sent, 0); - - stream.receive(subDocMessage); - stream.receive(subReadyMessage); - - // By this point quiescence is reached and stores have been reset. - - // The stub-written value is still there. - test.isTrue(coll.findOne('aaa').value == 222); - - stream.receive({msg: 'changed', collection: collName, - id: 'aaa', fields: {value: 333}}); - stream.receive({msg: 'updated', 'methods': [methodMessage.id]}); - stream.receive({msg: 'result', id:methodMessage.id, result:null}); - - // Server wrote a different value, make sure it's visible now. - test.isTrue(coll.findOne('aaa').value == 333); - }); - - Tinytest.add("livedata stub - buffering and methods interaction", function (test) { - var stream = new StubStream(); - var conn = newConnection(stream, { - // A very high values so that all messages are effectively buffered. - bufferedWritesInterval: 10000, - bufferedWritesMaxAge: 10000 - }); - - startAndConnect(test, stream); - - var collName = Random.id(); - var coll = new Mongo.Collection(collName, {connection: conn}); - - conn.methods({ - update_value: function () { - const value = coll.findOne('aaa').subscription; - // Method should have access to the latest value of the collection. - coll.update('aaa', {$set: {method: value + 110}}); - } - }); - - // Set up test subscription. 
- var sub = conn.subscribe('test_data'); - var subMessage = JSON.parse(stream.sent.shift()); - test.equal(subMessage, {msg: 'sub', name: 'test_data', - params: [], id:subMessage.id}); - test.length(stream.sent, 0); - - var subDocMessage = {msg: 'added', collection: collName, - id: 'aaa', fields: {subscription: 111}}; - - var subReadyMessage = {msg: 'ready', 'subs': [subMessage.id]}; - - stream.receive(subDocMessage); - stream.receive(subReadyMessage); - test.equal(coll.findOne('aaa').subscription, 111); - - var subDocChangeMessage = {msg: 'changed', collection: collName, - id: 'aaa', fields: {subscription: 112}}; - - stream.receive(subDocChangeMessage); - // Still 111 because buffer has not been flushed. - test.equal(coll.findOne('aaa').subscription, 111); - - // Call updates the stub. - conn.call('update_value'); - - // Observe the stub-written value. - test.equal(coll.findOne('aaa').method, 222); - // subscription field is updated to the latest value - // because of the method call. - test.equal(coll.findOne('aaa').subscription, 112); - - var methodMessage = JSON.parse(stream.sent.shift()); - test.equal(methodMessage, {msg: 'method', method: 'update_value', - params: [], id:methodMessage.id}); - test.length(stream.sent, 0); - - // "Server-side" change from the method arrives and method returns. - // With potentially fixed value for method field, if stub didn't - // use 112 as the subscription field value. - stream.receive({msg: 'changed', collection: collName, - id: 'aaa', fields: {method: 222}}); - stream.receive({msg: 'updated', 'methods': [methodMessage.id]}); - stream.receive({msg: 'result', id:methodMessage.id, result:null}); - - test.equal(coll.findOne('aaa').method, 222); - test.equal(coll.findOne('aaa').subscription, 112); - - // Buffer should already be flushed because of a non-update message. - // And after a flush we really want subscription field to be 112. 
- conn._flushBufferedWrites(); - test.equal(coll.findOne('aaa').method, 222); - test.equal(coll.findOne('aaa').subscription, 112); - }); -} - -// XXX also test: -// - reconnect, with session resume. -// - restart on update flag -// - on_update event -// - reloading when the app changes, including session migration diff --git a/packages/ddp-client/livedata_tests.js b/packages/ddp-client/livedata_tests.js deleted file mode 100644 index a7befa817e..0000000000 --- a/packages/ddp-client/livedata_tests.js +++ /dev/null @@ -1,874 +0,0 @@ -import { DDP, LivedataTest } from "./namespace.js"; - -// XXX should check error codes -var failure = function (test, code, reason) { - return function (error, result) { - test.equal(result, undefined); - test.isTrue(error && typeof error === "object"); - if (error && typeof error === "object") { - if (typeof code === "number") { - test.instanceOf(error, Meteor.Error); - code && test.equal(error.error, code); - reason && test.equal(error.reason, reason); - // XXX should check that other keys aren't present.. 
should - // probably use something like the Matcher we used to have - } else { - // for normal Javascript errors - test.instanceOf(error, Error); - test.equal(error.message, code); - } - } - }; -}; - -var failureOnStopped = function (test, code, reason) { - var f = failure(test, code, reason); - - return function (error) { - if (error) { - f(error); - } - } -}; - -Tinytest.add("livedata - Meteor.Error", function (test) { - var error = new Meteor.Error(123, "kittens", "puppies"); - test.instanceOf(error, Meteor.Error); - test.instanceOf(error, Error); - test.equal(error.error, 123); - test.equal(error.reason, "kittens"); - test.equal(error.details, "puppies"); -}); - -if (Meteor.isServer) { - Tinytest.add("livedata - version negotiation", function (test) { - var versionCheck = function (clientVersions, serverVersions, expected) { - test.equal( - DDPServer._calculateVersion(clientVersions, serverVersions), - expected); - }; - - versionCheck(["A", "B", "C"], ["A", "B", "C"], "A"); - versionCheck(["B", "C"], ["A", "B", "C"], "B"); - versionCheck(["A", "B", "C"], ["B", "C"], "B"); - versionCheck(["foo", "bar", "baz"], ["A", "B", "C"], "A"); - }); -} - -Tinytest.add("livedata - methods with colliding names", function (test) { - var x = Random.id(); - var m = {}; - m[x] = function () {}; - Meteor.methods(m); - - test.throws(function () { - Meteor.methods(m); - }); -}); - -Tinytest.add("livedata - non-function method", function (test) { - var x = Random.id(); - var m = {}; - m[x] = 'kitten'; - - test.throws(function () { - Meteor.methods(m); - }); -}); - -var echoTest = function (item) { - return function (test, expect) { - if (Meteor.isServer) { - test.equal(Meteor.call("echo", item), [item]); - test.equal(Meteor.call("echoOne", item), item); - } - if (Meteor.isClient) - test.equal(Meteor.call("echo", item), undefined); - - test.equal(Meteor.call("echo", item, expect(undefined, [item])), undefined); - test.equal(Meteor.call("echoOne", item, expect(undefined, item)), 
undefined); - }; -}; - -testAsyncMulti("livedata - basic method invocation", [ - // Unknown methods - function (test, expect) { - if (Meteor.isServer) { - // On server, with no callback, throws exception - try { - var ret = Meteor.call("unknown method"); - } catch (e) { - test.equal(e.error, 404); - var threw = true; - } - test.isTrue(threw); - test.equal(ret, undefined); - } - - if (Meteor.isClient) { - // On client, with no callback, just returns undefined - var ret = Meteor.call("unknown method"); - test.equal(ret, undefined); - } - - // On either, with a callback, calls the callback and does not throw - var ret = Meteor.call("unknown method", - expect(failure(test, 404, "Method 'unknown method' not found"))); - test.equal(ret, undefined); - }, - - function (test, expect) { - // make sure 'undefined' is preserved as such, instead of turning - // into null (JSON does not have 'undefined' so there is special - // code for this) - if (Meteor.isServer) - test.equal(Meteor.call("nothing"), undefined); - if (Meteor.isClient) - test.equal(Meteor.call("nothing"), undefined); - - test.equal(Meteor.call("nothing", expect(undefined, undefined)), undefined); - }, - - function (test, expect) { - if (Meteor.isServer) - test.equal(Meteor.call("echo"), []); - if (Meteor.isClient) - test.equal(Meteor.call("echo"), undefined); - - test.equal(Meteor.call("echo", expect(undefined, [])), undefined); - }, - - echoTest(new Date()), - echoTest({d: new Date(), s: "foobarbaz"}), - echoTest([new Date(), "foobarbaz"]), - echoTest(new Mongo.ObjectID()), - echoTest({o: new Mongo.ObjectID()}), - echoTest({$date: 30}), // literal - echoTest({$literal: {$date: 30}}), - echoTest(12), - echoTest(Infinity), - echoTest(-Infinity), - - function (test, expect) { - if (Meteor.isServer) - test.equal(Meteor.call("echo", 12, {x: 13}), [12, {x: 13}]); - if (Meteor.isClient) - test.equal(Meteor.call("echo", 12, {x: 13}), undefined); - - test.equal(Meteor.call("echo", 12, {x: 13}, - expect(undefined, [12, 
{x: 13}])), undefined); - }, - - // test that `wait: false` is respected - function (test, expect) { - if (Meteor.isClient) { - // For test isolation - var token = Random.id(); - Meteor.apply( - "delayedTrue", [token], {wait: false}, expect(function(err, res) { - test.equal(res, false); - })); - Meteor.apply("makeDelayedTrueImmediatelyReturnFalse", [token]); - } - }, - - // test that `wait: true` is respected - function(test, expect) { - if (Meteor.isClient) { - var token = Random.id(); - Meteor.apply( - "delayedTrue", [token], {wait: true}, expect(function(err, res) { - test.equal(res, true); - })); - Meteor.apply("makeDelayedTrueImmediatelyReturnFalse", [token]); - } - }, - - function (test, expect) { - // No callback - - if (Meteor.isServer) { - test.throws(function () { - Meteor.call("exception", "both"); - }); - test.throws(function () { - Meteor.call("exception", "server"); - }); - // No exception, because no code will run on the client - test.equal(Meteor.call("exception", "client"), undefined); - } - - if (Meteor.isClient) { - // The client exception is thrown away because it's in the - // stub. The server exception is throw away because we didn't - // give a callback. 
- test.equal(Meteor.call("exception", "both"), undefined); - test.equal(Meteor.call("exception", "server"), undefined); - test.equal(Meteor.call("exception", "client"), undefined); - - // If we pass throwStubExceptions then we *should* see thrown exceptions - // on the client - test.throws(function () { - Meteor.apply("exception", ["both"], {throwStubExceptions: true}); - }); - test.equal( - Meteor.apply("exception", ["server"], {throwStubExceptions: true}), - undefined); - test.throws(function () { - Meteor.apply("exception", ["client"], {throwStubExceptions: true}); - }); - } - - // With callback - - if (Meteor.isClient) { - test.equal( - Meteor.call("exception", "both", - expect(failure(test, 500, "Internal server error"))), - undefined); - test.equal( - Meteor.call("exception", "server", - expect(failure(test, 500, "Internal server error"))), - undefined); - test.equal(Meteor.call("exception", "client"), undefined); - } - - if (Meteor.isServer) { - test.equal( - Meteor.call("exception", "both", - expect(failure(test, "Test method throwing an exception"))), - undefined); - test.equal( - Meteor.call("exception", "server", - expect(failure(test, "Test method throwing an exception"))), - undefined); - test.equal(Meteor.call("exception", "client"), undefined); - } - }, - - function (test, expect) { - if (Meteor.isServer) { - var threw = false; - try { - Meteor.call("exception", "both", {intended: true}); - } catch (e) { - threw = true; - test.equal(e.error, 999); - test.equal(e.reason, "Client-visible test exception"); - } - test.isTrue(threw); - threw = false; - try { - Meteor.call("exception", "both", {intended: true, - throwThroughFuture: true}); - } catch (e) { - threw = true; - test.equal(e.error, 999); - test.equal(e.reason, "Client-visible test exception"); - } - test.isTrue(threw); - } - - if (Meteor.isClient) { - test.equal( - Meteor.call("exception", "both", {intended: true}, - expect(failure(test, 999, - "Client-visible test exception"))), - undefined); - 
test.equal( - Meteor.call("exception", "server", {intended: true}, - expect(failure(test, 999, - "Client-visible test exception"))), - undefined); - test.equal( - Meteor.call("exception", "server", {intended: true, - throwThroughFuture: true}, - expect(failure(test, 999, - "Client-visible test exception"))), - undefined); - } - } -]); - - - - -var checkBalances = function (test, a, b) { - var alice = Ledger.findOne({name: "alice", world: test.runId()}); - var bob = Ledger.findOne({name: "bob", world: test.runId()}); - test.equal(alice.balance, a); - test.equal(bob.balance, b); -}; - -// would be nice to have a database-aware test harness of some kind -- -// this is a big hack (and XXX pollutes the global test namespace) -testAsyncMulti("livedata - compound methods", [ - function (test, expect) { - if (Meteor.isClient) - Meteor.subscribe("ledger", test.runId(), expect()); - - Ledger.insert({name: "alice", balance: 100, world: test.runId()}, - expect(function () {})); - Ledger.insert({name: "bob", balance: 50, world: test.runId()}, - expect(function () {})); - }, - function (test, expect) { - Meteor.call('ledger/transfer', test.runId(), "alice", "bob", 10, - expect(function(err, result) { - test.equal(err, undefined); - test.equal(result, undefined); - checkBalances(test, 90, 60); - })); - checkBalances(test, 90, 60); - }, - function (test, expect) { - Meteor.call('ledger/transfer', test.runId(), "alice", "bob", 100, true, - expect(function (err, result) { - failure(test, 409)(err, result); - // Balances are reverted back to pre-stub values. - checkBalances(test, 90, 60); - })); - - if (Meteor.isClient) - // client can fool itself by cheating, but only until the sync - // finishes - checkBalances(test, -10, 160); - else - checkBalances(test, 90, 60); - } -]); - -// Replaces the Connection's `_livedata_data` method to push incoming -// messages on a given collection to an array. 
This can be used to -// verify that the right data is sent on the wire -// -// @param messages {Array} The array to which to append the messages -// @return {Function} A function to call to undo the eavesdropping -var eavesdropOnCollection = function(livedata_connection, - collection_name, messages) { - var old_livedata_data = _.bind( - livedata_connection._livedata_data, livedata_connection); - - // Kind of gross since all tests past this one will run with this - // hook set up. That's probably fine since we only check a specific - // collection but still... - // - // Should we consider having a separate connection per Tinytest or - // some similar scheme? - livedata_connection._livedata_data = function(msg) { - if (msg.collection && msg.collection === collection_name) { - messages.push(msg); - } - old_livedata_data(msg); - }; - - return function() { - livedata_connection._livedata_data = old_livedata_data; - }; -}; - -if (Meteor.isClient) { - testAsyncMulti("livedata - changing userid reruns subscriptions without flapping data on the wire", [ - function(test, expect) { - var messages = []; - var undoEavesdrop = eavesdropOnCollection( - Meteor.connection, "objectsWithUsers", messages); - - // A helper for testing incoming set and unset messages - // XXX should this be extracted as a general helper together with - // eavesdropOnCollection? 
- var expectMessages = function(expectedAddedMessageCount, - expectedRemovedMessageCount, - expectedNamesInCollection) { - var actualAddedMessageCount = 0; - var actualRemovedMessageCount = 0; - _.each(messages, function (msg) { - if (msg.msg === 'added') - ++actualAddedMessageCount; - else if (msg.msg === 'removed') - ++actualRemovedMessageCount; - else - test.fail({unexpected: JSON.stringify(msg)}); - }); - test.equal(actualAddedMessageCount, expectedAddedMessageCount); - test.equal(actualRemovedMessageCount, expectedRemovedMessageCount); - expectedNamesInCollection.sort(); - test.equal(_.pluck(objectsWithUsers.find({}, {sort: ['name']}).fetch(), - 'name'), - expectedNamesInCollection); - messages.length = 0; // clear messages without creating a new object - }; - - // make sure we're not already logged in. can happen if accounts - // tests fail oddly. - Meteor.apply("setUserId", [null], {wait: true}, expect(function () {})); - - Meteor.subscribe("objectsWithUsers", expect(function() { - expectMessages(1, 0, ["owned by none"]); - Meteor.apply("setUserId", ["1"], {wait: true}, afterFirstSetUserId); - })); - - var afterFirstSetUserId = expect(function() { - expectMessages(3, 1, [ - "owned by one - a", - "owned by one/two - a", - "owned by one/two - b"]); - Meteor.apply("setUserId", ["2"], {wait: true}, afterSecondSetUserId); - }); - - var afterSecondSetUserId = expect(function() { - expectMessages(2, 1, [ - "owned by one/two - a", - "owned by one/two - b", - "owned by two - a", - "owned by two - b"]); - Meteor.apply("setUserId", ["2"], {wait: true}, afterThirdSetUserId); - }); - - var afterThirdSetUserId = expect(function() { - // Nothing should have been sent since the results of the - // query are the same ("don't flap data on the wire") - expectMessages(0, 0, [ - "owned by one/two - a", - "owned by one/two - b", - "owned by two - a", - "owned by two - b"]); - undoEavesdrop(); - }); - }, function(test, expect) { - var key = Random.id(); - 
Meteor.subscribe("recordUserIdOnStop", key); - Meteor.apply("setUserId", ["100"], {wait: true}, expect(function () {})); - Meteor.apply("setUserId", ["101"], {wait: true}, expect(function () {})); - Meteor.call("userIdWhenStopped", key, expect(function (err, result) { - test.isFalse(err); - test.equal(result, "100"); - })); - // clean up - Meteor.apply("setUserId", [null], {wait: true}, expect(function () {})); - } - ]); -} - -Tinytest.add("livedata - setUserId error when called from server", function(test) { - if (Meteor.isServer) { - test.equal(errorThrownWhenCallingSetUserIdDirectlyOnServer.message, - "Can't call setUserId on a server initiated method call"); - } -}); - - -if (Meteor.isServer) { - var pubHandles = {}; -}; -Meteor.methods({ - "livedata/setup" : function (id) { - check(id, String); - if (Meteor.isServer) { - pubHandles[id] = {}; - Meteor.publish("pub1"+id, function () { - pubHandles[id].pub1 = this; - this.ready(); - }); - Meteor.publish("pub2"+id, function () { - pubHandles[id].pub2 = this; - this.ready(); - }); - - } - }, - "livedata/pub1go" : function (id) { - check(id, String); - if (Meteor.isServer) { - - pubHandles[id].pub1.added("MultiPubCollection" + id, "foo", {a: "aa"}); - return 1; - } - return 0; - }, - "livedata/pub2go" : function (id) { - check(id, String); - if (Meteor.isServer) { - pubHandles[id].pub2.added("MultiPubCollection" + id , "foo", {b: "bb"}); - return 2; - } - return 0; - } -}); - -if (Meteor.isClient) { - (function () { - var MultiPub; - var id = Random.id(); - testAsyncMulti("livedata - added from two different subs", [ - function (test, expect) { - Meteor.call('livedata/setup', id, expect(function () {})); - }, - function (test, expect) { - MultiPub = new Mongo.Collection("MultiPubCollection" + id); - var sub1 = Meteor.subscribe("pub1"+id, expect(function () {})); - var sub2 = Meteor.subscribe("pub2"+id, expect(function () {})); - }, - function (test, expect) { - Meteor.call("livedata/pub1go", id, expect(function 
(err, res) {test.equal(res, 1);})); - }, - function (test, expect) { - test.equal(MultiPub.findOne("foo"), {_id: "foo", a: "aa"}); - }, - function (test, expect) { - Meteor.call("livedata/pub2go", id, expect(function (err, res) {test.equal(res, 2);})); - }, - function (test, expect) { - test.equal(MultiPub.findOne("foo"), {_id: "foo", a: "aa", b: "bb"}); - } - ]); - })(); -}; - -if (Meteor.isClient) { - testAsyncMulti("livedata - overlapping universal subs", [ - function (test, expect) { - var coll = new Mongo.Collection("overlappingUniversalSubs"); - var token = Random.id(); - test.isFalse(coll.findOne(token)); - Meteor.call("testOverlappingSubs", token, expect(function (err) { - test.isFalse(err); - test.isTrue(coll.findOne(token)); - })); - } - ]); - - testAsyncMulti("livedata - runtime universal sub creation", [ - function (test, expect) { - var coll = new Mongo.Collection("runtimeSubCreation"); - var token = Random.id(); - test.isFalse(coll.findOne(token)); - Meteor.call("runtimeUniversalSubCreation", token, expect(function (err) { - test.isFalse(err); - test.isTrue(coll.findOne(token)); - })); - } - ]); - - testAsyncMulti("livedata - no setUserId after unblock", [ - function (test, expect) { - Meteor.call("setUserIdAfterUnblock", expect(function (err, result) { - test.isFalse(err); - test.isTrue(result); - })); - } - ]); - - testAsyncMulti("livedata - publisher errors with onError callback", (function () { - var conn, collName, coll; - var errorFromRerun; - var gotErrorFromStopper = false; - return [ - function (test, expect) { - // Use a separate connection so that we can safely check to see if - // conn._subscriptions is empty. 
- conn = new LivedataTest.Connection('/', - {reloadWithOutstanding: true}); - collName = Random.id(); - coll = new Mongo.Collection(collName, {connection: conn}); - - var testSubError = function (options) { - conn.subscribe("publisherErrors", collName, options, { - onReady: expect(), - onError: expect( - failure(test, - options.internalError ? 500 : 412, - options.internalError ? "Internal server error" - : "Explicit error")) - }); - }; - testSubError({throwInHandler: true}); - testSubError({throwInHandler: true, internalError: true}); - testSubError({errorInHandler: true}); - testSubError({errorInHandler: true, internalError: true}); - testSubError({errorLater: true}); - testSubError({errorLater: true, internalError: true}); - }, - function (test, expect) { - test.equal(coll.find().count(), 0); - test.equal(_.size(conn._subscriptions), 0); // white-box test - - conn.subscribe("publisherErrors", - collName, {throwWhenUserIdSet: true}, { - onReady: expect(), - onError: function (error) { - errorFromRerun = error; - } - }); - }, - function (test, expect) { - // Because the last subscription is ready, we should have a document. - test.equal(coll.find().count(), 1); - test.isFalse(errorFromRerun); - test.equal(_.size(conn._subscriptions), 1); // white-box test - conn.call('setUserId', 'bla', expect(function(){})); - }, - function (test, expect) { - // Now that we've re-run, we should have stopped the subscription, - // gotten a error, and lost the document. - test.equal(coll.find().count(), 0); - test.isTrue(errorFromRerun); - test.instanceOf(errorFromRerun, Meteor.Error); - test.equal(errorFromRerun.error, 412); - test.equal(errorFromRerun.reason, "Explicit error"); - test.equal(_.size(conn._subscriptions), 0); // white-box test - - conn.subscribe("publisherErrors", collName, {stopInHandler: true}, { - onError: function() { gotErrorFromStopper = true; } - }); - // Call a method. 
This method won't be processed until the publisher's - // function returns, so blocking on it being done ensures that we've - // gotten the removed/nosub/etc. - conn.call('nothing', expect(function(){})); - }, - function (test, expect) { - test.equal(coll.find().count(), 0); - // sub.stop does NOT call onError. - test.isFalse(gotErrorFromStopper); - test.equal(_.size(conn._subscriptions), 0); // white-box test - conn._stream.disconnect({_permanent: true}); - } - ];})()); - - testAsyncMulti("livedata - publisher errors with onStop callback", (function () { - var conn, collName, coll; - var errorFromRerun; - var gotErrorFromStopper = false; - return [ - function (test, expect) { - // Use a separate connection so that we can safely check to see if - // conn._subscriptions is empty. - conn = new LivedataTest.Connection('/', - {reloadWithOutstanding: true}); - collName = Random.id(); - coll = new Mongo.Collection(collName, {connection: conn}); - - var testSubError = function (options) { - conn.subscribe("publisherErrors", collName, options, { - onReady: expect(), - onStop: expect( - failureOnStopped(test, - options.internalError ? 500 : 412, - options.internalError ? "Internal server error" - : "Explicit error")) - }); - }; - testSubError({throwInHandler: true}); - testSubError({throwInHandler: true, internalError: true}); - testSubError({errorInHandler: true}); - testSubError({errorInHandler: true, internalError: true}); - testSubError({errorLater: true}); - testSubError({errorLater: true, internalError: true}); - }, - function (test, expect) { - test.equal(coll.find().count(), 0); - test.equal(_.size(conn._subscriptions), 0); // white-box test - - conn.subscribe("publisherErrors", - collName, {throwWhenUserIdSet: true}, { - onReady: expect(), - onStop: function (error) { - errorFromRerun = error; - } - }); - }, - function (test, expect) { - // Because the last subscription is ready, we should have a document. 
- test.equal(coll.find().count(), 1); - test.isFalse(errorFromRerun); - test.equal(_.size(conn._subscriptions), 1); // white-box test - conn.call('setUserId', 'bla', expect(function(){})); - }, - function (test, expect) { - // Now that we've re-run, we should have stopped the subscription, - // gotten a error, and lost the document. - test.equal(coll.find().count(), 0); - test.isTrue(errorFromRerun); - test.instanceOf(errorFromRerun, Meteor.Error); - test.equal(errorFromRerun.error, 412); - test.equal(errorFromRerun.reason, "Explicit error"); - test.equal(_.size(conn._subscriptions), 0); // white-box test - - conn.subscribe("publisherErrors", collName, {stopInHandler: true}, { - onStop: function(error) { - if (error) { - gotErrorFromStopper = true; - } - } - }); - // Call a method. This method won't be processed until the publisher's - // function returns, so blocking on it being done ensures that we've - // gotten the removed/nosub/etc. - conn.call('nothing', expect(function(){})); - }, - function (test, expect) { - test.equal(coll.find().count(), 0); - // sub.stop does NOT call onError. - test.isFalse(gotErrorFromStopper); - test.equal(_.size(conn._subscriptions), 0); // white-box test - conn._stream.disconnect({_permanent: true}); - } - ];})()); - - testAsyncMulti("livedata - publish multiple cursors", [ - function (test, expect) { - var sub = Meteor.subscribe("multiPublish", {normal: 1}, { - onReady: expect(function () { - test.isTrue(sub.ready()); - test.equal(One.find().count(), 2); - test.equal(Two.find().count(), 3); - }), - onError: failure() - }); - }, - function (test, expect) { - Meteor.subscribe("multiPublish", {dup: 1}, { - onReady: failure(), - onError: expect(failure(test, 500, "Internal server error")) - }); - }, - function (test, expect) { - Meteor.subscribe("multiPublish", {notCursor: 1}, { - onReady: failure(), - onError: expect(failure(test, 500, "Internal server error")) - }); - } - ]); -} - -var selfUrl = Meteor.isServer - ? 
Meteor.absoluteUrl() : Meteor._relativeToSiteRootUrl('/'); - -if (Meteor.isServer) { - Meteor.methods({ - "s2s": function (arg) { - check(arg, String); - return "s2s " + arg; - } - }); -} -(function () { - testAsyncMulti("livedata - connect works from both client and server", [ - function (test, expect) { - var self = this; - self.conn = DDP.connect(selfUrl); - pollUntil(expect, function () { - return self.conn.status().connected; - }, 10000); - }, - - function (test, expect) { - var self = this; - if (self.conn.status().connected) { - self.conn.call('s2s', 'foo', expect(function (err, res) { - if (err) - throw err; - test.equal(res, "s2s foo"); - })); - } - } - ]); -})(); - -if (Meteor.isServer) { - (function () { - testAsyncMulti("livedata - method call on server blocks in a fiber way", [ - function (test, expect) { - var self = this; - self.conn = DDP.connect(selfUrl); - pollUntil(expect, function () { - return self.conn.status().connected; - }, 10000); - }, - - function (test, expect) { - var self = this; - if (self.conn.status().connected) { - test.equal(self.conn.call('s2s', 'foo'), "s2s foo"); - } - } - ]); - })(); -} - -(function () { - testAsyncMulti("livedata - connect fails to unknown place", [ - function (test, expect) { - var self = this; - self.conn = DDP.connect("example.com", {_dontPrintErrors: true}); - Meteor.setTimeout(expect(function () { - test.isFalse(self.conn.status().connected, "Not connected"); - self.conn.close(); - }), 500); - } - ]); -})(); - -if (Meteor.isServer) { - Meteor.publish("publisherCloning", function () { - var self = this; - var fields = {x: {y: 42}}; - self.added("publisherCloning", "a", fields); - fields.x.y = 43; - self.changed("publisherCloning", "a", fields); - self.ready(); - }); -} else { - var PublisherCloningCollection = new Mongo.Collection("publisherCloning"); - testAsyncMulti("livedata - publish callbacks clone", [ - function (test, expect) { - Meteor.subscribe("publisherCloning", {normal: 1}, { - onReady: 
expect(function () { - test.equal(PublisherCloningCollection.findOne(), { - _id: "a", - x: {y: 43}}); - }), - onError: failure() - }); - } - ]); -} - -testAsyncMulti("livedata - result by value", [ - function (test, expect) { - var self = this; - self.testId = Random.id(); - Meteor.call('getArray', self.testId, expect(function (error, firstResult) { - test.isFalse(error); - test.isTrue(firstResult); - self.firstResult = firstResult; - })); - }, function (test, expect) { - var self = this; - Meteor.call('pushToArray', self.testId, 'xxx', expect(function (error) { - test.isFalse(error); - })); - }, function (test, expect) { - var self = this; - Meteor.call('getArray', self.testId, expect(function (error, secondResult) { - test.isFalse(error); - test.equal(self.firstResult.length + 1, secondResult.length); - })); - } -]); - -// XXX some things to test in greater detail: -// staying in simulation mode -// time warp -// serialization / beginAsync(true) / beginAsync(false) -// malformed messages (need raw wire access) -// method completion/satisfaction -// subscriptions (multiple APIs, including autorun?) -// subscription completion -// subscription attribute shadowing -// server method calling methods on other server (eg, should simulate) -// subscriptions and methods being idempotent -// reconnection -// reconnection not resulting in method re-execution -// reconnection tolerating all kinds of lost messages (including data) -// [probably lots more] diff --git a/packages/ddp-client/namespace.js b/packages/ddp-client/namespace.js deleted file mode 100644 index 67d0bdb26e..0000000000 --- a/packages/ddp-client/namespace.js +++ /dev/null @@ -1,6 +0,0 @@ -/** - * @namespace DDP - * @summary Namespace for DDP-related methods/classes. 
- */ -export const DDP = {}; -export const LivedataTest = {}; diff --git a/packages/ddp-client/package.js b/packages/ddp-client/package.js index 49a5987a0d..766e1b693d 100644 --- a/packages/ddp-client/package.js +++ b/packages/ddp-client/package.js @@ -1,52 +1,47 @@ Package.describe({ summary: "Meteor's latency-compensated distributed data client", - version: '2.2.0', + version: '2.3.3', documentation: null }); Npm.depends({ - "faye-websocket": "0.11.1", - "lolex": "1.4.0", - "permessage-deflate": "0.1.6" + lolex: '2.3.1' }); -Package.onUse(function (api) { - api.use(['check', 'random', 'ejson', 'underscore', 'tracker', - 'retry', 'id-map', 'ecmascript'], - ['client', 'server']); - - api.use('callback-hook', ['client', 'server']); - - // common functionality - api.use('ddp-common', ['client', 'server']); - - api.use('reload', 'client', {weak: true}); - - // we depend on _diffObjects, _applyChanges, - api.use('diff-sequence', ['client', 'server']); - // _idParse, _idStringify. - api.use('mongo-id', ['client', 'server']); - - api.addFiles(['sockjs-0.3.4.js', 'stream_client_sockjs.js'], 'client'); - api.addFiles('stream_client_nodejs.js', 'server'); - api.addFiles('stream_client_common.js', ['client', 'server']); - - api.addFiles('livedata_common.js', ['client', 'server']); - api.addFiles('random_stream.js', ['client', 'server']); - - api.addFiles('livedata_connection.js', ['client', 'server']); - - api.addFiles('client_convenience.js', 'client'); - - api.mainModule("namespace.js"); - api.export('DDP'); -}); - -Package.onTest(function (api) { - api.use('livedata', ['client', 'server']); - api.use('mongo', ['client', 'server']); - api.use('test-helpers', ['client', 'server']); +Package.onUse((api) => { api.use([ + 'check', + 'random', + 'ejson', + 'tracker', + 'retry', + 'id-map', + 'ecmascript', + 'callback-hook', + 'ddp-common', + 'reload', + 'socket-stream-client', + + // we depend on _diffObjects, _applyChanges, + 'diff-sequence', + + // _idParse, _idStringify. 
+ 'mongo-id' + ], ['client', 'server']); + + api.use('reload', 'client', { weak: true }); + + // For backcompat where things use Package.ddp.DDP, etc + api.export('DDP'); + api.mainModule('client/client.js', 'client'); + api.mainModule('server/server.js', 'server'); +}); + +Package.onTest((api) => { + api.use([ + 'livedata', + 'mongo', + 'test-helpers', 'ecmascript', 'underscore', 'tinytest', @@ -55,17 +50,14 @@ Package.onTest(function (api) { 'reactive-var', 'mongo-id', 'diff-sequence', - 'ejson' + 'ejson', + 'ddp-common', + 'check' ]); - api.addFiles('stub_stream.js'); - api.addFiles('livedata_connection_tests.js', ['client', 'server']); - api.addFiles('livedata_tests.js', ['client', 'server']); - api.addFiles('livedata_test_service.js', ['client', 'server']); - api.addFiles('random_stream_tests.js', ['client', 'server']); - - api.use('http', 'client'); - api.addFiles(['stream_tests.js'], 'client'); - api.addFiles('stream_client_tests.js', 'server'); - api.use('check', ['client', 'server']); + api.addFiles('test/stub_stream.js'); + api.addFiles('test/livedata_connection_tests.js'); + api.addFiles('test/livedata_tests.js'); + api.addFiles('test/livedata_test_service.js'); + api.addFiles('test/random_stream_tests.js'); }); diff --git a/packages/ddp-client/random_stream.js b/packages/ddp-client/random_stream.js deleted file mode 100644 index 269cb7a9b0..0000000000 --- a/packages/ddp-client/random_stream.js +++ /dev/null @@ -1,11 +0,0 @@ -import { DDP } from "./namespace.js"; - -// Returns the named sequence of pseudo-random values. -// The scope will be DDP._CurrentMethodInvocation.get(), so the stream will produce -// consistent values for method calls on the client and server. 
-DDP.randomStream = function (name) { - var scope = DDP._CurrentMethodInvocation.get(); - return DDPCommon.RandomStream.get(scope, name); -}; - - diff --git a/packages/ddp-client/server/server.js b/packages/ddp-client/server/server.js new file mode 100644 index 0000000000..8566aba9c2 --- /dev/null +++ b/packages/ddp-client/server/server.js @@ -0,0 +1 @@ +export { DDP } from '../common/namespace.js'; diff --git a/packages/ddp-client/stream_client_common.js b/packages/ddp-client/stream_client_common.js deleted file mode 100644 index 14195f708f..0000000000 --- a/packages/ddp-client/stream_client_common.js +++ /dev/null @@ -1,268 +0,0 @@ -import { DDP, LivedataTest } from "./namespace.js"; - -// XXX from Underscore.String (http://epeli.github.com/underscore.string/) -var startsWith = function(str, starts) { - return str.length >= starts.length && - str.substring(0, starts.length) === starts; -}; -var endsWith = function(str, ends) { - return str.length >= ends.length && - str.substring(str.length - ends.length) === ends; -}; - -// @param url {String} URL to Meteor app, eg: -// "/" or "madewith.meteor.com" or "https://foo.meteor.com" -// or "ddp+sockjs://ddp--****-foo.meteor.com/sockjs" -// @returns {String} URL to the endpoint with the specific scheme and subPath, e.g. -// for scheme "http" and subPath "sockjs" -// "http://subdomain.meteor.com/sockjs" or "/sockjs" -// or "https://ddp--1234-foo.meteor.com/sockjs" -var translateUrl = function(url, newSchemeBase, subPath) { - if (! newSchemeBase) { - newSchemeBase = "http"; - } - - var ddpUrlMatch = url.match(/^ddp(i?)\+sockjs:\/\//); - var httpUrlMatch = url.match(/^http(s?):\/\//); - var newScheme; - if (ddpUrlMatch) { - // Remove scheme and split off the host. - var urlAfterDDP = url.substr(ddpUrlMatch[0].length); - newScheme = ddpUrlMatch[1] === "i" ? newSchemeBase : newSchemeBase + "s"; - var slashPos = urlAfterDDP.indexOf('/'); - var host = - slashPos === -1 ? 
urlAfterDDP : urlAfterDDP.substr(0, slashPos); - var rest = slashPos === -1 ? '' : urlAfterDDP.substr(slashPos); - - // In the host (ONLY!), change '*' characters into random digits. This - // allows different stream connections to connect to different hostnames - // and avoid browser per-hostname connection limits. - host = host.replace(/\*/g, function () { - return Math.floor(Random.fraction()*10); - }); - - return newScheme + '://' + host + rest; - } else if (httpUrlMatch) { - newScheme = !httpUrlMatch[1] ? newSchemeBase : newSchemeBase + "s"; - var urlAfterHttp = url.substr(httpUrlMatch[0].length); - url = newScheme + "://" + urlAfterHttp; - } - - // Prefix FQDNs but not relative URLs - if (url.indexOf("://") === -1 && !startsWith(url, "/")) { - url = newSchemeBase + "://" + url; - } - - // XXX This is not what we should be doing: if I have a site - // deployed at "/foo", then DDP.connect("/") should actually connect - // to "/", not to "/foo". "/" is an absolute path. (Contrast: if - // deployed at "/foo", it would be reasonable for DDP.connect("bar") - // to connect to "/foo/bar"). - // - // We should make this properly honor absolute paths rather than - // forcing the path to be relative to the site root. Simultaneously, - // we should set DDP_DEFAULT_CONNECTION_URL to include the site - // root. See also client_convenience.js #RationalizingRelativeDDPURLs - url = Meteor._relativeToSiteRootUrl(url); - - if (endsWith(url, "/")) - return url + subPath; - else - return url + "/" + subPath; -}; - -toSockjsUrl = function (url) { - return translateUrl(url, "http", "sockjs"); -}; - -toWebsocketUrl = function (url) { - var ret = translateUrl(url, "ws", "websocket"); - return ret; -}; - -LivedataTest.toSockjsUrl = toSockjsUrl; - - -_.extend(LivedataTest.ClientStream.prototype, { - - // Register for callbacks. 
- on: function (name, callback) { - var self = this; - - if (name !== 'message' && name !== 'reset' && name !== 'disconnect') - throw new Error("unknown event type: " + name); - - if (!self.eventCallbacks[name]) - self.eventCallbacks[name] = []; - self.eventCallbacks[name].push(callback); - }, - - - _initCommon: function (options) { - var self = this; - options = options || {}; - - //// Constants - - // how long to wait until we declare the connection attempt - // failed. - self.CONNECT_TIMEOUT = options.connectTimeoutMs || 10000; - - self.eventCallbacks = {}; // name -> [callback] - - self._forcedToDisconnect = false; - - //// Reactive status - self.currentStatus = { - status: "connecting", - connected: false, - retryCount: 0 - }; - - - self.statusListeners = typeof Tracker !== 'undefined' && new Tracker.Dependency; - self.statusChanged = function () { - if (self.statusListeners) - self.statusListeners.changed(); - }; - - //// Retry logic - self._retry = new Retry; - self.connectionTimer = null; - - }, - - // Trigger a reconnect. - reconnect: function (options) { - var self = this; - options = options || {}; - - if (options.url) { - self._changeUrl(options.url); - } - - if (options._sockjsOptions) { - self.options._sockjsOptions = options._sockjsOptions; - } - - if (self.currentStatus.connected) { - if (options._force || options.url) { - // force reconnect. - self._lostConnection(new DDP.ForcedReconnectError); - } // else, noop. - return; - } - - // if we're mid-connection, stop it. - if (self.currentStatus.status === "connecting") { - // Pretend it's a clean close. - self._lostConnection(); - } - - self._retry.clear(); - self.currentStatus.retryCount -= 1; // don't count manual retries - self._retryNow(); - }, - - disconnect: function (options) { - var self = this; - options = options || {}; - - // Failed is permanent. If we're failed, don't let people go back - // online by calling 'disconnect' then 'reconnect'. 
- if (self._forcedToDisconnect) - return; - - // If _permanent is set, permanently disconnect a stream. Once a stream - // is forced to disconnect, it can never reconnect. This is for - // error cases such as ddp version mismatch, where trying again - // won't fix the problem. - if (options._permanent) { - self._forcedToDisconnect = true; - } - - self._cleanup(); - self._retry.clear(); - - self.currentStatus = { - status: (options._permanent ? "failed" : "offline"), - connected: false, - retryCount: 0 - }; - - if (options._permanent && options._error) - self.currentStatus.reason = options._error; - - self.statusChanged(); - }, - - // maybeError is set unless it's a clean protocol-level close. - _lostConnection: function (maybeError) { - var self = this; - - self._cleanup(maybeError); - self._retryLater(maybeError); // sets status. no need to do it here. - }, - - // fired when we detect that we've gone online. try to reconnect - // immediately. - _online: function () { - // if we've requested to be offline by disconnecting, don't reconnect. 
- if (this.currentStatus.status != "offline") - this.reconnect(); - }, - - _retryLater: function (maybeError) { - var self = this; - - var timeout = 0; - if (self.options.retry || - (maybeError && maybeError.errorType === "DDP.ForcedReconnectError")) { - timeout = self._retry.retryLater( - self.currentStatus.retryCount, - _.bind(self._retryNow, self) - ); - self.currentStatus.status = "waiting"; - self.currentStatus.retryTime = (new Date()).getTime() + timeout; - } else { - self.currentStatus.status = "failed"; - delete self.currentStatus.retryTime; - } - - self.currentStatus.connected = false; - self.statusChanged(); - }, - - _retryNow: function () { - var self = this; - - if (self._forcedToDisconnect) - return; - - self.currentStatus.retryCount += 1; - self.currentStatus.status = "connecting"; - self.currentStatus.connected = false; - delete self.currentStatus.retryTime; - self.statusChanged(); - - self._launchConnection(); - }, - - - // Get current status. Reactive. - status: function () { - var self = this; - if (self.statusListeners) - self.statusListeners.depend(); - return self.currentStatus; - } -}); - -DDP.ConnectionError = Meteor.makeErrorType( - "DDP.ConnectionError", function (message) { - var self = this; - self.message = message; -}); - -DDP.ForcedReconnectError = Meteor.makeErrorType( - "DDP.ForcedReconnectError", function () {}); diff --git a/packages/ddp-client/stream_client_sockjs.js b/packages/ddp-client/stream_client_sockjs.js deleted file mode 100644 index 3170654547..0000000000 --- a/packages/ddp-client/stream_client_sockjs.js +++ /dev/null @@ -1,196 +0,0 @@ -import { DDP, LivedataTest } from "./namespace.js"; - -// @param url {String} URL to Meteor app -// "http://subdomain.meteor.com/" or "/" or -// "ddp+sockjs://foo-**.meteor.com/sockjs" -LivedataTest.ClientStream = function (url, options) { - var self = this; - self.options = _.extend({ - retry: true - }, options); - self._initCommon(self.options); - - //// Constants - - - // how long 
between hearing heartbeat from the server until we declare - // the connection dead. heartbeats come every 45s (stream_server.js) - // - // NOTE: this is a older timeout mechanism. We now send heartbeats at - // the DDP level (https://github.com/meteor/meteor/pull/1865), and - // expect those timeouts to kill a non-responsive connection before - // this timeout fires. This is kept around for compatibility (when - // talking to a server that doesn't support DDP heartbeats) and can be - // removed later. - self.HEARTBEAT_TIMEOUT = 100*1000; - - self.rawUrl = url; - self.socket = null; - - self.heartbeatTimer = null; - - // Listen to global 'online' event if we are running in a browser. - // (IE8 does not support addEventListener) - if (typeof window !== 'undefined' && window.addEventListener) - window.addEventListener("online", _.bind(self._online, self), - false /* useCapture. make FF3.6 happy. */); - - //// Kickoff! - self._launchConnection(); -}; - -_.extend(LivedataTest.ClientStream.prototype, { - - // data is a utf8 string. Data sent while not connected is dropped on - // the floor, and it is up the user of this API to retransmit lost - // messages on 'reset' - send: function (data) { - var self = this; - if (self.currentStatus.connected) { - self.socket.send(data); - } - }, - - // Changes where this connection points - _changeUrl: function (url) { - var self = this; - self.rawUrl = url; - }, - - _connected: function () { - var self = this; - - if (self.connectionTimer) { - clearTimeout(self.connectionTimer); - self.connectionTimer = null; - } - - if (self.currentStatus.connected) { - // already connected. do nothing. this probably shouldn't happen. - return; - } - - // update status - self.currentStatus.status = "connected"; - self.currentStatus.connected = true; - self.currentStatus.retryCount = 0; - self.statusChanged(); - - // fire resets. This must come after status change so that clients - // can call send from within a reset callback. 
- _.each(self.eventCallbacks.reset, function (callback) { callback(); }); - - }, - - _cleanup: function (maybeError) { - var self = this; - - self._clearConnectionAndHeartbeatTimers(); - if (self.socket) { - self.socket.onmessage = self.socket.onclose - = self.socket.onerror = self.socket.onheartbeat = function () {}; - self.socket.close(); - self.socket = null; - } - - _.each(self.eventCallbacks.disconnect, function (callback) { - callback(maybeError); - }); - }, - - _clearConnectionAndHeartbeatTimers: function () { - var self = this; - if (self.connectionTimer) { - clearTimeout(self.connectionTimer); - self.connectionTimer = null; - } - if (self.heartbeatTimer) { - clearTimeout(self.heartbeatTimer); - self.heartbeatTimer = null; - } - }, - - _heartbeat_timeout: function () { - var self = this; - Meteor._debug("Connection timeout. No sockjs heartbeat received."); - self._lostConnection(new DDP.ConnectionError("Heartbeat timed out")); - }, - - _heartbeat_received: function () { - var self = this; - // If we've already permanently shut down this stream, the timeout is - // already cleared, and we don't need to set it again. - if (self._forcedToDisconnect) - return; - if (self.heartbeatTimer) - clearTimeout(self.heartbeatTimer); - self.heartbeatTimer = setTimeout( - _.bind(self._heartbeat_timeout, self), - self.HEARTBEAT_TIMEOUT); - }, - - _sockjsProtocolsWhitelist: function () { - // only allow polling protocols. no streaming. streaming - // makes safari spin. - var protocolsWhitelist = [ - 'xdr-polling', 'xhr-polling', 'iframe-xhr-polling', 'jsonp-polling']; - - // iOS 4 and 5 and below crash when using websockets over certain - // proxies. this seems to be resolved with iOS 6. eg - // https://github.com/LearnBoost/socket.io/issues/193#issuecomment-7308865. 
- // - // iOS <4 doesn't support websockets at all so sockjs will just - // immediately fall back to http - var noWebsockets = navigator && - /iPhone|iPad|iPod/.test(navigator.userAgent) && - /OS 4_|OS 5_/.test(navigator.userAgent); - - if (!noWebsockets) - protocolsWhitelist = ['websocket'].concat(protocolsWhitelist); - - return protocolsWhitelist; - }, - - _launchConnection: function () { - var self = this; - self._cleanup(); // cleanup the old socket, if there was one. - - var options = _.extend({ - protocols_whitelist:self._sockjsProtocolsWhitelist() - }, self.options._sockjsOptions); - - // Convert raw URL to SockJS URL each time we open a connection, so that we - // can connect to random hostnames and get around browser per-host - // connection limits. - self.socket = new SockJS(toSockjsUrl(self.rawUrl), undefined, options); - self.socket.onopen = function (data) { - self._connected(); - }; - self.socket.onmessage = function (data) { - self._heartbeat_received(); - - if (self.currentStatus.connected) - _.each(self.eventCallbacks.message, function (callback) { - callback(data.data); - }); - }; - self.socket.onclose = function () { - self._lostConnection(); - }; - self.socket.onerror = function () { - // XXX is this ever called? 
- Meteor._debug("stream error", _.toArray(arguments), (new Date()).toDateString()); - }; - - self.socket.onheartbeat = function () { - self._heartbeat_received(); - }; - - if (self.connectionTimer) - clearTimeout(self.connectionTimer); - self.connectionTimer = setTimeout(function () { - self._lostConnection( - new DDP.ConnectionError("DDP connection timed out")); - }, self.CONNECT_TIMEOUT); - } -}); diff --git a/packages/ddp-client/stream_client_tests.js b/packages/ddp-client/stream_client_tests.js deleted file mode 100644 index 24a184f1d3..0000000000 --- a/packages/ddp-client/stream_client_tests.js +++ /dev/null @@ -1,26 +0,0 @@ -import { LivedataTest } from "./namespace.js"; - -var Fiber = Npm.require('fibers'); - -testAsyncMulti("stream client - callbacks run in a fiber", [ - function (test, expect) { - var stream = new LivedataTest.ClientStream(Meteor.absoluteUrl()); - - var messageFired = false; - var resetFired = false; - - stream.on('message', expect(function () { - test.isTrue(Fiber.current); - if (resetFired) - stream.disconnect(); - messageFired = true; - })); - - stream.on('reset', expect(function () { - test.isTrue(Fiber.current); - if (messageFired) - stream.disconnect(); - resetFired = true; - })); - } -]); diff --git a/packages/ddp-client/stream_tests.js b/packages/ddp-client/stream_tests.js deleted file mode 100644 index 8afc63e85a..0000000000 --- a/packages/ddp-client/stream_tests.js +++ /dev/null @@ -1,178 +0,0 @@ -import { LivedataTest } from "./namespace.js"; - -Tinytest.add("stream - status", function (test) { - // Very basic test. Just see that it runs and returns something. Not a - // lot of coverage, but enough that it would have caught a recent bug. - var status = Meteor.status(); - test.equal(typeof status, "object"); - test.isTrue(status.status); - // Make sure backward-compatiblity names are defined. 
- test.equal(status.retryCount, status.retryCount); - test.equal(status.retryTime, status.retryTime); -}); - -testAsyncMulti("stream - reconnect", [ - function (test, expect) { - var callback = _.once(expect(function() { - var status; - status = Meteor.status(); - test.equal(status.status, "connected"); - - Meteor.reconnect(); - status = Meteor.status(); - test.equal(status.status, "connected"); - - Meteor.reconnect({_force: true}); - status = Meteor.status(); - test.equal(status.status, "waiting"); - })); - - if (Meteor.status().status !== "connected") - Meteor.connection._stream.on('reset', callback); - else - callback(); - } -]); - -// Disconnecting and reconnecting transitions through the correct statuses. -testAsyncMulti("stream - basic disconnect", [ - function (test, expect) { - var history = []; - var stream = new LivedataTest.ClientStream("/"); - var onTestComplete = expect(function (unexpectedHistory) { - stream.disconnect(); - if (unexpectedHistory) { - test.fail("Unexpected status history: " + - JSON.stringify(unexpectedHistory)); - } - }); - - Tracker.autorun(function() { - var status = stream.status(); - - if (_.last(history) !== status.status) { - history.push(status.status); - - if (_.isEqual(history, ["connecting"])) { - // do nothing; wait for the next state - } else if (_.isEqual(history, ["connecting", "connected"])) { - stream.disconnect(); - } else if (_.isEqual(history, ["connecting", "connected", "offline"])) { - stream.reconnect(); - } else if (_.isEqual(history, ["connecting", "connected", "offline", - "connecting"])) { - // do nothing; wait for the next state - } else if (_.isEqual(history, ["connecting", "connected", "offline", - "connecting", "connected"])) { - onTestComplete(); - } else { - onTestComplete(history); - } - } - }); - } -]); - -// Remain offline if the online event is received while offline. 
-testAsyncMulti("stream - disconnect remains offline", [ - function (test, expect) { - var history = []; - var stream = new LivedataTest.ClientStream("/"); - var onTestComplete = expect(function (unexpectedHistory) { - stream.disconnect(); - if (unexpectedHistory) { - test.fail("Unexpected status history: " + - JSON.stringify(unexpectedHistory)); - } - }); - - Tracker.autorun(function() { - var status = stream.status(); - - if (_.last(history) !== status.status) { - history.push(status.status); - - if (_.isEqual(history, ["connecting"])) { - // do nothing; wait for the next status - } else if (_.isEqual(history, ["connecting", "connected"])) { - stream.disconnect(); - } else if (_.isEqual(history, ["connecting", "connected", "offline"])) { - stream._online(); - test.isTrue(status.status === "offline"); - onTestComplete(); - } else { - onTestComplete(history); - } - } - }); - } -]); - -Tinytest.add("stream - sockjs urls are computed correctly", function(test) { - var testHasSockjsUrl = function(raw, expectedSockjsUrl) { - var actual = LivedataTest.toSockjsUrl(raw); - if (expectedSockjsUrl instanceof RegExp) - test.isTrue(actual.match(expectedSockjsUrl), actual); - else - test.equal(actual, expectedSockjsUrl); - }; - - testHasSockjsUrl("http://subdomain.meteor.com/", - "http://subdomain.meteor.com/sockjs"); - testHasSockjsUrl("http://subdomain.meteor.com", - "http://subdomain.meteor.com/sockjs"); - testHasSockjsUrl("subdomain.meteor.com/", - "http://subdomain.meteor.com/sockjs"); - testHasSockjsUrl("subdomain.meteor.com", - "http://subdomain.meteor.com/sockjs"); - testHasSockjsUrl("/", Meteor._relativeToSiteRootUrl("/sockjs")); - - testHasSockjsUrl("http://localhost:3000/", "http://localhost:3000/sockjs"); - testHasSockjsUrl("http://localhost:3000", "http://localhost:3000/sockjs"); - testHasSockjsUrl("localhost:3000", "http://localhost:3000/sockjs"); - - testHasSockjsUrl("https://subdomain.meteor.com/", - "https://subdomain.meteor.com/sockjs"); - 
testHasSockjsUrl("https://subdomain.meteor.com", - "https://subdomain.meteor.com/sockjs"); - - testHasSockjsUrl("ddp+sockjs://ddp--****-foo.meteor.com/sockjs", - /^https:\/\/ddp--\d\d\d\d-foo\.meteor\.com\/sockjs$/); - testHasSockjsUrl("ddpi+sockjs://ddp--****-foo.meteor.com/sockjs", - /^http:\/\/ddp--\d\d\d\d-foo\.meteor\.com\/sockjs$/); -}); - -testAsyncMulti("stream - /websocket is a websocket endpoint", [ - function(test, expect) { - // - // Verify that /websocket and /websocket/ don't return the main page - // - _.each(['/websocket', '/websocket/'], function(path) { - HTTP.get(Meteor._relativeToSiteRootUrl(path), expect(function(error, result) { - test.isNotNull(error); - test.equal('Not a valid websocket request', result.content); - })); - }); - - // - // For sanity, also verify that /websockets and /websockets/ return - // the main page - // - - // Somewhat contorted but we can't call nested expects (XXX why?) - var pageContent; - var wrappedCallback = expect(function(error, result) { - test.isNull(error); - test.equal(pageContent, result.content); - }); - - HTTP.get(Meteor._relativeToSiteRootUrl('/'), expect(function(error, result) { - test.isNull(error); - pageContent = result.content; - - _.each(['/websockets', '/websockets/'], function(path) { - HTTP.get(Meteor._relativeToSiteRootUrl(path), wrappedCallback); - }); - })); - } -]); diff --git a/packages/ddp-client/test/livedata_connection_tests.js b/packages/ddp-client/test/livedata_connection_tests.js new file mode 100644 index 0000000000..b5cc17deae --- /dev/null +++ b/packages/ddp-client/test/livedata_connection_tests.js @@ -0,0 +1,2491 @@ +import lolex from 'lolex'; +import { DDP } from '../common/namespace.js'; +import { Connection } from '../common/livedata_connection.js'; + +var newConnection = function(stream, options) { + // Some of these tests leave outstanding methods with no result yet + // returned. This should not block us from re-running tests when sources + // change. 
+ return new Connection( + stream, + _.extend( + { + reloadWithOutstanding: true, + bufferedWritesInterval: 0 + }, + options + ) + ); +}; + +var makeConnectMessage = function(session) { + var msg = { + msg: 'connect', + version: DDPCommon.SUPPORTED_DDP_VERSIONS[0], + support: DDPCommon.SUPPORTED_DDP_VERSIONS + }; + + if (session) msg.session = session; + return msg; +}; + +// Tests that stream got a message that matches expected. +// Expected is normally an object, and allows a wildcard value of '*', +// which will then match any value. +// Returns the message (parsed as a JSON object if expected is an object); +// which is particularly handy if you want to extract a value that was +// matched as a wildcard. +var testGotMessage = function(test, stream, expected) { + if (stream.sent.length === 0) { + test.fail({ error: 'no message received', expected: expected }); + return undefined; + } + + var got = stream.sent.shift(); + + if (typeof got === 'string' && typeof expected === 'object') + got = JSON.parse(got); + + // An expected value of '*' matches any value, and the matching value (or + // array of matching values, if there are multiple) is returned from this + // function. + if (typeof expected === 'object') { + var keysWithStarValues = []; + _.each(expected, function(v, k) { + if (v === '*') keysWithStarValues.push(k); + }); + _.each(keysWithStarValues, function(k) { + expected[k] = got[k]; + }); + } + + test.equal(got, expected); + return got; +}; + +var startAndConnect = function(test, stream) { + stream.reset(); // initial connection start. + + testGotMessage(test, stream, makeConnectMessage()); + test.length(stream.sent, 0); + + stream.receive({ msg: 'connected', session: SESSION_ID }); + test.length(stream.sent, 0); +}; + +var SESSION_ID = '17'; + +Tinytest.add('livedata stub - receive data', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + // data comes in for unknown collection. 
+ var coll_name = Random.id(); + stream.receive({ + msg: 'added', + collection: coll_name, + id: '1234', + fields: { a: 1 } + }); + // break throught the black box and test internal state + test.length(conn._updatesForUnknownStores[coll_name], 1); + + // XXX: Test that the old signature of passing manager directly instead of in + // options works. + var coll = new Mongo.Collection(coll_name, conn); + + // queue has been emptied and doc is in db. + test.isUndefined(conn._updatesForUnknownStores[coll_name]); + test.equal(coll.find({}).fetch(), [{ _id: '1234', a: 1 }]); + + // second message. applied directly to the db. + stream.receive({ + msg: 'changed', + collection: coll_name, + id: '1234', + fields: { a: 2 } + }); + test.equal(coll.find({}).fetch(), [{ _id: '1234', a: 2 }]); + test.isUndefined(conn._updatesForUnknownStores[coll_name]); +}); + +Tinytest.add('livedata stub - buffering data', function(test) { + // Install special setTimeout that allows tick-by-tick control in tests using sinonjs 'lolex' + // This needs to be before the connection is instantiated. + const clock = lolex.install(); + const tick = timeout => clock.tick(timeout); + + const stream = new StubStream(); + const conn = newConnection(stream, { + bufferedWritesInterval: 10, + bufferedWritesMaxAge: 40 + }); + + startAndConnect(test, stream); + + const coll_name = Random.id(); + const coll = new Mongo.Collection(coll_name, conn); + + const testDocCount = count => test.equal(coll.find({}).count(), count); + + const addDoc = () => { + stream.receive({ + msg: 'added', + collection: coll_name, + id: Random.id(), + fields: {} + }); + }; + + // Starting at 0 ticks. At this point we haven't advanced the fake clock at all. + + addDoc(); // 1st Doc + testDocCount(0); // No doc been recognized yet because it's buffered, waiting for more. + tick(6); // 6 total ticks + testDocCount(0); // Ensure that the doc still hasn't shown up, despite the clock moving forward. 
+ tick(4); // 10 total ticks, 1st buffer interval + testDocCount(1); // No other docs have arrived, so we 'see' the 1st doc. + + addDoc(); // 2nd doc + tick(1); // 11 total ticks (1 since last flush) + testDocCount(1); // Again, second doc hasn't arrived because we're waiting for more... + tick(9); // 20 total ticks (10 ticks since last flush & the 2nd 10-tick interval) + testDocCount(2); // Now we're here and got the second document. + + // Add several docs, frequently enough that we buffer multiple times before the next flush. + addDoc(); // 3 docs + tick(6); // 26 ticks (6 since last flush) + addDoc(); // 4 docs + tick(6); // 32 ticks (12 since last flush) + addDoc(); // 5 docs + tick(6); // 38 ticks (18 since last flush) + addDoc(); // 6 docs + tick(6); // 44 ticks (24 since last flush) + addDoc(); // 7 docs + tick(9); // 53 ticks (33 since last flush) + addDoc(); // 8 docs + tick(9); // 62 ticks! (42 ticks since last flush, over max-age - next interval triggers flush) + testDocCount(2); // Still at 2 from before! (Just making sure) + tick(1); // Ok, 63 ticks (10 since last doc, so this should cause the flush of all the docs) + testDocCount(8); // See all the docs. + + // Put things back how they were. + clock.uninstall(); +}); + +Tinytest.add('livedata stub - subscribe', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + // subscribe + var callback_fired = false; + var sub = conn.subscribe('my_data', function() { + callback_fired = true; + }); + test.isFalse(callback_fired); + + test.length(stream.sent, 1); + var message = JSON.parse(stream.sent.shift()); + var id = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'my_data', params: [] }); + + var reactivelyReady = false; + var autorunHandle = Tracker.autorun(function() { + reactivelyReady = sub.ready(); + }); + test.isFalse(reactivelyReady); + + // get the sub satisfied. callback fires. 
+ stream.receive({ msg: 'ready', subs: [id] }); + test.isTrue(callback_fired); + Tracker.flush(); + test.isTrue(reactivelyReady); + + // Unsubscribe. + sub.stop(); + test.length(stream.sent, 1); + message = JSON.parse(stream.sent.shift()); + test.equal(message, { msg: 'unsub', id: id }); + Tracker.flush(); + test.isFalse(reactivelyReady); + + // Resubscribe. + conn.subscribe('my_data'); + test.length(stream.sent, 1); + message = JSON.parse(stream.sent.shift()); + var id2 = message.id; + test.notEqual(id, id2); + delete message.id; + test.equal(message, { msg: 'sub', name: 'my_data', params: [] }); +}); + +Tinytest.add('livedata stub - reactive subscribe', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var rFoo = new ReactiveVar('foo1'); + var rBar = new ReactiveVar('bar1'); + + var onReadyCount = {}; + var onReady = function(tag) { + return function() { + if (_.has(onReadyCount, tag)) ++onReadyCount[tag]; + else onReadyCount[tag] = 1; + }; + }; + + // Subscribe to some subs. + var stopperHandle, completerHandle; + var autorunHandle = Tracker.autorun(function() { + conn.subscribe('foo', rFoo.get(), onReady(rFoo.get())); + conn.subscribe('bar', rBar.get(), onReady(rBar.get())); + completerHandle = conn.subscribe('completer', onReady('completer')); + stopperHandle = conn.subscribe('stopper', onReady('stopper')); + }); + + var completerReady; + var readyAutorunHandle = Tracker.autorun(function() { + completerReady = completerHandle.ready(); + }); + + // Check sub messages. (Assume they are sent in the order executed.) 
+ test.length(stream.sent, 4); + var message = JSON.parse(stream.sent.shift()); + var idFoo1 = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo', params: ['foo1'] }); + + message = JSON.parse(stream.sent.shift()); + var idBar1 = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'bar', params: ['bar1'] }); + + message = JSON.parse(stream.sent.shift()); + var idCompleter = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'completer', params: [] }); + + message = JSON.parse(stream.sent.shift()); + var idStopper = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'stopper', params: [] }); + + // Haven't hit onReady yet. + test.equal(onReadyCount, {}); + Tracker.flush(); + test.isFalse(completerReady); + + // "completer" gets ready now. its callback should fire. + stream.receive({ msg: 'ready', subs: [idCompleter] }); + test.equal(onReadyCount, { completer: 1 }); + test.length(stream.sent, 0); + Tracker.flush(); + test.isTrue(completerReady); + + // Stop 'stopper'. + stopperHandle.stop(); + test.length(stream.sent, 1); + message = JSON.parse(stream.sent.shift()); + test.equal(message, { msg: 'unsub', id: idStopper }); + + test.equal(onReadyCount, { completer: 1 }); + Tracker.flush(); + test.isTrue(completerReady); + + // Change the foo subscription and flush. We should sub to the new foo + // subscription, re-sub to the stopper subscription, and then unsub from the old + // foo subscription. The bar subscription should be unaffected. The completer + // subscription should call its new onReady callback, because we always + // call onReady for a given reactively-saved subscription. + // The completerHandle should have been reestablished to the ready handle. 
+ rFoo.set('foo2'); + Tracker.flush(); + test.length(stream.sent, 3); + + message = JSON.parse(stream.sent.shift()); + var idFoo2 = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo', params: ['foo2'] }); + + message = JSON.parse(stream.sent.shift()); + var idStopperAgain = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'stopper', params: [] }); + + message = JSON.parse(stream.sent.shift()); + test.equal(message, { msg: 'unsub', id: idFoo1 }); + + test.equal(onReadyCount, { completer: 2 }); + test.isTrue(completerReady); + + // Ready the stopper and bar subs. Completing stopper should call only the + // onReady from the new subscription because they were separate subscriptions + // started at different times and the first one was explicitly torn down by + // the client; completing bar should call the onReady from the new + // subscription because we always call onReady for a given reactively-saved + // subscription. + stream.receive({ msg: 'ready', subs: [idStopperAgain, idBar1] }); + test.equal(onReadyCount, { completer: 2, bar1: 1, stopper: 1 }); + + // Shut down the autorun. This should unsub us from all current subs at flush + // time. + autorunHandle.stop(); + Tracker.flush(); + test.isFalse(completerReady); + readyAutorunHandle.stop(); + + test.length(stream.sent, 4); + // The order of unsubs here is not important. 
+ var unsubMessages = _.map(stream.sent, JSON.parse); + stream.sent.length = 0; + test.equal(_.unique(_.pluck(unsubMessages, 'msg')), ['unsub']); + var actualIds = _.pluck(unsubMessages, 'id'); + var expectedIds = [idFoo2, idBar1, idCompleter, idStopperAgain]; + actualIds.sort(); + expectedIds.sort(); + test.equal(actualIds, expectedIds); +}); + +Tinytest.add('livedata stub - reactive subscribe handle correct', function( + test +) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var rFoo = new ReactiveVar('foo1'); + + // Subscribe to some subs. + var fooHandle, fooReady; + var autorunHandle = Tracker.autorun(function() { + fooHandle = conn.subscribe('foo', rFoo.get()); + Tracker.autorun(function() { + fooReady = fooHandle.ready(); + }); + }); + + var message = JSON.parse(stream.sent.shift()); + var idFoo1 = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo', params: ['foo1'] }); + + // Not ready yet + Tracker.flush(); + test.isFalse(fooHandle.ready()); + test.isFalse(fooReady); + + // change the argument to foo. This will make a new handle, which isn't ready + // the ready autorun should invalidate, reading the new false value, and + // setting up a new dep which goes true soon + rFoo.set('foo2'); + Tracker.flush(); + test.length(stream.sent, 2); + + message = JSON.parse(stream.sent.shift()); + var idFoo2 = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo', params: ['foo2'] }); + + message = JSON.parse(stream.sent.shift()); + test.equal(message, { msg: 'unsub', id: idFoo1 }); + + Tracker.flush(); + test.isFalse(fooHandle.ready()); + test.isFalse(fooReady); + + // "foo" gets ready now. The handle should be ready and the autorun rerun + stream.receive({ msg: 'ready', subs: [idFoo2] }); + test.length(stream.sent, 0); + Tracker.flush(); + test.isTrue(fooHandle.ready()); + test.isTrue(fooReady); + + // change the argument to foo. 
This will make a new handle, which isn't ready + // the ready autorun should invalidate, making fooReady false too + rFoo.set('foo3'); + Tracker.flush(); + test.length(stream.sent, 2); + + message = JSON.parse(stream.sent.shift()); + var idFoo3 = message.id; + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo', params: ['foo3'] }); + + message = JSON.parse(stream.sent.shift()); + test.equal(message, { msg: 'unsub', id: idFoo2 }); + + Tracker.flush(); + test.isFalse(fooHandle.ready()); + test.isFalse(fooReady); + + // "foo" gets ready again + stream.receive({ msg: 'ready', subs: [idFoo3] }); + test.length(stream.sent, 0); + Tracker.flush(); + test.isTrue(fooHandle.ready()); + test.isTrue(fooReady); + + autorunHandle.stop(); +}); + +Tinytest.add('livedata stub - this', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + conn.methods({ + test_this: function() { + test.isTrue(this.isSimulation); + this.unblock(); // should be a no-op + } + }); + + // should throw no exceptions + conn.call('test_this', _.identity); + // satisfy method, quiesce connection + var message = JSON.parse(stream.sent.shift()); + test.isUndefined(message.randomSeed); + test.equal(message, { + msg: 'method', + method: 'test_this', + params: [], + id: message.id + }); + test.length(stream.sent, 0); + + stream.receive({ msg: 'result', id: message.id, result: null }); + stream.receive({ msg: 'updated', methods: [message.id] }); +}); + +if (Meteor.isClient) { + Tinytest.add('livedata stub - methods', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + + // setup method + conn.methods({ + do_something: function(x) { + coll.insert({ value: x }); + } + }); + + // setup observers + var counts = { added: 0, removed: 0, changed: 0, moved: 0 }; + var 
handle = coll.find({}).observe({ + addedAt: function() { + counts.added += 1; + }, + removedAt: function() { + counts.removed += 1; + }, + changedAt: function() { + counts.changed += 1; + }, + movedTo: function() { + counts.moved += 1; + } + }); + + // call method with results callback + var callback1Fired = false; + conn.call('do_something', 'friday!', function(err, res) { + test.isUndefined(err); + test.equal(res, '1234'); + callback1Fired = true; + }); + test.isFalse(callback1Fired); + + // observers saw the method run. + test.equal(counts, { added: 1, removed: 0, changed: 0, moved: 0 }); + + // get response from server + var message = testGotMessage(test, stream, { + msg: 'method', + method: 'do_something', + params: ['friday!'], + id: '*', + randomSeed: '*' + }); + + test.equal(coll.find({}).count(), 1); + test.equal(coll.find({ value: 'friday!' }).count(), 1); + var docId = coll.findOne({ value: 'friday!' })._id; + + // results does not yet result in callback, because data is not + // ready. + stream.receive({ msg: 'result', id: message.id, result: '1234' }); + test.isFalse(callback1Fired); + + // result message doesn't affect data + test.equal(coll.find({}).count(), 1); + test.equal(coll.find({ value: 'friday!' }).count(), 1); + test.equal(counts, { added: 1, removed: 0, changed: 0, moved: 0 }); + + // data methods do not show up (not quiescent yet) + stream.receive({ + msg: 'added', + collection: collName, + id: MongoID.idStringify(docId), + fields: { value: 'tuesday' } + }); + test.equal(coll.find({}).count(), 1); + test.equal(coll.find({ value: 'friday!' 
}).count(), 1); + test.equal(counts, { added: 1, removed: 0, changed: 0, moved: 0 }); + + // send another methods (unknown on client) + var callback2Fired = false; + conn.call('do_something_else', 'monday', function(err, res) { + callback2Fired = true; + }); + test.isFalse(callback1Fired); + test.isFalse(callback2Fired); + + // test we still send a method request to server + var message2 = JSON.parse(stream.sent.shift()); + test.isUndefined(message2.randomSeed); + test.equal(message2, { + msg: 'method', + method: 'do_something_else', + params: ['monday'], + id: message2.id + }); + + // get the first data satisfied message. changes are applied to database even + // though another method is outstanding, because the other method didn't have + // a stub. and its callback is called. + stream.receive({ msg: 'updated', methods: [message.id] }); + test.isTrue(callback1Fired); + test.isFalse(callback2Fired); + + test.equal(coll.find({}).count(), 1); + test.equal(coll.find({ value: 'tuesday' }).count(), 1); + test.equal(counts, { added: 1, removed: 0, changed: 1, moved: 0 }); + + // second result + stream.receive({ msg: 'result', id: message2.id, result: 'bupkis' }); + test.isFalse(callback2Fired); + + // get second satisfied; no new changes are applied. + stream.receive({ msg: 'updated', methods: [message2.id] }); + test.isTrue(callback2Fired); + + test.equal(coll.find({}).count(), 1); + test.equal(coll.find({ value: 'tuesday', _id: docId }).count(), 1); + test.equal(counts, { added: 1, removed: 0, changed: 1, moved: 0 }); + + handle.stop(); + }); +} + +Tinytest.add('livedata stub - mutating method args', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + conn.methods({ + mutateArgs: function(arg) { + arg.foo = 42; + } + }); + + conn.call('mutateArgs', { foo: 50 }, _.identity); + + // Method should be called with original arg, not mutated arg. 
+ var message = JSON.parse(stream.sent.shift()); + test.isUndefined(message.randomSeed); + test.equal(message, { + msg: 'method', + method: 'mutateArgs', + params: [{ foo: 50 }], + id: message.id + }); + test.length(stream.sent, 0); +}); + +var observeCursor = function(test, cursor) { + var counts = { added: 0, removed: 0, changed: 0, moved: 0 }; + var expectedCounts = _.clone(counts); + var handle = cursor.observe({ + addedAt: function() { + counts.added += 1; + }, + removedAt: function() { + counts.removed += 1; + }, + changedAt: function() { + counts.changed += 1; + }, + movedTo: function() { + counts.moved += 1; + } + }); + return { + stop: _.bind(handle.stop, handle), + expectCallbacks: function(delta) { + _.each(delta, function(mod, field) { + expectedCounts[field] += mod; + }); + test.equal(counts, expectedCounts); + } + }; +}; + +// method calls another method in simulation. see not sent. +if (Meteor.isClient) { + Tinytest.add('livedata stub - methods calling methods', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var coll_name = Random.id(); + var coll = new Mongo.Collection(coll_name, { connection: conn }); + + // setup methods + conn.methods({ + do_something: function() { + conn.call('do_something_else'); + }, + do_something_else: function() { + coll.insert({ a: 1 }); + } + }); + + var o = observeCursor(test, coll.find()); + + // call method. + conn.call('do_something', _.identity); + + // see we only send message for outer methods + var message = testGotMessage(test, stream, { + msg: 'method', + method: 'do_something', + params: [], + id: '*', + randomSeed: '*' + }); + test.length(stream.sent, 0); + + // but inner method runs locally. 
+ o.expectCallbacks({ added: 1 }); + test.equal(coll.find().count(), 1); + var docId = coll.findOne()._id; + test.equal(coll.findOne(), { _id: docId, a: 1 }); + + // we get the results + stream.receive({ msg: 'result', id: message.id, result: '1234' }); + + // get data from the method. data from this doc does not show up yet, but data + // from another doc does. + stream.receive({ + msg: 'added', + collection: coll_name, + id: MongoID.idStringify(docId), + fields: { value: 'tuesday' } + }); + o.expectCallbacks(); + test.equal(coll.findOne(docId), { _id: docId, a: 1 }); + stream.receive({ + msg: 'added', + collection: coll_name, + id: 'monkey', + fields: { value: 'bla' } + }); + o.expectCallbacks({ added: 1 }); + test.equal(coll.findOne(docId), { _id: docId, a: 1 }); + var newDoc = coll.findOne({ value: 'bla' }); + test.isTrue(newDoc); + test.equal(newDoc, { _id: newDoc._id, value: 'bla' }); + + // get method satisfied. all data shows up. the 'a' field is reverted and + // 'value' field is set. + stream.receive({ msg: 'updated', methods: [message.id] }); + o.expectCallbacks({ changed: 1 }); + test.equal(coll.findOne(docId), { _id: docId, value: 'tuesday' }); + test.equal(coll.findOne(newDoc._id), { _id: newDoc._id, value: 'bla' }); + + o.stop(); + }); +} +Tinytest.add('livedata stub - method call before connect', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + var callbackOutput = []; + conn.call('someMethod', function(err, result) { + callbackOutput.push(result); + }); + test.equal(callbackOutput, []); + + // the real stream drops all output pre-connection + stream.sent.length = 0; + + // Now connect. 
+ stream.reset(); + + testGotMessage(test, stream, makeConnectMessage()); + testGotMessage(test, stream, { + msg: 'method', + method: 'someMethod', + params: [], + id: '*' + }); +}); + +Tinytest.add('livedata stub - reconnect', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + + var o = observeCursor(test, coll.find()); + + // subscribe + var subCallbackFired = false; + var sub = conn.subscribe('my_data', function() { + subCallbackFired = true; + }); + test.isFalse(subCallbackFired); + + var subMessage = JSON.parse(stream.sent.shift()); + test.equal(subMessage, { + msg: 'sub', + name: 'my_data', + params: [], + id: subMessage.id + }); + + // get some data. it shows up. + stream.receive({ + msg: 'added', + collection: collName, + id: '1234', + fields: { a: 1 } + }); + + test.equal(coll.find({}).count(), 1); + o.expectCallbacks({ added: 1 }); + test.isFalse(subCallbackFired); + + stream.receive({ + msg: 'changed', + collection: collName, + id: '1234', + fields: { b: 2 } + }); + stream.receive({ + msg: 'ready', + subs: [subMessage.id] // satisfy sub + }); + test.isTrue(subCallbackFired); + subCallbackFired = false; // re-arm for test that it doesn't fire again. + + test.equal(coll.find({ a: 1, b: 2 }).count(), 1); + o.expectCallbacks({ changed: 1 }); + + // call method. + var methodCallbackFired = false; + conn.call('do_something', function() { + methodCallbackFired = true; + }); + + conn.apply('do_something_else', [], { wait: true }, _.identity); + conn.apply('do_something_later', [], _.identity); + + test.isFalse(methodCallbackFired); + + // The non-wait method should send, but not the wait method. 
+ var methodMessage = JSON.parse(stream.sent.shift()); + test.isUndefined(methodMessage.randomSeed); + test.equal(methodMessage, { + msg: 'method', + method: 'do_something', + params: [], + id: methodMessage.id + }); + test.equal(stream.sent.length, 0); + + // more data. shows up immediately because there was no relevant method stub. + stream.receive({ + msg: 'changed', + collection: collName, + id: '1234', + fields: { c: 3 } + }); + test.equal(coll.findOne('1234'), { _id: '1234', a: 1, b: 2, c: 3 }); + o.expectCallbacks({ changed: 1 }); + + // stream reset. reconnect! we send a connect, our pending method, and our + // sub. The wait method still is blocked. + stream.reset(); + + testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); + testGotMessage(test, stream, methodMessage); + testGotMessage(test, stream, subMessage); + + // reconnect with different session id + stream.receive({ msg: 'connected', session: SESSION_ID + 1 }); + + // resend data. doesn't show up: we're in reconnect quiescence. + stream.receive({ + msg: 'added', + collection: collName, + id: '1234', + fields: { a: 1, b: 2, c: 3, d: 4 } + }); + stream.receive({ + msg: 'added', + collection: collName, + id: '2345', + fields: { e: 5 } + }); + test.equal(coll.findOne('1234'), { _id: '1234', a: 1, b: 2, c: 3 }); + test.isFalse(coll.findOne('2345')); + o.expectCallbacks(); + + // satisfy and return the method + stream.receive({ + msg: 'updated', + methods: [methodMessage.id] + }); + test.isFalse(methodCallbackFired); + stream.receive({ msg: 'result', id: methodMessage.id, result: 'bupkis' }); + // The callback still doesn't fire (and we don't send the wait method): we're + // still in global quiescence + test.isFalse(methodCallbackFired); + test.equal(stream.sent.length, 0); + + // still no update. 
+ test.equal(coll.findOne('1234'), { _id: '1234', a: 1, b: 2, c: 3 }); + test.isFalse(coll.findOne('2345')); + o.expectCallbacks(); + + // re-satisfy sub + stream.receive({ msg: 'ready', subs: [subMessage.id] }); + + // now the doc changes and method callback is called, and the wait method is + // sent. the sub callback isn't re-called. + test.isTrue(methodCallbackFired); + test.isFalse(subCallbackFired); + test.equal(coll.findOne('1234'), { _id: '1234', a: 1, b: 2, c: 3, d: 4 }); + test.equal(coll.findOne('2345'), { _id: '2345', e: 5 }); + o.expectCallbacks({ added: 1, changed: 1 }); + + var waitMethodMessage = JSON.parse(stream.sent.shift()); + test.isUndefined(waitMethodMessage.randomSeed); + test.equal(waitMethodMessage, { + msg: 'method', + method: 'do_something_else', + params: [], + id: waitMethodMessage.id + }); + test.equal(stream.sent.length, 0); + stream.receive({ msg: 'result', id: waitMethodMessage.id, result: 'bupkis' }); + test.equal(stream.sent.length, 0); + stream.receive({ msg: 'updated', methods: [waitMethodMessage.id] }); + + // wait method done means we can send the third method + test.equal(stream.sent.length, 1); + var laterMethodMessage = JSON.parse(stream.sent.shift()); + test.isUndefined(laterMethodMessage.randomSeed); + test.equal(laterMethodMessage, { + msg: 'method', + method: 'do_something_later', + params: [], + id: laterMethodMessage.id + }); + + o.stop(); +}); + +if (Meteor.isClient) { + Tinytest.add('livedata stub - reconnect non-idempotent method', function( + test + ) { + // This test is for https://github.com/meteor/meteor/issues/6108 + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var firstMethodCallbackFired = false; + var firstMethodCallbackErrored = false; + var secondMethodCallbackFired = false; + var secondMethodCallbackErrored = false; + + // call with noRetry true so that the method should fail to retry on reconnect. 
+ conn.apply('do_something', [], { noRetry: true }, function(error) { + firstMethodCallbackFired = true; + // failure on reconnect should trigger an error. + if (error && error.error === 'invocation-failed') { + firstMethodCallbackErrored = true; + } + }); + conn.apply('do_something_else', [], { noRetry: true }, function(error) { + secondMethodCallbackFired = true; + // failure on reconnect should trigger an error. + if (error && error.error === 'invocation-failed') { + secondMethodCallbackErrored = true; + } + }); + + // The method has not succeeded yet + test.isFalse(firstMethodCallbackFired); + test.isFalse(secondMethodCallbackFired); + + // send the methods + stream.sent.shift(); + stream.sent.shift(); + // reconnect + stream.reset(); + + // verify that a reconnect message was sent. + testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); + // Make sure that the stream triggers connection. + stream.receive({ msg: 'connected', session: SESSION_ID + 1 }); + + //The method callback should fire even though the stream has not sent a response. + //the callback should have been fired with an error. + test.isTrue(firstMethodCallbackFired); + test.isTrue(firstMethodCallbackErrored); + test.isTrue(secondMethodCallbackFired); + test.isTrue(secondMethodCallbackErrored); + + // verify that the method message was not sent. 
+ test.isUndefined(stream.sent.shift()); + }); +} + +function addReconnectTests(name, testFunc) { + Tinytest.add(name + ' (deprecated)', function(test) { + function deprecatedSetOnReconnect(conn, handler) { + conn.onReconnect = handler; + } + testFunc.call(this, test, deprecatedSetOnReconnect); + }); + + Tinytest.add(name, function(test) { + var stopper; + function setOnReconnect(conn, handler) { + stopper && stopper.stop(); + stopper = DDP.onReconnect(function(reconnectingConn) { + if (reconnectingConn === conn) { + handler(); + } + }); + } + testFunc.call(this, test, setOnReconnect); + stopper && stopper.stop(); + }); +} + +if (Meteor.isClient) { + addReconnectTests( + 'livedata stub - reconnect method which only got result', + function(test, setOnReconnect) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + var o = observeCursor(test, coll.find()); + + conn.methods({ + writeSomething: function() { + // stub write + coll.insert({ foo: 'bar' }); + } + }); + + test.equal(coll.find({ foo: 'bar' }).count(), 0); + + // Call a method. We'll get the result but not data-done before reconnect. + var callbackOutput = []; + var onResultReceivedOutput = []; + conn.apply( + 'writeSomething', + [], + { + onResultReceived: function(err, result) { + onResultReceivedOutput.push(result); + } + }, + function(err, result) { + callbackOutput.push(result); + } + ); + // Stub write is visible. + test.equal(coll.find({ foo: 'bar' }).count(), 1); + var stubWrittenId = coll.findOne({ foo: 'bar' })._id; + o.expectCallbacks({ added: 1 }); + // Callback not called. + test.equal(callbackOutput, []); + test.equal(onResultReceivedOutput, []); + // Method sent. 
+ var methodId = testGotMessage(test, stream, { + msg: 'method', + method: 'writeSomething', + params: [], + id: '*', + randomSeed: '*' + }).id; + test.equal(stream.sent.length, 0); + + // Get some data. + stream.receive({ + msg: 'added', + collection: collName, + id: MongoID.idStringify(stubWrittenId), + fields: { baz: 42 } + }); + // It doesn't show up yet. + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'bar' + }); + o.expectCallbacks(); + + // Get the result. + stream.receive({ msg: 'result', id: methodId, result: 'bla' }); + // Data unaffected. + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'bar' + }); + o.expectCallbacks(); + // Callback not called, but onResultReceived is. + test.equal(callbackOutput, []); + test.equal(onResultReceivedOutput, ['bla']); + + // Reset stream. Method does NOT get resent, because its result is already + // in. Reconnect quiescence happens as soon as 'connected' is received because + // there are no pending methods or subs in need of revival. + stream.reset(); + testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); + // Still holding out hope for session resumption, so nothing updated yet. + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'bar' + }); + o.expectCallbacks(); + test.equal(callbackOutput, []); + + // Receive 'connected': time for reconnect quiescence! Data gets updated + // locally (ie, data is reset) and callback gets called. 
+ stream.receive({ msg: 'connected', session: SESSION_ID + 1 }); + test.equal(coll.find().count(), 0); + o.expectCallbacks({ removed: 1 }); + test.equal(callbackOutput, ['bla']); + test.equal(onResultReceivedOutput, ['bla']); + stream.receive({ + msg: 'added', + collection: collName, + id: MongoID.idStringify(stubWrittenId), + fields: { baz: 42 } + }); + test.equal(coll.findOne(stubWrittenId), { _id: stubWrittenId, baz: 42 }); + o.expectCallbacks({ added: 1 }); + + // Run method again. We're going to do the same thing this time, except we're + // also going to use an onReconnect to insert another method at reconnect + // time, which will delay reconnect quiescence. + conn.apply( + 'writeSomething', + [], + { + onResultReceived: function(err, result) { + onResultReceivedOutput.push(result); + } + }, + function(err, result) { + callbackOutput.push(result); + } + ); + // Stub write is visible. + test.equal(coll.find({ foo: 'bar' }).count(), 1); + var stubWrittenId2 = coll.findOne({ foo: 'bar' })._id; + o.expectCallbacks({ added: 1 }); + // Callback not called. + test.equal(callbackOutput, ['bla']); + test.equal(onResultReceivedOutput, ['bla']); + // Method sent. + var methodId2 = testGotMessage(test, stream, { + msg: 'method', + method: 'writeSomething', + params: [], + id: '*', + randomSeed: '*' + }).id; + test.equal(stream.sent.length, 0); + + // Get some data. + stream.receive({ + msg: 'added', + collection: collName, + id: MongoID.idStringify(stubWrittenId2), + fields: { baz: 42 } + }); + // It doesn't show up yet. + test.equal(coll.find().count(), 2); + test.equal(coll.findOne(stubWrittenId2), { + _id: stubWrittenId2, + foo: 'bar' + }); + o.expectCallbacks(); + + // Get the result. + stream.receive({ msg: 'result', id: methodId2, result: 'blab' }); + // Data unaffected. 
+ test.equal(coll.find().count(), 2); + test.equal(coll.findOne(stubWrittenId2), { + _id: stubWrittenId2, + foo: 'bar' + }); + o.expectCallbacks(); + // Callback not called, but onResultReceived is. + test.equal(callbackOutput, ['bla']); + test.equal(onResultReceivedOutput, ['bla', 'blab']); + setOnReconnect(conn, function() { + conn.call('slowMethod', function(err, result) { + callbackOutput.push(result); + }); + }); + + // Reset stream. Method does NOT get resent, because its result is already in, + // but slowMethod gets called via onReconnect. Reconnect quiescence is now + // blocking on slowMethod. + stream.reset(); + testGotMessage(test, stream, makeConnectMessage(SESSION_ID + 1)); + var slowMethodId = testGotMessage(test, stream, { + msg: 'method', + method: 'slowMethod', + params: [], + id: '*' + }).id; + // Still holding out hope for session resumption, so nothing updated yet. + test.equal(coll.find().count(), 2); + test.equal(coll.findOne(stubWrittenId2), { + _id: stubWrittenId2, + foo: 'bar' + }); + o.expectCallbacks(); + test.equal(callbackOutput, ['bla']); + + // Receive 'connected'... but no reconnect quiescence yet due to slowMethod. + stream.receive({ msg: 'connected', session: SESSION_ID + 2 }); + test.equal(coll.find().count(), 2); + test.equal(coll.findOne(stubWrittenId2), { + _id: stubWrittenId2, + foo: 'bar' + }); + o.expectCallbacks(); + test.equal(callbackOutput, ['bla']); + + // Receive data matching our stub. It doesn't take effect yet. + stream.receive({ + msg: 'added', + collection: collName, + id: MongoID.idStringify(stubWrittenId2), + fields: { foo: 'bar' } + }); + o.expectCallbacks(); + + // slowMethod is done writing, so we get full reconnect quiescence (but no + // slowMethod callback)... ie, a reset followed by applying the data we just + // got, as well as calling the callback from the method that half-finished + // before reset. The net effect is deleting doc 'stubWrittenId'. 
+ stream.receive({ msg: 'updated', methods: [slowMethodId] }); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId2), { + _id: stubWrittenId2, + foo: 'bar' + }); + o.expectCallbacks({ removed: 1 }); + test.equal(callbackOutput, ['bla', 'blab']); + + // slowMethod returns a value now. + stream.receive({ msg: 'result', id: slowMethodId, result: 'slow' }); + o.expectCallbacks(); + test.equal(callbackOutput, ['bla', 'blab', 'slow']); + + o.stop(); + } + ); +} +Tinytest.add('livedata stub - reconnect method which only got data', function( + test +) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + var o = observeCursor(test, coll.find()); + + // Call a method. We'll get the data-done message but not the result before + // reconnect. + var callbackOutput = []; + var onResultReceivedOutput = []; + conn.apply( + 'doLittle', + [], + { + onResultReceived: function(err, result) { + onResultReceivedOutput.push(result); + } + }, + function(err, result) { + callbackOutput.push(result); + } + ); + // Callbacks not called. + test.equal(callbackOutput, []); + test.equal(onResultReceivedOutput, []); + // Method sent. + var methodId = testGotMessage(test, stream, { + msg: 'method', + method: 'doLittle', + params: [], + id: '*' + }).id; + test.equal(stream.sent.length, 0); + + // Get some data. + stream.receive({ + msg: 'added', + collection: collName, + id: 'photo', + fields: { baz: 42 } + }); + // It shows up instantly because the stub didn't write anything. + test.equal(coll.find().count(), 1); + test.equal(coll.findOne('photo'), { _id: 'photo', baz: 42 }); + o.expectCallbacks({ added: 1 }); + + // Get the data-done message. + stream.receive({ msg: 'updated', methods: [methodId] }); + // Data still here. 
+ test.equal(coll.find().count(), 1); + test.equal(coll.findOne('photo'), { _id: 'photo', baz: 42 }); + o.expectCallbacks(); + // Method callback not called yet (no result yet). + test.equal(callbackOutput, []); + test.equal(onResultReceivedOutput, []); + + // Reset stream. Method gets resent (with same ID), and blocks reconnect + // quiescence. + stream.reset(); + testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); + testGotMessage(test, stream, { + msg: 'method', + method: 'doLittle', + params: [], + id: methodId + }); + // Still holding out hope for session resumption, so nothing updated yet. + test.equal(coll.find().count(), 1); + test.equal(coll.findOne('photo'), { _id: 'photo', baz: 42 }); + o.expectCallbacks(); + test.equal(callbackOutput, []); + test.equal(onResultReceivedOutput, []); + + // Receive 'connected'. Still blocking on reconnect quiescence. + stream.receive({ msg: 'connected', session: SESSION_ID + 1 }); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne('photo'), { _id: 'photo', baz: 42 }); + o.expectCallbacks(); + test.equal(callbackOutput, []); + test.equal(onResultReceivedOutput, []); + + // Receive method result. onResultReceived is called but the main callback + // isn't (ie, we don't get confused by the fact that we got data-done the + // *FIRST* time through). + stream.receive({ msg: 'result', id: methodId, result: 'res' }); + test.equal(callbackOutput, []); + test.equal(onResultReceivedOutput, ['res']); + + // Now we get data-done. Collection is reset and callback is called. 
+ stream.receive({ msg: 'updated', methods: [methodId] }); + test.equal(coll.find().count(), 0); + o.expectCallbacks({ removed: 1 }); + test.equal(callbackOutput, ['res']); + test.equal(onResultReceivedOutput, ['res']); + + o.stop(); +}); +if (Meteor.isClient) { + Tinytest.add('livedata stub - multiple stubs same doc', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + var o = observeCursor(test, coll.find()); + + conn.methods({ + insertSomething: function() { + // stub write + coll.insert({ foo: 'bar' }); + }, + updateIt: function(id) { + coll.update(id, { $set: { baz: 42 } }); + } + }); + + test.equal(coll.find().count(), 0); + + // Call the insert method. + conn.call('insertSomething', _.identity); + // Stub write is visible. + test.equal(coll.find({ foo: 'bar' }).count(), 1); + var stubWrittenId = coll.findOne({ foo: 'bar' })._id; + o.expectCallbacks({ added: 1 }); + // Method sent. + var insertMethodId = testGotMessage(test, stream, { + msg: 'method', + method: 'insertSomething', + params: [], + id: '*', + randomSeed: '*' + }).id; + test.equal(stream.sent.length, 0); + + // Call update method. + conn.call('updateIt', stubWrittenId, _.identity); + // This stub write is visible too. + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'bar', + baz: 42 + }); + o.expectCallbacks({ changed: 1 }); + // Method sent. + var updateMethodId = testGotMessage(test, stream, { + msg: 'method', + method: 'updateIt', + params: [stubWrittenId], + id: '*' + }).id; + test.equal(stream.sent.length, 0); + + // Get some data... slightly different than what we wrote. 
+ stream.receive({ + msg: 'added', + collection: collName, + id: MongoID.idStringify(stubWrittenId), + fields: { + foo: 'barb', + other: 'field', + other2: 'bla' + } + }); + // It doesn't show up yet. + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'bar', + baz: 42 + }); + o.expectCallbacks(); + + // And get the first method-done. Still no updates to minimongo: we can't + // quiesce the doc until the second method is done. + stream.receive({ msg: 'updated', methods: [insertMethodId] }); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'bar', + baz: 42 + }); + o.expectCallbacks(); + + // More data. Not quite what we wrote. Also ignored for now. + stream.receive({ + msg: 'changed', + collection: collName, + id: MongoID.idStringify(stubWrittenId), + fields: { baz: 43 }, + cleared: ['other'] + }); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'bar', + baz: 42 + }); + o.expectCallbacks(); + + // Second data-ready. Now everything takes effect! 
+ stream.receive({ msg: 'updated', methods: [updateMethodId] }); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(stubWrittenId), { + _id: stubWrittenId, + foo: 'barb', + other2: 'bla', + baz: 43 + }); + o.expectCallbacks({ changed: 1 }); + + o.stop(); + }); +} + +if (Meteor.isClient) { + Tinytest.add( + "livedata stub - unsent methods don't block quiescence", + function(test) { + // This test is for https://github.com/meteor/meteor/issues/555 + + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + + conn.methods({ + insertSomething: function() { + // stub write + coll.insert({ foo: 'bar' }); + } + }); + + test.equal(coll.find().count(), 0); + + // Call a random method (no-op) + conn.call('no-op', _.identity); + // Call a wait method + conn.apply('no-op', [], { wait: true }, _.identity); + // Call a method with a stub that writes. + conn.call('insertSomething', _.identity); + + // Stub write is visible. + test.equal(coll.find({ foo: 'bar' }).count(), 1); + var stubWrittenId = coll.findOne({ foo: 'bar' })._id; + + // first method sent + var firstMethodId = testGotMessage(test, stream, { + msg: 'method', + method: 'no-op', + params: [], + id: '*' + }).id; + test.equal(stream.sent.length, 0); + + // ack the first method + stream.receive({ msg: 'updated', methods: [firstMethodId] }); + stream.receive({ msg: 'result', id: firstMethodId }); + + // Wait method sent. + var waitMethodId = testGotMessage(test, stream, { + msg: 'method', + method: 'no-op', + params: [], + id: '*' + }).id; + test.equal(stream.sent.length, 0); + + // ack the wait method + stream.receive({ msg: 'updated', methods: [waitMethodId] }); + stream.receive({ msg: 'result', id: waitMethodId }); + + // insert method sent. 
+ var insertMethodId = testGotMessage(test, stream, { + msg: 'method', + method: 'insertSomething', + params: [], + id: '*', + randomSeed: '*' + }).id; + test.equal(stream.sent.length, 0); + + // ack the insert method + stream.receive({ msg: 'updated', methods: [insertMethodId] }); + stream.receive({ msg: 'result', id: insertMethodId }); + + // simulation reverted. + test.equal(coll.find({ foo: 'bar' }).count(), 0); + } + ); +} +Tinytest.add('livedata stub - reactive resub', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var readiedSubs = {}; + var markAllReady = function() { + // synthesize a "ready" message in response to any "sub" + // message with an id we haven't seen before + _.each(stream.sent, function(msg) { + msg = JSON.parse(msg); + if (msg.msg === 'sub' && !_.has(readiedSubs, msg.id)) { + stream.receive({ msg: 'ready', subs: [msg.id] }); + readiedSubs[msg.id] = true; + } + }); + }; + + var fooArg = new ReactiveVar('A'); + var fooReady = 0; + + var inner; + var outer = Tracker.autorun(function() { + inner = Tracker.autorun(function() { + conn.subscribe('foo-sub', fooArg.get(), function() { + fooReady++; + }); + }); + }); + + markAllReady(); + var message = JSON.parse(stream.sent.shift()); + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo-sub', params: ['A'] }); + test.equal(fooReady, 1); + + // Rerun the inner autorun with different subscription + // arguments. + fooArg.set('B'); + test.isTrue(inner.invalidated); + Tracker.flush(); + test.isFalse(inner.invalidated); + markAllReady(); + message = JSON.parse(stream.sent.shift()); + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo-sub', params: ['B'] }); + message = JSON.parse(stream.sent.shift()); + delete message.id; + test.equal(message, { msg: 'unsub' }); + test.equal(fooReady, 2); + + // Rerun inner again with same args; should be no re-sub. 
+ inner.invalidate(); + test.isTrue(inner.invalidated); + Tracker.flush(); + test.isFalse(inner.invalidated); + markAllReady(); + test.isUndefined(stream.sent.shift()); + test.isUndefined(stream.sent.shift()); + test.equal(fooReady, 3); + + // Rerun outer! Should still be no re-sub even though + // the inner computation is stopped and a new one is + // started. + outer.invalidate(); + test.isTrue(inner.invalidated); + Tracker.flush(); + test.isFalse(inner.invalidated); + markAllReady(); + test.isUndefined(stream.sent.shift()); + test.equal(fooReady, 4); + + // Change the subscription. Now we should get an onReady. + fooArg.set('C'); + Tracker.flush(); + markAllReady(); + message = JSON.parse(stream.sent.shift()); + delete message.id; + test.equal(message, { msg: 'sub', name: 'foo-sub', params: ['C'] }); + message = JSON.parse(stream.sent.shift()); + delete message.id; + test.equal(message, { msg: 'unsub' }); + test.equal(fooReady, 5); +}); + +Tinytest.add('livedata connection - reactive userId', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + test.equal(conn.userId(), null); + conn.setUserId(1337); + test.equal(conn.userId(), 1337); +}); + +Tinytest.add('livedata connection - two wait methods', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + + // setup method + conn.methods({ do_something: function(x) {} }); + + var responses = []; + conn.apply('do_something', ['one!'], function() { + responses.push('one'); + }); + var one_message = JSON.parse(stream.sent.shift()); + test.equal(one_message.params, ['one!']); + + conn.apply('do_something', ['two!'], { wait: true }, function() { + responses.push('two'); + }); + // 'two!' isn't sent yet, because it's a wait method. 
+ test.equal(stream.sent.length, 0); + + conn.apply('do_something', ['three!'], function() { + responses.push('three'); + }); + conn.apply('do_something', ['four!'], function() { + responses.push('four'); + }); + + conn.apply('do_something', ['five!'], { wait: true }, function() { + responses.push('five'); + }); + + conn.apply('do_something', ['six!'], function() { + responses.push('six'); + }); + + // Verify that we did not send any more methods since we are still waiting on + // 'one!'. + test.equal(stream.sent.length, 0); + + // Receive some data. "one" is not a wait method and there are no stubs, so it + // gets applied immediately. + test.equal(coll.find().count(), 0); + stream.receive({ + msg: 'added', + collection: collName, + id: 'foo', + fields: { x: 1 } + }); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne('foo'), { _id: 'foo', x: 1 }); + + // Let "one!" finish. Both messages are required to fire the callback. + stream.receive({ msg: 'result', id: one_message.id }); + test.equal(responses, []); + stream.receive({ msg: 'updated', methods: [one_message.id] }); + test.equal(responses, ['one']); + + // Now we've send out "two!". + var two_message = JSON.parse(stream.sent.shift()); + test.equal(two_message.params, ['two!']); + + // But still haven't sent "three!". + test.equal(stream.sent.length, 0); + + // Receive more data. "two" is a wait method, so the data doesn't get applied + // yet. + stream.receive({ + msg: 'changed', + collection: collName, + id: 'foo', + fields: { y: 3 } + }); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne('foo'), { _id: 'foo', x: 1 }); + + // Let "two!" finish, with its end messages in the opposite order to "one!". + stream.receive({ msg: 'updated', methods: [two_message.id] }); + test.equal(responses, ['one']); + test.equal(stream.sent.length, 0); + // data-done message is enough to allow data to be written. 
+ test.equal(coll.find().count(), 1); + test.equal(coll.findOne('foo'), { _id: 'foo', x: 1, y: 3 }); + stream.receive({ msg: 'result', id: two_message.id }); + test.equal(responses, ['one', 'two']); + + // Verify that we just sent "three!" and "four!" now that we got + // responses for "one!" and "two!" + test.equal(stream.sent.length, 2); + var three_message = JSON.parse(stream.sent.shift()); + test.equal(three_message.params, ['three!']); + var four_message = JSON.parse(stream.sent.shift()); + test.equal(four_message.params, ['four!']); + + // Out of order response is OK for non-wait methods. + stream.receive({ msg: 'result', id: three_message.id }); + stream.receive({ msg: 'result', id: four_message.id }); + stream.receive({ msg: 'updated', methods: [four_message.id] }); + test.equal(responses, ['one', 'two', 'four']); + test.equal(stream.sent.length, 0); + + // Let three finish too. + stream.receive({ msg: 'updated', methods: [three_message.id] }); + test.equal(responses, ['one', 'two', 'four', 'three']); + + // Verify that we just sent "five!" (the next wait method). + test.equal(stream.sent.length, 1); + var five_message = JSON.parse(stream.sent.shift()); + test.equal(five_message.params, ['five!']); + test.equal(responses, ['one', 'two', 'four', 'three']); + + // Let five finish. 
+ stream.receive({ msg: 'result', id: five_message.id }); + stream.receive({ msg: 'updated', methods: [five_message.id] }); + test.equal(responses, ['one', 'two', 'four', 'three', 'five']); + + var six_message = JSON.parse(stream.sent.shift()); + test.equal(six_message.params, ['six!']); +}); + +addReconnectTests( + 'livedata connection - onReconnect prepends messages correctly with a wait method', + function(test, setOnReconnect) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + // setup method + conn.methods({ do_something: function(x) {} }); + + setOnReconnect(conn, function() { + conn.apply('do_something', ['reconnect zero'], _.identity); + conn.apply('do_something', ['reconnect one'], _.identity); + conn.apply('do_something', ['reconnect two'], { wait: true }, _.identity); + conn.apply('do_something', ['reconnect three'], _.identity); + }); + + conn.apply('do_something', ['one'], _.identity); + conn.apply('do_something', ['two'], { wait: true }, _.identity); + conn.apply('do_something', ['three'], _.identity); + + // reconnect + stream.sent = []; + stream.reset(); + testGotMessage(test, stream, makeConnectMessage(conn._lastSessionId)); + + // Test that we sent what we expect to send, and we're blocked on + // what we expect to be blocked. The subsequent logic to correctly + // read the wait flag is tested separately. 
+ test.equal( + _.map(stream.sent, function(msg) { + return JSON.parse(msg).params[0]; + }), + ['reconnect zero', 'reconnect one'] + ); + + // white-box test: + test.equal( + _.map(conn._outstandingMethodBlocks, function(block) { + return [ + block.wait, + _.map(block.methods, function(method) { + return method._message.params[0]; + }) + ]; + }), + [ + [false, ['reconnect zero', 'reconnect one']], + [true, ['reconnect two']], + [false, ['reconnect three', 'one']], + [true, ['two']], + [false, ['three']] + ] + ); + } +); + +Tinytest.add('livedata connection - ping without id', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + stream.receive({ msg: 'ping' }); + testGotMessage(test, stream, { msg: 'pong' }); +}); + +Tinytest.add('livedata connection - ping with id', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + var id = Random.id(); + stream.receive({ msg: 'ping', id: id }); + testGotMessage(test, stream, { msg: 'pong', id: id }); +}); + +_.each(DDPCommon.SUPPORTED_DDP_VERSIONS, function(version) { + Tinytest.addAsync('livedata connection - ping from ' + version, function( + test, + onComplete + ) { + var connection = new Connection(getSelfConnectionUrl(), { + reloadWithOutstanding: true, + supportedDDPVersions: [version], + onDDPVersionNegotiationFailure: function() { + test.fail(); + onComplete(); + }, + onConnected: function() { + test.equal(connection._version, version); + // It's a little naughty to access _stream and _send, but it works... 
+ connection._stream.on('message', function(json) { + var msg = JSON.parse(json); + var done = false; + if (msg.msg === 'pong') { + test.notEqual(version, 'pre1'); + done = true; + } else if (msg.msg === 'error') { + // Version pre1 does not play ping-pong + test.equal(version, 'pre1'); + done = true; + } else { + Meteor._debug('Got unexpected message: ' + json); + } + if (done) { + connection._stream.disconnect({ _permanent: true }); + onComplete(); + } + }); + connection._send({ msg: 'ping' }); + } + }); + }); +}); + +var getSelfConnectionUrl = function() { + if (Meteor.isClient) { + var ddpUrl = Meteor._relativeToSiteRootUrl('/'); + if (typeof __meteor_runtime_config__ !== 'undefined') { + if (__meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL) + ddpUrl = __meteor_runtime_config__.DDP_DEFAULT_CONNECTION_URL; + } + return ddpUrl; + } else { + return Meteor.absoluteUrl(); + } +}; + +if (Meteor.isServer) { + Meteor.methods({ + reverse: function(arg) { + // Return something notably different from reverse.meteor.com. 
+ return ( + arg + .split('') + .reverse() + .join('') + ' LOCAL' + ); + } + }); +} + +testAsyncMulti('livedata connection - reconnect to a different server', [ + function(test, expect) { + var self = this; + self.conn = DDP.connect('reverse.meteor.com'); + pollUntil( + expect, + function() { + return self.conn.status().connected; + }, + 5000, + 100, + false + ); + }, + function(test, expect) { + var self = this; + self.doTest = self.conn.status().connected; + if (self.doTest) { + self.conn.call( + 'reverse', + 'foo', + expect(function(err, res) { + test.equal(res, 'oof'); + }) + ); + } + }, + function(test, expect) { + var self = this; + if (self.doTest) { + self.conn.reconnect({ url: getSelfConnectionUrl() }); + self.conn.call( + 'reverse', + 'bar', + expect(function(err, res) { + test.equal(res, 'rab LOCAL'); + }) + ); + } + } +]); + +Tinytest.addAsync( + 'livedata connection - version negotiation requires renegotiating', + function(test, onComplete) { + var connection = new Connection(getSelfConnectionUrl(), { + reloadWithOutstanding: true, + supportedDDPVersions: ['garbled', DDPCommon.SUPPORTED_DDP_VERSIONS[0]], + onDDPVersionNegotiationFailure: function() { + test.fail(); + onComplete(); + }, + onConnected: function() { + test.equal(connection._version, DDPCommon.SUPPORTED_DDP_VERSIONS[0]); + connection._stream.disconnect({ _permanent: true }); + onComplete(); + } + }); + } +); + +Tinytest.addAsync('livedata connection - version negotiation error', function( + test, + onComplete +) { + var connection = new Connection(getSelfConnectionUrl(), { + reloadWithOutstanding: true, + supportedDDPVersions: ['garbled', 'more garbled'], + onDDPVersionNegotiationFailure: function() { + test.equal(connection.status().status, 'failed'); + test.matches( + connection.status().reason, + /DDP version negotiation failed/ + ); + test.isFalse(connection.status().connected); + onComplete(); + }, + onConnected: function() { + test.fail(); + onComplete(); + } + }); +}); + 
+addReconnectTests( + 'livedata connection - onReconnect prepends messages correctly without a wait method', + function(test, setOnReconnect) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + // setup method + conn.methods({ do_something: function(x) {} }); + + setOnReconnect(conn, function() { + conn.apply('do_something', ['reconnect one'], _.identity); + conn.apply('do_something', ['reconnect two'], _.identity); + conn.apply('do_something', ['reconnect three'], _.identity); + }); + + conn.apply('do_something', ['one'], _.identity); + conn.apply('do_something', ['two'], { wait: true }, _.identity); + conn.apply('do_something', ['three'], { wait: true }, _.identity); + conn.apply('do_something', ['four'], _.identity); + + // reconnect + stream.sent = []; + stream.reset(); + testGotMessage(test, stream, makeConnectMessage(conn._lastSessionId)); + + // Test that we sent what we expect to send, and we're blocked on + // what we expect to be blocked. The subsequent logic to correctly + // read the wait flag is tested separately. 
+ test.equal( + _.map(stream.sent, function(msg) { + return JSON.parse(msg).params[0]; + }), + ['reconnect one', 'reconnect two', 'reconnect three', 'one'] + ); + + // white-box test: + test.equal( + _.map(conn._outstandingMethodBlocks, function(block) { + return [ + block.wait, + _.map(block.methods, function(method) { + return method._message.params[0]; + }) + ]; + }), + [ + [false, ['reconnect one', 'reconnect two', 'reconnect three', 'one']], + [true, ['two']], + [true, ['three']], + [false, ['four']] + ] + ); + } +); + +addReconnectTests( + 'livedata connection - onReconnect with sent messages', + function(test, setOnReconnect) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + // setup method + conn.methods({ do_something: function(x) {} }); + + setOnReconnect(conn, function() { + conn.apply('do_something', ['login'], { wait: true }, _.identity); + }); + + conn.apply('do_something', ['one'], _.identity); + + // initial connect + stream.sent = []; + stream.reset(); + testGotMessage(test, stream, makeConnectMessage(conn._lastSessionId)); + + // Test that we sent just the login message. + var loginId = testGotMessage(test, stream, { + msg: 'method', + method: 'do_something', + params: ['login'], + id: '*' + }).id; + + // we connect. + stream.receive({ msg: 'connected', session: Random.id() }); + test.length(stream.sent, 0); + + // login got result (but not yet data) + stream.receive({ msg: 'result', id: loginId, result: 'foo' }); + test.length(stream.sent, 0); + + // login got data. now we send next method. 
+ stream.receive({ msg: 'updated', methods: [loginId] }); + + testGotMessage(test, stream, { + msg: 'method', + method: 'do_something', + params: ['one'], + id: '*' + }).id; + } +); + +addReconnectTests('livedata stub - reconnect double wait method', function( + test, + setOnReconnect +) { + var stream = new StubStream(); + var conn = newConnection(stream); + startAndConnect(test, stream); + + var output = []; + setOnReconnect(conn, function() { + conn.apply('reconnectMethod', [], { wait: true }, function(err, result) { + output.push('reconnect'); + }); + }); + + conn.apply('halfwayMethod', [], { wait: true }, function(err, result) { + output.push('halfway'); + }); + + test.equal(output, []); + // Method sent. + var halfwayId = testGotMessage(test, stream, { + msg: 'method', + method: 'halfwayMethod', + params: [], + id: '*' + }).id; + test.equal(stream.sent.length, 0); + + // Get the result. This means it will not be resent. + stream.receive({ msg: 'result', id: halfwayId, result: 'bla' }); + // Callback not called. + test.equal(output, []); + + // Reset stream. halfwayMethod does NOT get resent, but reconnectMethod does! + // Reconnect quiescence happens when reconnectMethod is done. + stream.reset(); + testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); + var reconnectId = testGotMessage(test, stream, { + msg: 'method', + method: 'reconnectMethod', + params: [], + id: '*' + }).id; + test.length(stream.sent, 0); + // Still holding out hope for session resumption, so no callbacks yet. + test.equal(output, []); + + // Receive 'connected', but reconnect quiescence is blocking on + // reconnectMethod. + stream.receive({ msg: 'connected', session: SESSION_ID + 1 }); + test.equal(output, []); + + // Data-done for reconnectMethod. This gets us to reconnect quiescence, so + // halfwayMethod's callback fires. reconnectMethod's is still waiting on its + // result. 
+ stream.receive({ msg: 'updated', methods: [reconnectId] }); + test.equal(output.shift(), 'halfway'); + test.equal(output, []); + + // Get result of reconnectMethod. Its callback fires. + stream.receive({ msg: 'result', id: reconnectId, result: 'foo' }); + test.equal(output.shift(), 'reconnect'); + test.equal(output, []); + + // Call another method. It should be delivered immediately. This is a + // regression test for a case where it never got delivered because there was + // an empty block in _outstandingMethodBlocks blocking it from being sent. + conn.call('lastMethod', _.identity); + testGotMessage(test, stream, { + msg: 'method', + method: 'lastMethod', + params: [], + id: '*' + }); +}); + +Tinytest.add('livedata stub - subscribe errors', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + // subscribe + var onReadyFired = false; + var subErrorInStopped = null; + var subErrorInError = null; + + conn.subscribe('unknownSub', { + onReady: function() { + onReadyFired = true; + }, + + // We now have two ways to get the error from a subscription: + // 1. onStop, which is called no matter what when the subscription is + // stopped (a lifecycle callback) + // 2. onError, which is deprecated and is called only if there is an + // error + onStop: function(error) { + subErrorInStopped = error; + }, + onError: function(error) { + subErrorInError = error; + } + }); + + test.isFalse(onReadyFired); + test.equal(subErrorInStopped, null); + + // XXX COMPAT WITH 1.0.3.1 #errorCallback + test.equal(subErrorInError, null); + + var subMessage = JSON.parse(stream.sent.shift()); + test.equal(subMessage, { + msg: 'sub', + name: 'unknownSub', + params: [], + id: subMessage.id + }); + + // Reject the sub. 
+ stream.receive({ + msg: 'nosub', + id: subMessage.id, + error: new Meteor.Error(404, 'Subscription not found') + }); + test.isFalse(onReadyFired); + + // Check the error passed to the stopped callback was correct + test.instanceOf(subErrorInStopped, Meteor.Error); + test.equal(subErrorInStopped.error, 404); + test.equal(subErrorInStopped.reason, 'Subscription not found'); + + // Check the error passed to the error callback was correct + // XXX COMPAT WITH 1.0.3.1 #errorCallback + test.instanceOf(subErrorInError, Meteor.Error); + test.equal(subErrorInError.error, 404); + test.equal(subErrorInError.reason, 'Subscription not found'); + + // stream reset: reconnect! + stream.reset(); + // We send a connect. + testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); + // We should NOT re-sub to the sub, because we processed the error. + test.length(stream.sent, 0); + test.isFalse(onReadyFired); +}); + +Tinytest.add('livedata stub - subscribe stop', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + // subscribe + var onReadyFired = false; + var onStopFired = false; + var subErrorInStopped = null; + + var sub = conn.subscribe('my_data', { + onStop: function(error) { + onStopFired = true; + subErrorInStopped = error; + } + }); + + test.equal(subErrorInStopped, null); + + sub.stop(); + + test.isTrue(onStopFired); + test.equal(subErrorInStopped, undefined); +}); + +if (Meteor.isClient) { + Tinytest.add('livedata stub - stubs before connected', function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + + // Start and send "connect", but DON'T get 'connected' quite yet. + stream.reset(); // initial connection start. + + testGotMessage(test, stream, makeConnectMessage()); + test.length(stream.sent, 0); + + // Insert a document. The stub updates "conn" directly. 
+ coll.insert({ _id: 'foo', bar: 42 }, _.identity); + test.equal(coll.find().count(), 1); + test.equal(coll.findOne(), { _id: 'foo', bar: 42 }); + // It also sends the method message. + var methodMessage = JSON.parse(stream.sent.shift()); + test.isUndefined(methodMessage.randomSeed); + test.equal(methodMessage, { + msg: 'method', + method: '/' + collName + '/insert', + params: [{ _id: 'foo', bar: 42 }], + id: methodMessage.id + }); + test.length(stream.sent, 0); + + // Now receive a connected message. This should not clear the + // _documentsWrittenByStub state! + stream.receive({ msg: 'connected', session: SESSION_ID }); + test.length(stream.sent, 0); + test.equal(coll.find().count(), 1); + + // Now receive the "updated" message for the method. This should revert the + // insert. + stream.receive({ msg: 'updated', methods: [methodMessage.id] }); + test.length(stream.sent, 0); + test.equal(coll.find().count(), 0); + }); +} + +if (Meteor.isClient) { + Tinytest.add( + 'livedata stub - method call between reset and quiescence', + function(test) { + var stream = new StubStream(); + var conn = newConnection(stream); + + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + + conn.methods({ + update_value: function() { + coll.update('aaa', { value: 222 }); + } + }); + + // Set up test subscription. + var sub = conn.subscribe('test_data'); + var subMessage = JSON.parse(stream.sent.shift()); + test.equal(subMessage, { + msg: 'sub', + name: 'test_data', + params: [], + id: subMessage.id + }); + test.length(stream.sent, 0); + + var subDocMessage = { + msg: 'added', + collection: collName, + id: 'aaa', + fields: { value: 111 } + }; + + var subReadyMessage = { msg: 'ready', subs: [subMessage.id] }; + + stream.receive(subDocMessage); + stream.receive(subReadyMessage); + test.isTrue(coll.findOne('aaa').value == 111); + + // Initiate reconnect. 
+ stream.reset(); + testGotMessage(test, stream, makeConnectMessage(SESSION_ID)); + testGotMessage(test, stream, subMessage); + stream.receive({ msg: 'connected', session: SESSION_ID + 1 }); + + // Now in reconnect, can still see the document. + test.isTrue(coll.findOne('aaa').value == 111); + + conn.call('update_value'); + + // Observe the stub-written value. + test.isTrue(coll.findOne('aaa').value == 222); + + var methodMessage = JSON.parse(stream.sent.shift()); + test.equal(methodMessage, { + msg: 'method', + method: 'update_value', + params: [], + id: methodMessage.id + }); + test.length(stream.sent, 0); + + stream.receive(subDocMessage); + stream.receive(subReadyMessage); + + // By this point quiescence is reached and stores have been reset. + + // The stub-written value is still there. + test.isTrue(coll.findOne('aaa').value == 222); + + stream.receive({ + msg: 'changed', + collection: collName, + id: 'aaa', + fields: { value: 333 } + }); + stream.receive({ msg: 'updated', methods: [methodMessage.id] }); + stream.receive({ msg: 'result', id: methodMessage.id, result: null }); + + // Server wrote a different value, make sure it's visible now. + test.isTrue(coll.findOne('aaa').value == 333); + } + ); + + Tinytest.add('livedata stub - buffering and methods interaction', function( + test + ) { + var stream = new StubStream(); + var conn = newConnection(stream, { + // A very high values so that all messages are effectively buffered. + bufferedWritesInterval: 10000, + bufferedWritesMaxAge: 10000 + }); + + startAndConnect(test, stream); + + var collName = Random.id(); + var coll = new Mongo.Collection(collName, { connection: conn }); + + conn.methods({ + update_value: function() { + const value = coll.findOne('aaa').subscription; + // Method should have access to the latest value of the collection. + coll.update('aaa', { $set: { method: value + 110 } }); + } + }); + + // Set up test subscription. 
+ var sub = conn.subscribe('test_data'); + var subMessage = JSON.parse(stream.sent.shift()); + test.equal(subMessage, { + msg: 'sub', + name: 'test_data', + params: [], + id: subMessage.id + }); + test.length(stream.sent, 0); + + var subDocMessage = { + msg: 'added', + collection: collName, + id: 'aaa', + fields: { subscription: 111 } + }; + + var subReadyMessage = { msg: 'ready', subs: [subMessage.id] }; + + stream.receive(subDocMessage); + stream.receive(subReadyMessage); + test.equal(coll.findOne('aaa').subscription, 111); + + var subDocChangeMessage = { + msg: 'changed', + collection: collName, + id: 'aaa', + fields: { subscription: 112 } + }; + + stream.receive(subDocChangeMessage); + // Still 111 because buffer has not been flushed. + test.equal(coll.findOne('aaa').subscription, 111); + + // Call updates the stub. + conn.call('update_value'); + + // Observe the stub-written value. + test.equal(coll.findOne('aaa').method, 222); + // subscription field is updated to the latest value + // because of the method call. + test.equal(coll.findOne('aaa').subscription, 112); + + var methodMessage = JSON.parse(stream.sent.shift()); + test.equal(methodMessage, { + msg: 'method', + method: 'update_value', + params: [], + id: methodMessage.id + }); + test.length(stream.sent, 0); + + // "Server-side" change from the method arrives and method returns. + // With potentially fixed value for method field, if stub didn't + // use 112 as the subscription field value. + stream.receive({ + msg: 'changed', + collection: collName, + id: 'aaa', + fields: { method: 222 } + }); + stream.receive({ msg: 'updated', methods: [methodMessage.id] }); + stream.receive({ msg: 'result', id: methodMessage.id, result: null }); + + test.equal(coll.findOne('aaa').method, 222); + test.equal(coll.findOne('aaa').subscription, 112); + + // Buffer should already be flushed because of a non-update message. + // And after a flush we really want subscription field to be 112. 
+ conn._flushBufferedWrites(); + test.equal(coll.findOne('aaa').method, 222); + test.equal(coll.findOne('aaa').subscription, 112); + }); +} + +// XXX also test: +// - reconnect, with session resume. +// - restart on update flag +// - on_update event +// - reloading when the app changes, including session migration diff --git a/packages/ddp-client/livedata_test_service.js b/packages/ddp-client/test/livedata_test_service.js similarity index 54% rename from packages/ddp-client/livedata_test_service.js rename to packages/ddp-client/test/livedata_test_service.js index da47c046e5..7e4dbe26e0 100644 --- a/packages/ddp-client/livedata_test_service.js +++ b/packages/ddp-client/test/livedata_test_service.js @@ -1,40 +1,42 @@ Meteor.methods({ - nothing: function () { + nothing: function() { // No need to check if there are no arguments. }, - echo: function (/* arguments */) { + echo: function(/* arguments */) { check(arguments, [Match.Any]); return _.toArray(arguments); }, - echoOne: function (/*arguments*/) { + echoOne: function(/*arguments*/) { check(arguments, [Match.Any]); return arguments[0]; }, - exception: function (where, options) { + exception: function(where, options) { check(where, String); - check(options, Match.Optional({ - intended: Match.Optional(Boolean), - throwThroughFuture: Match.Optional(Boolean) - })); - options = options || {}; + check( + options, + Match.Optional({ + intended: Match.Optional(Boolean), + throwThroughFuture: Match.Optional(Boolean) + }) + ); + options = options || Object.create(null); var shouldThrow = - (Meteor.isServer && where === "server") || - (Meteor.isClient && where === "client") || - where === "both"; + (Meteor.isServer && where === 'server') || + (Meteor.isClient && where === 'client') || + where === 'both'; if (shouldThrow) { var e; if (options.intended) - e = new Meteor.Error(999, "Client-visible test exception"); - else - e = new Error("Test method throwing an exception"); - e.expected = true; + e = new Meteor.Error(999, 
'Client-visible test exception'); + else e = new Error('Test method throwing an exception'); + e._expectedByTest = true; // We used to improperly serialize errors that were thrown through a // future first. if (Meteor.isServer && options.throwThroughFuture) { var Future = Npm.require('fibers/future'); - var f = new Future; + var f = new Future(); f['throw'](e); e = f.wait(); } @@ -55,15 +57,14 @@ Meteor.methods({ if (Meteor.isServer) { // Keys are random tokens, used to isolate multiple test invocations from each // other. - var waiters = {}; + var waiters = Object.create(null); var Future = Npm.require('fibers/future'); - var returnThroughFuture = function (token, returnValue) { + var returnThroughFuture = function(token, returnValue) { // Make sure that when we call return, the fields are already cleared. var record = waiters[token]; - if (!record) - return; + if (!record) return; delete waiters[token]; record.future['return'](returnValue); }; @@ -71,12 +72,12 @@ if (Meteor.isServer) { Meteor.methods({ delayedTrue: function(token) { check(token, String); - var record = waiters[token] = { + var record = (waiters[token] = { future: new Future(), timer: Meteor.setTimeout(function() { returnThroughFuture(token, true); }, 1000) - }; + }); this.unblock(); return record.future.wait(); @@ -84,8 +85,7 @@ if (Meteor.isServer) { makeDelayedTrueImmediatelyReturnFalse: function(token) { check(token, String); var record = waiters[token]; - if (!record) - return; // since delayedTrue's timeout had already run + if (!record) return; // since delayedTrue's timeout had already run clearTimeout(record.timer); returnThroughFuture(token, false); } @@ -94,51 +94,59 @@ if (Meteor.isServer) { /*****/ -Ledger = new Mongo.Collection("ledger"); +Ledger = new Mongo.Collection('ledger'); Ledger.allow({ - insert: function() { return true; }, - update: function() { return true; }, - remove: function() { return true; }, + insert: function() { + return true; + }, + update: function() { + return 
true; + }, + remove: function() { + return true; + }, fetch: [] }); -Meteor.startup(function () { - if (Meteor.isServer) - Ledger.remove({}); // XXX can this please be Ledger.remove()? +Meteor.startup(function() { + if (Meteor.isServer) Ledger.remove({}); // XXX can this please be Ledger.remove()? }); if (Meteor.isServer) - Meteor.publish('ledger', function (world) { + Meteor.publish('ledger', function(world) { check(world, String); - return Ledger.find({world: world}); + return Ledger.find({ world: world }); }); Meteor.methods({ - 'ledger/transfer': function (world, from_name, to_name, amount, cheat) { + 'ledger/transfer': function(world, from_name, to_name, amount, cheat) { check(world, String); check(from_name, String); check(to_name, String); check(amount, Number); check(cheat, Match.Optional(Boolean)); - var from = Ledger.findOne({name: from_name, world: world}); - var to = Ledger.findOne({name: to_name, world: world}); + var from = Ledger.findOne({ name: from_name, world: world }); + var to = Ledger.findOne({ name: to_name, world: world }); - if (Meteor.isServer) - cheat = false; + if (Meteor.isServer) cheat = false; if (!from) - throw new Meteor.Error(404, - "No such account " + from_name + " in " + world); + throw new Meteor.Error( + 404, + 'No such account ' + from_name + ' in ' + world + ); if (!to) - throw new Meteor.Error(404, - "No such account " + to_name + " in " + world); + throw new Meteor.Error( + 404, + 'No such account ' + to_name + ' in ' + world + ); if (from.balance < amount && !cheat) - throw new Meteor.Error(409, "Insufficient funds"); + throw new Meteor.Error(409, 'Insufficient funds'); - Ledger.update(from._id, {$inc: {balance: -amount}}); - Ledger.update(to._id, {$inc: {balance: amount}}); + Ledger.update(from._id, { $inc: { balance: -amount } }); + Ledger.update(to._id, { $inc: { balance: amount } }); } }); @@ -146,25 +154,33 @@ Meteor.methods({ /// Helpers for "livedata - changing userid reruns subscriptions..." 
-objectsWithUsers = new Mongo.Collection("objectsWithUsers"); +objectsWithUsers = new Mongo.Collection('objectsWithUsers'); if (Meteor.isServer) { objectsWithUsers.remove({}); - objectsWithUsers.insert({name: "owned by none", ownerUserIds: [null]}); - objectsWithUsers.insert({name: "owned by one - a", ownerUserIds: ["1"]}); - objectsWithUsers.insert({name: "owned by one/two - a", ownerUserIds: ["1", "2"]}); - objectsWithUsers.insert({name: "owned by one/two - b", ownerUserIds: ["1", "2"]}); - objectsWithUsers.insert({name: "owned by two - a", ownerUserIds: ["2"]}); - objectsWithUsers.insert({name: "owned by two - b", ownerUserIds: ["2"]}); + objectsWithUsers.insert({ name: 'owned by none', ownerUserIds: [null] }); + objectsWithUsers.insert({ name: 'owned by one - a', ownerUserIds: ['1'] }); + objectsWithUsers.insert({ + name: 'owned by one/two - a', + ownerUserIds: ['1', '2'] + }); + objectsWithUsers.insert({ + name: 'owned by one/two - b', + ownerUserIds: ['1', '2'] + }); + objectsWithUsers.insert({ name: 'owned by two - a', ownerUserIds: ['2'] }); + objectsWithUsers.insert({ name: 'owned by two - b', ownerUserIds: ['2'] }); - Meteor.publish("objectsWithUsers", function() { - return objectsWithUsers.find({ownerUserIds: this.userId}, - {fields: {ownerUserIds: 0}}); + Meteor.publish('objectsWithUsers', function() { + return objectsWithUsers.find( + { ownerUserIds: this.userId }, + { fields: { ownerUserIds: 0 } } + ); }); - (function () { - var userIdWhenStopped = {}; - Meteor.publish("recordUserIdOnStop", function (key) { + (function() { + var userIdWhenStopped = Object.create(null); + Meteor.publish('recordUserIdOnStop', function(key) { check(key, String); var self = this; self.onStop(function() { @@ -173,7 +189,7 @@ if (Meteor.isServer) { }); Meteor.methods({ - userIdWhenStopped: function (key) { + userIdWhenStopped: function(key) { check(key, String); return userIdWhenStopped[key]; } @@ -189,7 +205,7 @@ if (Meteor.isServer) { Meteor.startup(function() { 
errorThrownWhenCallingSetUserIdDirectlyOnServer = null; try { - Meteor.call("setUserId", "1000"); + Meteor.call('setUserId', '1000'); } catch (e) { errorThrownWhenCallingSetUserIdDirectlyOnServer = e; } @@ -200,14 +216,14 @@ if (Meteor.isServer) { if (Meteor.isServer) { Meteor.methods({ - setUserIdAfterUnblock: function () { + setUserIdAfterUnblock: function() { this.unblock(); var threw = false; var originalUserId = this.userId; try { // Calling setUserId after unblock should throw an error (and not mutate // userId). - this.setUserId(originalUserId + "bla"); + this.setUserId(originalUserId + 'bla'); } catch (e) { threw = true; } @@ -221,31 +237,33 @@ if (Meteor.isServer) { /// Helper for "livedata - overlapping universal subs" if (Meteor.isServer) { - (function(){ - var collName = "overlappingUniversalSubs"; + (function() { + var collName = 'overlappingUniversalSubs'; var universalSubscribers = [[], []]; - _.each([0, 1], function (index) { - Meteor.publish(null, function () { + _.each([0, 1], function(index) { + Meteor.publish(null, function() { var sub = this; universalSubscribers[index].push(sub); - sub.onStop(function () { + sub.onStop(function() { universalSubscribers[index] = _.without( - universalSubscribers[index], sub); + universalSubscribers[index], + sub + ); }); }); }); Meteor.methods({ - testOverlappingSubs: function (token) { + testOverlappingSubs: function(token) { check(token, String); - _.each(universalSubscribers[0], function (sub) { + _.each(universalSubscribers[0], function(sub) { sub.added(collName, token, {}); }); - _.each(universalSubscribers[1], function (sub) { + _.each(universalSubscribers[1], function(sub) { sub.added(collName, token, {}); }); - _.each(universalSubscribers[0], function (sub) { + _.each(universalSubscribers[0], function(sub) { sub.removed(collName, token); }); } @@ -257,10 +275,10 @@ if (Meteor.isServer) { if (Meteor.isServer) { Meteor.methods({ - runtimeUniversalSubCreation: function (token) { + 
runtimeUniversalSubCreation: function(token) { check(token, String); - Meteor.publish(null, function () { - this.added("runtimeSubCreation", token, {}); + Meteor.publish(null, function() { + this.added('runtimeSubCreation', token, {}); }); } }); @@ -269,7 +287,7 @@ if (Meteor.isServer) { /// Helper for "livedata - publisher errors" if (Meteor.isServer) { - Meteor.publish("publisherErrors", function (collName, options) { + Meteor.publish('publisherErrors', function(collName, options) { check(collName, String); // See below to see what options are accepted. check(options, Object); @@ -278,7 +296,7 @@ if (Meteor.isServer) { // First add a random item, which should be cleaned up. We use ready/onReady // to make sure that the second test block is only called after the added is // processed, so that there's any chance of the coll.find().count() failing. - sub.added(collName, Random.id(), {foo: 42}); + sub.added(collName, Random.id(), { foo: 42 }); sub.ready(); if (options.stopInHandler) { @@ -288,84 +306,71 @@ if (Meteor.isServer) { var error; if (options.internalError) { - error = new Error("Egads!"); - error.expected = true; // don't log + error = new Error('Egads!'); + error._expectedByTest = true; // don't log } else { - error = new Meteor.Error(412, "Explicit error"); + error = new Meteor.Error(412, 'Explicit error'); } if (options.throwInHandler) { throw error; } else if (options.errorInHandler) { sub.error(error); } else if (options.throwWhenUserIdSet) { - if (sub.userId) - throw error; + if (sub.userId) throw error; } else if (options.errorLater) { - Meteor.defer(function () { + Meteor.defer(function() { sub.error(error); }); } }); } - /*****/ /// Helpers for "livedata - publish multiple cursors" -One = new Mongo.Collection("collectionOne"); -Two = new Mongo.Collection("collectionTwo"); +One = new Mongo.Collection('collectionOne'); +Two = new Mongo.Collection('collectionTwo'); if (Meteor.isServer) { One.remove({}); - One.insert({name: "value1"}); - 
One.insert({name: "value2"}); + One.insert({ name: 'value1' }); + One.insert({ name: 'value2' }); Two.remove({}); - Two.insert({name: "value3"}); - Two.insert({name: "value4"}); - Two.insert({name: "value5"}); + Two.insert({ name: 'value3' }); + Two.insert({ name: 'value4' }); + Two.insert({ name: 'value5' }); - Meteor.publish("multiPublish", function (options) { + Meteor.publish('multiPublish', function(options) { // See below to see what options are accepted. check(options, Object); if (options.normal) { - return [ - One.find(), - Two.find() - ]; + return [One.find(), Two.find()]; } else if (options.dup) { // Suppress the log of the expected internal error. Meteor._suppress_log(1); return [ One.find(), - One.find({name: "value2"}), // multiple cursors for one collection - error + One.find({ name: 'value2' }), // multiple cursors for one collection - error Two.find() ]; } else if (options.notCursor) { // Suppress the log of the expected internal error. Meteor._suppress_log(1); - return [ - One.find(), - "not a cursor", - Two.find() - ]; - } else - throw "unexpected options"; + return [One.find(), 'not a cursor', Two.find()]; + } else throw 'unexpected options'; }); } - /// Helper for "livedata - result by value" -var resultByValueArrays = {}; +var resultByValueArrays = Object.create(null); Meteor.methods({ - 'getArray': function (testId) { - if (! _.has(resultByValueArrays, testId)) - resultByValueArrays[testId] = []; + getArray: function(testId) { + if (!_.has(resultByValueArrays, testId)) resultByValueArrays[testId] = []; return resultByValueArrays[testId]; }, - 'pushToArray': function (testId, value) { - if (! 
_.has(resultByValueArrays, testId)) - resultByValueArrays[testId] = []; + pushToArray: function(testId, value) { + if (!_.has(resultByValueArrays, testId)) resultByValueArrays[testId] = []; resultByValueArrays[testId].push(value); } }); diff --git a/packages/ddp-client/test/livedata_tests.js b/packages/ddp-client/test/livedata_tests.js new file mode 100644 index 0000000000..34d95449ee --- /dev/null +++ b/packages/ddp-client/test/livedata_tests.js @@ -0,0 +1,1095 @@ +import { DDP } from '../common/namespace.js'; +import { Connection } from '../common/livedata_connection.js'; + +// XXX should check error codes +var failure = function(test, code, reason) { + return function(error, result) { + test.equal(result, undefined); + test.isTrue(error && typeof error === 'object'); + if (error && typeof error === 'object') { + if (typeof code === 'number') { + test.instanceOf(error, Meteor.Error); + code && test.equal(error.error, code); + reason && test.equal(error.reason, reason); + // XXX should check that other keys aren't present.. 
should + // probably use something like the Matcher we used to have + } else { + // for normal Javascript errors + test.instanceOf(error, Error); + test.equal(error.message, code); + } + } + }; +}; + +var failureOnStopped = function(test, code, reason) { + var f = failure(test, code, reason); + + return function(error) { + if (error) { + f(error); + } + }; +}; + +Tinytest.add('livedata - Meteor.Error', function(test) { + var error = new Meteor.Error(123, 'kittens', 'puppies'); + test.instanceOf(error, Meteor.Error); + test.instanceOf(error, Error); + test.equal(error.error, 123); + test.equal(error.reason, 'kittens'); + test.equal(error.details, 'puppies'); +}); + +if (Meteor.isServer) { + Tinytest.add('livedata - version negotiation', function(test) { + var versionCheck = function(clientVersions, serverVersions, expected) { + test.equal( + DDPServer._calculateVersion(clientVersions, serverVersions), + expected + ); + }; + + versionCheck(['A', 'B', 'C'], ['A', 'B', 'C'], 'A'); + versionCheck(['B', 'C'], ['A', 'B', 'C'], 'B'); + versionCheck(['A', 'B', 'C'], ['B', 'C'], 'B'); + versionCheck(['foo', 'bar', 'baz'], ['A', 'B', 'C'], 'A'); + }); +} + +Tinytest.add('livedata - methods with colliding names', function(test) { + var x = Random.id(); + var m = {}; + m[x] = function() {}; + Meteor.methods(m); + + test.throws(function() { + Meteor.methods(m); + }); +}); + +Tinytest.add('livedata - non-function method', function(test) { + var x = Random.id(); + var m = {}; + m[x] = 'kitten'; + + test.throws(function() { + Meteor.methods(m); + }); +}); + +var echoTest = function(item) { + return function(test, expect) { + if (Meteor.isServer) { + test.equal(Meteor.call('echo', item), [item]); + test.equal(Meteor.call('echoOne', item), item); + } + if (Meteor.isClient) test.equal(Meteor.call('echo', item), undefined); + + test.equal(Meteor.call('echo', item, expect(undefined, [item])), undefined); + test.equal( + Meteor.call('echoOne', item, expect(undefined, item)), + undefined 
+ ); + }; +}; + +testAsyncMulti('livedata - basic method invocation', [ + // Unknown methods + function(test, expect) { + if (Meteor.isServer) { + // On server, with no callback, throws exception + try { + var ret = Meteor.call('unknown method'); + } catch (e) { + test.equal(e.error, 404); + var threw = true; + } + test.isTrue(threw); + test.equal(ret, undefined); + } + + if (Meteor.isClient) { + // On client, with no callback, just returns undefined + var ret = Meteor.call('unknown method'); + test.equal(ret, undefined); + } + + // On either, with a callback, calls the callback and does not throw + var ret = Meteor.call( + 'unknown method', + expect(failure(test, 404, "Method 'unknown method' not found")) + ); + test.equal(ret, undefined); + }, + + function(test, expect) { + // make sure 'undefined' is preserved as such, instead of turning + // into null (JSON does not have 'undefined' so there is special + // code for this) + if (Meteor.isServer) test.equal(Meteor.call('nothing'), undefined); + if (Meteor.isClient) test.equal(Meteor.call('nothing'), undefined); + + test.equal(Meteor.call('nothing', expect(undefined, undefined)), undefined); + }, + + function(test, expect) { + if (Meteor.isServer) test.equal(Meteor.call('echo'), []); + if (Meteor.isClient) test.equal(Meteor.call('echo'), undefined); + + test.equal(Meteor.call('echo', expect(undefined, [])), undefined); + }, + + echoTest(new Date()), + echoTest({ d: new Date(), s: 'foobarbaz' }), + echoTest([new Date(), 'foobarbaz']), + echoTest(new Mongo.ObjectID()), + echoTest({ o: new Mongo.ObjectID() }), + echoTest({ $date: 30 }), // literal + echoTest({ $literal: { $date: 30 } }), + echoTest(12), + echoTest(Infinity), + echoTest(-Infinity), + + function(test, expect) { + if (Meteor.isServer) + test.equal(Meteor.call('echo', 12, { x: 13 }), [12, { x: 13 }]); + if (Meteor.isClient) + test.equal(Meteor.call('echo', 12, { x: 13 }), undefined); + + test.equal( + Meteor.call('echo', 12, { x: 13 }, expect(undefined, 
[12, { x: 13 }])), + undefined + ); + }, + + // test that `wait: false` is respected + function(test, expect) { + if (Meteor.isClient) { + // For test isolation + var token = Random.id(); + Meteor.apply( + 'delayedTrue', + [token], + { wait: false }, + expect(function(err, res) { + test.equal(res, false); + }) + ); + Meteor.apply('makeDelayedTrueImmediatelyReturnFalse', [token]); + } + }, + + // test that `wait: true` is respected + function(test, expect) { + if (Meteor.isClient) { + var token = Random.id(); + Meteor.apply( + 'delayedTrue', + [token], + { wait: true }, + expect(function(err, res) { + test.equal(res, true); + }) + ); + Meteor.apply('makeDelayedTrueImmediatelyReturnFalse', [token]); + } + }, + + function(test, expect) { + // No callback + + if (Meteor.isServer) { + test.throws(function() { + Meteor.call('exception', 'both'); + }); + test.throws(function() { + Meteor.call('exception', 'server'); + }); + // No exception, because no code will run on the client + test.equal(Meteor.call('exception', 'client'), undefined); + } + + if (Meteor.isClient) { + // The client exception is thrown away because it's in the + // stub. The server exception is throw away because we didn't + // give a callback. 
+ test.equal(Meteor.call('exception', 'both'), undefined); + test.equal(Meteor.call('exception', 'server'), undefined); + test.equal(Meteor.call('exception', 'client'), undefined); + + // If we pass throwStubExceptions then we *should* see thrown exceptions + // on the client + test.throws(function() { + Meteor.apply('exception', ['both'], { throwStubExceptions: true }); + }); + test.equal( + Meteor.apply('exception', ['server'], { throwStubExceptions: true }), + undefined + ); + test.throws(function() { + Meteor.apply('exception', ['client'], { throwStubExceptions: true }); + }); + } + + // With callback + + if (Meteor.isClient) { + test.equal( + Meteor.call( + 'exception', + 'both', + expect(failure(test, 500, 'Internal server error')) + ), + undefined + ); + test.equal( + Meteor.call( + 'exception', + 'server', + expect(failure(test, 500, 'Internal server error')) + ), + undefined + ); + test.equal(Meteor.call('exception', 'client'), undefined); + } + + if (Meteor.isServer) { + test.equal( + Meteor.call( + 'exception', + 'both', + expect(failure(test, 'Test method throwing an exception')) + ), + undefined + ); + test.equal( + Meteor.call( + 'exception', + 'server', + expect(failure(test, 'Test method throwing an exception')) + ), + undefined + ); + test.equal(Meteor.call('exception', 'client'), undefined); + } + }, + + function(test, expect) { + if (Meteor.isServer) { + var threw = false; + try { + Meteor.call('exception', 'both', { intended: true }); + } catch (e) { + threw = true; + test.equal(e.error, 999); + test.equal(e.reason, 'Client-visible test exception'); + } + test.isTrue(threw); + threw = false; + try { + Meteor.call('exception', 'both', { + intended: true, + throwThroughFuture: true + }); + } catch (e) { + threw = true; + test.equal(e.error, 999); + test.equal(e.reason, 'Client-visible test exception'); + } + test.isTrue(threw); + } + + if (Meteor.isClient) { + test.equal( + Meteor.call( + 'exception', + 'both', + { intended: true }, + 
expect(failure(test, 999, 'Client-visible test exception')) + ), + undefined + ); + test.equal( + Meteor.call( + 'exception', + 'server', + { intended: true }, + expect(failure(test, 999, 'Client-visible test exception')) + ), + undefined + ); + test.equal( + Meteor.call( + 'exception', + 'server', + { + intended: true, + throwThroughFuture: true + }, + expect(failure(test, 999, 'Client-visible test exception')) + ), + undefined + ); + } + } +]); + +var checkBalances = function(test, a, b) { + var alice = Ledger.findOne({ name: 'alice', world: test.runId() }); + var bob = Ledger.findOne({ name: 'bob', world: test.runId() }); + test.equal(alice.balance, a); + test.equal(bob.balance, b); +}; + +// would be nice to have a database-aware test harness of some kind -- +// this is a big hack (and XXX pollutes the global test namespace) +testAsyncMulti('livedata - compound methods', [ + function(test, expect) { + if (Meteor.isClient) Meteor.subscribe('ledger', test.runId(), expect()); + + Ledger.insert( + { name: 'alice', balance: 100, world: test.runId() }, + expect(function() {}) + ); + Ledger.insert( + { name: 'bob', balance: 50, world: test.runId() }, + expect(function() {}) + ); + }, + function(test, expect) { + Meteor.call( + 'ledger/transfer', + test.runId(), + 'alice', + 'bob', + 10, + expect(function(err, result) { + test.equal(err, undefined); + test.equal(result, undefined); + checkBalances(test, 90, 60); + }) + ); + checkBalances(test, 90, 60); + }, + function(test, expect) { + Meteor.call( + 'ledger/transfer', + test.runId(), + 'alice', + 'bob', + 100, + true, + expect(function(err, result) { + failure(test, 409)(err, result); + // Balances are reverted back to pre-stub values. 
+ checkBalances(test, 90, 60); + }) + ); + + if (Meteor.isClient) + // client can fool itself by cheating, but only until the sync + // finishes + checkBalances(test, -10, 160); + else checkBalances(test, 90, 60); + } +]); + +// Replaces the Connection's `_livedata_data` method to push incoming +// messages on a given collection to an array. This can be used to +// verify that the right data is sent on the wire +// +// @param messages {Array} The array to which to append the messages +// @return {Function} A function to call to undo the eavesdropping +var eavesdropOnCollection = function( + livedata_connection, + collection_name, + messages +) { + var old_livedata_data = _.bind( + livedata_connection._livedata_data, + livedata_connection + ); + + // Kind of gross since all tests past this one will run with this + // hook set up. That's probably fine since we only check a specific + // collection but still... + // + // Should we consider having a separate connection per Tinytest or + // some similar scheme? + livedata_connection._livedata_data = function(msg) { + if (msg.collection && msg.collection === collection_name) { + messages.push(msg); + } + old_livedata_data(msg); + }; + + return function() { + livedata_connection._livedata_data = old_livedata_data; + }; +}; + +if (Meteor.isClient) { + testAsyncMulti( + 'livedata - changing userid reruns subscriptions without flapping data on the wire', + [ + function(test, expect) { + var messages = []; + var undoEavesdrop = eavesdropOnCollection( + Meteor.connection, + 'objectsWithUsers', + messages + ); + + // A helper for testing incoming set and unset messages + // XXX should this be extracted as a general helper together with + // eavesdropOnCollection? 
+ var expectMessages = function( + expectedAddedMessageCount, + expectedRemovedMessageCount, + expectedNamesInCollection + ) { + var actualAddedMessageCount = 0; + var actualRemovedMessageCount = 0; + _.each(messages, function(msg) { + if (msg.msg === 'added') ++actualAddedMessageCount; + else if (msg.msg === 'removed') ++actualRemovedMessageCount; + else test.fail({ unexpected: JSON.stringify(msg) }); + }); + test.equal(actualAddedMessageCount, expectedAddedMessageCount); + test.equal(actualRemovedMessageCount, expectedRemovedMessageCount); + expectedNamesInCollection.sort(); + test.equal( + _.pluck( + objectsWithUsers.find({}, { sort: ['name'] }).fetch(), + 'name' + ), + expectedNamesInCollection + ); + messages.length = 0; // clear messages without creating a new object + }; + + // make sure we're not already logged in. can happen if accounts + // tests fail oddly. + Meteor.apply( + 'setUserId', + [null], + { wait: true }, + expect(function() {}) + ); + + Meteor.subscribe( + 'objectsWithUsers', + expect(function() { + expectMessages(1, 0, ['owned by none']); + Meteor.apply( + 'setUserId', + ['1'], + { wait: true }, + afterFirstSetUserId + ); + }) + ); + + var afterFirstSetUserId = expect(function() { + expectMessages(3, 1, [ + 'owned by one - a', + 'owned by one/two - a', + 'owned by one/two - b' + ]); + Meteor.apply( + 'setUserId', + ['2'], + { wait: true }, + afterSecondSetUserId + ); + }); + + var afterSecondSetUserId = expect(function() { + expectMessages(2, 1, [ + 'owned by one/two - a', + 'owned by one/two - b', + 'owned by two - a', + 'owned by two - b' + ]); + Meteor.apply('setUserId', ['2'], { wait: true }, afterThirdSetUserId); + }); + + var afterThirdSetUserId = expect(function() { + // Nothing should have been sent since the results of the + // query are the same ("don't flap data on the wire") + expectMessages(0, 0, [ + 'owned by one/two - a', + 'owned by one/two - b', + 'owned by two - a', + 'owned by two - b' + ]); + undoEavesdrop(); + }); + }, + 
function(test, expect) { + var key = Random.id(); + Meteor.subscribe('recordUserIdOnStop', key); + Meteor.apply( + 'setUserId', + ['100'], + { wait: true }, + expect(function() {}) + ); + Meteor.apply( + 'setUserId', + ['101'], + { wait: true }, + expect(function() {}) + ); + Meteor.call( + 'userIdWhenStopped', + key, + expect(function(err, result) { + test.isFalse(err); + test.equal(result, '100'); + }) + ); + // clean up + Meteor.apply( + 'setUserId', + [null], + { wait: true }, + expect(function() {}) + ); + } + ] + ); +} + +Tinytest.add('livedata - setUserId error when called from server', function( + test +) { + if (Meteor.isServer) { + test.equal( + errorThrownWhenCallingSetUserIdDirectlyOnServer.message, + "Can't call setUserId on a server initiated method call" + ); + } +}); + +if (Meteor.isServer) { + var pubHandles = {}; +} +Meteor.methods({ + 'livedata/setup': function(id) { + check(id, String); + if (Meteor.isServer) { + pubHandles[id] = {}; + Meteor.publish('pub1' + id, function() { + pubHandles[id].pub1 = this; + this.ready(); + }); + Meteor.publish('pub2' + id, function() { + pubHandles[id].pub2 = this; + this.ready(); + }); + } + }, + 'livedata/pub1go': function(id) { + check(id, String); + if (Meteor.isServer) { + pubHandles[id].pub1.added('MultiPubCollection' + id, 'foo', { a: 'aa' }); + return 1; + } + return 0; + }, + 'livedata/pub2go': function(id) { + check(id, String); + if (Meteor.isServer) { + pubHandles[id].pub2.added('MultiPubCollection' + id, 'foo', { b: 'bb' }); + return 2; + } + return 0; + } +}); + +if (Meteor.isClient) { + (function() { + var MultiPub; + var id = Random.id(); + testAsyncMulti('livedata - added from two different subs', [ + function(test, expect) { + Meteor.call('livedata/setup', id, expect(function() {})); + }, + function(test, expect) { + MultiPub = new Mongo.Collection('MultiPubCollection' + id); + var sub1 = Meteor.subscribe('pub1' + id, expect(function() {})); + var sub2 = Meteor.subscribe('pub2' + id, 
expect(function() {})); + }, + function(test, expect) { + Meteor.call( + 'livedata/pub1go', + id, + expect(function(err, res) { + test.equal(res, 1); + }) + ); + }, + function(test, expect) { + test.equal(MultiPub.findOne('foo'), { _id: 'foo', a: 'aa' }); + }, + function(test, expect) { + Meteor.call( + 'livedata/pub2go', + id, + expect(function(err, res) { + test.equal(res, 2); + }) + ); + }, + function(test, expect) { + test.equal(MultiPub.findOne('foo'), { _id: 'foo', a: 'aa', b: 'bb' }); + } + ]); + })(); +} + +if (Meteor.isClient) { + testAsyncMulti('livedata - overlapping universal subs', [ + function(test, expect) { + var coll = new Mongo.Collection('overlappingUniversalSubs'); + var token = Random.id(); + test.isFalse(coll.findOne(token)); + Meteor.call( + 'testOverlappingSubs', + token, + expect(function(err) { + test.isFalse(err); + test.isTrue(coll.findOne(token)); + }) + ); + } + ]); + + testAsyncMulti('livedata - runtime universal sub creation', [ + function(test, expect) { + var coll = new Mongo.Collection('runtimeSubCreation'); + var token = Random.id(); + test.isFalse(coll.findOne(token)); + Meteor.call( + 'runtimeUniversalSubCreation', + token, + expect(function(err) { + test.isFalse(err); + test.isTrue(coll.findOne(token)); + }) + ); + } + ]); + + testAsyncMulti('livedata - no setUserId after unblock', [ + function(test, expect) { + Meteor.call( + 'setUserIdAfterUnblock', + expect(function(err, result) { + test.isFalse(err); + test.isTrue(result); + }) + ); + } + ]); + + testAsyncMulti( + 'livedata - publisher errors with onError callback', + (function() { + var conn, collName, coll; + var errorFromRerun; + var gotErrorFromStopper = false; + return [ + function(test, expect) { + // Use a separate connection so that we can safely check to see if + // conn._subscriptions is empty. 
+ conn = new Connection('/', { + reloadWithOutstanding: true + }); + collName = Random.id(); + coll = new Mongo.Collection(collName, { connection: conn }); + + var testSubError = function(options) { + conn.subscribe('publisherErrors', collName, options, { + onReady: expect(), + onError: expect( + failure( + test, + options.internalError ? 500 : 412, + options.internalError + ? 'Internal server error' + : 'Explicit error' + ) + ) + }); + }; + testSubError({ throwInHandler: true }); + testSubError({ throwInHandler: true, internalError: true }); + testSubError({ errorInHandler: true }); + testSubError({ errorInHandler: true, internalError: true }); + testSubError({ errorLater: true }); + testSubError({ errorLater: true, internalError: true }); + }, + function(test, expect) { + test.equal(coll.find().count(), 0); + test.equal(_.size(conn._subscriptions), 0); // white-box test + + conn.subscribe( + 'publisherErrors', + collName, + { throwWhenUserIdSet: true }, + { + onReady: expect(), + onError: function(error) { + errorFromRerun = error; + } + } + ); + }, + function(test, expect) { + // Because the last subscription is ready, we should have a document. + test.equal(coll.find().count(), 1); + test.isFalse(errorFromRerun); + test.equal(_.size(conn._subscriptions), 1); // white-box test + conn.call('setUserId', 'bla', expect(function() {})); + }, + function(test, expect) { + // Now that we've re-run, we should have stopped the subscription, + // gotten a error, and lost the document. + test.equal(coll.find().count(), 0); + test.isTrue(errorFromRerun); + test.instanceOf(errorFromRerun, Meteor.Error); + test.equal(errorFromRerun.error, 412); + test.equal(errorFromRerun.reason, 'Explicit error'); + test.equal(_.size(conn._subscriptions), 0); // white-box test + + conn.subscribe( + 'publisherErrors', + collName, + { stopInHandler: true }, + { + onError: function() { + gotErrorFromStopper = true; + } + } + ); + // Call a method. 
This method won't be processed until the publisher's + // function returns, so blocking on it being done ensures that we've + // gotten the removed/nosub/etc. + conn.call('nothing', expect(function() {})); + }, + function(test, expect) { + test.equal(coll.find().count(), 0); + // sub.stop does NOT call onError. + test.isFalse(gotErrorFromStopper); + test.equal(_.size(conn._subscriptions), 0); // white-box test + conn._stream.disconnect({ _permanent: true }); + } + ]; + })() + ); + + testAsyncMulti( + 'livedata - publisher errors with onStop callback', + (function() { + var conn, collName, coll; + var errorFromRerun; + var gotErrorFromStopper = false; + return [ + function(test, expect) { + // Use a separate connection so that we can safely check to see if + // conn._subscriptions is empty. + conn = new Connection('/', { + reloadWithOutstanding: true + }); + collName = Random.id(); + coll = new Mongo.Collection(collName, { connection: conn }); + + var testSubError = function(options) { + conn.subscribe('publisherErrors', collName, options, { + onReady: expect(), + onStop: expect( + failureOnStopped( + test, + options.internalError ? 500 : 412, + options.internalError + ? 'Internal server error' + : 'Explicit error' + ) + ) + }); + }; + testSubError({ throwInHandler: true }); + testSubError({ throwInHandler: true, internalError: true }); + testSubError({ errorInHandler: true }); + testSubError({ errorInHandler: true, internalError: true }); + testSubError({ errorLater: true }); + testSubError({ errorLater: true, internalError: true }); + }, + function(test, expect) { + test.equal(coll.find().count(), 0); + test.equal(_.size(conn._subscriptions), 0); // white-box test + + conn.subscribe( + 'publisherErrors', + collName, + { throwWhenUserIdSet: true }, + { + onReady: expect(), + onStop: function(error) { + errorFromRerun = error; + } + } + ); + }, + function(test, expect) { + // Because the last subscription is ready, we should have a document. 
+ test.equal(coll.find().count(), 1); + test.isFalse(errorFromRerun); + test.equal(_.size(conn._subscriptions), 1); // white-box test + conn.call('setUserId', 'bla', expect(function() {})); + }, + function(test, expect) { + // Now that we've re-run, we should have stopped the subscription, + // gotten a error, and lost the document. + test.equal(coll.find().count(), 0); + test.isTrue(errorFromRerun); + test.instanceOf(errorFromRerun, Meteor.Error); + test.equal(errorFromRerun.error, 412); + test.equal(errorFromRerun.reason, 'Explicit error'); + test.equal(_.size(conn._subscriptions), 0); // white-box test + + conn.subscribe( + 'publisherErrors', + collName, + { stopInHandler: true }, + { + onStop: function(error) { + if (error) { + gotErrorFromStopper = true; + } + } + } + ); + // Call a method. This method won't be processed until the publisher's + // function returns, so blocking on it being done ensures that we've + // gotten the removed/nosub/etc. + conn.call('nothing', expect(function() {})); + }, + function(test, expect) { + test.equal(coll.find().count(), 0); + // sub.stop does NOT call onError. 
+ test.isFalse(gotErrorFromStopper); + test.equal(_.size(conn._subscriptions), 0); // white-box test + conn._stream.disconnect({ _permanent: true }); + } + ]; + })() + ); + + testAsyncMulti('livedata - publish multiple cursors', [ + function(test, expect) { + var sub = Meteor.subscribe( + 'multiPublish', + { normal: 1 }, + { + onReady: expect(function() { + test.isTrue(sub.ready()); + test.equal(One.find().count(), 2); + test.equal(Two.find().count(), 3); + }), + onError: failure() + } + ); + }, + function(test, expect) { + Meteor.subscribe( + 'multiPublish', + { dup: 1 }, + { + onReady: failure(), + onError: expect(failure(test, 500, 'Internal server error')) + } + ); + }, + function(test, expect) { + Meteor.subscribe( + 'multiPublish', + { notCursor: 1 }, + { + onReady: failure(), + onError: expect(failure(test, 500, 'Internal server error')) + } + ); + } + ]); +} + +var selfUrl = Meteor.isServer + ? Meteor.absoluteUrl() + : Meteor._relativeToSiteRootUrl('/'); + +if (Meteor.isServer) { + Meteor.methods({ + s2s: function(arg) { + check(arg, String); + return 's2s ' + arg; + } + }); +} +(function() { + testAsyncMulti('livedata - connect works from both client and server', [ + function(test, expect) { + var self = this; + self.conn = DDP.connect(selfUrl); + pollUntil( + expect, + function() { + return self.conn.status().connected; + }, + 10000 + ); + }, + + function(test, expect) { + var self = this; + if (self.conn.status().connected) { + self.conn.call( + 's2s', + 'foo', + expect(function(err, res) { + if (err) throw err; + test.equal(res, 's2s foo'); + }) + ); + } + } + ]); +})(); + +if (Meteor.isServer) { + (function() { + testAsyncMulti('livedata - method call on server blocks in a fiber way', [ + function(test, expect) { + var self = this; + self.conn = DDP.connect(selfUrl); + pollUntil( + expect, + function() { + return self.conn.status().connected; + }, + 10000 + ); + }, + + function(test, expect) { + var self = this; + if (self.conn.status().connected) { + 
test.equal(self.conn.call('s2s', 'foo'), 's2s foo'); + } + } + ]); + })(); +} + +(function() { + testAsyncMulti('livedata - connect fails to unknown place', [ + function(test, expect) { + var self = this; + self.conn = DDP.connect('example.com', { _dontPrintErrors: true }); + Meteor.setTimeout( + expect(function() { + test.isFalse(self.conn.status().connected, 'Not connected'); + self.conn.close(); + }), + 500 + ); + } + ]); +})(); + +if (Meteor.isServer) { + Meteor.publish('publisherCloning', function() { + var self = this; + var fields = { x: { y: 42 } }; + self.added('publisherCloning', 'a', fields); + fields.x.y = 43; + self.changed('publisherCloning', 'a', fields); + self.ready(); + }); +} else { + var PublisherCloningCollection = new Mongo.Collection('publisherCloning'); + testAsyncMulti('livedata - publish callbacks clone', [ + function(test, expect) { + Meteor.subscribe( + 'publisherCloning', + { normal: 1 }, + { + onReady: expect(function() { + test.equal(PublisherCloningCollection.findOne(), { + _id: 'a', + x: { y: 43 } + }); + }), + onError: failure() + } + ); + } + ]); +} + +testAsyncMulti('livedata - result by value', [ + function(test, expect) { + var self = this; + self.testId = Random.id(); + Meteor.call( + 'getArray', + self.testId, + expect(function(error, firstResult) { + test.isFalse(error); + test.isTrue(firstResult); + self.firstResult = firstResult; + }) + ); + }, + function(test, expect) { + var self = this; + Meteor.call( + 'pushToArray', + self.testId, + 'xxx', + expect(function(error) { + test.isFalse(error); + }) + ); + }, + function(test, expect) { + var self = this; + Meteor.call( + 'getArray', + self.testId, + expect(function(error, secondResult) { + test.isFalse(error); + test.equal(self.firstResult.length + 1, secondResult.length); + }) + ); + } +]); + +// XXX some things to test in greater detail: +// staying in simulation mode +// time warp +// serialization / beginAsync(true) / beginAsync(false) +// malformed messages (need raw 
wire access) +// method completion/satisfaction +// subscriptions (multiple APIs, including autorun?) +// subscription completion +// subscription attribute shadowing +// server method calling methods on other server (eg, should simulate) +// subscriptions and methods being idempotent +// reconnection +// reconnection not resulting in method re-execution +// reconnection tolerating all kinds of lost messages (including data) +// [probably lots more] diff --git a/packages/ddp-client/random_stream_tests.js b/packages/ddp-client/test/random_stream_tests.js similarity index 88% rename from packages/ddp-client/random_stream_tests.js rename to packages/ddp-client/test/random_stream_tests.js index 91a3c38d92..2fe2c6e463 100644 --- a/packages/ddp-client/random_stream_tests.js +++ b/packages/ddp-client/test/random_stream_tests.js @@ -1,8 +1,8 @@ -Tinytest.add("livedata - DDP.randomStream", function (test) { +Tinytest.add('livedata - DDP.randomStream', function(test) { var randomSeed = Random.id(); var context = { randomSeed: randomSeed }; - var sequence = DDP._CurrentMethodInvocation.withValue(context, function () { + var sequence = DDP._CurrentMethodInvocation.withValue(context, function() { return DDP.randomStream('1'); }); @@ -21,7 +21,7 @@ Tinytest.add("livedata - DDP.randomStream", function (test) { test.equal(id1, id1Cloned); // We should get the same sequence when we use the same key - sequence = DDP._CurrentMethodInvocation.withValue(context, function () { + sequence = DDP._CurrentMethodInvocation.withValue(context, function() { return DDP.randomStream('1'); }); seeds = sequence.alea.args; @@ -39,6 +39,6 @@ Tinytest.add("livedata - DDP.randomStream", function (test) { test.equal(id2, id2Cloned); }); -Tinytest.add("livedata - DDP.randomStream with no-args", function (test) { +Tinytest.add('livedata - DDP.randomStream with no-args', function(test) { DDP.randomStream().id(); }); diff --git a/packages/ddp-client/stub_stream.js b/packages/ddp-client/test/stub_stream.js 
similarity index 51% rename from packages/ddp-client/stub_stream.js rename to packages/ddp-client/test/stub_stream.js index 5901f276a1..c922affd12 100644 --- a/packages/ddp-client/stub_stream.js +++ b/packages/ddp-client/test/stub_stream.js @@ -1,55 +1,52 @@ -StubStream = function () { +StubStream = function() { var self = this; self.sent = []; - self.callbacks = {}; + self.callbacks = Object.create(null); }; - _.extend(StubStream.prototype, { // Methods from Stream - on: function (name, callback) { + on: function(name, callback) { var self = this; - if (!self.callbacks[name]) - self.callbacks[name] = [callback]; - else - self.callbacks[name].push(callback); + if (!self.callbacks[name]) self.callbacks[name] = [callback]; + else self.callbacks[name].push(callback); }, - send: function (data) { + send: function(data) { var self = this; self.sent.push(data); }, - status: function () { - return {status: "connected", fake: true}; + status: function() { + return { status: 'connected', fake: true }; }, - reconnect: function () { + reconnect: function() { // no-op }, - _lostConnection: function () { + _lostConnection: function() { // no-op }, // Methods for tests - receive: function (data) { + receive: function(data) { var self = this; if (typeof data === 'object') { data = EJSON.stringify(data); } - _.each(self.callbacks['message'], function (cb) { + _.each(self.callbacks['message'], function(cb) { cb(data); }); }, - reset: function () { + reset: function() { var self = this; - _.each(self.callbacks['reset'], function (cb) { + _.each(self.callbacks['reset'], function(cb) { cb(); }); }, diff --git a/packages/ddp-common/heartbeat.js b/packages/ddp-common/heartbeat.js index 4baca5f861..5338561989 100644 --- a/packages/ddp-common/heartbeat.js +++ b/packages/ddp-common/heartbeat.js @@ -5,96 +5,85 @@ // sendPing: function to call to send a ping on the connection. // onTimeout: function to call to close the connection. 
-DDPCommon.Heartbeat = function (options) { - var self = this; +DDPCommon.Heartbeat = class Heartbeat { + constructor(options) { + this.heartbeatInterval = options.heartbeatInterval; + this.heartbeatTimeout = options.heartbeatTimeout; + this._sendPing = options.sendPing; + this._onTimeout = options.onTimeout; + this._seenPacket = false; - self.heartbeatInterval = options.heartbeatInterval; - self.heartbeatTimeout = options.heartbeatTimeout; - self._sendPing = options.sendPing; - self._onTimeout = options.onTimeout; - self._seenPacket = false; + this._heartbeatIntervalHandle = null; + this._heartbeatTimeoutHandle = null; + } - self._heartbeatIntervalHandle = null; - self._heartbeatTimeoutHandle = null; -}; + stop() { + this._clearHeartbeatIntervalTimer(); + this._clearHeartbeatTimeoutTimer(); + } -_.extend(DDPCommon.Heartbeat.prototype, { - stop: function () { - var self = this; - self._clearHeartbeatIntervalTimer(); - self._clearHeartbeatTimeoutTimer(); - }, + start() { + this.stop(); + this._startHeartbeatIntervalTimer(); + } - start: function () { - var self = this; - self.stop(); - self._startHeartbeatIntervalTimer(); - }, - - _startHeartbeatIntervalTimer: function () { - var self = this; - self._heartbeatIntervalHandle = Meteor.setInterval( - _.bind(self._heartbeatIntervalFired, self), - self.heartbeatInterval + _startHeartbeatIntervalTimer() { + this._heartbeatIntervalHandle = Meteor.setInterval( + () => this._heartbeatIntervalFired(), + this.heartbeatInterval ); - }, + } - _startHeartbeatTimeoutTimer: function () { - var self = this; - self._heartbeatTimeoutHandle = Meteor.setTimeout( - _.bind(self._heartbeatTimeoutFired, self), - self.heartbeatTimeout + _startHeartbeatTimeoutTimer() { + this._heartbeatTimeoutHandle = Meteor.setTimeout( + () => this._heartbeatTimeoutFired(), + this.heartbeatTimeout ); - }, + } - _clearHeartbeatIntervalTimer: function () { - var self = this; - if (self._heartbeatIntervalHandle) { - 
Meteor.clearInterval(self._heartbeatIntervalHandle); - self._heartbeatIntervalHandle = null; + _clearHeartbeatIntervalTimer() { + if (this._heartbeatIntervalHandle) { + Meteor.clearInterval(this._heartbeatIntervalHandle); + this._heartbeatIntervalHandle = null; } - }, + } - _clearHeartbeatTimeoutTimer: function () { - var self = this; - if (self._heartbeatTimeoutHandle) { - Meteor.clearTimeout(self._heartbeatTimeoutHandle); - self._heartbeatTimeoutHandle = null; + _clearHeartbeatTimeoutTimer() { + if (this._heartbeatTimeoutHandle) { + Meteor.clearTimeout(this._heartbeatTimeoutHandle); + this._heartbeatTimeoutHandle = null; } - }, + } // The heartbeat interval timer is fired when we should send a ping. - _heartbeatIntervalFired: function () { - var self = this; + _heartbeatIntervalFired() { // don't send ping if we've seen a packet since we last checked, // *or* if we have already sent a ping and are awaiting a timeout. // That shouldn't happen, but it's possible if - // `self.heartbeatInterval` is smaller than - // `self.heartbeatTimeout`. - if (! self._seenPacket && ! self._heartbeatTimeoutHandle) { - self._sendPing(); + // `this.heartbeatInterval` is smaller than + // `this.heartbeatTimeout`. + if (! this._seenPacket && ! this._heartbeatTimeoutHandle) { + this._sendPing(); // Set up timeout, in case a pong doesn't arrive in time. - self._startHeartbeatTimeoutTimer(); + this._startHeartbeatTimeoutTimer(); } - self._seenPacket = false; - }, + this._seenPacket = false; + } // The heartbeat timeout timer is fired when we sent a ping, but we // timed out waiting for the pong. - _heartbeatTimeoutFired: function () { - var self = this; - self._heartbeatTimeoutHandle = null; - self._onTimeout(); - }, + _heartbeatTimeoutFired() { + this._heartbeatTimeoutHandle = null; + this._onTimeout(); + } - messageReceived: function () { - var self = this; + messageReceived() { // Tell periodic checkin that we have seen a packet, and thus it // does not need to send a ping this cycle. 
- self._seenPacket = true; + this._seenPacket = true; // If we were waiting for a pong, we got it. - if (self._heartbeatTimeoutHandle) { - self._clearHeartbeatTimeoutTimer(); + if (this._heartbeatTimeoutHandle) { + this._clearHeartbeatTimeoutTimer(); } } -}); +}; diff --git a/packages/ddp-common/method_invocation.js b/packages/ddp-common/method_invocation.js index d2b8cd510e..578e855de0 100644 --- a/packages/ddp-common/method_invocation.js +++ b/packages/ddp-common/method_invocation.js @@ -7,78 +7,75 @@ * @instanceName this * @showInstanceName true */ -DDPCommon.MethodInvocation = function (options) { - var self = this; +DDPCommon.MethodInvocation = class MethodInvocation { + constructor(options) { + // true if we're running not the actual method, but a stub (that is, + // if we're on a client (which may be a browser, or in the future a + // server connecting to another server) and presently running a + // simulation of a server-side method for latency compensation + // purposes). not currently true except in a client such as a browser, + // since there's usually no point in running stubs unless you have a + // zero-latency connection to the user. - // true if we're running not the actual method, but a stub (that is, - // if we're on a client (which may be a browser, or in the future a - // server connecting to another server) and presently running a - // simulation of a server-side method for latency compensation - // purposes). not currently true except in a client such as a browser, - // since there's usually no point in running stubs unless you have a - // zero-latency connection to the user. + /** + * @summary Access inside a method invocation. Boolean value, true if this invocation is a stub. + * @locus Anywhere + * @name isSimulation + * @memberOf DDPCommon.MethodInvocation + * @instance + * @type {Boolean} + */ + this.isSimulation = options.isSimulation; - /** - * @summary Access inside a method invocation. Boolean value, true if this invocation is a stub. 
- * @locus Anywhere - * @name isSimulation - * @memberOf DDPCommon.MethodInvocation - * @instance - * @type {Boolean} - */ - this.isSimulation = options.isSimulation; + // call this function to allow other method invocations (from the + // same client) to continue running without waiting for this one to + // complete. + this._unblock = options.unblock || function () {}; + this._calledUnblock = false; - // call this function to allow other method invocations (from the - // same client) to continue running without waiting for this one to - // complete. - this._unblock = options.unblock || function () {}; - this._calledUnblock = false; + // current user id - // current user id + /** + * @summary The id of the user that made this method call, or `null` if no user was logged in. + * @locus Anywhere + * @name userId + * @memberOf DDPCommon.MethodInvocation + * @instance + */ + this.userId = options.userId; - /** - * @summary The id of the user that made this method call, or `null` if no user was logged in. - * @locus Anywhere - * @name userId - * @memberOf DDPCommon.MethodInvocation - * @instance - */ - this.userId = options.userId; + // sets current user id in all appropriate server contexts and + // reruns subscriptions + this._setUserId = options.setUserId || function () {}; - // sets current user id in all appropriate server contexts and - // reruns subscriptions - this._setUserId = options.setUserId || function () {}; + // On the server, the connection this method call came in on. - // On the server, the connection this method call came in on. + /** + * @summary Access inside a method invocation. The [connection](#meteor_onconnection) that this method was received on. `null` if the method is not associated with a connection, eg. a server initiated method call. Calls to methods made from a server method which was in turn initiated from the client share the same `connection`. 
+ * @locus Server + * @name connection + * @memberOf DDPCommon.MethodInvocation + * @instance + */ + this.connection = options.connection; - /** - * @summary Access inside a method invocation. The [connection](#meteor_onconnection) that this method was received on. `null` if the method is not associated with a connection, eg. a server initiated method call. Calls to methods made from a server method which was in turn initiated from the client share the same `connection`. - * @locus Server - * @name connection - * @memberOf DDPCommon.MethodInvocation - * @instance - */ - this.connection = options.connection; + // The seed for randomStream value generation + this.randomSeed = options.randomSeed; - // The seed for randomStream value generation - this.randomSeed = options.randomSeed; + // This is set by RandomStream.get; and holds the random stream state + this.randomStream = null; + } - // This is set by RandomStream.get; and holds the random stream state - this.randomStream = null; -}; - -_.extend(DDPCommon.MethodInvocation.prototype, { /** * @summary Call inside a method invocation. Allow subsequent method from this client to begin running in a new fiber. * @locus Server * @memberOf DDPCommon.MethodInvocation * @instance */ - unblock: function () { - var self = this; - self._calledUnblock = true; - self._unblock(); - }, + unblock() { + this._calledUnblock = true; + this._unblock(); + } /** * @summary Set the logged in user. @@ -87,11 +84,11 @@ _.extend(DDPCommon.MethodInvocation.prototype, { * @instance * @param {String | null} userId The value that should be returned by `userId` on this connection. 
*/ - setUserId: function(userId) { - var self = this; - if (self._calledUnblock) + setUserId(userId) { + if (this._calledUnblock) { throw new Error("Can't call setUserId in a method after calling unblock"); - self.userId = userId; - self._setUserId(userId); + } + this.userId = userId; + this._setUserId(userId); } -}); +}; diff --git a/packages/ddp-common/package.js b/packages/ddp-common/package.js index 4dfbbae2d3..101cf0c67d 100644 --- a/packages/ddp-common/package.js +++ b/packages/ddp-common/package.js @@ -1,13 +1,18 @@ Package.describe({ summary: "Code shared beween ddp-client and ddp-server", - version: '1.3.0', + version: '1.4.0', documentation: null }); Package.onUse(function (api) { - api.use(['check', 'random', 'ejson', 'underscore', 'tracker', - 'retry'], - ['client', 'server']); + api.use([ + 'check', + 'random', + 'ecmascript', + 'ejson', + 'tracker', + 'retry', + ], ['client', 'server']); api.addFiles('namespace.js'); diff --git a/packages/ddp-common/random_stream.js b/packages/ddp-common/random_stream.js index dce28d42f1..28001f8ccb 100644 --- a/packages/ddp-common/random_stream.js +++ b/packages/ddp-common/random_stream.js @@ -21,12 +21,30 @@ // If an array, will be used as-is // If a value, will be converted to a single-value array // If omitted, a random array will be used as the seed. -DDPCommon.RandomStream = function (options) { - var self = this; +DDPCommon.RandomStream = class RandomStream { + constructor(options) { + this.seed = [].concat(options.seed || randomToken()); + this.sequences = Object.create(null); + } - this.seed = [].concat(options.seed || randomToken()); + // Get a random sequence with the specified name, creating it if does not exist. + // New sequences are seeded with the seed concatenated with the name. + // By passing a seed into Random.create, we use the Alea generator. 
+ _sequence(name) { + var self = this; - this.sequences = {}; + var sequence = self.sequences[name] || null; + if (sequence === null) { + var sequenceSeed = self.seed.concat(name); + for (var i = 0; i < sequenceSeed.length; i++) { + if (typeof sequenceSeed[i] === "function") { + sequenceSeed[i] = sequenceSeed[i](); + } + } + self.sequences[name] = sequence = Random.createWithSeeds.apply(null, sequenceSeed); + } + return sequence; + } }; // Returns a random string of sufficient length for a random seed. @@ -65,7 +83,6 @@ DDPCommon.RandomStream.get = function (scope, name) { return randomStream._sequence(name); }; - // Creates a randomSeed for passing to a method call. // Note that we take enclosing as an argument, // though we expect it to be DDP._CurrentMethodInvocation.get() @@ -76,24 +93,3 @@ DDPCommon.makeRpcSeed = function (enclosing, methodName) { var stream = DDPCommon.RandomStream.get(enclosing, '/rpc/' + methodName); return stream.hexString(20); }; - -_.extend(DDPCommon.RandomStream.prototype, { - // Get a random sequence with the specified name, creating it if does not exist. - // New sequences are seeded with the seed concatenated with the name. - // By passing a seed into Random.create, we use the Alea generator. 
- _sequence: function (name) { - var self = this; - - var sequence = self.sequences[name] || null; - if (sequence === null) { - var sequenceSeed = self.seed.concat(name); - for (var i = 0; i < sequenceSeed.length; i++) { - if (_.isFunction(sequenceSeed[i])) { - sequenceSeed[i] = sequenceSeed[i](); - } - } - self.sequences[name] = sequence = Random.createWithSeeds.apply(null, sequenceSeed); - } - return sequence; - } -}); diff --git a/packages/ddp-common/utils.js b/packages/ddp-common/utils.js index a5344d1a96..1dab0637b8 100644 --- a/packages/ddp-common/utils.js +++ b/packages/ddp-common/utils.js @@ -1,3 +1,43 @@ +"use strict"; + +export const hasOwn = Object.prototype.hasOwnProperty; +export const slice = Array.prototype.slice; + +export function keys(obj) { + return Object.keys(Object(obj)); +} + +export function isEmpty(obj) { + if (obj == null) { + return true; + } + + if (Array.isArray(obj) || + typeof obj === "string") { + return obj.length === 0; + } + + for (const key in obj) { + if (hasOwn.call(obj, key)) { + return false; + } + } + + return true; +} + +export function last(array, n, guard) { + if (array == null) { + return; + } + + if ((n == null) || guard) { + return array[array.length - 1]; + } + + return slice.call(array, Math.max(array.length - n, 0)); +} + DDPCommon.SUPPORTED_DDP_VERSIONS = [ '1', 'pre2', 'pre1' ]; DDPCommon.parseDDP = function (stringMessage) { @@ -17,48 +57,61 @@ DDPCommon.parseDDP = function (stringMessage) { // switch between "cleared" rep of unsetting fields and "undefined" // rep of same - if (_.has(msg, 'cleared')) { - if (!_.has(msg, 'fields')) + if (hasOwn.call(msg, 'cleared')) { + if (! 
hasOwn.call(msg, 'fields')) { msg.fields = {}; - _.each(msg.cleared, function (clearKey) { + } + msg.cleared.forEach(clearKey => { msg.fields[clearKey] = undefined; }); delete msg.cleared; } - _.each(['fields', 'params', 'result'], function (field) { - if (_.has(msg, field)) + ['fields', 'params', 'result'].forEach(field => { + if (hasOwn.call(msg, field)) { msg[field] = EJSON._adjustTypesFromJSONValue(msg[field]); + } }); return msg; }; DDPCommon.stringifyDDP = function (msg) { - var copy = EJSON.clone(msg); + const copy = EJSON.clone(msg); + // swizzle 'changed' messages from 'fields undefined' rep to 'fields // and cleared' rep - if (_.has(msg, 'fields')) { - var cleared = []; - _.each(msg.fields, function (value, key) { - if (value === undefined) { + if (hasOwn.call(msg, 'fields')) { + const cleared = []; + + Object.keys(msg.fields).forEach(key => { + const value = msg.fields[key]; + + if (typeof value === "undefined") { cleared.push(key); delete copy.fields[key]; } }); - if (!_.isEmpty(cleared)) + + if (! 
isEmpty(cleared)) { copy.cleared = cleared; - if (_.isEmpty(copy.fields)) + } + + if (isEmpty(copy.fields)) { delete copy.fields; + } } + // adjust types to basic - _.each(['fields', 'params', 'result'], function (field) { - if (_.has(copy, field)) + ['fields', 'params', 'result'].forEach(field => { + if (hasOwn.call(copy, field)) { copy[field] = EJSON._adjustTypesToJSONValue(copy[field]); + } }); + if (msg.id && typeof msg.id !== 'string') { throw new Error("Message id is not a string"); } + return JSON.stringify(copy); }; - diff --git a/packages/ddp-rate-limiter/ddp-rate-limiter-test-service.js b/packages/ddp-rate-limiter/ddp-rate-limiter-test-service.js index 1c121bc25a..69709aa30a 100644 --- a/packages/ddp-rate-limiter/ddp-rate-limiter-test-service.js +++ b/packages/ddp-rate-limiter/ddp-rate-limiter-test-service.js @@ -1,9 +1,14 @@ +import { Meteor } from 'meteor/meteor'; +import { Accounts } from 'meteor/accounts-base'; +import { DDPRateLimiter } from 'meteor/ddp-rate-limiter'; +import { RATE_LIMIT_NUM_CALLS, RATE_LIMIT_INTERVAL_TIME_MS } from './ddp-rate-limiter-tests-common'; + Meteor.methods({ // Adds in a new rule with the specific intervalTime and connectionId as // passed in to speed up testing & allow the rule to apply to the connection // testing the rate limit. - addRuleToDDPRateLimiter: function () { - var connection = this.connection; + addRuleToDDPRateLimiter() { + const connection = this.connection; connection.lastRateLimitEvent = connection.lastRateLimitEvent || {}; connection.lastMethodName = connection.lastMethodName || ''; // XXX In Javascript v8 engine, we are currently guaranteed the ordering of @@ -12,36 +17,36 @@ Meteor.methods({ // test. 
// // This is important because we use `connection.lastMethodName` to - // ignore the "getLastRateLimitEvent" method so that it can return + // ignore the 'getLastRateLimitEvent' method so that it can return // the actual last rate limit event rather than the one - // corresponding to the method call to "getLastRateLimitEvent". + // corresponding to the method call to 'getLastRateLimitEvent'. this.ruleId = DDPRateLimiter.addRule({ - name: function (name) { + name(name) { connection.lastMethodName = name; if (name !== 'getLastRateLimitEvent') { connection.lastRateLimitEvent.name = name; } - return name !== "a-method-that-is-not-rate-limited"; + return name !== 'a-method-that-is-not-rate-limited'; }, - userId: function (userId) { + userId(userId) { connection.lastRateLimitEvent.userId = userId; return true; }, - type: function (type) { + type(type) { // Special check to return proper name since 'getLastRateLimitEvent' // is another method call - if (connection.lastMethodName !== 'getLastRateLimitEvent'){ + if (connection.lastMethodName !== 'getLastRateLimitEvent') { connection.lastRateLimitEvent.type = type; } return true; }, - clientAddress: function (clientAddress) { - connection.lastRateLimitEvent.clientAddress = clientAddress + clientAddress(clientAddress) { + connection.lastRateLimitEvent.clientAddress = clientAddress; return true; }, - connectionId: this.connection.id - }, RATE_LIMIT_NUM_CALLS, RATE_LIMIT_INTERVAL_TIME_MS, function(reply, ruleInput) { - if (connection.lastMethodName !== 'getLastRateLimitEvent'){ + connectionId: this.connection.id, + }, RATE_LIMIT_NUM_CALLS, RATE_LIMIT_INTERVAL_TIME_MS, (reply, ruleInput) => { + if (connection.lastMethodName !== 'getLastRateLimitEvent') { connection.lastRateLimitEvent.reply = reply; connection.lastRateLimitEvent.ruleInput = ruleInput; } @@ -49,34 +54,32 @@ Meteor.methods({ return this.ruleId; }, - getLastRateLimitEvent: function () { + getLastRateLimitEvent() { return this.connection.lastRateLimitEvent; }, // 
Server side method to remove rule from DDP Rate Limiter - removeRuleFromDDPRateLimiter: function (id) { + removeRuleFromDDPRateLimiter(id) { return DDPRateLimiter.removeRule(id); }, // Print all the server rules for debugging purposes. - printCurrentListOfRules: function () { + printCurrentListOfRules() { console.log('Current list of rules :', DDPRateLimiter.printRules()); }, - removeUserByUsername: function (username) { - Meteor.users.remove({username: username}); + removeUserByUsername(username) { + Meteor.users.remove({ username }); }, - dummyMethod: function () { - return "yup"; + dummyMethod() { + return 'yup'; }, - 'a-method-that-is-not-rate-limited': function () { - return "not-rate-limited"; + 'a-method-that-is-not-rate-limited'() { + return 'not-rate-limited'; }, - addDefaultAccountsRateLimitRule: function () { + addDefaultAccountsRateLimitRule() { Accounts.addDefaultRateLimit(); }, - removeDefaultAccountsRateLimitRule: function () { + removeDefaultAccountsRateLimitRule() { return Accounts.removeDefaultRateLimit(); - } + }, }); -Meteor.publish("testSubscription", function () { - return []; -}); +Meteor.publish('testSubscription', () => []); diff --git a/packages/ddp-rate-limiter/ddp-rate-limiter-tests-common.js b/packages/ddp-rate-limiter/ddp-rate-limiter-tests-common.js index 72ca8d2bfc..d3ff4d38ca 100644 --- a/packages/ddp-rate-limiter/ddp-rate-limiter-tests-common.js +++ b/packages/ddp-rate-limiter/ddp-rate-limiter-tests-common.js @@ -1,3 +1,3 @@ // Common settings for DDPRateLimiter tests. 
-RATE_LIMIT_NUM_CALLS = 5; -RATE_LIMIT_INTERVAL_TIME_MS = 5000; \ No newline at end of file +export const RATE_LIMIT_NUM_CALLS = 5; +export const RATE_LIMIT_INTERVAL_TIME_MS = 5000; diff --git a/packages/ddp-rate-limiter/ddp-rate-limiter-tests.js b/packages/ddp-rate-limiter/ddp-rate-limiter-tests.js index 3c300181bf..c99f27366c 100644 --- a/packages/ddp-rate-limiter/ddp-rate-limiter-tests.js +++ b/packages/ddp-rate-limiter/ddp-rate-limiter-tests.js @@ -1,46 +1,49 @@ +import { Meteor } from 'meteor/meteor'; +import { Random } from 'meteor/random'; +import { Accounts } from 'meteor/accounts-base'; +import { RATE_LIMIT_NUM_CALLS, RATE_LIMIT_INTERVAL_TIME_MS } from './ddp-rate-limiter-tests-common'; + // Test that we do hit the default login rate limit. // XXX Removed to fix testing as other packages currently hit the default rate // limit. -testAsyncMulti("ddp rate limiter - default rate limit", [ + +testAsyncMulti('ddp rate limiter - default rate limit', [ function (test, expect) { // Add in the default rate limiter rule Meteor.call('addDefaultAccountsRateLimitRule'); - _.extend(this, createTestUser(test, expect)); + Object.assign(this, createTestUser(test, expect)); }, function (test, expect) { - Meteor.logout(expect(function (error) { + Meteor.logout(expect((error) => { test.equal(error, undefined); test.equal(Meteor.user(), null); })); }, function (test, expect) { - var self = this; - callFnMultipleTimesThenExpectResult(test, expect, - Meteor.loginWithPassword.bind(Meteor, self.username, 'fakePassword'), + Meteor.loginWithPassword.bind(Meteor, this.username, 'fakePassword'), { expectedError: 403, expectedResult: undefined, expectedRateLimitWillBeHit: true, - expectedIntervalTimeInMs: 10000 - } + expectedIntervalTimeInMs: 10000, + }, ); }, function (test, expect) { - Meteor.call("removeUserByUsername", this.username, expect(function () {})); + Meteor.call('removeUserByUsername', this.username, expect(() => {})); // Remove the default rate limiter rule 
Meteor.call('removeDefaultAccountsRateLimitRule'); - } + }, ]); -testAsyncMulti("ddp rate limiter - matchers get passed correct arguments", [ +testAsyncMulti('ddp rate limiter - matchers get passed correct arguments', [ function (test, expect) { - _.extend(this, createTestUser(test, expect)); + Object.assign(this, createTestUser(test, expect)); }, function (test, expect) { - var self = this; - Meteor.call("addRuleToDDPRateLimiter", expect(function(error, result) { - self.ruleId = result; + Meteor.call('addRuleToDDPRateLimiter', expect((error, result) => { + this.ruleId = result; })); }, function (test, expect) { @@ -48,52 +51,50 @@ testAsyncMulti("ddp rate limiter - matchers get passed correct arguments", [ Meteor.call.bind(Meteor, 'dummyMethod'), { expectedError: undefined, - expectedResult: "yup", - expectedRateLimitWillBeHit: true - } + expectedResult: 'yup', + expectedRateLimitWillBeHit: true, + }, ); }, function (test, expect) { - var self = this; Meteor.call( - "getLastRateLimitEvent", expect(function (error, result) { + 'getLastRateLimitEvent', expect((error, result) => { test.equal(error, undefined); test.equal(result.userId, Meteor.userId()); - test.equal(result.type, "method"); - test.equal(result.name, "dummyMethod"); - test.isNotUndefined(result.clientAddress, "clientAddress is not defined"); + test.equal(result.type, 'method'); + test.equal(result.name, 'dummyMethod'); + test.isNotUndefined(result.clientAddress, 'clientAddress is not defined'); })); }, function (test, expect) { - Meteor.call("removeUserByUsername", this.username, expect(function () {})); + Meteor.call('removeUserByUsername', this.username, expect(() => {})); }, function (test, expect) { - var self = this; // Cleanup - Meteor.call('removeRuleFromDDPRateLimiter', self.ruleId, - expect(function(error, result) { - test.equal(result,true); - })); - } + Meteor.call('removeRuleFromDDPRateLimiter', this.ruleId, + expect((error, result) => { + test.equal(result, true); + }), + ); + }, ]); 
-testAsyncMulti("ddp rate limiter - callbacks get passed correct arguments", [ +testAsyncMulti('ddp rate limiter - callbacks get passed correct arguments', [ function (test, expect) { - _.extend(this, createTestUser(test, expect)); + Object.assign(this, createTestUser(test, expect)); }, function (test, expect) { - var self = this; - Meteor.call("addRuleToDDPRateLimiter", expect(function(error, result) { - self.ruleId = result; + Meteor.call('addRuleToDDPRateLimiter', expect((error, result) => { + this.ruleId = result; })); }, function (test, expect) { - Meteor.call('dummyMethod', expect(function() {})); + Meteor.call('dummyMethod', expect(() => {})); }, function (test, expect) { - var self = this; Meteor.call( - "getLastRateLimitEvent", expect(function (error, result) { + 'getLastRateLimitEvent', + expect((error, result) => { test.isTrue(result.reply.allowed); test.isTrue(result.reply.timeToReset < RATE_LIMIT_INTERVAL_TIME_MS + 100); test.equal(result.reply.numInvocationsLeft, 4); @@ -101,7 +102,8 @@ testAsyncMulti("ddp rate limiter - callbacks get passed correct arguments", [ test.equal(result.ruleInput.userId, Meteor.userId()); test.equal(result.ruleInput.type, 'method'); test.equal(result.ruleInput.name, 'dummyMethod'); - })); + }), + ); }, function (test, expect) { // Wait for the rule to reset @@ -109,81 +111,75 @@ testAsyncMulti("ddp rate limiter - callbacks get passed correct arguments", [ }, function (test, expect) { // Call RATE_LIMIT_NUM_CALLS + 1 times to make the rule exceed limit and reject the execution - for (var i = 0; i < RATE_LIMIT_NUM_CALLS + 1; i++) { - Meteor.call('dummyMethod', expect(function() {})); + for (let i = 0; i < RATE_LIMIT_NUM_CALLS + 1; i++) { + Meteor.call('dummyMethod', expect(() => {})); } }, function (test, expect) { - var self = this; - Meteor.call( - "getLastRateLimitEvent", expect(function (error, result) { - test.isFalse(result.reply.allowed); - test.isTrue(result.reply.timeToReset < RATE_LIMIT_INTERVAL_TIME_MS + 100); - 
test.equal(result.reply.numInvocationsLeft, 0); + Meteor.call('getLastRateLimitEvent', expect((error, result) => { + test.isFalse(result.reply.allowed); + test.isTrue(result.reply.timeToReset < RATE_LIMIT_INTERVAL_TIME_MS + 100); + test.equal(result.reply.numInvocationsLeft, 0); - test.equal(result.ruleInput.userId, Meteor.userId()); - test.equal(result.ruleInput.type, 'method'); - test.equal(result.ruleInput.name, 'dummyMethod'); - })); - }, - function (test, expect) { - Meteor.call("removeUserByUsername", this.username, expect(function () {})); - }, - function (test, expect) { - var self = this; - // Cleanup - Meteor.call('removeRuleFromDDPRateLimiter', self.ruleId, - expect(function(error, result) { - test.equal(result,true); + test.equal(result.ruleInput.userId, Meteor.userId()); + test.equal(result.ruleInput.type, 'method'); + test.equal(result.ruleInput.name, 'dummyMethod'); })); - } + }, + function (test, expect) { + Meteor.call('removeUserByUsername', this.username, expect(() => {})); + }, + function (test, expect) { + // Cleanup + Meteor.call('removeRuleFromDDPRateLimiter', this.ruleId, + expect((error, result) => { + test.equal(result, true); + }), + ); + }, ]); -testAsyncMulti("ddp rate limiter - we can return with type 'subscription'", [ +testAsyncMulti('ddp rate limiter - we can return with type \'subscription\'', [ function (test, expect) { - var self = this; - Meteor.call("addRuleToDDPRateLimiter", expect( - function(error, result) { - self.ruleId = result; + Meteor.call('addRuleToDDPRateLimiter', expect( + (error, result) => { + this.ruleId = result; })); }, function (test, expect) { Meteor.subscribe('testSubscription'); - Meteor.call('getLastRateLimitEvent', expect(function(error, result){ + Meteor.call('getLastRateLimitEvent', expect((error, result) =>{ test.equal(error, undefined); - test.equal(result.type, "subscription"); - test.equal(result.name, "testSubscription"); - test.isNotUndefined(result.clientAddress, "clientAddress is not defined"); 
+ test.equal(result.type, 'subscription'); + test.equal(result.name, 'testSubscription'); + test.isNotUndefined(result.clientAddress, 'clientAddress is not defined'); })); }, function (test, expect) { - var self = this; // Cleanup - Meteor.call('removeRuleFromDDPRateLimiter', self.ruleId, - expect(function(error, result) { + Meteor.call('removeRuleFromDDPRateLimiter', this.ruleId, + expect((error, result) => { test.equal(result, true); - })); - } -]); - -testAsyncMulti("ddp rate limiter - rate limits to subscriptions", [ - function (test, expect) { - var self = this; - Meteor.call("addRuleToDDPRateLimiter", expect( - function(error, result) { - self.ruleId = result; - }) + }), ); }, +]); + +testAsyncMulti('ddp rate limiter - rate limits to subscriptions', [ function (test, expect) { - this.doSub = function (cb) { + Meteor.call('addRuleToDDPRateLimiter', expect((error, result) => { + this.ruleId = result; + })); + }, + function (test, expect) { + this.doSub = (cb) => { Meteor.subscribe('testSubscription', { - onReady: function () { + onReady() { cb(null, true); }, - onStop: function (error) { + onStop(error) { cb(error, undefined); - } + }, }); }; @@ -191,46 +187,47 @@ testAsyncMulti("ddp rate limiter - rate limits to subscriptions", [ { expectedError: null, expectedResult: true, - expectedRateLimitWillBeHit: true - } + expectedRateLimitWillBeHit: true, + }, ); }, function (test, expect) { // After removing rule, subscriptions are no longer rate limited. 
- var self = this; - Meteor.call('removeRuleFromDDPRateLimiter', self.ruleId, - expect(function(error, result) { - test.equal(result,true); - })); + Meteor.call('removeRuleFromDDPRateLimiter', this.ruleId, + expect((error, result) => { + test.equal(result, true); + }), + ); }, function (test, expect) { callFnMultipleTimesThenExpectResult(test, expect, this.doSub, - { - expectedError: null, - expectedResult: true, - expectedIntervalTimeInMs: false - }); + { + expectedError: null, + expectedResult: true, + expectedIntervalTimeInMs: false, + }, + ); callFnMultipleTimesThenExpectResult(test, expect, this.doSub, - { - expectedError: null, - expectedResult: true, - expectedIntervalTimeInMs: false - }); - } + { + expectedError: null, + expectedResult: true, + expectedIntervalTimeInMs: false, + }, + ); + }, ]); // - If you wait 5 seconds you are no longer rate limited -testAsyncMulti("ddp rate limiter - rate limit resets after " + - "RATE_LIMIT_INTERVAL_TIME_MS", [ +testAsyncMulti('ddp rate limiter - rate limit resets after ' + + 'RATE_LIMIT_INTERVAL_TIME_MS', [ function (test, expect) { - _.extend(this, createTestUser(test, expect)); + Object.assign(this, createTestUser(test, expect)); }, function (test, expect) { - var self = this; - Meteor.call("addRuleToDDPRateLimiter", expect(function(error, result) { - self.ruleId = result; + Meteor.call('addRuleToDDPRateLimiter', expect((error, result) => { + this.ruleId = result; })); }, @@ -239,9 +236,9 @@ testAsyncMulti("ddp rate limiter - rate limit resets after " + Meteor.call.bind(Meteor, 'dummyMethod'), { expectedError: undefined, - expectedResult: "yup", - expectedRateLimitWillBeHit: true - } + expectedResult: 'yup', + expectedRateLimitWillBeHit: true, + }, ); }, function (test, expect) { @@ -252,26 +249,25 @@ testAsyncMulti("ddp rate limiter - rate limit resets after " + Meteor.call.bind(Meteor, 'dummyMethod'), { expectedError: undefined, - expectedResult: "yup", - expectedRateLimitWillBeHit: true - } + expectedResult: 
'yup', + expectedRateLimitWillBeHit: true, + }, ); }, function (test, expect) { - var self = this; - Meteor.call('removeRuleFromDDPRateLimiter', self.ruleId, - expect(function(error, result) { + Meteor.call('removeRuleFromDDPRateLimiter', this.ruleId, + expect((error, result) => { test.equal(result, true); - })); - } + }), + ); + }, ]); -testAsyncMulti("ddp rate limiter - 'a-method-that-is-not-rate-limited' is not" + - " rate limited", [ +testAsyncMulti('ddp rate limiter - \'a-method-that-is-not-rate-limited\' is not' + + ' rate limited', [ function (test, expect) { - var self = this; - Meteor.call('addRuleToDDPRateLimiter', expect(function(error, result){ - self.ruleId = result; + Meteor.call('addRuleToDDPRateLimiter', expect((error, result) =>{ + this.ruleId = result; })); }, function (test, expect) { @@ -279,95 +275,97 @@ testAsyncMulti("ddp rate limiter - 'a-method-that-is-not-rate-limited' is not" + Meteor.call.bind(Meteor, 'a-method-that-is-not-rate-limited'), { expectedError: undefined, - expectedResult: "not-rate-limited", - expectedRateLimitWillBeHit: false - }); + expectedResult: 'not-rate-limited', + expectedRateLimitWillBeHit: false, + }, + ); }, function (test, expect) { - var self = this; - Meteor.call('removeRuleFromDDPRateLimiter', self.ruleId, - expect(function(error, result) { + Meteor.call('removeRuleFromDDPRateLimiter', this.ruleId, + expect((error, result) => { test.equal(result, true); - })); - } + }), + ); + }, ]); // When we have a rate limited client and we remove the rate limit rule, // all requests should be allowed immediately afterwards. 
-testAsyncMulti("ddp rate limiter - test removing rule with rateLimited " + - "client lets them send new queries", [ +testAsyncMulti('ddp rate limiter - test removing rule with rateLimited ' + + 'client lets them send new queries', [ function (test, expect) { - _.extend(this, createTestUser(test, expect)); + Object.assign(this, createTestUser(test, expect)); }, function (test, expect) { - var self = this; - Meteor.call("addRuleToDDPRateLimiter", expect(function(error, result) { - self.ruleId = result; + Meteor.call('addRuleToDDPRateLimiter', expect((error, result) => { + this.ruleId = result; })); }, function (test, expect) { - Meteor.logout(expect(function (error) { + Meteor.logout(expect((error) => { test.equal(error, undefined); test.equal(Meteor.user(), null); })); }, function (test, expect) { - var self = this; // By removing the rule from the DDP rate limiter, we no longer restrict // them even though they were rate limited - Meteor.call('removeRuleFromDDPRateLimiter', self.ruleId, - expect(function(error, result) { - test.equal(result,true); - })); + Meteor.call('removeRuleFromDDPRateLimiter', this.ruleId, + expect((error, result) => { + test.equal(result, true); + }), + ); }, function (test, expect) { callFnMultipleTimesThenExpectResult(test, expect, Meteor.call.bind(Meteor, 'dummyMethod'), { expectedError: undefined, - expectedResult: "yup", - expectedRateLimitWillBeHit: false - } + expectedResult: 'yup', + expectedRateLimitWillBeHit: false, + }, ); callFnMultipleTimesThenExpectResult(test, expect, Meteor.call.bind(Meteor, 'dummyMethod'), { expectedError: undefined, - expectedResult: "yup", - expectedRateLimitWillBeHit: false - } + expectedResult: 'yup', + expectedRateLimitWillBeHit: false, + }, ); }, function (test, expect) { - Meteor.call("removeUserByUsername", this.username, expect(function () {})); - } + Meteor.call('removeUserByUsername', this.username, expect(function () {})); + }, ]); function createTestUser(test, expect) { const username = 
Random.id(); - const email = Random.id() + '-intercept@example.com'; + const email = `${Random.id()}-intercept@example.com`; const password = 'password'; - Accounts.createUser({ - username: username, - email: email, - password: password - }, - expect(function (error, result) { - test.equal(error, undefined); - test.notEqual(Meteor.userId(), null); - })); + Accounts.createUser( + { + username, + email, + password, + }, + expect((error) => { + test.equal(error, undefined); + test.notEqual(Meteor.userId(), null); + }), + ); - return {username, email, password}; -}; + return { username, email, password }; +} /** * A utility function that runs an arbitrary JavaScript function with a single * Node-style callback argument multiple times, verifying that the callback is * fired with certain arguments; then run the function one more time, - * conditionally verifying that the callback is now fired with the "too-many- - * request" rate limit error. + * conditionally verifying that the callback is now fired with the 'too-many- + * request' rate limit error. 
* * @param test As in testAsyncMulti * @param expect As in testAsyncMulti @@ -379,21 +377,28 @@ function createTestUser(test, expect) { * @param {boolean} expectedRateLimitWillBeHit Should we hit rate limit */ function callFnMultipleTimesThenExpectResult( - test, expect, fn, {expectedError, expectedResult, expectedRateLimitWillBeHit, - expectedIntervalTimeInMs}) { - - for (var i = 0; i < RATE_LIMIT_NUM_CALLS; i++) { - fn(expect(function (error, result) { + test, + expect, + fn, + { + expectedError, + expectedResult, + expectedRateLimitWillBeHit, + expectedIntervalTimeInMs, + }, +) { + for (let i = 0; i < RATE_LIMIT_NUM_CALLS; i++) { + fn(expect((error, result) => { test.equal(error && error.error, expectedError); test.equal(result, expectedResult); })); } - fn(expect(function (error, result) { + fn(expect((error, result) => { if (expectedRateLimitWillBeHit) { - test.equal(error && error.error, 'too-many-requests', 'error : ' + error); - test.isTrue(error && error.details.timeToReset < - expectedIntervalTimeInMs || RATE_LIMIT_INTERVAL_TIME_MS, 'too long'); + test.equal(error && error.error, 'too-many-requests', `error : ${error}`); + test.isTrue((error && error.details.timeToReset < + expectedIntervalTimeInMs) || RATE_LIMIT_INTERVAL_TIME_MS, 'too long'); test.equal(result, undefined, 'result is not undefined'); } else { test.equal(error && error.error, expectedError); diff --git a/packages/ddp-rate-limiter/ddp-rate-limiter.js b/packages/ddp-rate-limiter/ddp-rate-limiter.js index c2b5c335f2..5ffd6dac02 100644 --- a/packages/ddp-rate-limiter/ddp-rate-limiter.js +++ b/packages/ddp-rate-limiter/ddp-rate-limiter.js @@ -1,19 +1,23 @@ +import { RateLimiter } from 'meteor/rate-limit'; + // Rate Limiter built into DDP with a default error message. See README or // online documentation for more details. -DDPRateLimiter = {}; +const DDPRateLimiter = {}; -var errorMessage = function (rateLimitResult) { - return "Error, too many requests. Please slow down. 
You must wait " + - Math.ceil(rateLimitResult.timeToReset / 1000) + " seconds before " + - "trying again."; +let errorMessage = (rateLimitResult) => { + return 'Error, too many requests. Please slow down. You must wait ' + + `${Math.ceil(rateLimitResult.timeToReset / 1000)} seconds before ` + + 'trying again.'; }; -var rateLimiter = new RateLimiter(); -DDPRateLimiter.getErrorMessage = function (rateLimitResult) { - if (typeof errorMessage === 'function') +const rateLimiter = new RateLimiter(); + +DDPRateLimiter.getErrorMessage = (rateLimitResult) => { + if (typeof errorMessage === 'function') { return errorMessage(rateLimitResult); - else + } else { return errorMessage; + } }; /** @@ -25,7 +29,7 @@ DDPRateLimiter.getErrorMessage = function (rateLimitResult) { * of the error message. * @locus Server */ -DDPRateLimiter.setErrorMessage = function (message) { +DDPRateLimiter.setErrorMessage = (message) => { errorMessage = message; }; @@ -67,13 +71,10 @@ DDPRateLimiter.setErrorMessage = function (message) { * @param {function} callback function to be called after a rule is executed. * @locus Server */ -DDPRateLimiter.addRule = function (matcher, numRequests, timeInterval, callback) { - return rateLimiter.addRule(matcher, numRequests, timeInterval, callback); -}; +DDPRateLimiter.addRule = (matcher, numRequests, timeInterval, callback) => + rateLimiter.addRule(matcher, numRequests, timeInterval, callback); -DDPRateLimiter.printRules = function () { - return rateLimiter.rules; -}; +DDPRateLimiter.printRules = () => rateLimiter.rules; /** * @summary Removes the specified rule from the rate limiter. If rule had @@ -82,16 +83,14 @@ DDPRateLimiter.printRules = function () { * @return {boolean} True if a rule was removed. * @locus Server */ -DDPRateLimiter.removeRule = function (id) { - return rateLimiter.removeRule(id); -}; +DDPRateLimiter.removeRule = id => rateLimiter.removeRule(id); // This is accessed inside livedata_server.js, but shouldn't be called by any // user. 
-DDPRateLimiter._increment = function (input) { +DDPRateLimiter._increment = (input) => { rateLimiter.increment(input); }; -DDPRateLimiter._check = function (input) { - return rateLimiter.check(input); -}; +DDPRateLimiter._check = input => rateLimiter.check(input); + +export { DDPRateLimiter }; diff --git a/packages/ddp-rate-limiter/package.js b/packages/ddp-rate-limiter/package.js index ae990e7a93..730cededb7 100644 --- a/packages/ddp-rate-limiter/package.js +++ b/packages/ddp-rate-limiter/package.js @@ -8,13 +8,14 @@ Package.describe({ git: '', // By default, Meteor will default to using README.md for documentation. // To avoid submitting documentation, set this field to null. - documentation: 'README.md' + documentation: 'README.md', }); Package.onUse(function(api) { api.use('rate-limit', 'server'); + api.use('ecmascript'); api.export('DDPRateLimiter', 'server'); - api.addFiles('ddp-rate-limiter.js', 'server'); + api.mainModule('ddp-rate-limiter.js', 'server'); }); Package.onTest(function(api) { @@ -24,7 +25,6 @@ Package.onTest(function(api) { 'ddp', 'ecmascript', 'es5-shim']); api.use('ddp-rate-limiter'); - api.addFiles('ddp-rate-limiter-tests-common.js'); - api.addFiles('ddp-rate-limiter-test-service.js', 'server'); - api.addFiles('ddp-rate-limiter-tests.js', 'client'); + api.mainModule('ddp-rate-limiter-test-service.js', 'server'); + api.mainModule('ddp-rate-limiter-tests.js', 'client'); }); diff --git a/packages/ddp-server/crossbar.js b/packages/ddp-server/crossbar.js index c09512eb4c..6672059f99 100644 --- a/packages/ddp-server/crossbar.js +++ b/packages/ddp-server/crossbar.js @@ -11,6 +11,7 @@ DDPServer._Crossbar = function (options) { // keys 'trigger', 'callback'. As a hack, the empty string means "no // collection". 
self.listenersByCollection = {}; + self.listenersByCollectionCount = {}; self.factPackage = options.factPackage || "livedata"; self.factName = options.factName || null; }; @@ -48,23 +49,27 @@ _.extend(DDPServer._Crossbar.prototype, { var record = {trigger: EJSON.clone(trigger), callback: callback}; if (! _.has(self.listenersByCollection, collection)) { self.listenersByCollection[collection] = {}; + self.listenersByCollectionCount[collection] = 0; } self.listenersByCollection[collection][id] = record; + self.listenersByCollectionCount[collection]++; - if (self.factName && Package.facts) { - Package.facts.Facts.incrementServerFact( + if (self.factName && Package['facts-base']) { + Package['facts-base'].Facts.incrementServerFact( self.factPackage, self.factName, 1); } return { stop: function () { - if (self.factName && Package.facts) { - Package.facts.Facts.incrementServerFact( + if (self.factName && Package['facts-base']) { + Package['facts-base'].Facts.incrementServerFact( self.factPackage, self.factName, -1); } delete self.listenersByCollection[collection][id]; - if (_.isEmpty(self.listenersByCollection[collection])) { + self.listenersByCollectionCount[collection]--; + if (self.listenersByCollectionCount[collection] === 0) { delete self.listenersByCollection[collection]; + delete self.listenersByCollectionCount[collection]; } } }; diff --git a/packages/ddp-server/livedata_server.js b/packages/ddp-server/livedata_server.js index cc8831304f..3b94a3e8e9 100644 --- a/packages/ddp-server/livedata_server.js +++ b/packages/ddp-server/livedata_server.js @@ -14,8 +14,8 @@ var Fiber = Npm.require('fibers'); // Represents a single document in a SessionCollectionView var SessionDocumentView = function () { var self = this; - self.existsIn = {}; // set of subscriptionHandle - self.dataByKey = {}; // key-> [ {subscriptionHandle, value} by precedence] + self.existsIn = new Set(); // set of subscriptionHandle + self.dataByKey = new Map(); // key-> [ {subscriptionHandle, value} by 
precedence] }; DDPServer._SessionDocumentView = SessionDocumentView; @@ -26,7 +26,7 @@ _.extend(SessionDocumentView.prototype, { getFields: function () { var self = this; var ret = {}; - _.each(self.dataByKey, function (precedenceList, key) { + self.dataByKey.forEach(function (precedenceList, key) { ret[key] = precedenceList[0].value; }); return ret; @@ -37,7 +37,7 @@ _.extend(SessionDocumentView.prototype, { // Publish API ignores _id if present in fields if (key === "_id") return; - var precedenceList = self.dataByKey[key]; + var precedenceList = self.dataByKey.get(key); // It's okay to clear fields that didn't exist. No need to throw // an error. @@ -56,8 +56,8 @@ _.extend(SessionDocumentView.prototype, { break; } } - if (_.isEmpty(precedenceList)) { - delete self.dataByKey[key]; + if (precedenceList.length === 0) { + self.dataByKey.delete(key); changeCollector[key] = undefined; } else if (removedValue !== undefined && !EJSON.equals(removedValue, precedenceList[0].value)) { @@ -75,17 +75,17 @@ _.extend(SessionDocumentView.prototype, { // Don't share state with the data passed in by the user. 
value = EJSON.clone(value); - if (!_.has(self.dataByKey, key)) { - self.dataByKey[key] = [{subscriptionHandle: subscriptionHandle, - value: value}]; + if (!self.dataByKey.has(key)) { + self.dataByKey.set(key, [{subscriptionHandle: subscriptionHandle, + value: value}]); changeCollector[key] = value; return; } - var precedenceList = self.dataByKey[key]; + var precedenceList = self.dataByKey.get(key); var elt; if (!isAdd) { - elt = _.find(precedenceList, function (precedence) { - return precedence.subscriptionHandle === subscriptionHandle; + elt = precedenceList.find(function (precedence) { + return precedence.subscriptionHandle === subscriptionHandle; }); } @@ -112,7 +112,7 @@ _.extend(SessionDocumentView.prototype, { var SessionCollectionView = function (collectionName, sessionCallbacks) { var self = this; self.collectionName = collectionName; - self.documents = {}; + self.documents = new Map(); self.callbacks = sessionCallbacks; }; @@ -123,12 +123,12 @@ _.extend(SessionCollectionView.prototype, { isEmpty: function () { var self = this; - return _.isEmpty(self.documents); + return self.documents.size === 0; }, diff: function (previous) { var self = this; - DiffSequence.diffObjects(previous.documents, self.documents, { + DiffSequence.diffMaps(previous.documents, self.documents, { both: _.bind(self.diffDocument, self), rightOnly: function (id, nowDV) { @@ -161,14 +161,14 @@ _.extend(SessionCollectionView.prototype, { added: function (subscriptionHandle, id, fields) { var self = this; - var docView = self.documents[id]; + var docView = self.documents.get(id); var added = false; if (!docView) { added = true; docView = new SessionDocumentView(); - self.documents[id] = docView; + self.documents.set(id, docView); } - docView.existsIn[subscriptionHandle] = true; + docView.existsIn.add(subscriptionHandle); var changeCollector = {}; _.each(fields, function (value, key) { docView.changeField( @@ -183,7 +183,7 @@ _.extend(SessionCollectionView.prototype, { changed: function 
(subscriptionHandle, id, changed) { var self = this; var changedResult = {}; - var docView = self.documents[id]; + var docView = self.documents.get(id); if (!docView) throw new Error("Could not find element with id " + id + " to change"); _.each(changed, function (value, key) { @@ -197,21 +197,21 @@ _.extend(SessionCollectionView.prototype, { removed: function (subscriptionHandle, id) { var self = this; - var docView = self.documents[id]; + var docView = self.documents.get(id); if (!docView) { var err = new Error("Removed nonexistent document " + id); throw err; } - delete docView.existsIn[subscriptionHandle]; - if (_.isEmpty(docView.existsIn)) { + docView.existsIn.delete(subscriptionHandle); + if (docView.existsIn.size === 0) { // it is gone from everyone self.callbacks.removed(self.collectionName, id); - delete self.documents[id]; + self.documents.delete(id); } else { var changed = {}; // remove this subscription from every precedence list // and record the changes - _.each(docView.dataByKey, function (precedenceList, key) { + docView.dataByKey.forEach(function (precedenceList, key) { docView.clearField(subscriptionHandle, key, changed); }); @@ -242,12 +242,12 @@ var Session = function (server, version, socket, options) { self.workerRunning = false; // Sub objects for active subscriptions - self._namedSubs = {}; + self._namedSubs = new Map(); self._universalSubs = []; self.userId = null; - self.collectionViews = {}; + self.collectionViews = new Map(); // Set this to false to not send messages when collectionViews are // modified. 
This is done when rerunning subs in _setUserId and those messages @@ -318,7 +318,7 @@ var Session = function (server, version, socket, options) { self.heartbeat.start(); } - Package.facts && Package.facts.Facts.incrementServerFact( + Package['facts-base'] && Package['facts-base'].Facts.incrementServerFact( "livedata", "sessions", 1); }; @@ -373,12 +373,12 @@ _.extend(Session.prototype, { getCollectionView: function (collectionName) { var self = this; - if (_.has(self.collectionViews, collectionName)) { - return self.collectionViews[collectionName]; - } - var ret = new SessionCollectionView(collectionName, + var ret = self.collectionViews.get(collectionName); + if (!ret) { + ret = new SessionCollectionView(collectionName, self.getSendCallbacks()); - self.collectionViews[collectionName] = ret; + self.collectionViews.set(collectionName, ret); + } return ret; }, @@ -393,7 +393,7 @@ _.extend(Session.prototype, { var view = self.getCollectionView(collectionName); view.removed(subscriptionHandle, id); if (view.isEmpty()) { - delete self.collectionViews[collectionName]; + self.collectionViews.delete(collectionName); } }, @@ -428,7 +428,7 @@ _.extend(Session.prototype, { // Drop the merge box data immediately. self.inQueue = null; - self.collectionViews = {}; + self.collectionViews = new Map(); if (self.heartbeat) { self.heartbeat.stop(); @@ -440,7 +440,7 @@ _.extend(Session.prototype, { self.socket._meteorSession = null; } - Package.facts && Package.facts.Facts.incrementServerFact( + Package['facts-base'] && Package['facts-base'].Facts.incrementServerFact( "livedata", "sessions", -1); Meteor.defer(function () { @@ -585,7 +585,7 @@ _.extend(Session.prototype, { return; } - if (_.has(self._namedSubs, msg.id)) + if (self._namedSubs.has(msg.id)) // subs are idempotent, or rather, they are ignored if a sub // with that id already exists. this is important during // reconnect. 
@@ -753,23 +753,23 @@ _.extend(Session.prototype, { _eachSub: function (f) { var self = this; - _.each(self._namedSubs, f); - _.each(self._universalSubs, f); + self._namedSubs.forEach(f); + self._universalSubs.forEach(f); }, _diffCollectionViews: function (beforeCVs) { var self = this; - DiffSequence.diffObjects(beforeCVs, self.collectionViews, { + DiffSequence.diffMaps(beforeCVs, self.collectionViews, { both: function (collectionName, leftValue, rightValue) { rightValue.diff(leftValue); }, rightOnly: function (collectionName, rightValue) { - _.each(rightValue.documents, function (docView, id) { + rightValue.documents.forEach(function (docView, id) { self.sendAdded(collectionName, id, docView.getFields()); }); }, leftOnly: function (collectionName, leftValue) { - _.each(leftValue.documents, function (doc, id) { + leftValue.documents.forEach(function (doc, id) { self.sendRemoved(collectionName, id); }); } @@ -806,7 +806,7 @@ _.extend(Session.prototype, { // update the userId. self._isSending = false; var beforeCVs = self.collectionViews; - self.collectionViews = {}; + self.collectionViews = new Map(); self.userId = userId; // _setUserId is normally called from a Meteor method with @@ -816,14 +816,15 @@ _.extend(Session.prototype, { DDP._CurrentMethodInvocation.withValue(undefined, function () { // Save the old named subs, and reset to having no subscriptions. var oldNamedSubs = self._namedSubs; - self._namedSubs = {}; + self._namedSubs = new Map(); self._universalSubs = []; - _.each(oldNamedSubs, function (sub, subscriptionId) { - self._namedSubs[subscriptionId] = sub._recreate(); + oldNamedSubs.forEach(function (sub, subscriptionId) { + var newSub = sub._recreate(); + self._namedSubs.set(subscriptionId, newSub); // nb: if the handler throws or calls this.error(), it will in fact // immediately send its 'nosub'. This is OK, though. 
- self._namedSubs[subscriptionId]._runHandler(); + newSub._runHandler(); }); // Allow newly-created universal subs to be started on our connection in @@ -852,7 +853,7 @@ _.extend(Session.prototype, { var sub = new Subscription( self, handler, subId, params, name); if (subId) - self._namedSubs[subId] = sub; + self._namedSubs.set(subId, sub); else self._universalSubs.push(sub); @@ -864,12 +865,14 @@ _.extend(Session.prototype, { var self = this; var subName = null; - - if (subId && self._namedSubs[subId]) { - subName = self._namedSubs[subId]._name; - self._namedSubs[subId]._removeAllDocuments(); - self._namedSubs[subId]._deactivate(); - delete self._namedSubs[subId]; + if (subId) { + var maybeSub = self._namedSubs.get(subId); + if (maybeSub) { + subName = maybeSub._name; + maybeSub._removeAllDocuments(); + maybeSub._deactivate(); + self._namedSubs.delete(subId); + } } var response = {msg: 'nosub', id: subId}; @@ -889,12 +892,12 @@ _.extend(Session.prototype, { _deactivateAllSubscriptions: function () { var self = this; - _.each(self._namedSubs, function (sub, id) { + self._namedSubs.forEach(function (sub, id) { sub._deactivate(); }); - self._namedSubs = {}; + self._namedSubs = new Map(); - _.each(self._universalSubs, function (sub) { + self._universalSubs.forEach(function (sub) { sub._deactivate(); }); self._universalSubs = []; @@ -993,7 +996,7 @@ var Subscription = function ( // the set of (collection, documentid) that this subscription has // an opinion about - self._documents = {}; + self._documents = new Map(); // remember if we are ready. 
self._ready = false; @@ -1023,7 +1026,7 @@ var Subscription = function ( idParse: MongoID.idParse }; - Package.facts && Package.facts.Facts.incrementServerFact( + Package['facts-base'] && Package['facts-base'].Facts.incrementServerFact( "livedata", "subscriptions", 1); }; @@ -1142,7 +1145,7 @@ _.extend(Subscription.prototype, { return; self._deactivated = true; self._callStopCallbacks(); - Package.facts && Package.facts.Facts.incrementServerFact( + Package['facts-base'] && Package['facts-base'].Facts.incrementServerFact( "livedata", "subscriptions", -1); }, @@ -1160,10 +1163,8 @@ _.extend(Subscription.prototype, { _removeAllDocuments: function () { var self = this; Meteor._noYieldsAllowed(function () { - _.each(self._documents, function(collectionDocs, collectionName) { - // Iterate over _.keys instead of the dictionary itself, since we'll be - // mutating it. - _.each(_.keys(collectionDocs), function (strId) { + self._documents.forEach(function (collectionDocs, collectionName) { + collectionDocs.forEach(function (strId) { self.removed(collectionName, self._idFilter.idParse(strId)); }); }); @@ -1252,7 +1253,12 @@ _.extend(Subscription.prototype, { if (self._isDeactivated()) return; id = self._idFilter.idStringify(id); - Meteor._ensure(self._documents, collectionName)[id] = true; + let ids = self._documents.get(collectionName); + if (ids == null) { + ids = new Set(); + self._documents.set(collectionName, ids); + } + ids.add(id); self._session.added(self._subscriptionHandle, collectionName, id, fields); }, @@ -1288,7 +1294,7 @@ _.extend(Subscription.prototype, { id = self._idFilter.idStringify(id); // We don't bother to delete sets of things in a collection if the // collection is empty. It could break _removeAllDocuments. 
- delete self._documents[collectionName][id]; + self._documents.get(collectionName).delete(id); self._session.removed(self._subscriptionHandle, collectionName, id); }, @@ -1350,7 +1356,7 @@ Server = function (options) { self.method_handlers = {}; - self.sessions = {}; // map from id to session + self.sessions = new Map(); // map from id to session self.stream_server = new StreamServer; @@ -1399,8 +1405,7 @@ Server = function (options) { socket._meteorSession.processMessage(msg); } catch (e) { // XXX print stack nicely - Meteor._debug("Internal exception while processing message", msg, - e.message, e.stack); + Meteor._debug("Internal exception while processing message", msg, e); } }); @@ -1472,7 +1477,7 @@ _.extend(Server.prototype, { // Note: Troposphere depends on the ability to mutate // Meteor.server.options.heartbeatTimeout! This is a hack, but it's life. socket._meteorSession = new Session(self, version, socket, self.options); - self.sessions[socket._meteorSession.id] = socket._meteorSession; + self.sessions.set(socket._meteorSession.id, socket._meteorSession); self.onConnectionHook.each(function (callback) { if (socket._meteorSession) callback(socket._meteorSession.connectionHandle); @@ -1553,7 +1558,7 @@ _.extend(Server.prototype, { // Spin up the new publisher on any existing session too. Run each // session's subscription in a new Fiber, so that there's no change for // self.sessions to change while we're running this loop. 
- _.each(self.sessions, function (session) { + self.sessions.forEach(function (session) { if (!session._dontStartNewUniversalSubs) { Fiber(function() { session._startSubscription(handler); @@ -1571,9 +1576,7 @@ _.extend(Server.prototype, { _removeSession: function (session) { var self = this; - if (self.sessions[session.id]) { - delete self.sessions[session.id]; - } + self.sessions.delete(session.id); }, /** @@ -1693,7 +1696,7 @@ _.extend(Server.prototype, { _urlForSession: function (sessionId) { var self = this; - var session = self.sessions[sessionId]; + var session = self.sessions.get(sessionId); if (session) return session._socketUrl; else @@ -1732,12 +1735,12 @@ var wrapInternalException = function (exception, context) { return exception; } - // tests can set the 'expected' flag on an exception so it won't go to the - // server log - if (!exception.expected) { - Meteor._debug("Exception " + context, exception.stack); + // Tests can set the '_expectedByTest' flag on an exception so it won't go to + // the server log. + if (!exception._expectedByTest) { + Meteor._debug("Exception " + context, exception); if (exception.sanitizedError) { - Meteor._debug("Sanitized and reported to the client as:", exception.sanitizedError.message); + Meteor._debug("Sanitized and reported to the client as:", exception.sanitizedError); Meteor._debug(); } } diff --git a/packages/ddp-server/package.js b/packages/ddp-server/package.js index de09ed3af1..c738ca316b 100644 --- a/packages/ddp-server/package.js +++ b/packages/ddp-server/package.js @@ -1,6 +1,6 @@ Package.describe({ summary: "Meteor's latency-compensated distributed data server", - version: '2.1.1', + version: '2.2.0', documentation: null }); @@ -31,7 +31,7 @@ Package.onUse(function (api) { api.use('autopublish', 'server', {weak: true}); // If the facts package is loaded, publish some statistics. 
- api.use('facts', 'server', {weak: true}); + api.use('facts-base', 'server', {weak: true}); api.use('callback-hook', 'server'); diff --git a/packages/ddp-server/writefence.js b/packages/ddp-server/writefence.js index a114bfd971..e9310c9f7f 100644 --- a/packages/ddp-server/writefence.js +++ b/packages/ddp-server/writefence.js @@ -99,7 +99,7 @@ _.extend(DDPServer._WriteFence.prototype, { try { func(self); } catch (err) { - Meteor._debug("exception in write fence callback:", err); + Meteor._debug("exception in write fence callback", err); } } diff --git a/packages/ddp/README.md b/packages/ddp/README.md index f56c6cf13c..0f4fb03507 100644 --- a/packages/ddp/README.md +++ b/packages/ddp/README.md @@ -10,10 +10,10 @@ specification](https://github.com/meteor/meteor/blob/devel/packages/ddp/DDP.md). This package is used by nearly every Meteor application and provides a full implementation of DDP in JavaScript. API documentation is on the -[main Meteor documentation page](http://docs.meteor.com/), under +[main Meteor documentation page](https://docs.meteor.com/), under "Publish and subscribe", "Methods", and "Server connections". Note in particular that clients can use -[`DDP.connect`](http://docs.meteor.com/#ddp_connect) to open a DDP +[`DDP.connect`](https://docs.meteor.com/api/connections.html#DDP-connect) to open a DDP connection to any DDP service on the Internet. 
### Supported platforms diff --git a/packages/facts/.gitignore b/packages/deprecated/facts/.gitignore similarity index 100% rename from packages/facts/.gitignore rename to packages/deprecated/facts/.gitignore diff --git a/packages/facts/README.md b/packages/deprecated/facts/README.md similarity index 72% rename from packages/facts/README.md rename to packages/deprecated/facts/README.md index 15358322c6..d6ca4f5445 100644 --- a/packages/facts/README.md +++ b/packages/deprecated/facts/README.md @@ -2,4 +2,4 @@ [Source code of released version](https://github.com/meteor/meteor/tree/master/packages/facts) | [Source code of development version](https://github.com/meteor/meteor/tree/devel/packages/facts) *** -This is an internal Meteor package. \ No newline at end of file +This is a legacy internal Meteor package. Use facts-ui or facts-base instead. diff --git a/packages/facts/facts.html b/packages/deprecated/facts/facts.html similarity index 100% rename from packages/facts/facts.html rename to packages/deprecated/facts/facts.html diff --git a/packages/facts/facts.js b/packages/deprecated/facts/facts.js similarity index 100% rename from packages/facts/facts.js rename to packages/deprecated/facts/facts.js diff --git a/packages/facts/package.js b/packages/deprecated/facts/package.js similarity index 100% rename from packages/facts/package.js rename to packages/deprecated/facts/package.js diff --git a/packages/jquery/.gitignore b/packages/deprecated/markdown/.gitignore similarity index 100% rename from packages/jquery/.gitignore rename to packages/deprecated/markdown/.gitignore diff --git a/packages/markdown/README.md b/packages/deprecated/markdown/README.md similarity index 100% rename from packages/markdown/README.md rename to packages/deprecated/markdown/README.md diff --git a/packages/markdown/license.txt b/packages/deprecated/markdown/license.txt similarity index 100% rename from packages/markdown/license.txt rename to packages/deprecated/markdown/license.txt diff 
--git a/packages/markdown/package.js b/packages/deprecated/markdown/package.js similarity index 100% rename from packages/markdown/package.js rename to packages/deprecated/markdown/package.js diff --git a/packages/markdown/showdown.js b/packages/deprecated/markdown/showdown.js similarity index 100% rename from packages/markdown/showdown.js rename to packages/deprecated/markdown/showdown.js diff --git a/packages/markdown/template-integration.js b/packages/deprecated/markdown/template-integration.js similarity index 100% rename from packages/markdown/template-integration.js rename to packages/deprecated/markdown/template-integration.js diff --git a/packages/markdown/.gitignore b/packages/deprecated/showdown/.gitignore similarity index 100% rename from packages/markdown/.gitignore rename to packages/deprecated/showdown/.gitignore diff --git a/packages/showdown/README.md b/packages/deprecated/showdown/README.md similarity index 100% rename from packages/showdown/README.md rename to packages/deprecated/showdown/README.md diff --git a/packages/showdown/package.js b/packages/deprecated/showdown/package.js similarity index 100% rename from packages/showdown/package.js rename to packages/deprecated/showdown/package.js diff --git a/packages/showdown/.gitignore b/packages/deprecated/stylus/.gitignore similarity index 100% rename from packages/showdown/.gitignore rename to packages/deprecated/stylus/.gitignore diff --git a/packages/standard-minifier-js/.npm/plugin/minifyStd/.gitignore b/packages/deprecated/stylus/.npm/plugin/compileStylusBatch/.gitignore similarity index 100% rename from packages/standard-minifier-js/.npm/plugin/minifyStd/.gitignore rename to packages/deprecated/stylus/.npm/plugin/compileStylusBatch/.gitignore diff --git a/packages/standard-minifier-css/.npm/plugin/minifyStd/README b/packages/deprecated/stylus/.npm/plugin/compileStylusBatch/README similarity index 100% rename from packages/standard-minifier-css/.npm/plugin/minifyStd/README rename to 
packages/deprecated/stylus/.npm/plugin/compileStylusBatch/README diff --git a/packages/stylus/.npm/plugin/compileStylusBatch/npm-shrinkwrap.json b/packages/deprecated/stylus/.npm/plugin/compileStylusBatch/npm-shrinkwrap.json similarity index 89% rename from packages/stylus/.npm/plugin/compileStylusBatch/npm-shrinkwrap.json rename to packages/deprecated/stylus/.npm/plugin/compileStylusBatch/npm-shrinkwrap.json index 4fd3768234..91eda21624 100644 --- a/packages/stylus/.npm/plugin/compileStylusBatch/npm-shrinkwrap.json +++ b/packages/deprecated/stylus/.npm/plugin/compileStylusBatch/npm-shrinkwrap.json @@ -32,9 +32,9 @@ "integrity": "sha1-lS/0jVZGPTtTj4XvL46t39KEsTM=" }, "caniuse-db": { - "version": "1.0.30000693", - "resolved": "https://registry.npmjs.org/caniuse-db/-/caniuse-db-1.0.30000693.tgz", - "integrity": "sha1-hRDnqasErcyiOl3O+jTfnSjBziA=" + "version": "1.0.30000793", + "resolved": "https://registry.npmjs.org/caniuse-db/-/caniuse-db-1.0.30000793.tgz", + "integrity": "sha1-PADGbkI6ehkHx92Wdpp4sq+opy4=" }, "concat-map": { "version": "0.0.1", @@ -47,9 +47,9 @@ "integrity": "sha1-Mh9s9zeCpv91ERE5D8BeLGV9jJs=" }, "debug": { - "version": "2.6.8", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz", - "integrity": "sha1-5zFTHKLt4n0YgiJCfaF4IdaP9Pw=" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==" }, "fs.realpath": { "version": "1.0.0", @@ -77,9 +77,9 @@ "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" }, "js-base64": { - "version": "2.1.9", - "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.1.9.tgz", - "integrity": "sha1-8OgK4DmkvWVLXygfyT8EqRSn/M4=" + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.4.1.tgz", + "integrity": "sha512-2h586r2I/CqU7z1aa1kBgWaVAXWAZK+zHnceGi/jFgn7+7VSluxYer/i3xOZVearCxxXvyDkLtTBo+OeJCA3kA==" }, "minimatch": { "version": 
"3.0.4", @@ -137,9 +137,9 @@ "integrity": "sha1-1M9vGXdGSMSSrFfCmPavs8BMrv4=", "dependencies": { "source-map": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.6.tgz", - "integrity": "sha1-dc449SvwczxafwwRjYEzSiu19BI=" + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" } } }, @@ -160,7 +160,7 @@ }, "stylus": { "version": "https://github.com/meteor/stylus/tarball/bb47a357d132ca843718c63998eb37b90013a449", - "integrity": "sha1-X7DTXdx6DgWUHDyZ5wgPUSFBLi8=" + "integrity": "sha512-j6fvtoNfjx/TEIlIOZ53OqbP6uDdF5HsQidsRfvp0IfW0D5PCtV8IeHVQa4jjbhF9PbjOXX/rrt5lP4CGpgtfw==" }, "supports-color": { "version": "3.2.3", diff --git a/packages/stylus/README.md b/packages/deprecated/stylus/README.md similarity index 92% rename from packages/stylus/README.md rename to packages/deprecated/stylus/README.md index b2e6866c50..4d5893cbcd 100644 --- a/packages/stylus/README.md +++ b/packages/deprecated/stylus/README.md @@ -2,6 +2,10 @@ [Source code of released version](https://github.com/meteor/meteor/tree/master/packages/stylus) | [Source code of development version](https://github.com/meteor/meteor/tree/devel/packages/stylus) *** +**DEPRECATED:** This package is no longer supported/maintained as part of the +Meteor project. To continue using the last supported version of this package, +pin your package version to 2.513.13 (`meteor add stylus@=2.513.13`). + [Stylus](http://learnboost.github.com/stylus/) is a CSS pre-processor with a simple syntax and expressive dynamic behavior. It allows for more compact stylesheets and helps reduce code duplication in CSS files. 
@@ -97,4 +101,3 @@ not supported at the moment: - globbing: `@import './folder/*'` - importing `index.styl`: `@import ./folder/` - should automatically load `./folder/index.styl` - diff --git a/packages/deprecated/stylus/deprecation_notice.js b/packages/deprecated/stylus/deprecation_notice.js new file mode 100644 index 0000000000..59e32f8c49 --- /dev/null +++ b/packages/deprecated/stylus/deprecation_notice.js @@ -0,0 +1,7 @@ +console.warn([ + "The `stylus` package has been deprecated.", + "", + "To continue using the last supported version", + "of this package, pin your package version to", + "2.513.13 (`meteor add stylus@=2.513.13`).", +].join("\n")); diff --git a/packages/stylus/package.js b/packages/deprecated/stylus/package.js similarity index 92% rename from packages/stylus/package.js rename to packages/deprecated/stylus/package.js index 14190a006d..2a7197eafb 100644 --- a/packages/stylus/package.js +++ b/packages/deprecated/stylus/package.js @@ -1,6 +1,6 @@ Package.describe({ summary: 'Expressive, dynamic, robust CSS', - version: "2.513.13" + version: "2.513.15" }); Package.registerBuildPlugin({ @@ -18,6 +18,7 @@ Package.registerBuildPlugin({ Package.onUse(function (api) { api.use('isobuild:compiler-plugin@1.0.0'); + api.addFiles("deprecation_notice.js"); }); Package.onTest(function (api) { diff --git a/packages/stylus/plugin/compile-stylus.js b/packages/deprecated/stylus/plugin/compile-stylus.js similarity index 99% rename from packages/stylus/plugin/compile-stylus.js rename to packages/deprecated/stylus/plugin/compile-stylus.js index 21de2eda24..cc0003d3e3 100644 --- a/packages/stylus/plugin/compile-stylus.js +++ b/packages/deprecated/stylus/plugin/compile-stylus.js @@ -21,6 +21,7 @@ class StylusCompiler extends MultiFileCachingCompiler { getCacheKey(inputFile) { return [ + inputFile.getArch(), inputFile.getSourceHash(), inputFile.getFileOptions(), ]; diff --git a/packages/stylus/stylus_tests.html b/packages/deprecated/stylus/stylus_tests.html similarity 
index 100% rename from packages/stylus/stylus_tests.html rename to packages/deprecated/stylus/stylus_tests.html diff --git a/packages/stylus/stylus_tests.import.styl b/packages/deprecated/stylus/stylus_tests.import.styl similarity index 100% rename from packages/stylus/stylus_tests.import.styl rename to packages/deprecated/stylus/stylus_tests.import.styl diff --git a/packages/stylus/stylus_tests.js b/packages/deprecated/stylus/stylus_tests.js similarity index 100% rename from packages/stylus/stylus_tests.js rename to packages/deprecated/stylus/stylus_tests.js diff --git a/packages/stylus/stylus_tests.styl b/packages/deprecated/stylus/stylus_tests.styl similarity index 100% rename from packages/stylus/stylus_tests.styl rename to packages/deprecated/stylus/stylus_tests.styl diff --git a/packages/diff-sequence/diff.js b/packages/diff-sequence/diff.js index bc5983e57e..d24fa616ed 100644 --- a/packages/diff-sequence/diff.js +++ b/packages/diff-sequence/diff.js @@ -1,4 +1,15 @@ -DiffSequence = {}; +export const DiffSequence = {}; + +const hasOwn = Object.prototype.hasOwnProperty; + +function isObjEmpty(obj) { + for (let key in Object(obj)) { + if (hasOwn.call(obj, key)) { + return false; + } + } + return true; +} // ordered: bool. // old_results and new_results: collections of documents. @@ -31,7 +42,7 @@ DiffSequence.diffQueryUnorderedChanges = function (oldResults, newResults, var projectedOld = projectionFn(oldDoc); var changedFields = DiffSequence.makeChangedFields(projectedNew, projectedOld); - if (! _.isEmpty(changedFields)) { + if (! 
isObjEmpty(changedFields)) { observer.changed(id, changedFields); } } @@ -50,21 +61,20 @@ DiffSequence.diffQueryUnorderedChanges = function (oldResults, newResults, } }; - DiffSequence.diffQueryOrderedChanges = function (old_results, new_results, observer, options) { options = options || {}; var projectionFn = options.projectionFn || EJSON.clone; var new_presence_of_id = {}; - _.each(new_results, function (doc) { + new_results.forEach(function (doc) { if (new_presence_of_id[doc._id]) Meteor._debug("Duplicate _id in new_results"); new_presence_of_id[doc._id] = true; }); var old_index_of_id = {}; - _.each(old_results, function (doc, i) { + old_results.forEach(function (doc, i) { if (doc._id in old_index_of_id) Meteor._debug("Duplicate _id in old_results"); old_index_of_id[doc._id] = i; @@ -155,19 +165,20 @@ DiffSequence.diffQueryOrderedChanges = function (old_results, new_results, // an id of "null" unmoved.push(new_results.length); - _.each(old_results, function (doc) { + old_results.forEach(function (doc) { if (!new_presence_of_id[doc._id]) observer.removed && observer.removed(doc._id); }); + // for each group of things in the new_results that is anchored by an unmoved // element, iterate through the things before it. var startOfGroup = 0; - _.each(unmoved, function (endOfGroup) { + unmoved.forEach(function (endOfGroup) { var groupId = new_results[endOfGroup] ? 
new_results[endOfGroup]._id : null; var oldDoc, newDoc, fields, projectedNew, projectedOld; for (var i = startOfGroup; i < endOfGroup; i++) { newDoc = new_results[i]; - if (!_.has(old_index_of_id, newDoc._id)) { + if (!hasOwn.call(old_index_of_id, newDoc._id)) { fields = projectionFn(newDoc); delete fields._id; observer.addedBefore && observer.addedBefore(newDoc._id, fields, groupId); @@ -178,7 +189,7 @@ DiffSequence.diffQueryOrderedChanges = function (old_results, new_results, projectedNew = projectionFn(newDoc); projectedOld = projectionFn(oldDoc); fields = DiffSequence.makeChangedFields(projectedNew, projectedOld); - if (!_.isEmpty(fields)) { + if (!isObjEmpty(fields)) { observer.changed && observer.changed(newDoc._id, fields); } observer.movedBefore && observer.movedBefore(newDoc._id, groupId); @@ -190,7 +201,7 @@ DiffSequence.diffQueryOrderedChanges = function (old_results, new_results, projectedNew = projectionFn(newDoc); projectedOld = projectionFn(oldDoc); fields = DiffSequence.makeChangedFields(projectedNew, projectedOld); - if (!_.isEmpty(fields)) { + if (!isObjEmpty(fields)) { observer.changed && observer.changed(newDoc._id, fields); } } @@ -208,16 +219,39 @@ DiffSequence.diffQueryOrderedChanges = function (old_results, new_results, // both: function (key, leftValue, rightValue) {...}, // } DiffSequence.diffObjects = function (left, right, callbacks) { - _.each(left, function (leftValue, key) { - if (_.has(right, key)) + Object.keys(left).forEach(key => { + const leftValue = left[key]; + if (hasOwn.call(right, key)) { callbacks.both && callbacks.both(key, leftValue, right[key]); - else + } else { callbacks.leftOnly && callbacks.leftOnly(key, leftValue); + } }); + if (callbacks.rightOnly) { - _.each(right, function(rightValue, key) { - if (!_.has(left, key)) + Object.keys(right).forEach(key => { + const rightValue = right[key]; + if (! 
hasOwn.call(left, key)) { callbacks.rightOnly(key, rightValue); + } + }); + } +}; + +DiffSequence.diffMaps = function (left, right, callbacks) { + left.forEach(function (leftValue, key) { + if (right.has(key)){ + callbacks.both && callbacks.both(key, leftValue, right.get(key)); + } else { + callbacks.leftOnly && callbacks.leftOnly(key, leftValue); + } + }); + + if (callbacks.rightOnly) { + right.forEach(function (rightValue, key) { + if (!left.has(key)){ + callbacks.rightOnly(key, rightValue); + } }); } }; @@ -241,11 +275,13 @@ DiffSequence.makeChangedFields = function (newDoc, oldDoc) { }; DiffSequence.applyChanges = function (doc, changeFields) { - _.each(changeFields, function (value, key) { - if (value === undefined) + Object.keys(changeFields).forEach(key => { + const value = changeFields[key]; + if (typeof value === "undefined") { delete doc[key]; - else + } else { doc[key] = value; + } }); }; diff --git a/packages/diff-sequence/package.js b/packages/diff-sequence/package.js index 0725708bdc..8e937db42e 100644 --- a/packages/diff-sequence/package.js +++ b/packages/diff-sequence/package.js @@ -1,15 +1,14 @@ Package.describe({ summary: "An implementation of a diff algorithm on arrays and objects.", - version: '1.0.7', + version: '1.1.0', documentation: null }); Package.onUse(function (api) { + api.use('ecmascript'); + api.use('ejson'); + api.mainModule('diff.js'); api.export('DiffSequence'); - api.use(['underscore', 'ejson']); - api.addFiles([ - 'diff.js' - ]); }); Package.onTest(function (api) { diff --git a/packages/dynamic-import/cache.js b/packages/dynamic-import/cache.js index 14cc5d08fd..350f3a60f3 100644 --- a/packages/dynamic-import/cache.js +++ b/packages/dynamic-import/cache.js @@ -1,4 +1,3 @@ -var hasOwn = Object.prototype.hasOwnProperty; var dbPromise; var canUseCache = diff --git a/packages/dynamic-import/client.js b/packages/dynamic-import/client.js index 2be3d7fa2c..e63f14d2d6 100644 --- a/packages/dynamic-import/client.js +++ 
b/packages/dynamic-import/client.js @@ -1,5 +1,6 @@ var Module = module.constructor; var cache = require("./cache.js"); +var meteorInstall = require("meteor/modules").meteorInstall; // Call module.dynamicImport(id) to fetch a module and any/all of its // dependencies that have not already been fetched, and evaluate them as @@ -110,17 +111,34 @@ function makeModuleFunction(id, source, options) { }; } +var secretKey = null; +exports.setSecretKey = function (key) { + secretKey = key; +}; + +var fetchURL = require("./common.js").fetchURL; + function fetchMissing(missingTree) { - // Update lastFetchMissingPromise immediately, without waiting for - // the results to be delivered. - return new Promise(function (resolve, reject) { - Meteor.call( - "__dynamicImport", - missingTree, - function (error, resultsTree) { - error ? reject(error) : resolve(resultsTree); - } - ); + // If the hostname of the URL returned by Meteor.absoluteUrl differs + // from location.host, then we'll be making a cross-origin request here, + // but that's fine because the dynamic-import server sets appropriate + // CORS headers to enable fetching dynamic modules from any + // origin. Browsers that check CORS do so by sending an additional + // preflight OPTIONS request, which may add latency to the first dynamic + // import() request, so it's a good idea for ROOT_URL to match + // location.host if possible, though not strictly necessary. + var url = Meteor.absoluteUrl(fetchURL); + + if (secretKey) { + url += "key=" + secretKey; + } + + return fetch(url, { + method: "POST", + body: JSON.stringify(missingTree) + }).then(function (res) { + if (! 
res.ok) throw res; + return res.json(); }); } @@ -138,7 +156,7 @@ function addToTree(tree, id, value) { function getNamespace(module, id) { var namespace; - module.watch(module.require(id), { + module.link(id, { "*": function (ns) { namespace = ns; } diff --git a/packages/dynamic-import/common.js b/packages/dynamic-import/common.js new file mode 100644 index 0000000000..517fd6556b --- /dev/null +++ b/packages/dynamic-import/common.js @@ -0,0 +1 @@ +exports.fetchURL = "/__meteor__/dynamic-import/fetch"; diff --git a/packages/dynamic-import/dynamic-versions.js b/packages/dynamic-import/dynamic-versions.js index a903477aca..41b749a2f7 100644 --- a/packages/dynamic-import/dynamic-versions.js +++ b/packages/dynamic-import/dynamic-versions.js @@ -30,3 +30,57 @@ exports.get = function (id) { return version; }; + +function getFlatModuleArray(tree) { + var parts = [""]; + var result = []; + + function walk(t) { + if (t && typeof t === "object") { + Object.keys(t).forEach(function (key) { + parts.push(key); + walk(t[key]); + parts.pop(); + }); + } else if (typeof t === "string") { + result.push(parts.join("/")); + } + } + + walk(tree); + + return result; +} + +// If Package.appcache is loaded, preload additional modules after the +// core bundle has been loaded. +function precacheOnLoad(event) { + // Check inside onload to make sure Package.appcache has had a chance to + // become available. + if (! Package.appcache) { + return; + } + + // Prefetch in chunks to reduce overhead. If we call module.prefetch(id) + // multiple times in the same tick of the event loop, all those modules + // will be fetched in one HTTP POST request. + function prefetchInChunks(modules, amount) { + Promise.all(modules.splice(0, amount).map(function (id) { + return module.prefetch(id); + })).then(function () { + if (modules.length > 0) { + prefetchInChunks(modules, amount); + } + }); + } + + // Get a flat array of modules and start prefetching. 
+ prefetchInChunks(getFlatModuleArray(versions), 50); +} + +// Use window.onload to only prefetch after the main bundle has loaded. +if (global.addEventListener) { + global.addEventListener('load', precacheOnLoad, false); +} else if (global.attachEvent) { + global.attachEvent('onload', precacheOnLoad); +} diff --git a/packages/dynamic-import/package.js b/packages/dynamic-import/package.js index f23cabe351..b57a77067b 100644 --- a/packages/dynamic-import/package.js +++ b/packages/dynamic-import/package.js @@ -1,6 +1,6 @@ Package.describe({ name: "dynamic-import", - version: "0.2.1", + version: "0.5.0", summary: "Runtime support for Meteor 1.5 dynamic import(...) syntax", documentation: "README.md" }); @@ -9,14 +9,11 @@ Package.onUse(function (api) { // Do not allow this package to be used in pre-Meteor 1.5 apps. api.use("isobuild:dynamic-import@1.5.0"); - // Modify browser policy only if browser-policy packages are used. - api.use("browser-policy-content", { weak: true }); - api.use("modules"); api.use("promise"); - api.use("ddp"); - api.use("check"); - api.use("ecmascript", "server"); + api.use("fetch"); + api.use("modern-browsers"); + api.use("inter-process-messaging", "server"); api.mainModule("client.js", "client"); api.mainModule("server.js", "server"); diff --git a/packages/dynamic-import/security.js b/packages/dynamic-import/security.js index f8ccb576f2..6b83e8b191 100644 --- a/packages/dynamic-import/security.js +++ b/packages/dynamic-import/security.js @@ -1,20 +1,22 @@ -const bpc = Package["browser-policy-content"]; -const BP = bpc && bpc.BrowserPolicy; -const BPc = BP && BP.content; -if (BPc) { - // The ability to evaluate new code is essential for loading dynamic - // modules. Without eval, we would be forced to load modules using - // ", but normal {{spacebars}} escaping escapes too much! 
See - // https://github.com/meteor/meteor/issues/3730 - meteorRuntimeConfig: JSON.stringify( - encodeURIComponent(JSON.stringify(runtimeConfig))), - rootUrlPathPrefix: __meteor_runtime_config__.ROOT_URL_PATH_PREFIX || '', - bundledJsCssUrlRewriteHook: bundledJsCssUrlRewriteHook, - inlineScriptsAllowed: WebAppInternals.inlineScriptsAllowed(), - inline: additionalOptions.inline - } - }, additionalOptions) - ); + + return new Boilerplate(arch, manifest, _.extend({ + pathMapper(itemPath) { + return pathJoin(archPath[arch], itemPath); + }, + baseDataExtension: { + additionalStaticJs: _.map( + additionalStaticJs || [], + function (contents, pathname) { + return { + pathname: pathname, + contents: contents + }; + } + ), + // Convert to a JSON string, then get rid of most weird characters, then + // wrap in double quotes. (The outermost JSON.stringify really ought to + // just be "wrap in double quotes" but we use it to be safe.) This might + // end up inside a ", but normal {{spacebars}} escaping escapes too much! See + // https://github.com/meteor/meteor/issues/3730 + meteorRuntimeConfig: JSON.stringify( + encodeURIComponent(JSON.stringify(runtimeConfig))), + rootUrlPathPrefix: __meteor_runtime_config__.ROOT_URL_PATH_PREFIX || '', + bundledJsCssUrlRewriteHook: bundledJsCssUrlRewriteHook, + sriMode: sriMode, + inlineScriptsAllowed: WebAppInternals.inlineScriptsAllowed(), + inline: additionalOptions.inline + } + }, additionalOptions)); }; -// A mapping from url path to "info". Where "info" has the following fields: +// A mapping from url path to architecture (e.g. 
"web.browser") to static +// file information with the following fields: // - type: the type of file to be served // - cacheable: optionally, whether the file should be cached or not // - sourceMapUrl: optionally, the url of the source map @@ -380,11 +357,16 @@ WebAppInternals.generateBoilerplateInstance = function (arch, // - content: the stringified content that should be served at this path // - absolutePath: the absolute path on disk to the file -var staticFiles; +var staticFilesByArch; // Serve static files from the manifest or added with // `addStaticJs`. Exported for tests. -WebAppInternals.staticFilesMiddleware = function (staticFiles, req, res, next) { +WebAppInternals.staticFilesMiddleware = async function ( + staticFilesByArch, + req, + res, + next, +) { if ('GET' != req.method && 'HEAD' != req.method && 'OPTIONS' != req.method) { next(); return; @@ -416,7 +398,17 @@ WebAppInternals.staticFilesMiddleware = function (staticFiles, req, res, next) { return; } - if (!_.has(staticFiles, pathname)) { + const { arch, path } = getArchAndPath( + pathname, + identifyBrowser(req.headers["user-agent"]), + ); + + // If pauseClient(arch) has been called, program.paused will be a + // Promise that will be resolved when the program is unpaused. + await WebApp.clientPrograms[arch].paused; + + const info = getStaticFileInfo(pathname, path, arch); + if (! info) { next(); return; } @@ -425,14 +417,20 @@ WebAppInternals.staticFilesMiddleware = function (staticFiles, req, res, next) { // 'send' and yield to the event loop, we never call another handler with // 'next'. - var info = staticFiles[pathname]; - // Cacheable files are files that should never change. Typically // named by their hash (eg meteor bundled js and css files). // We cache them ~forever (1yr). - var maxAge = info.cacheable - ? 1000 * 60 * 60 * 24 * 365 - : 0; + const maxAge = info.cacheable + ? 
1000 * 60 * 60 * 24 * 365 + : 0; + + if (info.cacheable) { + // Since we use req.headers["user-agent"] to determine whether the + // client should receive modern or legacy resources, tell the client + // to invalidate cached resources when/if its user agent string + // changes in the future. + res.setHeader("Vary", "User-Agent"); + } // Set the X-SourceMap header, which current Chrome, FireFox, and Safari // understand. (The SourceMap header is slightly more spec-correct but FF @@ -464,32 +462,95 @@ WebAppInternals.staticFilesMiddleware = function (staticFiles, req, res, next) { res.end(); } else { send(req, info.absolutePath, { - maxage: maxAge, - dotfiles: 'allow', // if we specified a dotfile in the manifest, serve it - lastModified: false // don't set last-modified based on the file date - }).on('error', function (err) { - Log.error("Error serving static file " + err); - res.writeHead(500); - res.end(); - }) - .on('directory', function () { - Log.error("Unexpected directory " + info.absolutePath); - res.writeHead(500); - res.end(); - }) - .pipe(res); + maxage: maxAge, + dotfiles: 'allow', // if we specified a dotfile in the manifest, serve it + lastModified: false // don't set last-modified based on the file date + }).on('error', function (err) { + Log.error("Error serving static file " + err); + res.writeHead(500); + res.end(); + }).on('directory', function () { + Log.error("Unexpected directory " + info.absolutePath); + res.writeHead(500); + res.end(); + }).pipe(res); } }; -var getUrlPrefixForArch = function (arch) { - // XXX we rely on the fact that arch names don't contain slashes - // in that case we would need to uri escape it +function getStaticFileInfo(originalPath, path, arch) { + if (! hasOwn.call(WebApp.clientPrograms, arch)) { + return null; + } - // We add '__' to the beginning of non-standard archs to "scope" the url - // to Meteor internals. - return arch === WebApp.defaultArch ? 
- '' : '/' + '__' + arch.replace(/^web\./, ''); -}; + // Get a list of all available static file architectures, with arch + // first in the list if it exists. + const staticArchList = Object.keys(staticFilesByArch); + const archIndex = staticArchList.indexOf(arch); + if (archIndex > 0) { + staticArchList.unshift(staticArchList.splice(archIndex, 1)[0]); + } + + let info = null; + + staticArchList.some(arch => { + const staticFiles = staticFilesByArch[arch]; + + function finalize(path) { + info = staticFiles[path]; + // Sometimes we register a lazy function instead of actual data in + // the staticFiles manifest. + if (typeof info === "function") { + info = staticFiles[path] = info(); + } + return info; + } + + // If staticFiles contains originalPath with the arch inferred above, + // use that information. + if (hasOwn.call(staticFiles, originalPath)) { + return finalize(originalPath); + } + + // If getArchAndPath returned an alternate path, try that instead. + if (path !== originalPath && + hasOwn.call(staticFiles, path)) { + return finalize(path); + } + }); + + return info; +} + +function getArchAndPath(path, browser) { + const pathParts = path.split("/"); + const archKey = pathParts[1]; + + if (archKey.startsWith("__")) { + const archCleaned = "web." + archKey.slice(2); + if (hasOwn.call(WebApp.clientPrograms, archCleaned)) { + pathParts.splice(1, 1); // Remove the archKey part. + return { + arch: archCleaned, + path: pathParts.join("/"), + }; + } + } + + // TODO Perhaps one day we could infer Cordova clients here, so that we + // wouldn't have to use prefixed "/__cordova/..." URLs. + const arch = isModern(browser) + ? "web.browser" + : "web.browser.legacy"; + + if (hasOwn.call(WebApp.clientPrograms, arch)) { + return { arch, path }; + } + + return { + arch: WebApp.defaultArch, + path, + }; +} // Parse the passed in port value. Return the port as-is if it's a String // (e.g. 
a Windows Server style named pipe), otherwise return the port as an @@ -505,6 +566,16 @@ WebAppInternals.parsePort = port => { return parsedPort; } +import { onMessage } from "meteor/inter-process-messaging"; + +onMessage("webapp-pause-client", async ({ arch }) => { + WebAppInternals.pauseClient(arch); +}); + +onMessage("webapp-reload-client", async ({ arch }) => { + WebAppInternals.generateClientProgram(arch); +}); + function runWebAppServer() { var shuttingDown = false; var syncQueue = new Meteor._SynchronousQueue(); @@ -515,79 +586,15 @@ function runWebAppServer() { WebAppInternals.reloadClientPrograms = function () { syncQueue.runTask(function() { - staticFiles = {}; - var generateClientProgram = function (clientPath, arch) { - // read the control for the client we'll be serving up - var clientJsonPath = pathJoin(__meteor_bootstrap__.serverDir, - clientPath); - var clientDir = pathDirname(clientJsonPath); - var clientJson = JSON.parse(readUtf8FileSync(clientJsonPath)); - if (clientJson.format !== "web-program-pre1") - throw new Error("Unsupported format for client assets: " + - JSON.stringify(clientJson.format)); + staticFilesByArch = Object.create(null); - if (! clientJsonPath || ! clientDir || ! clientJson) - throw new Error("Client config file not parsed."); - - var urlPrefix = getUrlPrefixForArch(arch); - - var manifest = clientJson.manifest; - _.each(manifest, function (item) { - if (item.url && item.where === "client") { - staticFiles[urlPrefix + getItemPathname(item.url)] = { - absolutePath: pathJoin(clientDir, item.path), - cacheable: item.cacheable, - hash: item.hash, - // Link from source to its map - sourceMapUrl: item.sourceMapUrl, - type: item.type - }; - - if (item.sourceMap) { - // Serve the source map too, under the specified URL. We assume all - // source maps are cacheable. 
- staticFiles[urlPrefix + getItemPathname(item.sourceMapUrl)] = { - absolutePath: pathJoin(clientDir, item.sourceMap), - cacheable: true - }; - } - } - }); - - var program = { - format: "web-program-pre1", - manifest: manifest, - version: process.env.AUTOUPDATE_VERSION || - WebAppHashing.calculateClientHash( - manifest, - null, - _.pick(__meteor_runtime_config__, "PUBLIC_SETTINGS") - ), - cordovaCompatibilityVersions: clientJson.cordovaCompatibilityVersions, - PUBLIC_SETTINGS: __meteor_runtime_config__.PUBLIC_SETTINGS - }; - - WebApp.clientPrograms[arch] = program; - - // Serve the program as a string at /foo//manifest.json - // XXX change manifest.json -> program.json - staticFiles[urlPrefix + getItemPathname('/manifest.json')] = { - content: JSON.stringify(program), - cacheable: false, - hash: program.version, - type: "json" - }; - }; + const { configJson } = __meteor_bootstrap__; + const clientArchs = configJson.clientArchs || + Object.keys(configJson.clientPaths); try { - var clientPaths = __meteor_bootstrap__.configJson.clientPaths; - _.each(clientPaths, function (clientPath, arch) { - archPath[arch] = pathDirname(clientPath); - generateClientProgram(clientPath, arch); - }); - - // Exported for tests. - WebAppInternals.staticFiles = staticFiles; + clientArchs.forEach(generateClientProgram); + WebAppInternals.staticFilesByArch = staticFilesByArch; } catch (e) { Log.error("Error reloading the client program: " + e.stack); process.exit(1); @@ -595,55 +602,206 @@ function runWebAppServer() { }); }; + // Pause any incoming requests and make them wait for the program to be + // unpaused the next time generateClientProgram(arch) is called. 
+ WebAppInternals.pauseClient = function (arch) { + syncQueue.runTask(() => { + const program = WebApp.clientPrograms[arch]; + const { unpause } = program; + program.paused = new Promise(resolve => { + if (typeof unpause === "function") { + // If there happens to be an existing program.unpause function, + // compose it with the resolve function. + program.unpause = function () { + unpause(); + resolve(); + }; + } else { + program.unpause = resolve; + } + }); + }); + }; + + WebAppInternals.generateClientProgram = function (arch) { + syncQueue.runTask(() => generateClientProgram(arch)); + }; + + function generateClientProgram(arch) { + const clientDir = pathJoin( + pathDirname(__meteor_bootstrap__.serverDir), + arch, + ); + + // read the control for the client we'll be serving up + const programJsonPath = pathJoin(clientDir, "program.json"); + + let programJson; + try { + programJson = JSON.parse(readFileSync(programJsonPath)); + } catch (e) { + if (e.code === "ENOENT") return; + throw e; + } + + if (programJson.format !== "web-program-pre1") { + throw new Error("Unsupported format for client assets: " + + JSON.stringify(programJson.format)); + } + + if (! programJsonPath || ! clientDir || ! programJson) { + throw new Error("Client config file not parsed."); + } + + archPath[arch] = clientDir; + const staticFiles = staticFilesByArch[arch] = Object.create(null); + + const { manifest } = programJson; + manifest.forEach(item => { + if (item.url && item.where === "client") { + staticFiles[getItemPathname(item.url)] = { + absolutePath: pathJoin(clientDir, item.path), + cacheable: item.cacheable, + hash: item.hash, + // Link from source to its map + sourceMapUrl: item.sourceMapUrl, + type: item.type + }; + + if (item.sourceMap) { + // Serve the source map too, under the specified URL. We assume + // all source maps are cacheable. 
+ staticFiles[getItemPathname(item.sourceMapUrl)] = { + absolutePath: pathJoin(clientDir, item.sourceMap), + cacheable: true + }; + } + } + }); + + const { PUBLIC_SETTINGS } = __meteor_runtime_config__; + const configOverrides = { + PUBLIC_SETTINGS, + // Since the minimum modern versions defined in the modern-versions + // package affect which bundle a given client receives, any changes + // in those versions should trigger a corresponding change in the + // versions calculated below. + minimumModernVersionsHash: calculateHashOfMinimumVersions(), + }; + + const oldProgram = WebApp.clientPrograms[arch]; + const newProgram = WebApp.clientPrograms[arch] = { + format: "web-program-pre1", + manifest: manifest, + // Use arrow functions so that these versions can be lazily + // calculated later, and so that they will not be included in the + // staticFiles[manifestUrl].content string below. + version: () => WebAppHashing.calculateClientHash( + manifest, null, configOverrides), + versionRefreshable: () => WebAppHashing.calculateClientHash( + manifest, type => type === "css", configOverrides), + versionNonRefreshable: () => WebAppHashing.calculateClientHash( + manifest, type => type !== "css", configOverrides), + cordovaCompatibilityVersions: programJson.cordovaCompatibilityVersions, + PUBLIC_SETTINGS, + }; + + // Expose program details as a string reachable via the following URL. 
+ const manifestUrlPrefix = "/__" + arch.replace(/^web\./, ""); + const manifestUrl = manifestUrlPrefix + getItemPathname("/manifest.json"); + + staticFiles[manifestUrl] = () => { + if (Package.autoupdate) { + const { + AUTOUPDATE_VERSION = + Package.autoupdate.Autoupdate.autoupdateVersion + } = process.env; + + if (AUTOUPDATE_VERSION) { + newProgram.version = AUTOUPDATE_VERSION; + } + } + + if (typeof newProgram.version === "function") { + newProgram.version = newProgram.version(); + } + + return { + content: JSON.stringify(newProgram), + cacheable: false, + hash: newProgram.version, + type: "json" + }; + }; + + generateBoilerplateForArch(arch); + + // If there are any requests waiting on oldProgram.paused, let them + // continue now (using the new program). + if (oldProgram && + oldProgram.paused) { + oldProgram.unpause(); + } + }; + + const defaultOptionsForArch = { + 'web.cordova': { + runtimeConfigOverrides: { + // XXX We use absoluteUrl() here so that we serve https:// + // URLs to cordova clients if force-ssl is in use. If we were + // to use __meteor_runtime_config__.ROOT_URL instead of + // absoluteUrl(), then Cordova clients would immediately get a + // HCP setting their DDP_DEFAULT_CONNECTION_URL to + // http://example.meteor.com. This breaks the app, because + // force-ssl doesn't serve CORS headers on 302 + // redirects. (Plus it's undesirable to have clients + // connecting to http://example.meteor.com when force-ssl is + // in use.) 
+ DDP_DEFAULT_CONNECTION_URL: process.env.MOBILE_DDP_URL || + Meteor.absoluteUrl(), + ROOT_URL: process.env.MOBILE_ROOT_URL || + Meteor.absoluteUrl() + } + }, + + "web.browser": { + runtimeConfigOverrides: { + isModern: true, + } + }, + + "web.browser.legacy": { + runtimeConfigOverrides: { + isModern: false, + } + }, + }; + WebAppInternals.generateBoilerplate = function () { // This boilerplate will be served to the mobile devices when used with // Meteor/Cordova for the Hot-Code Push and since the file will be served by // the device's server, it is important to set the DDP url to the actual // Meteor server accepting DDP connections and not the device's file server. - var defaultOptionsForArch = { - 'web.cordova': { - runtimeConfigOverrides: { - // XXX We use absoluteUrl() here so that we serve https:// - // URLs to cordova clients if force-ssl is in use. If we were - // to use __meteor_runtime_config__.ROOT_URL instead of - // absoluteUrl(), then Cordova clients would immediately get a - // HCP setting their DDP_DEFAULT_CONNECTION_URL to - // http://example.meteor.com. This breaks the app, because - // force-ssl doesn't serve CORS headers on 302 - // redirects. (Plus it's undesirable to have clients - // connecting to http://example.meteor.com when force-ssl is - // in use.) - DDP_DEFAULT_CONNECTION_URL: process.env.MOBILE_DDP_URL || - Meteor.absoluteUrl(), - ROOT_URL: process.env.MOBILE_ROOT_URL || - Meteor.absoluteUrl() - } - } - }; - syncQueue.runTask(function() { - _.each(WebApp.clientPrograms, function (program, archName) { - boilerplateByArch[archName] = - WebAppInternals.generateBoilerplateInstance( - archName, program.manifest, - defaultOptionsForArch[archName]); - }); - - // Clear the memoized boilerplate cache. - memoizedBoilerplate = {}; - - // Configure CSS injection for the default arch - // XXX implement the CSS injection for all archs? 
- var cssFiles = boilerplateByArch[WebApp.defaultArch].baseData.css; - // Rewrite all CSS files (which are written directly to - - - - - -
- -

[Put Your License Agreement Here]

- -

Lorem -ipsum dolor sit amet, consectetur adipiscing elit. Donec ultricies ultricies -arcu id commodo. Ut dignissim ante nec urna elementum imperdiet. Praesent -pretium condimentum orci sit amet laoreet. Fusce condimentum tempor leo, vitae -tempor quam interdum et. Nulla ante dui, tincidunt sed porta nec, pulvinar in -felis. Pellentesque pellentesque ornare nibh id accumsan. In eu arcu nibh. -Aenean ut vulputate nisl. Proin at nibh lacinia urna elementum commodo. Donec semper -lorem quis neque fringilla quis facilisis dolor dignissim. Suspendisse a massa -in odio viverra vehicula. Curabitur id lectus purus, non bibendum arcu. Cras -dictum, turpis eget gravida condimentum, turpis ante varius leo, molestie -pulvinar mauris libero ut leo. Morbi libero diam, sollicitudin id interdum sit -amet, rhoncus eget turpis.

- -

Vestibulum arcu dui, suscipit -vitae suscipit laoreet, posuere eget risus. Sed vitae massa in justo vehicula -elementum sed at arcu. Pellentesque arcu ante, accumsan sed lacinia sed, -lacinia vel urna. Vivamus at ligula nulla, lobortis ultricies tortor. Vivamus -suscipit dolor non velit adipiscing venenatis. Nullam adipiscing accumsan -condimentum. Nullam ut lorem neque, et iaculis felis. Donec sed massa diam, et -feugiat velit. Pellentesque facilisis mi a odio ultrices facilisis. Nam porta, -lorem feugiat aliquet placerat, ipsum risus accumsan sem, euismod bibendum -velit dolor sed turpis. Suspendisse tincidunt, lectus congue rhoncus -sollicitudin, magna erat porttitor ante, eget sodales ante sem eu augue. Aenean -risus risus, mattis vitae pretium at, venenatis dictum est. Vestibulum -consectetur euismod magna vel sodales.

- -

Quisque eu urna lacus. Nunc eget -dictum odio. Pellentesque vel dolor leo. Praesent aliquet, erat vel fringilla -lobortis, ante nibh dapibus augue, sed semper ligula odio vitae nulla. Nullam -dictum gravida lectus nec lacinia. Vivamus fermentum ultricies lobortis. In -quis magna massa, ut commodo lectus. Donec nunc velit, gravida id euismod -luctus, luctus sed ante. Nunc elementum mollis sapien, ac interdum quam -fringilla eu. Pellentesque habitant morbi tristique senectus et netus et -malesuada fames ac turpis egestas. Aliquam dui enim, tempus vel aliquam sed, -pretium sed nisi. Fusce vitae magna nec nibh aliquam suscipit vel a felis. -Etiam gravida, lorem id laoreet gravida, nisl leo hendrerit dolor, quis porta -mauris enim vitae justo. Maecenas congue, felis at rhoncus vestibulum, nisl -urna pellentesque urna, id iaculis augue metus eget neque. Curabitur pretium -risus nec tortor vulputate et rhoncus ipsum gravida.

- -

Maecenas elementum volutpat arcu, -nec ultricies velit faucibus facilisis. Etiam sem lacus, mattis eget gravida -ut, rhoncus porta lorem. Vivamus volutpat dui sit amet risus mollis venenatis. -Quisque velit velit, condimentum id pharetra sed, varius vitae lectus. Praesent -nisi turpis, porttitor nec accumsan a, aliquam sit amet augue. Quisque -facilisis enim sed enim pretium tristique. Aliquam commodo varius mi, ut -iaculis nunc tempus non. Vivamus velit nunc, dictum a dignissim vitae, commodo -id metus. Integer volutpat, neque at ultrices dignissim, felis nulla commodo -orci, at auctor nisl quam nec sem. Pellentesque interdum pellentesque nulla, ac -fermentum tortor porttitor a. Etiam condimentum aliquet sapien vel adipiscing. Quisque -est velit, vulputate a iaculis ut, tempus in nibh. Cras consectetur quam nibh, -vel lobortis justo. Aenean a elit leo. Aenean mollis dolor a odio accumsan -mollis. Proin blandit neque erat. Integer lectus urna, volutpat quis vulputate -et, fermentum et ante. Maecenas vitae ultricies dui.

- -

Mauris accumsan varius luctus. In -hac habitasse platea dictumst. Mauris malesuada tempus nibh, porta accumsan -tortor tincidunt eu. Proin orci mauris, commodo id malesuada nec, adipiscing ut -sapien. Phasellus at dui vel nisi adipiscing auctor quis vel sem. Phasellus -fermentum, enim id sodales ullamcorper, magna sapien facilisis urna, ac -lobortis nunc augue ut ligula. Quisque vulputate nibh sed velit pharetra -dictum. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque -mi metus, vestibulum vitae dapibus vel, ultrices at tellus. Vivamus iaculis dui -a nisl faucibus eget congue velit gravida. Donec vel quam ligula. Nam magna -ante, vulputate id pharetra ac, dapibus commodo velit. Cras nisl nulla, congue -sit amet laoreet eget, cursus et lorem. Mauris diam tortor, porttitor vitae -fringilla id, iaculis sit amet eros. Donec sed enim eget felis fermentum -posuere. Pellentesque sodales feugiat dolor, vel molestie est adipiscing non. -Morbi tincidunt viverra felis ut hendrerit.

- -
- - - - diff --git a/scripts/windows/installer/WiXInstaller/Resources/License.rtf b/scripts/windows/installer/WiXInstaller/Resources/License.rtf deleted file mode 100755 index 1a18323618..0000000000 Binary files a/scripts/windows/installer/WiXInstaller/Resources/License.rtf and /dev/null differ diff --git a/scripts/windows/installer/WiXInstaller/Resources/LogoSide.png b/scripts/windows/installer/WiXInstaller/Resources/LogoSide.png deleted file mode 100755 index 70802b3a53..0000000000 Binary files a/scripts/windows/installer/WiXInstaller/Resources/LogoSide.png and /dev/null differ diff --git a/scripts/windows/installer/WiXInstaller/Resources/Product.ico b/scripts/windows/installer/WiXInstaller/Resources/Product.ico deleted file mode 100755 index e070f56d51..0000000000 Binary files a/scripts/windows/installer/WiXInstaller/Resources/Product.ico and /dev/null differ diff --git a/scripts/windows/installer/WiXInstaller/Resources/Theme_Meteor.wxl b/scripts/windows/installer/WiXInstaller/Resources/Theme_Meteor.wxl deleted file mode 100755 index c53a728bb2..0000000000 --- a/scripts/windows/installer/WiXInstaller/Resources/Theme_Meteor.wxl +++ /dev/null @@ -1,104 +0,0 @@ - - - Install [WixBundleName] - [WixBundleName] - Welcome to the [WixBundleName] Setup - Setup will install [WixBundleName] on your computer. During the wizard, you will be able to adjust product features and settings that fit best to your needs. - Version [WixBundleVersion] - - Are you sure you want to cancel? - Setup Help - - /passive | /quiet - displays minimal UI with no prompts or displays no UI and no prompts. By default UI and all prompts are displayed. - - /toconsole - similar to /quiet except that progress is redirected to console. - - /log log.txt - logs to a specific file. By default a log file is created in %TEMP%. 
- - &Close - I &agree to the license terms and conditions - &Options - &Install - &Cancel - < Back - Next > - Skip this step - Install - Install Meteor - - Version [WixBundleVersion] <a href="#">upgrade available</a> - Current user password: - - [WixBundleName] install directory - Please select the way you want [WixBundleName] to be installed on your computer. - Install location: - Install this application for: - For all users on this computer (recommended) - Only for me ([LogonUser]) - - [WixBundleName] registration - Sign in with your Meteor developer account or create a new one now. - Sign up for a new account - Sign in with an existing account - Email Address - Username - Username or Email - Password - Skip [WixBundleName] registration and continue with setup - - - - Setup Options - Adjust setup parameters in order to your needs. - Install location: - &Browse... - Database location: - B&rowse... - &OK - &Cancel - - - - - Installing [WixBundleName] - Repairing [WixBundleName] - Uninstalling [WixBundleName] - Please wait while Setup installs [WixBundleName] on your computer. - Please wait while Setup repairs [WixBundleName] installed on your computer. - Please wait while [WixBundleName] is removed from your computer. - Processing: - Initializing... - Initializing... - Initializing... - - &Cancel - [WixBundleName] Maintenance - The setup wizard allows you to repair or remove [WixBundleName] from your computer. If you have a [WixBundleName] application currently running, it is recommended to close it. Click Repair if you want to repair [WixBundleName], Uninstall to remove it or click Cancel to close this maintenance wizard. - &Repair - &Uninstall - &Close - Meteor is now installed on your computer. - Repair Successful - Meteor has been uninstalled. - -To get started, open a new command prompt and type "meteor". If you're running Windows 8.1, you may need to restart your machine. 
- -To learn more about Meteor, check out the <a href="https://www.meteor.com/try">Meteor tutorial</a> and read the <a href="https://docs.meteor.com">official documentation</a>. If you need help, ask a question on the official forums at <a href="https://forums.meteor.com">forums.meteor.com</a> or <a href="https://stackoverflow.com/questions/tagged/meteor">Stack Overflow</a>. - -Check out the <a href="https://github.com/meteor/meteor/wiki/Preview-of-Meteor-on-Windows">Wiki page on GitHub</a> to learn more about the Meteor Windows preview. - - Following non critical errors occured: - Setup has finished repairing [WixBundleName] installed on your computer. The application may be launched by selecting the installed icons. - We hope to see you again soon! - &Launch - You must restart your computer before you can use the software. - &Restart - &Close - Setup Failed - Repair Failed - Uninstall Failed - One or more issues caused the setup to fail. Please fix the issues and then retry setup. For more information see the <a href="#">log file</a>. - You must restart your computer to complete the rollback of the software. - &Restart - &Close - \ No newline at end of file diff --git a/scripts/windows/installer/WiXInstaller/Resources/Theme_Meteor.xml b/scripts/windows/installer/WiXInstaller/Resources/Theme_Meteor.xml deleted file mode 100755 index 0679a60653..0000000000 --- a/scripts/windows/installer/WiXInstaller/Resources/Theme_Meteor.xml +++ /dev/null @@ -1,113 +0,0 @@ - - - #(loc.Caption) - Segoe UI - Segoe UI - Tahoma - Segoe UI - Tahoma - Segoe UI - Segoe UI - Segoe UI - - - - - - #(loc.InstallLicenseLinkText) --> - - - - - - #(loc.InstallDirHeader) - #(loc.InstallDirMessage) - - #(loc.InstallScopeLabel) - - - - #(loc.InstallDirPathLabel) - - - - - - - - - - - - - #(loc.ExecuteProgressActionDataText) - - - - - - - -Before getting started, please take a moment to -create a Meteor developer account. 
- - -Registering tells us who's using Meteor for Windows and helps us make it better. -You'll also need your account to publish Atmosphere packages and get -notified about critical security updates when they happen. - - - - - - - - #(loc.RegisterEmail): - #(loc.RegisterUser): - #(loc.RegisterPass): - - [RegisterEmail] - [RegisterUser] - [RegisterPass] - - - - - #(loc.RegisterUsernameOrEmail): - #(loc.RegisterPass): - - [LoginUsernameOrEmail] - [LoginPass] - - - - - - - - - #(loc.ModifyHeader) - #(loc.ModifyInfo) - - - - - - [varSuccessHeader] - [varSuccessInfo] - - - - - - [varFailureHeader] - #(loc.FailureHyperlinkLogText) - - - #(loc.FailureRestartText) - - - - diff --git a/scripts/windows/installer/WiXInstaller/Resources/TopHeader.png b/scripts/windows/installer/WiXInstaller/Resources/TopHeader.png deleted file mode 100755 index dc92e121f9..0000000000 Binary files a/scripts/windows/installer/WiXInstaller/Resources/TopHeader.png and /dev/null differ diff --git a/scripts/windows/installer/WiXInstaller/Resources/dummy.file b/scripts/windows/installer/WiXInstaller/Resources/dummy.file deleted file mode 100755 index e69de29bb2..0000000000 diff --git a/scripts/windows/installer/WiXInstaller/Resources/meteor-logo-sm.jpg b/scripts/windows/installer/WiXInstaller/Resources/meteor-logo-sm.jpg deleted file mode 100644 index 91a38ea0be..0000000000 Binary files a/scripts/windows/installer/WiXInstaller/Resources/meteor-logo-sm.jpg and /dev/null differ diff --git a/scripts/windows/installer/WiXInstaller/SetupPackage.wixproj b/scripts/windows/installer/WiXInstaller/SetupPackage.wixproj deleted file mode 100755 index 1751b3538d..0000000000 --- a/scripts/windows/installer/WiXInstaller/SetupPackage.wixproj +++ /dev/null @@ -1,71 +0,0 @@ - - - - Debug - x86 - 3.8 - 7b569f5b-5d73-4e7b-be41-041a2f22a521 - 2.0 - Bundle - $(MSBuildExtensionsPath32)\Microsoft\WiX\v3.x\Wix.targets - $(MSBuildExtensionsPath)\Microsoft\WiX\v3.x\Wix.targets - SetupPackage - - - 
bin\$(Configuration)\$(Platform)\ - obj\$(Configuration)\ - True - True - False - InstallMeteor - - - bin\$(Configuration)\$(Platform)\ - obj\$(Configuration)\ - True - True - False - InstallMeteor - - - - - - - $(WixExtDir)\WixNetFxExtension.dll - WixNetFxExtension - - - ..\WiXBalExtension\build\WixBalExtensionExt.dll - WixBalExtensionExt - - - $(WixExtDir)\WixUtilExtension.dll - WixUtilExtension - - - - - - - - - - - - - - - - - copy "$(ProjectDir)$(OutDir)$(TargetFileName)" "$(ProjectDir)..\Release\" /Y - - - \ No newline at end of file diff --git a/scripts/windows/installer/WiXSDK/inc/BalBaseBootstrapperApplication.h b/scripts/windows/installer/WiXSDK/inc/BalBaseBootstrapperApplication.h deleted file mode 100644 index 7ab91a456d..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/BalBaseBootstrapperApplication.h +++ /dev/null @@ -1,651 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. 
-// -//------------------------------------------------------------------------------------------------- - -#include -#include - -#include "IBootstrapperEngine.h" -#include "IBootstrapperApplication.h" - -#include "balutil.h" -#include "balretry.h" - -class CBalBaseBootstrapperApplication : public IBootstrapperApplication -{ -public: // IUnknown - virtual STDMETHODIMP QueryInterface( - __in REFIID riid, - __out LPVOID *ppvObject - ) - { - if (!ppvObject) - { - return E_INVALIDARG; - } - - *ppvObject = NULL; - - if (::IsEqualIID(__uuidof(IBootstrapperApplication), riid)) - { - *ppvObject = static_cast(this); - } - else if (::IsEqualIID(IID_IUnknown, riid)) - { - *ppvObject = static_cast(this); - } - else // no interface for requested iid - { - return E_NOINTERFACE; - } - - AddRef(); - return S_OK; - } - - virtual STDMETHODIMP_(ULONG) AddRef() - { - return ::InterlockedIncrement(&this->m_cReferences); - } - - virtual STDMETHODIMP_(ULONG) Release() - { - long l = ::InterlockedDecrement(&this->m_cReferences); - if (0 < l) - { - return l; - } - - delete this; - return 0; - } - -public: // IBurnUserExperience - virtual STDMETHODIMP OnStartup() - { - return S_OK; - } - - virtual STDMETHODIMP_(int) OnShutdown() - { - return IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnSystemShutdown( - __in DWORD dwEndSession, - __in int /*nRecommendation*/ - ) - { - // Allow requests to shut down when critical or not applying. - if (ENDSESSION_CRITICAL & dwEndSession || !m_fApplying) - { - return IDOK; - } - - return IDCANCEL; - } - - virtual STDMETHODIMP_(int) OnDetectBegin( - __in BOOL /*fInstalled*/, - __in DWORD /*cPackages*/ - ) - { - return CheckCanceled() ? 
IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnDetectForwardCompatibleBundle( - __in_z LPCWSTR /*wzBundleId*/, - __in BOOTSTRAPPER_RELATION_TYPE /*relationType*/, - __in_z LPCWSTR /*wzBundleTag*/, - __in BOOL /*fPerMachine*/, - __in DWORD64 /*dw64Version*/, - __in int nRecommendation - ) - { - return CheckCanceled() ? IDCANCEL : nRecommendation; - } - - virtual STDMETHODIMP_(int) OnDetectUpdateBegin( - __in_z LPCWSTR /*wzUpdateLocation*/, - __in int nRecommendation - ) - { - return CheckCanceled() ? IDCANCEL : nRecommendation; - } - - virtual STDMETHODIMP_(void) OnDetectUpdateComplete( - __in HRESULT /*hrStatus*/, - __in_z_opt LPCWSTR /*wzUpdateLocation*/ - ) - { - } - - virtual STDMETHODIMP_(int) OnDetectPriorBundle( - __in_z LPCWSTR /*wzBundleId*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnDetectPackageBegin( - __in_z LPCWSTR /*wzPackageId*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnDetectRelatedBundle( - __in_z LPCWSTR /*wzBundleId*/, - __in BOOTSTRAPPER_RELATION_TYPE /*relationType*/, - __in_z LPCWSTR /*wzBundleTag*/, - __in BOOL /*fPerMachine*/, - __in DWORD64 /*dw64Version*/, - __in BOOTSTRAPPER_RELATED_OPERATION /*operation*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnDetectRelatedMsiPackage( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzProductCode*/, - __in BOOL /*fPerMachine*/, - __in DWORD64 /*dw64Version*/, - __in BOOTSTRAPPER_RELATED_OPERATION /*operation*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnDetectTargetMsiPackage( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzProductCode*/, - __in BOOTSTRAPPER_PACKAGE_STATE /*patchState*/ - ) - { - return CheckCanceled() ? 
IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnDetectMsiFeature( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzFeatureId*/, - __in BOOTSTRAPPER_FEATURE_STATE /*state*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(void) OnDetectPackageComplete( - __in_z LPCWSTR /*wzPackageId*/, - __in HRESULT /*hrStatus*/, - __in BOOTSTRAPPER_PACKAGE_STATE /*state*/ - ) - { - } - - virtual STDMETHODIMP_(void) OnDetectComplete( - __in HRESULT /*hrStatus*/ - ) - { - } - - virtual STDMETHODIMP_(int) OnPlanBegin( - __in DWORD /*cPackages*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnPlanRelatedBundle( - __in_z LPCWSTR /*wzBundleId*/, - __inout BOOTSTRAPPER_REQUEST_STATE* /*pRequestedState*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnPlanPackageBegin( - __in_z LPCWSTR /*wzPackageId*/, - __inout BOOTSTRAPPER_REQUEST_STATE* /*pRequestState*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnPlanTargetMsiPackage( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzProductCode*/, - __inout BOOTSTRAPPER_REQUEST_STATE* /*pRequestedState*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnPlanMsiFeature( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzFeatureId*/, - __inout BOOTSTRAPPER_FEATURE_STATE* /*pRequestedState*/ - ) - { - return CheckCanceled() ? 
IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(void) OnPlanPackageComplete( - __in_z LPCWSTR /*wzPackageId*/, - __in HRESULT /*hrStatus*/, - __in BOOTSTRAPPER_PACKAGE_STATE /*state*/, - __in BOOTSTRAPPER_REQUEST_STATE /*requested*/, - __in BOOTSTRAPPER_ACTION_STATE /*execute*/, - __in BOOTSTRAPPER_ACTION_STATE /*rollback*/ - ) - { - } - - virtual STDMETHODIMP_(void) OnPlanComplete( - __in HRESULT /*hrStatus*/ - ) - { - } - - virtual STDMETHODIMP_(int) OnApplyBegin() - { - m_fApplying = TRUE; - - m_dwProgressPercentage = 0; - m_dwOverallProgressPercentage = 0; - - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnElevate() - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnRegisterBegin() - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(void) OnRegisterComplete( - __in HRESULT /*hrStatus*/ - ) - { - return; - } - - virtual STDMETHODIMP_(void) OnUnregisterBegin() - { - return; - } - - virtual STDMETHODIMP_(void) OnUnregisterComplete( - __in HRESULT /*hrStatus*/ - ) - { - return; - } - - virtual STDMETHODIMP_(int) OnApplyComplete( - __in HRESULT /*hrStatus*/, - __in BOOTSTRAPPER_APPLY_RESTART restart - ) - { - m_fApplying = FALSE; - return BOOTSTRAPPER_APPLY_RESTART_REQUIRED == restart ? IDRESTART : CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnCacheBegin() - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnCachePackageBegin( - __in_z LPCWSTR /*wzPackageId*/, - __in DWORD /*cCachePayloads*/, - __in DWORD64 /*dw64PackageCacheSize*/ - ) - { - return CheckCanceled() ? 
IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnCacheAcquireBegin( - __in_z LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in BOOTSTRAPPER_CACHE_OPERATION /*operation*/, - __in_z LPCWSTR /*wzSource*/ - ) - { - BalRetryStartPackage(BALRETRY_TYPE_CACHE, wzPackageOrContainerId, wzPayloadId); - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnCacheAcquireProgress( - __in_z LPCWSTR /*wzPackageOrContainerId*/, - __in_z_opt LPCWSTR /*wzPayloadId*/, - __in DWORD64 /*dw64Progress*/, - __in DWORD64 /*dw64Total*/, - __in DWORD /*dwOverallPercentage*/ - ) - { - HRESULT hr = S_OK; - int nResult = IDNOACTION; - - // Send progress even though we don't update the numbers to at least give the caller an opportunity - // to cancel. - if (BOOTSTRAPPER_DISPLAY_EMBEDDED == m_display) - { - hr = m_pEngine->SendEmbeddedProgress(m_dwProgressPercentage, m_dwOverallProgressPercentage, &nResult); - BalExitOnFailure(hr, "Failed to send embedded cache progress."); - } - - LExit: - return FAILED(hr) ? IDERROR : CheckCanceled() ? IDCANCEL : nResult; - } - - virtual STDMETHODIMP_(int) OnCacheAcquireComplete( - __in_z LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in HRESULT hrStatus, - __in int nRecommendation - ) - { - int nResult = CheckCanceled() ? IDCANCEL : BalRetryEndPackage(BALRETRY_TYPE_CACHE, wzPackageOrContainerId, wzPayloadId, hrStatus); - return IDNOACTION == nResult ? nRecommendation : nResult; - } - - virtual STDMETHODIMP_(int) OnCacheVerifyBegin( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzPayloadId*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnCacheVerifyComplete( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzPayloadId*/, - __in HRESULT /*hrStatus*/, - __in int nRecommendation - ) - { - return CheckCanceled() ? 
IDCANCEL : nRecommendation; - } - - virtual STDMETHODIMP_(int) OnCachePackageComplete( - __in_z LPCWSTR /*wzPackageId*/, - __in HRESULT /*hrStatus*/, - __in int nRecommendation - ) - { - return CheckCanceled() ? IDCANCEL : nRecommendation; - } - - virtual STDMETHODIMP_(void) OnCacheComplete( - __in HRESULT /*hrStatus*/ - ) - { - } - - virtual STDMETHODIMP_(int) OnExecuteBegin( - __in DWORD /*cExecutingPackages*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnExecutePackageBegin( - __in_z LPCWSTR wzPackageId, - __in BOOL fExecute - ) - { - // Only track retry on execution (not rollback). - if (fExecute) - { - BalRetryStartPackage(BALRETRY_TYPE_EXECUTE, wzPackageId, NULL); - } - - m_fRollingBack = !fExecute; - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnExecutePatchTarget( - __in_z LPCWSTR /*wzPackageId*/, - __in_z LPCWSTR /*wzTargetProductCode*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnError( - __in BOOTSTRAPPER_ERROR_TYPE errorType, - __in_z LPCWSTR wzPackageId, - __in DWORD dwCode, - __in_z LPCWSTR /*wzError*/, - __in DWORD /*dwUIHint*/, - __in DWORD /*cData*/, - __in_ecount_z_opt(cData) LPCWSTR* /*rgwzData*/, - __in int nRecommendation - ) - { - BalRetryErrorOccurred(wzPackageId, dwCode); - - if (BOOTSTRAPPER_DISPLAY_FULL == m_display) - { - if (BOOTSTRAPPER_ERROR_TYPE_HTTP_AUTH_SERVER == errorType ||BOOTSTRAPPER_ERROR_TYPE_HTTP_AUTH_PROXY == errorType) - { - nRecommendation = IDTRYAGAIN; - } - } - - return CheckCanceled() ? 
IDCANCEL : nRecommendation; - } - - virtual STDMETHODIMP_(int) OnProgress( - __in DWORD dwProgressPercentage, - __in DWORD dwOverallProgressPercentage - ) - { - HRESULT hr = S_OK; - int nResult = IDNOACTION; - - m_dwProgressPercentage = dwProgressPercentage; - m_dwOverallProgressPercentage = dwOverallProgressPercentage; - - if (BOOTSTRAPPER_DISPLAY_EMBEDDED == m_display) - { - hr = m_pEngine->SendEmbeddedProgress(m_dwProgressPercentage, m_dwOverallProgressPercentage, &nResult); - BalExitOnFailure(hr, "Failed to send embedded overall progress."); - } - - LExit: - return FAILED(hr) ? IDERROR : CheckCanceled() ? IDCANCEL : nResult; - } - - virtual STDMETHODIMP_(int) OnDownloadPayloadBegin( - __in_z LPCWSTR /*wzPayloadId*/, - __in_z LPCWSTR /*wzPayloadFileName*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnDownloadPayloadComplete( - __in_z LPCWSTR /*wzPayloadId*/, - __in_z LPCWSTR /*wzPayloadFileName*/, - __in HRESULT /*hrStatus*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnExecuteProgress( - __in_z LPCWSTR /*wzPackageId*/, - __in DWORD /*dwProgressPercentage*/, - __in DWORD /*dwOverallProgressPercentage*/ - ) - { - HRESULT hr = S_OK; - int nResult = IDNOACTION; - - // Send progress even though we don't update the numbers to at least give the caller an opportunity - // to cancel. - if (BOOTSTRAPPER_DISPLAY_EMBEDDED == m_display) - { - hr = m_pEngine->SendEmbeddedProgress(m_dwProgressPercentage, m_dwOverallProgressPercentage, &nResult); - BalExitOnFailure(hr, "Failed to send embedded execute progress."); - } - - LExit: - return FAILED(hr) ? IDERROR : CheckCanceled() ? 
IDCANCEL : nResult; - } - - virtual STDMETHODIMP_(int) OnExecuteMsiMessage( - __in_z LPCWSTR /*wzPackageId*/, - __in INSTALLMESSAGE /*mt*/, - __in UINT /*uiFlags*/, - __in_z LPCWSTR /*wzMessage*/, - __in DWORD /*cData*/, - __in_ecount_z_opt(cData) LPCWSTR* /*rgwzData*/, - __in int nRecommendation - ) - { - return CheckCanceled() ? IDCANCEL : nRecommendation; - } - - virtual STDMETHODIMP_(int) OnExecuteFilesInUse( - __in_z LPCWSTR /*wzPackageId*/, - __in DWORD /*cFiles*/, - __in_ecount_z(cFiles) LPCWSTR* /*rgwzFiles*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - - virtual STDMETHODIMP_(int) OnExecutePackageComplete( - __in_z LPCWSTR wzPackageId, - __in HRESULT hrExitCode, - __in BOOTSTRAPPER_APPLY_RESTART /*restart*/, - __in int nRecommendation - ) - { - int nResult = CheckCanceled() ? IDCANCEL : CheckCanceled() ? IDCANCEL : BalRetryEndPackage(BALRETRY_TYPE_EXECUTE, wzPackageId, NULL, hrExitCode); - return IDNOACTION == nResult ? nRecommendation : nResult; - } - - virtual STDMETHODIMP_(void) OnExecuteComplete( - __in HRESULT /*hrStatus*/ - ) - { - } - - virtual STDMETHODIMP_(int) OnResolveSource( - __in_z LPCWSTR /*wzPackageOrContainerId*/, - __in_z_opt LPCWSTR /*wzPayloadId*/, - __in_z LPCWSTR /*wzLocalSource*/, - __in_z_opt LPCWSTR /*wzDownloadSource*/ - ) - { - return CheckCanceled() ? IDCANCEL : IDNOACTION; - } - -protected: - // - // PromptCancel - prompts the user to close (if not forced). - // - virtual BOOL PromptCancel( - __in HWND hWnd, - __in BOOL fForceCancel, - __in_z LPCWSTR wzMessage, - __in_z LPCWSTR wzCaption - ) - { - ::EnterCriticalSection(&m_csCanceled); - - // Only prompt the user to close if we have not canceled already. 
- if (!m_fCanceled) - { - if (fForceCancel) - { - m_fCanceled = TRUE; - } - else - { - m_fCanceled = (IDYES == ::MessageBoxW(hWnd, wzMessage, wzCaption, MB_YESNO | MB_ICONEXCLAMATION)); - } - } - - ::LeaveCriticalSection(&m_csCanceled); - - return m_fCanceled; - } - - // - // CheckCanceled - waits if the cancel dialog is up and checks to see if the user canceled the operation. - // - BOOL CheckCanceled() - { - ::EnterCriticalSection(&m_csCanceled); - ::LeaveCriticalSection(&m_csCanceled); - return m_fRollingBack ? FALSE : m_fCanceled; - } - - BOOL IsRollingBack() - { - return m_fRollingBack; - } - - BOOL IsCanceled() - { - return m_fCanceled; - } - - CBalBaseBootstrapperApplication( - __in IBootstrapperEngine* pEngine, - __in const BOOTSTRAPPER_COMMAND* pCommand, - __in DWORD dwRetryCount = 0, - __in DWORD dwRetryTimeout = 1000 - ) - { - m_cReferences = 1; - m_display = pCommand->display; - m_restart = pCommand->restart; - - pEngine->AddRef(); - m_pEngine = pEngine; - - ::InitializeCriticalSection(&m_csCanceled); - m_fCanceled = FALSE; - m_fApplying = FALSE; - m_fRollingBack = FALSE; - - BalRetryInitialize(dwRetryCount, dwRetryTimeout); - } - - virtual ~CBalBaseBootstrapperApplication() - { - BalRetryUninitialize(); - ::DeleteCriticalSection(&m_csCanceled); - - ReleaseNullObject(m_pEngine); - } - -private: - long m_cReferences; - BOOTSTRAPPER_DISPLAY m_display; - BOOTSTRAPPER_RESTART m_restart; - IBootstrapperEngine* m_pEngine; - - CRITICAL_SECTION m_csCanceled; - BOOL m_fCanceled; - BOOL m_fApplying; - BOOL m_fRollingBack; - - DWORD m_dwProgressPercentage; - DWORD m_dwOverallProgressPercentage; -}; diff --git a/scripts/windows/installer/WiXSDK/inc/IBootstrapperApplication.h b/scripts/windows/installer/WiXSDK/inc/IBootstrapperApplication.h deleted file mode 100644 index f4ae288f16..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/IBootstrapperApplication.h +++ /dev/null @@ -1,704 +0,0 @@ 
-//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// IBootstrapperApplication implemented by a bootstrapper application and used by bootstrapper engine. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -enum BOOTSTRAPPER_DISPLAY -{ - BOOTSTRAPPER_DISPLAY_UNKNOWN, - BOOTSTRAPPER_DISPLAY_EMBEDDED, - BOOTSTRAPPER_DISPLAY_NONE, - BOOTSTRAPPER_DISPLAY_PASSIVE, - BOOTSTRAPPER_DISPLAY_FULL, -}; - - -enum BOOTSTRAPPER_RESTART -{ - BOOTSTRAPPER_RESTART_UNKNOWN, - BOOTSTRAPPER_RESTART_NEVER, - BOOTSTRAPPER_RESTART_PROMPT, - BOOTSTRAPPER_RESTART_AUTOMATIC, - BOOTSTRAPPER_RESTART_ALWAYS, -}; - - -enum BOOTSTRAPPER_RESUME_TYPE -{ - BOOTSTRAPPER_RESUME_TYPE_NONE, - BOOTSTRAPPER_RESUME_TYPE_INVALID, // resume information is present but invalid - BOOTSTRAPPER_RESUME_TYPE_INTERRUPTED, // relaunched after an unexpected interruption - BOOTSTRAPPER_RESUME_TYPE_REBOOT_PENDING, // reboot has not taken place yet - BOOTSTRAPPER_RESUME_TYPE_REBOOT, // relaunched after reboot - BOOTSTRAPPER_RESUME_TYPE_SUSPEND, // relaunched after suspend - BOOTSTRAPPER_RESUME_TYPE_ARP, // launched from ARP -}; - - -enum BOOTSTRAPPER_ERROR_TYPE -{ - BOOTSTRAPPER_ERROR_TYPE_ELEVATE, // error occurred trying to elevate. - BOOTSTRAPPER_ERROR_TYPE_WINDOWS_INSTALLER, // error came from windows installer. - BOOTSTRAPPER_ERROR_TYPE_EXE_PACKAGE, // error came from an exe package. - BOOTSTRAPPER_ERROR_TYPE_HTTP_AUTH_SERVER, // error occurred trying to authenticate with HTTP server. - BOOTSTRAPPER_ERROR_TYPE_HTTP_AUTH_PROXY, // error occurred trying to authenticate with HTTP proxy. 
- BOOTSTRAPPER_ERROR_TYPE_APPLY, // error occurred during apply. -}; - - -enum BOOTSTRAPPER_RELATED_OPERATION -{ - BOOTSTRAPPER_RELATED_OPERATION_NONE, - BOOTSTRAPPER_RELATED_OPERATION_DOWNGRADE, - BOOTSTRAPPER_RELATED_OPERATION_MINOR_UPDATE, - BOOTSTRAPPER_RELATED_OPERATION_MAJOR_UPGRADE, - BOOTSTRAPPER_RELATED_OPERATION_REMOVE, - BOOTSTRAPPER_RELATED_OPERATION_INSTALL, - BOOTSTRAPPER_RELATED_OPERATION_REPAIR, -}; - - -enum BOOTSTRAPPER_CACHE_OPERATION -{ - BOOTSTRAPPER_CACHE_OPERATION_COPY, - BOOTSTRAPPER_CACHE_OPERATION_DOWNLOAD, - BOOTSTRAPPER_CACHE_OPERATION_EXTRACT, -}; - - -enum BOOTSTRAPPER_APPLY_RESTART -{ - BOOTSTRAPPER_APPLY_RESTART_NONE, - BOOTSTRAPPER_APPLY_RESTART_REQUIRED, - BOOTSTRAPPER_APPLY_RESTART_INITIATED, -}; - - -enum BOOTSTRAPPER_RELATION_TYPE -{ - BOOTSTRAPPER_RELATION_NONE, - BOOTSTRAPPER_RELATION_DETECT, - BOOTSTRAPPER_RELATION_UPGRADE, - BOOTSTRAPPER_RELATION_ADDON, - BOOTSTRAPPER_RELATION_PATCH, - BOOTSTRAPPER_RELATION_DEPENDENT, - BOOTSTRAPPER_RELATION_UPDATE, -}; - - -struct BOOTSTRAPPER_COMMAND -{ - BOOTSTRAPPER_ACTION action; - BOOTSTRAPPER_DISPLAY display; - BOOTSTRAPPER_RESTART restart; - - LPWSTR wzCommandLine; - int nCmdShow; - - BOOTSTRAPPER_RESUME_TYPE resumeType; - HWND hwndSplashScreen; - - // If this was run from a related bundle, specifies the relation type - BOOTSTRAPPER_RELATION_TYPE relationType; - BOOL fPassthrough; - - LPWSTR wzLayoutDirectory; -}; - - -DECLARE_INTERFACE_IID_(IBootstrapperApplication, IUnknown, "53C31D56-49C0-426B-AB06-099D717C67FE") -{ - // OnStartup - called when the engine is ready for the bootstrapper application to start. - // - STDMETHOD(OnStartup)() = 0; - - // OnShutdown - called after the bootstrapper application quits the engine. - // - // Return: - // IDRESTART instructs the engine to restart. The engine will not launch again after the machine - // is rebooted. Ignored if reboot was already initiated by OnExecutePackageComplete(). 
- // - // IDRELOAD_BOOTSTRAPPER instructs the engine to unload the bootstrapper application and restart - // the engine which will load the bootstrapper application again. Typically - // used to switch from a native bootstrapper application to a managed one. - // - // All other return codes are ignored. - STDMETHOD_(int, OnShutdown)() = 0; - - // OnSystemShutdown - called when the operating system is instructed to shutdown the machine. - // - // Return: - // IDCANCEL instructs the engine to block the shutdown of the machine. - // - // All other return codes allow the shutdown to commence. - STDMETHOD_(int, OnSystemShutdown)( - __in DWORD dwEndSession, - __in int nRecommendation - ) = 0; - - // OnDetectBegin - called when the engine begins detection. - // - // Return: - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnDetectBegin)( - __in BOOL fInstalled, - __in DWORD cPackages - ) = 0; - - // OnDetectForwardCompatibleBundle - called when the engine detects a forward compatible bundle. - // - // Return: - // IDOK instructs the engine to use the forward compatible bundle. - // - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to not use the forward compatible bundle. - STDMETHOD_(int, OnDetectForwardCompatibleBundle)( - __in_z LPCWSTR wzBundleId, - __in BOOTSTRAPPER_RELATION_TYPE relationType, - __in_z LPCWSTR wzBundleTag, - __in BOOL fPerMachine, - __in DWORD64 dw64Version, - __in int nRecommendation - ) = 0; - - // OnDetectUpdateBegin - called when the engine begins detection for bundle update. - // - // Return: - // IDOK instructs the engine to attempt update detection. - // - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to skip update detection. 
- STDMETHOD_(int, OnDetectUpdateBegin)( - __in_z LPCWSTR wzUpdateLocation, - __in int nRecommendation - ) = 0; - - // OnDetectUpdateComplete - called when the engine completes detection for bundle update. - // - // Remarks: - // wzUpdateLocation is null if no update was available. - STDMETHOD_(void, OnDetectUpdateComplete)( - __in HRESULT hrStatus, - __in_z_opt LPCWSTR wzUpdateLocation - ) = 0; - - // OnDetectRelatedBundle - called when the engine detects a related bundle. - // - // Return: - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnDetectRelatedBundle)( - __in_z LPCWSTR wzBundleId, - __in BOOTSTRAPPER_RELATION_TYPE relationType, - __in_z LPCWSTR wzBundleTag, - __in BOOL fPerMachine, - __in DWORD64 dw64Version, - __in BOOTSTRAPPER_RELATED_OPERATION operation - ) = 0; - - // OnDetectPackageBegin - called when the engine begins detecting a package. - // - // Return: - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnDetectPackageBegin)( - __in_z LPCWSTR wzPackageId - ) = 0; - - // OnDetectRelatedMsiPackage - called when the engine begins detects a related package. - // - // Return: - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnDetectRelatedMsiPackage)( - __in_z LPCWSTR wzPackageId, - __in_z LPCWSTR wzProductCode, - __in BOOL fPerMachine, - __in DWORD64 dw64Version, - __in BOOTSTRAPPER_RELATED_OPERATION operation - ) = 0; - - // OnDetectTargetMsiPackage - called when the engine detects a target MSI package for - // an MSP package. - // - // Return: - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to continue. 
- STDMETHOD_(int, OnDetectTargetMsiPackage)( - __in_z LPCWSTR wzPackageId, - __in_z LPCWSTR wzProductCode, - __in BOOTSTRAPPER_PACKAGE_STATE patchState - ) = 0; - - // OnDetectMsiFeature - called when the engine detects a feature in an MSI package. - // - // Return: - // IDCANCEL instructs the engine to stop detection. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnDetectMsiFeature)( - __in_z LPCWSTR wzPackageId, - __in_z LPCWSTR wzFeatureId, - __in BOOTSTRAPPER_FEATURE_STATE state - ) = 0; - - // OnDetectPackageComplete - called after the engine detects a package. - // - STDMETHOD_(void, OnDetectPackageComplete)( - __in_z LPCWSTR wzPackageId, - __in HRESULT hrStatus, - __in BOOTSTRAPPER_PACKAGE_STATE state - ) = 0; - - // OnDetectPackageComplete - called after the engine completes detection. - // - STDMETHOD_(void, OnDetectComplete)( - __in HRESULT hrStatus - ) = 0; - - // OnPlanBegin - called when the engine begins planning. - // - // Return: - // IDCANCEL instructs the engine to stop planning. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnPlanBegin)( - __in DWORD cPackages - ) = 0; - - // OnPlanRelatedBundle - called when the engine begins planning a related bundle. - // - // Return: - // IDCANCEL instructs the engine to stop planning. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnPlanRelatedBundle)( - __in_z LPCWSTR wzBundleId, - __inout BOOTSTRAPPER_REQUEST_STATE* pRequestedState - ) = 0; - - // OnPlanPackageBegin - called when the engine begins planning a package. - // - // Return: - // IDCANCEL instructs the engine to stop planning. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnPlanPackageBegin)( - __in_z LPCWSTR wzPackageId, - __inout BOOTSTRAPPER_REQUEST_STATE* pRequestedState - ) = 0; - - // OnPlanTargetMsiPackage - called when the engine plans an MSP package - // to apply to an MSI package. 
- // - // Return: - // IDCANCEL instructs the engine to stop planning. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnPlanTargetMsiPackage)( - __in_z LPCWSTR wzPackageId, - __in_z LPCWSTR wzProductCode, - __inout BOOTSTRAPPER_REQUEST_STATE* pRequestedState - ) = 0; - - // OnPlanMsiFeature - called when the engine plans a feature in an - // MSI package. - // - // Return: - // IDCANCEL instructs the engine to stop planning. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnPlanMsiFeature)( - __in_z LPCWSTR wzPackageId, - __in_z LPCWSTR wzFeatureId, - __inout BOOTSTRAPPER_FEATURE_STATE* pRequestedState - ) = 0; - - // OnPlanPackageComplete - called after the engine plans a package. - // - STDMETHOD_(void, OnPlanPackageComplete)( - __in_z LPCWSTR wzPackageId, - __in HRESULT hrStatus, - __in BOOTSTRAPPER_PACKAGE_STATE state, - __in BOOTSTRAPPER_REQUEST_STATE requested, - __in BOOTSTRAPPER_ACTION_STATE execute, - __in BOOTSTRAPPER_ACTION_STATE rollback - ) = 0; - - // OnPlanComplete - called when the engine completes planning. - // - STDMETHOD_(void, OnPlanComplete)( - __in HRESULT hrStatus - ) = 0; - - // OnApplyBegin - called when the engine begins applying the plan. - // - // Return: - // IDCANCEL instructs the engine to stop applying. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnApplyBegin)() = 0; - - // OnElevate - called before the engine displays an elevation prompt. - // Will only happen once per execution of the engine. - // - // Return: - // IDCANCEL instructs the engine to abort elevation and stop applying. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnElevate)() = 0; - - // OnProgress - called when the engine makes progress. - // - // Return: - // IDCANCEL instructs the engine to stop applying. - // - // IDNOACTION instructs the engine to continue. 
- STDMETHOD_(int, OnProgress)( - __in DWORD dwProgressPercentage, - __in DWORD dwOverallPercentage - ) = 0; - - // OnError - called when the engine encounters an error. - // - // Return: - // uiFlags is a combination of valid ID* return values appropriate for - // the error. - // - // IDNOACTION instructs the engine to pass the error through to default - // handling which usually results in the apply failing. - STDMETHOD_(int, OnError)( - __in BOOTSTRAPPER_ERROR_TYPE errorType, - __in_z_opt LPCWSTR wzPackageId, - __in DWORD dwCode, - __in_z_opt LPCWSTR wzError, - __in DWORD uiFlags, - __in DWORD cData, - __in_ecount_z_opt(cData) LPCWSTR* rgwzData, - __in int nRecommendation - ) = 0; - - // OnRegisterBegin - called when the engine registers the bundle. - // - // Return: - // IDCANCEL instructs the engine to stop applying. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnRegisterBegin)() = 0; - - // OnRegisterComplete - called when the engine registration is - // complete. - // - STDMETHOD_(void, OnRegisterComplete)( - __in HRESULT hrStatus - ) = 0; - - // OnCacheBegin - called when the engine begins caching. - // - // Return: - // IDCANCEL instructs the engine to stop caching. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnCacheBegin)() = 0; - - // OnCachePackageBegin - called when the engine begins caching - // a package. - // - // Return: - // IDCANCEL instructs the engine to stop caching. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnCachePackageBegin)( - __in_z LPCWSTR wzPackageId, - __in DWORD cCachePayloads, - __in DWORD64 dw64PackageCacheSize - ) = 0; - - // OnCacheAcquireBegin - called when the engine begins copying or - // downloading a payload to the working folder. - // - // Return: - // IDCANCEL instructs the engine to stop caching. - // - // IDNOACTION instructs the engine to continue. 
- STDMETHOD_(int, OnCacheAcquireBegin)( - __in_z_opt LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in BOOTSTRAPPER_CACHE_OPERATION operation, - __in_z LPCWSTR wzSource - ) = 0; - - // OnCacheAcquireProgress - called when the engine makes progresss copying - // or downloading a payload to the working folder. - // - // Return: - // IDCANCEL instructs the engine to stop caching. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnCacheAcquireProgress)( - __in_z_opt LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in DWORD64 dw64Progress, - __in DWORD64 dw64Total, - __in DWORD dwOverallPercentage - ) = 0; - - // OnResolveSource - called when a payload or container cannot be found locally. - // - // Parameters: - // wzPayloadId will be NULL when resolving a container. - // wzDownloadSource will be NULL if the container or payload does not provide a DownloadURL. - // - // Return: - // IDRETRY instructs the engine to try the local source again. - // - // IDDOWNLOAD instructs the engine to try the download source. - // - // All other return codes result in an error. - // - // Notes: - // It is expected the BA may call IBurnCore::SetLocalSource() or IBurnCore::SetDownloadSource() - // to update the source location before returning IDRETRY or IDDOWNLOAD. - STDMETHOD_(int, OnResolveSource)( - __in_z LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in_z LPCWSTR wzLocalSource, - __in_z_opt LPCWSTR wzDownloadSource - ) = 0; - - // OnCacheAcquireComplete - called after the engine copied or downloaded - // a payload to the working folder. - // - // Return: - // IDRETRY instructs the engine to try the copy or download of the payload again. - // - // All other return codes are ignored. 
- STDMETHOD_(int, OnCacheAcquireComplete)( - __in_z_opt LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in HRESULT hrStatus, - __in int nRecommendation - ) = 0; - - // OnCacheVerifyBegin - called when the engine begins to verify then copy - // a payload or container to the package cache folder. - // - // Return: - // IDCANCEL instructs the engine to stop caching. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnCacheVerifyBegin)( - __in_z_opt LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId - ) = 0; - - // OnCacheVerifyComplete - called after the engine verifies and copies - // a payload or container to the package cache folder. - // - // Return: - // IDRETRY instructs the engine to try the verification of the payload again. - // Ignored if hrStatus is success. - // - // IDTRYAGAIN instructs the engine to acquire the payload again. Ignored if - // hrStatus is success. - // - // All other return codes are ignored. - STDMETHOD_(int, OnCacheVerifyComplete)( - __in_z_opt LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in HRESULT hrStatus, - __in int nRecommendation - ) = 0; - - // OnCachePackageComplete - called after the engine attempts to copy or download all - // payloads of a package into the package cache folder. - // - // Return: - // IDIGNORE instructs the engine to ignore non-vital package failures and continue with the - // caching. Ignored if hrStatus is a success or the package is vital. - // - // IDRETRY instructs the engine to try the acquisition and verification of the package - // again. Ignored if hrStatus is a success. - // - // All other return codes are ignored. - STDMETHOD_(int, OnCachePackageComplete)( - __in_z LPCWSTR wzPackageId, - __in HRESULT hrStatus, - __in int nRecommendation - ) = 0; - - // OnCacheComplete - called when the engine caching is complete. 
- // - STDMETHOD_(void, OnCacheComplete)( - __in HRESULT hrStatus - ) = 0; - - // OnExecuteBegin - called when the engine begins executing the plan. - // - // Return: - // IDCANCEL instructs the engine to stop applying. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnExecuteBegin)( - __in DWORD cExecutingPackages - ) = 0; - - // OnExecuteBegin - called when the engine begins executing a package. - // - // Return: - // IDCANCEL instructs the engine to stop applying. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnExecutePackageBegin)( - __in_z LPCWSTR wzPackageId, - __in BOOL fExecute - ) = 0; - - // OnExecutePatchTarget - called when the engine executes one or more patches targeting - // a product. - // - // Return: - // IDCANCEL instructs the engine to stop applying. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnExecutePatchTarget)( - __in_z LPCWSTR wzPackageId, - __in_z LPCWSTR wzTargetProductCode - ) = 0; - - // OnExecuteProgress - called when the engine makes progress executing a package. - // - // Return: - // IDCANCEL instructs the engine to stop applying. - // - // IDNOACTION instructs the engine to continue. - STDMETHOD_(int, OnExecuteProgress)( - __in_z LPCWSTR wzPackageId, - __in DWORD dwProgressPercentage, - __in DWORD dwOverallPercentage - ) = 0; - - // OnExecuteMsiMessage - called when the engine receives an MSI package message. - // - // Return: - // uiFlags is a combination of valid ID* return values appropriate for - // the message. - // - // IDNOACTION instructs the engine to pass the message through to default - // handling which usually results in the execution continuing. 
- STDMETHOD_(int, OnExecuteMsiMessage)( - __in_z LPCWSTR wzPackageId, - __in INSTALLMESSAGE mt, - __in UINT uiFlags, - __in_z LPCWSTR wzMessage, - __in DWORD cData, - __in_ecount_z_opt(cData) LPCWSTR* rgwzData, - __in int nRecommendation - ) = 0; - - // OnExecuteFilesInUse - called when the engine encounters files in use while - // executing a package. - // - // Return: - // IDOK instructs the engine to let the Restart Manager attempt to close the - // applications to avoid a restart. - // - // IDCANCEL instructs the engine to abort the execution and start rollback. - // - // IDIGNORE instructs the engine to ignore the running applications. A restart will be - // required. - // - // IDRETRY instructs the engine to check if the applications are still running again. - // - // IDNOACTION is equivalent to ignoring the running applications. A restart will be - // required. - STDMETHOD_(int, OnExecuteFilesInUse)( - __in_z LPCWSTR wzPackageId, - __in DWORD cFiles, - __in_ecount_z(cFiles) LPCWSTR* rgwzFiles - ) = 0; - - // OnExecutePackageComplete - called when a package execution is complete. - // - // Parameters: - // restart will indicate whether this package requires a reboot or initiated the reboot already. - // - // Return: - // IDIGNORE instructs the engine to ignore non-vital package failures and continue with the - // install. Ignored if hrStatus is a success or the package is vital. - // - // IDRETRY instructs the engine to try the execution of the package again. Ignored if hrStatus - // is a success. - // - // IDRESTART instructs the engine to stop processing the chain and restart. The engine will - // launch again after the machine is restarted. - // - // IDSUSPEND instructs the engine to stop processing the chain and suspend the current state. - // - // All other return codes are ignored. 
- STDMETHOD_(int, OnExecutePackageComplete)( - __in_z LPCWSTR wzPackageId, - __in HRESULT hrStatus, - __in BOOTSTRAPPER_APPLY_RESTART restart, - __in int nRecommendation - ) = 0; - - // OnExecuteComplete - called when the engine execution is complete. - // - STDMETHOD_(void, OnExecuteComplete)( - __in HRESULT hrStatus - ) = 0; - - // OnUnregisterBegin - called when the engine unregisters the bundle. - // - STDMETHOD_(void, OnUnregisterBegin)() = 0; - - // OnUnregisterComplete - called when the engine unregistration is complete. - // - STDMETHOD_(void, OnUnregisterComplete)( - __in HRESULT hrStatus - ) = 0; - - // OnApplyComplete - called after the plan has been applied. - // - // Parameters: - // restart will indicate whether any package required a reboot or initiated the reboot already. - // - // Return: - // IDRESTART instructs the engine to restart. The engine will not launch again after the machine - // is rebooted. Ignored if reboot was already initiated by OnExecutePackageComplete(). - // - // All other return codes are ignored. - STDMETHOD_(int, OnApplyComplete)( - __in HRESULT hrStatus, - __in BOOTSTRAPPER_APPLY_RESTART restart - ) = 0; -}; - - -extern "C" typedef HRESULT (WINAPI *PFN_BOOTSTRAPPER_APPLICATION_CREATE)( - __in IBootstrapperEngine* pEngine, - __in const BOOTSTRAPPER_COMMAND* pCommand, - __out IBootstrapperApplication** ppApplication - ); - -extern "C" typedef void (WINAPI *PFN_BOOTSTRAPPER_APPLICATION_DESTROY)(); diff --git a/scripts/windows/installer/WiXSDK/inc/IBootstrapperBAFunction.h b/scripts/windows/installer/WiXSDK/inc/IBootstrapperBAFunction.h deleted file mode 100644 index c0c9b3b94f..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/IBootstrapperBAFunction.h +++ /dev/null @@ -1,29 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). 
-// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - -#include - -#include "IBootstrapperEngine.h" - -interface IBootstrapperBAFunction -{ - STDMETHOD(OnDetect)() = 0; - STDMETHOD(OnDetectComplete)() = 0; - STDMETHOD(OnPlan)() = 0; - STDMETHOD(OnPlanComplete)() = 0; -}; - -extern "C" typedef HRESULT (WINAPI *PFN_BOOTSTRAPPER_BA_FUNCTION_CREATE)( - __in IBootstrapperEngine* pEngine, - __in HMODULE hModule, - __out IBootstrapperBAFunction** ppBAFunction - ); - diff --git a/scripts/windows/installer/WiXSDK/inc/IBootstrapperEngine.h b/scripts/windows/installer/WiXSDK/inc/IBootstrapperEngine.h deleted file mode 100644 index 604d4b9781..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/IBootstrapperEngine.h +++ /dev/null @@ -1,220 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// IBoostrapperEngine implemented by engine and used by bootstrapper application. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - -#define IDERROR -1 -#define IDNOACTION 0 - -#define IDDOWNLOAD 101 // Only valid as a return code from OnResolveSource() to instruct the engine to use the download source. 
-#define IDRESTART 102 -#define IDSUSPEND 103 -#define IDRELOAD_BOOTSTRAPPER 104 - -enum BOOTSTRAPPER_ACTION -{ - BOOTSTRAPPER_ACTION_UNKNOWN, - BOOTSTRAPPER_ACTION_HELP, - BOOTSTRAPPER_ACTION_LAYOUT, - BOOTSTRAPPER_ACTION_UNINSTALL, - BOOTSTRAPPER_ACTION_INSTALL, - BOOTSTRAPPER_ACTION_MODIFY, - BOOTSTRAPPER_ACTION_REPAIR, - BOOTSTRAPPER_ACTION_UPDATE_REPLACE, - BOOTSTRAPPER_ACTION_UPDATE_REPLACE_EMBEDDED, -}; - -enum BOOTSTRAPPER_ACTION_STATE -{ - BOOTSTRAPPER_ACTION_STATE_NONE, - BOOTSTRAPPER_ACTION_STATE_UNINSTALL, - BOOTSTRAPPER_ACTION_STATE_INSTALL, - BOOTSTRAPPER_ACTION_STATE_ADMIN_INSTALL, - BOOTSTRAPPER_ACTION_STATE_MODIFY, - BOOTSTRAPPER_ACTION_STATE_REPAIR, - BOOTSTRAPPER_ACTION_STATE_MINOR_UPGRADE, - BOOTSTRAPPER_ACTION_STATE_MAJOR_UPGRADE, - BOOTSTRAPPER_ACTION_STATE_PATCH, -}; - -enum BOOTSTRAPPER_PACKAGE_STATE -{ - BOOTSTRAPPER_PACKAGE_STATE_UNKNOWN, - BOOTSTRAPPER_PACKAGE_STATE_OBSOLETE, - BOOTSTRAPPER_PACKAGE_STATE_ABSENT, - BOOTSTRAPPER_PACKAGE_STATE_CACHED, - BOOTSTRAPPER_PACKAGE_STATE_PRESENT, - BOOTSTRAPPER_PACKAGE_STATE_SUPERSEDED, -}; - -enum BOOTSTRAPPER_REQUEST_STATE -{ - BOOTSTRAPPER_REQUEST_STATE_NONE, - BOOTSTRAPPER_REQUEST_STATE_FORCE_ABSENT, - BOOTSTRAPPER_REQUEST_STATE_ABSENT, - BOOTSTRAPPER_REQUEST_STATE_CACHE, - BOOTSTRAPPER_REQUEST_STATE_PRESENT, - BOOTSTRAPPER_REQUEST_STATE_REPAIR, -}; - -enum BOOTSTRAPPER_FEATURE_STATE -{ - BOOTSTRAPPER_FEATURE_STATE_UNKNOWN, - BOOTSTRAPPER_FEATURE_STATE_ABSENT, - BOOTSTRAPPER_FEATURE_STATE_ADVERTISED, - BOOTSTRAPPER_FEATURE_STATE_LOCAL, - BOOTSTRAPPER_FEATURE_STATE_SOURCE, -}; - -enum BOOTSTRAPPER_FEATURE_ACTION -{ - BOOTSTRAPPER_FEATURE_ACTION_NONE, - BOOTSTRAPPER_FEATURE_ACTION_ADDLOCAL, - BOOTSTRAPPER_FEATURE_ACTION_ADDSOURCE, - BOOTSTRAPPER_FEATURE_ACTION_ADDDEFAULT, - BOOTSTRAPPER_FEATURE_ACTION_REINSTALL, - BOOTSTRAPPER_FEATURE_ACTION_ADVERTISE, - BOOTSTRAPPER_FEATURE_ACTION_REMOVE, -}; - -enum BOOTSTRAPPER_LOG_LEVEL -{ - BOOTSTRAPPER_LOG_LEVEL_NONE, // turns off report (only valid for 
XXXSetLevel()) - BOOTSTRAPPER_LOG_LEVEL_STANDARD, // written if reporting is on - BOOTSTRAPPER_LOG_LEVEL_VERBOSE, // written only if verbose reporting is on - BOOTSTRAPPER_LOG_LEVEL_DEBUG, // reporting useful when debugging code - BOOTSTRAPPER_LOG_LEVEL_ERROR, // always gets reported, but can never be specified -}; - -enum BOOTSTRAPPER_UPDATE_HASH_TYPE -{ - BOOTSTRAPPER_UPDATE_HASH_TYPE_NONE, - BOOTSTRAPPER_UPDATE_HASH_TYPE_SHA1, -}; - - -DECLARE_INTERFACE_IID_(IBootstrapperEngine, IUnknown, "6480D616-27A0-44D7-905B-81512C29C2FB") -{ - STDMETHOD(GetPackageCount)( - __out DWORD* pcPackages - ) = 0; - - STDMETHOD(GetVariableNumeric)( - __in_z LPCWSTR wzVariable, - __out LONGLONG* pllValue - ) = 0; - - STDMETHOD(GetVariableString)( - __in_z LPCWSTR wzVariable, - __out_ecount_opt(*pcchValue) LPWSTR wzValue, - __inout DWORD* pcchValue - ) = 0; - - STDMETHOD(GetVariableVersion)( - __in_z LPCWSTR wzVariable, - __out DWORD64* pqwValue - ) = 0; - - STDMETHOD(FormatString)( - __in_z LPCWSTR wzIn, - __out_ecount_opt(*pcchOut) LPWSTR wzOut, - __inout DWORD* pcchOut - ) = 0; - - STDMETHOD(EscapeString)( - __in_z LPCWSTR wzIn, - __out_ecount_opt(*pcchOut) LPWSTR wzOut, - __inout DWORD* pcchOut - ) = 0; - - STDMETHOD(EvaluateCondition)( - __in_z LPCWSTR wzCondition, - __out BOOL* pf - ) = 0; - - STDMETHOD(Log)( - __in BOOTSTRAPPER_LOG_LEVEL level, - __in_z LPCWSTR wzMessage - ) = 0; - - STDMETHOD(SendEmbeddedError)( - __in DWORD dwErrorCode, - __in_z_opt LPCWSTR wzMessage, - __in DWORD dwUIHint, - __out int* pnResult - ) = 0; - - STDMETHOD(SendEmbeddedProgress)( - __in DWORD dwProgressPercentage, - __in DWORD dwOverallProgressPercentage, - __out int* pnResult - ) = 0; - - STDMETHOD(SetUpdate)( - __in_z_opt LPCWSTR wzLocalSource, - __in_z_opt LPCWSTR wzDownloadSource, - __in DWORD64 qwSize, - __in BOOTSTRAPPER_UPDATE_HASH_TYPE hashType, - __in_bcount_opt(cbHash) BYTE* rgbHash, - __in DWORD cbHash - ) = 0; - - STDMETHOD(SetLocalSource)( - __in_z LPCWSTR wzPackageOrContainerId, - 
__in_z_opt LPCWSTR wzPayloadId, - __in_z LPCWSTR wzPath - ) = 0; - - STDMETHOD(SetDownloadSource)( - __in_z LPCWSTR wzPackageOrContainerId, - __in_z_opt LPCWSTR wzPayloadId, - __in_z LPCWSTR wzUrl, - __in_z_opt LPWSTR wzUser, - __in_z_opt LPWSTR wzPassword - ) = 0; - - STDMETHOD(SetVariableNumeric)( - __in_z LPCWSTR wzVariable, - __in LONGLONG llValue - ) = 0; - - STDMETHOD(SetVariableString)( - __in_z LPCWSTR wzVariable, - __in_z_opt LPCWSTR wzValue - ) = 0; - - STDMETHOD(SetVariableVersion)( - __in_z LPCWSTR wzVariable, - __in DWORD64 qwValue - ) = 0; - - STDMETHOD(CloseSplashScreen)() = 0; - - STDMETHOD(Detect)() = 0; - - STDMETHOD(Plan)( - __in BOOTSTRAPPER_ACTION action - ) = 0; - - STDMETHOD(Elevate)( - __in_opt HWND hwndParent - ) = 0; - - STDMETHOD(Apply)( - __in_opt HWND hwndParent - ) = 0; - - STDMETHOD(Quit)( - __in DWORD dwExitCode - ) = 0; -}; diff --git a/scripts/windows/installer/WiXSDK/inc/aclutil.h b/scripts/windows/installer/WiXSDK/inc/aclutil.h deleted file mode 100644 index ee688ec328..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/aclutil.h +++ /dev/null @@ -1,151 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Access Control List helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#include -#include - -#define ReleaseSid(x) if (x) { AclFreeSid(x); } -#define ReleaseNullSid(x) if (x) { AclFreeSid(x); x = NULL; } - -#ifdef __cplusplus -extern "C" { -#endif - -// structs -struct ACL_ACCESS -{ - BOOL fDenyAccess; - DWORD dwAccessMask; - - // TODO: consider using a union - LPCWSTR pwzAccountName; // NOTE: the last three items in this structure are ignored if this is not NULL - - SID_IDENTIFIER_AUTHORITY sia; // used if pwzAccountName is NULL - BYTE nSubAuthorityCount; - DWORD nSubAuthority[8]; -}; - -struct ACL_ACE -{ - DWORD dwFlags; - DWORD dwMask; - PSID psid; -}; - - -// functions -HRESULT DAPI AclCheckAccess( - __in HANDLE hToken, - __in ACL_ACCESS* paa - ); -HRESULT DAPI AclCheckAdministratorAccess( - __in HANDLE hToken - ); -HRESULT DAPI AclCheckLocalSystemAccess( - __in HANDLE hToken - ); - -HRESULT DAPI AclGetWellKnownSid( - __in WELL_KNOWN_SID_TYPE wkst, - __deref_out PSID* ppsid - ); -HRESULT DAPI AclGetAccountSid( - __in_opt LPCWSTR wzSystem, - __in_z LPCWSTR wzAccount, - __deref_out PSID* ppsid - ); -HRESULT DAPI AclGetAccountSidString( - __in_z LPCWSTR wzSystem, - __in_z LPCWSTR wzAccount, - __deref_out_z LPWSTR* ppwzSid - ); - -HRESULT DAPI AclCreateDacl( - __in_ecount(cDeny) ACL_ACE rgaaDeny[], - __in DWORD cDeny, - __in_ecount(cAllow) ACL_ACE rgaaAllow[], - __in DWORD cAllow, - __deref_out ACL** ppAcl - ); -HRESULT DAPI AclAddToDacl( - __in ACL* pAcl, - __in_ecount_opt(cDeny) const ACL_ACE rgaaDeny[], - __in DWORD cDeny, - __in_ecount_opt(cAllow) const ACL_ACE rgaaAllow[], - __in DWORD cAllow, - __deref_out ACL** ppAclNew - ); -HRESULT DAPI AclMergeDacls( - __in const ACL* pAcl1, - __in const ACL* pAcl2, - __deref_out ACL** ppAclNew - ); -HRESULT DAPI AclCreateDaclOld( - __in_ecount(cAclAccesses) ACL_ACCESS* paa, - __in DWORD cAclAccesses, - __deref_out ACL** ppAcl - ); -HRESULT DAPI AclCreateSecurityDescriptor( - 
__in_ecount(cAclAccesses) ACL_ACCESS* paa, - __in DWORD cAclAccesses, - __deref_out SECURITY_DESCRIPTOR** ppsd - ); -HRESULT DAPI AclCreateSecurityDescriptorFromDacl( - __in ACL* pACL, - __deref_out SECURITY_DESCRIPTOR** ppsd - ); -HRESULT __cdecl AclCreateSecurityDescriptorFromString( - __deref_out SECURITY_DESCRIPTOR** ppsd, - __in_z __format_string LPCWSTR wzSddlFormat, - ... - ); -HRESULT DAPI AclDuplicateSecurityDescriptor( - __in SECURITY_DESCRIPTOR* psd, - __deref_out SECURITY_DESCRIPTOR** ppsd - ); -HRESULT DAPI AclGetSecurityDescriptor( - __in_z LPCWSTR wzObject, - __in SE_OBJECT_TYPE sot, - __in SECURITY_INFORMATION securityInformation, - __deref_out SECURITY_DESCRIPTOR** ppsd - ); -HRESULT DAPI AclSetSecurityWithRetry( - __in_z LPCWSTR wzObject, - __in SE_OBJECT_TYPE sot, - __in SECURITY_INFORMATION securityInformation, - __in_opt PSID psidOwner, - __in_opt PSID psidGroup, - __in_opt PACL pDacl, - __in_opt PACL pSacl, - __in DWORD cRetry, - __in DWORD dwWaitMilliseconds - ); - -HRESULT DAPI AclFreeSid( - __in PSID psid - ); -HRESULT DAPI AclFreeDacl( - __in ACL* pACL - ); -HRESULT DAPI AclFreeSecurityDescriptor( - __in SECURITY_DESCRIPTOR* psd - ); - -HRESULT DAPI AclAddAdminToSecurityDescriptor( - __in SECURITY_DESCRIPTOR* pSecurity, - __deref_out SECURITY_DESCRIPTOR** ppSecurityNew - ); -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/apuputil.h b/scripts/windows/installer/WiXSDK/inc/apuputil.h deleted file mode 100644 index 6bcaecca16..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/apuputil.h +++ /dev/null @@ -1,92 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. 
-// -// -// -// Header for Application Update helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseApupChain(p) if (p) { ApupFreeChain(p); p = NULL; } -#define ReleaseNullApupChain(p) if (p) { ApupFreeChain(p); p = NULL; } - - -const LPCWSTR APPLICATION_SYNDICATION_NAMESPACE = L"http://appsyndication.org/2006/appsyn"; - -enum APUP_HASH_ALGORITHM -{ - APUP_HASH_ALGORITHM_UNKNOWN, - APUP_HASH_ALGORITHM_MD5, - APUP_HASH_ALGORITHM_SHA1, - APUP_HASH_ALGORITHM_SHA256, -}; - - -struct APPLICATION_UPDATE_ENCLOSURE -{ - LPWSTR wzUrl; - LPWSTR wzLocalName; - DWORD64 dw64Size; - - BYTE* rgbDigest; - DWORD cbDigest; - APUP_HASH_ALGORITHM digestAlgorithm; - - BOOL fInstaller; -}; - - -struct APPLICATION_UPDATE_ENTRY -{ - LPWSTR wzApplicationId; - LPWSTR wzApplicationType; - - LPWSTR wzUpgradeId; - BOOL fUpgradeExclusive; - DWORD64 dw64Version; - DWORD64 dw64UpgradeVersion; - - DWORD64 dw64TotalSize; - - DWORD cEnclosures; - APPLICATION_UPDATE_ENCLOSURE* rgEnclosures; -}; - - -struct APPLICATION_UPDATE_CHAIN -{ - LPWSTR wzDefaultApplicationId; - LPWSTR wzDefaultApplicationType; - - DWORD cEntries; - APPLICATION_UPDATE_ENTRY* rgEntries; -}; - - -HRESULT DAPI ApupAllocChainFromAtom( - __in ATOM_FEED* pFeed, - __out APPLICATION_UPDATE_CHAIN** ppChain - ); - -HRESULT DAPI ApupFilterChain( - __in APPLICATION_UPDATE_CHAIN* pChain, - __in DWORD64 dw64Version, - __out APPLICATION_UPDATE_CHAIN** ppFilteredChain - ); - -void DAPI ApupFreeChain( - __in APPLICATION_UPDATE_CHAIN* pChain - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/atomutil.h b/scripts/windows/installer/WiXSDK/inc/atomutil.h deleted file mode 100644 index 7f4f71e3d6..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/atomutil.h +++ /dev/null @@ -1,158 +0,0 @@ 
-//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// ATOM helper functions. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseAtomFeed(p) if (p) { AtomFreeFeed(p); } -#define ReleaseNullAtomFeed(p) if (p) { AtomFreeFeed(p); p = NULL; } - - -struct ATOM_UNKNOWN_ATTRIBUTE -{ - LPWSTR wzNamespace; - LPWSTR wzAttribute; - LPWSTR wzValue; - - ATOM_UNKNOWN_ATTRIBUTE* pNext; -}; - -struct ATOM_UNKNOWN_ELEMENT -{ - LPWSTR wzNamespace; - LPWSTR wzElement; - LPWSTR wzValue; - - ATOM_UNKNOWN_ATTRIBUTE* pAttributes; - ATOM_UNKNOWN_ELEMENT* pNext; -}; - -struct ATOM_LINK -{ - LPWSTR wzRel; - LPWSTR wzTitle; - LPWSTR wzType; - LPWSTR wzUrl; - LPWSTR wzValue; - DWORD64 dw64Length; - - ATOM_UNKNOWN_ATTRIBUTE* pUnknownAttributes; - ATOM_UNKNOWN_ELEMENT* pUnknownElements; -}; - -struct ATOM_CONTENT -{ - LPWSTR wzType; - LPWSTR wzUrl; - LPWSTR wzValue; - - ATOM_UNKNOWN_ELEMENT* pUnknownElements; -}; - -struct ATOM_AUTHOR -{ - LPWSTR wzName; - LPWSTR wzEmail; - LPWSTR wzUrl; -}; - -struct ATOM_CATEGORY -{ - LPWSTR wzLabel; - LPWSTR wzScheme; - LPWSTR wzTerm; - - ATOM_UNKNOWN_ELEMENT* pUnknownElements; -}; - -struct ATOM_ENTRY -{ - LPWSTR wzId; - LPWSTR wzSummary; - LPWSTR wzTitle; - FILETIME ftPublished; - FILETIME ftUpdated; - - ATOM_CONTENT* pContent; - - DWORD cAuthors; - ATOM_AUTHOR* rgAuthors; - - DWORD cCategories; - ATOM_CATEGORY* rgCategories; - - DWORD cLinks; - ATOM_LINK* rgLinks; - - IXMLDOMNode* pixn; - ATOM_UNKNOWN_ELEMENT* pUnknownElements; -}; - -struct ATOM_FEED -{ - LPWSTR wzGenerator; - LPWSTR wzIcon; - LPWSTR wzId; - LPWSTR 
wzLogo; - LPWSTR wzSubtitle; - LPWSTR wzTitle; - FILETIME ftUpdated; - - DWORD cAuthors; - ATOM_AUTHOR* rgAuthors; - - DWORD cCategories; - ATOM_CATEGORY* rgCategories; - - DWORD cEntries; - ATOM_ENTRY* rgEntries; - - DWORD cLinks; - ATOM_LINK* rgLinks; - - IXMLDOMNode* pixn; - ATOM_UNKNOWN_ELEMENT* pUnknownElements; -}; - -HRESULT DAPI AtomInitialize( - ); - -void DAPI AtomUninitialize( - ); - -HRESULT DAPI AtomParseFromString( - __in_z LPCWSTR wzAtomString, - __out ATOM_FEED **ppFeed - ); - -HRESULT DAPI AtomParseFromFile( - __in_z LPCWSTR wzAtomFile, - __out ATOM_FEED **ppFeed - ); - -HRESULT DAPI AtomParseFromDocument( - __in IXMLDOMDocument* pixdDocument, - __out ATOM_FEED **ppFeed - ); - -void DAPI AtomFreeFeed( - __in_xcount(pFeed->cItems) ATOM_FEED *pFEED - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/balcondition.h b/scripts/windows/installer/WiXSDK/inc/balcondition.h deleted file mode 100644 index 8f3a78a740..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/balcondition.h +++ /dev/null @@ -1,69 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Bootstrapper Application Layer condition utility. 
-// -//------------------------------------------------------------------------------------------------- - - -#ifdef __cplusplus -extern "C" { -#endif - -typedef struct _BAL_CONDITION -{ - LPWSTR sczCondition; - LPWSTR sczMessage; -} BAL_CONDITION; - - -typedef struct _BAL_CONDITIONS -{ - BAL_CONDITION* rgConditions; - DWORD cConditions; -} BAL_CONDITIONS; - - -/******************************************************************* - BalConditionsParseFromXml - loads the conditions from the UX manifest. - -********************************************************************/ -DAPI_(HRESULT) BalConditionsParseFromXml( - __in BAL_CONDITIONS* pConditions, - __in IXMLDOMDocument* pixdManifest, - __in_opt WIX_LOCALIZATION* pWixLoc - ); - - -/******************************************************************* - BalConditionEvaluate - evaluates condition against the provided IBurnCore. - - NOTE: psczMessage is optional. -********************************************************************/ -DAPI_(HRESULT) BalConditionEvaluate( - __in BAL_CONDITION* pCondition, - __in IBootstrapperEngine* pEngine, - __out BOOL* pfResult, - __out_z_opt LPWSTR* psczMessage - ); - - -/******************************************************************* - BalConditionsUninitialize - uninitializes any conditions previously loaded. - -********************************************************************/ -DAPI_(void) BalConditionsUninitialize( - __in BAL_CONDITIONS* pConditions - ); - - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/balinfo.h b/scripts/windows/installer/WiXSDK/inc/balinfo.h deleted file mode 100644 index 3a1a8ac3e7..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/balinfo.h +++ /dev/null @@ -1,106 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). 
-// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Bootstrapper Application Layer package utility. -// -//------------------------------------------------------------------------------------------------- - - -#ifdef __cplusplus -extern "C" { -#endif - -enum BAL_INFO_PACKAGE_TYPE -{ - BAL_INFO_PACKAGE_TYPE_UNKNOWN, - BAL_INFO_PACKAGE_TYPE_EXE, - BAL_INFO_PACKAGE_TYPE_MSI, - BAL_INFO_PACKAGE_TYPE_MSP, - BAL_INFO_PACKAGE_TYPE_MSU, - BAL_INFO_PACKAGE_TYPE_BUNDLE_UPGRADE, - BAL_INFO_PACKAGE_TYPE_BUNDLE_ADDON, - BAL_INFO_PACKAGE_TYPE_BUNDLE_PATCH, -}; - - -typedef struct _BAL_INFO_PACKAGE -{ - LPWSTR sczId; - LPWSTR sczDisplayName; - LPWSTR sczDescription; - BAL_INFO_PACKAGE_TYPE type; - BOOL fPermanent; - BOOL fVital; - BOOL fDisplayInternalUI; -} BAL_INFO_PACKAGE; - - -typedef struct _BAL_INFO_PACKAGES -{ - BAL_INFO_PACKAGE* rgPackages; - DWORD cPackages; -} BAL_INFO_PACKAGES; - - -typedef struct _BAL_INFO_BUNDLE -{ - BOOL fPerMachine; - LPWSTR sczName; - LPWSTR sczLogVariable; - BAL_INFO_PACKAGES packages; -} BAL_INFO_BUNDLE; - - -/******************************************************************* - BalInfoParseFromXml - loads the bundle and package info from the UX - manifest. - -********************************************************************/ -DAPI_(HRESULT) BalInfoParseFromXml( - __in BAL_INFO_BUNDLE* pBundle, - __in IXMLDOMDocument* pixdManifest - ); - - -/******************************************************************* - BalInfoAddRelatedBundleAsPackage - adds a related bundle as a package. 
- - ********************************************************************/ -DAPI_(HRESULT) BalInfoAddRelatedBundleAsPackage( - __in BAL_INFO_PACKAGES* pPackages, - __in LPCWSTR wzId, - __in BOOTSTRAPPER_RELATION_TYPE relationType, - __in BOOL fPerMachine - ); - - -/******************************************************************* - BalInfoFindPackageById - finds a package by its id. - - ********************************************************************/ -DAPI_(HRESULT) BalInfoFindPackageById( - __in BAL_INFO_PACKAGES* pPackages, - __in LPCWSTR wzId, - __out BAL_INFO_PACKAGE** ppPackage - ); - - -/******************************************************************* - BalInfoUninitialize - uninitializes any info previously loaded. - -********************************************************************/ -DAPI_(void) BalInfoUninitialize( - __in BAL_INFO_BUNDLE* pBundle - ); - - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/balretry.h b/scripts/windows/installer/WiXSDK/inc/balretry.h deleted file mode 100644 index 0e78189953..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/balretry.h +++ /dev/null @@ -1,75 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Bootstrapper Application Layer retry utility. -// -//------------------------------------------------------------------------------------------------- - - -#ifdef __cplusplus -extern "C" { -#endif - -enum BALRETRY_TYPE -{ - BALRETRY_TYPE_CACHE, - BALRETRY_TYPE_EXECUTE, -}; - -/******************************************************************* - BalRetryInitialize - initialize the retry count and timeout between - retries (in milliseconds). 
-********************************************************************/ -DAPI_(void) BalRetryInitialize( - __in DWORD dwMaxRetries, - __in DWORD dwTimeout - ); - -/******************************************************************* - BalRetryUninitialize - call to cleanup any memory allocated during - use of the retry utility. -********************************************************************/ -DAPI_(void) BalRetryUninitialize(); - -/******************************************************************* - BalRetryStartPackage - call when a package begins to be modified. If - the package is being retried, the function will - wait the specified timeout. -********************************************************************/ -DAPI_(void) BalRetryStartPackage( - __in BALRETRY_TYPE type, - __in_z_opt LPCWSTR wzPackageId, - __in_z_opt LPCWSTR wzPayloadId - ); - -/******************************************************************* - BalRetryErrorOccured - call when an error occurs for the retry utility - to consider. -********************************************************************/ -DAPI_(void) BalRetryErrorOccurred( - __in_z_opt LPCWSTR wzPackageId, - __in DWORD dwError - ); - -/******************************************************************* - BalRetryEndPackage - returns IDRETRY is a retry is recommended or - IDNOACTION if a retry is not recommended. 
-********************************************************************/ -DAPI_(int) BalRetryEndPackage( - __in BALRETRY_TYPE type, - __in_z_opt LPCWSTR wzPackageId, - __in_z_opt LPCWSTR wzPayloadId, - __in HRESULT hrError - ); - - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/balutil.h b/scripts/windows/installer/WiXSDK/inc/balutil.h deleted file mode 100644 index 89c2b69f7b..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/balutil.h +++ /dev/null @@ -1,120 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Burn utility library. -// -//------------------------------------------------------------------------------------------------- - -#include "dutil.h" - - -#ifdef __cplusplus -extern "C" { -#endif - -#define BalExitOnFailure(x, f) if (FAILED(x)) { BalLogError(x, f); ExitTrace(x, f); goto LExit; } -#define BalExitOnFailure1(x, f, s) if (FAILED(x)) { BalLogError(x, f, s); ExitTrace1(x, f, s); goto LExit; } -#define BalExitOnFailure2(x, f, s, t) if (FAILED(x)) { BalLogError(x, f, s, t); ExitTrace2(x, f, s, t); goto LExit; } -#define BalExitOnFailure3(x, f, s, t, u) if (FAILED(x)) { BalLogError(x, f, s, t, u); ExitTrace3(x, f, s, t, u); goto LExit; } - -#define BalExitOnRootFailure(x, f) if (FAILED(x)) { BalLogError(x, f); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, f); goto LExit; } -#define BalExitOnRootFailure1(x, f, s) if (FAILED(x)) { BalLogError(x, f, s); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } -#define BalExitOnRootFailure2(x, f, s, t) if (FAILED(x)) { BalLogError(x, f, s, t); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); 
goto LExit; } -#define BalExitOnRootFailure3(x, f, s, t, u) if (FAILED(x)) { BalLogError(x, f, s, t, u); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace3(x, f, s, t, u); goto LExit; } - -#define BalExitOnNullWithLastError(p, x, f) if (NULL == p) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } BalLogError(x, f); ExitTrace(x, f); goto LExit; } -#define BalExitOnNullWithLastError1(p, x, f, s) if (NULL == p) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } BalLogError(x, f, s); ExitTrace1(x, f, s); goto LExit; } - - -/******************************************************************* - BalInitialize - remembers the engine interface to enable logging and - other functions. - -********************************************************************/ -DAPI_(void) BalInitialize( - __in IBootstrapperEngine* pEngine - ); - -/******************************************************************* - BalUninitialize - cleans up utility layer internals. - -********************************************************************/ -DAPI_(void) BalUninitialize(); - -/******************************************************************* - BalManifestLoad - loads the Application manifest into an XML document. - -********************************************************************/ -DAPI_(HRESULT) BalManifestLoad( - __in HMODULE hUXModule, - __out IXMLDOMDocument** ppixdManifest - ); - -/******************************************************************* -BalFormatString - formats a string using variables in the engine. - - Note: Use StrFree() to release psczOut. -********************************************************************/ -DAPI_(HRESULT) BalFormatString( - __in_z LPCWSTR wzFormat, - __inout LPWSTR* psczOut - ); - -/******************************************************************* -BalGetNumericVariable - gets a number from a variable in the engine. 
- - Note: Returns E_NOTFOUND if variable does not exist. -********************************************************************/ -DAPI_(HRESULT) BalGetNumericVariable( - __in_z LPCWSTR wzVariable, - __out LONGLONG* pllValue - ); - -/******************************************************************* -BalStringVariableExists - checks if a string variable exists in the engine. - -********************************************************************/ -DAPI_(BOOL) BalStringVariableExists( - __in_z LPCWSTR wzVariable - ); - -/******************************************************************* -BalGetStringVariable - gets a string from a variable in the engine. - - Note: Use StrFree() to release psczValue. -********************************************************************/ -DAPI_(HRESULT) BalGetStringVariable( - __in_z LPCWSTR wzVariable, - __inout LPWSTR* psczValue - ); - -/******************************************************************* - BalLog - logs a message with the engine. - -********************************************************************/ -DAPIV_(HRESULT) BalLog( - __in BOOTSTRAPPER_LOG_LEVEL level, - __in_z __format_string LPCSTR szFormat, - ... - ); - -/******************************************************************* - BalLogError - logs an error message with the engine. - -********************************************************************/ -DAPIV_(HRESULT) BalLogError( - __in HRESULT hr, - __in_z __format_string LPCSTR szFormat, - ... - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/buffutil.h b/scripts/windows/installer/WiXSDK/inc/buffutil.h deleted file mode 100644 index ae85f3067a..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/buffutil.h +++ /dev/null @@ -1,92 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). 
-// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Binary serialization helper functions. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#ifdef __cplusplus -extern "C" { -#endif - - -// macro definitions - -#define ReleaseBuffer ReleaseMem -#define ReleaseNullBuffer ReleaseNullMem -#define BuffFree MemFree - - -// function declarations - -HRESULT BuffReadNumber( - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer, - __inout SIZE_T* piBuffer, - __out DWORD* pdw - ); -HRESULT BuffReadNumber64( - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer, - __inout SIZE_T* piBuffer, - __out DWORD64* pdw64 - ); -HRESULT BuffReadString( - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer, - __inout SIZE_T* piBuffer, - __deref_out_z LPWSTR* pscz - ); -HRESULT BuffReadStringAnsi( - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer, - __inout SIZE_T* piBuffer, - __deref_out_z LPSTR* pscz - ); -HRESULT BuffReadStream( - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer, - __inout SIZE_T* piBuffer, - __deref_out_bcount(*pcbStream) BYTE** ppbStream, - __out SIZE_T* pcbStream - ); - -HRESULT BuffWriteNumber( - __deref_out_bcount(*piBuffer) BYTE** ppbBuffer, - __inout SIZE_T* piBuffer, - __in DWORD dw - ); -HRESULT BuffWriteNumber64( - __deref_out_bcount(*piBuffer) BYTE** ppbBuffer, - __inout SIZE_T* piBuffer, - __in DWORD64 dw64 - ); -HRESULT BuffWriteString( - __deref_out_bcount(*piBuffer) BYTE** ppbBuffer, - __inout SIZE_T* piBuffer, - __in_z_opt LPCWSTR scz - ); -HRESULT BuffWriteStringAnsi( - __deref_out_bcount(*piBuffer) BYTE** ppbBuffer, - __inout SIZE_T* piBuffer, - __in_z_opt LPCSTR scz - ); -HRESULT BuffWriteStream( - __deref_out_bcount(*piBuffer) BYTE** ppbBuffer, - __inout SIZE_T* piBuffer, - __in_bcount(cbStream) const 
BYTE* pbStream, - __in SIZE_T cbStream - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/cabcutil.h b/scripts/windows/installer/WiXSDK/inc/cabcutil.h deleted file mode 100644 index 5f7c30dae7..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/cabcutil.h +++ /dev/null @@ -1,72 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for cabinet creation helper functions. -// -//------------------------------------------------------------------------------------------------- - -#include -#include -#include - -// Callback from PFNFCIGETNEXTCABINET CabCGetNextCabinet method -// First argument is the name of splitting cabinet without extension e.g. "cab1" -// Second argument is name of the new cabinet that would be formed by splitting e.g. "cab1b.cab" -// Third argument is the file token of the first file present in the splitting cabinet -typedef void (__stdcall * FileSplitCabNamesCallback)(LPWSTR, LPWSTR, LPWSTR); - -#define CAB_MAX_SIZE 0x7FFFFFFF // (see KB: Q174866) - -#ifdef __cplusplus -extern "C" { -#endif - -extern const int CABC_HANDLE_BYTES; - -// time vs. 
space trade-off -enum COMPRESSION_TYPE -{ - COMPRESSION_TYPE_NONE, // fastest - COMPRESSION_TYPE_LOW, - COMPRESSION_TYPE_MEDIUM, - COMPRESSION_TYPE_HIGH, // smallest - COMPRESSION_TYPE_MSZIP -}; - -// functions -HRESULT DAPI CabCBegin( - __in_z LPCWSTR wzCab, - __in_z LPCWSTR wzCabDir, - __in DWORD dwMaxFiles, - __in DWORD dwMaxSize, - __in DWORD dwMaxThresh, - __in COMPRESSION_TYPE ct, - __out_bcount(CABC_HANDLE_BYTES) HANDLE *phContext - ); -HRESULT DAPI CabCNextCab( - __in_bcount(CABC_HANDLE_BYTES) HANDLE hContext - ); -HRESULT DAPI CabCAddFile( - __in_z LPCWSTR wzFile, - __in_z_opt LPCWSTR wzToken, - __in_opt PMSIFILEHASHINFO pmfHash, - __in_bcount(CABC_HANDLE_BYTES) HANDLE hContext - ); -HRESULT DAPI CabCFinish( - __in_bcount(CABC_HANDLE_BYTES) HANDLE hContext, - __in_opt FileSplitCabNamesCallback fileSplitCabNamesCallback - ); -void DAPI CabCCancel( - __in_bcount(CABC_HANDLE_BYTES) HANDLE hContext - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/cabutil.h b/scripts/windows/installer/WiXSDK/inc/cabutil.h deleted file mode 100644 index 7792046643..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/cabutil.h +++ /dev/null @@ -1,66 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. 
-// -// -// -// Header for cabinet decompression helper functions -// -//------------------------------------------------------------------------------------------------- - -#include -#include - -#ifdef __cplusplus -extern "C" { -#endif - -// structs - - -// callback function prototypes -typedef HRESULT (*CAB_CALLBACK_OPEN_FILE)(LPCWSTR wzFile, INT_PTR* ppFile); -typedef HRESULT (*CAB_CALLBACK_READ_FILE)(INT_PTR pFile, LPVOID pvData, DWORD cbData, DWORD* pcbRead); -typedef HRESULT (*CAB_CALLBACK_WRITE_FILE)(INT_PTR pFile, LPVOID pvData, DWORD cbData, DWORD* pcbRead); -typedef LONG (*CAB_CALLBACK_SEEK_FILE)(INT_PTR pFile, DWORD dwMove, DWORD dwMoveMethod); -typedef HRESULT (*CAB_CALLBACK_CLOSE_FILE)(INT_PTR pFile); - -typedef HRESULT (*CAB_CALLBACK_BEGIN_FILE)(LPCWSTR wzFileId, FILETIME* pftFileTime, DWORD cbFileSize, LPVOID pvContext, INT_PTR* ppFile); -typedef HRESULT (*CAB_CALLBACK_END_FILE)(LPCWSTR wzFileId, LPVOID pvContext, INT_PTR pFile); -typedef HRESULT (*CAB_CALLBACK_PROGRESS)(BOOL fBeginFile, LPCWSTR wzFileId, LPVOID pvContext); - -// function type with calling convention of __stdcall that .NET 1.1 understands only -// .NET 2.0 will not need this -typedef INT_PTR (FAR __stdcall *STDCALL_PFNFDINOTIFY)(FDINOTIFICATIONTYPE fdint, PFDINOTIFICATION pfdin); - - -// functions -HRESULT DAPI CabInitialize( - __in BOOL fDelayLoad - ); -void DAPI CabUninitialize( - ); - -HRESULT DAPI CabExtract( - __in_z LPCWSTR wzCabinet, - __in_z LPCWSTR wzExtractFile, - __in_z LPCWSTR wzExtractDir, - __in_opt CAB_CALLBACK_PROGRESS pfnProgress, - __in_opt LPVOID pvContext, - __in DWORD64 dw64EmbeddedOffset - ); - -HRESULT DAPI CabEnumerate( - __in_z LPCWSTR wzCabinet, - __in_z LPCWSTR wzEnumerateFile, - __in STDCALL_PFNFDINOTIFY pfnNotify, - __in DWORD64 dw64EmbeddedOffset - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/certutil.h b/scripts/windows/installer/WiXSDK/inc/certutil.h deleted file mode 100644 index 5cd27d4aae..0000000000 --- 
a/scripts/windows/installer/WiXSDK/inc/certutil.h +++ /dev/null @@ -1,76 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Certificate helper functions. -// -//------------------------------------------------------------------------------------------------- - -#define ReleaseCertStore(p) if (p) { ::CertCloseStore(p, 0); p = NULL; } -#define ReleaseCertContext(p) if (p) { ::CertFreeCertificateContext(p); p = NULL; } -#define ReleaseCertChain(p) if (p) { ::CertFreeCertificateChain(p); p = NULL; } - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT DAPI CertReadProperty( - __in PCCERT_CONTEXT pCertContext, - __in DWORD dwProperty, - __out_bcount(*pcbValue) LPVOID pvValue, - __out_opt DWORD* pcbValue - ); - -HRESULT DAPI CertGetAuthenticodeSigningTimestamp( - __in CMSG_SIGNER_INFO* pSignerInfo, - __out FILETIME* pft - ); - -HRESULT DAPI GetCryptProvFromCert( - __in_opt HWND hwnd, - __in PCCERT_CONTEXT pCert, - __out HCRYPTPROV *phCryptProv, - __out DWORD *pdwKeySpec, - __in BOOL *pfDidCryptAcquire, - __deref_opt_out LPWSTR *ppwszTmpContainer, - __deref_opt_out LPWSTR *ppwszProviderName, - __out DWORD *pdwProviderType - ); - -HRESULT DAPI FreeCryptProvFromCert( - __in BOOL fAcquired, - __in HCRYPTPROV hProv, - __in_opt LPWSTR pwszCapiProvider, - __in DWORD dwProviderType, - __in_opt LPWSTR pwszTmpContainer - ); - -HRESULT DAPI GetProvSecurityDesc( - __in HCRYPTPROV hProv, - __deref_out SECURITY_DESCRIPTOR** pSecurity - ); - -HRESULT DAPI SetProvSecurityDesc( - __in HCRYPTPROV hProv, - __in SECURITY_DESCRIPTOR* pSecurity - ); - -BOOL DAPI CertHasPrivateKey( - __in PCCERT_CONTEXT pCertContext, - __out_opt DWORD* pdwKeySpec - ); - 
-HRESULT DAPI CertInstallSingleCertificate( - __in HCERTSTORE hStore, - __in PCCERT_CONTEXT pCertContext, - __in LPCWSTR wzName - ); -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/conutil.h b/scripts/windows/installer/WiXSDK/inc/conutil.h deleted file mode 100644 index 13bb1aa685..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/conutil.h +++ /dev/null @@ -1,110 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Console helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ConsoleExitOnFailure(x, c, f) if (FAILED(x)) { ConsoleWriteError(x, c, f); ExitTrace(x, f); goto LExit; } -#define ConsoleExitOnFailure1(x, c, f, s) if (FAILED(x)) { ConsoleWriteError(x, c, f, s, NULL); ExitTrace1(x, f, s); goto LExit; } -#define ConsoleExitOnFailure2(x, c, f, s, t) if (FAILED(x)) { ConsoleWriteError(x, c, f, s, t); ExitTrace2(x, f, s, t); goto LExit; } -#define ConsoleExitOnFailure3(x, c, f, s, t, u) if (FAILED(x)) { ConsoleWriteError(x, c, f, s, t, u); ExitTrace3(x, f, s, t, u); goto LExit; } - -#define ConsoleExitOnLastError(x, c, f) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (FAILED(x)) { ConsoleWriteError(x, c, f); ExitTrace(x, f); goto LExit; } } -#define ConsoleExitOnLastError1(x, c, f, s) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (FAILED(x)) { ConsoleWriteError(x, c, f, s, NULL); ExitTrace1(x, f, s); goto LExit; } } -#define ConsoleExitOnLastError2(x, c, f, s, t) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (FAILED(x)) { ConsoleWriteError(x, c, f, 
s, t); ExitTrace2(x, f, s, t); goto LExit; } } -#define ConsoleExitOnLastError3(x, c, f, s, t, u) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (FAILED(x)) { ConsoleWriteError(x, c, f, s, t, u); ExitTrace3(x, f, s, t, u); goto LExit; } } - -#define ConsoleExitOnNull(p, x, e, c, f) if (NULL == p) { x = e; ConsoleWriteError(x, c, f); ExitTrace(x, f); goto LExit; } -#define ConsoleExitOnNull1(p, x, e, c, f, s) if (NULL == p) { x = e; ConsoleWriteError(x, c, f, s, NULL); ExitTrace1(x, f, s); goto LExit; } -#define ConsoleExitOnNull2(p, x, e, c, f, s, t) if (NULL == p) { x = e; ConsoleWriteError(x, c, f, s, t); ExitTrace2(x, f, s, t); goto LExit; } -#define ConsoleExitOnNull3(p, x, e, c, f, s, t, u) if (NULL == p) { x = e; ConsoleWriteError(x, c, f, s, t, u); ExitTrace2(x, f, s, t, u); goto LExit; } - - -// the following macros need to go away -#define ConsoleTrace(l, f) { ConsoleWriteLine(CONSOLE_COLOR_NORMAL, f); Trace(l, f); } -#define ConsoleTrace1(l, f, s) { ConsoleWriteLine(CONSOLE_COLOR_NORMAL, f, s); Trace1(l, f, s); } -#define ConsoleTrace2(l, f, s, t) { ConsoleWriteLine(CONSOLE_COLOR_NORMAL, f, s, t); Trace2(l, f, s, t); } -#define ConsoleTrace3(l, f, s, t, u) { ConsoleWriteLine(CONSOLE_COLOR_NORMAL, f, s, t, u); Trace3(l, f, s, t, u); } - -#define ConsoleWarning(f) { ConsoleWriteLine(CONSOLE_COLOR_YELLOW, f); Trace(REPORT_STANDARD, f); } -#define ConsoleWarning1(f, s) { ConsoleWriteLine(CONSOLE_COLOR_YELLOW, f, s); Trace1(REPORT_STANDARD, f, s); } -#define ConsoleWarning2(f, s, t) { ConsoleWriteLine(CONSOLE_COLOR_YELLOW, f, s, t); Trace2(REPORT_STANDARD, f, s, t); } -#define ConsoleWarning3(f, s, t, u) { ConsoleWriteLine(CONSOLE_COLOR_YELLOW, f, s, t, u); Trace3(REPORT_STANDARD, f, s, t, u); } - -#define ConsoleError(x, f) { ConsoleWriteError(x, CONSOLE_COLOR_RED, f); TraceError(x, f); } -#define ConsoleError1(x, f, s) { ConsoleWriteError(x, CONSOLE_COLOR_RED, f, s); TraceError1(x, f, s); } -#define ConsoleError2(x, f, s, t) { ConsoleWriteError(x, 
CONSOLE_COLOR_RED, f, s, t); TraceError2(x, f, s, t); } -#define ConsoleError3(x, f, s, t, u) { ConsoleWriteError(x, CONSOLE_COLOR_RED, f, s, t, u); TraceError3(x, f, s, t, u); } - - -// enums -enum CONSOLE_COLOR { CONSOLE_COLOR_NORMAL, CONSOLE_COLOR_RED, CONSOLE_COLOR_YELLOW, CONSOLE_COLOR_GREEN }; - -// structs - -// functions -HRESULT DAPI ConsoleInitialize(); -void DAPI ConsoleUninitialize(); - -void DAPI ConsoleGreen(); -void DAPI ConsoleRed(); -void DAPI ConsoleYellow(); -void DAPI ConsoleNormal(); - -HRESULT DAPI ConsoleWrite( - CONSOLE_COLOR cc, - __in_z __format_string LPCSTR szFormat, - ... - ); -HRESULT DAPI ConsoleWriteLine( - CONSOLE_COLOR cc, - __in_z __format_string LPCSTR szFormat, - ... - ); -HRESULT DAPI ConsoleWriteError( - HRESULT hrError, - CONSOLE_COLOR cc, - __in_z __format_string LPCSTR szFormat, - ... - ); - -HRESULT DAPI ConsoleReadW( - __deref_out_z LPWSTR* ppwzBuffer - ); - -HRESULT DAPI ConsoleReadStringA( - __deref_out_ecount_part(cchCharBuffer,*pcchNumCharReturn) LPSTR* szCharBuffer, - CONST DWORD cchCharBuffer, - __out DWORD* pcchNumCharReturn - ); -HRESULT DAPI ConsoleReadStringW( - __deref_out_ecount_part(cchCharBuffer,*pcchNumCharReturn) LPWSTR* szCharBuffer, - CONST DWORD cchCharBuffer, - __out DWORD* pcchNumCharReturn - ); - -HRESULT DAPI ConsoleReadNonBlockingW( - __deref_out_ecount_opt(*pcchSize) LPWSTR* ppwzBuffer, - __out DWORD* pcchSize, - BOOL fReadLine - ); - -HRESULT DAPI ConsoleSetReadHidden(void); -HRESULT DAPI ConsoleSetReadNormal(void); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/cryputil.h b/scripts/windows/installer/WiXSDK/inc/cryputil.h deleted file mode 100644 index 2cb69ec2a9..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/cryputil.h +++ /dev/null @@ -1,72 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. 
-// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Cryptography helper functions. -// -//------------------------------------------------------------------------------------------------- - -#define ReleaseCryptMsg(p) if (p) { ::CryptMsgClose(p); p = NULL; } - -#ifdef __cplusplus -extern "C" { -#endif - - -#define SHA1_HASH_LEN 20 - -// function declarations - -HRESULT DAPI CrypDecodeObject( - __in_z LPCSTR szStructType, - __in_ecount(cbData) const BYTE* pbData, - __in DWORD cbData, - __in DWORD dwFlags, - __out LPVOID* ppvObject, - __out_opt DWORD* pcbObject - ); - -HRESULT DAPI CrypMsgGetParam( - __in HCRYPTMSG hCryptMsg, - __in DWORD dwType, - __in DWORD dwIndex, - __out LPVOID* ppvData, - __out_opt DWORD* pcbData - ); - -HRESULT DAPI CrypHashFile( - __in_z LPCWSTR wzFilePath, - __in DWORD dwProvType, - __in ALG_ID algid, - __out_bcount(cbHash) BYTE* pbHash, - __in DWORD cbHash, - __out_opt DWORD64* pqwBytesHashed - ); - -HRESULT DAPI CrypHashFileHandle( - __in HANDLE hFile, - __in DWORD dwProvType, - __in ALG_ID algid, - __out_bcount(cbHash) BYTE* pbHash, - __in DWORD cbHash, - __out_opt DWORD64* pqwBytesHashed - ); - -HRESULT DAPI CrypHashBuffer( - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer, - __in DWORD dwProvType, - __in ALG_ID algid, - __out_bcount(cbHash) BYTE* pbHash, - __in DWORD cbHash - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/custommsierrors.h b/scripts/windows/installer/WiXSDK/inc/custommsierrors.h deleted file mode 100644 index ca9505a0ac..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/custommsierrors.h +++ /dev/null @@ -1,153 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. 
-// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -//------------------------------------------------------------------------------------------------- - -//--------------------------------------------------------------------------- -// Indexes for custom errors in the MSI -// -// Note: Custom Errors must be in the range 25000-30000, all other error -// codes are reserved for the Windows Installer as standard error ranges -// NEVER reuse an error number or you're likely to break the builds. -//--------------------------------------------------------------------------- - -// Instructions: -// 1. add the index to this file -// 2. define the error table row -// 3. #include CustomMsiErrors to refer to the index -// 4. Import Misc\CustomErrors { MYDEFINE=1 }; with your errorgroup under MYDEFINE - - -//--------------------------------------------------------------------------- -// GLOBAL 25501-25600 -#define GLOBAL_ERROR_BASE 25501 - -#define msierrSecureObjectsFailedCreateSD 25520 -#define msierrSecureObjectsFailedSet 25521 -#define msierrSecureObjectsUnknownType 25522 - -#define msierrXmlFileFailedRead 25530 -#define msierrXmlFileFailedOpen 25531 -#define msierrXmlFileFailedSelect 25532 -#define msierrXmlFileFailedSave 25533 - -#define msierrXmlConfigFailedRead 25540 -#define msierrXmlConfigFailedOpen 25541 -#define msierrXmlConfigFailedSelect 25542 -#define msierrXmlConfigFailedSave 25543 - -#define msierrFirewallCannotConnect 25580 - -//--------------------------------------------------------------------------- -// Server CustomAction Errors -// SERVER range: 26001-26100 -#define SERVER_ERROR_BASE 26000 - -#define msierrIISCannotConnect 26001 -#define msierrIISFailedReadWebSite 26002 -#define msierrIISFailedReadWebDirs 26003 -#define msierrIISFailedReadVDirs 26004 -#define msierrIISFailedReadFilters 26005 -#define 
msierrIISFailedReadAppPool 26006 -#define msierrIISFailedReadMimeMap 26007 -#define msierrIISFailedReadProp 26008 -#define msierrIISFailedReadWebSvcExt 26009 -#define msierrIISFailedReadWebError 26010 -#define msierrIISFailedReadHttpHeader 26011 - -#define msierrIISFailedSchedTransaction 26031 -#define msierrIISFailedSchedInstallWebs 26032 -#define msierrIISFailedSchedInstallWebDirs 26033 -#define msierrIISFailedSchedInstallVDirs 26034 -#define msierrIISFailedSchedInstallFilters 26035 -#define msierrIISFailedSchedInstallAppPool 26036 -#define msierrIISFailedSchedInstallProp 26037 -#define msierrIISFailedSchedInstallWebSvcExt 26038 - -#define msierrIISFailedSchedUninstallWebs 26051 -#define msierrIISFailedSchedUninstallWebDirs 26052 -#define msierrIISFailedSchedUninstallVDirs 26053 -#define msierrIISFailedSchedUninstallFilters 26054 -#define msierrIISFailedSchedUninstallAppPool 26055 -#define msierrIISFailedSchedUninstallProp 26056 -#define msierrIISFailedSchedUninstallWebSvcExt 26057 - -#define msierrIISFailedStartTransaction 26101 -#define msierrIISFailedOpenKey 26102 -#define msierrIISFailedCreateKey 26103 -#define msierrIISFailedWriteData 26104 -#define msierrIISFailedCreateApp 26105 -#define msierrIISFailedDeleteKey 26106 -#define msierrIISFailedDeleteApp 26107 -#define msierrIISFailedDeleteValue 26108 -#define msierrIISFailedCommitInUse 26109 - -#define msierrSQLFailedCreateDatabase 26201 -#define msierrSQLFailedDropDatabase 26202 -#define msierrSQLFailedConnectDatabase 26203 -#define msierrSQLFailedExecString 26204 -#define msierrSQLDatabaseAlreadyExists 26205 - -#define msierrPERFMONFailedRegisterDLL 26251 -#define msierrPERFMONFailedUnregisterDLL 26252 -#define msierrInstallPerfCounterData 26253 -#define msierrUninstallPerfCounterData 26254 - -#define msierrSMBFailedCreate 26301 -#define msierrSMBFailedDrop 26302 - -#define msierrCERTFailedOpen 26351 -#define msierrCERTFailedAdd 26352 - -#define msierrUSRFailedUserCreate 26401 -#define 
msierrUSRFailedUserCreatePswd 26402 -#define msierrUSRFailedUserGroupAdd 26403 -#define msierrUSRFailedUserCreateExists 26404 -#define msierrUSRFailedGrantLogonAsService 26405 - -#define msierrDependencyMissingDependencies 26451 -#define msierrDependencyHasDependents 26452 - -//-------------------------------------------------------------------------- -// Managed code CustomAction Errors -// MANAGED range: 27000-27100 -#define MANAGED_ERROR_BASE 27000 - -#define msierrDotNetRuntimeRequired 27000 -//--------------------------------------------------------------------------- -// Public CustomAction Errors -// PUBLIC range: 28001-28100 -#define PUBLIC_ERROR_BASE 28000 - -#define msierrComPlusCannotConnect 28001 -#define msierrComPlusPartitionReadFailed 28002 -#define msierrComPlusPartitionRoleReadFailed 28003 -#define msierrComPlusUserInPartitionRoleReadFailed 28004 -#define msierrComPlusPartitionUserReadFailed 28005 -#define msierrComPlusApplicationReadFailed 28006 -#define msierrComPlusApplicationRoleReadFailed 28007 -#define msierrComPlusUserInApplicationRoleReadFailed 28008 -#define msierrComPlusAssembliesReadFailed 28009 -#define msierrComPlusSubscriptionReadFailed 28010 -#define msierrComPlusPartitionDependency 28011 -#define msierrComPlusPartitionNotFound 28012 -#define msierrComPlusPartitionIdConflict 28013 -#define msierrComPlusPartitionNameConflict 28014 -#define msierrComPlusApplicationDependency 28015 -#define msierrComPlusApplicationNotFound 28016 -#define msierrComPlusApplicationIdConflict 28017 -#define msierrComPlusApplicationNameConflict 28018 -#define msierrComPlusApplicationRoleDependency 28019 -#define msierrComPlusApplicationRoleNotFound 28020 -#define msierrComPlusApplicationRoleConflict 28021 -#define msierrComPlusAssemblyDependency 28022 -#define msierrComPlusSubscriptionIdConflict 28023 -#define msierrComPlusSubscriptionNameConflict 28024 -#define msierrComPlusFailedLookupNames 28025 - -#define msierrMsmqCannotConnect 28101 diff --git 
a/scripts/windows/installer/WiXSDK/inc/dictutil.h b/scripts/windows/installer/WiXSDK/inc/dictutil.h deleted file mode 100644 index ed690c1325..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/dictutil.h +++ /dev/null @@ -1,79 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for string dict helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseDict(sdh) if (sdh) { DictDestroy(sdh); } -#define ReleaseNullDict(sdh) if (sdh) { DictDestroy(sdh); sdh = NULL; } - -typedef void* STRINGDICT_HANDLE; -typedef const void* C_STRINGDICT_HANDLE; - -extern const int STRINGDICT_HANDLE_BYTES; - -enum DICT_FLAG -{ - DICT_FLAG_NONE = 0, - DICT_FLAG_CASEINSENSITIVE = 1 -}; - -HRESULT DAPI DictCreateWithEmbeddedKey( - __out_bcount(STRINGDICT_HANDLE_BYTES) STRINGDICT_HANDLE* psdHandle, - __in DWORD dwNumExpectedItems, - __in_opt void **ppvArray, - __in size_t cByteOffset, - __in DICT_FLAG dfFlags - ); -HRESULT DAPI DictCreateStringList( - __out_bcount(STRINGDICT_HANDLE_BYTES) STRINGDICT_HANDLE* psdHandle, - __in DWORD dwNumExpectedItems, - __in DICT_FLAG dfFlags - ); -HRESULT DAPI DictCreateStringListFromArray( - __out_bcount(STRINGDICT_HANDLE_BYTES) STRINGDICT_HANDLE* psdHandle, - __in_ecount(cStringArray) const LPCWSTR* rgwzStringArray, - __in const DWORD cStringArray, - __in DICT_FLAG dfFlags - ); -HRESULT DAPI DictCompareStringListToArray( - __in_bcount(STRINGDICT_HANDLE_BYTES) STRINGDICT_HANDLE sdStringList, - __in_ecount(cStringArray) const LPCWSTR* rgwzStringArray, - __in const DWORD cStringArray - ); 
-HRESULT DAPI DictAddKey( - __in_bcount(STRINGDICT_HANDLE_BYTES) STRINGDICT_HANDLE sdHandle, - __in_z LPCWSTR szString - ); -HRESULT DAPI DictAddValue( - __in_bcount(STRINGDICT_HANDLE_BYTES) STRINGDICT_HANDLE sdHandle, - __in void *pvValue - ); -HRESULT DAPI DictKeyExists( - __in_bcount(STRINGDICT_HANDLE_BYTES) C_STRINGDICT_HANDLE sdHandle, - __in_z LPCWSTR szString - ); -HRESULT DAPI DictGetValue( - __in_bcount(STRINGDICT_HANDLE_BYTES) C_STRINGDICT_HANDLE sdHandle, - __in_z LPCWSTR szString, - __out void **ppvValue - ); -void DAPI DictDestroy( - __in_bcount(STRINGDICT_HANDLE_BYTES) STRINGDICT_HANDLE sdHandle - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/dirutil.h b/scripts/windows/installer/WiXSDK/inc/dirutil.h deleted file mode 100644 index ff04ad44ec..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/dirutil.h +++ /dev/null @@ -1,65 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Directory helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#pragma once - -enum DIR_DELETE -{ - DIR_DELETE_FILES = 1, - DIR_DELETE_RECURSE = 2, - DIR_DELETE_SCHEDULE = 4, -}; - -#ifdef __cplusplus -extern "C" { -#endif - -BOOL DAPI DirExists( - __in_z LPCWSTR wzPath, - __out_opt DWORD *pdwAttributes - ); - -HRESULT DAPI DirCreateTempPath( - __in_z LPCWSTR wzPrefix, - __out_ecount_z(cchPath) LPWSTR wzPath, - __in DWORD cchPath - ); - -HRESULT DAPI DirEnsureExists( - __in_z LPCWSTR wzPath, - __in_opt LPSECURITY_ATTRIBUTES psa - ); - -HRESULT DAPI DirEnsureDelete( - __in_z LPCWSTR wzPath, - __in BOOL fDeleteFiles, - __in BOOL fRecurse - ); - -HRESULT DAPI DirEnsureDeleteEx( - __in_z LPCWSTR wzPath, - __in DWORD dwFlags - ); - -HRESULT DAPI DirGetCurrent( - __deref_out_z LPWSTR* psczCurrentDirectory - ); - -HRESULT DAPI DirSetCurrent( - __in_z LPCWSTR wzDirectory - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/dutil.h b/scripts/windows/installer/WiXSDK/inc/dutil.h deleted file mode 100644 index bad9ec2e4a..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/dutil.h +++ /dev/null @@ -1,215 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. 
-// -// -// -// Header for utility layer that provides standard support for asserts, exit macros -// -//------------------------------------------------------------------------------------------------- - -#define DAPI __stdcall -#define DAPIV __cdecl // used only for functions taking variable length arguments - -#define DAPI_(type) EXTERN_C type DAPI -#define DAPIV_(type) EXTERN_C type DAPIV - - -// enums -enum REPORT_LEVEL -{ - REPORT_NONE, // turns off report (only valid for XXXSetLevel()) - REPORT_WARNING, // written if want only warnings or reporting is on in general - REPORT_STANDARD, // written if reporting is on - REPORT_VERBOSE, // written only if verbose reporting is on - REPORT_DEBUG, // reporting useful when debugging code - REPORT_ERROR, // always gets reported, but can never be specified -}; - -// asserts and traces -typedef BOOL (DAPI *DUTIL_ASSERTDISPLAYFUNCTION)(__in_z LPCSTR sz); - -extern "C" void DAPI Dutil_SetAssertModule(__in HMODULE hAssertModule); -extern "C" void DAPI Dutil_SetAssertDisplayFunction(__in DUTIL_ASSERTDISPLAYFUNCTION pfn); -extern "C" void DAPI Dutil_Assert(__in_z LPCSTR szFile, __in int iLine); -extern "C" void DAPI Dutil_AssertSz(__in_z LPCSTR szFile, __in int iLine, __in_z LPCSTR szMsg); - -extern "C" void DAPI Dutil_TraceSetLevel(__in REPORT_LEVEL ll, __in BOOL fTraceFilenames); -extern "C" REPORT_LEVEL DAPI Dutil_TraceGetLevel(); -extern "C" void __cdecl Dutil_Trace(__in_z LPCSTR szFile, __in int iLine, __in REPORT_LEVEL rl, __in_z __format_string LPCSTR szMessage, ...); -extern "C" void __cdecl Dutil_TraceError(__in_z LPCSTR szFile, __in int iLine, __in REPORT_LEVEL rl, __in HRESULT hr, __in_z __format_string LPCSTR szMessage, ...); -extern "C" void DAPI Dutil_RootFailure(__in_z LPCSTR szFile, __in int iLine, __in HRESULT hrError); - -#ifdef DEBUG - -#define AssertSetModule(m) (void)Dutil_SetAssertModule(m) -#define AssertSetDisplayFunction(pfn) (void)Dutil_SetAssertDisplayFunction(pfn) -#define Assert(f) ((f) ? 
(void)0 : (void)Dutil_Assert(__FILE__, __LINE__)) -#define AssertSz(f, sz) ((f) ? (void)0 : (void)Dutil_AssertSz(__FILE__, __LINE__, sz)) - -#define TraceSetLevel(l, f) (void)Dutil_TraceSetLevel(l, f) -#define TraceGetLevel() (REPORT_LEVEL)Dutil_TraceGetLevel() -#define Trace(l, f) (void)Dutil_Trace(__FILE__, __LINE__, l, f, NULL) -#define Trace1(l, f, s) (void)Dutil_Trace(__FILE__, __LINE__, l, f, s) -#define Trace2(l, f, s, t) (void)Dutil_Trace(__FILE__, __LINE__, l, f, s, t) -#define Trace3(l, f, s, t, u) (void)Dutil_Trace(__FILE__, __LINE__, l, f, s, t, u) - -#define TraceError(x, f) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_ERROR, x, f, NULL) -#define TraceError1(x, f, s) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_ERROR, x, f, s) -#define TraceError2(x, f, s, t) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_ERROR, x, f, s, t) -#define TraceError3(x, f, s, t, u) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_ERROR, x, f, s, t, u) - -#define TraceErrorDebug(x, f) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_DEBUG, x, f, NULL) -#define TraceErrorDebug1(x, f, s) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_DEBUG, x, f, s) -#define TraceErrorDebug2(x, f, s, t) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_DEBUG, x, f, s, t) -#define TraceErrorDebug3(x, f, s, t, u) (void)Dutil_TraceError(__FILE__, __LINE__, REPORT_DEBUG, x, f, s, t, u) - -#else // !DEBUG - -#define AssertSetModule(m) -#define AssertSetDisplayFunction(pfn) -#define Assert(f) -#define AssertSz(f, sz) - -#define TraceSetLevel(l, f) -#define Trace(l, f) -#define Trace1(l, f, s) -#define Trace2(l, f, s, t) -#define Trace3(l, f, s, t, u) - -#define TraceError(x, f) -#define TraceError1(x, f, s) -#define TraceError2(x, f, s, t) -#define TraceError3(x, f, s, t, u) - -#define TraceErrorDebug(x, f) -#define TraceErrorDebug1(x, f, s) -#define TraceErrorDebug2(x, f, s, t) -#define TraceErrorDebug3(x, f, s, t, u) - -#endif // DEBUG - - -// ExitTrace can be overriden -#ifndef 
ExitTrace -#define ExitTrace TraceError -#endif -#ifndef ExitTrace1 -#define ExitTrace1 TraceError1 -#endif -#ifndef ExitTrace2 -#define ExitTrace2 TraceError2 -#endif -#ifndef ExitTrace3 -#define ExitTrace3 TraceError3 -#endif - -// Exit macros -#define ExitFunction() { goto LExit; } -#define ExitFunction1(x) { x; goto LExit; } - -#define ExitOnLastError(x, s) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } } -#define ExitOnLastError1(x, f, s) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } } -#define ExitOnLastError2(x, f, s, t) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); goto LExit; } } - -#define ExitOnLastErrorDebugTrace(x, s) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); TraceErrorDebug(x, s); goto LExit; } } -#define ExitOnLastErrorDebugTrace1(x, f, s) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); TraceErrorDebug1(x, f, s); goto LExit; } } -#define ExitOnLastErrorDebugTrace2(x, f, s, t) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); TraceErrorDebug2(x, f, s, t); goto LExit; } } - -#define ExitWithLastError(x, s) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } -#define ExitWithLastError1(x, f, s) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, 
f, s); goto LExit; } -#define ExitWithLastError2(x, f, s, t) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); goto LExit; } -#define ExitWithLastError3(x, f, s, t, u) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace3(x, f, s, t, u); goto LExit; } - -#define ExitOnFailure(x, s) if (FAILED(x)) { ExitTrace(x, s); goto LExit; } -#define ExitOnFailure1(x, f, s) if (FAILED(x)) { ExitTrace1(x, f, s); goto LExit; } -#define ExitOnFailure2(x, f, s, t) if (FAILED(x)) { ExitTrace2(x, f, s, t); goto LExit; } -#define ExitOnFailure3(x, f, s, t, u) if (FAILED(x)) { ExitTrace3(x, f, s, t, u); goto LExit; } - -#define ExitOnRootFailure(x, s) if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } -#define ExitOnRootFailure1(x, f, s) if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } -#define ExitOnRootFailure2(x, f, s, t) if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); goto LExit; } -#define ExitOnRootFailure3(x, f, s, t, u) if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace3(x, f, s, t, u); goto LExit; } - -#define ExitOnFailureDebugTrace(x, s) if (FAILED(x)) { TraceErrorDebug(x, s); goto LExit; } -#define ExitOnFailureDebugTrace1(x, f, s) if (FAILED(x)) { TraceErrorDebug1(x, f, s); goto LExit; } -#define ExitOnFailureDebugTrace2(x, f, s, t) if (FAILED(x)) { TraceErrorDebug2(x, f, s, t); goto LExit; } -#define ExitOnFailureDebugTrace3(x, f, s, t, u) if (FAILED(x)) { TraceErrorDebug3(x, f, s, t, u); goto LExit; } - -#define ExitOnNull(p, x, e, s) if (NULL == p) { x = e; Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } -#define ExitOnNull1(p, x, e, f, s) if (NULL == p) { x = e; Dutil_RootFailure(__FILE__, 
__LINE__, x); ExitTrace1(x, f, s); goto LExit; } -#define ExitOnNull2(p, x, e, f, s, t) if (NULL == p) { x = e; Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); goto LExit; } - -#define ExitOnNullWithLastError(p, x, s) if (NULL == p) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } -#define ExitOnNullWithLastError1(p, x, f, s) if (NULL == p) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } - -#define ExitOnNullDebugTrace(p, x, e, s) if (NULL == p) { x = e; Dutil_RootFailure(__FILE__, __LINE__, x); TraceErrorDebug(x, s); goto LExit; } -#define ExitOnNullDebugTrace1(p, x, e, f, s) if (NULL == p) { x = e; Dutil_RootFailure(__FILE__, __LINE__, x); TraceErrorDebug1(x, f, s); goto LExit; } - -#define ExitOnInvalidHandleWithLastError(p, x, s) if (INVALID_HANDLE_VALUE == p) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } -#define ExitOnInvalidHandleWithLastError1(p, x, f, s) if (INVALID_HANDLE_VALUE == p) { DWORD Dutil_er = ::GetLastError(); x = HRESULT_FROM_WIN32(Dutil_er); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } - -#define ExitOnWin32Error(e, x, s) if (ERROR_SUCCESS != e) { x = HRESULT_FROM_WIN32(e); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } -#define ExitOnWin32Error1(e, x, f, s) if (ERROR_SUCCESS != e) { x = HRESULT_FROM_WIN32(e); if (!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } -#define ExitOnWin32Error2(e, x, f, s, t) if (ERROR_SUCCESS != e) { x = HRESULT_FROM_WIN32(e); if 
(!FAILED(x)) { x = E_FAIL; } Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); goto LExit; } - -// release macros -#define ReleaseObject(x) if (x) { x->Release(); } -#define ReleaseObjectArray(prg, cel) if (prg) { for (DWORD Dutil_ReleaseObjectArrayIndex = 0; Dutil_ReleaseObjectArrayIndex < cel; ++Dutil_ReleaseObjectArrayIndex) { ReleaseObject(prg[Dutil_ReleaseObjectArrayIndex]); } ReleaseMem(prg); } -#define ReleaseVariant(x) { ::VariantClear(&x); } -#define ReleaseNullObject(x) if (x) { (x)->Release(); x = NULL; } -#define ReleaseCertificate(x) if (x) { ::CertFreeCertificateContext(x); x=NULL; } -#define ReleaseHandle(x) if (x) { ::CloseHandle(x); x = NULL; } - - -// useful defines and macros -#define Unused(x) ((void)x) - -#ifndef countof -#if 1 -#define countof(ary) (sizeof(ary) / sizeof(ary[0])) -#else -#ifndef __cplusplus -#define countof(ary) (sizeof(ary) / sizeof(ary[0])) -#else -template static char countofVerify(void const *, T) throw() { return 0; } -template static void countofVerify(T *const, T *const *) throw() {}; -#define countof(arr) (sizeof(countofVerify(arr,&(arr))) * sizeof(arr)/sizeof(*(arr))) -#endif -#endif -#endif - -#define roundup(x, n) roundup_typed(x, n, DWORD) -#define roundup_typed(x, n, t) (((t)(x) + ((t)(n) - 1)) & ~((t)(n) - 1)) - -#define HRESULT_FROM_RPC(x) ((HRESULT) ((x) | FACILITY_RPC)) - -#ifndef MAXSIZE_T -#define MAXSIZE_T ((SIZE_T)~((SIZE_T)0)) -#endif - -typedef const BYTE* LPCBYTE; - -#define E_FILENOTFOUND HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND) -#define E_PATHNOTFOUND HRESULT_FROM_WIN32(ERROR_PATH_NOT_FOUND) -#define E_INVALIDDATA HRESULT_FROM_WIN32(ERROR_INVALID_DATA) -#define E_INVALIDSTATE HRESULT_FROM_WIN32(ERROR_INVALID_STATE) -#define E_INSUFFICIENT_BUFFER HRESULT_FROM_WIN32(ERROR_INSUFFICIENT_BUFFER) -#define E_MOREDATA HRESULT_FROM_WIN32(ERROR_MORE_DATA) -#define E_NOMOREITEMS HRESULT_FROM_WIN32(ERROR_NO_MORE_ITEMS) -#define E_NOTFOUND HRESULT_FROM_WIN32(ERROR_NOT_FOUND) -#define E_MODNOTFOUND 
HRESULT_FROM_WIN32(ERROR_MOD_NOT_FOUND) -#define E_BADCONFIGURATION HRESULT_FROM_WIN32(ERROR_BAD_CONFIGURATION) - -#define AddRefAndRelease(x) { x->AddRef(); x->Release(); } - -#define MAKEDWORD(lo, hi) ((DWORD)MAKELONG(lo, hi)) -#define MAKEQWORDVERSION(mj, mi, b, r) (((DWORD64)MAKELONG(r, b)) | (((DWORD64)MAKELONG(mi, mj)) << 32)) - -// other functions -extern "C" HRESULT DAPI LoadSystemLibrary(__in_z LPCWSTR wzModuleName, __out HMODULE *phModule); -extern "C" HRESULT DAPI LoadSystemLibraryWithPath(__in_z LPCWSTR wzModuleName, __out HMODULE *phModule, __deref_out_z_opt LPWSTR* psczPath); diff --git a/scripts/windows/installer/WiXSDK/inc/eseutil.h b/scripts/windows/installer/WiXSDK/inc/eseutil.h deleted file mode 100644 index 0ed8cc31b5..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/eseutil.h +++ /dev/null @@ -1,233 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for Extensible Storage Engine (Jetblue) helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseEseQuery(pqh) if (pqh) { EseFinishQuery(pqh); } -#define ReleaseNullEseQuery(pqh) if (pqh) { EseFinishQuery(pqh); pqh = NULL; } - -struct ESE_COLUMN_SCHEMA -{ - JET_COLUMNID jcColumn; - LPCWSTR pszName; - JET_COLTYP jcColumnType; - BOOL fKey; // If this column is part of the key of the table - BOOL fFixed; - BOOL fNullable; - BOOL fAutoIncrement; -}; - -struct ESE_TABLE_SCHEMA -{ - JET_TABLEID jtTable; - LPCWSTR pszName; - DWORD dwColumns; - ESE_COLUMN_SCHEMA *pcsColumns; -}; - -struct ESE_DATABASE_SCHEMA -{ - DWORD dwTables; - ESE_TABLE_SCHEMA *ptsTables; -}; - -enum ESE_QUERY_TYPE -{ - ESE_QUERY_EXACT, - ESE_QUERY_FROM_TOP, - ESE_QUERY_FROM_BOTTOM -}; - -typedef void* ESE_QUERY_HANDLE; - -HRESULT DAPI EseBeginSession( - __out JET_INSTANCE *pjiInstance, - __out JET_SESID *pjsSession, - __in_z LPCWSTR pszInstance, - __in_z LPCWSTR pszPath - ); -HRESULT DAPI EseEndSession( - __in JET_INSTANCE jiInstance, - __in JET_SESID jsSession - ); -HRESULT DAPI EseEnsureDatabase( - __in JET_SESID jsSession, - __in_z LPCWSTR pszFile, - __in ESE_DATABASE_SCHEMA *pdsSchema, - __out JET_DBID* pjdbDb, - __in BOOL fExclusive, - __in BOOL fReadonly - ); -HRESULT DAPI EseCloseDatabase( - __in JET_SESID jsSession, - __in JET_DBID jdbDb - ); -HRESULT DAPI EseCreateTable( - __in JET_SESID jsSession, - __in JET_DBID jdbDb, - __in_z LPCWSTR pszTable, - __out JET_TABLEID *pjtTable - ); -HRESULT DAPI EseOpenTable( - __in JET_SESID jsSession, - __in JET_DBID jdbDb, - __in_z LPCWSTR pszTable, - __out JET_TABLEID *pjtTable - ); -HRESULT DAPI EseCloseTable( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable - ); -HRESULT DAPI EseEnsureColumn( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable, - __in_z LPCWSTR pszColumnName, - __in JET_COLTYP jcColumnType, - __in ULONG ulColumnSize, - __in BOOL fFixed, - __in BOOL fNullable, 
- __out_opt JET_COLUMNID *pjcColumn - ); -HRESULT DAPI EseGetColumn( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable, - __in_z LPCWSTR pszColumnName, - __out JET_COLUMNID *pjcColumn - ); -HRESULT DAPI EseMoveCursor( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable, - __in LONG lRow - ); -HRESULT DAPI EseDeleteRow( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable - ); -HRESULT DAPI EseBeginTransaction( - __in JET_SESID jsSession - ); -HRESULT DAPI EseRollbackTransaction( - __in JET_SESID jsSession, - __in BOOL fAll - ); -HRESULT DAPI EseCommitTransaction( - __in JET_SESID jsSession - ); -HRESULT DAPI EsePrepareUpdate( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable, - __in ULONG ulPrep - ); -HRESULT DAPI EseFinishUpdate( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable, - __in BOOL fSeekToInsertedRecord - ); -HRESULT DAPI EseSetColumnBinary( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer - ); -HRESULT DAPI EseSetColumnDword( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __in DWORD dwValue - ); -HRESULT DAPI EseSetColumnBool( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __in BOOL fValue - ); -HRESULT DAPI EseSetColumnString( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __in_z LPCWSTR pszValue - ); -HRESULT DAPI EseSetColumnEmpty( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn - ); -HRESULT DAPI EseGetColumnBinary( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __deref_out_bcount(*piBuffer) BYTE** ppbBuffer, - __inout SIZE_T* piBuffer - ); -HRESULT DAPI EseGetColumnDword( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __out DWORD *pdwValue - ); -HRESULT DAPI EseGetColumnBool( - __in JET_SESID 
jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __out BOOL *pfValue - ); -HRESULT DAPI EseGetColumnString( - __in JET_SESID jsSession, - __in ESE_TABLE_SCHEMA tsTable, - __in DWORD dwColumn, - __out LPWSTR *ppszValue - ); - -// Call this once for each key column in the table -HRESULT DAPI EseBeginQuery( - __in JET_SESID jsSession, - __in JET_TABLEID jtTable, - __in ESE_QUERY_TYPE qtQueryType, - __out ESE_QUERY_HANDLE *peqhHandle - ); -HRESULT DAPI EseSetQueryColumnBinary( - __in ESE_QUERY_HANDLE eqhHandle, - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer, - __in BOOL fFinal // If this is true, all other key columns in the query will be set to "*" - ); -HRESULT DAPI EseSetQueryColumnDword( - __in ESE_QUERY_HANDLE eqhHandle, - __in DWORD dwData, - __in BOOL fFinal // If this is true, all other key columns in the query will be set to "*" - ); -HRESULT DAPI EseSetQueryColumnBool( - __in ESE_QUERY_HANDLE eqhHandle, - __in BOOL fValue, - __in BOOL fFinal // If this is true, all other key columns in the query will be set to "*" - ); -HRESULT DAPI EseSetQueryColumnString( - __in ESE_QUERY_HANDLE eqhHandle, - __in_z LPCWSTR pszString, - __in BOOL fFinal // If this is true, all other key columns in the query will be set to "*" - ); -HRESULT DAPI EseFinishQuery( - __in ESE_QUERY_HANDLE eqhHandle - ); -// Once all columns have been set up, call this and read the result -HRESULT DAPI EseRunQuery( - __in ESE_QUERY_HANDLE eqhHandle - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/fileutil.h b/scripts/windows/installer/WiXSDK/inc/fileutil.h deleted file mode 100644 index 212a1ec44b..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/fileutil.h +++ /dev/null @@ -1,229 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. 
-// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for file helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseFile(h) if (INVALID_HANDLE_VALUE != h) { ::CloseHandle(h); h = INVALID_HANDLE_VALUE; } -#define ReleaseFileHandle(h) if (INVALID_HANDLE_VALUE != h) { ::CloseHandle(h); h = INVALID_HANDLE_VALUE; } -#define ReleaseFileFindHandle(h) if (INVALID_HANDLE_VALUE != h) { ::FindClose(h); h = INVALID_HANDLE_VALUE; } - -#define FILEMAKEVERSION(major, minor, build, revision) static_cast((static_cast(major & 0xFFFF) << 48) \ - | (static_cast(minor & 0xFFFF) << 32) \ - | (static_cast(build & 0xFFFF) << 16) \ - | (static_cast(revision & 0xFFFF))) - -enum FILE_ARCHITECTURE -{ - FILE_ARCHITECTURE_UNKNOWN, - FILE_ARCHITECTURE_X86, - FILE_ARCHITECTURE_X64, - FILE_ARCHITECTURE_IA64, -}; - -enum FILE_ENCODING -{ - FILE_ENCODING_UNSPECIFIED = 0, - // TODO: distinguish between non-BOM utf-8 and ANSI in the future? 
- FILE_ENCODING_UTF8, - FILE_ENCODING_UTF8_WITH_BOM, - FILE_ENCODING_UTF16, - FILE_ENCODING_UTF16_WITH_BOM, -}; - - -LPWSTR DAPI FileFromPath( - __in_z LPCWSTR wzPath - ); -HRESULT DAPI FileResolvePath( - __in_z LPCWSTR wzRelativePath, - __out LPWSTR *ppwzFullPath - ); -HRESULT DAPI FileStripExtension( - __in_z LPCWSTR wzFileName, - __out LPWSTR *ppwzFileNameNoExtension - ); -HRESULT DAPI FileChangeExtension( - __in_z LPCWSTR wzFileName, - __in_z LPCWSTR wzNewExtension, - __out LPWSTR *ppwzFileNameNewExtension - ); -HRESULT DAPI FileAddSuffixToBaseName( - __in_z LPCWSTR wzFileName, - __in_z LPCWSTR wzSuffix, - __out_z LPWSTR* psczNewFileName - ); -HRESULT DAPI FileVersionFromString( - __in_z LPCWSTR wzVersion, - __out DWORD *pdwVerMajor, - __out DWORD* pdwVerMinor - ); -HRESULT DAPI FileVersionFromStringEx( - __in_z LPCWSTR wzVersion, - __in DWORD cchVersion, - __out DWORD64* pqwVersion - ); -HRESULT DAPI FileVersionToStringEx( - __in DWORD64 qwVersion, - __out LPWSTR* psczVersion - ); -HRESULT DAPI FileSetPointer( - __in HANDLE hFile, - __in DWORD64 dw64Move, - __out_opt DWORD64* pdw64NewPosition, - __in DWORD dwMoveMethod - ); -HRESULT DAPI FileSize( - __in_z LPCWSTR pwzFileName, - __out LONGLONG* pllSize - ); -HRESULT DAPI FileSizeByHandle( - __in HANDLE hFile, - __out LONGLONG* pllSize - ); -BOOL DAPI FileExistsEx( - __in_z LPCWSTR wzPath, - __out_opt DWORD *pdwAttributes - ); -BOOL DAPI FileExistsAfterRestart( - __in_z LPCWSTR wzPath, - __out_opt DWORD *pdwAttributes - ); -HRESULT DAPI FileRemoveFromPendingRename( - __in_z LPCWSTR wzPath - ); -HRESULT DAPI FileRead( - __deref_out_bcount_full(*pcbDest) LPBYTE* ppbDest, - __out DWORD* pcbDest, - __in_z LPCWSTR wzSrcPath - ); -HRESULT DAPI FileReadUntil( - __deref_out_bcount_full(*pcbDest) LPBYTE* ppbDest, - __out_range(<=, cbMaxRead) DWORD* pcbDest, - __in_z LPCWSTR wzSrcPath, - __in DWORD cbMaxRead - ); -HRESULT DAPI FileReadPartial( - __deref_out_bcount_full(*pcbDest) LPBYTE* ppbDest, - __out_range(<=, 
cbMaxRead) DWORD* pcbDest, - __in_z LPCWSTR wzSrcPath, - __in BOOL fSeek, - __in DWORD cbStartPosition, - __in DWORD cbMaxRead, - __in BOOL fPartialOK - ); -HRESULT DAPI FileWrite( - __in_z LPCWSTR pwzFileName, - __in DWORD dwFlagsAndAttributes, - __in_bcount_opt(cbData) LPCBYTE pbData, - __in DWORD cbData, - __out_opt HANDLE* pHandle - ); -HRESULT DAPI FileWriteHandle( - __in HANDLE hFile, - __in_bcount_opt(cbData) LPCBYTE pbData, - __in DWORD cbData - ); -HRESULT DAPI FileCopyUsingHandles( - __in HANDLE hSource, - __in HANDLE hTarget, - __in DWORD64 cbCopy, - __out_opt DWORD64* pcbCopied - ); -HRESULT DAPI FileEnsureCopy( - __in_z LPCWSTR wzSource, - __in_z LPCWSTR wzTarget, - __in BOOL fOverwrite - ); -HRESULT DAPI FileEnsureCopyWithRetry( - __in LPCWSTR wzSource, - __in LPCWSTR wzTarget, - __in BOOL fOverwrite, - __in DWORD cRetry, - __in DWORD dwWaitMilliseconds - ); -HRESULT DAPI FileEnsureMove( - __in_z LPCWSTR wzSource, - __in_z LPCWSTR wzTarget, - __in BOOL fOverwrite, - __in BOOL fAllowCopy - ); -HRESULT DAPI FileEnsureMoveWithRetry( - __in LPCWSTR wzSource, - __in LPCWSTR wzTarget, - __in BOOL fOverwrite, - __in BOOL fAllowCopy, - __in DWORD cRetry, - __in DWORD dwWaitMilliseconds - ); -HRESULT DAPI FileCreateTemp( - __in_z LPCWSTR wzPrefix, - __in_z LPCWSTR wzExtension, - __deref_opt_out_z LPWSTR* ppwzTempFile, - __out_opt HANDLE* phTempFile - ); -HRESULT DAPI FileCreateTempW( - __in_z LPCWSTR wzPrefix, - __in_z LPCWSTR wzExtension, - __deref_opt_out_z LPWSTR* ppwzTempFile, - __out_opt HANDLE* phTempFile - ); -HRESULT DAPI FileVersion( - __in_z LPCWSTR wzFilename, - __out DWORD *pdwVerMajor, - __out DWORD* pdwVerMinor - ); -HRESULT DAPI FileIsSame( - __in_z LPCWSTR wzFile1, - __in_z LPCWSTR wzFile2, - __out LPBOOL lpfSameFile - ); -HRESULT DAPI FileEnsureDelete( - __in_z LPCWSTR wzFile - ); -HRESULT DAPI FileGetTime( - __in_z LPCWSTR wzFile, - __out_opt LPFILETIME lpCreationTime, - __out_opt LPFILETIME lpLastAccessTime, - __out_opt LPFILETIME 
lpLastWriteTime - ); -HRESULT DAPI FileSetTime( - __in_z LPCWSTR wzFile, - __in_opt const FILETIME *lpCreationTime, - __in_opt const FILETIME *lpLastAccessTime, - __in_opt const FILETIME *lpLastWriteTime - ); -HRESULT DAPI FileResetTime( - __in_z LPCWSTR wzFile - ); -HRESULT DAPI FileExecutableArchitecture( - __in_z LPCWSTR wzFile, - __out FILE_ARCHITECTURE *pArchitecture - ); -HRESULT DAPI FileToString( - __in_z LPCWSTR wzFile, - __out LPWSTR *psczString, - __out_opt FILE_ENCODING *pfeEncoding - ); -HRESULT DAPI FileFromString( - __in_z LPCWSTR wzFile, - __in DWORD dwFlagsAndAttributes, - __in_z LPCWSTR sczString, - __in FILE_ENCODING feEncoding - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/gdiputil.h b/scripts/windows/installer/WiXSDK/inc/gdiputil.h deleted file mode 100644 index 0241f7b2dd..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/gdiputil.h +++ /dev/null @@ -1,41 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// GDI+ helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#pragma once - -#define ExitOnGdipFailure(g, x, s) { x = GdipHresultFromStatus(g); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, s); goto LExit; } } -#define ExitOnGdipFailure1(g, x, f, s) { x = GdipHresultFromStatus(g); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } } -#define ExitOnGdipFailure2(g, x, f, s, t) { x = GdipHresultFromStatus(g); if (FAILED(x)) { Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); goto LExit; } } - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT DAPI GdipBitmapFromResource( - __in_opt HINSTANCE hinst, - __in_z LPCSTR szId, - __out Gdiplus::Bitmap **ppBitmap - ); - -HRESULT DAPI GdipBitmapFromFile( - __in_z LPCWSTR wzFileName, - __out Gdiplus::Bitmap **ppBitmap - ); - -HRESULT DAPI GdipHresultFromStatus( - __in Gdiplus::Status gs - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/iis7util.h b/scripts/windows/installer/WiXSDK/inc/iis7util.h deleted file mode 100644 index 953c6b17ee..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/iis7util.h +++ /dev/null @@ -1,233 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// IIS7 helper functions. 
-// -//------------------------------------------------------------------------------------------------- - - -#ifdef __cplusplus -extern "C" { -#endif - -// IIS Config schema names -#define IIS_CONFIG_ADD L"add" -#define IIS_CONFIG_ALLOWED L"allowed" -#define IIS_CONFIG_APPHOST_ROOT L"MACHINE/WEBROOT/APPHOST" -#define IIS_CONFIG_APPLICATION L"application" -#define IIS_CONFIG_APPPOOL L"applicationPool" -#define IIS_CONFIG_APPPOOL_AUTO L"autoStart" -#define IIS_CONFIG_APPPOOL_SECTION L"system.applicationHost/applicationPools" -#define IIS_CONFIG_AUTOSTART L"serverAutoStart" -#define IIS_CONFIG_BINDING L"binding" -#define IIS_CONFIG_BINDINGINFO L"bindingInformation" -#define IIS_CONFIG_BINDINGS L"bindings" -#define IIS_CONFIG_DESC L"description" -#define IIS_CONFIG_EXECUTABLE L"scriptProcessor" -#define IIS_CONFIG_ENABLED L"enabled" -#define IIS_CONFIG_ENABLE32 L"enable32BitAppOnWin64" -#define IIS_CONFIG_FILEEXT L"fileExtension" -#define IIS_CONFIG_FILTER L"filter" -#define IIS_CONFIG_GROUPID L"groupId" -#define IIS_CONFIG_HEADERS L"customHeaders" -#define IIS_CONFIG_HTTPERRORS_SECTION L"system.webServer/httpErrors" -#define IIS_CONFIG_ID L"id" -#define IIS_CONFIG_ISAPI_SECTION L"system.webServer/isapiFilters" -#define IIS_CONFIG_HTTPPROTO_SECTION L"system.webServer/httpProtocol" -#define IIS_CONFIG_LOG_SECTION L"system.applicationHost/log" -#define IIS_CONFIG_LOG_UTF8 L"logInUTF8" -#define IIS_CONFIG_LIMITS L"limits" -#define IIS_CONFIG_PIPELINEMODE L"managedPipelineMode" -#define IIS_CONFIG_MANAGEDRUNTIMEVERSION L"managedRuntimeVersion" -#define IIS_CONFIG_WEBLOG L"logFile" -#define IIS_CONFIG_LOGFORMAT L"logFormat" -#define IIS_CONFIG_MIMEMAP L"mimeMap" -#define IIS_CONFIG_MIMETYPE L"mimeType" -#define IIS_CONFIG_MODULES L"modules" -#define IIS_CONFIG_NAME L"name" -#define IIS_CONFIG_PATH L"path" -#define IIS_CONFIG_PHYSPATH L"physicalPath" -#define IIS_CONFIG_PROTOCOL L"protocol" -#define IIS_CONFIG_RESTRICTION_SECTION 
L"system.webServer/security/isapiCgiRestriction" -#define IIS_CONFIG_SITE L"site" -#define IIS_CONFIG_SITE_ID L"id" -#define IIS_CONFIG_SITES_SECTION L"system.applicationHost/sites" -#define IIS_CONFIG_CONNECTTIMEOUT L"connectionTimeout" -#define IIS_CONFIG_VDIR L"virtualDirectory" -#define IIS_CONFIG_VALUE L"value" -#define IIS_CONFIG_VERBS L"verb" -#define IIS_CONFIG_WEBLIMITS_SECTION L"system.applicationHost/webLimits" -#define IIS_CONFIG_WEBLIMITS_MAXBAND L"maxGlobalBandwidth" -#define IIS_CONFIG_TRUE L"true" -#define IIS_CONFIG_FALSE L"false" -#define IIS_CONFIG_ERROR L"error" -#define IIS_CONFIG_STATUSCODE L"statusCode" -#define IIS_CONFIG_SUBSTATUS L"subStatusCode" -#define IIS_CONFIG_LANGPATH L"prefixLanguageFilePath" -#define IIS_CONFIG_RESPMODE L"responseMode" -#define IIS_CONFIG_CLEAR L"clear" -#define IIS_CONFIG_RECYCLING L"recycling" -#define IIS_CONFIG_PEROIDRESTART L"periodicRestart" -#define IIS_CONFIG_TIME L"time" -#define IIS_CONFIG_REQUESTS L"requests" -#define IIS_CONFIG_SCHEDULE L"schedule" -#define IIS_CONFIG_MEMORY L"memory" -#define IIS_CONFIG_PRIVMEMORY L"privateMemory" -#define IIS_CONFIG_PROCESSMODEL L"processModel" -#define IIS_CONFIG_IDLETIMEOUT L"idleTimeout" -#define IIS_CONFIG_QUEUELENGTH L"queueLength" -#define IIS_CONFIG_IDENITITYTYPE L"identityType" -#define IIS_CONFIG_LOCALSYSTEM L"LocalSystem" -#define IIS_CONFIG_LOCALSERVICE L"LocalService" -#define IIS_CONFIG_NETWORKSERVICE L"NetworkService" -#define IIS_CONFIG_SPECIFICUSER L"SpecificUser" -#define IIS_CONFIG_APPLICATIONPOOLIDENTITY L"ApplicationPoolIdentity" -#define IIS_CONFIG_USERNAME L"userName" -#define IIS_CONFIG_PASSWORD L"password" -#define IIS_CONFIG_CPU L"cpu" -#define IIS_CONFIG_LIMIT L"limit" -#define IIS_CONFIG_CPU_ACTION L"action" -#define IIS_CONFIG_KILLW3WP L"KillW3wp" -#define IIS_CONFIG_NOACTION L"NoAction" -#define IIS_CONFIG_RESETINTERVAL L"resetInterval" -#define IIS_CONFIG_MAXWRKPROCESSES L"maxProcesses" -#define IIS_CONFIG_HANDLERS_SECTION 
L"system.webServer/handlers" -#define IIS_CONFIG_DEFAULTDOC_SECTION L"system.webServer/defaultDocument" -#define IIS_CONFIG_ASP_SECTION L"system.webServer/asp" -#define IIS_CONFIG_SCRIPTERROR L"scriptErrorSentToBrowser" -#define IIS_CONFIG_STATICCONTENT_SECTION L"system.webServer/staticContent" -#define IIS_CONFIG_HTTPEXPIRES L"httpExpires" -#define IIS_CONFIG_MAXAGE L"cacheControlMaxAge" -#define IIS_CONFIG_CLIENTCACHE L"clientCache" -#define IIS_CONFIG_CACHECONTROLMODE L"cacheControlMode" -#define IIS_CONFIG_USEMAXAGE L"UseMaxAge" -#define IIS_CONFIG_USEEXPIRES L"UseExpires" -#define IIS_CONFIG_CACHECUST L"cacheControlCustom" -#define IIS_CONFIG_ASP_SECTION L"system.webServer/asp" -#define IIS_CONFIG_SESSION L"session" -#define IIS_CONFIG_ALLOWSTATE L"allowSessionState" -#define IIS_CONFIG_TIMEOUT L"timeout" -#define IIS_CONFIG_BUFFERING L"bufferingOn" -#define IIS_CONFIG_PARENTPATHS L"enableParentPaths" -#define IIS_CONFIG_SCRIPTLANG L"scriptLanguage" -#define IIS_CONFIG_SCRIPTTIMEOUT L"scriptTimeout" -#define IIS_CONFIG_LIMITS L"limits" -#define IIS_CONFIG_ALLOWDEBUG L"appAllowDebugging" -#define IIS_CONFIG_ALLOWCLIENTDEBUG L"appAllowClientDebug" -#define IIS_CONFIG_CERTIFICATEHASH L"certificateHash" -#define IIS_CONFIG_CERTIFICATESTORENAME L"certificateStoreName" -#define IIS_CONFIG_HTTPLOGGING_SECTION L"system.webServer/httpLogging" -#define IIS_CONFIG_DONTLOG L"dontLog" - -typedef BOOL (CALLBACK* ENUMAPHOSTELEMENTPROC)(IAppHostElement*, LPVOID); -typedef BOOL (CALLBACK* VARIANTCOMPARATORPROC)(VARIANT*, VARIANT*); - -HRESULT DAPI Iis7PutPropertyVariant( - __in IAppHostElement *pElement, - __in LPCWSTR wzPropName, - __in VARIANT vtPut - ); - -HRESULT DAPI Iis7PutPropertyInteger( - __in IAppHostElement *pElement, - __in LPCWSTR wzPropName, - __in DWORD dValue - ); - -HRESULT DAPI Iis7PutPropertyString( - __in IAppHostElement *pElement, - __in LPCWSTR wzPropName, - __in LPCWSTR wzString - ); - -HRESULT DAPI Iis7PutPropertyBool( - __in IAppHostElement *pElement, 
- __in LPCWSTR wzPropName, - __in BOOL fValue); - -HRESULT DAPI Iis7GetPropertyVariant( - __in IAppHostElement *pElement, - __in LPCWSTR wzPropName, - __in VARIANT* vtGet - ); - -HRESULT DAPI Iis7GetPropertyString( - __in IAppHostElement *pElement, - __in LPCWSTR wzPropName, - __in LPWSTR* psczGet - ); - -struct IIS7_APPHOSTELEMENTCOMPARISON -{ - LPCWSTR sczElementName; - LPCWSTR sczAttributeName; - VARIANT* pvAttributeValue; - VARIANTCOMPARATORPROC pComparator; -}; - -BOOL DAPI Iis7IsMatchingAppHostElement( - __in IAppHostElement *pElement, - __in IIS7_APPHOSTELEMENTCOMPARISON* pComparison - ); - -HRESULT DAPI Iis7FindAppHostElementString( - __in IAppHostElementCollection *pCollection, - __in LPCWSTR wzElementName, - __in LPCWSTR wzAttributeName, - __in LPCWSTR wzAttributeValue, - __out IAppHostElement** ppElement, - __out DWORD* pdwIndex - ); - -HRESULT DAPI Iis7FindAppHostElementPath( - __in IAppHostElementCollection *pCollection, - __in LPCWSTR wzElementName, - __in LPCWSTR wzAttributeName, - __in LPCWSTR wzAttributeValue, - __out IAppHostElement** ppElement, - __out DWORD* pdwIndex - ); - -HRESULT DAPI Iis7FindAppHostElementInteger( - __in IAppHostElementCollection *pCollection, - __in LPCWSTR wzElementName, - __in LPCWSTR wzAttributeName, - __in DWORD dwAttributeValue, - __out IAppHostElement** ppElement, - __out DWORD* pdwIndex - ); - -HRESULT DAPI Iis7FindAppHostElementVariant( - __in IAppHostElementCollection *pCollection, - __in LPCWSTR wzElementName, - __in LPCWSTR wzAttributeName, - __in VARIANT* pvAttributeValue, - __out IAppHostElement** ppElement, - __out DWORD* pdwIndex - ); - -HRESULT DAPI Iis7EnumAppHostElements( - __in IAppHostElementCollection *pCollection, - __in ENUMAPHOSTELEMENTPROC pCallback, - __in LPVOID pContext, - __out IAppHostElement** ppElement, - __out DWORD* pdwIndex - ); - -HRESULT DAPI Iis7FindAppHostMethod( - __in IAppHostMethodCollection *pCollection, - __in LPCWSTR wzMethodName, - __out IAppHostMethod** ppMethod, - __out DWORD* 
pdwIndex - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/inetutil.h b/scripts/windows/installer/WiXSDK/inc/inetutil.h deleted file mode 100644 index 1b875735a3..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/inetutil.h +++ /dev/null @@ -1,49 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Internet utilites. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseInternet(h) if (h) { ::InternetCloseHandle(h); h = NULL; } -#define ReleaseNullInternet(h) if (h) { ::InternetCloseHandle(h); h = NULL; } - - -// functions -HRESULT DAPI InternetGetSizeByHandle( - __in HINTERNET hiFile, - __out LONGLONG* pllSize - ); - -HRESULT DAPI InternetGetCreateTimeByHandle( - __in HINTERNET hiFile, - __out LPFILETIME pft - ); - -HRESULT DAPI InternetQueryInfoString( - __in HINTERNET h, - __in DWORD dwInfo, - __deref_out_z LPWSTR* psczValue - ); - -HRESULT DAPI InternetQueryInfoNumber( - __in HINTERNET h, - __in DWORD dwInfo, - __out LONG* plInfo - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/iniutil.h b/scripts/windows/installer/WiXSDK/inc/iniutil.h deleted file mode 100644 index b2dfee9681..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/iniutil.h +++ /dev/null @@ -1,83 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). 
-// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Ini/cfg file helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseIni(ih) if (ih) { IniUninitialize(ih); } -#define ReleaseNullIni(ih) if (ih) { IniUninitialize(ih); ih = NULL; } - -typedef void* INI_HANDLE; -typedef const void* C_INI_HANDLE; - -extern const int INI_HANDLE_BYTES; - -struct INI_VALUE -{ - LPCWSTR wzName; - LPCWSTR wzValue; - - DWORD dwLineNumber; -}; - -HRESULT DAPI IniInitialize( - __out_bcount(INI_HANDLE_BYTES) INI_HANDLE* piHandle - ); -void DAPI IniUninitialize( - __in_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle - ); -HRESULT DAPI IniSetOpenTag( - __inout_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __in_z_opt LPCWSTR wzOpenTagPrefix, - __in_z_opt LPCWSTR wzOpenTagPostfix - ); -HRESULT DAPI IniSetValueStyle( - __inout_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __in_z_opt LPCWSTR wzValuePrefix, - __in_z_opt LPCWSTR wzValueSeparator - ); -HRESULT DAPI IniSetCommentStyle( - __inout_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __in_z_opt LPCWSTR wzLinePrefix - ); -HRESULT DAPI IniParse( - __inout_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __in LPCWSTR wzPath, - __out_opt FILE_ENCODING *pfeEncodingFound - ); -HRESULT DAPI IniGetValueList( - __in_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __deref_out_ecount_opt(pcValues) INI_VALUE** prgivValues, - __out DWORD *pcValues - ); -HRESULT DAPI IniGetValue( - __in_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __in LPCWSTR wzValueName, - __deref_out_z LPWSTR* psczValue - ); -HRESULT DAPI IniSetValue( - __in_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __in LPCWSTR wzValueName, - __in_z_opt LPCWSTR wzValue - ); -HRESULT DAPI IniWriteFile( - __in_bcount(INI_HANDLE_BYTES) INI_HANDLE piHandle, - __in_z_opt LPCWSTR 
wzPath, - __in FILE_ENCODING feOverrideEncoding - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/jsonutil.h b/scripts/windows/installer/WiXSDK/inc/jsonutil.h deleted file mode 100644 index 56c1481753..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/jsonutil.h +++ /dev/null @@ -1,122 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// JavaScript Object Notation (JSON) helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -enum JSON_TOKEN -{ - JSON_TOKEN_NONE, - JSON_TOKEN_ARRAY_START, - JSON_TOKEN_ARRAY_VALUE, - JSON_TOKEN_ARRAY_END, - JSON_TOKEN_OBJECT_START, - JSON_TOKEN_OBJECT_KEY, - JSON_TOKEN_OBJECT_VALUE, - JSON_TOKEN_OBJECT_END, - JSON_TOKEN_VALUE, -}; - -typedef struct _JSON_VALUE -{ -} JSON_VALUE; - -typedef struct _JSON_READER -{ - CRITICAL_SECTION cs; - LPWSTR sczJson; - - LPWSTR pwz; - JSON_TOKEN token; -} JSON_READER; - -typedef struct _JSON_WRITER -{ - CRITICAL_SECTION cs; - LPWSTR sczJson; - - JSON_TOKEN* rgTokenStack; - DWORD cTokens; - DWORD cMaxTokens; -} JSON_WRITER; - - -DAPI_(HRESULT) JsonInitializeReader( - __in_z LPCWSTR wzJson, - __in JSON_READER* pReader - ); - -DAPI_(void) JsonUninitializeReader( - __in JSON_READER* pReader - ); - -DAPI_(HRESULT) JsonReadNext( - __in JSON_READER* pReader, - __out JSON_TOKEN* pToken, - __out JSON_VALUE* pValue - ); - -DAPI_(HRESULT) JsonReadValue( - __in JSON_READER* pReader, - __in JSON_VALUE* pValue - ); - -DAPI_(HRESULT) JsonInitializeWriter( - __in JSON_WRITER* pWriter - ); - -DAPI_(void) JsonUninitializeWriter( - 
__in JSON_WRITER* pWriter - ); - -DAPI_(HRESULT) JsonWriteBool( - __in JSON_WRITER* pWriter, - __in BOOL fValue - ); - -DAPI_(HRESULT) JsonWriteNumber( - __in JSON_WRITER* pWriter, - __in DWORD dwValue - ); - -DAPI_(HRESULT) JsonWriteString( - __in JSON_WRITER* pWriter, - __in_z LPCWSTR wzValue - ); - -DAPI_(HRESULT) JsonWriteArrayStart( - __in JSON_WRITER* pWriter - ); - -DAPI_(HRESULT) JsonWriteArrayEnd( - __in JSON_WRITER* pWriter - ); - -DAPI_(HRESULT) JsonWriteObjectStart( - __in JSON_WRITER* pWriter - ); - -DAPI_(HRESULT) JsonWriteObjectKey( - __in JSON_WRITER* pWriter, - __in_z LPCWSTR wzKey - ); - -DAPI_(HRESULT) JsonWriteObjectEnd( - __in JSON_WRITER* pWriter - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/locutil.h b/scripts/windows/installer/WiXSDK/inc/locutil.h deleted file mode 100644 index 5046d5703b..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/locutil.h +++ /dev/null @@ -1,120 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for localization helper functions. 
-// -//------------------------------------------------------------------------------------------------- -#pragma once - -#ifdef __cplusplus -extern "C" { -#endif - -struct LOC_STRING -{ - LPWSTR wzId; - LPWSTR wzText; - BOOL bOverridable; -}; - -const int LOC_CONTROL_NOT_SET = INT_MAX; - -struct LOC_CONTROL -{ - LPWSTR wzControl; - int nX; - int nY; - int nWidth; - int nHeight; - LPWSTR wzText; -}; - -const int WIX_LOCALIZATION_LANGUAGE_NOT_SET = INT_MAX; - -struct WIX_LOCALIZATION -{ - DWORD dwLangId; - - DWORD cLocStrings; - LOC_STRING* rgLocStrings; - - DWORD cLocControls; - LOC_CONTROL* rgLocControls; -}; - -/******************************************************************** - LocProbeForFile - Searches for a localization file on disk. - -*******************************************************************/ -HRESULT DAPI LocProbeForFile( - __in_z LPCWSTR wzBasePath, - __in_z LPCWSTR wzLocFileName, - __in_z_opt LPCWSTR wzLanguage, - __inout LPWSTR* psczPath - ); - -/******************************************************************** - LocLoadFromFile - Loads a localization file - -*******************************************************************/ -HRESULT DAPI LocLoadFromFile( - __in_z LPCWSTR wzWxlFile, - __out WIX_LOCALIZATION** ppWixLoc - ); - -/******************************************************************** - LocLoadFromResource - loads a localization file from a module's data - resource. - - NOTE: The resource data must be UTF-8 encoded. 
-*******************************************************************/ -HRESULT DAPI LocLoadFromResource( - __in HMODULE hModule, - __in_z LPCSTR szResource, - __out WIX_LOCALIZATION** ppWixLoc - ); - -/******************************************************************** - LocFree - free memory allocated when loading a localization file - -*******************************************************************/ -void DAPI LocFree( - __in_opt WIX_LOCALIZATION* pWixLoc - ); - -/******************************************************************** - LocLocalizeString - replace any #(loc.id) in a string with the - correct sub string -*******************************************************************/ -HRESULT DAPI LocLocalizeString( - __in const WIX_LOCALIZATION* pWixLoc, - __inout LPWSTR* psczInput - ); - -/******************************************************************** - LocGetControl - returns a control's localization information -*******************************************************************/ -HRESULT DAPI LocGetControl( - __in const WIX_LOCALIZATION* pWixLoc, - __in_z LPCWSTR wzId, - __out LOC_CONTROL** ppLocControl - ); - -/******************************************************************** - LocGetString - returns a string's localization information -*******************************************************************/ -extern "C" HRESULT DAPI LocGetString( - __in const WIX_LOCALIZATION* pWixLoc, - __in_z LPCWSTR wzId, - __out LOC_STRING** ppLocString - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/logutil.h b/scripts/windows/installer/WiXSDK/inc/logutil.h deleted file mode 100644 index 73ce81f7e1..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/logutil.h +++ /dev/null @@ -1,210 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. 
-// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Logging helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define LogExitOnFailure(x, i, f) if (FAILED(x)) { LogErrorId(x, i, NULL, NULL, NULL); ExitTrace(x, f); goto LExit; } -#define LogExitOnFailure1(x, i, f, s) if (FAILED(x)) { LogErrorId(x, i, s, NULL, NULL); ExitTrace1(x, f, s); goto LExit; } -#define LogExitOnFailure2(x, i, f, s, t) if (FAILED(x)) { LogErrorId(x, i, s, t, NULL); ExitTrace2(x, f, s, t); goto LExit; } -#define LogExitOnFailure3(x, i, f, s, t, u) if (FAILED(x)) { LogErrorId(x, i, s, t, u); ExitTrace3(x, f, s, t, u); goto LExit; } - -#define LogExitOnRootFailure(x, i, f) if (FAILED(x)) { LogErrorId(x, i, NULL, NULL, NULL); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace(x, f); goto LExit; } -#define LogExitOnRootFailure1(x, i, f, s) if (FAILED(x)) { LogErrorId(x, i, s, NULL, NULL); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace1(x, f, s); goto LExit; } -#define LogExitOnRootFailure2(x, i, f, s, t) if (FAILED(x)) { LogErrorId(x, i, s, t, NULL); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace2(x, f, s, t); goto LExit; } -#define LogExitOnRootFailure3(x, i, f, s, t, u) if (FAILED(x)) { LogErrorId(x, i, s, t, u); Dutil_RootFailure(__FILE__, __LINE__, x); ExitTrace3(x, f, s, t, u); goto LExit; } - -typedef HRESULT (DAPI *PFN_LOGSTRINGWORKRAW)( - __in_z LPCSTR szString, - __in_opt LPVOID pvContext - ); - -// enums - -// structs - -// functions -BOOL DAPI IsLogInitialized(); - -BOOL DAPI IsLogOpen(); - -void DAPI LogInitialize( - __in HMODULE hModule - ); - -HRESULT DAPI LogOpen( - __in_z_opt LPCWSTR wzDirectory, - __in_z LPCWSTR wzLog, - __in_z_opt LPCWSTR wzPostfix, - __in_z_opt LPCWSTR wzExt, - __in BOOL 
fAppend, - __in BOOL fHeader, - __out_z_opt LPWSTR* psczLogPath - ); - -void DAPI LogDisable(); - -void DAPI LogRedirect( - __in_opt PFN_LOGSTRINGWORKRAW vpfLogStringWorkRaw, - __in_opt LPVOID pvContext - ); - -HRESULT DAPI LogRename( - __in_z LPCWSTR wzNewPath - ); - -void DAPI LogClose( - __in BOOL fFooter - ); - -void DAPI LogUninitialize( - __in BOOL fFooter - ); - -BOOL DAPI LogIsOpen(); - -HRESULT DAPI LogSetSpecialParams( - __in_z_opt LPCWSTR wzSpecialBeginLine, - __in_z_opt LPCWSTR wzSpecialAfterTimeStamp, - __in_z_opt LPCWSTR wzSpecialEndLine - ); - -REPORT_LEVEL DAPI LogSetLevel( - __in REPORT_LEVEL rl, - __in BOOL fLogChange - ); - -REPORT_LEVEL DAPI LogGetLevel(); - -HRESULT DAPI LogGetPath( - __out_ecount_z(cchLogPath) LPWSTR pwzLogPath, - __in DWORD cchLogPath - ); - -HANDLE DAPI LogGetHandle(); - -HRESULT DAPIV LogString( - __in REPORT_LEVEL rl, - __in_z __format_string LPCSTR szFormat, - ... - ); - -HRESULT DAPI LogStringArgs( - __in REPORT_LEVEL rl, - __in_z __format_string LPCSTR szFormat, - __in va_list args - ); - -HRESULT DAPIV LogStringLine( - __in REPORT_LEVEL rl, - __in_z __format_string LPCSTR szFormat, - ... - ); - -HRESULT DAPI LogStringLineArgs( - __in REPORT_LEVEL rl, - __in_z __format_string LPCSTR szFormat, - __in va_list args - ); - -HRESULT DAPI LogIdModuleArgs( - __in REPORT_LEVEL rl, - __in DWORD dwLogId, - __in_opt HMODULE hModule, - __in va_list args - ); - -/* - * Wraps LogIdModuleArgs, so inline to save the function call - */ - -inline HRESULT LogId( - __in REPORT_LEVEL rl, - __in DWORD dwLogId, - ... 
- ) -{ - HRESULT hr = S_OK; - va_list args; - - va_start(args, dwLogId); - hr = LogIdModuleArgs(rl, dwLogId, NULL, args); - va_end(args); - - return hr; -} - - -/* - * Wraps LogIdModuleArgs, so inline to save the function call - */ - -inline HRESULT LogIdArgs( - __in REPORT_LEVEL rl, - __in DWORD dwLogId, - __in va_list args - ) -{ - return LogIdModuleArgs(rl, dwLogId, NULL, args); -} - -HRESULT DAPIV LogErrorString( - __in HRESULT hrError, - __in_z __format_string LPCSTR szFormat, - ... - ); - -HRESULT DAPI LogErrorStringArgs( - __in HRESULT hrError, - __in_z __format_string LPCSTR szFormat, - __in va_list args - ); - -HRESULT DAPI LogErrorIdModule( - __in HRESULT hrError, - __in DWORD dwLogId, - __in_opt HMODULE hModule, - __in_z_opt LPCWSTR wzString1, - __in_z_opt LPCWSTR wzString2, - __in_z_opt LPCWSTR wzString3 - ); - -inline HRESULT LogErrorId( - __in HRESULT hrError, - __in DWORD dwLogId, - __in_z_opt LPCWSTR wzString1, - __in_z_opt LPCWSTR wzString2, - __in_z_opt LPCWSTR wzString3 - ) -{ - return LogErrorIdModule(hrError, dwLogId, NULL, wzString1, wzString2, wzString3); -} - -HRESULT DAPI LogHeader(); - -HRESULT DAPI LogFooter(); - -HRESULT LogStringWorkRaw( - __in_z LPCSTR szLogData - ); - -// begin the switch of LogXXX to LogStringXXX -#define Log LogString -#define LogLine LogStringLine - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/memutil.h b/scripts/windows/installer/WiXSDK/inc/memutil.h deleted file mode 100644 index d3dea7f8c9..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/memutil.h +++ /dev/null @@ -1,59 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. 
-// -// -// -// Header for memory helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseMem(p) if (p) { MemFree(p); } -#define ReleaseNullMem(p) if (p) { MemFree(p); p = NULL; } - -HRESULT DAPI MemInitialize(); -void DAPI MemUninitialize(); - -LPVOID DAPI MemAlloc( - __in SIZE_T cbSize, - __in BOOL fZero - ); -LPVOID DAPI MemReAlloc( - __in LPVOID pv, - __in SIZE_T cbSize, - __in BOOL fZero - ); -HRESULT DAPI MemEnsureArraySize( - __deref_out_bcount(cArray * cbArrayType) LPVOID* ppvArray, - __in DWORD cArray, - __in SIZE_T cbArrayType, - __in DWORD dwGrowthCount - ); -HRESULT DAPI MemInsertIntoArray( - __deref_out_bcount((cExistingArray + cNumInsertItems) * cbArrayType) LPVOID* ppvArray, - __in DWORD dwInsertIndex, - __in DWORD cNumInsertItems, - __in DWORD cExistingArray, - __in SIZE_T cbArrayType, - __in DWORD dwGrowthCount - ); - -HRESULT DAPI MemFree( - __in LPVOID pv - ); -SIZE_T DAPI MemSize( - __in LPCVOID pv - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/metautil.h b/scripts/windows/installer/WiXSDK/inc/metautil.h deleted file mode 100644 index b4df17d33c..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/metautil.h +++ /dev/null @@ -1,62 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// IIS Metabase helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#include -#include -#include -#include - -#ifdef __cplusplus -extern "C" { -#endif - -// structs - -// prototypes -HRESULT DAPI MetaFindWebBase( - __in IMSAdminBaseW* piMetabase, - __in_z LPCWSTR wzIP, - __in int iPort, - __in_z LPCWSTR wzHeader, - __in BOOL fSecure, - __out_ecount(cchWebBase) LPWSTR wzWebBase, - __in DWORD cchWebBase - ); -HRESULT DAPI MetaFindFreeWebBase( - __in IMSAdminBaseW* piMetabase, - __out_ecount(cchWebBase) LPWSTR wzWebBase, - __in DWORD cchWebBase - ); - -HRESULT DAPI MetaOpenKey( - __in IMSAdminBaseW* piMetabase, - __in METADATA_HANDLE mhKey, - __in_z LPCWSTR wzKey, - __in DWORD dwAccess, - __in DWORD cRetries, - __out METADATA_HANDLE* pmh - ); -HRESULT DAPI MetaGetValue( - __in IMSAdminBaseW* piMetabase, - __in METADATA_HANDLE mhKey, - __in_z LPCWSTR wzKey, - __inout METADATA_RECORD* pmr - ); -void DAPI MetaFreeValue( - __in METADATA_RECORD* pmr - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/osutil.h b/scripts/windows/installer/WiXSDK/inc/osutil.h deleted file mode 100644 index b5de193814..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/osutil.h +++ /dev/null @@ -1,49 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Operating system helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -enum OS_VERSION -{ - OS_VERSION_UNKNOWN, - OS_VERSION_WINNT, - OS_VERSION_WIN2000, - OS_VERSION_WINXP, - OS_VERSION_WIN2003, - OS_VERSION_VISTA, - OS_VERSION_WIN2008, - OS_VERSION_WIN7, - OS_VERSION_WIN2008_R2, - OS_VERSION_FUTURE -}; - -void DAPI OsGetVersion( - __out OS_VERSION* pVersion, - __out DWORD* pdwServicePack - ); -HRESULT DAPI OsCouldRunPrivileged( - __out BOOL* pfPrivileged - ); -HRESULT DAPI OsIsRunningPrivileged( - __out BOOL* pfPrivileged - ); -HRESULT DAPI OsIsUacEnabled( - __out BOOL* pfUacEnabled - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/pathutil.h b/scripts/windows/installer/WiXSDK/inc/pathutil.h deleted file mode 100644 index 0956cd2596..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/pathutil.h +++ /dev/null @@ -1,213 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for path helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -enum PATH_EXPAND -{ - PATH_EXPAND_ENVIRONMENT = 0x0001, - PATH_EXPAND_FULLPATH = 0x0002, -}; - - -/******************************************************************* - PathCommandLineAppend - appends a command line argument on to a - string such that ::CommandLineToArgv() will shred them correctly - (i.e. quote arguments with spaces in them). 
-********************************************************************/ -DAPI_(HRESULT) PathCommandLineAppend( - __deref_out_z LPWSTR* psczCommandLine, - __in_z LPCWSTR wzArgument - ); - -/******************************************************************* - PathFile - returns a pointer to the file part of the path. -********************************************************************/ -DAPI_(LPWSTR) PathFile( - __in_z LPCWSTR wzPath - ); - -/******************************************************************* - PathExtension - returns a pointer to the extension part of the path - (including the dot). -********************************************************************/ -DAPI_(LPCWSTR) PathExtension( - __in_z LPCWSTR wzPath - ); - -/******************************************************************* - PathGetDirectory - extracts the directory from a path. -********************************************************************/ -DAPI_(HRESULT) PathGetDirectory( - __in_z LPCWSTR wzPath, - __out LPWSTR *psczDirectory - ); - -/******************************************************************* - PathExpand - gets the full path to a file resolving environment - variables along the way. -********************************************************************/ -DAPI_(HRESULT) PathExpand( - __out LPWSTR *psczFullPath, - __in_z LPCWSTR wzRelativePath, - __in DWORD dwResolveFlags - ); - -/******************************************************************* - PathPrefix - prefixes a full path with \\?\ or \\?\UNC as - appropriate. -********************************************************************/ -DAPI_(HRESULT) PathPrefix( - __inout LPWSTR *psczFullPath - ); - -/******************************************************************* - PathFixedBackslashTerminate - appends a \ if path does not have it - already, but fails if the buffer is - insufficient. 
-********************************************************************/ -DAPI_(HRESULT) PathFixedBackslashTerminate( - __inout_ecount_z(cchPath) LPWSTR wzPath, - __in DWORD_PTR cchPath - ); - -/******************************************************************* - PathBackslashTerminate - appends a \ if path does not have it - already. -********************************************************************/ -DAPI_(HRESULT) PathBackslashTerminate( - __inout LPWSTR* psczPath - ); - -/******************************************************************* - PathForCurrentProcess - gets the full path to the currently executing - process or (optionally) a module inside the process. -********************************************************************/ -DAPI_(HRESULT) PathForCurrentProcess( - __inout LPWSTR *psczFullPath, - __in_opt HMODULE hModule - ); - -/******************************************************************* - PathRelativeToModule - gets the name of a file in the same - directory as the current process or (optionally) a module inside - the process -********************************************************************/ -DAPI_(HRESULT) PathRelativeToModule( - __inout LPWSTR *psczFullPath, - __in_opt LPCWSTR wzFileName, - __in_opt HMODULE hModule - ); - -/******************************************************************* - PathCreateTempFile - - Note: if wzDirectory is null, ::GetTempPath() will be used instead. - if wzFileNameTemplate is null, GetTempFileName() will be used instead. 
-*******************************************************************/ -DAPI_(HRESULT) PathCreateTempFile( - __in_opt LPCWSTR wzDirectory, - __in_opt __format_string LPCWSTR wzFileNameTemplate, - __in DWORD dwUniqueCount, - __in DWORD dwFileAttributes, - __out_opt LPWSTR* psczTempFile, - __out_opt HANDLE* phTempFile - ); - -/******************************************************************* - PathCreateTimeBasedTempFile - creates an empty temp file based on current - system time -********************************************************************/ -DAPI_(HRESULT) PathCreateTimeBasedTempFile( - __in_z_opt LPCWSTR wzDirectory, - __in_z LPCWSTR wzPrefix, - __in_z_opt LPCWSTR wzPostfix, - __in_z LPCWSTR wzExtension, - __deref_opt_out_z LPWSTR* psczTempFile, - __out_opt HANDLE* phTempFile - ); - -/******************************************************************* - PathCreateTempDirectory - - Note: if wzDirectory is null, ::GetTempPath() will be used instead. -*******************************************************************/ -DAPI_(HRESULT) PathCreateTempDirectory( - __in_opt LPCWSTR wzDirectory, - __in __format_string LPCWSTR wzDirectoryNameTemplate, - __in DWORD dwUniqueCount, - __out LPWSTR* psczTempDirectory - ); - -/******************************************************************* - PathGetKnownFolder - returns the path to a well-known shell folder - -*******************************************************************/ -DAPI_(HRESULT) PathGetKnownFolder( - __in int csidl, - __out LPWSTR* psczKnownFolder - ); - -/******************************************************************* - PathIsAbsolute - returns true if the path is absolute; false - otherwise. 
-*******************************************************************/ -DAPI_(BOOL) PathIsAbsolute( - __in_z LPCWSTR wzPath - ); - -/******************************************************************* - PathConcat - like .NET's Path.Combine, lets you build up a path - one piece -- file or directory -- at a time. -*******************************************************************/ -DAPI_(HRESULT) PathConcat( - __in_opt LPCWSTR wzPath1, - __in_opt LPCWSTR wzPath2, - __deref_out_z LPWSTR* psczCombined - ); - -/******************************************************************* - PathEnsureQuoted - ensures that a path is quoted; optionally, - this function also terminates a directory with a backslash - if it is not already. -*******************************************************************/ -DAPI_(HRESULT) PathEnsureQuoted( - __inout LPWSTR* ppszPath, - __in BOOL fDirectory - ); - -/******************************************************************* - PathCompare - compares the fully expanded path of the two paths using - ::CompareStringW(). -*******************************************************************/ -DAPI_(HRESULT) PathCompare( - __in_z LPCWSTR wzPath1, - __in_z LPCWSTR wzPath2, - __out int* pnResult - ); - -/******************************************************************* - PathCompress - sets the compression state on an existing file or - directory. A no-op on file systems that don't - support compression. 
-*******************************************************************/ -DAPI_(HRESULT) PathCompress( - __in_z LPCWSTR wzPath - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/perfutil.h b/scripts/windows/installer/WiXSDK/inc/perfutil.h deleted file mode 100644 index 0245db351a..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/perfutil.h +++ /dev/null @@ -1,34 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Performance helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -// structs - - -// functions -void DAPI PerfInitialize( - ); -void DAPI PerfClickTime( - __out_opt LARGE_INTEGER* pliElapsed - ); -double DAPI PerfConvertToSeconds( - __in const LARGE_INTEGER* pli - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/polcutil.h b/scripts/windows/installer/WiXSDK/inc/polcutil.h deleted file mode 100644 index 95f2c4a735..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/polcutil.h +++ /dev/null @@ -1,47 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for Policy utility functions. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -/******************************************************************** -PolcReadNumber - reads a number from policy. - -NOTE: S_FALSE returned if policy not set. -NOTE: out is set to default on S_FALSE or any error. -********************************************************************/ -HRESULT DAPI PolcReadNumber( - __in_z LPCWSTR wzPolicyPath, - __in_z LPCWSTR wzPolicyName, - __in DWORD dwDefault, - __out DWORD* pdw - ); - -/******************************************************************** -PolcReadString - reads a string from policy. - -NOTE: S_FALSE returned if policy not set. -NOTE: out is set to default on S_FALSE or any error. -********************************************************************/ -HRESULT DAPI PolcReadString( - __in_z LPCWSTR wzPolicyPath, - __in_z LPCWSTR wzPolicyName, - __in_z_opt LPCWSTR wzDefault, - __deref_out_z LPWSTR* pscz - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/procutil.h b/scripts/windows/installer/WiXSDK/inc/procutil.h deleted file mode 100644 index 7b7e5a2d2a..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/procutil.h +++ /dev/null @@ -1,85 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for process helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -// structs -typedef struct _PROC_FILESYSTEMREDIRECTION -{ - BOOL fDisabled; - LPVOID pvRevertState; -} PROC_FILESYSTEMREDIRECTION; - -HRESULT DAPI ProcElevated( - __in HANDLE hProcess, - __out BOOL* pfElevated - ); - -HRESULT DAPI ProcWow64( - __in HANDLE hProcess, - __out BOOL* pfWow64 - ); -HRESULT DAPI ProcDisableWowFileSystemRedirection( - __in PROC_FILESYSTEMREDIRECTION* pfsr - ); -HRESULT DAPI ProcRevertWowFileSystemRedirection( - __in PROC_FILESYSTEMREDIRECTION* pfsr - ); - -HRESULT DAPI ProcExec( - __in_z LPCWSTR wzExecutablePath, - __in_z_opt LPCWSTR wzCommandLine, - __in int nCmdShow, - __out HANDLE *phProcess - ); -HRESULT DAPI ProcExecute( - __in_z LPWSTR wzCommand, - __out HANDLE *phProcess, - __out_opt HANDLE *phChildStdIn, - __out_opt HANDLE *phChildStdOutErr - ); -HRESULT DAPI ProcWaitForCompletion( - __in HANDLE hProcess, - __in DWORD dwTimeout, - __out DWORD *pReturnCode - ); -HRESULT DAPI ProcWaitForIds( - __in_ecount(cProcessIds) const DWORD* pdwProcessIds, - __in DWORD cProcessIds, - __in DWORD dwMilliseconds - ); -HRESULT DAPI ProcCloseIds( - __in_ecount(cProcessIds) const DWORD* pdwProcessIds, - __in DWORD cProcessIds - ); - -// following code in proc2utl.cpp due to dependency on PSAPI.DLL. -HRESULT DAPI ProcFindAllIdsFromExeName( - __in_z LPCWSTR wzExeName, - __out DWORD** ppdwProcessIds, - __out DWORD* pcProcessIds - ); - -// following code in proc3utl.cpp due to dependency on Wtsapi32.DLL. 
-HRESULT DAPI ProcExecuteAsInteractiveUser( - __in_z LPCWSTR wzExecutablePath, - __in_z LPCWSTR wzCommand, - __out HANDLE *phProcess - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/regutil.h b/scripts/windows/installer/WiXSDK/inc/regutil.h deleted file mode 100644 index 97d132fa0d..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/regutil.h +++ /dev/null @@ -1,240 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Registry helper functions. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#ifdef __cplusplus -extern "C" { -#endif - - -#define ReleaseRegKey(h) if (h) { ::RegCloseKey(h); h = NULL; } - -enum REG_KEY_BITNESS -{ - REG_KEY_DEFAULT = 0, - REG_KEY_32BIT = 1, - REG_KEY_64BIT = 2 -}; - -typedef LSTATUS (APIENTRY *PFN_REGCREATEKEYEXW)( - __in HKEY hKey, - __in LPCWSTR lpSubKey, - __reserved DWORD Reserved, - __in_opt LPWSTR lpClass, - __in DWORD dwOptions, - __in REGSAM samDesired, - __in_opt CONST LPSECURITY_ATTRIBUTES lpSecurityAttributes, - __out PHKEY phkResult, - __out_opt LPDWORD lpdwDisposition - ); -typedef LSTATUS (APIENTRY *PFN_REGOPENKEYEXW)( - __in HKEY hKey, - __in_opt LPCWSTR lpSubKey, - __reserved DWORD ulOptions, - __in REGSAM samDesired, - __out PHKEY phkResult - ); -typedef LSTATUS (APIENTRY *PFN_REGDELETEKEYEXW)( - __in HKEY hKey, - __in LPCWSTR lpSubKey, - __in REGSAM samDesired, - __reserved DWORD Reserved - ); -typedef LSTATUS (APIENTRY *PFN_REGDELETEKEYW)( - __in HKEY hKey, - __in LPCWSTR lpSubKey - ); -typedef LSTATUS (APIENTRY *PFN_REGENUMKEYEXW)( - __in HKEY hKey, - __in DWORD dwIndex, - __out 
LPWSTR lpName, - __inout LPDWORD lpcName, - __reserved LPDWORD lpReserved, - __inout LPWSTR lpClass, - __inout_opt LPDWORD lpcClass, - __out_opt PFILETIME lpftLastWriteTime - ); -typedef LSTATUS (APIENTRY *PFN_REGENUMVALUEW)( - __in HKEY hKey, - __in DWORD dwIndex, - __out LPWSTR lpValueName, - __inout LPDWORD lpcchValueName, - __reserved LPDWORD lpReserved, - __out_opt LPDWORD lpType, - __out_opt LPBYTE lpData, - __out_opt LPDWORD lpcbData - ); -typedef LSTATUS (APIENTRY *PFN_REGQUERYINFOKEYW)( - __in HKEY hKey, - __out LPWSTR lpClass, - __inout_opt LPDWORD lpcClass, - __reserved LPDWORD lpReserved, - __out_opt LPDWORD lpcSubKeys, - __out_opt LPDWORD lpcMaxSubKeyLen, - __out_opt LPDWORD lpcMaxClassLen, - __out_opt LPDWORD lpcValues, - __out_opt LPDWORD lpcMaxValueNameLen, - __out_opt LPDWORD lpcMaxValueLen, - __out_opt LPDWORD lpcbSecurityDescriptor, - __out_opt PFILETIME lpftLastWriteTime - ); -typedef LSTATUS (APIENTRY *PFN_REGQUERYVALUEEXW)( - __in HKEY hKey, - __in_opt LPCWSTR lpValueName, - __reserved LPDWORD lpReserved, - __out_opt LPDWORD lpType, - __out_bcount_part_opt(*lpcbData, *lpcbData) __out_data_source(REGISTRY) LPBYTE lpData, - __inout_opt LPDWORD lpcbData - ); -typedef LSTATUS (APIENTRY *PFN_REGSETVALUEEXW)( - __in HKEY hKey, - __in_opt LPCWSTR lpValueName, - __reserved DWORD Reserved, - __in DWORD dwType, - __in_bcount_opt(cbData) CONST BYTE* lpData, - __in DWORD cbData - ); -typedef LSTATUS (APIENTRY *PFN_REGDELETEVALUEW)( - __in HKEY hKey, - __in_opt LPCWSTR lpValueName - ); - -HRESULT DAPI RegInitialize(); -void DAPI RegUninitialize(); - -void DAPI RegFunctionOverride( - __in_opt PFN_REGCREATEKEYEXW pfnRegCreateKeyExW, - __in_opt PFN_REGOPENKEYEXW pfnRegOpenKeyExW, - __in_opt PFN_REGDELETEKEYEXW pfnRegDeleteKeyExW, - __in_opt PFN_REGENUMKEYEXW pfnRegEnumKeyExW, - __in_opt PFN_REGENUMVALUEW pfnRegEnumValueW, - __in_opt PFN_REGQUERYINFOKEYW pfnRegQueryInfoKeyW, - __in_opt PFN_REGQUERYVALUEEXW pfnRegQueryValueExW, - __in_opt PFN_REGSETVALUEEXW 
pfnRegSetValueExW, - __in_opt PFN_REGDELETEVALUEW pfnRegDeleteValueW - ); -HRESULT DAPI RegCreate( - __in HKEY hkRoot, - __in_z LPCWSTR wzSubKey, - __in DWORD dwAccess, - __out HKEY* phk - ); -HRESULT DAPI RegCreateEx( - __in HKEY hkRoot, - __in_z LPCWSTR wzSubKey, - __in DWORD dwAccess, - __in BOOL fVolatile, - __in_opt SECURITY_ATTRIBUTES* pSecurityAttributes, - __out HKEY* phk, - __out_opt BOOL* pfCreated - ); -HRESULT DAPI RegOpen( - __in HKEY hkRoot, - __in_z LPCWSTR wzSubKey, - __in DWORD dwAccess, - __out HKEY* phk - ); -HRESULT DAPI RegDelete( - __in HKEY hkRoot, - __in_z LPCWSTR wzSubKey, - __in REG_KEY_BITNESS kbKeyBitness, - __in BOOL fDeleteTree - ); -HRESULT DAPI RegKeyEnum( - __in HKEY hk, - __in DWORD dwIndex, - __deref_out_z LPWSTR* psczKey - ); -HRESULT DAPI RegValueEnum( - __in HKEY hk, - __in DWORD dwIndex, - __deref_out_z LPWSTR* psczName, - __out_opt DWORD *pdwType - ); -HRESULT DAPI RegReadBinary( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __deref_out_bcount_opt(*pcbBuffer) BYTE** ppbBuffer, - __out SIZE_T *pcbBuffer - ); -HRESULT DAPI RegReadString( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __deref_out_z LPWSTR* psczValue - ); -HRESULT DAPI RegReadStringArray( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __deref_out_ecount_opt(pcStrings) LPWSTR** prgsczStrings, - __out DWORD *pcStrings - ); -HRESULT DAPI RegReadVersion( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __out DWORD64* pdw64Version - ); -HRESULT DAPI RegReadNumber( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __out DWORD* pdwValue - ); -HRESULT DAPI RegReadQword( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __out DWORD64* pqwValue - ); -HRESULT DAPI RegWriteBinary( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __in_bcount(cbBuffer) const BYTE *pbBuffer, - __in DWORD cbBuffer - ); -HRESULT DAPI RegWriteString( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __in_z_opt LPCWSTR wzValue - ); -HRESULT DAPI RegWriteStringArray( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, 
- __in_ecount(cValues) LPWSTR *rgwzStrings, - __in DWORD cStrings - ); -HRESULT DAPI RegWriteStringFormatted( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __in __format_string LPCWSTR szFormat, - ... - ); -HRESULT DAPI RegWriteNumber( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __in DWORD dwValue - ); -HRESULT DAPI RegWriteQword( - __in HKEY hk, - __in_z_opt LPCWSTR wzName, - __in DWORD64 qwValue - ); -HRESULT DAPI RegQueryKey( - __in HKEY hk, - __out_opt DWORD* pcSubKeys, - __out_opt DWORD* pcValues - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/resrutil.h b/scripts/windows/installer/WiXSDK/inc/resrutil.h deleted file mode 100644 index 9accf8e3a3..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/resrutil.h +++ /dev/null @@ -1,55 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Resource read helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT DAPI ResGetStringLangId( - __in_opt LPCWSTR wzPath, - __in UINT uID, - __out WORD *pwLangId - ); - -HRESULT DAPI ResReadString( - __in HINSTANCE hinst, - __in UINT uID, - __deref_out_z LPWSTR* ppwzString - ); - -HRESULT DAPI ResReadStringAnsi( - __in HINSTANCE hinst, - __in UINT uID, - __deref_out_z LPSTR* ppszString - ); - -HRESULT DAPI ResReadData( - __in_opt HINSTANCE hinst, - __in_z LPCSTR szDataName, - __deref_out_bcount(*pcb) PVOID *ppv, - __out DWORD *pcb - ); - -HRESULT DAPI ResExportDataToFile( - __in_z LPCSTR szDataName, - __in_z LPCWSTR wzTargetFile, - __in DWORD dwCreationDisposition - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/reswutil.h b/scripts/windows/installer/WiXSDK/inc/reswutil.h deleted file mode 100644 index e8933dfae6..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/reswutil.h +++ /dev/null @@ -1,43 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Resource writer helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT DAPI ResWriteString( - __in_z LPCWSTR wzResourceFile, - __in DWORD dwDataId, - __in_z LPCWSTR wzData, - __in WORD wLangId - ); - -HRESULT DAPI ResWriteData( - __in_z LPCWSTR wzResourceFile, - __in_z LPCSTR szDataName, - __in PVOID pData, - __in DWORD cbData - ); - -HRESULT DAPI ResImportDataFromFile( - __in_z LPCWSTR wzTargetFile, - __in_z LPCWSTR wzSourceFile, - __in_z LPCSTR szDataName - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/rexutil.h b/scripts/windows/installer/WiXSDK/inc/rexutil.h deleted file mode 100644 index 771e0bae24..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/rexutil.h +++ /dev/null @@ -1,64 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. 
-// -// -// -// Resource Cabinet Extract Utilities -// -//------------------------------------------------------------------------------------------------- - -#include -#include - -#ifdef __cplusplus -extern "C" { -#endif - -// defines -#define FILETABLESIZE 40 - -// structs -struct MEM_FILE -{ - LPCBYTE vpStart; - UINT uiCurrent; - UINT uiLength; -}; - -enum FAKE_FILE_TYPE { NORMAL_FILE, MEMORY_FILE }; - -typedef HRESULT (*REX_CALLBACK_PROGRESS)(BOOL fBeginFile, LPCWSTR wzFileId, LPVOID pvContext); -typedef VOID (*REX_CALLBACK_WRITE)(UINT cb); - - -struct FAKE_FILE // used __in internal file table -{ - BOOL fUsed; - FAKE_FILE_TYPE fftType; - MEM_FILE mfFile; // State for memory file - HANDLE hFile; // Handle for disk file -}; - -// functions -HRESULT RexInitialize(); -void RexUninitialize(); - -HRESULT RexExtract( - __in_z LPCSTR szResource, - __in_z LPCWSTR wzExtractId, - __in_z LPCWSTR wzExtractDir, - __in_z LPCWSTR wzExtractName, - __in REX_CALLBACK_PROGRESS pfnProgress, - __in REX_CALLBACK_WRITE pfnWrite, - __in LPVOID pvContext - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/rmutil.h b/scripts/windows/installer/WiXSDK/inc/rmutil.h deleted file mode 100644 index beec457d86..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/rmutil.h +++ /dev/null @@ -1,56 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for Restart Manager utility functions. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -typedef struct _RMU_SESSION *PRMU_SESSION; - -HRESULT DAPI RmuJoinSession( - __out PRMU_SESSION *ppSession, - __in_z LPCWSTR wzSessionKey - ); - -HRESULT DAPI RmuAddFile( - __in PRMU_SESSION pSession, - __in_z LPCWSTR wzPath - ); - -HRESULT DAPI RmuAddProcessById( - __in PRMU_SESSION pSession, - __in DWORD dwProcessId - ); - -HRESULT DAPI RmuAddProcessesByName( - __in PRMU_SESSION pSession, - __in_z LPCWSTR wzProcessName - ); - -HRESULT DAPI RmuAddService( - __in PRMU_SESSION pSession, - __in_z LPCWSTR wzServiceName - ); - -HRESULT DAPI RmuRegisterResources( - __in PRMU_SESSION pSession - ); - -HRESULT DAPI RmuEndSession( - __in PRMU_SESSION pSession - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/rssutil.h b/scripts/windows/installer/WiXSDK/inc/rssutil.h deleted file mode 100644 index 9df757c7f8..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/rssutil.h +++ /dev/null @@ -1,101 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// RSS helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseRssChannel(p) if (p) { RssFreeChannel(p); } -#define ReleaseNullRssChannel(p) if (p) { RssFreeChannel(p); p = NULL; } - - -struct RSS_UNKNOWN_ATTRIBUTE -{ - LPWSTR wzNamespace; - LPWSTR wzAttribute; - LPWSTR wzValue; - - RSS_UNKNOWN_ATTRIBUTE* pNext; -}; - -struct RSS_UNKNOWN_ELEMENT -{ - LPWSTR wzNamespace; - LPWSTR wzElement; - LPWSTR wzValue; - - RSS_UNKNOWN_ATTRIBUTE* pAttributes; - RSS_UNKNOWN_ELEMENT* pNext; -}; - -struct RSS_ITEM -{ - LPWSTR wzTitle; - LPWSTR wzLink; - LPWSTR wzDescription; - - LPWSTR wzGuid; - FILETIME ftPublished; - - LPWSTR wzEnclosureUrl; - DWORD dwEnclosureSize; - LPWSTR wzEnclosureType; - - RSS_UNKNOWN_ELEMENT* pUnknownElements; -}; - -struct RSS_CHANNEL -{ - LPWSTR wzTitle; - LPWSTR wzLink; - LPWSTR wzDescription; - DWORD dwTimeToLive; - - RSS_UNKNOWN_ELEMENT* pUnknownElements; - - DWORD cItems; - RSS_ITEM rgItems[1]; -}; - -HRESULT DAPI RssInitialize( - ); - -void DAPI RssUninitialize( - ); - -HRESULT DAPI RssParseFromString( - __in_z LPCWSTR wzRssString, - __out RSS_CHANNEL **ppChannel - ); - -HRESULT DAPI RssParseFromFile( - __in_z LPCWSTR wzRssFile, - __out RSS_CHANNEL **ppChannel - ); - -// Adding this until we have the updated specstrings.h -#ifndef __in_xcount -#define __in_xcount(size) -#endif - -void DAPI RssFreeChannel( - __in_xcount(pChannel->cItems) RSS_CHANNEL *pChannel - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/sceutil.h b/scripts/windows/installer/WiXSDK/inc/sceutil.h deleted file mode 100644 index 9a3fd52099..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/sceutil.h +++ /dev/null @@ -1,271 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. 
-// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for SQL Compact Edition helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#include -#include -#include - -typedef void* SCE_DATABASE_HANDLE; -typedef void* SCE_ROW_HANDLE; -typedef void* SCE_QUERY_HANDLE; -typedef void* SCE_QUERY_RESULTS_HANDLE; - -extern const int SCE_ROW_HANDLE_BYTES; -extern const int SCE_QUERY_HANDLE_BYTES; -extern const int SCE_QUERY_RESULTS_HANDLE_BYTES; - -#define ReleaseSceRow(prrh) if (prrh) { SceFreeRow(prrh); } -#define ReleaseNullSceRow(prrh) if (prrh) { SceFreeRow(prrh); prrh = NULL; } -#define ReleaseSceQuery(pqh) if (pqh) { SceFreeQuery(pqh); } -#define ReleaseNullSceQuery(pqh) if (pqh) { SceFreeQuery(pqh); pqh = NULL; } -#define ReleaseSceQueryResults(pqh) if (pqh) { SceFreeQueryResults(pqh); } -#define ReleaseNullSceQueryResults(pqh) if (pqh) { SceFreeQueryResults(pqh); pqh = NULL; } - -struct SCE_COLUMN_SCHEMA -{ - LPCWSTR wzName; - DBTYPE dbtColumnType; - DWORD dwLength; - BOOL fPrimaryKey; // If this column is the primary key - BOOL fNullable; - BOOL fAutoIncrement; - - LPWSTR wzRelationName; - DWORD dwForeignKeyTable; - DWORD dwForeignKeyColumn; -}; - -struct SCE_INDEX_SCHEMA -{ - LPWSTR wzName; - - DWORD *rgColumns; - DWORD cColumns; -}; - -struct SCE_TABLE_SCHEMA -{ - LPCWSTR wzName; - DWORD cColumns; - SCE_COLUMN_SCHEMA *rgColumns; - - DWORD cIndexes; - SCE_INDEX_SCHEMA *rgIndexes; - - // Internal to SCEUtil - consumers shouldn't access or modify - // TODO: enforce / hide in a handle of some sort? 
- IRowset *pIRowset; - IRowsetChange *pIRowsetChange; -}; - -struct SCE_DATABASE_SCHEMA -{ - DWORD cTables; - SCE_TABLE_SCHEMA *rgTables; -}; - -struct SCE_DATABASE -{ - SCE_DATABASE_HANDLE sdbHandle; - SCE_DATABASE_SCHEMA *pdsSchema; -}; - -HRESULT DAPI SceCreateDatabase( - __in_z LPCWSTR sczFile, - __deref_out SCE_DATABASE **ppDatabase - ); -HRESULT DAPI SceOpenDatabase( - __in_z LPCWSTR sczFile, - __in LPCWSTR wzSchemaType, - __in DWORD dwExpectedVersion, - __deref_out SCE_DATABASE **ppDatabase, - __in BOOL fReadOnly - ); -HRESULT DAPI SceEnsureDatabase( - __in_z LPCWSTR sczFile, - __in LPCWSTR wzSchemaType, - __in DWORD dwExpectedVersion, - __in SCE_DATABASE_SCHEMA *pdsSchema, - __deref_out SCE_DATABASE **ppDatabase - ); -HRESULT DAPI SceIsTableEmpty( - __in SCE_DATABASE *pDatabase, - __in DWORD dwTableIndex, - __out BOOL *pfEmpty - ); -HRESULT DAPI SceGetFirstRow( - __in SCE_DATABASE *pDatabase, - __in DWORD dwTableIndex, - __deref_out_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE *pRowHandle - ); -HRESULT DAPI SceGetNextRow( - __in SCE_DATABASE *pDatabase, - __in DWORD dwTableIndex, - __deref_out_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE *pRowHandle - ); -HRESULT DAPI SceBeginTransaction( - __in SCE_DATABASE *pDatabase - ); -HRESULT DAPI SceCommitTransaction( - __in SCE_DATABASE *pDatabase - ); -HRESULT DAPI SceRollbackTransaction( - __in SCE_DATABASE *pDatabase - ); -HRESULT DAPI SceDeleteRow( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE *pRowHandle - ); -HRESULT DAPI ScePrepareInsert( - __in SCE_DATABASE *pDatabase, - __in DWORD dwTableIndex, - __deref_out_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE *pRowHandle - ); -HRESULT DAPI SceFinishUpdate( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle - ); -HRESULT DAPI SceSetColumnBinary( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in DWORD dwColumnIndex, - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer - ); -HRESULT DAPI SceSetColumnDword( - 
__in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in DWORD dwColumnIndex, - __in const DWORD dwValue - ); -HRESULT DAPI SceSetColumnQword( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in DWORD dwColumnIndex, - __in const DWORD64 qwValue - ); -HRESULT DAPI SceSetColumnBool( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in DWORD dwColumnIndex, - __in const BOOL fValue - ); -HRESULT DAPI SceSetColumnString( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in DWORD dwColumnIndex, - __in_z_opt LPCWSTR wzValue - ); -HRESULT DAPI SceSetColumnSystemTime( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in DWORD dwColumnIndex, - __in const SYSTEMTIME *pst - ); -HRESULT DAPI SceSetColumnEmpty( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in DWORD dwColumnIndex - ); -HRESULT DAPI SceGetColumnBinary( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowReadHandle, - __in DWORD dwColumnIndex, - __out_opt BYTE **ppbBuffer, - __inout SIZE_T *pcbBuffer - ); -HRESULT DAPI SceGetColumnDword( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowReadHandle, - __in DWORD dwColumnIndex, - __out DWORD *pdwValue - ); -HRESULT DAPI SceGetColumnQword( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowReadHandle, - __in DWORD dwColumnIndex, - __out DWORD64 *pqwValue - ); -HRESULT DAPI SceGetColumnBool( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowReadHandle, - __in DWORD dwColumnIndex, - __out BOOL *pfValue - ); -HRESULT DAPI SceGetColumnString( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowReadHandle, - __in DWORD dwColumnIndex, - __out_z LPWSTR *psczValue - ); -HRESULT DAPI SceGetColumnSystemTime( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowReadHandle, - __in DWORD dwColumnIndex, - __out SYSTEMTIME *pst - ); -HRESULT DAPI SceBeginQuery( - __in SCE_DATABASE *pDatabase, - __in DWORD dwTableIndex, - __in DWORD dwIndex, - 
__deref_out_bcount(SCE_QUERY_HANDLE_BYTES) SCE_QUERY_HANDLE *psqhHandle - ); -HRESULT DAPI SceSetQueryColumnBinary( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE sqhHandle, - __in_bcount(cbBuffer) const BYTE* pbBuffer, - __in SIZE_T cbBuffer - ); -HRESULT DAPI SceSetQueryColumnDword( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE sqhHandle, - __in const DWORD dwValue - ); -HRESULT DAPI SceSetQueryColumnQword( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE sqhHandle, - __in const DWORD64 qwValue - ); -HRESULT DAPI SceSetQueryColumnBool( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE sqhHandle, - __in const BOOL fValue - ); -HRESULT DAPI SceSetQueryColumnString( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE sqhHandle, - __in_z_opt LPCWSTR wzString - ); -HRESULT DAPI SceSetQueryColumnSystemTime( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowHandle, - __in const SYSTEMTIME *pst - ); -HRESULT DAPI SceSetQueryColumnEmpty( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE sqhHandle - ); -HRESULT DAPI SceRunQueryExact( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE *psqhHandle, - __deref_out_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE *pRowHandle - ); -HRESULT DAPI SceRunQueryRange( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE *psqhHandle, - __deref_out_bcount(SCE_QUERY_RESULTS_BYTES) SCE_QUERY_RESULTS_HANDLE *psqrhHandle - ); -HRESULT DAPI SceGetNextResultRow( - __in_bcount(SCE_QUERY_RESULTS_BYTES) SCE_QUERY_RESULTS_HANDLE sqrhHandle, - __deref_out_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE *pRowHandle - ); -void DAPI SceCloseTable( - __in SCE_TABLE_SCHEMA *pTable - ); -HRESULT DAPI SceCloseDatabase( - __in SCE_DATABASE *pDatabase - ); -void DAPI SceFreeRow( - __in_bcount(SCE_ROW_HANDLE_BYTES) SCE_ROW_HANDLE rowReadHandle - ); -void DAPI SceFreeQuery( - __in_bcount(SCE_QUERY_BYTES) SCE_QUERY_HANDLE sqhHandle - ); -void DAPI SceFreeQueryResults( - __in_bcount(SCE_QUERY_RESULTS_BYTES) SCE_QUERY_RESULTS_HANDLE sqrhHandle - ); - -#ifdef __cplusplus -} 
-#endif diff --git a/scripts/windows/installer/WiXSDK/inc/shelutil.h b/scripts/windows/installer/WiXSDK/inc/shelutil.h deleted file mode 100644 index 462dcacbbe..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/shelutil.h +++ /dev/null @@ -1,57 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for shell helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifndef REFKNOWNFOLDERID -#define REFKNOWNFOLDERID REFGUID -#endif - -#ifdef __cplusplus -extern "C" { -#endif - -typedef BOOL (STDAPICALLTYPE *PFN_SHELLEXECUTEEXW)( - __inout LPSHELLEXECUTEINFOW lpExecInfo - ); - -void DAPI ShelFunctionOverride( - __in_opt PFN_SHELLEXECUTEEXW pfnShellExecuteExW - ); -HRESULT DAPI ShelExec( - __in_z LPCWSTR wzTargetPath, - __in_opt LPCWSTR wzParameters, - __in_opt LPCWSTR wzVerb, - __in_opt LPCWSTR wzWorkingDirectory, - __in int nShowCmd, - __in_opt HWND hwndParent, - __out_opt HANDLE* phProcess - ); -HRESULT DAPI ShelExecUnelevated( - __in_z LPCWSTR wzTargetPath, - __in_z_opt LPCWSTR wzParameters, - __in_z_opt LPCWSTR wzVerb, - __in_z_opt LPCWSTR wzWorkingDirectory, - __in int nShowCmd - ); -HRESULT DAPI ShelGetFolder( - __out_z LPWSTR* psczFolderPath, - __in int csidlFolder - ); -HRESULT DAPI ShelGetKnownFolder( - __out_z LPWSTR* psczFolderPath, - __in REFKNOWNFOLDERID rfidFolder - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/sqlutil.h b/scripts/windows/installer/WiXSDK/inc/sqlutil.h deleted file mode 100644 index eadcb31c61..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/sqlutil.h +++ /dev/null @@ -1,146 +0,0 @@ 
-#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// SQL helper functions. -// -//------------------------------------------------------------------------------------------------- - -#include -#include -#include - - -#ifdef __cplusplus -extern "C" { -#endif - -// Adding this until the SQL annotations are published to specstrings.h -#ifndef __sql_command -#define __sql_command -#endif - -// structs -struct SQL_FILESPEC -{ - WCHAR wzName[MAX_PATH]; - WCHAR wzFilename[MAX_PATH]; - WCHAR wzSize[MAX_PATH]; - WCHAR wzMaxSize[MAX_PATH]; - WCHAR wzGrow[MAX_PATH]; -}; - - -// functions -HRESULT DAPI SqlConnectDatabase( - __in_z LPCWSTR wzServer, - __in_z LPCWSTR wzInstance, - __in_z LPCWSTR wzDatabase, - __in BOOL fIntegratedAuth, - __in_z LPCWSTR wzUser, - __in_z LPCWSTR wzPassword, - __out IDBCreateSession** ppidbSession - ); -HRESULT DAPI SqlStartTransaction( - __in IDBCreateSession* pidbSession, - __out IDBCreateCommand** ppidbCommand, - __out ITransaction** ppit - ); -HRESULT DAPI SqlEndTransaction( - __in ITransaction* pit, - __in BOOL fCommit - ); -HRESULT DAPI SqlDatabaseExists( - __in_z LPCWSTR wzServer, - __in_z LPCWSTR wzInstance, - __in_z LPCWSTR wzDatabase, - __in BOOL fIntegratedAuth, - __in_z LPCWSTR wzUser, - __in_z LPCWSTR wzPassword, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlSessionDatabaseExists( - __in IDBCreateSession* pidbSession, - __in_z LPCWSTR wzDatabase, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlDatabaseEnsureExists( - __in_z LPCWSTR wzServer, - __in_z LPCWSTR wzInstance, - __in_z LPCWSTR wzDatabase, - __in BOOL fIntegratedAuth, - __in_z LPCWSTR wzUser, - __in_z LPCWSTR wzPassword, - 
__in_opt const SQL_FILESPEC* psfDatabase, - __in_opt const SQL_FILESPEC* psfLog, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlSessionDatabaseEnsureExists( - __in IDBCreateSession* pidbSession, - __in_z LPCWSTR wzDatabase, - __in_opt const SQL_FILESPEC* psfDatabase, - __in_opt const SQL_FILESPEC* psfLog, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlCreateDatabase( - __in_z LPCWSTR wzServer, - __in_z LPCWSTR wzInstance, - __in_z LPCWSTR wzDatabase, - __in BOOL fIntegratedAuth, - __in_z LPCWSTR wzUser, - __in_z LPCWSTR wzPassword, - __in_opt const SQL_FILESPEC* psfDatabase, - __in_opt const SQL_FILESPEC* psfLog, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlSessionCreateDatabase( - __in IDBCreateSession* pidbSession, - __in_z LPCWSTR wzDatabase, - __in_opt const SQL_FILESPEC* psfDatabase, - __in_opt const SQL_FILESPEC* psfLog, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlDropDatabase( - __in_z LPCWSTR wzServer, - __in_z LPCWSTR wzInstance, - __in_z LPCWSTR wzDatabase, - __in BOOL fIntegratedAuth, - __in_z LPCWSTR wzUser, - __in_z LPCWSTR wzPassword, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlSessionDropDatabase( - __in IDBCreateSession* pidbSession, - __in_z LPCWSTR wzDatabase, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlSessionExecuteQuery( - __in IDBCreateSession* pidbSession, - __in __sql_command LPCWSTR wzSql, - __out_opt IRowset** ppirs, - __out_opt DBROWCOUNT* pcRows, - __out_opt BSTR* pbstrErrorDescription - ); -HRESULT DAPI SqlCommandExecuteQuery( - __in IDBCreateCommand* pidbCommand, - __in __sql_command LPCWSTR wzSql, - __out IRowset** ppirs, - __out DBROWCOUNT* pcRows - ); -HRESULT DAPI SqlGetErrorInfo( - __in IUnknown* pObjectWithError, - __in REFIID IID_InterfaceWithError, - __in DWORD dwLocaleId, - __out_opt BSTR* pbstrErrorSource, - __out_opt BSTR* pbstrErrorDescription - ); - -#ifdef __cplusplus -} -#endif diff --git 
a/scripts/windows/installer/WiXSDK/inc/srputil.h b/scripts/windows/installer/WiXSDK/inc/srputil.h deleted file mode 100644 index 952b2470f1..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/srputil.h +++ /dev/null @@ -1,57 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// System restore point helper functions. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#ifdef __cplusplus -extern "C" { -#endif - - -enum SRP_ACTION -{ - SRP_ACTION_UNKNOWN, - SRP_ACTION_UNINSTALL, - SRP_ACTION_INSTALL, - SRP_ACTION_MODIFY, -}; - - -/******************************************************************** - SrpInitialize - initializes system restore point functionality. - -*******************************************************************/ -DAPI_(HRESULT) SrpInitialize( - __in BOOL fInitializeComSecurity - ); - -/******************************************************************** - SrpUninitialize - uninitializes system restore point functionality. - -*******************************************************************/ -DAPI_(void) SrpUninitialize(); - -/******************************************************************** - SrpCreateRestorePoint - creates a system restore point. 
- -*******************************************************************/ -DAPI_(HRESULT) SrpCreateRestorePoint( - __in_z LPCWSTR wzApplicationName, - __in SRP_ACTION action - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/strutil.h b/scripts/windows/installer/WiXSDK/inc/strutil.h deleted file mode 100644 index e8dc1acba9..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/strutil.h +++ /dev/null @@ -1,279 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for string helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseStr(pwz) if (pwz) { StrFree(pwz); } -#define ReleaseNullStr(pwz) if (pwz) { StrFree(pwz); pwz = NULL; } -#define ReleaseBSTR(bstr) if (bstr) { ::SysFreeString(bstr); } -#define ReleaseNullBSTR(bstr) if (bstr) { ::SysFreeString(bstr); bstr = NULL; } -#define ReleaseStrArray(rg, c) { if (rg) { StrArrayFree(rg, c); } } -#define ReleaseNullStrArray(rg, c) { if (rg) { StrArrayFree(rg, c); c = 0; rg = NULL; } } - -#define DeclareConstBSTR(bstr_const, wz) const WCHAR bstr_const[] = { 0x00, 0x00, sizeof(wz)-sizeof(WCHAR), 0x00, wz } -#define UseConstBSTR(bstr_const) const_cast(bstr_const + 4) - -HRESULT DAPI StrAlloc( - __deref_out_ecount_part(cch, 0) LPWSTR* ppwz, - __in DWORD_PTR cch - ); -HRESULT DAPI StrTrimCapacity( - __deref_out_z LPWSTR* ppwz - ); -HRESULT DAPI StrTrimWhitespace( - __deref_out_z LPWSTR* ppwz, - __in_z LPCWSTR wzSource - ); -HRESULT DAPI StrAnsiAlloc( - __deref_out_ecount_part(cch, 0) LPSTR* ppz, - __in DWORD_PTR cch - ); 
-HRESULT DAPI StrAnsiTrimCapacity( - __deref_out_z LPSTR* ppz - ); -HRESULT DAPI StrAnsiTrimWhitespace( - __deref_out_z LPSTR* ppz, - __in_z LPCSTR szSource - ); -HRESULT DAPI StrAllocString( - __deref_out_ecount_z(cchSource+1) LPWSTR* ppwz, - __in_z LPCWSTR wzSource, - __in DWORD_PTR cchSource - ); -HRESULT DAPI StrAnsiAllocString( - __deref_out_ecount_z(cchSource+1) LPSTR* ppsz, - __in_z LPCWSTR wzSource, - __in DWORD_PTR cchSource, - __in UINT uiCodepage - ); -HRESULT DAPI StrAllocStringAnsi( - __deref_out_ecount_z(cchSource+1) LPWSTR* ppwz, - __in_z LPCSTR szSource, - __in DWORD_PTR cchSource, - __in UINT uiCodepage - ); -HRESULT DAPI StrAnsiAllocStringAnsi( - __deref_out_ecount_z(cchSource+1) LPSTR* ppsz, - __in_z LPCSTR szSource, - __in DWORD_PTR cchSource - ); -HRESULT DAPI StrAllocPrefix( - __deref_out_z LPWSTR* ppwz, - __in_z LPCWSTR wzPrefix, - __in DWORD_PTR cchPrefix - ); -HRESULT DAPI StrAllocConcat( - __deref_out_z LPWSTR* ppwz, - __in_z LPCWSTR wzSource, - __in DWORD_PTR cchSource - ); -HRESULT DAPI StrAnsiAllocConcat( - __deref_out_z LPSTR* ppz, - __in_z LPCSTR pzSource, - __in DWORD_PTR cchSource - ); -HRESULT __cdecl StrAllocFormatted( - __deref_out_z LPWSTR* ppwz, - __in __format_string LPCWSTR wzFormat, - ... - ); -HRESULT __cdecl StrAnsiAllocFormatted( - __deref_out_z LPSTR* ppsz, - __in __format_string LPCSTR szFormat, - ... - ); -HRESULT DAPI StrAllocFormattedArgs( - __deref_out_z LPWSTR* ppwz, - __in __format_string LPCWSTR wzFormat, - __in va_list args - ); -HRESULT DAPI StrAnsiAllocFormattedArgs( - __deref_out_z LPSTR* ppsz, - __in __format_string LPCSTR szFormat, - __in va_list args - ); -HRESULT DAPI StrAllocFromError( - __inout LPWSTR *ppwzMessage, - __in HRESULT hrError, - __in_opt HMODULE hModule, - ... 
- ); - -HRESULT DAPI StrMaxLength( - __in LPCVOID p, - __out DWORD_PTR* pcch - ); -HRESULT DAPI StrSize( - __in LPCVOID p, - __out DWORD_PTR* pcb - ); - -HRESULT DAPI StrFree( - __in LPVOID p - ); - - -HRESULT DAPI StrReplaceStringAll( - __inout LPWSTR* ppwzOriginal, - __in_z LPCWSTR wzOldSubString, - __in_z LPCWSTR wzNewSubString - ); -HRESULT DAPI StrReplaceString( - __inout LPWSTR* ppwzOriginal, - __inout DWORD* pdwStartIndex, - __in_z LPCWSTR wzOldSubString, - __in_z LPCWSTR wzNewSubString - ); - -HRESULT DAPI StrHexEncode( - __in_ecount(cbSource) const BYTE* pbSource, - __in DWORD_PTR cbSource, - __out_ecount(cchDest) LPWSTR wzDest, - __in DWORD_PTR cchDest - ); -HRESULT DAPI StrHexDecode( - __in_z LPCWSTR wzSource, - __out_bcount(cbDest) BYTE* pbDest, - __in DWORD_PTR cbDest - ); -HRESULT DAPI StrAllocHexDecode( - __in_z LPCWSTR wzSource, - __out_bcount(*pcbDest) BYTE** ppbDest, - __out_opt DWORD* pcbDest - ); - -HRESULT DAPI StrAllocBase85Encode( - __in_bcount_opt(cbSource) const BYTE* pbSource, - __in DWORD_PTR cbSource, - __deref_out_z LPWSTR* pwzDest - ); -HRESULT DAPI StrAllocBase85Decode( - __in_z LPCWSTR wzSource, - __deref_out_bcount(*pcbDest) BYTE** hbDest, - __out DWORD_PTR* pcbDest - ); - -HRESULT DAPI MultiSzLen( - __in_ecount(*pcch) __nullnullterminated LPCWSTR pwzMultiSz, - __out DWORD_PTR* pcch - ); -HRESULT DAPI MultiSzPrepend( - __deref_inout_ecount(*pcchMultiSz) __nullnullterminated LPWSTR* ppwzMultiSz, - __inout_opt DWORD_PTR *pcchMultiSz, - __in __nullnullterminated LPCWSTR pwzInsert - ); -HRESULT DAPI MultiSzFindSubstring( - __in __nullnullterminated LPCWSTR pwzMultiSz, - __in __nullnullterminated LPCWSTR pwzSubstring, - __out_opt DWORD_PTR* pdwIndex, - __deref_opt_out_z LPCWSTR* ppwzFoundIn - ); -HRESULT DAPI MultiSzFindString( - __in __nullnullterminated LPCWSTR pwzMultiSz, - __in __nullnullterminated LPCWSTR pwzString, - __out_opt DWORD_PTR* pdwIndex, - __deref_opt_out __nullnullterminated LPCWSTR* ppwzFound - ); -HRESULT DAPI 
MultiSzRemoveString( - __deref_inout __nullnullterminated LPWSTR* ppwzMultiSz, - __in DWORD_PTR dwIndex - ); -HRESULT DAPI MultiSzInsertString( - __deref_inout_z LPWSTR* ppwzMultiSz, - __inout_opt DWORD_PTR *pcchMultiSz, - __in DWORD_PTR dwIndex, - __in_z LPCWSTR pwzInsert - ); -HRESULT DAPI MultiSzReplaceString( - __deref_inout __nullnullterminated LPWSTR* ppwzMultiSz, - __in DWORD_PTR dwIndex, - __in_z LPCWSTR pwzString - ); - -LPCWSTR wcsistr( - __in_z LPCWSTR wzString, - __in_z LPCWSTR wzCharSet - ); - -HRESULT DAPI StrStringToInt16( - __in_z LPCWSTR wzIn, - __in DWORD cchIn, - __out SHORT* psOut - ); -HRESULT DAPI StrStringToUInt16( - __in_z LPCWSTR wzIn, - __in DWORD cchIn, - __out USHORT* pusOut - ); -HRESULT DAPI StrStringToInt32( - __in_z LPCWSTR wzIn, - __in DWORD cchIn, - __out INT* piOut - ); -HRESULT DAPI StrStringToUInt32( - __in_z LPCWSTR wzIn, - __in DWORD cchIn, - __out UINT* puiOut - ); -HRESULT DAPI StrStringToInt64( - __in_z LPCWSTR wzIn, - __in DWORD cchIn, - __out LONGLONG* pllOut - ); -HRESULT DAPI StrStringToUInt64( - __in_z LPCWSTR wzIn, - __in DWORD cchIn, - __out ULONGLONG* pullOut - ); -void DAPI StrStringToUpper( - __inout_z LPWSTR wzIn - ); -void DAPI StrStringToLower( - __inout_z LPWSTR wzIn - ); -HRESULT DAPI StrAllocStringToUpperInvariant( - __deref_out_z LPWSTR* pscz, - __in_z LPCWSTR wzSource, - __in int cchSource - ); -HRESULT DAPI StrAllocStringToLowerInvariant( - __deref_out_z LPWSTR* pscz, - __in_z LPCWSTR wzSource, - __in int cchSource - ); - -HRESULT DAPI StrArrayAllocString( - __deref_inout_ecount_opt(*pcStrArray) LPWSTR **prgsczStrArray, - __inout LPUINT pcStrArray, - __in_z LPCWSTR wzSource, - __in DWORD_PTR cchSource - ); - -HRESULT DAPI StrArrayFree( - __in_ecount(cStrArray) LPWSTR *rgsczStrArray, - __in UINT cStrArray - ); - -HRESULT DAPI StrSplitAllocArray( - __deref_inout_ecount_opt(*pcStrArray) LPWSTR **prgsczStrArray, - __inout LPUINT pcStrArray, - __in_z LPCWSTR wzSource, - __in_z LPCWSTR wzDelim - ); - -#ifdef 
__cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/svcutil.h b/scripts/windows/installer/WiXSDK/inc/svcutil.h deleted file mode 100644 index d7d87ffb7f..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/svcutil.h +++ /dev/null @@ -1,31 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for Windows service helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - - -#define ReleaseServiceHandle(h) if (h) { ::CloseServiceHandle(h); h = NULL; } - - -HRESULT DAPI SvcQueryConfig( - __in SC_HANDLE sch, - __out QUERY_SERVICE_CONFIGW** ppConfig - ); - - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/thmutil.h b/scripts/windows/installer/WiXSDK/inc/thmutil.h deleted file mode 100644 index 4ef2814429..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/thmutil.h +++ /dev/null @@ -1,559 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Theme helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define ReleaseTheme(p) if (p) { ThemeFree(p); p = NULL; } - -enum THEME_CONTROL_DATA -{ - THEME_CONTROL_DATA_HOVER = 1, -}; - -enum THEME_CONTROL_TYPE -{ - THEME_CONTROL_TYPE_UNKNOWN, - THEME_CONTROL_TYPE_BILLBOARD, - THEME_CONTROL_TYPE_BUTTON, - THEME_CONTROL_TYPE_CHECKBOX, - THEME_CONTROL_TYPE_EDITBOX, - THEME_CONTROL_TYPE_HYPERLINK, - THEME_CONTROL_TYPE_HYPERTEXT, - THEME_CONTROL_TYPE_IMAGE, - THEME_CONTROL_TYPE_PROGRESSBAR, - THEME_CONTROL_TYPE_RICHEDIT, - THEME_CONTROL_TYPE_STATIC, - THEME_CONTROL_TYPE_TEXT, - THEME_CONTROL_TYPE_LISTVIEW, - THEME_CONTROL_TYPE_TREEVIEW, - THEME_CONTROL_TYPE_TAB, -}; - - -struct THEME_BILLBOARD -{ - HBITMAP hImage; - LPWSTR sczUrl; -}; - - -struct THEME_COLUMN -{ - LPWSTR pszName; - UINT uStringId; - int nWidth; -}; - - -struct THEME_TAB -{ - LPWSTR pszName; - UINT uStringId; -}; - -// THEME_ASSIGN_CONTROL_ID - Used to apply a specific id to a named control (usually -// to set the WM_COMMAND). -struct THEME_ASSIGN_CONTROL_ID -{ - WORD wId; // id to apply to control - LPCWSTR wzName; // name of control to match -}; - -const DWORD THEME_FIRST_ASSIGN_CONTROL_ID = 1024; // Recommended first control id to be assigned. - -struct THEME_CONTROL -{ - THEME_CONTROL_TYPE type; - - WORD wId; - WORD wPageId; - - LPWSTR sczName; // optional name for control, only used to apply control id. - LPWSTR sczText; - int nX; - int nY; - int nHeight; - int nWidth; - int nSourceX; - int nSourceY; - UINT uStringId; - - HBITMAP hImage; - - // Don't free these; it's just a handle to the central image lists stored in THEME. The handle is freed once, there. 
- HIMAGELIST rghImageList[4]; - - DWORD dwStyle; - DWORD dwExtendedStyle; - DWORD dwInternalStyle; - - DWORD dwFontId; - DWORD dwFontHoverId; - DWORD dwFontSelectedId; - - // Used by billboard controls - THEME_BILLBOARD* ptbBillboards; - DWORD cBillboards; - WORD wBillboardInterval; - WORD wBillboardUrls; - BOOL fBillboardLoops; - - // Used by listview controls - THEME_COLUMN *ptcColumns; - DWORD cColumns; - - // Used by tab controls - THEME_TAB *pttTabs; - DWORD cTabs; - - // state variables that should be ignored - HWND hWnd; - DWORD dwData; // type specific data -}; - - -struct THEME_IMAGELIST -{ - LPWSTR sczName; - - HIMAGELIST hImageList; -}; - -struct THEME_PAGE -{ - WORD wId; - LPWSTR sczName; - - DWORD cControlIndices; - DWORD* rgdwControlIndices; -}; - -struct THEME_FONT -{ - HFONT hFont; - COLORREF crForeground; - HBRUSH hForeground; - COLORREF crBackground; - HBRUSH hBackground; -}; - - -struct THEME -{ - WORD wId; - - DWORD dwStyle; - DWORD dwFontId; - HANDLE hIcon; - LPWSTR sczCaption; - int nHeight; - int nWidth; - int nSourceX; - int nSourceY; - UINT uStringId; - - HBITMAP hImage; - - DWORD cFonts; - THEME_FONT* rgFonts; - - DWORD cPages; - THEME_PAGE* rgPages; - - DWORD cImageLists; - THEME_IMAGELIST* rgImageLists; - - DWORD cControls; - THEME_CONTROL* rgControls; - - // state variables that should be ignored - HWND hwndParent; // parent for loaded controls - HWND hwndHover; // current hwnd hovered over -}; - - -/******************************************************************** - ThemeInitialize - initialized theme management. - -*******************************************************************/ -DAPI_(HRESULT) ThemeInitialize( - __in_opt HMODULE hModule - ); - -/******************************************************************** - ThemeUninitialize - unitialize theme management. 
- -*******************************************************************/ -DAPI_(void) ThemeUninitialize(); - -/******************************************************************** - ThemeLoadFromFile - loads a theme from a loose file. - - *******************************************************************/ -DAPI_(HRESULT) ThemeLoadFromFile( - __in_z LPCWSTR wzThemeFile, - __out THEME** ppTheme - ); - -/******************************************************************** - ThemeLoadFromResource - loads a theme from a module's data resource. - - NOTE: The resource data must be UTF-8 encoded. -*******************************************************************/ -DAPI_(HRESULT) ThemeLoadFromResource( - __in_opt HMODULE hModule, - __in_z LPCSTR szResource, - __out THEME** ppTheme - ); - -/******************************************************************** - ThemeFree - frees any memory associated with a theme. - -*******************************************************************/ -DAPI_(void) ThemeFree( - __in THEME* pTheme - ); - -/******************************************************************** - ThemeLoadControls - creates the windows for all the theme controls. - -*******************************************************************/ -DAPI_(HRESULT) ThemeLoadControls( - __in THEME* pTheme, - __in HWND hwndParent, - __in_ecount_opt(cAssignControlIds) const THEME_ASSIGN_CONTROL_ID* rgAssignControlIds, - __in DWORD cAssignControlIds - ); - -/******************************************************************** - ThemeUnloadControls - resets all the theme control windows so the theme - controls can be reloaded. - -*******************************************************************/ -DAPI_(void) ThemeUnloadControls( - __in THEME* pTheme - ); - -/******************************************************************** - ThemeLocalize - Localizes all of the strings in the them. 
- -*******************************************************************/ -DAPI_(HRESULT) ThemeLocalize( - __in THEME *pTheme, - __in const WIX_LOCALIZATION *pLocStringSet - ); - -DAPI_(HRESULT) ThemeLoadStrings( - __in THEME* pTheme, - __in HMODULE hResModule - ); - -/******************************************************************** - ThemeLoadRichEditFromFile - Attach a richedit control to a RTF file. - - *******************************************************************/ -DAPI_(HRESULT) ThemeLoadRichEditFromFile( - __in THEME* pTheme, - __in DWORD dwControl, - __in_z LPCWSTR wzFileName, - __in HMODULE hModule - ); - -/******************************************************************** - ThemeLoadRichEditFromResource - Attach a richedit control to resource data. - - *******************************************************************/ -DAPI_(HRESULT) ThemeLoadRichEditFromResource( - __in THEME* pTheme, - __in DWORD dwControl, - __in_z LPCSTR szResourceName, - __in HMODULE hModule - ); - -/******************************************************************** - ThemeLoadRichEditFromResourceToHWnd - Attach a richedit control (by - HWND) to resource data. - - *******************************************************************/ -DAPI_(HRESULT) ThemeLoadRichEditFromResourceToHWnd( - __in HWND hWnd, - __in_z LPCSTR szResourceName, - __in HMODULE hModule - ); - -/******************************************************************** - ThemeHandleKeyboardMessage - will translate the message using the active - accelerator table. - -*******************************************************************/ -DAPI_(BOOL) ThemeHandleKeyboardMessage( - __in_opt THEME* pTheme, - __in HWND hWnd, - __in MSG* pMsg - ); - -/******************************************************************** - ThemeDefWindowProc - replacement for DefWindowProc() when using theme. 
- -*******************************************************************/ -LRESULT CALLBACK ThemeDefWindowProc( - __in_opt THEME* pTheme, - __in HWND hWnd, - __in UINT uMsg, - __in WPARAM wParam, - __in LPARAM lParam - ); - -/******************************************************************** - ThemeGetPageIds - gets the page ids for the theme via page names. - -*******************************************************************/ -DAPI_(void) ThemeGetPageIds( - __in const THEME* pTheme, - __in_ecount(cGetPages) LPCWSTR* rgwzFindNames, - __in_ecount(cGetPages) DWORD* rgdwPageIds, - __in DWORD cGetPages - ); - -/******************************************************************** - ThemeGetPage - gets a theme page by id. - - *******************************************************************/ -DAPI_(THEME_PAGE*) ThemeGetPage( - __in const THEME* pTheme, - __in DWORD dwPage - ); - -/******************************************************************** - ThemeShowPage - shows or hides all of the controls in the page at one time. - - *******************************************************************/ -DAPI_(void) ThemeShowPage( - __in THEME* pTheme, - __in DWORD dwPage, - __in int nCmdShow - ); - -/******************************************************************** - ThemeControlExists - check if a control with the specified id exists. - - *******************************************************************/ -DAPI_(BOOL) ThemeControlExists( - __in THEME* pTheme, - __in DWORD dwControl - ); - -/******************************************************************** - ThemeControlEnable - enables/disables a control. - - *******************************************************************/ -DAPI_(void) ThemeControlEnable( - __in THEME* pTheme, - __in DWORD dwControl, - __in BOOL fEnable - ); - -/******************************************************************** - ThemeControlEnabled - returns whether a control is enabled/disabled. 
- - *******************************************************************/ -DAPI_(BOOL) ThemeControlEnabled( - __in THEME* pTheme, - __in DWORD dwControl - ); - -/******************************************************************** - ThemeControlElevates - sets/removes the shield icon on a control. - - *******************************************************************/ -DAPI_(void) ThemeControlElevates( - __in THEME* pTheme, - __in DWORD dwControl, - __in BOOL fElevates - ); - -/******************************************************************** - ThemeShowControl - shows/hides a control. - - *******************************************************************/ -DAPI_(void) ThemeShowControl( - __in THEME* pTheme, - __in DWORD dwControl, - __in int nCmdShow - ); - -/******************************************************************** - ThemeControlVisible - returns whether a control is visible. - - *******************************************************************/ -DAPI_(BOOL) ThemeControlVisible( - __in THEME* pTheme, - __in DWORD dwControl - ); - -DAPI_(BOOL) ThemePostControlMessage( - __in THEME* pTheme, - __in DWORD dwControl, - __in UINT Msg, - __in WPARAM wParam, - __in LPARAM lParam - ); - -DAPI_(LRESULT) ThemeSendControlMessage( - __in THEME* pTheme, - __in DWORD dwControl, - __in UINT Msg, - __in WPARAM wParam, - __in LPARAM lParam - ); - -/******************************************************************** - ThemeDrawBackground - draws the theme background. - -*******************************************************************/ -DAPI_(HRESULT) ThemeDrawBackground( - __in THEME* pTheme, - __in PAINTSTRUCT* pps - ); - -/******************************************************************** - ThemeDrawControl - draw an owner drawn control. 
- -*******************************************************************/ -DAPI_(HRESULT) ThemeDrawControl( - __in THEME* pTheme, - __in DRAWITEMSTRUCT* pdis - ); - -/******************************************************************** - ThemeHoverControl - mark a control as hover. - -*******************************************************************/ -DAPI_(BOOL) ThemeHoverControl( - __in THEME* pTheme, - __in HWND hwndParent, - __in HWND hwndControl - ); - -/******************************************************************** - ThemeIsControlChecked - gets whether a control is checked. Only - really useful for checkbox controls. - -*******************************************************************/ -DAPI_(BOOL) ThemeIsControlChecked( - __in THEME* pTheme, - __in DWORD dwControl - ); - -/******************************************************************** - ThemeSetControlColor - sets the color of text for a control. - -*******************************************************************/ -DAPI_(BOOL) ThemeSetControlColor( - __in THEME* pTheme, - __in HDC hdc, - __in HWND hWnd, - __out HBRUSH* phBackgroundBrush - ); - -/******************************************************************** - ThemeStartBillboard - starts a billboard control changing images according - to their interval. - - NOTE: iImage specifies the image to start on. If iImage is - greater than the number of images, the last image shown - will be the start image. -*******************************************************************/ -DAPI_(HRESULT) ThemeStartBillboard( - __in const THEME* pTheme, - __in DWORD dwControl, - __in WORD iImage - ); - -/******************************************************************** - ThemeStopBillboard - stops a billboard control from changing images. 
- -*******************************************************************/ -DAPI_(HRESULT) ThemeStopBillboard( - __in const THEME* pTheme, - __in DWORD dwControl - ); - -/******************************************************************** - ThemeSetProgressControl - sets the current percentage complete in a - progress bar control. - -*******************************************************************/ -DAPI_(HRESULT) ThemeSetProgressControl( - __in THEME* pTheme, - __in DWORD dwControl, - __in DWORD dwProgressPercentage - ); - -/******************************************************************** - ThemeSetProgressControlColor - sets the current color of a - progress bar control. - -*******************************************************************/ -DAPI_(HRESULT) ThemeSetProgressControlColor( - __in THEME* pTheme, - __in DWORD dwControl, - __in DWORD dwColorIndex - ); - -/******************************************************************** - ThemeSetTextControl - sets the text of a control. - -*******************************************************************/ -DAPI_(HRESULT) ThemeSetTextControl( - __in THEME* pTheme, - __in DWORD dwControl, - __in_z LPCWSTR wzText - ); - -/******************************************************************** - ThemeGetTextControl - gets the text of a control. - -*******************************************************************/ -DAPI_(HRESULT) ThemeGetTextControl( - __in const THEME* pTheme, - __in DWORD dwControl, - __out LPWSTR* psczText - ); - -/******************************************************************** - ThemeUpdateCaption - updates the caption in the theme. - -*******************************************************************/ -DAPI_(HRESULT) ThemeUpdateCaption( - __in THEME* pTheme, - __in_z LPCWSTR wzCaption - ); - -/******************************************************************** - ThemeSetFocus - set the focus to the control supplied or the next - enabled control if it is disabled. 
- -*******************************************************************/ -DAPI_(void) ThemeSetFocus( - __in THEME* pTheme, - __in DWORD dwControl - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/timeutil.h b/scripts/windows/installer/WiXSDK/inc/timeutil.h deleted file mode 100644 index b5f4eabd79..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/timeutil.h +++ /dev/null @@ -1,45 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Time helper functions. -// -//------------------------------------------------------------------------------------------------- - - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT DAPI TimeFromString( - __in_z LPCWSTR wzTime, - __out FILETIME* pFileTime - ); -HRESULT DAPI TimeFromString3339( - __in_z LPCWSTR wzTime, - __out FILETIME* pFileTime - ); -HRESULT DAPI TimeCurrentTime( - __deref_out_z LPWSTR* ppwz, - __in BOOL fGMT - ); -HRESULT DAPI TimeCurrentDateTime( - __deref_out_z LPWSTR* ppwz, - __in BOOL fGMT - ); -HRESULT DAPI TimeSystemDateTime( - __deref_out_z LPWSTR* ppwz, - __in const SYSTEMTIME *pst, - __in BOOL fGMT - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/uriutil.h b/scripts/windows/installer/WiXSDK/inc/uriutil.h deleted file mode 100644 index 08079f2da8..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/uriutil.h +++ /dev/null @@ -1,111 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). 
-// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// URI helper functions. -// -//------------------------------------------------------------------------------------------------- - -#pragma once - -#include "wininet.h" - - -#ifdef __cplusplus -extern "C" { -#endif - -enum URI_PROTOCOL -{ - URI_PROTOCOL_UNKNOWN, - URI_PROTOCOL_FILE, - URI_PROTOCOL_FTP, - URI_PROTOCOL_HTTP, - URI_PROTOCOL_HTTPS, - URI_PROTOCOL_LOCAL, - URI_PROTOCOL_UNC -}; - -typedef struct _URI_INFO -{ - INTERNET_SCHEME scheme; - LPWSTR sczHostName; - INTERNET_PORT port; - LPWSTR sczUser; - LPWSTR sczPassword; - LPWSTR sczPath; - LPWSTR sczQueryString; -} URI_INFO; - - -HRESULT DAPI UriCanonicalize( - __inout_z LPWSTR* psczUri - ); - -HRESULT DAPI UriCrack( - __in_z LPCWSTR wzUri, - __out_opt INTERNET_SCHEME* pScheme, - __deref_opt_out_z LPWSTR* psczHostName, - __out_opt INTERNET_PORT* pPort, - __deref_opt_out_z LPWSTR* psczUser, - __deref_opt_out_z LPWSTR* psczPassword, - __deref_opt_out_z LPWSTR* psczPath, - __deref_opt_out_z LPWSTR* psczQueryString - ); - -HRESULT DAPI UriCrackEx( - __in_z LPCWSTR wzUri, - __in URI_INFO* pUriInfo - ); - -void DAPI UriInfoUninitialize( - __in URI_INFO* pUriInfo - ); - -HRESULT DAPI UriCreate( - __inout_z LPWSTR* psczUri, - __in INTERNET_SCHEME scheme, - __in_z_opt LPWSTR wzHostName, - __in INTERNET_PORT port, - __in_z_opt LPWSTR wzUser, - __in_z_opt LPWSTR wzPassword, - __in_z_opt LPWSTR wzPath, - __in_z_opt LPWSTR wzQueryString - ); - -HRESULT DAPI UriCanonicalize( - __inout_z LPWSTR* psczUri - ); - -HRESULT DAPI UriFile( - __deref_out_z LPWSTR* psczFile, - __in_z LPCWSTR wzUri - ); - -HRESULT DAPI UriProtocol( - __in_z LPCWSTR wzUri, - __out URI_PROTOCOL* pProtocol - ); - -HRESULT DAPI UriRoot( - __in_z LPCWSTR wzUri, - __out LPWSTR* ppwzRoot, - __out_opt URI_PROTOCOL* pProtocol - ); - -HRESULT DAPI UriResolve( - __in_z LPCWSTR wzUri, - __in_opt LPCWSTR wzBaseUri, - __out 
LPWSTR* ppwzResolvedUri, - __out_opt const URI_PROTOCOL* pResolvedProtocol - ); - -#ifdef __cplusplus -} -#endif - diff --git a/scripts/windows/installer/WiXSDK/inc/userutil.h b/scripts/windows/installer/WiXSDK/inc/userutil.h deleted file mode 100644 index 569bc8a539..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/userutil.h +++ /dev/null @@ -1,42 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// User helper functions. -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT DAPI UserBuildDomainUserName( - __out_ecount_z(cchDest) LPWSTR wzDest, - __in int cchDest, - __in_z LPCWSTR pwzName, - __in_z LPCWSTR pwzDomain - ); - -HRESULT DAPI UserCheckIsMember( - __in_z LPCWSTR pwzName, - __in_z LPCWSTR pwzDomain, - __in_z LPCWSTR pwzGroupName, - __in_z LPCWSTR pwzGroupDomain, - __out LPBOOL lpfMember - ); - -HRESULT DAPI UserCreateADsPath( - __in_z LPCWSTR wzObjectDomain, - __in_z LPCWSTR wzObjectName, - __out BSTR *pbstrAdsPath - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/wcalog.h b/scripts/windows/installer/WiXSDK/inc/wcalog.h deleted file mode 100644 index b42f09827f..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/wcalog.h +++ /dev/null @@ -1,24 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). 
-// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Private header for internal logging functions -// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -BOOL WIXAPI IsVerboseLogging(); -HRESULT WIXAPI SetVerboseLoggingAtom(BOOL bValue); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/wcautil.h b/scripts/windows/installer/WiXSDK/inc/wcautil.h deleted file mode 100644 index a8e5feff15..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/wcautil.h +++ /dev/null @@ -1,372 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Windows Installer XML CustomAction utility library. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -#define WIXAPI __stdcall -#define ExitTrace WcaLogError -#define ExitTrace1 WcaLogError -#define ExitTrace2 WcaLogError -#define ExitTrace3 WcaLogError - -#include "dutil.h" - -#define MessageExitOnLastError(x, e, s) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (FAILED(x)) { ExitTrace(x, "%s", s); WcaErrorMessage(e, x, MB_OK, 0); goto LExit; } } -#define MessageExitOnLastError1(x, e, f, s) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (FAILED(x)) { ExitTrace1(x, f, s); WcaErrorMessage(e, x, MB_OK, 1, s); goto LExit; } } - -#define MessageExitOnFailure(x, e, s) if (FAILED(x)) { ExitTrace(x, "%s", s); WcaErrorMessage(e, x, INSTALLMESSAGE_ERROR | MB_OK, 0); goto LExit; } -#define MessageExitOnFailure1(x, e, f, s) if (FAILED(x)) { ExitTrace1(x, f, s); WcaErrorMessage(e, x, INSTALLMESSAGE_ERROR | MB_OK, 1, s); goto LExit; } -#define MessageExitOnFailure2(x, e, f, s, t) if (FAILED(x)) { ExitTrace2(x, f, s, t); WcaErrorMessage(e, x, INSTALLMESSAGE_ERROR | MB_OK, 2, s, t); goto LExit; } -#define MessageExitOnFailure3(x, e, f, s, t, u) if (FAILED(x)) { ExitTrace2(x, f, s, t, u); WcaErrorMessage(e, x, INSTALLMESSAGE_ERROR | MB_OK, 3, s, t, u); goto LExit; } - -#define MessageExitOnNullWithLastError(p, x, e, s) if (NULL == p) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (!FAILED(x)) { x = E_FAIL; } ExitTrace(x, "%s", s); WcaErrorMessage(e, x, MB_OK, 0); goto LExit; } -#define MessageExitOnNullWithLastError1(p, x, e, f, s) if (NULL == p) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (!FAILED(x)) { x = E_FAIL; } ExitTrace(x, f, s); WcaErrorMessage(e, x, MB_OK, 1, s); goto LExit; } -#define MessageExitOnNullWithLastError2(p, x, e, f, s, t) if (NULL == p) { x = ::GetLastError(); x = HRESULT_FROM_WIN32(x); if (!FAILED(x)) { x = E_FAIL; } ExitTrace(x, f, s, t); WcaErrorMessage(e, x, MB_OK, 2, s, t); 
goto LExit; } - -// Generic action enum. -enum WCA_ACTION -{ - WCA_ACTION_NONE, - WCA_ACTION_INSTALL, - WCA_ACTION_UNINSTALL, -}; - -enum WCA_CASCRIPT -{ - WCA_CASCRIPT_SCHEDULED, - WCA_CASCRIPT_ROLLBACK, -}; - -enum WCA_CASCRIPT_CLOSE -{ - WCA_CASCRIPT_CLOSE_PRESERVE, - WCA_CASCRIPT_CLOSE_DELETE, -}; - -enum WCA_TODO -{ - WCA_TODO_UNKNOWN, - WCA_TODO_INSTALL, - WCA_TODO_UNINSTALL, - WCA_TODO_REINSTALL, -}; - -typedef struct WCA_CASCRIPT_STRUCT -{ - LPWSTR pwzScriptPath; - HANDLE hScriptFile; -} *WCA_CASCRIPT_HANDLE; - -void WIXAPI WcaGlobalInitialize( - __in HINSTANCE hInst - ); -void WIXAPI WcaGlobalFinalize(); - -HRESULT WIXAPI WcaInitialize( - __in MSIHANDLE hInstall, - __in_z PCSTR szCustomActionLogName - ); -UINT WIXAPI WcaFinalize( - __in UINT iReturnValue - ); -BOOL WIXAPI WcaIsInitialized(); - -MSIHANDLE WIXAPI WcaGetInstallHandle(); -MSIHANDLE WIXAPI WcaGetDatabaseHandle(); - -const char* WIXAPI WcaGetLogName(); - -void WIXAPI WcaSetReturnValue( - __in UINT iReturnValue - ); -BOOL WIXAPI WcaCancelDetected(); - -const int LOG_BUFFER = 2048; -enum LOGLEVEL -{ - LOGMSG_TRACEONLY, // Never written to the log file (except in DEBUG builds) - LOGMSG_VERBOSE, // Written to log when LOGVERBOSE - LOGMSG_STANDARD // Written to log whenever informational logging is enabled -}; - -void __cdecl WcaLog( - __in LOGLEVEL llv, - __in_z __format_string PCSTR fmt, ... - ); -BOOL WIXAPI WcaDisplayAssert( - __in LPCSTR sz - ); -void __cdecl WcaLogError( - __in HRESULT hr, - __in LPCSTR szMessage, - ... - ); - -UINT WIXAPI WcaProcessMessage( - __in INSTALLMESSAGE eMessageType, - __in MSIHANDLE hRecord - ); -UINT __cdecl WcaErrorMessage( - __in int iError, - __in HRESULT hrError, - __in UINT uiType, - __in DWORD cArgs, - ... 
- ); -HRESULT WIXAPI WcaProgressMessage( - __in UINT uiCost, - __in BOOL fExtendProgressBar - ); - -BOOL WIXAPI WcaIsInstalling( - __in INSTALLSTATE isInstalled, - __in INSTALLSTATE isAction - ); -BOOL WIXAPI WcaIsReInstalling( - __in INSTALLSTATE isInstalled, - __in INSTALLSTATE isAction - ); -BOOL WIXAPI WcaIsUninstalling( - __in INSTALLSTATE isInstalled, - __in INSTALLSTATE isAction - ); - -HRESULT WIXAPI WcaSetComponentState( - __in_z LPCWSTR wzComponent, - __in INSTALLSTATE isState - ); - -HRESULT WIXAPI WcaTableExists( - __in_z LPCWSTR wzTable - ); - -HRESULT WIXAPI WcaOpenView( - __in_z LPCWSTR wzSql, - __out MSIHANDLE* phView - ); -HRESULT WIXAPI WcaExecuteView( - __in MSIHANDLE hView, - __in MSIHANDLE hRec - ); -HRESULT WIXAPI WcaOpenExecuteView( - __in_z LPCWSTR wzSql, - __out MSIHANDLE* phView - ); -HRESULT WIXAPI WcaFetchRecord( - __in MSIHANDLE hView, - __out MSIHANDLE* phRec - ); -HRESULT WIXAPI WcaFetchSingleRecord( - __in MSIHANDLE hView, - __out MSIHANDLE* phRec - ); - -HRESULT WIXAPI WcaGetProperty( - __in_z LPCWSTR wzProperty, - __inout LPWSTR* ppwzData - ); -HRESULT WIXAPI WcaGetFormattedProperty( - __in_z LPCWSTR wzProperty, - __out LPWSTR* ppwzData - ); -HRESULT WIXAPI WcaGetFormattedString( - __in_z LPCWSTR wzString, - __out LPWSTR* ppwzData - ); -HRESULT WIXAPI WcaGetIntProperty( - __in_z LPCWSTR wzProperty, - __inout int* piData - ); -HRESULT WIXAPI WcaGetTargetPath( - __in_z LPCWSTR wzFolder, - __out LPWSTR* ppwzData - ); -HRESULT WIXAPI WcaSetProperty( - __in_z LPCWSTR wzPropertyName, - __in_z LPCWSTR wzPropertyValue - ); -HRESULT WIXAPI WcaSetIntProperty( - __in_z LPCWSTR wzPropertyName, - __in int nPropertyValue - ); -BOOL WIXAPI WcaIsPropertySet( - __in LPCSTR szProperty - ); -BOOL WIXAPI WcaIsUnicodePropertySet( - __in LPCWSTR wzProperty - ); - -HRESULT WIXAPI WcaGetRecordInteger( - __in MSIHANDLE hRec, - __in UINT uiField, - __inout int* piData - ); -HRESULT WIXAPI WcaGetRecordString( - __in MSIHANDLE hRec, - __in UINT uiField, - 
__inout LPWSTR* ppwzData - ); -HRESULT WIXAPI WcaGetRecordFormattedInteger( - __in MSIHANDLE hRec, - __in UINT uiField, - __out int* piData - ); -HRESULT WIXAPI WcaGetRecordFormattedString( - __in MSIHANDLE hRec, - __in UINT uiField, - __inout LPWSTR* ppwzData - ); - -HRESULT WIXAPI WcaAllocStream( - __deref_out_bcount_part(cbData, 0) BYTE** ppbData, - __in DWORD cbData - ); -HRESULT WIXAPI WcaFreeStream( - __in BYTE* pbData - ); - -HRESULT WIXAPI WcaGetRecordStream( - __in MSIHANDLE hRecBinary, - __in UINT uiField, - __deref_out_bcount_full(*pcbData) BYTE** ppbData, - __out DWORD* pcbData - ); -HRESULT WIXAPI WcaSetRecordString( - __in MSIHANDLE hRec, - __in UINT uiField, - __in_z LPCWSTR wzData - ); -HRESULT WIXAPI WcaSetRecordInteger( - __in MSIHANDLE hRec, - __in UINT uiField, - __in int iValue - ); - -HRESULT WIXAPI WcaDoDeferredAction( - __in_z LPCWSTR wzAction, - __in_z LPCWSTR wzCustomActionData, - __in UINT uiCost - ); -DWORD WIXAPI WcaCountOfCustomActionDataRecords( - __in_z LPCWSTR wzData - ); - -HRESULT WIXAPI WcaReadStringFromCaData( - __deref_in LPWSTR* ppwzCustomActionData, - __deref_out_z LPWSTR* ppwzString - ); -HRESULT WIXAPI WcaReadIntegerFromCaData( - __deref_in LPWSTR* ppwzCustomActionData, - __out int* piResult - ); -HRESULT WIXAPI WcaReadStreamFromCaData( - __deref_in LPWSTR* ppwzCustomActionData, - __deref_out_bcount(*pcbData) BYTE** ppbData, - __out DWORD_PTR* pcbData - ); -HRESULT WIXAPI WcaWriteStringToCaData( - __in_z LPCWSTR wzString, - __deref_inout_z LPWSTR* ppwzCustomActionData - ); -HRESULT WIXAPI WcaWriteIntegerToCaData( - __in int i, - __deref_out_z_opt LPWSTR* ppwzCustomActionData - ); -HRESULT WIXAPI WcaWriteStreamToCaData( - __in_bcount(cbData) const BYTE* pbData, - __in DWORD cbData, - __deref_inout_z_opt LPWSTR* ppwzCustomActionData - ); - -HRESULT __cdecl WcaAddTempRecord( - __inout MSIHANDLE* phTableView, - __inout MSIHANDLE* phColumns, - __in_z LPCWSTR wzTable, - __out_opt MSIDBERROR* pdbError, - __in UINT 
uiUniquifyColumn, - __in UINT cColumns, - ... - ); - -HRESULT WIXAPI WcaDumpTable( - __in_z LPCWSTR wzTable - ); - -HRESULT WIXAPI WcaDeferredActionRequiresReboot(); -BOOL WIXAPI WcaDidDeferredActionRequireReboot(); - -HRESULT WIXAPI WcaCaScriptCreateKey( - __out LPWSTR* ppwzScriptKey - ); - -HRESULT WIXAPI WcaCaScriptCreate( - __in WCA_ACTION action, - __in WCA_CASCRIPT script, - __in BOOL fImpersonated, - __in_z LPCWSTR wzScriptKey, - __in BOOL fAppend, - __out WCA_CASCRIPT_HANDLE* phScript - ); - -HRESULT WIXAPI WcaCaScriptOpen( - __in WCA_ACTION action, - __in WCA_CASCRIPT script, - __in BOOL fImpersonated, - __in_z LPCWSTR wzScriptKey, - __out WCA_CASCRIPT_HANDLE* phScript - ); - -void WIXAPI WcaCaScriptClose( - __in_opt WCA_CASCRIPT_HANDLE hScript, - __in WCA_CASCRIPT_CLOSE closeOperation - ); - -HRESULT WIXAPI WcaCaScriptReadAsCustomActionData( - __in WCA_CASCRIPT_HANDLE hScript, - __out LPWSTR* ppwzCustomActionData - ); - -HRESULT WIXAPI WcaCaScriptWriteString( - __in WCA_CASCRIPT_HANDLE hScript, - __in_z LPCWSTR wzValue - ); - -HRESULT WIXAPI WcaCaScriptWriteNumber( - __in WCA_CASCRIPT_HANDLE hScript, - __in DWORD dwValue - ); - -void WIXAPI WcaCaScriptFlush( - __in WCA_CASCRIPT_HANDLE hScript - ); - -void WIXAPI WcaCaScriptCleanup( - __in_z LPCWSTR wzProductCode, - __in BOOL fImpersonated - ); - -HRESULT WIXAPI QuietExec( - __inout_z LPWSTR wzCommand, - __in DWORD dwTimeout - ); - -WCA_TODO WIXAPI WcaGetComponentToDo( - __in_z LPCWSTR wzComponentId - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/wcawow64.h b/scripts/windows/installer/WiXSDK/inc/wcawow64.h deleted file mode 100644 index f714107b85..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/wcawow64.h +++ /dev/null @@ -1,30 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. 
-// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Windows Installer XML CustomAction utility library for Wow64 API-related functionality. -// -//------------------------------------------------------------------------------------------------- - -#include "wcautil.h" - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT WIXAPI WcaInitializeWow64(); -BOOL WIXAPI WcaIsWow64Process(); -BOOL WIXAPI WcaIsWow64Initialized(); -HRESULT WIXAPI WcaDisableWow64FSRedirection(); -HRESULT WIXAPI WcaRevertWow64FSRedirection(); -HRESULT WIXAPI WcaFinalizeWow64(); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/wcawrapquery.h b/scripts/windows/installer/WiXSDK/inc/wcawrapquery.h deleted file mode 100644 index ac831fc8af..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/wcawrapquery.h +++ /dev/null @@ -1,141 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. 
-// -// -// -// Windows Installer XML CustomAction utility library wrappers meant to wrap an MSI view as -// opened by an immediate custom action and transmit it to a deferred custom action -// -//------------------------------------------------------------------------------------------------- - -#include "wcautil.h" - -// Enumerations -enum eWrapQueryAction -{ - wqaTableBegin = 1, - wqaTableFinish, - wqaRowBegin, - wqaRowFinish -}; - -enum eColumnDataType -{ - cdtString = 1, - cdtInt, - cdtStream, - cdtUnknown -}; - -enum eFormatMaskColumn -{ - efmcColumn1 = 1, - efmcColumn2 = 1 << 1, - efmcColumn3 = 1 << 2, - efmcColumn4 = 1 << 3, - efmcColumn5 = 1 << 4, - efmcColumn6 = 1 << 5, - efmcColumn7 = 1 << 6, - efmcColumn8 = 1 << 7, - efmcColumn9 = 1 << 8, - efmcColumn10 = 1 << 9, - efmcColumn11 = 1 << 10, - efmcColumn12 = 1 << 11, - efmcColumn13 = 1 << 12, - efmcColumn14 = 1 << 13, - efmcColumn15 = 1 << 14, - efmcColumn16 = 1 << 15, - efmcColumn17 = 1 << 16, - efmcColumn18 = 1 << 17, - efmcColumn19 = 1 << 18, - efmcColumn20 = 1 << 19, - efmcColumn21 = 1 << 20, - efmcColumn22 = 1 << 21, - efmcColumn23 = 1 << 22, - efmcColumn24 = 1 << 23, - efmcColumn25 = 1 << 24, - efmcColumn26 = 1 << 25, - efmcColumn27 = 1 << 26, - efmcColumn28 = 1 << 27, - efmcColumn29 = 1 << 28, - efmcColumn30 = 1 << 29, - efmcColumn31 = 1 << 30, - efmcColumn32 = 1 << 31, -}; - -// Keeps track of the query instance for the reading CA (deferred CA) -typedef struct WCA_WRAPQUERY_STRUCT -{ - // These are used to size our dynamic arrays below - DWORD dwColumns, dwRows, dwNextIndex; - - // Dynamic arrays of column schema information - eColumnDataType *pcdtColumnType; - LPWSTR *ppwzColumnNames; - - // Dynamic array of raw record data - MSIHANDLE *phRecords; -} *WCA_WRAPQUERY_HANDLE; - -// Wrap a query -// Setting the pfFormatMask enables control over which fields will be formatted, and which will be left unchanged -// Setting dwComponentColumn to something other than 0xFFFFFFFF tells WcaWrapQuery to add two 
additional columns to the right side of the table -// - ISInstalled and ISAction - which map to the ComponentState of the component (the component is found in the column specified) -// Note that if a component is NULL, the component state columns will also be left null, and it will be up to the deferred CA to fail or ignore the case appropriately -// Setting dwDirectoryColumn to something other than 0xFFFFFFFF tells WcaWrapQuery to add two more additional columns to the right side of the table -// - SourcePath and TargetPath - which map to the Directory's Source and Target Path (the directory is found in the column specified) -// Note that if a directory is NULL, the directory source/target path columns will also be left null, and it will be up to the deferred CA to fail or ignore the case appropriately -HRESULT WIXAPI WcaWrapQuery( - __in_z LPCWSTR pwzQuery, - __inout LPWSTR * ppwzCustomActionData, - __in_opt DWORD dwFormatMask, - __in_opt DWORD dwComponentColumn, - __in_opt DWORD dwDirectoryColumn - ); -// This wraps an empty table query into the custom action data - this is a way to indicate to the deferred custom action that a necessary table doesn't exist, or its query returned no results -HRESULT WIXAPI WcaWrapEmptyQuery( - __inout LPWSTR * ppwzCustomActionData - ); - -// Open a new unwrap query operation, with data from the ppwzCustomActionData string -HRESULT WIXAPI WcaBeginUnwrapQuery( - __out WCA_WRAPQUERY_HANDLE * phWrapQuery, - __inout LPWSTR * ppwzCustomActionData - ); - -// Get the number of records in a query being unwrapped -DWORD WIXAPI WcaGetQueryRecords( - __in const WCA_WRAPQUERY_HANDLE hWrapQuery - ); - -// This function resets a query back to its first row, so that the next fetch returns the first record -void WIXAPI WcaFetchWrappedReset( - __in WCA_WRAPQUERY_HANDLE hWrapQuery - ); -// Fetch the next record in this query -// NOTE: the MSIHANDLE returned by this function should not be released, as it is the same handle used by the query object 
to maintain the item. -// so, don't use this function with PMSIHANDLE objects! -HRESULT WIXAPI WcaFetchWrappedRecord( - __in WCA_WRAPQUERY_HANDLE hWrapQuery, - __out MSIHANDLE* phRec - ); - -// Fetch the next record in the query where the string value in column dwComparisonColumn equals the value pwzExpectedValue -// NOTE: the MSIHANDLE returned by this function should not be released, as it is the same handle used by the query object to maintain the item. -// so, don't use this function with PMSIHANDLE objects! -HRESULT WIXAPI WcaFetchWrappedRecordWhereString( - __in WCA_WRAPQUERY_HANDLE hWrapQuery, - __in DWORD dwComparisonColumn, - __in_z LPCWSTR pwzExpectedValue, - __out MSIHANDLE* phRec - ); - -// Release a query ID (frees memory, and frees the ID for a new query) -void WIXAPI WcaFinishUnwrapQuery( - __in_opt WCA_WRAPQUERY_HANDLE hWrapQuery - ); diff --git a/scripts/windows/installer/WiXSDK/inc/wiutil.h b/scripts/windows/installer/WiXSDK/inc/wiutil.h deleted file mode 100644 index 123340d249..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/wiutil.h +++ /dev/null @@ -1,367 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for Windows Installer helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -#ifdef __cplusplus -extern "C" { -#endif - -// constants - -#define IDNOACTION 0 -#define WIU_MB_OKIGNORECANCELRETRY 0xE - -#define MAX_DARWIN_KEY 73 -#define MAX_DARWIN_COLUMN 255 - -#define WIU_LOG_DEFAULT INSTALLLOGMODE_FATALEXIT | INSTALLLOGMODE_ERROR | INSTALLLOGMODE_WARNING | \ - INSTALLLOGMODE_USER | INSTALLLOGMODE_INFO | INSTALLLOGMODE_RESOLVESOURCE | \ - INSTALLLOGMODE_OUTOFDISKSPACE | INSTALLLOGMODE_ACTIONSTART | \ - INSTALLLOGMODE_ACTIONDATA | INSTALLLOGMODE_COMMONDATA | INSTALLLOGMODE_PROPERTYDUMP - -#define ReleaseMsi(h) if (h) { ::MsiCloseHandle(h); } -#define ReleaseNullMsi(h) if (h) { ::MsiCloseHandle(h); h = NULL; } - - -enum WIU_RESTART -{ - WIU_RESTART_NONE, - WIU_RESTART_REQUIRED, - WIU_RESTART_INITIATED, -}; - -enum WIU_MSI_EXECUTE_MESSAGE_TYPE -{ - WIU_MSI_EXECUTE_MESSAGE_NONE, - WIU_MSI_EXECUTE_MESSAGE_PROGRESS, - WIU_MSI_EXECUTE_MESSAGE_ERROR, - WIU_MSI_EXECUTE_MESSAGE_MSI_MESSAGE, - WIU_MSI_EXECUTE_MESSAGE_MSI_FILES_IN_USE, -}; - - -// structures - -typedef struct _WIU_MSI_EXECUTE_MESSAGE -{ - WIU_MSI_EXECUTE_MESSAGE_TYPE type; - DWORD dwAllowedResults; - - DWORD cData; - LPCWSTR* rgwzData; - - INT nResultRecommendation; // recommended return result for this message based on analysis of real world installs. 
- - union - { - struct - { - DWORD dwPercentage; - } progress; - struct - { - DWORD dwErrorCode; - LPCWSTR wzMessage; - } error; - struct - { - INSTALLMESSAGE mt; - LPCWSTR wzMessage; - } msiMessage; - struct - { - DWORD cFiles; - LPCWSTR* rgwzFiles; - } msiFilesInUse; - }; -} WIU_MSI_EXECUTE_MESSAGE; - -typedef struct _WIU_MSI_PROGRESS -{ - DWORD dwTotal; - DWORD dwCompleted; - DWORD dwStep; - BOOL fMoveForward; - BOOL fEnableActionData; - BOOL fScriptInProgress; -} WIU_MSI_PROGRESS; - - -typedef int (*PFN_MSIEXECUTEMESSAGEHANDLER)( - __in WIU_MSI_EXECUTE_MESSAGE* pMessage, - __in_opt LPVOID pvContext - ); - -typedef struct _WIU_MSI_EXECUTE_CONTEXT -{ - BOOL fRollback; - PFN_MSIEXECUTEMESSAGEHANDLER pfnMessageHandler; - LPVOID pvContext; - WIU_MSI_PROGRESS rgMsiProgress[64]; - DWORD dwCurrentProgressIndex; - - INSTALLUILEVEL previousInstallUILevel; - HWND hwndPreviousParentWindow; - INSTALLUI_HANDLERW pfnPreviousExternalUI; - INSTALLUI_HANDLER_RECORD pfnPreviousExternalUIRecord; - - BOOL fSetPreviousExternalUIRecord; - BOOL fSetPreviousExternalUI; -} WIU_MSI_EXECUTE_CONTEXT; - - -// typedefs -typedef UINT (WINAPI *PFN_MSIENABLELOGW)( - __in DWORD dwLogMode, - __in_z LPCWSTR szLogFile, - __in DWORD dwLogAttributes - ); -typedef UINT (WINAPI *PFN_MSIGETPRODUCTINFOW)( - __in LPCWSTR szProductCode, - __in LPCWSTR szProperty, - __out_ecount_opt(*pcchValue) LPWSTR szValue, - __inout LPDWORD pcchValue - ); -typedef INSTALLSTATE (WINAPI *PFN_MSIGETCOMPONENTPATHW)( - __in LPCWSTR szProduct, - __in LPCWSTR szComponent, - __out_ecount_opt(*pcchBuf) LPWSTR lpPathBuf, - __inout_opt LPDWORD pcchBuf - ); -typedef INSTALLSTATE (WINAPI *PFN_MSILOCATECOMPONENTW)( - __in LPCWSTR szComponent, - __out_ecount_opt(*pcchBuf) LPWSTR lpPathBuf, - __inout_opt LPDWORD pcchBuf - ); -typedef UINT (WINAPI *PFN_MSIGETPRODUCTINFOEXW)( - __in LPCWSTR szProductCode, - __in_opt LPCWSTR szUserSid, - __in MSIINSTALLCONTEXT dwContext, - __in LPCWSTR szProperty, - __out_ecount_opt(*pcchValue) LPWSTR 
szValue, - __inout_opt LPDWORD pcchValue - ); -typedef INSTALLSTATE (WINAPI *PFN_MSIQUERYFEATURESTATEW)( - __in LPCWSTR szProduct, - __in LPCWSTR szFeature - ); -typedef UINT (WINAPI *PFN_MSIGETPATCHINFOEXW)( - __in_z LPCWSTR wzPatchCode, - __in_z LPCWSTR wzProductCode, - __in_z_opt LPCWSTR wzUserSid, - __in MSIINSTALLCONTEXT dwContext, - __in_z LPCWSTR wzProperty, - __out_opt LPWSTR wzValue, - __inout DWORD* pcchValue - ); -typedef UINT (WINAPI *PFN_MSIDETERMINEPATCHSEQUENCEW)( - __in_z LPCWSTR wzProductCode, - __in_z_opt LPCWSTR wzUserSid, - __in MSIINSTALLCONTEXT context, - __in DWORD cPatchInfo, - __in PMSIPATCHSEQUENCEINFOW pPatchInfo - ); -typedef UINT (WINAPI *PFN_MSIINSTALLPRODUCTW)( - __in LPCWSTR szPackagePath, - __in_opt LPCWSTR szCommandLine - ); -typedef UINT (WINAPI *PFN_MSICONFIGUREPRODUCTEXW)( - __in LPCWSTR szProduct, - __in int iInstallLevel, - __in INSTALLSTATE eInstallState, - __in_opt LPCWSTR szCommandLine - ); -typedef UINT (WINAPI *PFN_MSIREMOVEPATCHESW)( - __in_z LPCWSTR wzPatchList, - __in_z LPCWSTR wzProductCode, - __in INSTALLTYPE eUninstallType, - __in_z_opt LPCWSTR szPropertyList - ); -typedef INSTALLUILEVEL (WINAPI *PFN_MSISETINTERNALUI)( - __in INSTALLUILEVEL dwUILevel, - __inout_opt HWND *phWnd - ); -typedef UINT (WINAPI *PFN_MSISETEXTERNALUIRECORD)( - __in_opt INSTALLUI_HANDLER_RECORD puiHandler, - __in DWORD dwMessageFilter, - __in_opt LPVOID pvContext, - __out_opt PINSTALLUI_HANDLER_RECORD ppuiPrevHandler - ); -typedef INSTALLUI_HANDLERW (WINAPI *PFN_MSISETEXTERNALUIW)( - __in_opt INSTALLUI_HANDLERW puiHandler, - __in DWORD dwMessageFilter, - __in_opt LPVOID pvContext - ); -typedef UINT (WINAPI *PFN_MSIENUMPRODUCTSW)( - __in DWORD iProductIndex, - __out_ecount(MAX_GUID_CHARS + 1) LPWSTR lpProductBuf - ); -typedef UINT (WINAPI *PFN_MSIENUMPRODUCTSEXW)( - __in_z_opt LPCWSTR wzProductCode, - __in_z_opt LPCWSTR wzUserSid, - __in DWORD dwContext, - __in DWORD dwIndex, - __out_opt WCHAR wzInstalledProductCode[39], - __out_opt 
MSIINSTALLCONTEXT *pdwInstalledContext, - __out_opt LPWSTR wzSid, - __inout_opt LPDWORD pcchSid - ); - -typedef UINT (WINAPI *PFN_MSIENUMRELATEDPRODUCTSW)( - __in LPCWSTR lpUpgradeCode, - __reserved DWORD dwReserved, - __in DWORD iProductIndex, - __out_ecount(MAX_GUID_CHARS + 1) LPWSTR lpProductBuf - ); -typedef UINT (WINAPI *PFN_MSISOURCELISTADDSOURCEEXW)( - __in LPCWSTR szProductCodeOrPatchCode, - __in_opt LPCWSTR szUserSid, - __in MSIINSTALLCONTEXT dwContext, - __in DWORD dwOptions, - __in LPCWSTR szSource, - __in_opt DWORD dwIndex - ); - - -HRESULT DAPI WiuInitialize( - ); -void DAPI WiuUninitialize( - ); -void DAPI WiuFunctionOverride( - __in_opt PFN_MSIENABLELOGW pfnMsiEnableLogW, - __in_opt PFN_MSIGETCOMPONENTPATHW pfnMsiGetComponentPathW, - __in_opt PFN_MSILOCATECOMPONENTW pfnMsiLocateComponentW, - __in_opt PFN_MSIQUERYFEATURESTATEW pfnMsiQueryFeatureStateW, - __in_opt PFN_MSIGETPRODUCTINFOW pfnMsiGetProductInfoW, - __in_opt PFN_MSIGETPRODUCTINFOEXW pfnMsiGetProductInfoExW, - __in_opt PFN_MSIINSTALLPRODUCTW pfnMsiInstallProductW, - __in_opt PFN_MSICONFIGUREPRODUCTEXW pfnMsiConfigureProductExW, - __in_opt PFN_MSISETINTERNALUI pfnMsiSetInternalUI, - __in_opt PFN_MSISETEXTERNALUIW pfnMsiSetExternalUIW, - __in_opt PFN_MSIENUMRELATEDPRODUCTSW pfnMsiEnumRelatedProductsW, - __in_opt PFN_MSISETEXTERNALUIRECORD pfnMsiSetExternalUIRecord, - __in_opt PFN_MSISOURCELISTADDSOURCEEXW pfnMsiSourceListAddSourceExW - ); -HRESULT DAPI WiuGetComponentPath( - __in_z LPCWSTR wzProductCode, - __in_z LPCWSTR wzComponentId, - __out INSTALLSTATE* pInstallState, - __out_z LPWSTR* psczValue - ); -HRESULT DAPI WiuLocateComponent( - __in_z LPCWSTR wzComponentId, - __out INSTALLSTATE* pInstallState, - __out_z LPWSTR* psczValue - ); -HRESULT DAPI WiuQueryFeatureState( - __in_z LPCWSTR wzProduct, - __in_z LPCWSTR wzFeature, - __out INSTALLSTATE* pInstallState - ); -HRESULT DAPI WiuGetProductInfo( - __in_z LPCWSTR wzProductCode, - __in_z LPCWSTR wzProperty, - __out LPWSTR* psczValue - ); 
-HRESULT DAPI WiuGetProductInfoEx( - __in_z LPCWSTR wzProductCode, - __in_z_opt LPCWSTR wzUserSid, - __in MSIINSTALLCONTEXT dwContext, - __in_z LPCWSTR wzProperty, - __out LPWSTR* psczValue - ); -HRESULT DAPI WiuGetProductProperty( - __in MSIHANDLE hProduct, - __in_z LPCWSTR wzProperty, - __out LPWSTR* psczValue - ); -HRESULT DAPI WiuGetPatchInfoEx( - __in_z LPCWSTR wzPatchCode, - __in_z LPCWSTR wzProductCode, - __in_z_opt LPCWSTR wzUserSid, - __in MSIINSTALLCONTEXT dwContext, - __in_z LPCWSTR wzProperty, - __out LPWSTR* psczValue - ); -HRESULT DAPI WiuDeterminePatchSequence( - __in_z LPCWSTR wzProductCode, - __in_z_opt LPCWSTR wzUserSid, - __in MSIINSTALLCONTEXT context, - __in PMSIPATCHSEQUENCEINFOW pPatchInfo, - __in DWORD cPatchInfo - ); -HRESULT DAPI WiuEnumProducts( - __in DWORD iProductIndex, - __out_ecount(MAX_GUID_CHARS + 1) LPWSTR wzProductCode - ); -HRESULT DAPI WiuEnumProductsEx( - __in_z_opt LPCWSTR wzProductCode, - __in_z_opt LPCWSTR wzUserSid, - __in DWORD dwContext, - __in DWORD dwIndex, - __out_opt WCHAR wzInstalledProductCode[39], - __out_opt MSIINSTALLCONTEXT *pdwInstalledContext, - __out_opt LPWSTR wzSid, - __inout_opt LPDWORD pcchSid - ); -HRESULT DAPI WiuEnumRelatedProducts( - __in_z LPCWSTR wzUpgradeCode, - __in DWORD iProductIndex, - __out_ecount(MAX_GUID_CHARS + 1) LPWSTR wzProductCode - ); -HRESULT DAPI WiuEnableLog( - __in DWORD dwLogMode, - __in_z LPCWSTR wzLogFile, - __in DWORD dwLogAttributes - ); -HRESULT DAPI WiuInitializeExternalUI( - __in PFN_MSIEXECUTEMESSAGEHANDLER pfnMessageHandler, - __in INSTALLUILEVEL internalUILevel, - __in HWND hwndParent, - __in LPVOID pvContext, - __in BOOL fRollback, - __in WIU_MSI_EXECUTE_CONTEXT* pExecuteContext - ); -void DAPI WiuUninitializeExternalUI( - __in WIU_MSI_EXECUTE_CONTEXT* pExecuteContext - ); -HRESULT DAPI WiuConfigureProductEx( - __in_z LPCWSTR wzProduct, - __in int iInstallLevel, - __in INSTALLSTATE eInstallState, - __in_z LPCWSTR wzCommandLine, - __out WIU_RESTART* pRestart - ); 
-HRESULT DAPI WiuInstallProduct( - __in_z LPCWSTR wzPackagPath, - __in_z LPCWSTR wzCommandLine, - __out WIU_RESTART* pRestart - ); -HRESULT DAPI WiuRemovePatches( - __in_z LPCWSTR wzPatchList, - __in_z LPCWSTR wzProductCode, - __in_z LPCWSTR wzPropertyList, - __out WIU_RESTART* pRestart - ); -HRESULT DAPI WiuSourceListAddSourceEx( - __in_z LPCWSTR wzProductCodeOrPatchCode, - __in_z_opt LPCWSTR wzUserSid, - __in MSIINSTALLCONTEXT dwContext, - __in DWORD dwCode, - __in_z LPCWSTR wzSource, - __in_opt DWORD dwIndex - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/inc/wuautil.h b/scripts/windows/installer/WiXSDK/inc/wuautil.h deleted file mode 100644 index 59c2853089..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/wuautil.h +++ /dev/null @@ -1,31 +0,0 @@ -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// Header for Windows Update Agent helpers. 
-// -//------------------------------------------------------------------------------------------------- - -#pragma once - - -#if defined(__cplusplus) -extern "C" { -#endif - -HRESULT DAPI WuaPauseAutomaticUpdates(); - -HRESULT DAPI WuaResumeAutomaticUpdates(); - -HRESULT DAPI WuaRestartRequired( - __out BOOL* pfRestartRequired - ); - -#if defined(__cplusplus) -} -#endif \ No newline at end of file diff --git a/scripts/windows/installer/WiXSDK/inc/xmlutil.h b/scripts/windows/installer/WiXSDK/inc/xmlutil.h deleted file mode 100644 index bef0d8a8c5..0000000000 --- a/scripts/windows/installer/WiXSDK/inc/xmlutil.h +++ /dev/null @@ -1,178 +0,0 @@ -#pragma once -//------------------------------------------------------------------------------------------------- -// -// Copyright (c) 2004, Outercurve Foundation. -// This software is released under Microsoft Reciprocal License (MS-RL). -// The license and further copyright text can be found in the file -// LICENSE.TXT at the root directory of the distribution. -// -// -// -// XML helper functions. 
-// -//------------------------------------------------------------------------------------------------- - -// constant XML CLSIDs and IIDs -extern __declspec(selectany) const CLSID XmlUtil_CLSID_DOMDocument = {0x2933BF90, 0x7B36, 0x11d2, {0xB2, 0x0E, 0x00, 0xC0, 0x4F, 0x98, 0x3E, 0x60}}; -extern __declspec(selectany) const CLSID XmlUtil_CLSID_DOMDocument20 = {0xF6D90F11, 0x9C73, 0x11D3, {0xB3, 0x2E, 0x00, 0xC0, 0x4F, 0x99, 0x0B, 0xB4}}; -extern __declspec(selectany) const CLSID XmlUtil_CLSID_DOMDocument26 = {0xf5078f1b, 0xc551, 0x11d3, {0x89, 0xb9, 0x00, 0x00, 0xf8, 0x1f, 0xe2, 0x21}}; -extern __declspec(selectany) const CLSID XmlUtil_CLSID_DOMDocument30 = {0xf5078f32, 0xc551, 0x11d3, {0x89, 0xb9, 0x00, 0x00, 0xf8, 0x1f, 0xe2, 0x21}}; -extern __declspec(selectany) const CLSID XmlUtil_CLSID_DOMDocument40 = {0x88d969c0, 0xf192, 0x11d4, {0xa6, 0x5f, 0x00, 0x40, 0x96, 0x32, 0x51, 0xe5}}; -extern __declspec(selectany) const CLSID XmlUtil_CLSID_DOMDocument50 = {0x88d969e5, 0xf192, 0x11d4, {0xa6, 0x5f, 0x00, 0x40, 0x96, 0x32, 0x51, 0xe5}}; -extern __declspec(selectany) const CLSID XmlUtil_CLSID_DOMDocument60 = {0x88d96a05, 0xf192, 0x11d4, {0xa6, 0x5f, 0x00, 0x40, 0x96, 0x32, 0x51, 0xe5}}; -extern __declspec(selectany) const CLSID XmlUtil_CLSID_XMLSchemaCache = {0x88d969c2, 0xf192, 0x11d4, {0xa6, 0x5f, 0x00, 0x40, 0x96, 0x32, 0x51, 0xe5}}; - -extern __declspec(selectany) const IID XmlUtil_IID_IXMLDOMDocument = {0x2933BF81, 0x7B36, 0x11D2, {0xB2, 0x0E, 0x00, 0xC0, 0x4F, 0x98, 0x3E, 0x60}}; -extern __declspec(selectany) const IID XmlUtil_IID_IXMLDOMDocument2 = {0x2933BF95, 0x7B36, 0x11D2, {0xB2, 0x0E, 0x00, 0xC0, 0x4F, 0x98, 0x3E, 0x60}}; -extern __declspec(selectany) const IID XmlUtil_IID_IXMLDOMSchemaCollection = {0x373984C8, 0xB845, 0x449B, {0x91, 0xE7, 0x45, 0xAC, 0x83, 0x03, 0x6A, 0xDE}}; - -enum XML_LOAD_ATTRIBUTE -{ - XML_LOAD_PRESERVE_WHITESPACE = 1, -}; - - -#ifdef __cplusplus -extern "C" { -#endif - -HRESULT DAPI XmlInitialize(); -void DAPI XmlUninitialize(); - 
-HRESULT DAPI XmlCreateElement( - __in IXMLDOMDocument *pixdDocument, - __in_z LPCWSTR wzElementName, - __out IXMLDOMElement **ppixnElement - ); -HRESULT DAPI XmlCreateDocument( - __in_opt LPCWSTR pwzElementName, - __out IXMLDOMDocument** ppixdDocument, - __out_opt IXMLDOMElement** ppixeRootElement = NULL - ); -HRESULT DAPI XmlLoadDocument( - __in_z LPCWSTR wzDocument, - __out IXMLDOMDocument** ppixdDocument - ); -HRESULT DAPI XmlLoadDocumentEx( - __in_z LPCWSTR wzDocument, - __in DWORD dwAttributes, - __out IXMLDOMDocument** ppixdDocument - ); -HRESULT DAPI XmlLoadDocumentFromFile( - __in_z LPCWSTR wzPath, - __out IXMLDOMDocument** ppixdDocument - ); -HRESULT DAPI XmlLoadDocumentFromBuffer( - __in_bcount(cbSource) const BYTE* pbSource, - __in DWORD cbSource, - __out IXMLDOMDocument** ppixdDocument - ); -HRESULT DAPI XmlLoadDocumentFromFileEx( - __in_z LPCWSTR wzPath, - __in DWORD dwAttributes, - __out IXMLDOMDocument** ppixdDocument - ); -HRESULT DAPI XmlSelectSingleNode( - __in IXMLDOMNode* pixnParent, - __in_z LPCWSTR wzXPath, - __out IXMLDOMNode **ppixnChild - ); -HRESULT DAPI XmlSetAttribute( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR pwzAttribute, - __in_z LPCWSTR pwzAttributeValue - ); -HRESULT DAPI XmlCreateTextNode( - __in IXMLDOMDocument *pixdDocument, - __in_z LPCWSTR wzText, - __out IXMLDOMText **ppixnTextNode - ); -HRESULT DAPI XmlGetText( - __in IXMLDOMNode* pixnNode, - __deref_out_z BSTR* pbstrText - ); -HRESULT DAPI XmlGetAttribute( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR pwzAttribute, - __deref_out_z BSTR* pbstrAttributeValue - ); -HRESULT DAPI XmlGetAttributeEx( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR wzAttribute, - __deref_out_z LPWSTR* psczAttributeValue - ); -HRESULT DAPI XmlGetYesNoAttribute( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR wzAttribute, - __out BOOL* pfYes - ); -HRESULT DAPI XmlGetAttributeNumber( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR pwzAttribute, - __out DWORD* pdwValue - ); -HRESULT DAPI 
XmlGetAttributeNumberBase( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR pwzAttribute, - __in int nBase, - __out DWORD* pdwValue - ); -HRESULT DAPI XmlGetAttributeLargeNumber( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR pwzAttribute, - __out DWORD64* pdw64Value - ); -HRESULT DAPI XmlGetNamedItem( - __in IXMLDOMNamedNodeMap *pixnmAttributes, - __in_opt LPCWSTR wzName, - __out IXMLDOMNode **ppixnNamedItem - ); -HRESULT DAPI XmlSetText( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR pwzText - ); -HRESULT DAPI XmlSetTextNumber( - __in IXMLDOMNode *pixnNode, - __in DWORD dwValue - ); -HRESULT DAPI XmlCreateChild( - __in IXMLDOMNode* pixnParent, - __in_z LPCWSTR pwzElementType, - __out IXMLDOMNode** ppixnChild - ); -HRESULT DAPI XmlRemoveAttribute( - __in IXMLDOMNode* pixnNode, - __in_z LPCWSTR pwzAttribute - ); -HRESULT DAPI XmlSelectNodes( - __in IXMLDOMNode* pixnParent, - __in_z LPCWSTR wzXPath, - __out IXMLDOMNodeList **ppixnChild - ); -HRESULT DAPI XmlNextAttribute( - __in IXMLDOMNamedNodeMap* pixnnm, - __out IXMLDOMNode** pixnAttribute, - __deref_opt_out_z_opt BSTR* pbstrAttribute - ); -HRESULT DAPI XmlNextElement( - __in IXMLDOMNodeList* pixnl, - __out IXMLDOMNode** pixnElement, - __deref_opt_out_z_opt BSTR* pbstrElement - ); -HRESULT DAPI XmlRemoveChildren( - __in IXMLDOMNode* pixnSource, - __in_z LPCWSTR pwzXPath - ); -HRESULT DAPI XmlSaveDocument( - __in IXMLDOMDocument* pixdDocument, - __inout LPCWSTR wzPath - ); -HRESULT DAPI XmlSaveDocumentToBuffer( - __in IXMLDOMDocument* pixdDocument, - __deref_out_bcount(*pcbDest) BYTE** ppbDest, - __out DWORD* pcbDest - ); - -#ifdef __cplusplus -} -#endif diff --git a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/balutil.lib b/scripts/windows/installer/WiXSDK/vs2010/lib/x86/balutil.lib deleted file mode 100644 index 395bdd6277..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/balutil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/deputil.lib 
b/scripts/windows/installer/WiXSDK/vs2010/lib/x86/deputil.lib deleted file mode 100644 index a50ac90476..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/deputil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/dutil.lib b/scripts/windows/installer/WiXSDK/vs2010/lib/x86/dutil.lib deleted file mode 100644 index 01218ae9ad..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/dutil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/wcautil.lib b/scripts/windows/installer/WiXSDK/vs2010/lib/x86/wcautil.lib deleted file mode 100644 index e46cbd4d5d..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2010/lib/x86/wcautil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/balutil.lib b/scripts/windows/installer/WiXSDK/vs2012/lib/x86/balutil.lib deleted file mode 100644 index 2a91dde774..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/balutil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/deputil.lib b/scripts/windows/installer/WiXSDK/vs2012/lib/x86/deputil.lib deleted file mode 100644 index 01e37e7e52..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/deputil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/dutil.lib b/scripts/windows/installer/WiXSDK/vs2012/lib/x86/dutil.lib deleted file mode 100644 index 09344c7718..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/dutil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/wcautil.lib b/scripts/windows/installer/WiXSDK/vs2012/lib/x86/wcautil.lib deleted file mode 100644 index dce658c30f..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2012/lib/x86/wcautil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/balutil.lib 
b/scripts/windows/installer/WiXSDK/vs2013/lib/x86/balutil.lib deleted file mode 100644 index 1a46d16c42..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/balutil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/deputil.lib b/scripts/windows/installer/WiXSDK/vs2013/lib/x86/deputil.lib deleted file mode 100644 index cd372adf78..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/deputil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/dutil.lib b/scripts/windows/installer/WiXSDK/vs2013/lib/x86/dutil.lib deleted file mode 100644 index 529651fd04..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/dutil.lib and /dev/null differ diff --git a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/wcautil.lib b/scripts/windows/installer/WiXSDK/vs2013/lib/x86/wcautil.lib deleted file mode 100644 index 4a43e90f78..0000000000 Binary files a/scripts/windows/installer/WiXSDK/vs2013/lib/x86/wcautil.lib and /dev/null differ diff --git a/scripts/windows/installer/build.bat b/scripts/windows/installer/build.bat deleted file mode 100755 index b936db2db3..0000000000 --- a/scripts/windows/installer/build.bat +++ /dev/null @@ -1,92 +0,0 @@ -@echo off - -set MSBUILD="%SystemRoot%\Microsoft.NET\Framework\v4.0.30319\msbuild.exe" - - -IF "%1"=="" GOTO :BUILD -IF "%1"=="clean" GOTO :CLEAN - -:BUILD - -if not exist Release md Release - -echo Building WiXBalExtension... -pushd WiXBalExtension -Call Build -popd - -rem GOTO :Installer - -echo Building custom action collection 32-bit library (WiXHelper project) -%MSBUILD% WiXHelper\WiXHelper.vcxproj /t:Rebuild /p:Configuration="Release" /p:Platform=Win32 /p:DefineConstants="TRACE" /clp:ErrorsOnly -if %errorlevel% neq 0 ( - echo Build failed. 
- rem pause - goto :EOF -) - - -rem We don't have a 64 bit msi package, so I will command this -rem echo Building custom action collection 64-bit library (WiXHelper project) -rem %MSBUILD% WiXHelper\WiXHelper.vcxproj /t:Rebuild /p:Configuration="Release" /p:Platform=x64 /p:DefineConstants="TRACE" /clp:ErrorsOnly -rem if %errorlevel% neq 0 ( -rem echo Build failed. -rem pause -rem goto :EOF -rem ) - - -:Installer - -echo Building Meteor installer package... -%MSBUILD% MeteorSetup.sln /t:Rebuild /p:Configuration="Release" /p:Platform="x86" /p:DefineConstants="TRACE" /clp:ErrorsOnly -if %errorlevel% neq 0 ( - echo Build failed - rem pause - goto :EOF -) - - -goto :EOF - -REM ***************************************************************** -REM End of Main -REM ***************************************************************** - - -:CLEAN -IF EXIST WiXHelper\*.sdf DEL /Q WiXHelper\*.sdf -IF EXIST WiXHelper\*.suo DEL /Q /A:H WiXHelper\*.suo -IF EXIST WiXBalExtension\*.sdf DEL /Q WiXBalExtension\*.sdf -IF EXIST WiXBalExtension\*.suo DEL /Q /A:H WiXBalExtension\*.suo - -Call :DeleteDir "ipch" -Call :DeleteDir "WiXHelper\ipch" -Call :DeleteDir "WiXHelper\bin" -Call :DeleteDir "WiXHelper\obj" -Call :DeleteDir "WiXInstaller\bin" -Call :DeleteDir "WiXInstaller\obj" -Call :DeleteDir "WiXBalExtension\ipch" -Call :DeleteDir "WiXBalExtension\wixstdba\Release" -Call :DeleteDir "WiXBalExtension\wixstdba\Debug" -Call :DeleteDir "WiXBalExtension\wixlib\obj" -Call :DeleteDir "WiXBalExtension\wixext\obj" -Call :DeleteDir "WiXBalExtension\wixext\bin" -Call :DeleteDir "WiXBalExtension\bafunctions\Release" -Call :DeleteDir "WiXBalExtension\bafunctions\Debug" -Call :DeleteDir "WiXBalExtension\build\Xsd" - -for /f "usebackq delims=" %%I in (`dir /s /b WiXBalExtension\build\*`) do if not %%~nxI==WixBalExtensionExt.dll del /Q "%%I" -goto :EOF - - - -REM ***************************************************************** -REM Delete/create directory -REM 
***************************************************************** -:DeleteDir -rd %1% /q/s 2>nul 1>nul -goto :EOF - - - diff --git a/tools/README.md b/tools/README.md index 1f61c8f4fe..40d057058e 100644 --- a/tools/README.md +++ b/tools/README.md @@ -79,7 +79,7 @@ argument, it will be matched against test names: ``` You can also run a particular file, or list all tests matching certain -pattern, run with phantom or browserstack. +pattern, run with puppeteer (default), phantom or browserstack. See more at `./meteor help self-test`. If you want to learn how to write a self-test, see the `tool-testing` diff --git a/tools/cli/commands-cordova.js b/tools/cli/commands-cordova.js index 99abed31b4..ffec0c23c7 100644 --- a/tools/cli/commands-cordova.js +++ b/tools/cli/commands-cordova.js @@ -8,7 +8,7 @@ import { CORDOVA_PLATFORMS, ensureDevBundleDependencies, filterPlatforms, -} from '../cordova'; +} from '../cordova/index.js'; function createProjectContext(appDir) { import { ProjectContext } from '../project-context.js'; @@ -34,7 +34,7 @@ function doAddPlatform(options) { let installedPlatforms = projectContext.platformList.getPlatforms(); main.captureAndExit('', 'adding platforms', () => { - for (platform of platformsToAdd) { + for (var platform of platformsToAdd) { if (_.contains(installedPlatforms, platform)) { buildmessage.error(`${platform}: platform is already added`); } else if (!_.contains(CORDOVA_PLATFORMS, platform)) { @@ -49,7 +49,7 @@ function doAddPlatform(options) { const cordovaProject = new CordovaProject(projectContext); if (buildmessage.jobHasMessages()) return; - installedPlatforms = installedPlatforms.concat(platformsToAdd) + installedPlatforms = installedPlatforms.concat(platformsToAdd); const cordovaPlatforms = filterPlatforms(installedPlatforms); cordovaProject.ensurePlatformsAreSynchronized(cordovaPlatforms); @@ -60,7 +60,7 @@ function doAddPlatform(options) { // Only write the new platform list when we have succesfully synchronized 
projectContext.platformList.write(installedPlatforms); - for (platform of platformsToAdd) { + for (var platform of platformsToAdd) { Console.info(`${platform}: added platform`); if (_.contains(cordovaPlatforms, platform)) { cordovaProject.checkPlatformRequirements(platform); diff --git a/tools/cli/commands-packages.js b/tools/cli/commands-packages.js index ed5b6f38b9..85ea491d0f 100644 --- a/tools/cli/commands-packages.js +++ b/tools/cli/commands-packages.js @@ -23,7 +23,11 @@ var packageMapModule = require('../packaging/package-map.js'); var packageClient = require('../packaging/package-client.js'); var tropohouse = require('../packaging/tropohouse.js'); -import * as cordova from '../cordova'; +import { + ensureDevBundleDependencies, + newPluginId, + splitPluginsAndPackages, +} from '../cordova/index.js'; import { updateMeteorToolSymlink } from "../packaging/updater.js"; // For each release (or package), we store a meta-record with its name, @@ -929,9 +933,22 @@ main.registerCommand({ throw Error("no isopack for " + packageName); } - var existingBuild = - catalog.official.getBuildWithPreciseBuildArchitectures( - oldVersionRecord, isopk.buildArchitectures()); + const existingBuild = + // First try with the non-simplified build architecture + // list, which is likely to be something like + // os+web.browser+web.browser.legacy+web.cordova: + catalog.official.getBuildWithPreciseBuildArchitectures( + oldVersionRecord, + isopk.buildArchitectures(), + ) || + // If that fails, fall back to the simplified architecture + // list (e.g. os+web.browser+web.cordova), to match packages + // published before the web.browser.legacy architecture was + // introduced (in Meteor 1.7). 
+ catalog.official.getBuildWithPreciseBuildArchitectures( + oldVersionRecord, + isopk.buildArchitectures(true), + ); var somethingChanged; @@ -1998,34 +2015,39 @@ main.registerCommand({ // Split arguments into Cordova plugins and packages const { plugins: pluginsToAdd, packages: packagesToAdd } = - cordova.splitPluginsAndPackages(options.args); + splitPluginsAndPackages(options.args); if (!_.isEmpty(pluginsToAdd)) { - let plugins = projectContext.cordovaPluginsFile.getPluginVersions(); - let changed = false; + function cordovaPluginAdd() { + const plugins = projectContext.cordovaPluginsFile.getPluginVersions(); + let changed = false; - for (target of pluginsToAdd) { - let [id, version] = target.split('@'); + for (target of pluginsToAdd) { + const { id, version } = + require('../cordova/package-id-version-parser.js').parse(target); + const newId = newPluginId(id); - const newId = cordova.newPluginId(id); - - if (!(version && utils.isValidVersion(version, {forCordova: true}))) { - Console.error(`${id}: Meteor requires either an exact version \ -(e.g. ${id}@1.0.0), a Git URL with a SHA reference, or a local path.`); - exitCode = 1; - } else if (newId) { - plugins[newId] = version; - Console.info(`Added Cordova plugin ${newId}@${version} \ -(plugin has been renamed as part of moving to npm).`); - changed = true; - } else { - plugins[id] = version; - Console.info(`Added Cordova plugin ${id}@${version}.`); - changed = true; + if (!(version && utils.isValidVersion(version, {forCordova: true}))) { + Console.error(`${id}: Meteor requires either an exact version \ + (e.g. 
${id}@1.0.0), a Git URL with a SHA reference, or a local path.`); + exitCode = 1; + } else if (newId) { + plugins[newId] = version; + Console.info(`Added Cordova plugin ${newId}@${version} \ + (plugin has been renamed as part of moving to npm).`); + changed = true; + } else { + plugins[id] = version; + Console.info(`Added Cordova plugin ${id}@${version}.`); + changed = true; + } } + + changed && projectContext.cordovaPluginsFile.write(plugins); } - changed && projectContext.cordovaPluginsFile.write(plugins); + ensureDevBundleDependencies(); + cordovaPluginAdd(); } if (_.isEmpty(packagesToAdd)) { @@ -2191,34 +2213,39 @@ main.registerCommand({ // Split arguments into Cordova plugins and packages const { plugins: pluginsToRemove, packages } = - cordova.splitPluginsAndPackages(options.args); + splitPluginsAndPackages(options.args); if (!_.isEmpty(pluginsToRemove)) { - let plugins = projectContext.cordovaPluginsFile.getPluginVersions(); - let changed = false; + function cordovaPluginRemove() { + const plugins = projectContext.cordovaPluginsFile.getPluginVersions(); + let changed = false; - for (id of pluginsToRemove) { - const newId = cordova.newPluginId(id); + for (id of pluginsToRemove) { + const newId = newPluginId(id); - if (/@/.test(id)) { - Console.error(`${id}: do not specify version constraints.`); - exitCode = 1; - } else if (_.has(plugins, id)) { - delete plugins[id]; - Console.info(`Removed Cordova plugin ${id}.`); - changed = true; - } else if (newId && _.has(plugins, newId)) { - delete plugins[newId]; - Console.info(`Removed Cordova plugin ${newId} \ -(plugin has been renamed as part of moving to npm).`); - changed = true; - } else { - Console.error(`Cordova plugin ${id} is not in this project.`); - exitCode = 1; + if (/@/.test(id)) { + Console.error(`${id}: do not specify version constraints.`); + exitCode = 1; + } else if (_.has(plugins, id)) { + delete plugins[id]; + Console.info(`Removed Cordova plugin ${id}.`); + changed = true; + } else if (newId && 
_.has(plugins, newId)) { + delete plugins[newId]; + Console.info(`Removed Cordova plugin ${newId} \ + (plugin has been renamed as part of moving to npm).`); + changed = true; + } else { + Console.error(`Cordova plugin ${id} is not in this project.`); + exitCode = 1; + } } + + changed && projectContext.cordovaPluginsFile.write(plugins); } - changed && projectContext.cordovaPluginsFile.write(plugins); + ensureDevBundleDependencies(); + cordovaPluginRemove(); } if (_.isEmpty(packages)) { diff --git a/tools/cli/commands.js b/tools/cli/commands.js index cea192fda4..8b7b18c65d 100644 --- a/tools/cli/commands.js +++ b/tools/cli/commands.js @@ -16,6 +16,9 @@ var Console = require('../console/console.js').Console; var projectContextModule = require('../project-context.js'); var release = require('../packaging/release.js'); +const { Profile } = require("../tool-env/profile.js"); + +import { ensureDevBundleDependencies } from '../cordova/index.js'; import { CordovaRunner } from '../cordova/runner.js'; import { iOSRunTarget, AndroidRunTarget } from '../cordova/run-targets.js'; @@ -381,9 +384,12 @@ function doRunCommand(options) { runLog.setRawLogs(true); } - let webArchs = ['web.browser']; - if (!_.isEmpty(runTargets) || options['mobile-server']) { - webArchs.push("web.cordova"); + let webArchs = projectContext.platformList.getWebArchs(); + if (! _.isEmpty(runTargets) || + options['mobile-server']) { + if (webArchs.indexOf("web.cordova") < 0) { + webArchs.push("web.cordova"); + } } let cordovaRunner; @@ -403,7 +409,6 @@ function doRunCommand(options) { }); } - import { ensureDevBundleDependencies } from '../cordova'; ensureDevBundleDependencies(); prepareCordovaProject(); } @@ -419,7 +424,7 @@ function doRunCommand(options) { settingsFile: options.settings, buildOptions: { minifyMode: options.production ? 'production' : 'development', - buildMode: options.production && 'production', + buildMode: options.production ? 
'production' : 'development', webArchs: webArchs }, rootUrl: process.env.ROOT_URL, @@ -486,8 +491,10 @@ main.registerCommand({ list: { type: Boolean }, example: { type: String }, package: { type: Boolean }, + bare: { type: Boolean }, + minimal: { type: Boolean }, full: { type: Boolean }, - bare: { type: Boolean } + react: { type: Boolean }, }, catalogRefresh: new catalog.Refresh.Never() }, function (options) { @@ -582,7 +589,8 @@ main.registerCommand({ return contents; } }, - ignore: [/^local$/] + ignore: [/^local$/], + preserveSymlinks: true, }); } catch (err) { Console.error("Could not create package: " + err.message); @@ -622,7 +630,7 @@ main.registerCommand({ if (options.list) { Console.info("Available examples:"); _.each(EXAMPLE_REPOSITORIES, function (repoInfo, name) { - const branchInfo = repoInfo.branch ? `#${repoInfo.branch}` : ''; + const branchInfo = repoInfo.branch ? `/tree/${repoInfo.branch}` : ''; Console.info( Console.command(`${name}: ${repoInfo.repo}${branchInfo}`), Console.options({ indent: 2 })); @@ -649,7 +657,7 @@ main.registerCommand({ const branchOption = repoInfo.branch ? ` -b ${repoInfo.branch}` : ''; const path = options.args.length === 1 ? ` ${options.args[0]}` : ''; - Console.info(`To create the ${options.example} example, please run:`) + Console.info(`To create the ${options.example} example, please run:`); Console.info( Console.command(`git clone ${repoInfo.repo}${branchOption}${path}`), Console.options({ indent: 2 })); @@ -723,20 +731,22 @@ main.registerCommand({ }); } - var toIgnore = [/^local$/, /^\.id$/] + var toIgnore = [/^local$/, /^\.id$/]; if (destinationHasCodeFiles) { // If there is already source code in the directory, don't copy our // skeleton app code over it. 
Just create the .meteor folder and metadata - toIgnore.push(/(\.html|\.js|\.css)/) + toIgnore.push(/(\.html|\.js|\.css)/); } - let skelName = 'skel'; - - if(options.bare){ - skelName += '-bare'; - } - else if(options.full){ - skelName += '-full'; + let skelName = "skel"; + if (options.minimal) { + skelName += "-minimal"; + } else if (options.bare) { + skelName += "-bare"; + } else if (options.full) { + skelName += "-full"; + } else if (options.react) { + skelName += "-react"; } files.cp_r(files.pathJoin(__dirnameConverted, '..', 'static-assets', skelName), appPath, { @@ -750,7 +760,8 @@ main.registerCommand({ return contents; } }, - ignore: toIgnore + ignore: toIgnore, + preserveSymlinks: true, }); // We are actually working with a new meteor project at this point, so @@ -796,7 +807,12 @@ main.registerCommand({ // the packages (or maybe an unpredictable subset based on what happens to be // in the template's versions file). - require("./default-npm-deps.js").install(appPath); + // Since some of the project skeletons include npm `devDependencies`, we need + // to make sure they're included when running `npm install`. + require("./default-npm-deps.js").install( + appPath, + { includeDevDependencies: true } + ); var appNameToDisplay = appPathAsEntered === "." ? "current directory" : `'${appPathAsEntered}'`; @@ -811,6 +827,12 @@ main.registerCommand({ // do next. Console.info("To run your new app:"); + function cmd(text) { + Console.info(Console.command(text), Console.options({ + indent: 2 + })); + } + if (appPathAsEntered !== ".") { // Wrap the app path in quotes if it contains spaces const appPathWithQuotesIfSpaces = appPathAsEntered.indexOf(' ') === -1 ? @@ -818,13 +840,10 @@ main.registerCommand({ `'${appPathAsEntered}'`; // Don't tell people to 'cd .' 
- Console.info( - Console.command("cd " + appPathWithQuotesIfSpaces), - Console.options({ indent: 2 })); + cmd("cd " + appPathWithQuotesIfSpaces); } - Console.info( - Console.command("meteor"), Console.options({ indent: 2 })); + cmd("meteor"); Console.info(""); Console.info("If you are new to Meteor, try some of the learning resources here:"); @@ -832,13 +851,23 @@ main.registerCommand({ Console.url("https://www.meteor.com/tutorials"), Console.options({ indent: 2 })); - if (!options.full && !options.bare){ - // Notice people about --bare and --full - const bareOptionNotice = 'meteor create --bare to create an empty app.'; - const fullOptionNotice = 'meteor create --full to create a scaffolded app.'; + if (! options.bare && + ! options.minimal && + ! options.full && + ! options.react) { + // Notify people about --bare, --minimal, --full, and --react. + Console.info([ + "", + "To start with a different app template, try one of the following:", + "", + ].join("\n")); - Console.info(""); - Console.info(bareOptionNotice + '\n' + fullOptionNotice); + cmd("meteor create --bare # to create an empty app"); + cmd("meteor create --minimal # to create an app with as few " + + "Meteor packages as possible"); + cmd("meteor create --full # to create a more complete " + + "scaffolded app"); + cmd("meteor create --react # to create a basic React-based app"); } Console.info(""); @@ -871,27 +900,40 @@ var buildCommands = { catalogRefresh: new catalog.Refresh.Never() }; -main.registerCommand(_.extend({ name: 'build' }, buildCommands), - function (options) { - return buildCommand(options); +main.registerCommand({ + name: "build", + ...buildCommands, +}, async function (options) { + return Profile.run( + "meteor build", + () => Promise.await(buildCommand(options)) + ); }); // Deprecated -- identical functionality to 'build' with one exception: it // doesn't output a directory with all builds but rather only one tarball with // server/client programs. 
// XXX COMPAT WITH 0.9.1.1 -main.registerCommand(_.extend({ name: 'bundle', hidden: true - }, buildCommands), - function (options) { +main.registerCommand({ + name: "bundle", + hidden: true, + ...buildCommands, +}, async function (options) { + Console.error( + "This command has been deprecated in favor of " + + Console.command("'meteor build'") + ", which allows you to " + + "build for multiple platforms and outputs a directory instead of " + + "a single tarball. See " + Console.command("'meteor help build'") + " " + + "for more information."); + Console.error(); - Console.error( - "This command has been deprecated in favor of " + - Console.command("'meteor build'") + ", which allows you to " + - "build for multiple platforms and outputs a directory instead of " + - "a single tarball. See " + Console.command("'meteor help build'") + " " + - "for more information."); - Console.error(); - return buildCommand(_.extend(options, { _bundleOnly: true })); + return Profile.run( + "meteor bundle", + () => Promise.await(buildCommand({ + ...options, + _bundleOnly: true, + })) + ); }); var buildCommand = function (options) { @@ -908,9 +950,8 @@ var buildCommand = function (options) { // of the file, not a constant 'bundle' (a bit obnoxious for // machines, but worth it for humans) - // Error handling for options.architecture. We must pass in only one of three - // architectures. See archinfo.js for more information on what the - // architectures are, what they mean, et cetera. + // Error handling for options.architecture. See archinfo.js for more + // information on what the architectures are, what they mean, et cetera. 
if (options.architecture && !_.has(archinfo.VALID_ARCHITECTURES, options.architecture)) { showInvalidArchMsg(options.architecture); @@ -925,6 +966,8 @@ var buildCommand = function (options) { }); main.captureAndExit("=> Errors while initializing project:", function () { + // TODO Fix the nested Profile.run warning here, without interfering + // with METEOR_PROFILE output for other commands, like `meteor run`. projectContext.prepareProjectForBuild(); }); projectContext.packageMapDelta.displayOnConsole(); @@ -1042,10 +1085,9 @@ ${Console.command("meteor build ../output")}`, main.captureAndExit('', () => { import { - ensureDevBundleDependencies, pluginVersionsFromStarManifest, displayNameForPlatform, - } from '../cordova'; + } from '../cordova/index.js'; ensureDevBundleDependencies(); @@ -1099,7 +1141,7 @@ https://guide.meteor.com/mobile.html#submitting-ios `, "utf8"); } else if (platform === 'android') { const apkPath = files.pathJoin(buildPath, 'build/outputs/apk', - options.debug ? 'android-debug.apk' : 'android-release-unsigned.apk') + options.debug ? 'android-debug.apk' : 'android-release-unsigned.apk'); if (files.exists(apkPath)) { files.copyFile(apkPath, files.pathJoin(platformOutputPath, @@ -1348,14 +1390,23 @@ main.registerCommand({ // people to deploy from checkout or do other weird shit. We are not // responsible for the consequences. 'override-architecture-with-local' : { type: Boolean }, - 'allow-incompatible-update': { type: Boolean } + 'allow-incompatible-update': { type: Boolean }, + 'deploy-polling-timeout': { type: Number }, + 'no-wait': { type: Boolean }, }, allowUnrecognizedOptions: true, requiresApp: function (options) { return ! 
options.delete; }, catalogRefresh: new catalog.Refresh.Never() -}, function (options, {rawOptions}) { +}, async function (...args) { + return Profile.run( + "meteor deploy", + () => Promise.await(deployCommand(...args)) + ); +}); + +function deployCommand(options, { rawOptions }) { var site = options.args[0]; if (options.delete) { @@ -1399,6 +1450,7 @@ main.registerCommand({ }); main.captureAndExit("=> Errors while initializing project:", function () { + // TODO Fix nested Profile.run warning here, too. projectContext.prepareProjectForBuild(); }); projectContext.packageMapDelta.displayOnConsole(); @@ -1409,12 +1461,21 @@ main.registerCommand({ serverArch: buildArch }; + let deployPollingTimeoutMs = null; + if (options['deploy-polling-timeout']) { + deployPollingTimeoutMs = options['deploy-polling-timeout']; + } + + const waitForDeploy = !options['no-wait']; + var deployResult = deploy.bundleAndDeploy({ projectContext: projectContext, site: site, settingsFile: options.settings, buildOptions: buildOptions, - rawOptions + rawOptions, + deployPollingTimeoutMs, + waitForDeploy, }); if (deployResult === 0) { @@ -1428,22 +1489,7 @@ main.registerCommand({ } return deployResult; -}); - -/////////////////////////////////////////////////////////////////////////////// -// logs -/////////////////////////////////////////////////////////////////////////////// - -main.registerCommand({ - name: 'logs', - minArgs: 1, - maxArgs: 1, - catalogRefresh: new catalog.Refresh.Never() -}, function (options) { - var site = qualifySitename(options.args[0]); - - return deploy.logs(site); -}); +} /////////////////////////////////////////////////////////////////////////////// // authorized @@ -1828,7 +1874,6 @@ function doTestCommand(options) { }); } - import { ensureDevBundleDependencies } from '../cordova'; ensureDevBundleDependencies(); prepareCordovaProject(); } @@ -1883,7 +1928,8 @@ var getTestPackageNames = function (projectContext, packageNames) { version = 
projectContext.localCatalog.getVersionBySourceRoot( files.pathResolve(p)); if (! version) { - throw Error("should have been caught when initializing catalog?"); + buildmessage.error("Package not found in local catalog"); + return; } if (version.testName) { testPackages.push(version.testName); @@ -2178,6 +2224,7 @@ main.registerCommand({ slow: { type: Boolean }, galaxy: { type: Boolean }, browserstack: { type: Boolean }, + phantom: { type: Boolean }, // Indicates whether these self-tests are running headless, e.g. in a // continuous integration testing environment, where visual niceties // like progress bars and spinners are unimportant. @@ -2191,6 +2238,7 @@ main.registerCommand({ // Only run tests with this tag 'with-tag': { type: String }, junit: { type: String }, + retries: { type: Number, default: 2 }, }, hidden: true, catalogRefresh: new catalog.Refresh.Never() @@ -2265,8 +2313,10 @@ main.registerCommand({ return 0; } - var clients = { - browserstack: options.browserstack + const clients = { + puppeteer: true, // Puppeteer is always enabled. + phantom: options.phantom, + browserstack: options.browserstack, }; if (options.headless) { @@ -2285,6 +2335,7 @@ main.registerCommand({ fileRegexp: fileRegexp, excludeRegexp: excludeRegexp, // other options + retries: options.retries, historyLines: options.history, clients: clients, junit: options.junit && files.pathResolve(options.junit), diff --git a/tools/cli/default-npm-deps.js b/tools/cli/default-npm-deps.js index 95df83b24d..4d4fa2a5c7 100644 --- a/tools/cli/default-npm-deps.js +++ b/tools/cli/default-npm-deps.js @@ -8,7 +8,7 @@ import { const INSTALL_JOB_MESSAGE = "installing npm dependencies"; -export function install(appDir) { +export function install(appDir, options) { const packageJsonPath = pathJoin(appDir, "package.json"); const needTempPackageJson = ! 
statOrNull(packageJsonPath); @@ -25,9 +25,13 @@ export function install(appDir) { } const ok = buildmessage.enterJob(INSTALL_JOB_MESSAGE, function () { - const { runNpmCommand } = require("../isobuild/meteor-npm.js"); + const npmCommand = ["install"]; + if (options && options.includeDevDependencies) { + npmCommand.push("--production=false"); + } - const installResult = runNpmCommand(["install"], appDir); + const { runNpmCommand } = require("../isobuild/meteor-npm.js"); + const installResult = runNpmCommand(npmCommand, appDir); if (! installResult.success) { buildmessage.error( "Could not install npm dependencies for test-packages: " + diff --git a/tools/cli/dev-bundle-bin-helpers.js b/tools/cli/dev-bundle-bin-helpers.js index 136f6b0693..ce0ffa13d3 100644 --- a/tools/cli/dev-bundle-bin-helpers.js +++ b/tools/cli/dev-bundle-bin-helpers.js @@ -70,6 +70,10 @@ exports.getEnv = function (options) { var paths = [ // When npm looks for node, it must find dev_bundle/bin/node. path.join(devBundleDir, "bin"), + + // When npm looks for meteor, it should find dev_bundle/../meteor. + path.dirname(devBundleDir), + // Also make available any scripts installed by packages in // dev_bundle/lib/node_modules, such as node-gyp. path.join(devBundleDir, "lib", "node_modules", ".bin") diff --git a/tools/cli/help.txt b/tools/cli/help.txt index 5efa3897a1..d359e87cae 100644 --- a/tools/cli/help.txt +++ b/tools/cli/help.txt @@ -142,15 +142,17 @@ Options: >>> create Create a new project. -Usage: meteor create [--release ] [--bare|--full] +Usage: meteor create [--release ] [--bare|--minimal|--full|--react] meteor create [--release ] --example [] meteor create --list meteor create --package [] Make a subdirectory named if it doesn't exist and create a new Meteor app there. You can pass an absolute path, relative path, or '.' for the current -directory. Use the --bare option to create an empty app. To scaffold the app -use the --full option. +directory. 
Use the --bare option to create an empty app, or the --minimal option +to create an empty app with as few Meteor packages as possible. To scaffold a +more complete app, use the --full option. To create a basic React-based app, +pass the --react option. With the --package option, creates a Meteor package instead of an app. If you're in an app, the package will go in the app's top-level 'packages' directory; @@ -170,7 +172,9 @@ Options: --example Example template to use. --list Show list of available examples. --bare Create an empty app. - --full Create a scaffolded app. + --minimal Create an app with as few Meteor packages as possible. + --full Create a fully scaffolded app. + --react Create a basic React-based app. >>> update @@ -386,13 +390,14 @@ Options: --architecture Builds the server for a different architecture than your developer machine's architecture. Valid architectures include os.osx.x86_64, os.linux.x86_64, os.linux.x86_32, - and os.windows.x86_32. Note: This option selects the - architecture of the binary-dependent Atmosphere packages - you would like bundled into your application, when those - packages were specifically published for multiple - architectures (i.e. with meteor publish-for-arch). If - your project doesn't use any Atmosphere packages that - have binary dependencies, --architecture has no effect. + os.windows.x86_32, and os.windows.x86_64. Note: This + option selects the architecture of the binary-dependent + Atmosphere packages you would like bundled into your + application, when those packages were specifically + published for multiple architectures (i.e. with meteor + publish-for-arch). If your project doesn't use any + Atmosphere packages that have binary dependencies, + --architecture has no effect. 
--allow-incompatible-update Allow packages in your project to be upgraded or downgraded to versions that are potentially incompatible with the current versions, if required to satisfy all @@ -439,26 +444,22 @@ commands can be accessed by pressing the up arrow. >>> mongo -Connect to the Mongo database for the specified site. +Connect to the local Mongo database Usage: meteor mongo [--url] Opens a Mongo shell to view or manipulate collections. -If site is specified, this is the hosted Mongo database for the deployed -Meteor site. - -If no site is specified, this is the current project's local development -database. In this case, the current working directory must be a -Meteor project directory, and the Meteor application must already be -running. - Instead of opening a shell, specifying --url (-U) will return a URL suitable for an external program to connect to the database. For remote databases on deployed applications, the URL is valid for one hour. Options: - --url, -U return a Mongo database URL + --url, -U return a Mongo database URL +Currently, this feature can only be used when developing locally. +The opened Mongo shell connects to the current project's local +development database. The current working directory must be a Meteor +project directory, and the Meteor application must already be running. >>> reset Reset the project state. Erases the local database. @@ -468,16 +469,16 @@ Reset the current project to a fresh state. Removes all local data. >>> deploy -Deploy this project to Meteor. +Deploy this project to Galaxy, Meteor's hosting service. Usage: meteor deploy [--settings settings.json] [--debug] [--delete] Deploys the project in your current directory to Meteor's servers. -You can deploy to any available name under 'meteor.com' +You can deploy to any available name under 'meteorapp.com' without any additional configuration, for example, -'myapp.meteor.com'. If you deploy to a custom domain, such as +'myapp.meteorapp.com'. 
If you deploy to a custom domain, such as 'myapp.mydomain.com', then you'll also need to configure your domain's -DNS records. See the Meteor docs for details. +DNS records. See the Meteor / Galaxy docs (http://galaxy-guide.meteor.com/dns.html) for details. The --settings flag can be used to pass deploy-specific information to the application. It will be available at runtime in Meteor.settings, but only @@ -498,14 +499,11 @@ Options: downgraded to versions that are potentially incompatible with the current versions, if required to satisfy all package version constraints. - - ->>> logs -Show logs for specified site. -Usage: meteor logs - -Retrieves the server logs for the requested site. - + --deploy-polling-timeout The number of milliseconds to wait for build/deploy + success or failure after a successful upload of your app's + minified code; defaults to 15 minutes. + --no-wait Exits when Meteor has uploaded the app's code instead of + waiting for the deploy to conclude. >>> authorized View or change authorized users and organizations for a site. @@ -517,7 +515,7 @@ Usage: meteor authorized [--list] Without an argument (or with --list), list the users and organizations that are administrators for a particular site that was deployed with 'meteor deploy' -For free hosting: +For hosting: With --add, add an authorized user or organization to a site. Use this to give your collaborators the ability to work with your sites. @@ -533,8 +531,8 @@ With --transfer, transfer the ownership of the application to a new user or organization. Options: - --add add an authorized user or organization (for free hosting) - --remove remove an authorized user or organization (for free hosting) + --add add an authorized user or organization + --remove remove an authorized user or organization --transfer transfer the (Galaxy) app to a new user or organization --list list authorized users and organizations (the default) @@ -615,9 +613,8 @@ Options: test app rebuild. 
--extra-packages Run with additional packages (comma separated, for example: --extra-packages "package-name1, package-name2@1.2.3") - --driver-package Name of the optional test driver package to use to run - tests and display results. For example: - --driver-package practicalmeteor:mocha + --driver-package Test driver package to use to run tests and display results. + For example: --driver-package meteortesting:mocha >>> test Test the application @@ -625,25 +622,47 @@ Usage: meteor test --driver-package [options] meteor test --full-app --driver-package [options] Runs tests against the application. -Will start a special app based on a test driver (specified with ---driver-package -- read more about driver packages at -http://guide.meteor.com/testing.html#driver-packages) which handles the -task of running tests and displaying the results in the browser when you -visit it. +Will start a temporary app based on a test driver (specified with +--driver-package: http://guide.meteor.com/testing.html#driver-packages) which +handles the task of running tests and displaying the results in the browser. -In normal test mode, no files in your application are eagerly loaded, aside -from test files (files named *.test[s].* or *.spec[s].* placed anywhere -in your application). You can import your app's modules from within your -tests and use them as normal. +In normal 'meteor test' mode, no files in your application are eagerly loaded, +aside from test files (files named *.test[s].* or *.spec[s].* placed anywhere +in your application). These eagerly-loaded test modules can import application +modules in order to test application logic. -In "full app" test mode, your app is loaded as usual, and then made hidden, -and your tests can inspect and effect the running state. Test files are -loaded similarly to unit test mode, but must be called *.app-test[s].* or -*.app-spec[s].*. 
+In 'meteor test --full-app' mode, your app is loaded as usual, then hidden, so +that your tests can inspect and interact with the full running application. +Test files are loaded similarly to 'meteor test' mode, but must be called +*.app-test[s].* or *.app-spec[s].*. -Open the test dashboard in your browser to run the tests and see the -results. By default the URL is localhost:3000 but that can be changed -with --port. +Note: as of Meteor 1.7, you can override the default test loading rules +described in the previous two paragraphs by including a meteor.testModule +section in your package.json file: + + "meteor": { + "testModule": { + "client": "client/tests.js", + "server": "server/tests.js" + } + } + +If your client and server test files are the same, this can be simplified to + + "meteor": { + "testModule": "tests.js" + } + +When meteor.testModule is defined in package.json, the same test module will +be loaded whether or not you use the --full-app option. Any tests that need to +know whether the --full-app option was used may check Meteor.isAppTest, which +is true when running 'meteor test --full-app'. The module specified by +meteor.testModule can import other test modules at runtime, so it is still +possible to distribute test files across your codebase. + +Once your application starts up in testing mode, open the test dashboard in +your browser to run the tests and see the results. By default the URL is +localhost:3000 but that can be changed with --port. Read more about testing your application in the Testing Article of the Meteor Guide - https://guide.meteor.com/testing.html @@ -684,7 +703,7 @@ Options: Run tests of the 'meteor' tool. Usage: meteor self-test [pattern] [--list] [--file pattern] [--changed] [--slow] [--force-online] [--history n] - [--browserstack] + [--browserstack] [--phantom] [--retries n] Runs internal tests. Exits with status 0 on success. @@ -714,13 +733,10 @@ shown on test failure. The default is 10. 
Pass --browserstack to enable client side tests using BrowserStack. --browserstack requires s3cmd credentials. +Pass --phantom to enable client side tests using Phantom. - ->>> open-ide -Open mobile build project in associated IDE. -Usage: meteor open-ide [ios] - -Open mobile build project in associated IDE. +By default, failed tests are retried twice. Use --retries to change +the number of retries, or disable retries by setting the value to 0. >>> admin @@ -760,12 +776,14 @@ Usage: meteor cordova >>> list-sites -List sites for which you are authorized. +List sites for which you are authorized. Lists by region, default is us-east-1. Usage: meteor list-sites List the sites that you have deployed with 'meteor deploy', and sites for which other users have authorized you with the 'meteor authorized' -command. +command. To see sites in a region other than us-east-1, set the DEPLOY_HOSTNAME +environment variable. For example, +`DEPLOY_HOSTNAME=eu-west-1.galaxy-deploy.meteor.com meteor list-sites` >>> publish-release @@ -1021,6 +1039,8 @@ Set the readme field on the latest published version of a core package to the readme at a given git commit, or the readme at a given git tag. >>> admin get-machine +Open an ssh shell to a machine in the meteor build farm. + The 'meteor admin get-machine' command has been disabled and the build farm has been discontinued. 
As of Meteor 1.4, packages with binary dependencies are automatically compiled when they are installed in an application, assuming the diff --git a/tools/cli/main.js b/tools/cli/main.js index d2b3836368..1ccf77ebe8 100644 --- a/tools/cli/main.js +++ b/tools/cli/main.js @@ -16,6 +16,7 @@ var catalog = require('../packaging/catalog/catalog.js'); var buildmessage = require('../utils/buildmessage.js'); var httpHelpers = require('../utils/http-helpers.js'); const archinfo = require('../utils/archinfo.js'); +import { isEmacs } from "../utils/utils.js"; var main = exports; @@ -538,6 +539,10 @@ var springboard = function (rel, options) { process.env['METEOR_SPRINGBOARD_RELEASE'] = options.releaseOverride; } + // Release our connection to the sqlite catalog database for the current + // process, so that the springboarded process can reestablish it. + catalog.official.closePermanently(); + if (process.platform === 'win32') { process.exit(new Promise(function (resolve) { var batPath = files.convertToOSPath(executable + ".bat"); @@ -561,6 +566,10 @@ var oldSpringboard = function (toolsVersion) { var cmd = files.pathJoin(warehouse.getToolsDir(toolsVersion), 'bin', 'meteor'); + // Release our connection to the sqlite catalog database for the current + // process, so that the springboarded process can reestablish it. + catalog.official.closePermanently(); + // Now exec; we're not coming back. require('kexec')(cmd, newArgv); throw Error('exec failed?'); @@ -583,7 +592,7 @@ Fiber(function () { // reversing node's normal setting of O_NONBLOCK on the evaluation // of process.stdin (because Node unblocks stdio when forking). This // fixes execution of Mongo from within Emacs shell. - if (process.env.EMACS == "t") { + if (isEmacs()) { process.stdin; var child_process = require('child_process'); child_process.spawn('true', [], {stdio: 'inherit'}); @@ -591,7 +600,7 @@ Fiber(function () { // Check required Node version. // This code is duplicated in tools/server/boot.js. 
- var MIN_NODE_VERSION = 'v0.10.41'; + var MIN_NODE_VERSION = 'v8.0.0'; if (require('semver').lt(process.version, MIN_NODE_VERSION)) { Console.error( 'Meteor requires Node ' + MIN_NODE_VERSION + ' or later.'); @@ -610,7 +619,7 @@ Fiber(function () { // meteor package, and that'll look a lot uglier. if (process.env.ROOT_URL) { var parsedUrl = require('url').parse(process.env.ROOT_URL); - if (!parsedUrl.host) { + if (!parsedUrl.host || ['http:', 'https:'].indexOf(parsedUrl.protocol) === -1) { Console.error('$ROOT_URL, if specified, must be an URL.'); process.exit(1); } diff --git a/tools/console/console.js b/tools/console/console.js index 4501ae3473..73df9f1cde 100644 --- a/tools/console/console.js +++ b/tools/console/console.js @@ -55,26 +55,28 @@ /// /// In addition to printing functions, the Console class provides progress bar /// support, that is mostly handled through buildmessage.js. -var _ = require('underscore'); -var readline = require('readline'); -var util = require('util'); -var buildmessage = require('../utils/buildmessage.js'); +import { createInterface } from "readline"; +import { format as utilFormat } from "util"; +import { getRootProgress } from "../utils/buildmessage.js"; // XXX: Are we happy with chalk (and its sub-dependencies)? 
-var chalk = require('chalk'); -var cleanup = require('../tool-env/cleanup.js'); -var utils = require('../utils/utils.js'); -var wordwrap = require('wordwrap'); +import chalk from "chalk"; +import { onExit as cleanupOnExit } from "../tool-env/cleanup.js"; +import wordwrap from "wordwrap"; +import { + isEmacs, + sleepMs, + Throttled, + ThrottledYield, +} from "../utils/utils.js"; -var PROGRESS_DEBUG = !!process.env.METEOR_PROGRESS_DEBUG; -var FORCE_PRETTY=undefined; +const PROGRESS_DEBUG = !!process.env.METEOR_PROGRESS_DEBUG; // Set the default CR to \r unless we're running with cmd -var CARRIAGE_RETURN = process.platform === 'win32' && +const CARRIAGE_RETURN = process.platform === 'win32' && process.stdout.isTTY && process.argv[1].toLowerCase().includes('cmd') ? new Array(249).join('\b') : '\r'; -if (process.env.METEOR_PRETTY_OUTPUT) { - FORCE_PRETTY = process.env.METEOR_PRETTY_OUTPUT != '0'; -} +const FORCE_PRETTY = process.env.METEOR_PRETTY_OUTPUT && + process.env.METEOR_PRETTY_OUTPUT != '0'; if (! process.env.METEOR_COLOR) { chalk.enabled = false; @@ -233,7 +235,7 @@ class SpinnerRenderer { // Renders a progressbar. Based on the npm 'progress' module, but tailored to our needs (i.e. renders to string) class ProgressBarRenderer { constructor(format, options) { - options = options || {}; + options = options || Object.create(null); this.fmt = format; this.curr = 0; @@ -417,7 +419,7 @@ class StatusPoller { this._console = console; this._pollPromise = null; - this._throttledStatusPoll = new utils.Throttled({ + this._throttledStatusPoll = new Throttled({ interval: STATUS_INTERVAL_MS }); this._startPoller(); @@ -430,10 +432,10 @@ class StatusPoller { } this._pollPromise = (async() => { - utils.sleepMs(STATUS_INTERVAL_MS); + sleepMs(STATUS_INTERVAL_MS); while (! 
this._stop) { this.statusPoll(); - utils.sleepMs(STATUS_INTERVAL_MS); + sleepMs(STATUS_INTERVAL_MS); } })(); } @@ -451,7 +453,7 @@ class StatusPoller { _statusPoll() { // XXX: Early exit here if we're not showing status at all? - var rootProgress = buildmessage.getRootProgress(); + var rootProgress = getRootProgress(); if (PROGRESS_DEBUG) { // It can be handy for dev purposes to see all the executing tasks rootProgress.dump(process.stdout, {skipDone: true}); @@ -545,7 +547,7 @@ class Console extends ConsoleBase { constructor(options) { super(); - options = options || {}; + options = options || Object.create(null); this._headless = !! ( process.env.METEOR_HEADLESS && @@ -557,13 +559,13 @@ class Console extends ConsoleBase { this._statusPoller = null; - this._throttledYield = new utils.ThrottledYield(); + this._throttledYield = new ThrottledYield(); this.verbose = false; // Legacy helpers - this.stdout = {}; - this.stderr = {}; + this.stdout = Object.create(null); + this.stderr = Object.create(null); this._stream = process.stdout; @@ -579,15 +581,11 @@ class Console extends ConsoleBase { } } - cleanup.onExit((sig) => { + cleanupOnExit((sig) => { this.enableProgressDisplay(false); }); } - isInteractive() { - return !this._headless; - } - setPretty(pretty) { // If we're being forced, do nothing. if (FORCE_PRETTY !== undefined) { @@ -717,12 +715,13 @@ class Console extends ConsoleBase { // If the last argument is an instance of ConsoleOptions, then we should // separate it out, and only send the first N-1 arguments to be parsed as a // message. 
- if (_.last(args) instanceof ConsoleOptions) { - msgArgs = _.initial(args); - options = _.last(args).options; + const lastArg = args && args.length && args[args.length - 1]; + if (lastArg instanceof ConsoleOptions) { + msgArgs = args.slice(0, -1); + options = lastArg.options; } else { msgArgs = args; - options = {}; + options = Object.create(null); } var message = this._format(msgArgs); return { message: message, options: options }; @@ -1052,10 +1051,10 @@ class Console extends ConsoleBase { // printing directories, for examle. // - indent: indent the entire table by a given number of spaces. printTwoColumns(rows, options) { - options = options || {}; + options = options || Object.create(null); var longest = ''; - _.each(rows, row => { + rows.forEach(row => { var col0 = row[0] || ''; if (col0.length > longest.length) { longest = col0; @@ -1068,7 +1067,7 @@ class Console extends ConsoleBase { options.indent ? Array(options.indent + 1).join(' ') : ""; var out = ''; - _.each(rows, row => { + rows.forEach(row => { var col0 = row[0] || ''; var col1 = row[1] || ''; var line = indent + this.bold(col0) + pad.substr(col0.length); @@ -1088,7 +1087,7 @@ class Console extends ConsoleBase { // Format logs according to the spec in utils. _format(logArguments) { - return util.format.apply(util, logArguments); + return utilFormat(...logArguments); } // Wraps long strings to the length of user's terminal. Inserts linebreaks @@ -1101,7 +1100,7 @@ class Console extends ConsoleBase { // - indent: (see: Console.options) // _wrapText(text, options) { - options = options || {}; + options = options || Object.create(null); // Compute the maximum offset on the bulk of the message. var maxIndent = 0; @@ -1125,7 +1124,7 @@ class Console extends ConsoleBase { } else { wrappedText = text; } - wrappedText = _.map(wrappedText.split('\n'), s => { + wrappedText = wrappedText.split('\n').map(s => { if (s === "") { return ""; } @@ -1180,13 +1179,9 @@ class Console extends ConsoleBase { } else if ((! 
this._stream.isTTY) || (! this._pretty)) { // No progress bar if not in pretty / on TTY. newProgressDisplay = new ProgressDisplayNone(this); - } else if (this._stream.isTTY && ! this._stream.columns) { - // We might be in a pseudo-TTY that doesn't support - // clearLine() and cursorTo(...). - // It's important that we only enter status message mode - // if this._pretty, so that we don't start displaying - // status messages too soon. - // XXX See note where ProgressDisplayStatus is defined. + } else if (isEmacs() || this.isPseudoTTY()) { + // Resort to a more basic mode if we're in an environment which + // misbehaves when using clearLine() and cursorTo(...). newProgressDisplay = new ProgressDisplayStatus(this); } else { // Otherwise we can do the full progress bar @@ -1208,10 +1203,18 @@ class Console extends ConsoleBase { this._setProgressDisplay(newProgressDisplay); } + isPseudoTTY() { + return this._stream && this._stream.isTTY && ! this._stream.columns; + } + isHeadless() { return this._headless; } + isInteractive() { + return ! this._headless; + } + setHeadless(headless = true) { this._headless = !! headless; @@ -1235,7 +1238,7 @@ class Console extends ConsoleBase { // - prompt (string) // - stream: defaults to process.stdout (you might want process.stderr) readLine(options) { - options = _.extend({ + options = Object.assign(Object.create(null), { echo: true, stream: this._stream }, options); @@ -1256,12 +1259,12 @@ class Console extends ConsoleBase { this._setProgressDisplay(new ProgressDisplayNone()); // Read a line, throwing away the echoed characters into our dummy stream. - var rl = readline.createInterface({ + var rl = createInterface({ input: process.stdin, output: options.echo ? options.stream : silentStream, // `terminal: options.stream.isTTY` is the default, but emacs shell users // don't want fancy ANSI. - terminal: options.stream.isTTY && process.env.EMACS !== 't' + terminal: options.stream.isTTY && ! isEmacs() }); if (! 
options.echo) { diff --git a/tools/cordova/builder.js b/tools/cordova/builder.js index 4f54eb2bb9..12cdbe7a4a 100644 --- a/tools/cordova/builder.js +++ b/tools/cordova/builder.js @@ -1,8 +1,10 @@ import _ from 'underscore'; import util from 'util'; +import path from 'path'; import { Console } from '../console/console.js'; import buildmessage from '../utils/buildmessage.js'; import files from '../fs/files.js'; +import { optimisticReadJsonOrNull } from "../fs/optimistic.js"; import bundler from '../isobuild/bundler.js'; import archinfo from '../utils/archinfo.js'; import release from '../packaging/release.js'; @@ -52,14 +54,9 @@ const launchIosSizes = { 'iphone6p_portrait': '1242x2208', 'iphone6p_landscape': '2208x1242', 'iphoneX_portrait': '1125x2436', - 'iphoneX_landscape': '2436x1125', + 'iphoneX_landscape': '2436x1125', 'ipad_portrait_2x': '1536x2048', 'ipad_landscape_2x': '2048x1536', - // Not yet supported in Xcode 9 or Cordova iOS 4.5.3 - // 'ipad_portrait_pro_10_5': '1668x2224', - // 'ipad_landscape_pro_10_5': '2224x1668', - // 'ipad_portrait_pro_12_9': '2048x2732', - // 'ipad_landscape_pro_12_9': '2732x2048', // Legacy 'iphone': '320x480', 'iphone_2x': '640x960', @@ -130,6 +127,9 @@ export class CordovaBuilder { // Custom elements that will be appended into config.xml's widgets this.custom = []; + // Resource files that will be appended to platform bundle and config.xml + this.resourceFiles = []; + const packageMap = this.projectContext.packageMap; if (packageMap && packageMap.getInfo('launch-screen')) { @@ -325,6 +325,12 @@ export class CordovaBuilder { this.configureAndCopyImages(launchAndroidSizes, platformElement.android, 'splash'); } + this.configureAndCopyResourceFiles( + this.resourceFiles, + platformElement.ios, + platformElement.android + ); + Console.debug('Writing new config.xml'); const configXmlPath = files.pathJoin(this.projectRoot, 'config.xml'); @@ -380,6 +386,31 @@ export class CordovaBuilder { }); } + 
configureAndCopyResourceFiles(resourceFiles, iosElement, androidElement) { + _.each(resourceFiles, resourceFile => { + // Copy file in cordova project root directory + var filename = path.parse(resourceFile.src).base; + files.copyFile( + files.pathResolve(this.projectContext.projectDir, resourceFile.src), + files.pathJoin(this.projectRoot, filename)); + // And entry in config.xml + if (!resourceFile.platform || + (resourceFile.platform && resourceFile.platform === "android")) { + androidElement.element('resource-file', { + src: resourceFile.src, + target: resourceFile.target + }); + } + if (!resourceFile.platform || + (resourceFile.platform && resourceFile.platform === "ios")) { + iosElement.element('resource-file', { + src: resourceFile.src, + target: resourceFile.target + }); + } + }); + } + copyWWW(bundlePath) { const wwwPath = files.pathJoin(this.projectRoot, 'www'); @@ -411,7 +442,10 @@ export class CordovaBuilder { // Write program.json files.writeFile(programJsonPath, JSON.stringify(program), 'utf8'); - const bootstrapPage = this.generateBootstrapPage(applicationPath, program, publicSettings); + const bootstrapPage = this.generateBootstrapPage( + applicationPath, program, publicSettings + ).await(); + files.writeFile(files.pathJoin(applicationPath, 'index.html'), bootstrapPage, 'utf8'); } @@ -421,9 +455,19 @@ export class CordovaBuilder { configDummy.PUBLIC_SETTINGS = publicSettings || {}; const { WebAppHashing } = loadIsopackage('webapp-hashing'); + const { AUTOUPDATE_VERSION } = process.env; - program.version = - WebAppHashing.calculateClientHash(program.manifest, null, configDummy); + program.version = AUTOUPDATE_VERSION || + WebAppHashing.calculateClientHash( + program.manifest, null, configDummy); + + program.versionRefreshable = AUTOUPDATE_VERSION || + WebAppHashing.calculateClientHash( + program.manifest, type => type === "css", configDummy); + + program.versionNonRefreshable = AUTOUPDATE_VERSION || + WebAppHashing.calculateClientHash( + 
program.manifest, type => type !== "css", configDummy); } generateBootstrapPage(applicationPath, program, publicSettings) { @@ -431,7 +475,6 @@ export class CordovaBuilder { release.current.isCheckout() ? "none" : release.current.name; const manifest = program.manifest; - const autoupdateVersion = process.env.AUTOUPDATE_VERSION || program.version; const mobileServerUrl = this.options.mobileServerUrl; @@ -441,7 +484,15 @@ export class CordovaBuilder { // XXX propagate it from this.options? ROOT_URL_PATH_PREFIX: '', DDP_DEFAULT_CONNECTION_URL: mobileServerUrl, - autoupdateVersionCordova: autoupdateVersion, + autoupdate: { + versions: { + "web.cordova": { + version: program.version, + versionRefreshable: program.versionRefreshable, + versionNonRefreshable: program.versionNonRefreshable + } + } + }, appId: this.projectContext.appIdentifier, meteorEnv: { NODE_ENV: process.env.NODE_ENV || "production", @@ -465,7 +516,7 @@ export class CordovaBuilder { } }); - return boilerplate.toHTML(); + return boilerplate.toHTMLAsync(); } copyBuildOverride() { @@ -481,6 +532,15 @@ export class CordovaBuilder { } function createAppConfiguration(builder) { + const { settingsFile } = builder.options; + let settings = null; + if (settingsFile) { + settings = optimisticReadJsonOrNull(settingsFile); + if (! settings) { + throw new Error("Unreadable --settings file: " + settingsFile); + } + } + /** * @namespace App * @global @@ -527,6 +587,15 @@ Valid platforms are: ios, android.`); } }, + /** + * @summary Like `Meteor.settings`, contains data read from a JSON + * file provided via the `--settings` command-line option at + * build time, or null if no settings were provided. + * @memberOf App + * @type {Object} + */ + settings, + /** * @summary Set the build-time configuration for a Cordova plugin. * @param {String} id The identifier of the plugin you want to @@ -682,11 +751,27 @@ configuration. 
The key may be deprecated.`); * * `App.appendToConfig('');` * - * @param {String} element The XML you want to include + * @param {String} element The XML you want to include * @memberOf App */ appendToConfig: function (xml) { builder.custom.push(xml); }, + + /** + * @summary Add a resource file for your build as described in the + * [Cordova documentation](http://cordova.apache.org/docs/en/7.x/config_ref/index.html#resource-file). + * @param {String} src The project resource path. + * @param {String} target Resource destination in build. + * @param {String} [platform] Optional. A platform name (either `ios` or `android`, both if ommited) to add a resource-file entry. + * @memberOf App + */ + addResourceFile: function (src, target, platform) { + builder.resourceFiles.push({ + src: src, + target: target, + platform: platform + }); + } }; } diff --git a/tools/cordova/index.js b/tools/cordova/index.js index 58658ad78a..7763f41929 100644 --- a/tools/cordova/index.js +++ b/tools/cordova/index.js @@ -9,13 +9,16 @@ export const CORDOVA_PLATFORMS = ['ios', 'android']; export const CORDOVA_DEV_BUNDLE_VERSIONS = { 'cordova-lib': '7.1.0', - 'cordova-common': '1.5.1', + 'cordova-common': '2.1.1', 'cordova-registry-mapper': '1.1.15', }; export const CORDOVA_PLATFORM_VERSIONS = { - 'android': '6.3.0', - 'ios': '4.5.3' + // This commit represents cordova-android@6.4.0 plus + // https://github.com/apache/cordova-android/pull/417, aka + // https://github.com/meteor/cordova-android/tree/v6.4.0-with-pr-417: + 'android': 'https://github.com/meteor/cordova-android/tarball/317db7df0f7a054444197bc6d28453cf4ab23280', + 'ios': '4.5.4' }; const PLATFORM_TO_DISPLAY_NAME_MAP = { diff --git a/tools/cordova/package-id-version-parser.js b/tools/cordova/package-id-version-parser.js new file mode 100644 index 0000000000..4e031962bc --- /dev/null +++ b/tools/cordova/package-id-version-parser.js @@ -0,0 +1,22 @@ +// Accepts a combined Cordova package ID + version string, then parses out +// and 
returns the ID and version in a package details object. +// +// Example `packageIdAndVersion` formats: +// some-cordova-plugin@1.0.0 +// @somescope/some-cordova-plugin@1.0.0 +exports.parse = packageIdAndVersion => { + const packageDetails = {}; + if (packageIdAndVersion) { + const [ + _matchText, + scope, + packageName, + version, + ] = packageIdAndVersion.match( + /^(?:@([^\/]+)\/)?([^\/@]+)@?(.+)?/ + ); + packageDetails.id = (scope ? `@${scope}/` : '') + packageName; + packageDetails.version = version ? version : null; + } + return packageDetails; +}; diff --git a/tools/cordova/project.js b/tools/cordova/project.js index f5614a8393..e118a0fd30 100644 --- a/tools/cordova/project.js +++ b/tools/cordova/project.js @@ -7,6 +7,7 @@ import semver from 'semver'; import files from '../fs/files.js'; import utils from '../utils/utils.js'; import { Console } from '../console/console.js'; +import { Profile } from '../tool-env/profile.js'; import buildmessage from '../utils/buildmessage.js'; import main from '../cli/main.js'; import httpHelpers from '../utils/http-helpers.js'; @@ -68,8 +69,8 @@ const pinnedPluginVersions = { "cordova-plugin-media": "3.0.1", "cordova-plugin-media-capture": "1.4.3", "cordova-plugin-network-information": "1.3.3", - "cordova-plugin-splashscreen": "4.0.3", - "cordova-plugin-statusbar": "2.2.3", + "cordova-plugin-splashscreen": "4.1.0", + "cordova-plugin-statusbar": "2.3.0", "cordova-plugin-test-framework": "1.1.5", "cordova-plugin-vibration": "2.1.5", "cordova-plugin-whitelist": "1.3.2", @@ -109,6 +110,12 @@ export class CordovaProject { return true; } + if (! semver.valid(pinnedVersion)) { + // If pinnedVersion is not a semantic version but instead + // something like a GitHub tarball URL, assume not outdated. + return false; + } + return semver.lt(installedVersion, pinnedVersion); }); @@ -426,15 +433,16 @@ from Cordova project`, async () => { // tell us if plugins have been fetched from a Git SHA URL or a local path. 
// So we overwrite the declared versions with versions from // listFetchedPluginVersions that do contain this information. - listInstalledPluginVersions() { + listInstalledPluginVersions(usePluginInfoId = false) { const pluginInfoProvider = new PluginInfoProvider(); const installedPluginVersions = pluginInfoProvider.getAllWithinSearchPath( files.convertToOSPath(this.pluginsDir)); const fetchedPluginVersions = this.listFetchedPluginVersions(); return _.object(installedPluginVersions.map(pluginInfo => { - const id = pluginInfo.id; - const version = fetchedPluginVersions[id] || pluginInfo.version; - return [id, version]; + const fetchedPlugin = fetchedPluginVersions[pluginInfo.id]; + const id = fetchedPlugin.id; + const version = fetchedPlugin.version || pluginInfo.version; + return [usePluginInfoId ? pluginInfo.id : id, version]; })); } @@ -453,17 +461,21 @@ from Cordova project`, async () => { const fetchedPluginsMetadata = JSON.parse(files.readFile( fetchJsonPath, 'utf8')); - return _.object(_.map(fetchedPluginsMetadata, (metadata, id) => { + return _.object(_.map(fetchedPluginsMetadata, (metadata, name) => { const source = metadata.source; + + const idWithVersion = source.id ? source.id : name; + const scoped = idWithVersion[0] === '@'; + const id = `${scoped ? '@' : ''}${idWithVersion.split('@')[scoped ? 1 : 0]}`; let version; if (source.type === 'registry') { - version = source.id.split('@')[1]; + version = idWithVersion.split('@')[scoped ? 2 : 1]; } else if (source.type === 'git') { - version = `${source.url}#${source.ref}`; + version = `${source.url}${'ref' in source ? 
`#${source.ref}` : ''}`; } else if (source.type === 'local') { version = `file://${source.path}`; } - return [id, version]; + return [name, { id, version }]; })); } @@ -515,9 +527,7 @@ from Cordova project`, async () => { { cli_variables: config, link: utils.isUrlWithFileScheme(version) }); this.runCommands(`adding plugin ${target} \ -to Cordova project`, async () => { - await cordova_lib.plugin('add', [target], commandOptions); - }); +to Cordova project`, cordova_lib.plugin.bind(undefined, 'add', [target], commandOptions)); } } @@ -528,9 +538,7 @@ to Cordova project`, async () => { } this.runCommands(`removing plugins ${plugins} \ -from Cordova project`, async () => { - await cordova_lib.plugin('rm', plugins, this.defaultOptions); - }); +from Cordova project`, cordova_lib.plugin.bind(undefined, 'rm', plugins, this.defaultOptions)); } // Ensures that the Cordova plugins are synchronized with the app-level @@ -600,28 +608,57 @@ mobile-config.js accordingly.`); installedPluginVersions[id] !== version) { // We do not have the plugin installed or the version has changed. shouldReinstallAllPlugins = true; + Console.debug(`Plugin ${id} version have changed or it was added, will \ +perform cordova plugins reinstall`); } } }); - if (!_.isEmpty(pluginsFromLocalPath)) { - Console.debug('Reinstalling Cordova plugins added from the local path'); - } + const installedPluginsByName = Object.keys(this.listInstalledPluginVersions(true)); // Check to see if we have any installed plugins that are not in the // current set of plugins. - _.each(installedPluginVersions, (version, id) => { - if (!_.has(pluginVersions, id)) { - shouldReinstallAllPlugins = true; - } - }); + if (!shouldReinstallAllPlugins) { + // We need to know which plugins were installed because they were + // declared in cordova-plugins and which are just dependencies of others. + // Luckily for us android.json and ios.json have that information. 
+ const androidJsonPath = files.pathJoin(this.pluginsDir, 'android.json'); + const iosJsonPath = files.pathJoin(this.pluginsDir, 'ios.json'); + + const androidJson = files.exists(androidJsonPath) ? JSON.parse(files.readFile( + androidJsonPath, 'utf8')) : { installed_plugins: {} }; + const iosJson = files.exists(iosJsonPath) ? JSON.parse(files.readFile( + iosJsonPath, 'utf8')) : { installed_plugins: {} }; + + let previouslyInstalledPlugins = _.union( + Object.keys(androidJson.installed_plugins), Object.keys(iosJson.installed_plugins)); + + // Now the problem is we have a list of names the plugins (name defined in the plugin.xml) + // while in cordova-plugins we have can have their npm ids. We need to translate the list. + const fetched = this.listFetchedPluginVersions(); + previouslyInstalledPlugins = previouslyInstalledPlugins.map(name => { + return fetched[name].id; + }); + + previouslyInstalledPlugins.forEach(id => { + if (!_.has(pluginVersions, id)) { + Console.debug(`Plugin ${id} was removed, will \ +perform cordova plugins reinstall`); + shouldReinstallAllPlugins = true; + } + }); + } + + if (!_.isEmpty(pluginsFromLocalPath) && !shouldReinstallAllPlugins) { + Console.debug('Reinstalling Cordova plugins added from the local path'); + } // We either reinstall all plugins or only those fetched from a local // path. if (shouldReinstallAllPlugins || !_.isEmpty(pluginsFromLocalPath)) { let pluginsToRemove; if (shouldReinstallAllPlugins) { - pluginsToRemove = Object.keys(installedPluginVersions); + pluginsToRemove = installedPluginsByName; } else { // Only try to remove plugins that are currently installed. 
pluginsToRemove = _.intersection( @@ -652,10 +689,53 @@ mobile-config.js accordingly.`); end: pluginsToInstallCount }); }); + + this.ensurePluginsWereInstalled(pluginVersionsToInstall, pluginsConfiguration, true); } }); } + // Ensures that the Cordova plugins are installed + ensurePluginsWereInstalled(requiredPlugins, pluginsConfiguration, retryInstall) { + // List of all installed plugins. This should work for global / local / scoped cordova plugins. + // Examples: + // cordova-plugin-whitelist@1.3.2 => { 'cordova-plugin-whitelist': '1.3.2' } + // com.cordova.plugin@file://.cordova-plugins/plugin => { 'com.cordova.plugin': 'file://.cordova-plugins/plugin' } + // @scope/plugin@1.0.0 => { 'com.cordova.plugin': 'scope/plugin' } + const installed = this.listInstalledPluginVersions(); + const installedPluginsNames = Object.keys(installed); + const installedPluginsVersions = Object.values(installed); + const missingPlugins = {}; + + Object.keys(requiredPlugins).filter(plugin => { + if (!installedPluginsNames.includes(plugin)) { + Console.debug(`Plugin ${plugin} was not installed.`); + if (retryInstall) { + Console.debug(`Retrying to install ${plugin}.`); + this.addPlugin( + plugin, + requiredPlugins[plugin], + pluginsConfiguration[plugin] + ); + } + missingPlugins[plugin] = requiredPlugins[plugin]; + } + }); + + // All plugins were installed + if (Object.keys(missingPlugins).length === 0) { + return; + } + + // Check one more time after re-installation. + if (retryInstall) { + this.ensurePluginsWereInstalled(missingPlugins, pluginsConfiguration, false); + } else { + // Fail, to prevent building and publishing faulty mobile app without at this moment we need to stop. 
+ throw new Error(`Some Cordova plugins installation failed: (${Object.keys(missingPlugins).join(', ')}).`); + } + } + ensurePinnedPluginVersions(pluginVersions) { assert(pluginVersions); @@ -758,3 +838,16 @@ running again with the --verbose option to help diagnose the issue.)`), } } } + +const CPp = CordovaProject.prototype; +["prepareFromAppBundle", + "prepareForPlatform", + "buildForPlatform", +].forEach(name => { + CPp[name] = Profile(platform => { + const prefix = `CordovaProject#${name}`; + return name.endsWith("ForPlatform") ? `${prefix} for ${ + displayNameForPlatform(platform) + }` : prefix; + }, CPp[name]); +}); diff --git a/tools/cordova/run-targets.js b/tools/cordova/run-targets.js index 76b60d0e1b..47e0e2cb73 100644 --- a/tools/cordova/run-targets.js +++ b/tools/cordova/run-targets.js @@ -58,10 +58,9 @@ function openXcodeProject(projectDir) { return; } - const projectFilePath = files.pathJoin(projectDir, projectFilename); try { - execFileSync('open', [projectFilePath]); + execFileSync('open', ['-a', 'Xcode', projectDir]); Console.info(); Console.info( @@ -172,6 +171,7 @@ function logFromAndroidLogcatLine(Log, line) { // "I/Tag( PID): message" let match = line.match(/^([A-Z])\/([^\(]*?)\(\s*(\d+)\): (.*)$/); + let priority, tag, pid, message, logLevel, filename, lineNumber; if (match) { [, priority, tag, pid, message] = match; diff --git a/tools/fs/files.js b/tools/fs/files.js index 0d26d153f6..d55948eb29 100644 --- a/tools/fs/files.js +++ b/tools/fs/files.js @@ -246,6 +246,10 @@ files.getSettings = function (filename, watchSet) { var str = buffer.toString('utf8'); + // The use of a byte order mark crashes JSON parsing. Since a BOM is not + // required (or recommended) when using UTF-8, let's remove it if it exists. + str = str.charCodeAt(0) === 0xFEFF ? str.slice(1) : str; + // Ensure that the string is parseable in JSON, but there's no reason to use // the object value of it yet. 
if (str.match(/\S/)) { @@ -293,6 +297,15 @@ function statOrNull(path, preserveSymlinks) { } } +export function realpathOrNull(path) { + try { + return files.realpath(path); + } catch (e) { + if (e.code !== "ENOENT") throw e; + return null; + } +} + files.rm_recursive_async = (path) => { return new Promise((resolve, reject) => { rimraf(files.convertToOSPath(path), err => err @@ -306,7 +319,9 @@ files.rm_recursive = Profile("files.rm_recursive", (path) => { try { rimraf.sync(files.convertToOSPath(path)); } catch (e) { - if (e.code === "ENOTEMPTY" && canYield()) { + if ((e.code === "ENOTEMPTY" || + e.code === "EPERM") && + canYield()) { files.rm_recursive_async(path).await(); return; } @@ -489,7 +504,7 @@ files.cp_r = function(from, to, options = {}) { files.mkdir_p(files.pathDirname(to)); if (stat.isSymbolicLink()) { - files.symlink(files.readlink(from), to); + symlinkWithOverwrite(files.readlink(from), to); } else { // Create the file as readable and writable by everyone, and @@ -511,6 +526,38 @@ files.cp_r = function(from, to, options = {}) { } }; +// create a symlink, overwriting the target link, file, or directory +// if it exists +export function symlinkWithOverwrite(source, target) { + const args = [source, target]; + + if (process.platform === "win32") { + const absoluteSource = files.pathResolve(target, source); + + if (files.stat(absoluteSource).isDirectory()) { + args[2] = "junction"; + } + } + + try { + files.symlink(...args); + } catch (e) { + if (e.code === "EEXIST") { + if (files.lstat(target).isSymbolicLink() && + files.readlink(target) === source) { + // If the target already points to the desired source, we don't + // need to do anything. 
+ return; + } + // overwrite existing link, file, or directory + files.rm_recursive(target); + files.symlink(...args); + } else { + throw e; + } + } +} + /** * Get every path in a directory recursively, treating symlinks as files * @param {String} dir The directory to walk, either relative to options.cwd or completely absolute @@ -938,14 +985,16 @@ files.createTarGzStream = function (dirPath, options) { // Tar-gzips a directory into a tarball on disk, synchronously. // The tar archive will contain a top-level directory named after dirPath. -files.createTarball = function (dirPath, tarball, options) { +files.createTarball = Profile(function (dirPath, tarball) { + return "files.createTarball " + files.pathBasename(tarball); +}, function (dirPath, tarball, options) { var out = files.createWriteStream(tarball); new Promise(function (resolve, reject) { out.on('error', reject); out.on('close', resolve); files.createTarGzStream(dirPath, options).pipe(out); }).await(); -}; +}); // Use this if you'd like to replace a directory with another // directory as close to atomically as possible. It's better than @@ -997,7 +1046,9 @@ files.renameDirAlmostAtomically = // limitations, we'll resort to copying. if (forceCopy) { files.rm_recursive(toDir); - files.cp_r(fromDir, toDir); + files.cp_r(fromDir, toDir, { + preserveSymlinks: true, + }); } // ... and take out the trash. @@ -1615,7 +1666,10 @@ let dependOnPathSalt = 0; export const dependOnPath = require("optimism").wrap( // Always return something different to prevent optimism from // second-guessing the dirtiness of this function. - path => ++dependOnPathSalt + path => ++dependOnPathSalt, + // This function is disposable because we don't care about its result, + // only its role in optimistic dependency tracking/dirtying. + { disposable: true } ); function wrapDestructiveFsFunc(name, pathArgIndices) { @@ -1744,7 +1798,7 @@ if (files.isWindowsLikeFilesystem()) { } if (! 
success) { - files.cp_r(from, to); + files.cp_r(from, to, { preserveSymlinks: true }); files.rm_recursive(from); } }; diff --git a/tools/fs/optimistic.js b/tools/fs/optimistic.js index eb596a8fbf..4bddc674be 100644 --- a/tools/fs/optimistic.js +++ b/tools/fs/optimistic.js @@ -163,7 +163,11 @@ const dependOnDirectory = wrap(dir => { watcher = null; } }; - } + }, + + // This function is disposable because we don't care about its result, + // only its role in optimistic dependency tracking/dirtying. + disposable: true }); // Called when an optimistic function detects the given file does not @@ -189,6 +193,10 @@ const dependOnNodeModules = wrap(nodeModulesDir => { assert(pathIsAbsolute(nodeModulesDir)); assert(nodeModulesDir.endsWith(pathSep + "node_modules")); return dependOnDirectory(nodeModulesDir); +}, { + // This function is disposable because we don't care about its result, + // only its role in optimistic dependency tracking/dirtying. + disposable: true }); // Invalidate all optimistic results derived from paths involving the diff --git a/tools/fs/safe-watcher.js b/tools/fs/safe-watcher.js index d15ca2d4b3..dae54bd9c0 100644 --- a/tools/fs/safe-watcher.js +++ b/tools/fs/safe-watcher.js @@ -366,7 +366,7 @@ async function maybeSuggestRaisingWatchLimit(error) { "It looks like a simple tweak to your system's configuration will " + "make many tools (including this Meteor command) more efficient. 
" + "To learn more, see " + - Console.url("https://github.com/meteor/meteor/wiki/File-Change-Watcher-Efficiency")); + Console.url("https://github.com/meteor/docs/blob/master/long-form/file-change-watcher-efficiency.md")); } } } diff --git a/tools/fs/watch.js b/tools/fs/watch.js index ab0d2dc143..896cff26af 100644 --- a/tools/fs/watch.js +++ b/tools/fs/watch.js @@ -11,6 +11,9 @@ import { optimisticHashOrNull, } from "./optimistic.js"; +const WATCH_COALESCE_MS = + process.env.METEOR_FILE_WATCH_COALESCE_MS || 100; + // Watch for changes to a set of files, and the first time that any of // the files change, call a user-provided callback. (If you want a // second callback, you'll need to create a second Watcher.) @@ -257,13 +260,17 @@ export function readFile(absPath) { } }; -export function sha1(...args) { - return Profile("sha1", function () { - var hash = createHash('sha1'); - args.forEach(arg => hash.update(arg)); - return hash.digest('hex'); - })(); -} +export const sha1 = Profile("sha1", function (...args) { + var hash = createHash('sha1'); + args.forEach(arg => hash.update(arg)); + return hash.digest('hex'); +}); + +export const sha512 = Profile("sha512", function (...args) { + var hash = createHash('sha512'); + args.forEach(arg => hash.update(arg)); + return hash.digest('base64'); +}); export function readDirectory({absPath, include, exclude, names}) { // Read the directory. @@ -496,11 +503,12 @@ export class Watcher { // --hard), or a file was deleted and then recreated by an editor like // Vim. Because detecting changes can be costly, and because we care // most about the settled state of the file system, we use the - // funcUtils.coalesce helper to delay calls to the callback by 100ms, - // canceling any additional calls if they happen within that window of - // time, so that a rapid succession of calls will tend to trigger only - // one inspection of the file system. 
- return coalesce(100, function onWatchEvent() { + // funcUtils.coalesce helper to delay calls to the callback by + // METEOR_FILE_WATCH_COALESCE_MS or 100 milliseconds, canceling any + // additional calls if they happen within that window of time, so that + // a rapid succession of calls will tend to trigger only one inspection + // of the file system. + return coalesce(WATCH_COALESCE_MS, function onWatchEvent() { if (self.stopped) { return; } diff --git a/tools/isobuild/builder.js b/tools/isobuild/builder.js index b5d3f573cb..08435bece0 100644 --- a/tools/isobuild/builder.js +++ b/tools/isobuild/builder.js @@ -1,6 +1,8 @@ import assert from "assert"; import {WatchSet, readAndWatchFile, sha1} from '../fs/watch.js'; -import files from '../fs/files.js'; +import files, { + symlinkWithOverwrite, +} from '../fs/files.js'; import NpmDiscards from './npm-discards.js'; import {Profile} from '../tool-env/profile.js'; import { @@ -56,7 +58,15 @@ const ENABLE_IN_PLACE_BUILDER_REPLACEMENT = // structure; and the hashes of the contents correspond to the // writtenHashes data strcture. export default class Builder { - constructor({outputPath, previousBuilder}) { + constructor({ + outputPath, + previousBuilder, + // Even though in-place builds are disabled by default on some + // platforms (Windows), they can be forcibly reenabled with this + // option, in cases where it's safe and/or necessary to avoid + // clobbering existing files. + forceInPlaceBuild = false, + }) { this.outputPath = outputPath; // Paths already written to. 
Map from canonicalized relPath (no @@ -81,7 +91,8 @@ export default class Builder { // If we have a previous builder and we are allowed to re-use it, // let's keep all the older files on the file-system and replace // only outdated ones + write the new files in the same path - if (previousBuilder && ENABLE_IN_PLACE_BUILDER_REPLACEMENT) { + if (previousBuilder && + (forceInPlaceBuild || ENABLE_IN_PLACE_BUILDER_REPLACEMENT)) { if (previousBuilder.outputPath !== outputPath) { throw new Error( `previousBuilder option can only be set to a builder with the same output path. @@ -133,7 +144,15 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` if (partial in this.previousUsedAsFile) { if (this.previousUsedAsFile[partial]) { // was previously used as file, delete it, create a directory - files.unlink(partial); + try { + files.unlink(partial); + } catch (e) { + // If files.unlink(partial) failed because the file does not + // exist, then we can just pretend the unlink succeeded. + if (e.code !== "ENOENT") { + throw e; + } + } } else { // is already a directory needToMkdir = false; @@ -390,6 +409,59 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` return generated; } + // A version of copyDirectory that works better for copying node_modules + // directories when symlinks are involved. + copyNodeModulesDirectory(options) { + // Although the options.from directory should probably be a + // node_modules directory, the only essential precondition here is + // that the destination directory is a node_modules directory. 
+ // assert.strictEqual(files.pathBasename(options.from), "node_modules"); + assert.strictEqual(files.pathBasename(options.to), "node_modules"); + + if (options.symlink) { + // If we're going to use symlinks to speed up this copy, then we + // need to make sure we've reserved all directories that are not + // package directories, such as the node_modules directory itself, + // as well as node_modules/meteor and the parent directories of any + // scoped npm packages. + this._ensureAllNonPackageDirectories( + files.realpath(options.from), + options.to + ); + } + + // Call this._copyDirectory rather than this.copyDirectory so that the + // subBuilder hacks from Builder#enter won't apply a second time. + return this._copyDirectory(options); + } + + _ensureAllNonPackageDirectories(absFromDir, relToDir) { + const dirStat = optimisticStatOrNull(absFromDir); + if (! (dirStat && dirStat.isDirectory())) { + return; + } + + const absFromPackageJson = + files.pathJoin(absFromDir, "package.json"); + + const stat = optimisticStatOrNull(absFromPackageJson); + if (stat && stat.isFile()) { + // If the directory has a package.json file, it's a package + // directory, and we should not call this._ensureDirectory, so that + // the package directory can later be symlinked in copyDirectory. + return; + } + + this._ensureDirectory(relToDir); + + optimisticReaddir(absFromDir).forEach(item => { + this._ensureAllNonPackageDirectories( + files.pathJoin(absFromDir, item), + files.pathJoin(relToDir, item) + ); + }); + } + // Recursively copy a directory and all of its contents into the // bundle. But if the symlink option was passed to the Builder // constructor, then make a symlink instead, if possible. @@ -408,7 +480,13 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` // entries that end with a slash if it's a directory. // - specificFiles: just copy these paths (specified as relative to 'to'). 
// - symlink: true if the directory should be symlinked instead of copying - copyDirectory({ + copyDirectory(options) { + // TODO(benjamn) Remove this wrapper when Builder#enter is no longer + // implemented using ridiculous hacks. + return this._copyDirectory(options); + } + + _copyDirectory({ from, to, ignore, specificFiles, @@ -446,11 +524,9 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` }); } - const walk = ( - absFrom, - relTo, - _currentRealRootDir = absFrom - ) => { + const rootDir = files.realpath(from); + + const walk = (absFrom, relTo) => { if (symlink && ! (relTo in this.usedAsFile)) { this._ensureDirectory(files.pathDirname(relTo)); const absTo = files.pathResolve(this.buildPath, relTo); @@ -469,8 +545,8 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` } // Returns files.realpath(thisAbsFrom), iff it is external to - // _currentRealRootDir, using caching because this function might - // be called more than once. + // rootDir, using caching because this function might be called + // more than once. let cachedExternalPath; const getExternalPath = () => { if (typeof cachedExternalPath !== "undefined") { @@ -488,7 +564,7 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` } const isExternal = - files.pathRelative(_currentRealRootDir, real).startsWith(".."); + files.pathRelative(rootDir, real).startsWith(".."); // Now cachedExternalPath is either a string or false. return cachedExternalPath = isExternal && real; @@ -503,22 +579,11 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` // external files, and this file is a symbolic link that points // to an external file, update fileStatus so that we copy this // file as a normal file rather than as a symbolic link. - const externalPath = getExternalPath(); if (externalPath) { - // Copy from the real path rather than the link path. 
- thisAbsFrom = externalPath; - // Update fileStatus to match the actual file rather than the // symbolic link, thus forcing the file to be copied below. fileStatus = optimisticLStatOrNull(externalPath); - - if (fileStatus && fileStatus.isDirectory()) { - // Update _currentRealRootDir so that we can judge - // isExternal relative to this new root directory when - // traversing nested directories. - _currentRealRootDir = externalPath; - } } } @@ -549,7 +614,7 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` } if (isDirectory) { - walk(thisAbsFrom, thisRelTo, _currentRealRootDir); + walk(thisAbsFrom, thisRelTo); return; } @@ -598,7 +663,7 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` }); }; - walk(files.realpath(from), to); + walk(rootDir, to); } // Returns a new Builder-compatible object that works just like a @@ -608,21 +673,31 @@ Previous builder: ${previousBuilder.outputPath}, this builder: ${outputPath}` // The sub-builder returned does not have all Builder methods (for // example, complete() wouldn't make sense) and you should not rely // on it being instanceof Builder. + // + // TODO(benjamn) This nonsense should be ripped out by any means + // necessary... whenever someone has the time. 
enter(relPath) { - const methods = ["write", "writeJson", "reserve", "generateFilename", - "copyDirectory", "enter"]; const subBuilder = {}; const relPathWithSep = relPath + files.pathSep; + const methods = [ + "write", + "writeJson", + "reserve", + "generateFilename", + "copyDirectory", + "copyNodeModulesDirectory", + "enter", + ]; methods.forEach(method => { subBuilder[method] = (...args) => { - if (method !== "copyDirectory") { - // Normal method (relPath as first argument) - args[0] = files.pathJoin(relPath, args[0]); - } else { - // with copyDirectory the path we have to fix up is inside - // an options hash + if (method === "copyDirectory" || + method === "copyNodeModulesDirectory") { + // The copy methods take their relative paths via options.to. args[0].to = files.pathJoin(relPath, args[0].to); + } else { + // Other methods have relPath as the first argument. + args[0] = files.pathJoin(relPath, args[0]); } let ret = this[method](...args); @@ -739,31 +814,15 @@ function symlinkIfPossible(source, target) { } } -// create a symlink, overwriting the target link, file, or directory -// if it exists -function symlinkWithOverwrite(source, target) { - try { - files.symlink(source, target); - } catch (e) { - if (e.code === "EEXIST") { - // overwrite existing link, file, or directory - files.rm_recursive(target); - files.symlink(source, target); - } else if (e.code === "EPERM" && - process.platform === "win32") { - files.rm_recursive(target); - // This will work only if source refers to a directory, but that's a - // chance worth taking. 
- files.symlink(source, target, "junction"); - } else { - throw e; - } - } -} - // Wrap slow methods into Profiler calls const slowBuilderMethods = [ - '_ensureDirectory', 'write', 'enter', 'copyDirectory', 'enter', 'complete' + "_ensureDirectory", + "write", + "enter", + "copyDirectory", + "copyNodeModulesDirectory", + "enter", + "complete", ]; slowBuilderMethods.forEach(method => { diff --git a/tools/isobuild/bundler.js b/tools/isobuild/bundler.js index 9f37416648..a473bd5c9d 100644 --- a/tools/isobuild/bundler.js +++ b/tools/isobuild/bundler.js @@ -482,7 +482,7 @@ export class NodeModulesDirectory { return true; } - const real = realpathOrNull(path); + const real = files.realpathOrNull(path); if (typeof real === "string" && real !== path) { // If node_modules/.bin/command is a symlink, determine the @@ -522,15 +522,6 @@ export class NodeModulesDirectory { } } -function realpathOrNull(path) { - try { - return files.realpath(path); - } catch (e) { - if (e.code !== "ENOENT") throw e; - return null; - } -} - /////////////////////////////////////////////////////////////////////////////// // File /////////////////////////////////////////////////////////////////////////////// @@ -538,7 +529,6 @@ function realpathOrNull(path) { // Allowed options: // - sourcePath: path to file on disk that will provide our contents // - data: contents of the file as a Buffer -// - hash: optional, sha1 hash of the file contents, if known // - sourceMap: if 'data' is given, can be given instead of // sourcePath. a string or a JS Object. Will be stored as Object. // - cacheable @@ -575,6 +565,23 @@ class File { // file is not intended to be served over HTTP). this.url = null; + // A prefix that will be prepended to this.url. + // Prefixing is currently restricted to web.cordova URLs. 
+ if (options.arch.startsWith("web.") && + // Using the isModern function from the modern-browsers package, + // the webapp and dynamic-import packages can automatically + // determine whether a client should receive resources from the + // web.browser or web.browser.legacy architecture, so those + // architectures do not need a URL prefix. Other architectures, + // such as web.cordova, still need a prefix like /__cordova/. + options.arch !== "web.browser" && + options.arch !== "web.browser.legacy") { + this.urlPrefix = "/__" + + options.arch.split(".").slice(1).join("."); + } else { + this.urlPrefix = ""; + } + // Is this file guaranteed to never change, so that we can let it be // cached forever? Only makes sense of self.url is set. this.cacheable = options.cacheable || false; @@ -590,8 +597,8 @@ class File { this.assets = null; this._contents = options.data || null; // contents, if known, as a Buffer - this._hashOfContents = options.hash || null; this._hash = null; + this._sri = null; } toString() { @@ -605,19 +612,23 @@ class File { hash() { if (! this._hash) { - if (! this._hashOfContents) { - this._hashOfContents = watch.sha1(this.contents()); - } - this._hash = watch.sha1( String(File._salt()), - this._hashOfContents, + this.sri(), ); } return this._hash; } + sri() { + if (! this._sri) { + this._sri = watch.sha512(this.contents()); + } + + return this._sri; + } + // Omit encoding to get a buffer, or provide something like 'utf8' // to get a string contents(encoding) { @@ -633,12 +644,10 @@ class File { } setContents(b) { - if (!(b instanceof Buffer)) { - throw new Error("Must set contents to a Buffer"); - } + assert.ok(Buffer.isBuffer(b), "Must pass Buffer to File#setContents"); this._contents = b; - // Un-cache hash. - this._hashOfContents = this._hash = null; + // Bust the hash cache. + this._hash = this._sri = null; } size() { @@ -653,7 +662,8 @@ class File { // url, useful for query parameters. 
setUrlToHash(fileAndUrlSuffix, urlSuffix) { urlSuffix = urlSuffix || ""; - this.url = "/" + this.hash() + fileAndUrlSuffix + urlSuffix; + this.url = this.urlPrefix + "/" + + this.hash() + fileAndUrlSuffix + urlSuffix; this.cacheable = true; this.targetPath = this.hash() + fileAndUrlSuffix; } @@ -677,7 +687,7 @@ class File { // Given a relative path like 'a/b/c' (where '/' is this system's // path component separator), produce a URL that always starts with // a forward slash and that uses a literal forward slash as the - // component separator. + // component separator. Also optionally add browser.legacy prefix. setUrlFromRelPath(relPath) { var url = relPath; @@ -688,7 +698,7 @@ class File { // XXX replacing colons with underscores as colon is hard to escape later // on different targets and generally is not a good separator for web. url = colonConverter.convert(url); - this.url = url; + this.url = this.urlPrefix + url; } setTargetPathFromRelPath(relPath) { @@ -822,7 +832,10 @@ class Target { packages: packages || [] }); - const sourceBatches = this._runCompilerPlugins(); + const sourceBatches = this._runCompilerPlugins({ + minifiers, + minifyMode, + }); // Link JavaScript and set up this.js, etc. this._emitResources(sourceBatches); @@ -1032,16 +1045,60 @@ class Target { // Run all the compiler plugins on all source files in the project. Returns an // array of PackageSourceBatches which contain the results of this processing. 
- _runCompilerPlugins() { + _runCompilerPlugins({ + minifiers = [], + minifyMode = "development", + }) { buildmessage.assertInJob(); + + const minifiersByExt = Object.create(null); + if (this instanceof ClientTarget) { + ["js", "css"].forEach(ext => { + minifiers.some(minifier => { + if (_.contains(minifier.extensions, ext)) { + return minifiersByExt[ext] = minifier; + } + }); + }); + } + + const target = this; const processor = new compilerPluginModule.CompilerPluginProcessor({ unibuilds: this.unibuilds, arch: this.arch, sourceRoot: this.sourceRoot, isopackCache: this.isopackCache, - linkerCacheDir: - (this.bundlerCacheDir && files.pathJoin(this.bundlerCacheDir, 'linker')) + linkerCacheDir: this.bundlerCacheDir && + files.pathJoin(this.bundlerCacheDir, 'linker'), + + // Takes a CssOutputResource and returns a string of minified CSS, + // or null to indicate no minification occurred. + // TODO Cache result by resource hash? + minifyCssResource(resource) { + if (! minifiersByExt.css || + minifyMode === "development") { + // Indicates the caller should use the original resource.data + // without minification. + return null; + } + + const file = new File({ + info: 'resource ' + resource.servePath, + arch: target.arch, + data: resource.data, + }); + + file.setTargetPathFromRelPath( + stripLeadingSlash(resource.servePath)); + + return minifyCssFiles([file], { + arch: target.arch, + minifier: minifiersByExt.css, + minifyMode, + }).map(file => file.contents("utf8")).join("\n"); + } }); + return processor.runCompilerPlugins(); } @@ -1078,7 +1135,9 @@ class Target { const isApp = ! name; // Emit the resources - const resources = sourceBatch.getResources(jsOutputFilesMap.get(name)); + const resources = sourceBatch.getResources( + jsOutputFilesMap.get(name).files, + ); // First, find all the assets, so that we can associate them with each js // resource (for os unibuilds). 
@@ -1088,25 +1147,40 @@ class Target { return; } - const f = new File({ + const fileOptions = { info: 'unbuild ' + resource, + arch: this.arch, data: resource.data, cacheable: false, - hash: resource.hash - }); + hash: resource.hash, + }; - const relPath = isOs - ? files.pathJoin('assets', resource.servePath) - : stripLeadingSlash(resource.servePath); - f.setTargetPathFromRelPath(relPath); + const file = new File(fileOptions); + const assetFiles = [file]; - if (isWeb) { - f.setUrlFromRelPath(resource.servePath); - } else { - unibuildAssets[resource.path] = resource.data; + if (file.urlPrefix.length > 0) { + const noPrefix = new File(fileOptions); + noPrefix.urlPrefix = ""; + // If the file has a URL prefix, add another resource for this + // asset without the prefix. + assetFiles.push(noPrefix); } - this.asset.push(f); + assetFiles.forEach(f => { + const relPath = isOs + ? files.pathJoin('assets', resource.servePath) + : stripLeadingSlash(resource.servePath); + + f.setTargetPathFromRelPath(relPath); + + if (isWeb) { + f.setUrlFromRelPath(resource.servePath); + } else { + unibuildAssets[resource.path] = resource.data; + } + + this.asset.push(f); + }); }); // Now look for the other kinds of resources. @@ -1134,7 +1208,12 @@ class Target { return; } - const f = new File({ info: 'resource ' + resource.servePath, data: resource.data, cacheable: false}); + const f = new File({ + info: 'resource ' + resource.servePath, + arch: this.arch, + data: resource.data, + cacheable: false, + }); const relPath = stripLeadingSlash(resource.servePath); f.setTargetPathFromRelPath(relPath); @@ -1213,9 +1292,10 @@ class Target { minifyJs(minifierDef, minifyMode) { const staticFiles = []; const dynamicFiles = []; + const { arch } = this; this.js.forEach(file => { - const jsf = new JsFile(file, { arch: this.arch }); + const jsf = new JsFile(file, { arch }); if (file.targetPath.startsWith("dynamic/")) { // Make sure file._hash is cached. 
@@ -1240,16 +1320,19 @@ class Target { } }); - var minifier = minifierDef.userPlugin.processFilesForBundle - .bind(minifierDef.userPlugin); + var markedMinifier = buildmessage.markBoundary( + minifierDef.userPlugin.processFilesForBundle, + minifierDef.userPlugin + ); buildmessage.enterJob('minifying app code', function () { try { - var markedMinifier = buildmessage.markBoundary(minifier); - markedMinifier(staticFiles, { minifyMode }); - dynamicFiles.forEach(file => { - markedMinifier([file], { minifyMode }); - }); + Promise.all([ + markedMinifier(staticFiles, { minifyMode }), + ...dynamicFiles.map( + file => markedMinifier([file], { minifyMode }) + ), + ]).await(); } catch (e) { buildmessage.exception(e); } @@ -1267,6 +1350,7 @@ class Target { const newFile = new File({ info: 'minified js', + arch, data: Buffer.from(file.data, 'utf8'), }); @@ -1295,6 +1379,7 @@ class Target { const contents = newFile.contents(); const statsFile = new File({ info: "bundle size stats JSON", + arch, data: Buffer.from(JSON.stringify({ minifier: { name: minifierDef.isopack.name, @@ -1366,26 +1451,43 @@ class Target { } } + // Overrides a cordova dependency version. + _overrideCordovaDependencyVersion(scoped, id, name, version) { + if (!scoped) { + this.cordovaDependencies[id] = version; + } else { + if (id in this.cordovaDependencies) { + delete this.cordovaDependencies[id]; + } + this.cordovaDependencies[name] = version; + } + } + // Add a Cordova plugin dependency to the target. If the same plugin // has already been added at a different version and `override` is // false, use whichever version is newest. If `override` is true, then // we always add the exact version specified, overriding any other // version that has already been added. + // Additionally we need to be sure that a cordova-plugin-name gets + // overriden with @scope/cordova-plugin-name. _addCordovaDependency(name, version, override) { if (! 
this.cordovaDependencies) { return; } + const scoped = name[0] === '@'; + const id = scoped ? name.split('/')[1] : name; if (override) { - this.cordovaDependencies[name] = version; + this._overrideCordovaDependencyVersion(scoped, id, name, version); } else { - if (_.has(this.cordovaDependencies, name)) { - var existingVersion = this.cordovaDependencies[name]; + if (_.has(this.cordovaDependencies, id)) { + const existingVersion = this.cordovaDependencies[id]; if (existingVersion === version) { return; } - this.cordovaDependencies[name] = packageVersionParser. + const versionToSet = packageVersionParser. lessThan(existingVersion, version) ? version : existingVersion; + this._overrideCordovaDependencyVersion(scoped, id, name, versionToSet); } else { this.cordovaDependencies[name] = version; } @@ -1486,43 +1588,11 @@ class ClientTarget extends Target { // Minify the CSS in this target minifyCss(minifierDef, minifyMode) { - const sources = this.css.map((file) => { - return new CssFile(file, { - arch: this.arch - }); + this.css = minifyCssFiles(this.css, { + arch: this.arch, + minifier: minifierDef, + minifyMode, }); - const minifier = minifierDef.userPlugin.processFilesForBundle.bind( - minifierDef.userPlugin); - - buildmessage.enterJob('minifying app stylesheet', function () { - try { - const markedMinifier = buildmessage.markBoundary(minifier); - markedMinifier(sources, { minifyMode }); - } catch (e) { - buildmessage.exception(e); - } - }); - - this.css = _.flatten(sources.map((source) => { - return source._minifiedFiles.map((file) => { - const newFile = new File({ - info: 'minified css', - data: Buffer.from(file.data, 'utf8') - }); - if (file.sourceMap) { - newFile.setSourceMap(file.sourceMap, '/'); - } - - if (file.path) { - newFile.setUrlFromRelPath(file.path); - newFile.targetPath = file.path; - } else { - newFile.setUrlToHash('.css', '?meteor_css_resource=true'); - } - - return newFile; - }); - })); } // Output the finished target to disk @@ -1530,8 +1600,6 @@ 
class ClientTarget extends Target { // Returns an object with the following keys: // - controlFile: the path (relative to 'builder') of the control file for // the target - // - nodePath: an array of paths required to be set in the NODE_PATH - // environment variable. write(builder, {minifyMode}) { builder.reserve("program.json"); @@ -1546,9 +1614,28 @@ class ClientTarget extends Target { // Reserve all file names from the manifest, so that interleaved // generateFilename calls don't overlap with them. - eachResource((file, type) => - builder.reserve(file.targetPath) - ); + + const targetPathToHash = new Map; + eachResource((file, type) => { + const hash = targetPathToHash.get(file.targetPath); + if (hash) { + // When we add assets that have a URL prefix like /__cordova, we + // also add them without the prefix, which means there could be + // collisions between target paths, causing builder.reserve to + // throw an exception. However, we tolerate collisions (and call + // builder.reserve only once) if the hashes of the two assets are + // identical, which should always be the case when we register a + // single asset using multiple target paths. If the hashes do not + // match for some reason, we just call builder.reserve again and + // let it throw. + if (file.hash() === hash) { + return; + } + } else { + targetPathToHash.set(file.targetPath, file.hash()); + } + builder.reserve(file.targetPath); + }); // Build up a manifest of all resources served via HTTP. const manifest = []; @@ -1594,6 +1681,7 @@ class ClientTarget extends Target { // Set this now, in case we mutated the file's contents. manifestItem.size = file.size(); manifestItem.hash = file.hash(); + manifestItem.sri = file.sri(); if (! 
file.targetPath.startsWith("dynamic/")) { writeFile(file, builder); @@ -1696,12 +1784,70 @@ class ClientTarget extends Target { builder.writeJson('program.json', program); return { - controlFile: "program.json", - nodePath: [] + controlFile: "program.json" }; } } +const { wrap, defaultMakeCacheKey } = require("optimism"); +const minifyCssFiles = Profile("minifyCssFiles", wrap(function (files, { + arch, + minifier, + minifyMode, +}) { + const sources = files.map(file => new CssFile(file, { arch })); + const markedMinifier = buildmessage.markBoundary( + minifier.userPlugin.processFilesForBundle, + minifier.userPlugin, + ); + + buildmessage.enterJob('minifying app stylesheet', function () { + try { + Promise.await(markedMinifier(sources, { minifyMode })); + } catch (e) { + buildmessage.exception(e); + } + }); + + return _.flatten(sources.map((source) => { + return source._minifiedFiles.map((file) => { + const newFile = new File({ + info: 'minified css', + arch, + data: Buffer.from(file.data, 'utf8') + }); + if (file.sourceMap) { + newFile.setSourceMap(file.sourceMap, '/'); + } + + if (file.path) { + newFile.setUrlFromRelPath(file.path); + newFile.targetPath = file.path; + } else { + newFile.setUrlToHash('.css', '?meteor_css_resource=true'); + } + + return newFile; + }); + })); +}, { + makeCacheKey(files, { arch, minifier, minifyMode }) { + return defaultMakeCacheKey( + minifier, + arch, + minifyMode, + hashOfFiles(files), + ); + } +})); + +const { createHash } = require("crypto"); +function hashOfFiles(files) { + const hash = createHash("sha1"); + files.forEach(file => hash.update(file.hash()).update("\0")); + return hash.digest("hex"); +} + // mark methods for profiling [ 'minifyCss', @@ -1885,6 +2031,10 @@ class JsImage { } function tryLookup(nodeModulesPath, name) { + if (typeof nodeModulesPath !== "string") { + return; + } + var nodeModulesTopDir = files.pathJoin( nodeModulesPath, name.split("/", 1)[0] @@ -1897,18 +2047,39 @@ class JsImage { } } - const found = 
_.some(item.nodeModulesDirectories, nmd => { + let found = _.some(item.nodeModulesDirectories, nmd => { // Npm.require doesn't consider local node_modules // directories. return ! nmd.local && tryLookup(nmd.sourcePath, name); }); - if (found || tryLookup(devBundleLibNodeModulesDir, name)) { - return require(fullPath); + if (! found && + bindings.Plugin && + typeof bindings.Plugin.name === "string") { + // If this package is part of a build plugin, try looking up + // the requested module in any node_modules directories + // belonging to the plugin package, as declared by + // + // Package.registerBuildPlugin({ + // name: "this-plugin-name", + // ... + // npmDependencies: { name: version, ... } + // }); + // + // in the parent package (e.g. ecmascript, coffeescript). + const nmdSourcePaths = + nodeModulesDirsByPackageName.get(bindings.Plugin.name); + if (Array.isArray(nmdSourcePaths)) { + found = _.some(nmdSourcePaths, sourcePath => { + return tryLookup(sourcePath, name); + }); + } } - if (appNodeModules && - tryLookup(appNodeModules, name)) { + found = found || tryLookup(devBundleLibNodeModulesDir, name); + found = found || tryLookup(appNodeModules, name); + + if (found) { return require(fullPath); } @@ -2063,8 +2234,6 @@ class JsImage { // Returns an object with the following keys: // - controlFile: the path (relative to 'builder') of the control file for // the image - // - nodePath: an array of paths required to be set in the NODE_PATH - // environment variable. write(builder, { buildMode, // falsy or 'symlink', documented on exports.bundle @@ -2141,6 +2310,10 @@ class JsImage { delete loadItem.node_modules; } + // Will be initialized with a Buffer version of item.source, with + // //# sourceMappingURL comments appropriately removed/appended. + let sourceBuffer; + if (item.sourceMap) { const sourceMapBuffer = Buffer.from(JSON.stringify(item.sourceMap), "utf8"); @@ -2156,7 +2329,7 @@ class JsImage { // Remove any existing sourceMappingURL line. 
(eg, if roundtripping // through JsImage.readFromDisk, don't end up with two!) - item.source = addSourceMappingURL( + sourceBuffer = addSourceMappingURL( item.source, sourceMappingURL, item.targetPath, @@ -2165,11 +2338,17 @@ class JsImage { if (item.sourceMapRoot) { loadItem.sourceMapRoot = item.sourceMapRoot; } + } else { + // If we do not have an item.sourceMap, then we still want to + // remove any existing //# sourceMappingURL comments. + // https://github.com/meteor/meteor/issues/9894 + sourceBuffer = removeSourceMappingURLs(item.source); } loadItem.path = builder.writeToGeneratedFilename( item.targetPath, - { data: Buffer.from(item.source, 'utf8') }); + { data: sourceBuffer } + ); if (!_.isEmpty(item.assets)) { // For package code, static assets go inside a directory inside @@ -2251,7 +2430,7 @@ class JsImage { copyOptions.filter = prodPackagePredicate; } - builder.copyDirectory(copyOptions); + builder.copyNodeModulesDirectory(copyOptions); } }); @@ -2272,8 +2451,7 @@ class JsImage { }); return { - controlFile: "program.json", - nodePath: [] + controlFile: "program.json" }; } @@ -2390,7 +2568,7 @@ class ServerTarget extends JsImageTarget { constructor (options, ...args) { super(options, ...args); - this.clientTargets = options.clientTargets; + this.clientArchs = options.clientArchs; this.releaseName = options.releaseName; this.appIdentifier = options.appIdentifier; @@ -2407,36 +2585,18 @@ class ServerTarget extends JsImageTarget { buildMode, // falsy or 'symlink', documented in exports.bundle includeNodeModules, - // a function that takes {forTarget: Target, relativeTo: Target} and - // return the path of one target in the bundle relative to another. hack - // to get the path of the client target.. 
we'll find a better solution - // here eventually - getRelativeTargetPath, }) { var self = this; - var nodePath = []; // This is where the dev_bundle will be downloaded and unpacked builder.reserve('dependencies'); - // Mapping from arch to relative path to the client program, if we have any - // (hack). Ex.: { 'web.browser': '../web.browser/program.json', ... } - var clientTargetPaths = {}; - if (self.clientTargets) { - _.each(self.clientTargets, function (target) { - clientTargetPaths[target.arch] = files.pathJoin(getRelativeTargetPath({ - forTarget: target, - relativeTo: self, - }), 'program.json'); - }); - } - // We will write out config.json, the dependency kit, and the // server driver alongside the JsImage builder.writeJson("config.json", { meteorRelease: self.releaseName || undefined, appId: self.appIdentifier || undefined, - clientPaths: clientTargetPaths + clientArchs: self.clientArchs || undefined, }); // Write package.json and npm-shrinkwrap.json for the dependencies of @@ -2530,7 +2690,6 @@ class ServerTarget extends JsImageTarget { var controlFilePath = 'boot.js'; return { controlFile: controlFilePath, - nodePath: nodePath }; } } @@ -2546,10 +2705,7 @@ var writeFile = Profile("bundler writeFile", function (file, builder, options) { if (! file.targetPath) { throw new Error("No targetPath?"); } - var contents = file.contents(); - if (! (contents instanceof Buffer)) { - throw new Error("contents not a Buffer?"); - } + // XXX should probably use sanitize: true, but that will have // to wait until the server is actually driven by the manifest // (rather than just serving all of the files in a certain @@ -2560,35 +2716,67 @@ var writeFile = Profile("bundler writeFile", function (file, builder, options) { if (options && options.sourceMapUrl) { data = addSourceMappingURL(data, options.sourceMapUrl); - } - - if (! 
Buffer.isBuffer(data)) { - data = Buffer.from(data, "utf8"); + } else { + // If we do not have an options.sourceMapUrl to append, then we still + // want to remove any existing //# sourceMappingURL comments. + // https://github.com/meteor/meteor/issues/9894 + data = removeSourceMappingURLs(data); } builder.write(file.targetPath, { data, hash }); }); +// Takes a Buffer or string and returns a Buffer. If it looks like there +// are no //# sourceMappingURL comments to remove, an attempt is made to +// return the provided buffer without modification. +function removeSourceMappingURLs(data) { + if (Buffer.isBuffer(data)) { + // Unfortuantely there is no way to search a Buffer using a RegExp, so + // there's a chance of false positives here, which could lead to + // unnecessarily stringifying and re-Buffer.from-ing the data, though + // that should not cause any logical problems. + if (! data.includes("//# source", 0, "utf8")) { + return data; + } + data = data.toString("utf8"); + } + + // Remove any/all existing //# sourceMappingURL comments using + // String#replace (since unfortunately there is no Buffer#replace). + data = data.replace(/\n\/\/# source(?:Mapping)?URL=[^\n]+/g, "\n"); + + // Always return a Buffer. + return Buffer.from(data, "utf8"); +} + +const newLineBuffer = Buffer.from("\n", "utf8"); + // The data argument may be either a Buffer or a string, but this function -// always returns a string. +// always returns a Buffer. function addSourceMappingURL(data, url, targetPath) { - const parts = [ - // If data is a Buffer, convert it to a string. - data.toString("utf8") - // Remove any existing sourceURL or sourceMappingURL comments. - .replace(/\n\/\/# source(?:Mapping)?URL=[^\n]+/g, '\n') - ]; + // An array of Buffer objects, even when data is a string. + const parts = [removeSourceMappingURLs(data)]; if (targetPath) { // If a targetPath was provided, use it to add a sourceURL comment to // help associate output files with mapped source files. 
- parts.push(`//# sourceURL=${SOURCE_URL_PREFIX}/${targetPath}`); + parts.push( + newLineBuffer, + Buffer.from( + `//# sourceURL=${SOURCE_URL_PREFIX}/${targetPath}`, + "utf8" + ) + ); } - parts.push(`//# sourceMappingURL=${url}`); - parts.push(""); // Trailing newline. + parts.push( + newLineBuffer, + Buffer.from("//# sourceMappingURL=" + url, "utf8"), + newLineBuffer // trailing newline + ); - return parts.join("\n"); + // Always return a Buffer. + return Buffer.concat(parts); } // Writes a target a path in 'programs' @@ -2596,19 +2784,24 @@ var writeTargetToPath = Profile( "bundler writeTargetToPath", function (name, target, outputPath, { includeNodeModules, - getRelativeTargetPath, - previousBuilder, + previousBuilder = null, buildMode, minifyMode, }) { var builder = new Builder({ outputPath: files.pathJoin(outputPath, 'programs', name), - previousBuilder + previousBuilder, + // We do not force an in-place build for individual targets like + // .meteor/local/build/programs/web.browser.legacy, because they + // tend to be written atomically, and it's important on Windows to + // avoid overwriting files that might be open currently in the build + // or server process. If in-place builds were safer on Windows, they + // would be much quicker than from-scratch rebuilds. + forceInPlaceBuild: false, }); var targetBuild = target.write(builder, { includeNodeModules, - getRelativeTargetPath, buildMode, minifyMode, }); @@ -2619,7 +2812,6 @@ var writeTargetToPath = Profile( name, arch: target.mostCompatibleArch(), path: files.pathJoin('programs', name, targetBuild.controlFile), - nodePath: targetBuild.nodePath, cordovaDependencies: target.cordovaDependencies || undefined, builder }; @@ -2642,32 +2834,35 @@ var writeTargetToPath = Profile( // serverWatchSet: watch.WatchSet for all files and directories that // ultimately went into all server programs // starManifest: the JSON manifest of the star -// nodePath: an array of paths required to be set in NODE_PATH. 
It's -// up to the called to determine what they should be. // } // // options: // - includeNodeModules: string or falsy, documented on exports.bundle // - builtBy: vanity identification string to write into metadata // - releaseName: The Meteor release version -// - getRelativeTargetPath: see doc at ServerTarget.write // - previousBuilder: previous Builder object used in previous iteration var writeSiteArchive = Profile("bundler writeSiteArchive", function ( targets, outputPath, { includeNodeModules, builtBy, releaseName, - getRelativeTargetPath, - previousBuilders, + previousBuilders = Object.create(null), buildMode, minifyMode }) { const builders = {}; - const previousStarBuilder = previousBuilders && previousBuilders.star; - const builder = new Builder({outputPath, - previousBuilder: previousStarBuilder}); - builders.star = builder; + const builder = builders.star = new Builder({ + outputPath, + previousBuilder: previousBuilders.star, + // If we were to build .meteor/local/build by first writing a fresh + // temporary directory and then renaming it over the existing output + // directory, we would sacrifice the ability to preserve any output + // directories that were not written as part of this archive, such as + // .meteor/local/build/programs/web.browser.legacy, which is often + // built/written later for performance reasons (#10055). + forceInPlaceBuild: true, + }); try { var json = { @@ -2678,7 +2873,6 @@ var writeSiteArchive = Profile("bundler writeSiteArchive", function ( nodeVersion: process.versions.node, npmVersion: meteorNpm.npmVersion, }; - var nodePath = []; // Tell the deploy server what version of the dependency kit we're using, so // it can load the right modules. (Include this even if we copied or @@ -2733,31 +2927,23 @@ Find out more about Meteor at meteor.com. Object.keys(targets).forEach(name => { const target = targets[name]; - const previousBuilder = - (previousBuilders && previousBuilders[name]) ? 
- previousBuilders[name] : null; const { arch, path, cordovaDependencies, - nodePath: targetNP, builder: targetBuilder - } = - writeTargetToPath(name, target, builder.buildPath, { - includeNodeModules, - builtBy, - releaseName, - getRelativeTargetPath, - previousBuilder, - buildMode, - minifyMode - }); + } = writeTargetToPath(name, target, builder.buildPath, { + includeNodeModules, + builtBy, + releaseName, + previousBuilder: previousBuilders[name] || null, + buildMode, + minifyMode + }); builders[name] = targetBuilder; json.programs.push({ name, arch, path, cordovaDependencies }); - - nodePath = nodePath.concat(targetNP); }); // Control file @@ -2772,15 +2958,15 @@ Find out more about Meteor at meteor.com. // be adjusted so we can later pass them as previousBuilder's Object.keys(builders).forEach(name => { const subBuilder = builders[name]; - subBuilder.outputPath = builder.outputPath + subBuilder.outputPath.substring(builder.buildPath.length); + subBuilder.outputPath = builder.outputPath + + subBuilder.outputPath.substring(builder.buildPath.length); }); return { clientWatchSet, serverWatchSet, starManifest: json, - nodePath, - builders + builders, }; } catch (e) { builder.abort(); @@ -2851,17 +3037,18 @@ Find out more about Meteor at meteor.com. * you are testing! 
*/ -exports.bundle = function (options) { +exports.bundle = Profile("bundler.bundle", function (options) { return files.withCache(() => bundle(options)); -}; +}); function bundle({ projectContext, outputPath, includeNodeModules, buildOptions, - previousBuilders, + previousBuilders = Object.create(null), hasCachedBundle, + allowDelayedClientBuilds = false, }) { buildOptions = buildOptions || {}; @@ -2889,10 +3076,11 @@ function bundle({ var serverWatchSet = new watch.WatchSet(); var clientWatchSet = new watch.WatchSet(); var starResult = null; - var targets = {}; - var nodePath = []; var lintingMessages = null; - var builders = {}; + + // If delayed client builds are allowed, this array will be populated + // with callbacks to run after the application process has started up. + const postStartupCallbacks = allowDelayedClientBuilds && []; const bundlerCacheDir = projectContext.getProjectLocalDirectory('bundler-cache'); @@ -2935,22 +3123,18 @@ function bundle({ }); var makeServerTarget = Profile( - "bundler.bundle..makeServerTarget", function (app, clientTargets) { - var targetOptions = { + "bundler.bundle..makeServerTarget", function (app, clientArchs) { + const server = new ServerTarget({ bundlerCacheDir, packageMap: projectContext.packageMap, isopackCache: projectContext.isopackCache, sourceRoot: packageSource.sourceRoot, arch: serverArch, - releaseName: releaseName, - appIdentifier: appIdentifier, + releaseName, + appIdentifier, buildMode: buildOptions.buildMode, - }; - if (clientTargets) { - targetOptions.clientTargets = clientTargets; - } - - var server = new ServerTarget(targetOptions); + clientArchs, + }); server.make({ packages: [app] @@ -3010,80 +3194,98 @@ function bundle({ return mergeAppWatchSets(); } - var clientTargets = []; + const targets = Object.create(null); + const hasOwn = Object.prototype.hasOwnProperty; + + // Write to disk + const writeOptions = { + includeNodeModules, + builtBy, + releaseName, + minifyMode, + }; + + function 
writeClientTarget(target) { + const { arch } = target; + const written = writeTargetToPath(arch, target, outputPath, { + buildMode: buildOptions.buildMode, + previousBuilder: previousBuilders[arch], + ...writeOptions, + }); + clientWatchSet.merge(target.getWatchSet()); + previousBuilders[arch] = written.builder; + } + // Client - _.each(webArchs, function (arch) { - var client = makeClientTarget(app, arch, {minifiers}); - clientTargets.push(client); - targets[arch] = client; + webArchs.forEach(arch => { + if (allowDelayedClientBuilds && + hasOwn.call(previousBuilders, arch) && + projectContext.platformList.canDelayBuildingArch(arch)) { + // If delayed client builds are allowed, and we have a previous + // builder for this arch, and it's an arch that we can safely + // build later (e.g. web.browser.legacy), then schedule it to be + // built after the server has started up. + postStartupCallbacks.push(async ({ + pauseClient, + refreshClient, + runLog, + }) => { + const start = +new Date; + + // Build the target first. + const target = makeClientTarget(app, arch, { minifiers }); + + // Tell the webapp package to pause responding to requests from + // clients that use this arch, because we're about to write a + // new version of this bundle to disk. If the message fails + // becuase the child process exited, proceed with writing the + // target anyway. + await pauseClient(arch).catch(ignoreHarmlessErrors); + + // Now write the target to disk. Note that we are rewriting the + // bundle in place, so this work is not atomic by any means, + // which is why we needed to pause the client. + writeClientTarget(target); + + // Refresh and unpause the client, now that writing is finished. + // If the child process exited for some reason, don't worry if + // this message fails. + await refreshClient(arch).catch(ignoreHarmlessErrors); + + // Let the webapp package running in the child process know it + // should regenerate the client program for this arch. 
+ if (Profile.enabled) { + runLog.log(`Finished delayed build of ${arch} in ${ + new Date - start + }ms`, { arrow: true }); + } + }); + + } else { + // Otherwise make the client target now, and write it below. + targets[arch] = makeClientTarget(app, arch, {minifiers}); + } }); // Server if (! hasCachedBundle) { - var server = makeServerTarget(app, clientTargets); - targets.server = server; + targets.server = makeServerTarget(app, webArchs); } - // Hack to let servers find relative paths to clients. Should find - // another solution eventually (probably some kind of mount - // directive that mounts the client bundle in the server at runtime) - var getRelativeTargetPath = function (options) { - var pathForTarget = function (target) { - var name; - _.each(targets, function (t, n) { - if (t === target) { - name = n; - } - }); - if (! name) { - throw new Error("missing target?"); - } - return files.pathJoin('programs', name); - }; - - return files.pathRelative(pathForTarget(options.relativeTo), - pathForTarget(options.forTarget)); - }; - - // Write to disk - var writeOptions = { - includeNodeModules, - builtBy, - releaseName, - getRelativeTargetPath, - minifyMode: minifyMode - }; - if (outputPath !== null) { if (hasCachedBundle) { // If we already have a cached bundle, just recreate the new targets. // XXX This might make the contents of "star.json" out of date. 
- builders = _.clone(previousBuilders); - _.each(targets, function (target, name) { - const previousBuilder = previousBuilders && previousBuilders[name]; - var targetBuild = writeTargetToPath( - name, target, outputPath, - _.extend({ - buildMode: buildOptions.buildMode, - }, writeOptions, {previousBuilder}) - ); - nodePath = nodePath.concat(targetBuild.nodePath); - clientWatchSet.merge(target.getWatchSet()); - builders[name] = targetBuild.builder; - }); + _.each(targets, writeClientTarget); } else { - starResult = writeSiteArchive( - targets, - outputPath, - _.extend({ - buildMode: buildOptions.buildMode, - }, writeOptions, {previousBuilders}) - ); - - nodePath = nodePath.concat(starResult.nodePath); + starResult = writeSiteArchive(targets, outputPath, { + buildMode: buildOptions.buildMode, + previousBuilders, + ...writeOptions, + }); serverWatchSet.merge(starResult.serverWatchSet); clientWatchSet.merge(starResult.clientWatchSet); - builders = starResult.builders; + Object.assign(previousBuilders, starResult.builders); } } @@ -3101,11 +3303,21 @@ function bundle({ serverWatchSet, clientWatchSet, starManifest: starResult && starResult.starManifest, - nodePath, - builders + postStartupCallbacks, }; } +// Used as a catch handler for pauseClient and refreshClient above. +function ignoreHarmlessErrors(error) { + switch (error && error.message) { + case "process exited": + case "channel closed": + return; + default: + throw error; + } +} + // Returns null if there are no lint warnings and the app has no linters // defined. Returns an empty MessageSet if the app has a linter defined but // there are no lint warnings (on app or packages). 
diff --git a/tools/isobuild/compiler-plugin.js b/tools/isobuild/compiler-plugin.js index 63092ed7d0..9d84313e76 100644 --- a/tools/isobuild/compiler-plugin.js +++ b/tools/isobuild/compiler-plugin.js @@ -18,10 +18,13 @@ import {cssToCommonJS} from "./css-modules.js"; import Resolver from "./resolver.js"; import { optimisticStatOrNull, + optimisticReadJsonOrNull, } from "../fs/optimistic.js"; import { isTestFilePath } from './test-files.js'; +const hasOwn = Object.prototype.hasOwnProperty; + // This file implements the new compiler plugins added in Meteor 1.2, which are // registered with the Plugin.registerCompiler API. // @@ -62,7 +65,7 @@ import { isTestFilePath } from './test-files.js'; // Cache the (slightly post-processed) results of linker.fullLink. const CACHE_SIZE = process.env.METEOR_LINKER_CACHE_SIZE || 1024*1024*100; const CACHE_DEBUG = !! process.env.METEOR_TEST_PRINT_LINKER_CACHE_DEBUG; -const LINKER_CACHE_SALT = 19; // Increment this number to force relinking. +const LINKER_CACHE_SALT = 21; // Increment this number to force relinking. const LINKER_CACHE = new LRU({ max: CACHE_SIZE, // Cache is measured in bytes. We don't care about servePath. 
@@ -107,17 +110,19 @@ export class CompilerPluginProcessor { sourceRoot, isopackCache, linkerCacheDir, + minifyCssResource, }) { - const self = this; + Object.assign(this, { + unibuilds, + arch, + sourceRoot, + isopackCache, + linkerCacheDir, + minifyCssResource, + }); - self.unibuilds = unibuilds; - self.arch = arch; - self.sourceRoot = sourceRoot; - self.isopackCache = isopackCache; - - self.linkerCacheDir = linkerCacheDir; - if (self.linkerCacheDir) { - files.mkdir_p(self.linkerCacheDir); + if (this.linkerCacheDir) { + files.mkdir_p(this.linkerCacheDir); } } @@ -184,11 +189,13 @@ export class CompilerPluginProcessor { return new InputFile(resourceSlot); }); - var markedMethod = buildmessage.markBoundary( - sourceProcessor.userPlugin.processFilesForTarget.bind( - sourceProcessor.userPlugin)); + const markedMethod = buildmessage.markBoundary( + sourceProcessor.userPlugin.processFilesForTarget, + sourceProcessor.userPlugin + ); + try { - markedMethod(inputFiles); + Promise.await(markedMethod(inputFiles)); } catch (e) { buildmessage.exception(e); } @@ -219,6 +226,11 @@ class InputFile extends buildPluginModule.InputFile { // Map from imported module identifier strings (possibly relative) to // fully require.resolve'd module identifiers. this._resolveCache = Object.create(null); + + // Communicate to compiler plugins that methods like addJavaScript + // accept a lazy finalizer function as a second argument, so that + // compilation can be avoided until/unless absolutely necessary. + this.supportsLazyCompilation = true; } getContentsAsBuffer() { @@ -262,10 +274,10 @@ class InputFile extends buildPluginModule.InputFile { } getFileOptions() { - var self = this; // XXX fileOptions only exists on some resources (of type "source"). The JS // resources might not have this property. 
- return self._resourceSlot.inputResource.fileOptions || {}; + const { inputResource } = this._resourceSlot; + return inputResource.fileOptions || (inputResource.fileOptions = {}); } readAndWatchFileWithHash(path) { @@ -365,7 +377,12 @@ class InputFile extends buildPluginModule.InputFile { } const batch = this._resourceSlot.packageSourceBatch; - const resolver = batch.getResolver(); + const resolver = batch.getResolver({ + // Make sure we use a server architecture when resolving, so that we + // don't accidentally use package.json "browser" fields. + // https://github.com/meteor/meteor/issues/9870 + targetArch: archinfo.host(), + }); const resolved = resolver.resolve(id, parentPath); if (resolved === "missing") { @@ -438,16 +455,16 @@ class InputFile extends buildPluginModule.InputFile { * @param {String|Object} options.sourceMap A stringified JSON * sourcemap, in case the stylesheet was generated from a different * file. + * @param {Function} lazyFinalizer Optional function that can be called + * to obtain any remaining options that may be + * expensive to compute, and thus should only be + * computed if/when we are sure this CSS will be used + * by the application. * @memberOf InputFile * @instance */ - addStylesheet(options) { - var self = this; - if (options.sourceMap && typeof options.sourceMap === 'string') { - // XXX remove an anti-XSSI header? ")]}'\n" - options.sourceMap = JSON.parse(options.sourceMap); - } - self._resourceSlot.addStylesheet(options); + addStylesheet(options, lazyFinalizer) { + this._resourceSlot.addStylesheet(options, lazyFinalizer); } /** @@ -463,16 +480,16 @@ class InputFile extends buildPluginModule.InputFile { * @param {String|Object} options.sourceMap A stringified JSON * sourcemap, in case the JavaScript file was generated from a * different file. 
+ * @param {Function} lazyFinalizer Optional function that can be called + * to obtain any remaining options that may be + * expensive to compute, and thus should only be + * computed if/when we are sure this JavaScript will + * be used by the application. * @memberOf InputFile * @instance */ - addJavaScript(options) { - var self = this; - if (options.sourceMap && typeof options.sourceMap === 'string') { - // XXX remove an anti-XSSI header? ")]}'\n" - options.sourceMap = JSON.parse(options.sourceMap); - } - self._resourceSlot.addJavaScript(options); + addJavaScript(options, lazyFinalizer) { + this._resourceSlot.addJavaScript(options, lazyFinalizer); } /** @@ -486,12 +503,16 @@ class InputFile extends buildPluginModule.InputFile { * file. * @param {String} [options.hash] Optionally, supply a hash for the output * file. + * @param {Function} lazyFinalizer Optional function that can be called + * to obtain any remaining options that may be + * expensive to compute, and thus should only be + * computed if/when we are sure this asset will be + * used by the application. * @memberOf InputFile * @instance */ - addAsset(options) { - var self = this; - self._resourceSlot.addAsset(options); + addAsset(options, lazyFinalizer) { + this._resourceSlot.addAsset(options, lazyFinalizer); } /** @@ -501,26 +522,29 @@ class InputFile extends buildPluginModule.InputFile { * @param {String} options.section Which section of the document should * be appended to. Can only be "head" or "body". * @param {String} options.data The content to append. + * @param {Function} lazyFinalizer Optional function that can be called + * to obtain any remaining options that may be + * expensive to compute, and thus should only be + * computed if/when we are sure this HTML will be used + * by the application. 
* @memberOf InputFile * @instance */ addHtml(options) { - var self = this; - self._resourceSlot.addHtml(options); + if (typeof lazyFinalizer === "function") { + // For now, just call the lazyFinalizer function immediately. Since + // HTML is not compiled, this immediate invocation is probably + // permanently appropriate for addHtml, whereas methods like + // addJavaScript benefit from waiting to call lazyFinalizer. + Object.assign(options, Promise.await(lazyFinalizer())); + } + + this._resourceSlot.addHtml(options); } _reportError(message, info) { - if (this.getFileOptions().lazy === true) { - // Files with fileOptions.lazy === true were not explicitly added to - // the source batch via api.addFiles or api.mainModule, so any - // compilation errors should not be fatal until the files are - // actually imported by the ImportScanner. Attempting compilation is - // still important for lazy files that might end up being imported - // later, which is why we defang the error here, instead of avoiding - // compilation preemptively. Note also that exceptions thrown by the - // compiler will still cause build errors. - this._resourceSlot.addError(message, info); - } else { + this._resourceSlot.addError(message, info); + if (! this.getFileOptions().lazy) { super._reportError(message, info); } } @@ -537,6 +561,8 @@ class ResourceSlot { self.outputResources = []; // JS, which gets linked together at the end. self.jsOutputResources = []; + // Errors encountered while processing this resource. + self.errors = []; self.sourceProcessor = sourceProcessor; self.packageSourceBatch = packageSourceBatch; @@ -590,156 +616,206 @@ class ResourceSlot { return fileOptions && fileOptions[name]; } - _isLazy(options) { + _isLazy(options, isJavaScript) { let lazy = this._getOption("lazy", options); if (typeof lazy === "boolean") { return lazy; } - // If file.lazy was not previously defined, mark the file lazy if - // it is contained by an imports directory. 
Note that any files - // contained by a node_modules directory will already have been - // marked lazy in PackageSource#_inferFileOptions. Same for - // non-test files if running (non-full-app) tests (`meteor test`) - if (!this.packageSourceBatch.useMeteorInstall) { + const isApp = ! this.packageSourceBatch.unibuild.pkg.name; + if (! isApp) { + // Meteor package files must be explicitly added by api.addFiles or + // api.mainModule, and are implicitly eager unless specified + // otherwise via this.inputResource.fileOptions.lazy, which we + // already checked above. return false; } + // The rest of this method assumes we're considering a resource in an + // application rather than a Meteor package. + + if (! this.packageSourceBatch.useMeteorInstall) { + // If this application is somehow still not using the module system, + // then everything is eagerly loaded. + return false; + } + + const { + isTest = false, + isAppTest = false, + } = global.testCommandMetadata || {}; + + const runningTests = isTest || isAppTest; + + if (isJavaScript) { + if (runningTests) { + const testModule = this._getOption("testModule", options); + + // If we set fileOptions.testModule = true in _inferFileOptions, + // then consider this module an eager entry point for tests. If we + // set it to false (rather than leaving it undefined), that means + // a meteor.testModule was configured in package.json, and this + // test module was not it. In that case, we fall through to the + // mainModule check, ignoring isTestFilePath, because we can + // assume this is not an eager test module. If testModule was not + // set to a boolean, then isTestFilePath should determine if this + // is an eager test module. + const isEagerTestModule = typeof testModule === "boolean" + ? testModule + : isTestFilePath(this.inputResource.path); + + if (isEagerTestModule) { + // If we know it's eager, then it isn't lazy. + return false; + } + + if (! 
isAppTest) { + // If running `meteor test` without the --full-app option, then + // any JS modules that are not eager test modules must be lazy. + return true; + } + } + + // PackageSource#_inferFileOptions (in package-source.js) sets the + // mainModule option to false to indicate that a meteor.mainModule + // was configured for this architecture, but this module was not it. + // It's important to wait until this point (ResourceSlot#_isLazy) to + // make the final call, because we can finally tell whether the + // output resource is JavaScript or not (non-JS resources are not + // affected by the meteor.mainModule option). + const mainModule = this._getOption("mainModule", options); + if (typeof mainModule === "boolean") { + return ! mainModule; + } + } + + // In other words, the imports directory remains relevant for non-JS + // resources, and for JS resources in the absence of an explicit + // meteor.mainModule configuration in package.json. const splitPath = this.inputResource.path.split(files.pathSep); const isInImports = splitPath.indexOf("imports") >= 0; - - if (global.testCommandMetadata && - (global.testCommandMetadata.isTest || - global.testCommandMetadata.isAppTest)) { - // test files should always be included, if we're running app - // tests. - return isInImports && !isTestFilePath(this.inputResource.path); - } else { - return isInImports; - } + return isInImports; } - addStylesheet(options) { - const self = this; - if (! self.sourceProcessor) { + addStylesheet(options, lazyFinalizer) { + if (! 
this.sourceProcessor) { throw Error("addStylesheet on non-source ResourceSlot?"); } - const data = files.convertToStandardLineEndings(options.data); - const useMeteorInstall = self.packageSourceBatch.useMeteorInstall; - const sourcePath = this.inputResource.path; - const targetPath = options.path || sourcePath; - const resource = { - refreshable: true, - sourcePath, - targetPath, - servePath: self.packageSourceBatch.unibuild.pkg._getServePath(targetPath), - hash: sha1(data), - lazy: this._isLazy(options), - }; + // In contrast to addJavaScript, CSS resources passed to addStylesheet + // default to being eager (non-lazy). + options.lazy = this._isLazy(options, false); - if (useMeteorInstall && resource.lazy) { + const cssResource = new CssOutputResource({ + resourceSlot: this, + options, + lazyFinalizer, + }); + + if (this.packageSourceBatch.useMeteorInstall && + cssResource.lazy) { // If the current packageSourceBatch supports modules, and this CSS // file is lazy, add it as a lazy JS module instead of adding it // unconditionally as a CSS resource, so that it can be imported // when needed. - resource.type = "js"; - resource.data = - Buffer.from(cssToCommonJS(data, resource.hash), "utf8"); + const jsResource = this.addJavaScript(options, () => { + const result = {}; - self.jsOutputResources.push(resource); + let css = this.packageSourceBatch.processor + .minifyCssResource(cssResource); + + if (! css && typeof css !== "string") { + // The minifier didn't do anything, so we should use the + // original contents of cssResource.data. + css = cssResource.data.toString("utf8"); + + if (cssResource.sourceMap) { + // Add the source map as an asset, and append a + // sourceMappingURL comment to the end of the CSS text that + // will be dynamically inserted when/if this JS module is + // evaluated at runtime. Note that this only happens when the + // minifier did not modify the CSS, and thus does not happen + // when we are building for production. 
+ const { servePath } = this.addAsset({ + path: jsResource.targetPath + ".map.json", + data: JSON.stringify(cssResource.sourceMap) + }); + css += "\n//# sourceMappingURL=" + servePath + "\n"; + } + } + + result.data = Buffer.from(cssToCommonJS(css), "utf8"); + + // The JavaScript module that dynamically loads this CSS should + // not inherit the source map of the original CSS output. + result.sourceMap = null; + + return result; + }); } else { // Eager CSS is added unconditionally to a combined