From 58a2ba81d250301e7b5e3da28ae4c1b42d91b2c2 Mon Sep 17 00:00:00 2001
From: t1st3
Date: Mon, 15 Aug 2016 22:06:54 +0200
Subject: [PATCH] Delete crossdomain.xml (#1881)

from #1779

* removes crossdomain.xml
* remove occurences to crossdomain.xml in doc and test
---
 src/crossdomain.xml    | 15 ---------------
 src/doc/misc.md        | 20 --------------------
 src/doc/usage.md       |  6 ------
 test/file_existence.js |  1 -
 4 files changed, 42 deletions(-)
 delete mode 100644 src/crossdomain.xml

diff --git a/src/crossdomain.xml b/src/crossdomain.xml
deleted file mode 100644
index 818b8225..00000000
--- a/src/crossdomain.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/doc/misc.md b/src/doc/misc.md
index 0275240c..846ef5ac 100644
--- a/src/doc/misc.md
+++ b/src/doc/misc.md
@@ -6,7 +6,6 @@ table of contents](TOC.md)
 * [.gitignore](#gitignore)
 * [.editorconfig](#editorconfig)
 * [Server Configuration](#server-configuration)
-* [crossdomain.xml](#crossdomainxml)
 * [robots.txt](#robotstxt)
 * [browserconfig.xml](#browserconfigxml)
 
@@ -114,25 +113,6 @@ Notice that the original repo for the `.htaccess` file is
 [this one](https://github.com/h5bp/server-configs-apache).
 
 
-## crossdomain.xml
-
-The _cross-domain policy file_ is an XML document that gives a web client —
-such as Adobe Flash Player, Adobe Reader, etc. — permission to handle data
-across multiple domains, by:
-
- * granting read access to data
- * permitting the client to include custom headers in cross-domain requests
- * granting permissions for socket-based connections
-
-__e.g.__ If a client hosts content from a particular source domain and that
-content makes requests directed towards a domain other than its own, the remote
-domain would need to host a cross-domain policy file in order to grant access
-to the source domain and allow the client to continue with the transaction.
-
-For more in-depth information, please see Adobe's [cross-domain policy file
-specification](https://www.adobe.com/devnet/articles/crossdomain_policy_file_spec.html).
-
-
 ## robots.txt
 
 The `robots.txt` file is used to give instructions to web robots on what can
diff --git a/src/doc/usage.md b/src/doc/usage.md
index b8fc360a..e2716bc6 100644
--- a/src/doc/usage.md
+++ b/src/doc/usage.md
@@ -39,7 +39,6 @@ A basic HTML5 Boilerplate site initially looks something like this:
 ├── index.html
 ├── humans.txt
 ├── robots.txt
-├── crossdomain.xml
 ├── favicon.ico
 ├── tile-wide.png
 └── tile.png
@@ -112,11 +111,6 @@ technology powering it.
 Edit this file to include any pages you need hidden from search
 engines.
 
-### crossdomain.xml
-
-A template for working with cross-domain requests. [About
-crossdomain.xml](misc.md#crossdomainxml).
-
 ### Icons
 
 Replace the default `favicon.ico`, `tile.png`, `tile-wide.png` and Apple
diff --git a/test/file_existence.js b/test/file_existence.js
index ecc9376d..d64f6304 100644
--- a/test/file_existence.js
+++ b/test/file_existence.js
@@ -20,7 +20,6 @@ var expectedFilesInDistDir = [
     '404.html',
     'apple-touch-icon.png',
     'browserconfig.xml',
-    'crossdomain.xml',
     'css/', // for directories, a `/` character
             // should be included at the end