diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..883576d9 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,10 @@ + +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 00000000..cffa8e62 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,3 @@ +coverage/ +test/assets +workspace/ \ No newline at end of file diff --git a/.eslintrc b/.eslintrc new file mode 100644 index 00000000..f8ad5be8 --- /dev/null +++ b/.eslintrc @@ -0,0 +1,6 @@ +{ + "extends": "@dadi", + "rules": { + "require-atomic-updates": "off" + } +} diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index de275acc..b1e4434a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,4 +1,4 @@ # This is a comment. # Each line is a file pattern followed by one or more owners. -* @jimlambie @eduardoboucas @adamkdean @abovedave +* @eduardoboucas diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..321b5bb7 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +coverage/ +test/assets \ No newline at end of file diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..d9fc58ea --- /dev/null +++ b/.prettierrc @@ -0,0 +1 @@ +"@dadi/prettier-config" diff --git a/.snyk b/.snyk index d9cd7bae..e2233863 100644 --- a/.snyk +++ b/.snyk @@ -1,5 +1,5 @@ # Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. -version: v1.12.0 +version: v1.13.5 ignore: {} # patches apply the minimum changes required to fix a vulnerability patch: @@ -17,3 +17,8 @@ patch: patched: '2018-06-19T09:38:39.334Z' - images > node-gyp > request > hawk > cryptiles > boom > hoek: patched: '2018-06-19T09:38:39.334Z' + SNYK-JS-LODASH-450202: + - snyk > snyk-mvn-plugin > lodash: + patched: '2019-07-10T00:32:06.056Z' + - snyk > snyk-php-plugin > @snyk/composer-lockfile-parser > lodash: + patched: '2019-07-10T00:32:06.056Z' diff --git a/CHANGELOG.md b/CHANGELOG.md index 6eda96ae..e3a39328 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,153 +1,156 @@ # Change Log + All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/). 
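> Editorial note: the new `.eslintrc` and `.prettierrc` added above both delegate to shared `@dadi` presets rather than defining rules inline. As a rough illustration of how the one-line `.prettierrc` (`"@dadi/prettier-config"`) is consumed, the sketch below resolves the shared config through Prettier's Node API and checks a source string against it. The file name and sample source are assumptions made for the example, not part of this changeset.

```js
// Illustrative only: resolve the shared config referenced by the new
// .prettierrc ("@dadi/prettier-config") and check a source string against it.
const prettier = require('prettier')

async function isFormatted(source, filepath) {
  // resolveConfig walks up from `filepath`, finds .prettierrc and loads the
  // shared preset it points at
  const options = await prettier.resolveConfig(filepath)

  return prettier.check(source, {...options, filepath})
}

isFormatted('const a = 1\n', 'index.js').then(console.log)
```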
+## [4.0.0] - 2019-09-23 + +### Changed + +- [#514](https://github.com/dadi/cdn/pull/514): remove support for legacy path-based URL syntax + ## [3.6.2] - 2019-08-23 ### Fixed -* [#510](https://github.com/dadi/cdn/pull/510): ensure search parameters are used in passthrough URL +- [#510](https://github.com/dadi/cdn/pull/510): ensure search parameters are used in passthrough URL ## [3.6.1] - 2019-08-12 ### Fixed -* [#508](https://github.com/dadi/cdn/pull/508): bypass JS processing when no transformation parameters supplied +- [#508](https://github.com/dadi/cdn/pull/508): bypass JS processing when no transformation parameters supplied ## [3.6.0] - 2019-05-15 ### Changed -* [#505](https://github.com/dadi/cdn/pull/505): serve original image untransformed when no manipulation parameters are defined +- [#505](https://github.com/dadi/cdn/pull/505): serve original image untransformed when no manipulation parameters are defined ## [3.5.0] - 2019-02-28 ### Changed -* [#253](https://github.com/dadi/cdn/issues/253): add LastModified header to items returned from cache -* [#479](https://github.com/dadi/cdn/issues/479): set content-length header after gzipping -* Update dependencies: mime, concat-stream +- [#253](https://github.com/dadi/cdn/issues/253): add LastModified header to items returned from cache +- [#479](https://github.com/dadi/cdn/issues/479): set content-length header after gzipping +- Update dependencies: mime, concat-stream ## [3.4.5] - 2018-12-17 ### Fixes -* [#472](https://github.com/dadi/cdn/issues/472): remove dependency incompatible with ARM64 architecture +- [#472](https://github.com/dadi/cdn/issues/472): remove dependency incompatible with ARM64 architecture ## [3.4.4] - 2018-11-22 -* Removes invalid configuration samples +- Removes invalid configuration samples ## [3.4.3] - 2018-11-22 ### Changed -* [#449](https://github.com/dadi/cdn/issues/449): ensure responses for all status codes are allowed to complete before taking action -* Remove the default behaviour of responding with progressize JPEGs -* Handle requests for gzip encoding better, checking for "gzip" anywhere in the Accept-Encoding header +- [#449](https://github.com/dadi/cdn/issues/449): ensure responses for all status codes are allowed to complete before taking action +- Remove the default behaviour of responding with progressize JPEGs +- Handle requests for gzip encoding better, checking for "gzip" anywhere in the Accept-Encoding header ## [3.4.2] - 2018-11-15 ### Changed -* Remove defaults for authentication and token signing credentials, requires user to set them explicitly -* Accept full configuration block at internal mgmt endpoints +- Remove defaults for authentication and token signing credentials, requires user to set them explicitly +- Accept full configuration block at internal mgmt endpoints ## [3.4.0] - 2018-11-09 ### Added -* [#378](https://github.com/dadi/cdn/issues/378): support progressive JPEGs - add `?progressive=true` to a URL -* [#447](https://github.com/dadi/cdn/issues/447): use a base URL for status endpoint checks, configure with `publicUrl` +- [#378](https://github.com/dadi/cdn/issues/378): support progressive JPEGs - add `?progressive=true` to a URL +- [#447](https://github.com/dadi/cdn/issues/447): use a base URL for status endpoint checks, configure with `publicUrl` ### Changed -* [#437](https://github.com/dadi/cdn/issues/437): allow override of local image directory in multi-domain -* [#451](https://github.com/dadi/cdn/issues/451): replace exif-reader package with a forked+patched version -* 
Add support for Node.js 10 +- [#437](https://github.com/dadi/cdn/issues/437): allow override of local image directory in multi-domain +- [#451](https://github.com/dadi/cdn/issues/451): replace exif-reader package with a forked+patched version +- Add support for Node.js 10 ## [3.3.0] - 2018-10-24 ### Added -* [#398](https://github.com/dadi/cdn/issues/398): add 'Vary: Accept-Encoding' header to responses -* [#439](https://github.com/dadi/cdn/issues/439): reload domain configs on directory changes - -### Changed +- [#398](https://github.com/dadi/cdn/issues/398): add 'Vary: Accept-Encoding' header to responses +- [#439](https://github.com/dadi/cdn/issues/439): reload domain configs on directory changes -* [#351](https://github.com/dadi/cdn/issues/351): add `assets.remote.allowFullURL` configuration parameter -* [#406](https://github.com/dadi/cdn/issues/406): return JSON response when URL is incomplete -* [#434](https://github.com/dadi/cdn/issues/434): return file size attributes for the image pre and post transforms -* Sharp image dependency updated to 0.21.0 +### Changed +- [#351](https://github.com/dadi/cdn/issues/351): add `assets.remote.allowFullURL` configuration parameter +- [#406](https://github.com/dadi/cdn/issues/406): return JSON response when URL is incomplete +- [#434](https://github.com/dadi/cdn/issues/434): return file size attributes for the image pre and post transforms +- Sharp image dependency updated to 0.21.0 ## [3.2.2] - 2018-10-01 ### Changed -* [#424](https://github.com/dadi/cdn/issues/424): PNG compression honours quality parameter setting, mapping the `quality` parameter inversely to a compression level between 1 and 9 -* [#431](https://github.com/dadi/cdn/pull/431): don't assume JPG extension when no extension is supplied - +- [#424](https://github.com/dadi/cdn/issues/424): PNG compression honours quality parameter setting, mapping the `quality` parameter inversely to a compression level between 1 and 9 +- [#431](https://github.com/dadi/cdn/pull/431): don't assume JPG extension when no extension is supplied ## [3.2.1] - 2018-08-22 ### Changed -* [#412](https://github.com/dadi/cdn/issues/412): modify regex for "CSS" to search from beginning of URL - +- [#412](https://github.com/dadi/cdn/issues/412): modify regex for "CSS" to search from beginning of URL ## [3.2.0] - 2018-08-01 ### Added -* [#400](https://github.com/dadi/cdn/pull/400): support for conditional formats -* [#405](https://github.com/dadi/cdn/pull/405): support for default files -* With #405, the `/` route doesn't respond with a plain text 200 response. In order to check that CDN is online, send a request to `/hello` - +- [#400](https://github.com/dadi/cdn/pull/400): support for conditional formats +- [#405](https://github.com/dadi/cdn/pull/405): support for default files +- With #405, the `/` route doesn't respond with a plain text 200 response. In order to check that CDN is online, send a request to `/hello` ## [3.1.1] - 2018-07-11 ### Changed -* removed package lock file to allow latest dependencies -* updated dependencies +- removed package lock file to allow latest dependencies +- updated dependencies ## [3.1.0] - 2018-07-04 ### Changed -* [#394](https://github.com/dadi/cdn/pull/394): performance improvements. +- [#394](https://github.com/dadi/cdn/pull/394): performance improvements. 
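> Editorial note: the 3.2.2 entry above describes mapping the `quality` URL parameter inversely onto PNG compression levels 1–9. A minimal sketch of that kind of mapping follows; the exact rounding and default quality CDN uses are not shown in this diff, so treat the numbers as assumptions.

```js
// Sketch of an inverse quality-to-compression mapping (see the 3.2.2 entry
// above): PNG compression levels run from 1 (lightest) to 9 (heaviest), so a
// higher quality value should produce a lower level. Rounding is assumed.
function pngCompressionLevel(quality = 75) {
  const clamped = Math.min(Math.max(quality, 1), 100)
  const level = Math.round(((100 - clamped) / 100) * 9)

  return Math.min(9, Math.max(1, level))
}

console.log(pngCompressionLevel(100)) // 1 – lightest compression
console.log(pngCompressionLevel(10)) // 8 – close to maximum compression
```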
## [3.0.5] - 2018-06-27 ### Changed -* work queue added, ensures if multiple requests are made for the same resource _before_ the first one has finished processing, remaining requests wait for the result of the first one _instead of requesting a new computation each time_. When the processing for the first request finishes, all waiting requests are served and the request is removed from the work queue. +- work queue added, ensures if multiple requests are made for the same resource _before_ the first one has finished processing, remaining requests wait for the result of the first one _instead of requesting a new computation each time_. When the processing for the first request finishes, all waiting requests are served and the request is removed from the work queue. ## [3.0.4] - 2018-06-22 ### Fixed -* [#388](https://github.com/dadi/cdn/pull/388): fix issue where not all chunks from a remote HTTP call were passed to the calling function. +- [#388](https://github.com/dadi/cdn/pull/388): fix issue where not all chunks from a remote HTTP call were passed to the calling function. ## [3.0.3] - 2018-06-06 ### Changed -* [#372](https://github.com/dadi/cdn/pull/372): add proper support for range requests, enabling seekable content such as audio and video to be served correctly. +- [#372](https://github.com/dadi/cdn/pull/372): add proper support for range requests, enabling seekable content such as audio and video to be served correctly. ## [3.0.2] - 2018-06-04 ### Changed -* changed `.npmignore` to correctly exclude any files and directories used for development purposes only. +- changed `.npmignore` to correctly exclude any files and directories used for development purposes only. ## [3.0.1] - 2018-06-04 ### Changed -* [#369](https://github.com/dadi/cdn/pull/369): fix issue where `devicePixelRatio` was ignored when a resize style other than `crop` was used; change default resize style to `aspectfill` when `width`, `height` and `gravity` are supplied. +- [#369](https://github.com/dadi/cdn/pull/369): fix issue where `devicePixelRatio` was ignored when a resize style other than `crop` was used; change default resize style to `aspectfill` when `width`, `height` and `gravity` are supplied. ## [3.0.0] - 2018-05-21 @@ -155,55 +158,55 @@ Full public release of Release Candidate 4. ### Breaking changes -* The file upload function in CDN was recently removed. We came to the conclusion that CDN should remain a delivery tool - how a user gets their assets to their storage location for CDN to serve should be up to them. There are a great number of ways to do that, including via DADI API (see https://docs.dadi.tech/api/latest). +- The file upload function in CDN was recently removed. We came to the conclusion that CDN should remain a delivery tool - how a user gets their assets to their storage location for CDN to serve should be up to them. There are a great number of ways to do that, including via DADI API (see https://docs.dadi.tech/api/latest). 
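> Editorial note: the work queue described in the 3.0.5 entry above is essentially request coalescing — concurrent requests for the same resource share the result of the first computation. A minimal sketch of that pattern, with names invented for the example rather than taken from CDN's code, might look like this:

```js
// Minimal request-coalescing sketch (see the 3.0.5 entry above). Concurrent
// calls for the same key share one in-flight promise; the entry is removed
// from the queue once all waiting requests have been served.
const inFlight = new Map()

function getOrProcess(key, process) {
  if (inFlight.has(key)) {
    return inFlight.get(key)
  }

  const job = Promise.resolve()
    .then(() => process(key))
    .finally(() => inFlight.delete(key))

  inFlight.set(key, job)

  return job
}

// Both calls below resolve from a single invocation of the expensive function.
const expensive = key =>
  new Promise(resolve => setTimeout(() => resolve(key.toUpperCase()), 100))

getOrProcess('/92875.jpg?width=400', expensive).then(console.log)
getOrProcess('/92875.jpg?width=400', expensive).then(console.log)
```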
## [3.0.0-RC4] - 2018-05-16 ### Added -* [#346](https://github.com/dadi/cdn/pull/346): add basic GIF support -* [#356](https://github.com/dadi/cdn/pull/356): add Digital Ocean Spaces support -* [#360](https://github.com/dadi/cdn/pull/360) and [#363](https://github.com/dadi/cdn/pull/363): cache 404 responses using the `caching.cache404` config property +- [#346](https://github.com/dadi/cdn/pull/346): add basic GIF support +- [#356](https://github.com/dadi/cdn/pull/356): add Digital Ocean Spaces support +- [#360](https://github.com/dadi/cdn/pull/360) and [#363](https://github.com/dadi/cdn/pull/363): cache 404 responses using the `caching.cache404` config property ### Changed -* [#347](https://github.com/dadi/cdn/pull/347): gracefully handle case where domains directory does not exist -* [#354](https://github.com/dadi/cdn/pull/354): include all image options in cache key -* [#355](https://github.com/dadi/cdn/pull/355): return fallback image when remote server returns 404 -* [#357](https://github.com/dadi/cdn/pull/357): make config properties overridable at domain level -* [#358](https://github.com/dadi/cdn/pull/358): update `@dadi/cache` to version 2.0.0 -* [#359](https://github.com/dadi/cdn/pull/359): update `supertest` to version 3.1.0 -* [#365](https://github.com/dadi/cdn/pull/365): update `babel-preset-minify` to version 0.4.3 +- [#347](https://github.com/dadi/cdn/pull/347): gracefully handle case where domains directory does not exist +- [#354](https://github.com/dadi/cdn/pull/354): include all image options in cache key +- [#355](https://github.com/dadi/cdn/pull/355): return fallback image when remote server returns 404 +- [#357](https://github.com/dadi/cdn/pull/357): make config properties overridable at domain level +- [#358](https://github.com/dadi/cdn/pull/358): update `@dadi/cache` to version 2.0.0 +- [#359](https://github.com/dadi/cdn/pull/359): update `supertest` to version 3.1.0 +- [#365](https://github.com/dadi/cdn/pull/365): update `babel-preset-minify` to version 0.4.3 ## [3.0.0-RC3] - 2018-05-08 ### Changed -* Allow `remote.enabled` to be overridden at domain level. +- Allow `remote.enabled` to be overridden at domain level. 
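> Editorial note: several of the RC4 changes above (#354 in particular, and #326 under RC1 below) revolve around what goes into the cache key. The sketch below shows one way a key could be derived from the domain, the path and the full set of image options; the hashing and delimiter choices are assumptions made for the example.

```js
// Illustrative cache key derivation (see #354 above and #326 below): the
// domain and every image option contribute to the key, so the same path with
// different options or on a different domain caches separately. The exact
// key format CDN uses is not shown in this diff.
const crypto = require('crypto')

function buildCacheKey(domain, path, imageOptions = {}) {
  const options = Object.keys(imageOptions)
    .sort()
    .map(name => `${name}=${imageOptions[name]}`)
    .join('&')

  return crypto
    .createHash('sha1')
    .update([domain, path, options].join('|'))
    .digest('hex')
}

console.log(buildCacheKey('example.com', '/92875.jpg', {width: 400, quality: 75}))
```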
## [3.0.0-RC2] - 2018-05-02 ### Changed -* [#345](https://github.com/dadi/cdn/issues/345): stop requiring the existence of the domains directory if multi-domain is not enabled and handle it gracefully with an informative error message if it is enabled and the directory doesn't exist +- [#345](https://github.com/dadi/cdn/issues/345): stop requiring the existence of the domains directory if multi-domain is not enabled and handle it gracefully with an informative error message if it is enabled and the directory doesn't exist ## [3.0.0-RC1] - 2018-04-24 ### Added -* [#254](https://github.com/dadi/cdn/issues/313): ability to define full remote URLs at recipe level -* [#313](https://github.com/dadi/cdn/issues/313): add config export -* [#314](https://github.com/dadi/cdn/issues/314): allow controller plugins to set X-Cache header -* [#326](https://github.com/dadi/cdn/pull/326): use domain as part of cache key -* [#327](https://github.com/dadi/cdn/pull/327): return 404 if multi-domain is enabled and a request is made for a domain that is not configured -* [#330](https://github.com/dadi/cdn/issues/331): add file monitors to domain-specific workspace directories -* [#331](https://github.com/dadi/cdn/issues/331): make `/api/recipes` and `/api/routes` endpoints work with multiple domains -* [#336](https://github.com/dadi/cdn/pull/336): flush cache by domain +- [#254](https://github.com/dadi/cdn/issues/313): ability to define full remote URLs at recipe level +- [#313](https://github.com/dadi/cdn/issues/313): add config export +- [#314](https://github.com/dadi/cdn/issues/314): allow controller plugins to set X-Cache header +- [#326](https://github.com/dadi/cdn/pull/326): use domain as part of cache key +- [#327](https://github.com/dadi/cdn/pull/327): return 404 if multi-domain is enabled and a request is made for a domain that is not configured +- [#330](https://github.com/dadi/cdn/issues/331): add file monitors to domain-specific workspace directories +- [#331](https://github.com/dadi/cdn/issues/331): make `/api/recipes` and `/api/routes` endpoints work with multiple domains +- [#336](https://github.com/dadi/cdn/pull/336): flush cache by domain ### Changed -* [#324](https://github.com/dadi/cdn/issues/324): modify cache flush endpoint to match other products -* [#329](https://github.com/dadi/cdn/pull/329): remove upload support +- [#324](https://github.com/dadi/cdn/issues/324): modify cache flush endpoint to match other products +- [#329](https://github.com/dadi/cdn/pull/329): remove upload support ## [2.0.0] - 2018-03-13 @@ -211,49 +214,49 @@ Full public release of Release Candidate 4. [Full release notes](https://github.com/dadi/cdn/releases/tag/v2.0.0) -* Plugin support -* On-demand JavaScript transpiling (experimental) -* Support for any type of asset -* [#259](https://github.com/dadi/cdn/issues/259) WebP image support -* Simplified paths for non-image assets +- Plugin support +- On-demand JavaScript transpiling (experimental) +- Support for any type of asset +- [#259](https://github.com/dadi/cdn/issues/259) WebP image support +- Simplified paths for non-image assets ### Changed -* [#255](https://github.com/dadi/cdn/issues/255): default value for the `resizeStyle` property is now `aspectfit`, except when an explicit ratio is defined (i.e. 
`width` and `height` or `ratio` are defined) -* [#282](https://github.com/dadi/cdn/issues/282): deliver the fallback image even when crop is present -* [#283](https://github.com/dadi/cdn/issues/283): use correct dimensions when original or calculated size is above the configured security limit -* [#291](https://github.com/dadi/cdn/issues/291): cache JSON response of images, in the same way as the actual images are -* refactor parts of the code base to use ES6 features -* fix an issue where the `gravity` URL parameter was not applied correctly -* fix an issue whereby it was not possible to minify JavaScript files that contain ES6 code -* begin removal of Underscore.js dependency +- [#255](https://github.com/dadi/cdn/issues/255): default value for the `resizeStyle` property is now `aspectfit`, except when an explicit ratio is defined (i.e. `width` and `height` or `ratio` are defined) +- [#282](https://github.com/dadi/cdn/issues/282): deliver the fallback image even when crop is present +- [#283](https://github.com/dadi/cdn/issues/283): use correct dimensions when original or calculated size is above the configured security limit +- [#291](https://github.com/dadi/cdn/issues/291): cache JSON response of images, in the same way as the actual images are +- refactor parts of the code base to use ES6 features +- fix an issue where the `gravity` URL parameter was not applied correctly +- fix an issue whereby it was not possible to minify JavaScript files that contain ES6 code +- begin removal of Underscore.js dependency ## [1.13.3] - 2017-11-02 ### Changed -* [#276](https://github.com/dadi/cdn/issues/276): ensure images can be processed with no sharpening +- [#276](https://github.com/dadi/cdn/issues/276): ensure images can be processed with no sharpening ## [1.13.2] - 2017-10-25 ### Changed -* fix an issue where the aspect ratio was not respected when maxWidth/maxHeight resizes were being made +- fix an issue where the aspect ratio was not respected when maxWidth/maxHeight resizes were being made ## [1.13.1] - 2017-10-21 ### Changed -* [#260](https://github.com/dadi/cdn/pulls/260): update [finalhandler](https://www.npmjs.com/package/finalhandler) to version 1.1.0 -* [#264](https://github.com/dadi/cdn/pulls/264): update [request](https://www.npmjs.com/package/request) to version 2.83.0 -* [#267](https://github.com/dadi/cdn/pulls/267): make options from recipe take precedence in Image handler -* [#272](https://github.com/dadi/cdn/pulls/272): update [should](https://www.npmjs.com/package/should) to version 13.1.2 +- [#260](https://github.com/dadi/cdn/pulls/260): update [finalhandler](https://www.npmjs.com/package/finalhandler) to version 1.1.0 +- [#264](https://github.com/dadi/cdn/pulls/264): update [request](https://www.npmjs.com/package/request) to version 2.83.0 +- [#267](https://github.com/dadi/cdn/pulls/267): make options from recipe take precedence in Image handler +- [#272](https://github.com/dadi/cdn/pulls/272): update [should](https://www.npmjs.com/package/should) to version 13.1.2 ## [1.13.0] - 2017-10-20 ### Changed -* [#270](https://github.com/dadi/cdn/issues/270): cropping modifications to make it behave more intuitively. DevicePixelRatio is now respected, along with distorting images by providing both width & height. These changes only affect resize style `crop`. See the [documentation](https://docs.dadi.tech/#cdn) for more information. +- [#270](https://github.com/dadi/cdn/issues/270): cropping modifications to make it behave more intuitively. 
DevicePixelRatio is now respected, along with distorting images by providing both width & height. These changes only affect resize style `crop`. See the [documentation](https://docs.dadi.tech/#cdn) for more information. ## [1.12.0] - 2017-09-01 @@ -268,44 +271,46 @@ Other than improved performance, Sharp offers us a smoother transition into addi [#243](https://github.com/dadi/cdn/issues/243): remove restriction on configuration file names [#247](https://github.com/dadi/cdn/issues/247): respond with error when loading from a URL returns no image data - ## [1.11.1] - 2017-03-22 ### Changed -* Remote image requests that followed a redirect sometimes return a redirect header that is a path only, without protocol and hostname. Updated the `wget-improved` dependency to handle this case + +- Remote image requests that followed a redirect sometimes return a redirect header that is a path only, without protocol and hostname. Updated the `wget-improved` dependency to handle this case ## [1.11.0] - 2017-03-21 ### Added -* [#209](https://github.com/dadi/cdn/issues/209): Add post install script to copy a sample development configuration file to the application root +- [#209](https://github.com/dadi/cdn/issues/209): Add post install script to copy a sample development configuration file to the application root ### Changed -* [#218](https://github.com/dadi/cdn/issues/218): Set a default file extension of JPG for remote image requests that don't include an extension -* [#223](https://github.com/dadi/cdn/issues/223): Ensure that querystring parameters on remote URLs are retained and passed to the remote request -* Return 403 errors from remote requests ([c31a980](https://github.com/dadi/cdn/pull/229/commits/c31a98061dc0b5ea54a8ba8cf3163f9d9b8ca7c0)) + +- [#218](https://github.com/dadi/cdn/issues/218): Set a default file extension of JPG for remote image requests that don't include an extension +- [#223](https://github.com/dadi/cdn/issues/223): Ensure that querystring parameters on remote URLs are retained and passed to the remote request +- Return 403 errors from remote requests ([c31a980](https://github.com/dadi/cdn/pull/229/commits/c31a98061dc0b5ea54a8ba8cf3163f9d9b8ca7c0)) ## [1.10.3] - 2017-03-10 ### Changed -* package.json to reduce vulnerabilities ([728d4ea3](https://github.com/dadi/cdn/commit/728d4ea3)) +- package.json to reduce vulnerabilities ([728d4ea3](https://github.com/dadi/cdn/commit/728d4ea3)) ## [1.10.2] - 2017-03-07 ### Changed -* return file with extension when url is extension-less ([5d5774c1](https://github.com/dadi/cdn/commit/5d5774c1)) +- return file with extension when url is extension-less ([5d5774c1](https://github.com/dadi/cdn/commit/5d5774c1)) ## [1.10.1] - 2017-03-05 ### Changed -* [#216](https://github.com/dadi/cdn/issues/216): remove sharpening for PNG format ([ebc87e33](https://github.com/dadi/cdn/commit/ebc87e33)) +- [#216](https://github.com/dadi/cdn/issues/216): remove sharpening for PNG format ([ebc87e33](https://github.com/dadi/cdn/commit/ebc87e33)) ## [1.10.0] - 2017-02-17 ### Added + - [#211](https://github.com/dadi/cdn/pull/211): run in cluster mode by default when in production ([6902d119](https://github.com/dadi/cdn/commit/6902d119)) ## [1.9.0] - 2017-02-17 @@ -322,49 +327,57 @@ Other than improved performance, Sharp offers us a smoother transition into addi ## [1.8.2] - 2017-02-01 ### Changed + - ensure uploaded files have safe filenames ([6b9fea42](https://github.com/dadi/cdn/commit/6b9fea42)) ## [1.8.1] - 2017-01-24 ### Changed + - load js files and json files in 
workspace ([ba5b4a92](https://github.com/dadi/cdn/commit/ba5b4a92)) ## [1.8.0] - 2017-01-18 ### Added + - SSL handling improvements ([2ab581e0](https://github.com/dadi/cdn/commit/2ab581e0)) - add redirectPort to config ([867f85e5](https://github.com/dadi/cdn/commit/867f85e5)) ## [1.7.1] - 2017-01-07 ### Changed -* [#84](https://github.com/dadi/cdn/issues/184): Fix bug where the first part of the path was interpreted as a recipe/route/processor + +- [#84](https://github.com/dadi/cdn/issues/184): Fix bug where the first part of the path was interpreted as a recipe/route/processor ## [1.7.0] - 2017-01-05 ### Added -* [#130](https://github.com/dadi/cdn/issues/130): Add image upload support, allowing configuration of CDN to accept image uploads. See documentation at http://docs.dadi.tech/cdn/concepts/upload -* [#151](https://github.com/dadi/cdn/issues/151): Add external image support. See documentation at http://docs.dadi.tech/cdn/ -* [#153](https://github.com/dadi/cdn/issues/153): CDN can be configured to respond to the route `/robots.txt`. Specify the path to a robots.txt file in the configuration file: + +- [#130](https://github.com/dadi/cdn/issues/130): Add image upload support, allowing configuration of CDN to accept image uploads. See documentation at http://docs.dadi.tech/cdn/concepts/upload +- [#151](https://github.com/dadi/cdn/issues/151): Add external image support. See documentation at http://docs.dadi.tech/cdn/ +- [#153](https://github.com/dadi/cdn/issues/153): CDN can be configured to respond to the route `/robots.txt`. Specify the path to a robots.txt file in the configuration file: ```json "robots": "path/to/robots.txt" ``` ### Changed -* [#155](https://github.com/dadi/cdn/issues/155): [@dadi/cache](http://www.npmjs.org/@dadi/cache) module now used in place of custom caching -* [#160](https://github.com/dadi/cdn/issues/160): Fix: image is now returned even if no query is specified -* [#177](https://github.com/dadi/cdn/issues/177): Fix compression when changing formats from PNG to JPG -* [#181](https://github.com/dadi/cdn/issues/181): Removed node-canvas dependency, which was only used for determining the primary colour of an image. This is now handled by [node-vibrant](https://github.com/akfish/node-vibrant). Removing node-canvas simplifies the install process. If interested, you can compare results from the new module with color-thief's demo page at http://lokeshdhakar.com/projects/color-thief/. -* [#182](https://github.com/dadi/cdn/issues/182): Fix crash when caching is enabled and a JSON response is requested (e.g. `/test.jpg?format=json`). -* Modified package dependencies to include the `lwip` dependency using the same identifying string as used by `smartcrop-lwip`. This fixes the problem where NPM treated the two dependencies as separate and compiled them both when installing, extending the installation process. 
-* validation added to route and recipe names, to ensure they are 5 or more characters and only a mix of letters, dashes and underscores -* creating a Recipe by sending a POST request must now be sent to `/api/recipes`, not `/api/recipes/new` -* replaced Bluebird Promises with native Promises -* removed Redis dependencies, as these are now handled in @dadi/cache + +- [#155](https://github.com/dadi/cdn/issues/155): [@dadi/cache](http://www.npmjs.org/@dadi/cache) module now used in place of custom caching +- [#160](https://github.com/dadi/cdn/issues/160): Fix: image is now returned even if no query is specified +- [#177](https://github.com/dadi/cdn/issues/177): Fix compression when changing formats from PNG to JPG +- [#181](https://github.com/dadi/cdn/issues/181): Removed node-canvas dependency, which was only used for determining the primary colour of an image. This is now handled by [node-vibrant](https://github.com/akfish/node-vibrant). Removing node-canvas simplifies the install process. If interested, you can compare results from the new module with color-thief's demo page at http://lokeshdhakar.com/projects/color-thief/. +- [#182](https://github.com/dadi/cdn/issues/182): Fix crash when caching is enabled and a JSON response is requested (e.g. `/test.jpg?format=json`). +- Modified package dependencies to include the `lwip` dependency using the same identifying string as used by `smartcrop-lwip`. This fixes the problem where NPM treated the two dependencies as separate and compiled them both when installing, extending the installation process. +- validation added to route and recipe names, to ensure they are 5 or more characters and only a mix of letters, dashes and underscores +- creating a Recipe by sending a POST request must now be sent to `/api/recipes`, not `/api/recipes/new` +- replaced Bluebird Promises with native Promises +- removed Redis dependencies, as these are now handled in @dadi/cache ## [1.6.2] - 2016-10-22 + ### Changed + When specifying only two crop coordinates, the crop rectangle wasn't being correctly set. Using v2 of the request format, cropped images should be requested as follows: @@ -372,9 +385,11 @@ Using v2 of the request format, cropped images should be requested as follows: **Format of parameters:** `?resize=crop&crop=top,left,bottom,right` 1. specifying the full crop rectangle: http://cdn.example.com/images/taylor_swift.jpg?resize=crop&crop=0,225,312,567 - * the image is not resized; the resulting image will be 312px x 342px + +- the image is not resized; the resulting image will be 312px x 342px + 2. specifying the top left corner of the crop rectangle: http://cdn.example.com/images/taylor_swift.jpg?resize=crop&crop=0,225 - * the size of the crop rectangle is determined by the size of the image; if the original image is 800px x 600px, the crop rectangle and resulting image size will be 575px x 600px -Adding `width=400` will cause CDN to resize the image after cropping. +- the size of the crop rectangle is determined by the size of the image; if the original image is 800px x 600px, the crop rectangle and resulting image size will be 575px x 600px +Adding `width=400` will cause CDN to resize the image after cropping. 
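> Editorial note: to make the 1.6.2 crop arithmetic above concrete, the helper below reproduces the two worked examples — a full `top,left,bottom,right` rectangle, and a two-coordinate crop that extends to the edges of the source image. It is a sketch of the documented behaviour, not CDN's implementation.

```js
// Crop rectangle arithmetic from the 1.6.2 entry above.
// crop=top,left[,bottom,right]; missing bottom/right default to the source
// image's edges. Source dimensions are only needed in the two-coordinate case.
function cropSize([top, left, bottom, right], sourceWidth, sourceHeight) {
  const b = bottom === undefined ? sourceHeight : bottom
  const r = right === undefined ? sourceWidth : right

  return {width: r - left, height: b - top}
}

console.log(cropSize([0, 225, 312, 567])) // { width: 342, height: 312 }
console.log(cropSize([0, 225], 800, 600)) // { width: 575, height: 600 }
```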
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a99024aa..ed09223f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -6,12 +6,12 @@ We'd love for you to contribute to our source code and to make DADI CDN even bet Here are the guidelines we'd like you to follow: - - [Question or Problem?](#question) - - [Issues and Bugs](#issue) - - [Feature Requests](#feature) - - [Submission Guidelines](#submit) - - [Coding Rules](#rules) - - [Git Commit Guidelines](#commit) +- [Question or Problem?](#question) +- [Issues and Bugs](#issue) +- [Feature Requests](#feature) +- [Submission Guidelines](#submit) +- [Coding Rules](#rules) +- [Git Commit Guidelines](#commit) ## Got a Question or Problem? @@ -21,6 +21,7 @@ If the documentation doesn't answer your problem please feel free to email the DADI team directly on: team@dadi.tech ## Found an Issue? + If you find a bug in the source code or a mistake in the documentation, you can help us by submitting an issue to our [GitHub Repository][github]. But we'd love it if you submitted a Pull Request with a fix instead! @@ -28,68 +29,72 @@ submitted a Pull Request with a fix instead! **Please see the Submission Guidelines below**. ## Want a Feature? + You can request a new feature by submitting an issue to our [GitHub][issues] issue tracker. If you would like to implement a new feature then consider what kind of change it is: -* **Major Changes** that you wish to contribute to the project should be added as -a Feature Request in the [GitHub][issues] issue tracker. This will get the conversation -started. -* **Small Changes** can be crafted and submitted to the [GitHub Repository][github] as a Pull Request. +- **Major Changes** that you wish to contribute to the project should be added as + a Feature Request in the [GitHub][issues] issue tracker. This will get the conversation + started. +- **Small Changes** can be crafted and submitted to the [GitHub Repository][github] as a Pull Request. ## Submission Guidelines ### Submitting an Issue + Before you submit your issue [search the archive][issues], maybe your question was already answered. If your issue appears to be a bug, and hasn't been reported, open a new issue. Help us to maximize the effort we can spend fixing issues and adding new -features, by not reporting duplicate issues. Providing the following information will increase the +features, by not reporting duplicate issues. Providing the following information will increase the chances of your issue being dealt with quickly: -* **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps -* **Motivation for or Use Case** - explain why this is a bug for you -* **DADI CDN Version** -* **Operating System** -* **Steps to Reproduce** - provide a set of steps to follow to reproduce the error. -* **Related Issues** - has a similar issue been reported before? -* **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be +- **Overview of the Issue** - if an error is being thrown a non-minified stack trace helps +- **Motivation for or Use Case** - explain why this is a bug for you +- **DADI CDN Version** +- **Operating System** +- **Steps to Reproduce** - provide a set of steps to follow to reproduce the error. +- **Related Issues** - has a similar issue been reported before? +- **Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might be causing the problem (e.g. 
a line of code or a commit) ### Submitting a Pull Request + Before you submit your pull request consider the following guidelines: -* Search [GitHub][pulls] for an open or closed Pull Request +- Search [GitHub][pulls] for an open or closed Pull Request that relates to your submission. You don't want to duplicate effort. -* Fork the original repository and clone your fork ([see this GitHub article](https://help.github.com/articles/fork-a-repo/)). -* Add the original repository as an upstream remote: `git remote add upstream https://github.com/dadi/cdn.git` - -* Make your changes in a new git branch. Name your branch using the format `topic/branch_name`. -Use `fix` for fixes and `feature` for features: - - ```shell - git checkout -b fix/my-fix-branch master - ``` - ```shell - git checkout -b feature/my-new-feature-branch master - ``` - -* Create your patch, **including appropriate test cases**. -* Follow our [Coding Rules](#rules). -* Run the full test suite using `npm test` and ensure that all tests pass. -* Commit your changes using a descriptive commit message that follows our +- Fork the original repository and clone your fork ([see this GitHub article](https://help.github.com/articles/fork-a-repo/)). +- Add the original repository as an upstream remote: `git remote add upstream https://github.com/dadi/cdn.git` + +- Make your changes in a new git branch. Name your branch using the format `topic/branch_name`. + Use `fix` for fixes and `feature` for features: + +```shell +git checkout -b fix/my-fix-branch master +``` + +```shell +git checkout -b feature/my-new-feature-branch master +``` + +- Create your patch, **including appropriate test cases**. +- Follow our [Coding Rules](#rules). +- Run the full test suite using `npm test` and ensure that all tests pass. +- Commit your changes using a descriptive commit message that follows our [commit message conventions](#commit-message-format) and passes our commit message presubmit hook. Adherence to the [commit message conventions](#commit-message-format) is required because release notes are automatically generated from these messages. -* Push your branch to GitHub: +- Push your branch to GitHub: ```shell git push origin fix/my-fix-branch ``` -* In GitHub, send a pull request to `dadi/cdn:master`. -* If we suggest changes then: - * Make the required updates. - * Re-run the full test suite to ensure tests are still passing. - * Commit your changes to your branch (e.g. `fix/my-fix-branch`). - * Push the changes to GitHub (this will update your Pull Request). +- In GitHub, send a pull request to `dadi/cdn:master`. +- If we suggest changes then: + - Make the required updates. + - Re-run the full test suite to ensure tests are still passing. + - Commit your changes to your branch (e.g. `fix/my-fix-branch`). + - Push the changes to GitHub (this will update your Pull Request). If the pull request gets too outdated we may ask you to rebase and force push to update the pull request: @@ -98,12 +103,12 @@ git rebase master -i git push origin fix/my-fix-branch -f ``` -*WARNING. Squashing or reverting commits and forced push thereafter may remove GitHub comments on code that were previously made by you and others in your commits.* +_WARNING. Squashing or reverting commits and forced push thereafter may remove GitHub comments on code that were previously made by you and others in your commits._ -* Documentation! Please add relevant documentation to the pull request. If this is a new feature then -please document it fully within the pull request. 
If you're making changes to an existing feature, please -give us a link to the existing [documentation][docs] along with your documentation changes. If you need -an example of excellent pull request documentation, have a look at the [effort put in here](https://github.com/dadi/api/pull/27). +- Documentation! Please add relevant documentation to the pull request. If this is a new feature then + please document it fully within the pull request. If you're making changes to an existing feature, please + give us a link to the existing [documentation][docs] along with your documentation changes. If you need + an example of excellent pull request documentation, have a look at the [effort put in here](https://github.com/dadi/api/pull/27). > That's it! Thank you for your contribution! @@ -111,37 +116,38 @@ an example of excellent pull request documentation, have a look at the [effort p After your pull request is merged, you can safely delete your branch and pull the changes from the main (upstream) repository: -* Delete the remote branch on GitHub either through the GitHub web UI or your local shell as follows: +- Delete the remote branch on GitHub either through the GitHub web UI or your local shell as follows: ```shell git push origin --delete my-fix-branch ``` -* Check out the master branch: +- Check out the master branch: ```shell git checkout master -f ``` -* Delete the local branch: +- Delete the local branch: ```shell git branch -D my-fix-branch ``` -* Update your master with the latest upstream version: +- Update your master with the latest upstream version: ```shell git pull --ff upstream master ``` ## Coding Rules + To ensure consistency throughout the source code, keep these rules in mind as you are working: -* Please use **two-space indentation**, as used in Node.JS itself. -* All features or bug fixes **must be tested** by one or more tests. Browse the [test -suite][tests] for examples. -* All public API methods **must be documented** with [JSDoc](http://usejsdoc.org/). +- Please use **two-space indentation**, as used in Node.JS itself. +- All features or bug fixes **must be tested** by one or more tests. Browse the [test + suite][tests] for examples. +- All public API methods **must be documented** with [JSDoc](http://usejsdoc.org/). ## Git Commit Guidelines @@ -153,7 +159,7 @@ Please don't include more than one change in each patch. If your commit message ### Commit Message Format -We have very precise rules over how our git commit messages can be formatted. This leads to **more readable messages** that are easy to follow when looking through the **project history**. We also use the git commit messages to **generate the change log**. +We have very precise rules over how our git commit messages can be formatted. This leads to **more readable messages** that are easy to follow when looking through the **project history**. We also use the git commit messages to **generate the change log**. The commit message format validation can be initialised by running `npm run init` from the root of the repository. This will add a symlink at `.git/hooks/commit-msg` which will be run every time you commit. @@ -163,7 +169,7 @@ Any line of the commit message cannot be longer 100 characters. This allows the #### Message Format -Each commit message consists of a **header**, a **body** and a **footer**. The header has a special format that includes a **type** and a **subject**: +Each commit message consists of a **header**, a **body** and a **footer**. 
The header has a special format that includes a **type** and a **subject**: ``` type: subject @@ -175,46 +181,49 @@ Close #yyy Ref #zzz ``` -* Use `Fix #xxx` when the commit fixes an open issue. -* Use `Close #xxx` when the commit closes an open pull request. -* Use `Ref #xxx` when referencing an issue or pull request that is already closed or should remain open. Examples include partial fixes and commits that add a test but not a fix. +- Use `Fix #xxx` when the commit fixes an open issue. +- Use `Close #xxx` when the commit closes an open pull request. +- Use `Ref #xxx` when referencing an issue or pull request that is already closed or should remain open. Examples include partial fixes and commits that add a test but not a fix. ### Reverting -If the commit reverts a previous commit, it should begin with `revert: `, followed by the header of the reverted commit. In the body it should say: `This reverts commit .`, where the hash is the SHA of the commit being reverted. + +If the commit reverts a previous commit, it should begin with `revert:`, followed by the header of the reverted commit. In the body it should say: `This reverts commit .`, where the hash is the SHA of the commit being reverted. ### Type + Must be one of the following: -* **feat**: A new feature -* **fix**: A bug fix -* **docs**: Documentation only changes -* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc) -* **refactor**: A code change that neither fixes a bug nor adds a feature -* **perf**: A code change that improves performance -* **test**: Adding missing tests -* **chore**: Changes to the build process or auxiliary tools and libraries such as documentation generation +- **feat**: A new feature +- **fix**: A bug fix +- **docs**: Documentation only changes +- **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc) +- **refactor**: A code change that neither fixes a bug nor adds a feature +- **perf**: A code change that improves performance +- **test**: Adding missing tests +- **chore**: Changes to the build process or auxiliary tools and libraries such as documentation generation ### Subject + The subject contains a succinct description of the change: -* use the imperative, present tense: "fix" not "fixed" nor "fixes" -* don't capitalize first letter -* no dot (.) at the end +- use the imperative, present tense: "fix" not "fixed" nor "fixes" +- don't capitalize first letter +- no dot (.) at the end ### Body + Just as in the **subject**, write your commit message in the imperative: "Fix bug" and not "Fixed bug" or "Fixes bug". This convention matches up with commit messages generated by commands like `git merge` and `git revert`. The body should include the motivation for the change and contrast this with previous behavior. ### Footer + The footer should contain any information about **Breaking Changes** and is also the place to reference GitHub issues that this commit **Closes**. **Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this. 
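> Editorial note: putting the conventions above together, a commit that follows the header/body/footer format might read as follows; `#xxx` is the same placeholder used elsewhere in this guide, not a real issue number.

```
fix: respect quality parameter for PNG output

Map the quality URL parameter inversely onto PNG compression levels so
that higher quality produces lighter compression, instead of always using
the default level.

Fix #xxx
```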
- [github]: https://github.com/dadi/cdn [issues]: https://github.com/dadi/cdn/issues [pulls]: https://github.com/dadi/cdn/pulls [tests]: https://github.com/dadi/cdn/tree/master/test [docs]: https://github.com/dadi/cdn/tree/docs/ - diff --git a/GPL.md b/GPL.md index 5f8b06fe..ae385a69 100644 --- a/GPL.md +++ b/GPL.md @@ -217,23 +217,23 @@ produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: -- a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. -- b) The work must carry prominent notices stating that it is - released under this License and any conditions added under - section 7. This requirement modifies the requirement in section 4 - to "keep intact all notices". -- c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. -- d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. +- a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. +- b) The work must carry prominent notices stating that it is + released under this License and any conditions added under + section 7. This requirement modifies the requirement in section 4 + to "keep intact all notices". +- c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. +- d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, @@ -252,42 +252,42 @@ sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: -- a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
-- b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the Corresponding - Source from a network server at no charge. -- c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. -- d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. -- e) Convey the object code using peer-to-peer transmission, - provided you inform other peers where the object code and - Corresponding Source of the work are being offered to the general - public at no charge under subsection 6d. +- a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. +- b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the Corresponding + Source from a network server at no charge. +- c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. +- d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. +- e) Convey the object code using peer-to-peer transmission, + provided you inform other peers where the object code and + Corresponding Source of the work are being offered to the general + public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be @@ -363,23 +363,23 @@ Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: -- a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or -- b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or -- c) Prohibiting misrepresentation of the origin of that material, - or requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or -- d) Limiting the use for publicity purposes of names of licensors - or authors of the material; or -- e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or -- f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions - of it) with contractual assumptions of liability to the recipient, - for any liability that these contractual assumptions directly - impose on those licensors and authors. +- a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or +- b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or +- c) Prohibiting misrepresentation of the origin of that material, + or requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or +- d) Limiting the use for publicity purposes of names of licensors + or authors of the material; or +- e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or +- f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions + of it) with contractual assumptions of liability to the recipient, + for any liability that these contractual assumptions directly + impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. 
If the Program as you diff --git a/README.md b/README.md index cfbdbd9d..8fc00eda 100644 --- a/README.md +++ b/README.md @@ -3,15 +3,13 @@ [![npm (scoped)](https://img.shields.io/npm/v/@dadi/cdn.svg?maxAge=10800&style=flat-square)](https://www.npmjs.com/package/@dadi/cdn) [![Coverage Status](https://coveralls.io/repos/github/dadi/cdn/badge.svg?branch=develop)](https://coveralls.io/github/dadi/cdn?branch=develop) [![Build Status](https://travis-ci.org/dadi/cdn.svg?branch=master)](https://travis-ci.org/dadi/cdn) -[![JavaScript Style Guide](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](http://standardjs.com/) - ## DADI CDN -* [Overview](#overview) -* [Requirements](#requirements) -* [Your First CDN Project](#your-first-cdn-project) -* [Links](#links) +- [Overview](#overview) +- [Requirements](#requirements) +- [Your First CDN Project](#your-first-cdn-project) +- [Links](#links) ## Overview @@ -25,7 +23,7 @@ CDN is part of DADI, a suite of components covering the full development stack, ## Requirements -* **[Node.js](https://www.nodejs.org/)** (supported versions: 6.9.2, 6.11.1, 8.9.4) +- **[Node.js](https://www.nodejs.org/)** (supported versions: 6.9.2, 6.11.1, 8.9.4) ## Your first CDN project @@ -35,7 +33,7 @@ Ensure you have the required dependencies installed. See the first sections in t ### Install CDN -All DADI platform microservices are available from [NPM](https://www.npmjs.com/). To add *CDN* to your project as a dependency: +All DADI platform microservices are available from [NPM](https://www.npmjs.com/). To add _CDN_ to your project as a dependency: ```bash $ cd my-app @@ -69,8 +67,8 @@ CDN requires a configuration file specific to the application environment. For e When CDN was installed, a development configuration file was created for you in a `config` folder at your application root. Full configuration documentation can be found at https://docs.dadi.cloud/cdn. - ### Run CDN as a service + To run your CDN application in the background as a service, install Forever and Forever Service: ```bash @@ -104,7 +102,7 @@ The sample configuration file defines a local filesystem source. The `path` prop } ``` -We'll use the above configuration for an example. With image files in the `images` directory we can make a request for one to view it in the browser: +We'll use the above configuration for an example. With image files in the `images` directory we can make a request for one to view it in the browser: ##### Images available @@ -115,14 +113,15 @@ drwxr-xr-x 4 root wheel 136 13 Mar 13:02 . drwxr-xr-x 4 root wheel 136 13 Mar 13:01 .. -rw-r--r-- 1 root wheel 9396 13 Mar 13:02 92875.jpg -rw-r--r-- 1 root wheel 4832710 13 Mar 13:02 92876.jpg -``` +``` ##### Browser request http://127.0.0.1:8001/92875.jpg ## Links -* [CDN Documentation](https://docs.dadi.cloud/cdn) + +- [CDN Documentation](https://docs.dadi.cloud/cdn) ## Licence @@ -149,7 +148,7 @@ them, to determine what licences are applicable.** DADI is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
The GNU General Public License (GPL) is available at diff --git a/config.js b/config.js index d06b1635..515a2195 100755 --- a/config.js +++ b/config.js @@ -62,32 +62,37 @@ const schema = { env: 'SSL_INTERMEDIATE_CERTIFICATE_PATH' }, sslIntermediateCertificatePaths: { - doc: 'The filenames of SSL intermediate certificates, overrides sslIntermediateCertificate (singular)', + doc: + 'The filenames of SSL intermediate certificates, overrides sslIntermediateCertificate (singular)', format: Array, default: [], env: 'SSL_INTERMEDIATE_CERTIFICATE_PATHS' }, enableHTTP2: { - doc: 'When `server.protocol = https` use http2 as the primary response method. Fallback to http1 for unsupported clients.', + doc: + 'When `server.protocol = https` use http2 as the primary response method. Fallback to http1 for unsupported clients.', format: Boolean, default: true } }, publicUrl: { host: { - doc: 'The host of the URL where the CDN instance can be publicly accessed at', + doc: + 'The host of the URL where the CDN instance can be publicly accessed at', format: '*', default: null, env: 'URL_HOST' }, port: { - doc: 'The port of the URL where the CDN instance can be publicly accessed at', + doc: + 'The port of the URL where the CDN instance can be publicly accessed at', format: '*', default: 80, env: 'URL_PORT' }, protocol: { - doc: 'The protocol of the URL where the CDN instance can be publicly accessed at', + doc: + 'The protocol of the URL where the CDN instance can be publicly accessed at', format: 'String', default: 'http', env: 'URL_PROTOCOL' @@ -121,7 +126,8 @@ const schema = { }, accessLog: { enabled: { - doc: "If true, HTTP access logging is enabled. The log file name is similar to the setting used for normal logging, with the addition of 'access'. For example `cdn.access.log`.", + doc: + "If true, HTTP access logging is enabled. The log file name is similar to the setting used for normal logging, with the addition of 'access'. For example `cdn.access.log`.", format: Boolean, default: true } @@ -186,36 +192,42 @@ const schema = { }, s3: { enabled: { - doc: 'If true, image files may be requested from Amazon S3 Buckets or Digital Ocean Spaces', + doc: + 'If true, image files may be requested from Amazon S3 Buckets or Digital Ocean Spaces', format: Boolean, default: false }, accessKey: { - doc: 'The access key used to connect to Amazon or Digital Ocean services for image files', + doc: + 'The access key used to connect to Amazon or Digital Ocean services for image files', format: String, default: '', env: 'AWS_S3_IMAGES_ACCESS_KEY' }, secretKey: { - doc: 'The secret used to connect to Amazon or Digital Ocean services for image files', + doc: + 'The secret used to connect to Amazon or Digital Ocean services for image files', format: String, default: '', env: 'AWS_S3_IMAGES_SECRET_KEY' }, bucketName: { - doc: 'The Amazon S3 Bucket or Digital Ocean Space that contains the image files', + doc: + 'The Amazon S3 Bucket or Digital Ocean Space that contains the image files', format: String, default: '', env: 'AWS_S3_IMAGES_BUCKET_NAME' }, region: { - doc: 'The Amazon S3 or Digital Ocean region the Bucket/Space is served from', + doc: + 'The Amazon S3 or Digital Ocean region the Bucket/Space is served from', format: String, default: '', env: 'AWS_S3_IMAGES_REGION' }, endpoint: { - doc: 'The endpoint used to access Digital Ocean Spaces. Not required for Amazon S3.', + doc: + 'The endpoint used to access Digital Ocean Spaces. 
Not required for Amazon S3.', format: String, default: '', env: 'AWS_S3_IMAGES_ENDPOINT' @@ -229,7 +241,8 @@ const schema = { allowDomainOverride: true }, path: { - doc: 'The remote host to request images from, for example http://media.example.com', + doc: + 'The remote host to request images from, for example http://media.example.com', format: String, default: '', allowDomainOverride: true @@ -259,36 +272,42 @@ const schema = { }, s3: { enabled: { - doc: 'If true, asset files may be requested from Amazon S3 Buckets or Digital Ocean Spaces', + doc: + 'If true, asset files may be requested from Amazon S3 Buckets or Digital Ocean Spaces', format: Boolean, default: false }, accessKey: { - doc: 'The access key used to connect to Amazon or Digital Ocean services for asset files', + doc: + 'The access key used to connect to Amazon or Digital Ocean services for asset files', format: String, default: '', env: 'AWS_S3_ASSETS_ACCESS_KEY' }, secretKey: { - doc: 'The secret used to connect to Amazon or Digital Ocean services for asset files', + doc: + 'The secret used to connect to Amazon or Digital Ocean services for asset files', format: String, default: '', env: 'AWS_S3_ASSETS_SECRET_KEY' }, bucketName: { - doc: 'The Amazon S3 Bucket or Digital Ocean Space that contains the asset files', + doc: + 'The Amazon S3 Bucket or Digital Ocean Space that contains the asset files', format: String, default: '', env: 'AWS_S3_ASSETS_BUCKET_NAME' }, region: { - doc: 'The Amazon S3 or Digital Ocean region the Bucket/Space is served from', + doc: + 'The Amazon S3 or Digital Ocean region the Bucket/Space is served from', format: String, default: '', env: 'AWS_S3_ASSETS_REGION' }, endpoint: { - doc: 'The endpoint used to access Digital Ocean Spaces. Not required for Amazon S3.', + doc: + 'The endpoint used to access Digital Ocean Spaces. Not required for Amazon S3.', format: String, default: '', env: 'AWS_S3_ASSETS_ENDPOINT' @@ -302,7 +321,8 @@ const schema = { allowDomainOverride: true }, path: { - doc: 'The remote host to request assets from, for example http://media.example.com', + doc: + 'The remote host to request assets from, for example http://media.example.com', format: String, default: '', allowDomainOverride: true @@ -391,13 +411,15 @@ const schema = { default: false }, port: { - doc: 'Accept connections on the specified port. A value of zero will assign a random port.', + doc: + 'Accept connections on the specified port. A value of zero will assign a random port.', format: Number, default: 8003, env: 'STATUS_PORT' }, routes: { - doc: 'An array of routes to test. Each route object must contain properties `route` and `expectedResponseTime`. Note, `expectedResponseTime` is seconds.', + doc: + 'An array of routes to test. Each route object must contain properties `route` and `expectedResponseTime`. 
Note, `expectedResponseTime` is seconds.', format: Array, default: [ { @@ -480,7 +502,8 @@ const schema = { } }, cluster: { - doc: 'If true, CDN runs in cluster mode, starting a worker for each CPU core', + doc: + 'If true, CDN runs in cluster mode, starting a worker for each CPU core', format: Boolean, default: true }, @@ -506,18 +529,20 @@ const schema = { }, headers: { useGzipCompression: { - doc: 'If true, uses gzip compression and adds a \'Content-Encoding:gzip\' header to the response.', + doc: + "If true, uses gzip compression and adds a 'Content-Encoding:gzip' header to the response.", format: Boolean, default: true, allowDomainOverride: true }, cacheControl: { - doc: 'A set of cache control headers based on specified mimetypes or paths', + doc: + 'A set of cache control headers based on specified mimetypes or paths', format: Object, default: { - 'default': 'public, max-age=3600', - 'paths': [], - 'mimetypes': [ + default: 'public, max-age=3600', + paths: [], + mimetypes: [ {'text/css': 'public, max-age=86400'}, {'text/javascript': 'public, max-age=86400'}, {'application/javascript': 'public, max-age=86400'} @@ -558,7 +583,8 @@ const schema = { }, remote: { url: { - doc: 'Remote URL to be used for geolocation. {key}, {secret} and {ip} will be replaced by the API key, secret and IP to locate, respectively', + doc: + 'Remote URL to be used for geolocation. {key}, {secret} and {ip} will be replaced by the API key, secret and IP to locate, respectively', format: String, default: '' }, @@ -583,7 +609,8 @@ const schema = { }, network: { url: { - doc: 'Remote URL to be used for network test service. {key}, {secret} and {ip} will be replaced by the API key, secret and IP to locate, respectively', + doc: + 'Remote URL to be used for network test service. 
{key}, {secret} and {ip} will be replaced by the API key, secret and IP to locate, respectively', format: String, default: '' }, @@ -625,7 +652,8 @@ const schema = { default: 'bicubic' }, centreSampling: { - doc: 'Whether to use *magick centre sampling convention instead of corner sampling', + doc: + 'Whether to use *magick centre sampling convention instead of corner sampling', format: Boolean, default: false } @@ -633,7 +661,8 @@ const schema = { }, experimental: { jsTranspiling: { - doc: 'Whether to enable experimental support for on-demand JavaScript transpiling', + doc: + 'Whether to enable experimental support for on-demand JavaScript transpiling', format: Boolean, default: false, env: 'JSTRANSPILING', @@ -661,47 +690,51 @@ const schema = { }, http: { followRedirects: { - doc: 'The number of redirects to follow when retrieving assets via HTTP requests', + doc: + 'The number of redirects to follow when retrieving assets via HTTP requests', format: Number, default: 10, allowDomainOverride: true } }, defaultFiles: { - doc: 'An array of filenames that can be used as fallback defaults when no path is specified', + doc: + 'An array of filenames that can be used as fallback defaults when no path is specified', format: Array, default: [] } } -const Config = function () { +const Config = function() { this.loadFile(this.configPath()) - this.watcher = chokidar.watch( - this.configPath(), - {usePolling: true} - ).on('all', (event, filePath) => { - this.loadFile(this.configPath()) - }) + this.watcher = chokidar + .watch(this.configPath(), {usePolling: true}) + .on('all', (event, filePath) => { + this.loadFile(this.configPath()) + }) this.domainSchema = {} this.createDomainSchema(schema, this.domainSchema) - let domainsDirectory = this.get('multiDomain.directory') + const domainsDirectory = this.get('multiDomain.directory') // Watch the domains directory for new & removed domain configurations. 
- this.domainsWatcher = chokidar.watch(domainsDirectory, { - awaitWriteFinish: true, - depth: 1, - usePolling: true - }).on('addDir', (event, filePath) => { - this.loadDomainConfigs() - }).on('unlinkDir', (event, filePath) => { - // Wait 3 sec for the delete to finish before rescanning - setTimeout(() => { + this.domainsWatcher = chokidar + .watch(domainsDirectory, { + awaitWriteFinish: true, + depth: 1, + usePolling: true + }) + .on('addDir', (event, filePath) => { this.loadDomainConfigs() - }, 3000) - }) + }) + .on('unlinkDir', (event, filePath) => { + // Wait 3 sec for the delete to finish before rescanning + setTimeout(() => { + this.loadDomainConfigs() + }, 3000) + }) } Config.prototype = convict(schema) @@ -712,8 +745,8 @@ Config.prototype = convict(schema) * * @return {String} */ -Config.prototype.configPath = function () { - let environment = this.get('env') +Config.prototype.configPath = function() { + const environment = this.get('env') return `./config/config.${environment}.json` } @@ -727,11 +760,11 @@ Config.prototype.configPath = function () { * @param {Object} target - variable to write the schema to * @param {Array} tail - helper variable for recursion */ -Config.prototype.createDomainSchema = function (schema, target, tail = []) { +Config.prototype.createDomainSchema = function(schema, target, tail = []) { if (!schema || typeof schema !== 'object') return if (schema.allowDomainOverride) { - let path = tail.join('.') + const path = tail.join('.') objectPath.set( target, @@ -745,11 +778,7 @@ Config.prototype.createDomainSchema = function (schema, target, tail = []) { } Object.keys(schema).forEach(key => { - this.createDomainSchema( - schema[key], - target, - tail.concat(key) - ) + this.createDomainSchema(schema[key], target, tail.concat(key)) }) } @@ -769,7 +798,7 @@ Config.prototype._get = Config.prototype.get * @param {String} domain - domain name * @return {Object} */ -Config.prototype.get = function (path, domain) { +Config.prototype.get = function(path, domain) { if ( domain === undefined || this.domainConfigs[domain] === undefined || @@ -781,7 +810,7 @@ Config.prototype.get = function (path, domain) { return this.domainConfigs[domain].get(path) } -Config.prototype.loadDomainConfig = function (domain, domainConfig) { +Config.prototype.loadDomainConfig = function(domain, domainConfig) { this.domainConfigs[domain] = convict(this.domainSchema) this.domainConfigs[domain].load(domainConfig) } @@ -792,24 +821,25 @@ Config.prototype.loadDomainConfig = function (domain, domainConfig) { * * @return {Object} */ -Config.prototype.loadDomainConfigs = function () { +Config.prototype.loadDomainConfigs = function() { if (!this.get('multiDomain.enabled')) { return {} } - let domainsDirectory = this.get('multiDomain.directory') - let configs = {} + const domainsDirectory = this.get('multiDomain.directory') + const configs = {} domainManager .scanDomains(domainsDirectory) - .getDomains().forEach(({domain, path: domainPath}) => { - let configPath = path.join( + .getDomains() + .forEach(({domain, path: domainPath}) => { + const configPath = path.join( domainPath, `config/config.${this.get('env')}.json` ) try { - let file = fs.statSync(configPath) + const file = fs.statSync(configPath) if (file.isFile()) { configs[domain] = convict(this.domainSchema) @@ -844,7 +874,7 @@ Config.prototype._set = Config.prototype.set * @param {Object} value * @param {String} domain */ -Config.prototype.set = function (path, value, domain) { +Config.prototype.set = function(path, value, domain) { if ( domain 
=== undefined || this.domainConfigs[domain] === undefined || diff --git a/dadi/lib/auth/index.js b/dadi/lib/auth/index.js index d7c02b90..d1842f47 100755 --- a/dadi/lib/auth/index.js +++ b/dadi/lib/auth/index.js @@ -5,7 +5,7 @@ const path = require('path') const config = require(path.join(__dirname, '/../../../config.js')) const help = require(path.join(__dirname, '/../help')) -function mustAuthenticate (requestUrl) { +function mustAuthenticate(requestUrl) { // Allow internal requests. if (requestUrl.indexOf('/_dadi') === 0) { return false @@ -16,8 +16,8 @@ function mustAuthenticate (requestUrl) { } // This attaches middleware to the passed in app instance -module.exports = function (router) { - let tokenRoute = '/token' +module.exports = function(router) { + const tokenRoute = '/token' // Authorize router.use((req, res, next) => { @@ -33,36 +33,39 @@ module.exports = function (router) { } // Strip token value out of request headers. - let parts = req.headers.authorization.split(' ') + const parts = req.headers.authorization.split(' ') // Headers should be `Authorization: Bearer <%=tokenvalue%>` - let token = (parts.length === 2 && /^Bearer$/i.test(parts[0])) - ? parts[1] - : null + const token = + parts.length === 2 && /^Bearer$/i.test(parts[0]) ? parts[1] : null if (!token) { return fail('NoToken', res) } - jwt.verify(token, config.get('auth.privateKey', req.__domain), (err, decoded) => { - if (err || (decoded.domain !== req.__domain)) { - return fail('InvalidToken', res) - } + jwt.verify( + token, + config.get('auth.privateKey', req.__domain), + (err, decoded) => { + if (err || decoded.domain !== req.__domain) { + return fail('InvalidToken', res) + } - return next() - }) + return next() + } + ) }) // Setup token service. router.use(tokenRoute, (req, res, next) => { - let method = req.method && req.method.toLowerCase() + const method = req.method && req.method.toLowerCase() if (method !== 'post') { return next() } - let clientId = req.body.clientId - let secret = req.body.secret + const clientId = req.body.clientId + const secret = req.body.secret // Fail if the auth.clientId or auth.secret haven't been set. if (!clientId || !secret) { @@ -82,41 +85,57 @@ module.exports = function (router) { return fail('NoAccess', res) } - let payload = { + const payload = { domain: req.__domain } // Sign a JWT token. 
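// ---------------------------------------------------------------------------
// Illustrative client sketch (not part of this patch): how a consumer obtains
// and uses a token from the /token route implemented above. The route, the
// clientId/secret body fields and the accessToken/tokenType/expiresIn response
// shape come from this file; the base URL and port (the local dev setup from
// the README), the placeholder credentials and the use of global fetch
// (Node 18+ or a browser) are assumptions made for the example only.
// ---------------------------------------------------------------------------
// async function getAccessToken() {
//   const response = await fetch('http://127.0.0.1:8001/token', {
//     method: 'POST',
//     headers: {'Content-Type': 'application/json'},
//     body: JSON.stringify({clientId: '<your-client-id>', secret: '<your-secret>'})
//   })
//
//   const {accessToken} = await response.json()
//
//   // Subsequent requests authenticate with: Authorization: `Bearer ${accessToken}`
//   return accessToken
// }
// ---------------------------------------------------------------------------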
- jwt.sign(payload, config.get('auth.privateKey', req.__domain), { - expiresIn: config.get('auth.tokenTtl', req.__domain) - }, (err, token) => { - if (err) { - logger.error({module: 'auth'}, err) - - return fail('JWTError', res) + jwt.sign( + payload, + config.get('auth.privateKey', req.__domain), + { + expiresIn: config.get('auth.tokenTtl', req.__domain) + }, + (err, token) => { + if (err) { + logger.error({module: 'auth'}, err) + + return fail('JWTError', res) + } + + res.setHeader('Content-Type', 'application/json') + res.setHeader('Cache-Control', 'no-store') + res.setHeader('Pragma', 'no-cache') + res.end( + JSON.stringify({ + accessToken: token, + tokenType: 'Bearer', + expiresIn: config.get('auth.tokenTtl') + }) + ) } - - res.setHeader('Content-Type', 'application/json') - res.setHeader('Cache-Control', 'no-store') - res.setHeader('Pragma', 'no-cache') - res.end(JSON.stringify({ - accessToken: token, - tokenType: 'Bearer', - expiresIn: config.get('auth.tokenTtl') - })) - }) + ) }) - function fail (type, res) { + function fail(type, res) { switch (type) { case 'NoToken': - res.setHeader('WWW-Authenticate', 'Bearer, error="no_token", error_description="No access token supplied"') + res.setHeader( + 'WWW-Authenticate', + 'Bearer, error="no_token", error_description="No access token supplied"' + ) break case 'InvalidToken': - res.setHeader('WWW-Authenticate', 'Bearer, error="invalid_token", error_description="Invalid or expired access token"') + res.setHeader( + 'WWW-Authenticate', + 'Bearer, error="invalid_token", error_description="Invalid or expired access token"' + ) break case 'NoPrivateKey': - res.setHeader('WWW-Authenticate', 'Bearer, error="no_private_key", error_description="No private key configured in auth.privateKey"') + res.setHeader( + 'WWW-Authenticate', + 'Bearer, error="no_private_key", error_description="No private key configured in auth.privateKey"' + ) break default: res.setHeader('WWW-Authenticate', 'Bearer realm="/token"') diff --git a/dadi/lib/cache/index.js b/dadi/lib/cache/index.js index 60cb7f50..93fd3687 100755 --- a/dadi/lib/cache/index.js +++ b/dadi/lib/cache/index.js @@ -11,7 +11,7 @@ const cache = new DadiCache(config.get('caching')) * Creates a new Cache instance for the server * @constructor */ -const Cache = function () {} +const Cache = function() {} /** * Adds a stream to the cache @@ -21,17 +21,17 @@ const Cache = function () {} * @param {Boolean} wait Whether to wait for the write operation * @return {Promise} */ -Cache.prototype.cacheFile = function (stream, key, options, wait) { +Cache.prototype.cacheFile = function(stream, key, options, wait) { if (!this.isEnabled()) return Promise.resolve(stream) - let encryptedKey = this.getNormalisedKey(key) - let cacheStream = PassThrough() - let responseStream = PassThrough() + const encryptedKey = this.getNormalisedKey(key) + const cacheStream = PassThrough() + const responseStream = PassThrough() stream.pipe(cacheStream) stream.pipe(responseStream) - let write = cache.set(encryptedKey, cacheStream, options) + const write = cache.set(encryptedKey, cacheStream, options) if (wait) { return write.then(() => responseStream) @@ -46,15 +46,19 @@ Cache.prototype.cacheFile = function (stream, key, options, wait) { * @param {String} pattern * @param {Function} callback */ -Cache.prototype.delete = function (pattern, callback = () => {}) { - let hashedPattern = this.getNormalisedKey(pattern) - - cache.flush(hashedPattern).then(() => { - return callback(null) - }).catch((err) => { - console.log(err) - return 
callback(null) - }) +Cache.prototype.delete = function(pattern, callback = () => {}) { + const hashedPattern = this.getNormalisedKey(pattern) + + cache + .flush(hashedPattern) + .then(() => { + return callback(null) + }) + .catch(err => { + console.log(err) + + return callback(null) + }) } /** @@ -66,12 +70,12 @@ Cache.prototype.delete = function (pattern, callback = () => {}) { * @param {Array/String} key * @return {String} */ -Cache.prototype.getNormalisedKey = function (key) { +Cache.prototype.getNormalisedKey = function(key) { if (key === '') return key if (Array.isArray(key)) { return key.reduce((normalisedKey, node) => { - if (node || (node === 0)) { + if (node || node === 0) { normalisedKey += sha1(node.toString()) } @@ -92,12 +96,13 @@ Cache.prototype.getNormalisedKey = function (key) { * @param {Object} options Optional options object * @return {Promise} */ -Cache.prototype.getStream = function (key, options) { +Cache.prototype.getStream = function(key, options) { if (!this.isEnabled()) return Promise.resolve(null) - let encryptedKey = this.getNormalisedKey(key) + const encryptedKey = this.getNormalisedKey(key) - return cache.get(encryptedKey, options).catch(err => { // eslint-disable-line handle-callback-err + return cache.get(encryptedKey, options).catch(err => { + // eslint-disable-line handle-callback-err return null }) } @@ -111,8 +116,8 @@ Cache.prototype.getStream = function (key, options) { * @param {String} key The cache key * @return {Promise} */ -Cache.prototype.getMetadata = function (key) { - let encryptedKey = this.getNormalisedKey(key) +Cache.prototype.getMetadata = function(key) { + const encryptedKey = this.getNormalisedKey(key) return cache.getMetadata(encryptedKey) } @@ -122,9 +127,11 @@ Cache.prototype.getMetadata = function (key) { * * @return {Boolean} */ -Cache.prototype.isEnabled = function () { - return config.get('caching.directory.enabled') || +Cache.prototype.isEnabled = function() { + return ( + config.get('caching.directory.enabled') || config.get('caching.redis.enabled') + ) } /** @@ -133,10 +140,10 @@ Cache.prototype.isEnabled = function () { * @param {String} key cache key * @param {[type]} value */ -Cache.prototype.set = function (key, value, options) { +Cache.prototype.set = function(key, value, options) { if (!this.isEnabled()) return Promise.resolve(null) - let encryptedKey = this.getNormalisedKey(key) + const encryptedKey = this.getNormalisedKey(key) return cache.set(encryptedKey, value, options) } @@ -148,6 +155,6 @@ module.exports = () => instance || new Cache() module.exports.Cache = Cache // Reset method (for unit tests). 
-module.exports.reset = function () { +module.exports.reset = function() { instance = null } diff --git a/dadi/lib/controller/domain.js b/dadi/lib/controller/domain.js index b6a12d32..72bcf3a8 100644 --- a/dadi/lib/controller/domain.js +++ b/dadi/lib/controller/domain.js @@ -8,13 +8,17 @@ const help = require(path.join(__dirname, '/../help')) module.exports.post = (req, res) => { // Don't accept an empty POST if (!req.body || !Array.isArray(req.body) || req.body.length === 0) { - return help.sendBackJSON(400, { - success: false, - errors: ['Bad Request'] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Bad Request'] + }, + res + ) } - let domains = req.body + const domains = req.body domains.forEach(item => { if (!DomainManager.getDomain(item.domain)) { @@ -23,10 +27,14 @@ module.exports.post = (req, res) => { } }) - return help.sendBackJSON(201, { - success: true, - domains: DomainManager.getDomains().map(item => item.domain) - }, res) + return help.sendBackJSON( + 201, + { + success: true, + domains: DomainManager.getDomains().map(item => item.domain) + }, + res + ) } /** @@ -35,67 +43,95 @@ module.exports.post = (req, res) => { module.exports.put = (req, res) => { // Don't accept an empty body if (!req.body || !req.body.data) { - return help.sendBackJSON(400, { - success: false, - errors: ['Bad Request'] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Bad Request'] + }, + res + ) } - let domain = req.params.domain - let configSchema = req.body.data + const domain = req.params.domain + const configSchema = req.body.data // Don't accept an empty param. if (!domain || Object.keys(configSchema).length === 0) { - return help.sendBackJSON(400, { - success: false, - errors: ['Bad Request'] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Bad Request'] + }, + res + ) } // Domain not found. if (!DomainManager.getDomain(domain)) { - return help.sendBackJSON(404, { - success: false, - errors: [`Domain '${domain}' does not exist`] - }, res) + return help.sendBackJSON( + 404, + { + success: false, + errors: [`Domain '${domain}' does not exist`] + }, + res + ) } // Update the domain configuration. DomainManager.addDomain(domain, configSchema) - return help.sendBackJSON(200, { - success: true, - domains: DomainManager.getDomains().map(item => item.domain) - }, res) + return help.sendBackJSON( + 200, + { + success: true, + domains: DomainManager.getDomains().map(item => item.domain) + }, + res + ) } /** * Accept DELETE requests for removing domains from the internal domain configuration. */ module.exports.delete = (req, res) => { - let domain = req.params.domain + const domain = req.params.domain // Don't accept an empty param. if (!domain) { - return help.sendBackJSON(400, { - success: false, - errors: ['Bad Request'] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Bad Request'] + }, + res + ) } // Domain not found. if (!DomainManager.getDomain(domain)) { - return help.sendBackJSON(404, { - success: false, - errors: [`Domain '${domain}' does not exist`] - }, res) + return help.sendBackJSON( + 404, + { + success: false, + errors: [`Domain '${domain}' does not exist`] + }, + res + ) } // Remove the domain. 
DomainManager.removeDomain(domain) - return help.sendBackJSON(200, { - success: true, - domains: DomainManager.getDomains().map(item => item.domain) - }, res) + return help.sendBackJSON( + 200, + { + success: true, + domains: DomainManager.getDomains().map(item => item.domain) + }, + res + ) } diff --git a/dadi/lib/controller/index.js b/dadi/lib/controller/index.js index 7ba610b0..08350c13 100755 --- a/dadi/lib/controller/index.js +++ b/dadi/lib/controller/index.js @@ -21,14 +21,14 @@ const workspace = require(path.join(__dirname, '/../models/workspace')) logger.init(config.get('logging'), config.get('logging.aws'), config.get('env')) -let workQueue = new WorkQueue() +const workQueue = new WorkQueue() -const Controller = function (router) { +const Controller = function(router) { router.use(logger.requestLogger) router.use(seek) - router.get('/hello', function (req, res, next) { + router.get('/hello', function(req, res, next) { res.end('Welcome to DADI CDN') }) @@ -36,7 +36,7 @@ const Controller = function (router) { const robotsFile = config.get('robots') try { - var file = fs.readFileSync(robotsFile) + const file = fs.readFileSync(robotsFile) res.statusCode = 200 res.end(file.toString()) @@ -48,93 +48,110 @@ const Controller = function (router) { }) router.get(/(.+)/, (req, res) => { - let factory = new HandlerFactory(workspace.get()) - let queueKey = sha1(req.__domain + req.url) - - return workQueue.run(queueKey, () => { - return factory.create(req).then(handler => { - return handler.get().then(data => { - return { handler, data } - }).catch(err => { - err.__handler = handler - return Promise.reject(err) + const factory = new HandlerFactory(workspace.get()) + const queueKey = sha1(req.__domain + req.url) + + return workQueue + .run(queueKey, () => { + return factory.create(req).then(handler => { + return handler + .get() + .then(data => { + return {handler, data} + }) + .catch(err => { + err.__handler = handler + + return Promise.reject(err) + }) }) }) - }).then(({handler, data}) => { - this.addContentTypeHeader(res, handler) - this.addCacheControlHeader(res, handler, req.__domain) - this.addLastModifiedHeader(res, handler) - this.addVaryHeader(res, handler) - - if (handler.storageHandler && handler.storageHandler.notFound) { - res.statusCode = config.get('notFound.statusCode', req.__domain) || 404 - } + .then(({handler, data}) => { + this.addContentTypeHeader(res, handler) + this.addCacheControlHeader(res, handler, req.__domain) + this.addLastModifiedHeader(res, handler) + this.addVaryHeader(res, handler) + + if (handler.storageHandler && handler.storageHandler.notFound) { + res.statusCode = + config.get('notFound.statusCode', req.__domain) || 404 + } - if (handler.storageHandler && handler.storageHandler.cleanUp) { - handler.storageHandler.cleanUp() - } + if (handler.storageHandler && handler.storageHandler.cleanUp) { + handler.storageHandler.cleanUp() + } - let etagResult = etag(data) - res.setHeader('ETag', etagResult) + const etagResult = etag(data) - if (this.shouldCompress(req, handler)) { - res.setHeader('Content-Encoding', 'gzip') + res.setHeader('ETag', etagResult) - data = new Promise((resolve, reject) => { - zlib.gzip(data, (err, compressedData) => { - if (err) return reject(err) + if (this.shouldCompress(req, handler)) { + res.setHeader('Content-Encoding', 'gzip') - res.setHeader('Content-Length', compressedData.byteLength) - resolve(compressedData) - }) - }) - } else { - res.setHeader( - 'Content-Length', - Buffer.isBuffer(data) ? 
data.byteLength : data.length - ) - } + data = new Promise((resolve, reject) => { + zlib.gzip(data, (err, compressedData) => { + if (err) return reject(err) - return Promise.resolve(data).then(data => { - if (req.headers.range) { - res.sendSeekable(data) - } else if (req.headers['if-none-match'] === etagResult && handler.getContentType() !== 'application/json') { - res.statusCode = 304 - res.end() + res.setHeader('Content-Length', compressedData.byteLength) + resolve(compressedData) + }) + }) } else { - let cacheHeader = (handler.getHeader && handler.getHeader('x-cache')) || - (handler.isCached ? 'HIT' : 'MISS') - - res.setHeader('X-Cache', cacheHeader) - res.end(data) + res.setHeader( + 'Content-Length', + Buffer.isBuffer(data) ? data.byteLength : data.length + ) } + + return Promise.resolve(data).then(data => { + if (req.headers.range) { + res.sendSeekable(data) + } else if ( + req.headers['if-none-match'] === etagResult && + handler.getContentType() !== 'application/json' + ) { + res.statusCode = 304 + res.end() + } else { + const cacheHeader = + (handler.getHeader && handler.getHeader('x-cache')) || + (handler.isCached ? 'HIT' : 'MISS') + + res.setHeader('X-Cache', cacheHeader) + res.end(data) + } + }) }) - }).catch(err => { - logger.error({err: err}) + .catch(err => { + logger.error({err}) - if (err.__handler) { - res.setHeader('X-Cache', err.__handler.isCached ? 'HIT' : 'MISS') + if (err.__handler) { + res.setHeader('X-Cache', err.__handler.isCached ? 'HIT' : 'MISS') - delete err.__handler - } + delete err.__handler + } - help.sendBackJSON(err.statusCode || 400, err, res) - }) + help.sendBackJSON(err.statusCode || 400, err, res) + }) }) // Invalidation request - router.post('/api/flush', function (req, res) { + router.post('/api/flush', function(req, res) { if (!req.body.pattern) { - return help.sendBackJSON(400, { - success: false, - message: "A 'pattern' must be specified" - }, res) + return help.sendBackJSON( + 400, + { + success: false, + message: "A 'pattern' must be specified" + }, + res + ) } let pattern = [req.__domain] if (req.body.pattern !== '*') { - let parsedUrl = urlParser.parse(req.body.pattern, true) + const parsedUrl = urlParser.parse(req.body.pattern, true) pattern = pattern.concat([ parsedUrl.pathname, @@ -142,45 +159,64 @@ const Controller = function (router) { ]) } - help.clearCache(pattern, (err) => { + help.clearCache(pattern, err => { if (err) console.log(err) if (!config.get('cloudfront.enabled')) { - return help.sendBackJSON(200, { - success: true, - message: `Cache flushed for pattern "${req.body.pattern}"` - }, res) + return help.sendBackJSON( + 200, + { + success: true, + message: `Cache flushed for pattern "${req.body.pattern}"` + }, + res + ) } // Invalidate the Cloudfront cache - let cf = cloudfront.createClient(config.get('cloudfront.accessKey'), config.get('cloudfront.secretKey')) - - cf.getDistribution(config.get('cloudfront.distribution'), function (err, distribution) { + const cf = cloudfront.createClient( + config.get('cloudfront.accessKey'), + config.get('cloudfront.secretKey') + ) + + cf.getDistribution(config.get('cloudfront.distribution'), function( + err, + distribution + ) { if (err) console.log(err) - let callerReference = (new Date()).toString() - - distribution.invalidate(callerReference, ['/' + req.body.pattern], function (err, invalidation) { - if (err) console.log(err) - - return help.sendBackJSON(200, { - success: true, - message: 'Cache and cloudfront flushed for pattern ' + req.body.pattern - }, res) - }) + const callerReference = 
new Date().toString() + + distribution.invalidate( + callerReference, + ['/' + req.body.pattern], + function(err, invalidation) { + if (err) console.log(err) + + return help.sendBackJSON( + 200, + { + success: true, + message: + 'Cache and cloudfront flushed for pattern ' + req.body.pattern + }, + res + ) + } + ) }) }) }) - router.post('/api/recipes', function (req, res) { + router.post('/api/recipes', function(req, res) { return RecipeController.post(req, res) }) - router.post('/api/routes', function (req, res) { + router.post('/api/routes', function(req, res) { return RouteController.post(req, res) }) - router.use('/_dadi/domains/:domain?', function (req, res, next) { + router.use('/_dadi/domains/:domain?', function(req, res, next) { if ( !config.get('dadiNetwork.enableConfigurationAPI') || !config.get('multiDomain.enabled') @@ -199,45 +235,48 @@ const Controller = function (router) { * @param {Object} handler - the current asset handler (image, CSS, JS) * @returns {Boolean} - whether to compress the data before sending the response */ -Controller.prototype.shouldCompress = function (req, handler) { - let acceptHeader = req.headers['accept-encoding'] || '' - let contentType = handler.getContentType() - let useCompression = config.get('headers.useGzipCompression', req.__domain) +Controller.prototype.shouldCompress = function(req, handler) { + const acceptHeader = req.headers['accept-encoding'] || '' + const contentType = handler.getContentType() + const useCompression = config.get('headers.useGzipCompression', req.__domain) - return useCompression && + return ( + useCompression && contentType !== 'application/json' && acceptHeader.split(',').includes('gzip') && compressible(contentType) + ) } -Controller.prototype.addContentTypeHeader = function (res, handler) { +Controller.prototype.addContentTypeHeader = function(res, handler) { if (handler.getContentType()) { res.setHeader('Content-Type', handler.getContentType()) } } -Controller.prototype.addLastModifiedHeader = function (res, handler) { +Controller.prototype.addLastModifiedHeader = function(res, handler) { if (!handler) return if (handler.getLastModified) { - var lastMod = handler.getLastModified() + const lastMod = handler.getLastModified() + if (lastMod) res.setHeader('Last-Modified', lastMod) } } -Controller.prototype.addVaryHeader = function (res, handler) { +Controller.prototype.addVaryHeader = function(res, handler) { if (!handler) return res.setHeader('Vary', 'Accept-Encoding') } -Controller.prototype.addCacheControlHeader = function (res, handler, domain) { - let configHeaderSets = config.get('headers.cacheControl', domain) +Controller.prototype.addCacheControlHeader = function(res, handler, domain) { + const configHeaderSets = config.get('headers.cacheControl', domain) // If it matches, sets Cache-Control header using the file path configHeaderSets.paths.forEach(obj => { - let key = Object.keys(obj)[0] - let value = obj[key] + const key = Object.keys(obj)[0] + const value = obj[key] if (handler.storageHandler.getFullUrl().indexOf(key) > -1) { setHeader(value) @@ -246,10 +285,10 @@ Controller.prototype.addCacheControlHeader = function (res, handler, domain) { // If not already set, sets Cache-Control header using the file mimetype configHeaderSets.mimetypes.forEach(obj => { - let key = Object.keys(obj)[0] - let value = obj[key] + const key = Object.keys(obj)[0] + const value = obj[key] - if (handler.getFilename && (mime.getType(handler.getFilename()) === key)) { + if (handler.getFilename && mime.getType(handler.getFilename()) 
=== key) { setHeader(value) } }) @@ -257,8 +296,8 @@ Controller.prototype.addCacheControlHeader = function (res, handler, domain) { // If not already set, sets Cache-Control header using the default setHeader(configHeaderSets.default) - function setHeader (value) { - if (!value || (value.length === 0)) return + function setHeader(value) { + if (!value || value.length === 0) return // already set if (res.getHeader('cache-control')) return diff --git a/dadi/lib/controller/recipe.js b/dadi/lib/controller/recipe.js index dbc05f01..a0b160a1 100644 --- a/dadi/lib/controller/recipe.js +++ b/dadi/lib/controller/recipe.js @@ -6,53 +6,71 @@ const Recipe = require(path.join(__dirname, '/../models/recipe')) const workspace = require(path.join(__dirname, '/../models/workspace')) module.exports.post = (req, res) => { - let obj = typeof req.body === 'object' - ? req.body - : JSON.parse(req.body) + const obj = typeof req.body === 'object' ? req.body : JSON.parse(req.body) // Don't accept an empty POST if (Object.keys(obj).length === 0) { - return help.sendBackJSON(400, { - success: false, - errors: ['Bad Request'] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Bad Request'] + }, + res + ) } - let recipe = new Recipe(obj) - let validationErrors = recipe.validate() + const recipe = new Recipe(obj) + const validationErrors = recipe.validate() if (validationErrors) { - return help.sendBackJSON(400, { - success: false, - errors: validationErrors - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: validationErrors + }, + res + ) } - let existingWorkspaceFile = workspace.get( - recipe.name, - req.__domain - ) + const existingWorkspaceFile = workspace.get(recipe.name, req.__domain) // Do we already have a recipe (or any other workspace file) // with this name? 
if (existingWorkspaceFile) { - return help.sendBackJSON(400, { - success: false, - errors: [`Route ${recipe.name} already exists`] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: [`Route ${recipe.name} already exists`] + }, + res + ) } - return recipe.save(req.__domain).then(() => { - return help.sendBackJSON(201, { - success: true, - message: `Recipe "${recipe.name}" created` - }, res) - }).catch(err => { - logger.error({module: 'recipes'}, err) - - return help.sendBackJSON(400, { - success: false, - errors: ['Error when saving recipe'] - }, res) - }) + return recipe + .save(req.__domain) + .then(() => { + return help.sendBackJSON( + 201, + { + success: true, + message: `Recipe "${recipe.name}" created` + }, + res + ) + }) + .catch(err => { + logger.error({module: 'recipes'}, err) + + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Error when saving recipe'] + }, + res + ) + }) } diff --git a/dadi/lib/controller/route.js b/dadi/lib/controller/route.js index b9214ab2..80d18c49 100644 --- a/dadi/lib/controller/route.js +++ b/dadi/lib/controller/route.js @@ -9,37 +9,60 @@ module.exports.post = (req, res) => { const validationErrors = route.validate() // Don't accept an empty POST - if (!req.body || (Object.keys(req.body).length === 0)) { - return help.sendBackJSON(400, { - success: false, - errors: ['Bad Request'] - }, res) + if (!req.body || Object.keys(req.body).length === 0) { + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Bad Request'] + }, + res + ) } if (validationErrors) { - return help.sendBackJSON(400, { - success: false, - errors: validationErrors - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: validationErrors + }, + res + ) } if (workspace.get(route.config.route, req.__domain)) { - return help.sendBackJSON(400, { - success: false, - errors: [`Route '${route.config.route}' already exists`] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: [`Route '${route.config.route}' already exists`] + }, + res + ) } - return route.save(req.__domain).then(() => { - return help.sendBackJSON(200, { - success: true - }, res) - }).catch(err => { - logger.error({module: 'routes'}, err) + return route + .save(req.__domain) + .then(() => { + return help.sendBackJSON( + 200, + { + success: true + }, + res + ) + }) + .catch(err => { + logger.error({module: 'routes'}, err) - return help.sendBackJSON(400, { - success: false, - errors: ['Error when saving route'] - }, res) - }) + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Error when saving route'] + }, + res + ) + }) } diff --git a/dadi/lib/controller/seek.js b/dadi/lib/controller/seek.js index 577a1c08..0700ae7d 100644 --- a/dadi/lib/controller/seek.js +++ b/dadi/lib/controller/seek.js @@ -8,16 +8,17 @@ const rangeStream = require('range-stream') const parseRange = require('range-parser') const sbuff = require('simple-bufferstream') -module.exports = function (req, res, next) { +module.exports = function(req, res, next) { // every new request gets a thin wrapper over the generic function - res.sendSeekable = function (stream, config) { + res.sendSeekable = function(stream, config) { return sendSeekable(stream, config, req, res, next) } + next() } // the generic handler for serving up partial streams -function sendSeekable (stream, config, req, res, next) { +function sendSeekable(stream, config, req, res, next) { if (stream instanceof Buffer) { config = config || {} config.length = stream.length @@ -25,7 
+26,8 @@ function sendSeekable (stream, config, req, res, next) { } if (!config.length) { - let err = new Error('send-seekable requires `length` option') + const err = new Error('send-seekable requires `length` option') + return next(err) } @@ -39,10 +41,11 @@ function sendSeekable (stream, config, req, res, next) { // if this is a partial request if (req.headers.range) { // parse ranges - let ranges = parseRange(config.length, req.headers.range) + const ranges = parseRange(config.length, req.headers.range) if (ranges === -2) { res.statusCode = 400 + return res.end() // malformed range } @@ -50,6 +53,7 @@ function sendSeekable (stream, config, req, res, next) { // unsatisfiable range res.setHeader('Content-Range', '*/' + config.length) res.statusCode = 416 + return res.end() } @@ -61,13 +65,16 @@ function sendSeekable (stream, config, req, res, next) { return next(new Error('send-seekable can only serve single ranges')) } - let start = ranges[0].start - let end = ranges[0].end + const start = ranges[0].start + const end = ranges[0].end // formatting response res.statusCode = 206 - res.setHeader('Content-Length', (end - start) + 1) // end is inclusive - res.setHeader('Content-Range', 'bytes ' + start + '-' + end + '/' + config.length) + res.setHeader('Content-Length', end - start + 1) // end is inclusive + res.setHeader( + 'Content-Range', + 'bytes ' + start + '-' + end + '/' + config.length + ) // slicing the stream to partial content stream = stream.pipe(rangeStream(start, end)) @@ -75,4 +82,3 @@ function sendSeekable (stream, config, req, res, next) { return stream.pipe(res) } - diff --git a/dadi/lib/handlers/css.js b/dadi/lib/handlers/css.js index d970bd99..dc472ac1 100644 --- a/dadi/lib/handlers/css.js +++ b/dadi/lib/handlers/css.js @@ -14,22 +14,16 @@ const url = require('url') * @param {String} format The extension of the file being handled * @param {Object} req The request instance */ -const CSSHandler = function (format, req, { - options = {} -} = {}) { - this.legacyURLOverrides = this.getLegacyURLOverrides(req.url) +const CSSHandler = function(format, req, {options = {}} = {}) { this.options = options - this.url = url.parse( - this.legacyURLOverrides.url || req.url, - true - ) + this.url = url.parse(req.url, true) - this.isExternalUrl = this.url.pathname.indexOf('http://') > 0 || this.url.pathname.indexOf('https://') > 0 + this.isExternalUrl = + this.url.pathname.indexOf('http://') > 0 || + this.url.pathname.indexOf('https://') > 0 this.isCompressed = Boolean( - this.options.compress || - this.legacyURLOverrides.compress || - this.url.query.compress === '1' + this.options.compress || this.url.query.compress === '1' ) this.cache = Cache() @@ -52,48 +46,53 @@ const CSSHandler = function (format, req, { * * @return {Promise} A stream with the file */ -CSSHandler.prototype.get = function () { - return this.cache.getStream(this.cacheKey, { - ttl: config.get('caching.ttl', this.req.__domain) - }).then(stream => { - if (stream) { - this.isCached = true - - return stream - } - - this.storageHandler = this.storageFactory.create( - 'asset', - this.url.pathname.slice(1), - {domain: this.req.__domain} - ) - - // Aborting the request if full remote URL is required and not enabled. 
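// ---------------------------------------------------------------------------
// For reference (illustrative, not part of this patch): requests for assets at
// a full remote URL are only honoured when both settings below are enabled in
// the environment config. The property names come from the config.get() calls
// in this handler; the file name and values are example assumptions.
//
// config/config.development.json:
// {
//   "assets": {
//     "remote": {
//       "enabled": true,
//       "allowFullURL": true
//     }
//   }
// }
// ---------------------------------------------------------------------------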
- if ( - this.isExternalUrl && - ( - !config.get('assets.remote.enabled', this.req.__domain) || - !config.get('assets.remote.allowFullURL', this.req.__domain) - ) - ) { - let err = { - statusCode: 403, - message: 'Loading assets from a full remote URL is not supported by this instance of DADI CDN' +CSSHandler.prototype.get = function() { + return this.cache + .getStream(this.cacheKey, { + ttl: config.get('caching.ttl', this.req.__domain) + }) + .then(stream => { + if (stream) { + this.isCached = true + + return stream } - return Promise.reject(err) - } + this.storageHandler = this.storageFactory.create( + 'asset', + this.url.pathname.slice(1), + {domain: this.req.__domain} + ) - return this.storageHandler.get().then(stream => { - return this.transform(stream) - }).then(stream => { - return this.cache.cacheFile(stream, this.cacheKey, { - ttl: config.get('caching.ttl', this.req.__domain) - }) + // Aborting the request if full remote URL is required and not enabled. + if ( + this.isExternalUrl && + (!config.get('assets.remote.enabled', this.req.__domain) || + !config.get('assets.remote.allowFullURL', this.req.__domain)) + ) { + const err = { + statusCode: 403, + message: + 'Loading assets from a full remote URL is not supported by this instance of DADI CDN' + } + + return Promise.reject(err) + } + + return this.storageHandler + .get() + .then(stream => { + return this.transform(stream) + }) + .then(stream => { + return this.cache.cacheFile(stream, this.cacheKey, { + ttl: config.get('caching.ttl', this.req.__domain) + }) + }) + }) + .then(stream => { + return help.streamToBuffer(stream) }) - }).then(stream => { - return help.streamToBuffer(stream) - }) } /** @@ -101,7 +100,7 @@ CSSHandler.prototype.get = function () { * * @return {String} The content type */ -CSSHandler.prototype.getContentType = function () { +CSSHandler.prototype.getContentType = function() { return 'text/css' } @@ -110,7 +109,7 @@ CSSHandler.prototype.getContentType = function () { * * @return {String} The filename */ -CSSHandler.prototype.getFilename = function () { +CSSHandler.prototype.getFilename = function() { return this.url.pathname.split('/').slice(-1)[0] } @@ -119,39 +118,16 @@ CSSHandler.prototype.getFilename = function () { * * @return {Number} The last modified timestamp */ -CSSHandler.prototype.getLastModified = function () { +CSSHandler.prototype.getLastModified = function() { if (!this.storageHandler || !this.storageHandler.getLastModified) return null return this.storageHandler.getLastModified() } -/** - * Looks for parameters in the URL using legacy syntax - * (e.g. 
/css/0/file.css) - * - * @param {String} url The URL - * @return {Object} A list of parameters and their value - */ -CSSHandler.prototype.getLegacyURLOverrides = function (url) { - let overrides = {} - - let legacyURLMatch = url.match(/^\/css(\/(\d))?/) - - if (legacyURLMatch) { - if (legacyURLMatch[2]) { - overrides.compress = legacyURLMatch[2] === '1' - } - - overrides.url = url.slice(legacyURLMatch[0].length) - } - - return overrides -} - /** * Sets the base URL (excluding any recipe or route nodes) */ -CSSHandler.prototype.setBaseUrl = function (baseUrl) { +CSSHandler.prototype.setBaseUrl = function(baseUrl) { this.url = url.parse(baseUrl, true) } @@ -161,7 +137,7 @@ CSSHandler.prototype.setBaseUrl = function (baseUrl) { * @param {Stream} stream The input stream * @return {Promise} */ -CSSHandler.prototype.transform = function (stream) { +CSSHandler.prototype.transform = function(stream) { const tmpDir = path.resolve('./tmp') if (!this.isCompressed) return Promise.resolve(stream) @@ -185,18 +161,18 @@ CSSHandler.prototype.transform = function (stream) { callback: (err, min) => { if (err) { return reject(err) - } else { - fs.unlinkSync(fileIn) - stream = fs.createReadStream(fileOut) + } - stream.on('open', function () { - return resolve(stream) - }) + fs.unlinkSync(fileIn) + stream = fs.createReadStream(fileOut) - stream.on('close', function () { - fs.unlinkSync(fileOut) - }) - } + stream.on('open', function() { + return resolve(stream) + }) + + stream.on('close', function() { + fs.unlinkSync(fileOut) + }) } }) }) @@ -204,7 +180,7 @@ CSSHandler.prototype.transform = function (stream) { }) } -module.exports = function (format, request, handlerData) { +module.exports = function(format, request, handlerData) { return new CSSHandler(format, request, handlerData) } diff --git a/dadi/lib/handlers/default.js b/dadi/lib/handlers/default.js index 8f146fb1..cdab0c1c 100644 --- a/dadi/lib/handlers/default.js +++ b/dadi/lib/handlers/default.js @@ -12,17 +12,13 @@ const url = require('url') * @param {String} format The extension of the file being handled * @param {Object} req The request instance */ -const DefaultHandler = function (format, req, { - options = {} -} = {}) { - this.legacyURLOverrides = this.getLegacyURLOverrides(req.url) +const DefaultHandler = function(format, req, {options = {}} = {}) { this.options = options - this.url = url.parse( - this.legacyURLOverrides.url || req.url, - true - ) + this.url = url.parse(req.url, true) - this.isExternalUrl = this.url.pathname.indexOf('http://') > 0 || this.url.pathname.indexOf('https://') > 0 + this.isExternalUrl = + this.url.pathname.indexOf('http://') > 0 || + this.url.pathname.indexOf('https://') > 0 this.cache = Cache() this.cacheKey = [req.__domain, this.url.href] @@ -38,46 +34,48 @@ const DefaultHandler = function (format, req, { * * @return {Promise} A stream with the file */ -DefaultHandler.prototype.get = function () { - return this.cache.getStream(this.cacheKey, { - ttl: config.get('caching.ttl', this.req.__domain) - }).then(stream => { - if (stream) { - this.isCached = true - - return stream - } - - this.storageHandler = this.storageFactory.create( - 'asset', - this.url.href.slice(1), - {domain: this.req.__domain} - ) - - // Aborting the request if full remote URL is required and not enabled. 
- if ( - this.isExternalUrl && - ( - !config.get('assets.remote.enabled', this.req.__domain) || - !config.get('assets.remote.allowFullURL', this.req.__domain) - ) - ) { - let err = { - statusCode: 403, - message: 'Loading assets from a full remote URL is not supported by this instance of DADI CDN' +DefaultHandler.prototype.get = function() { + return this.cache + .getStream(this.cacheKey, { + ttl: config.get('caching.ttl', this.req.__domain) + }) + .then(stream => { + if (stream) { + this.isCached = true + + return stream } - return Promise.reject(err) - } + this.storageHandler = this.storageFactory.create( + 'asset', + this.url.href.slice(1), + {domain: this.req.__domain} + ) + + // Aborting the request if full remote URL is required and not enabled. + if ( + this.isExternalUrl && + (!config.get('assets.remote.enabled', this.req.__domain) || + !config.get('assets.remote.allowFullURL', this.req.__domain)) + ) { + const err = { + statusCode: 403, + message: + 'Loading assets from a full remote URL is not supported by this instance of DADI CDN' + } + + return Promise.reject(err) + } - return this.storageHandler.get().then(stream => { - return this.cache.cacheFile(stream, this.cacheKey, { - ttl: config.get('caching.ttl', this.req.__domain) + return this.storageHandler.get().then(stream => { + return this.cache.cacheFile(stream, this.cacheKey, { + ttl: config.get('caching.ttl', this.req.__domain) + }) }) }) - }).then(stream => { - return help.streamToBuffer(stream) - }) + .then(stream => { + return help.streamToBuffer(stream) + }) } /** @@ -85,7 +83,7 @@ DefaultHandler.prototype.get = function () { * * @return {String} The content type */ -DefaultHandler.prototype.getContentType = function () { +DefaultHandler.prototype.getContentType = function() { let newUrl = this.url.pathname if (this.storageHandler && this.storageHandler.url !== newUrl) { @@ -104,7 +102,7 @@ DefaultHandler.prototype.getContentType = function () { * * @return {String} The filename */ -DefaultHandler.prototype.getFilename = function () { +DefaultHandler.prototype.getFilename = function() { return this.url.pathname.split('/').slice(-1)[0] } @@ -113,39 +111,20 @@ DefaultHandler.prototype.getFilename = function () { * * @return {Number} The last modified timestamp */ -DefaultHandler.prototype.getLastModified = function () { +DefaultHandler.prototype.getLastModified = function() { if (!this.storageHandler || !this.storageHandler.getLastModified) return null return this.storageHandler.getLastModified() } -/** - * Looks for parameters in the URL using legacy syntax - * (e.g. 
/fonts/0/file.css) - * - * @param {String} url The URL - * @return {Object} A list of parameters and their value - */ -DefaultHandler.prototype.getLegacyURLOverrides = function (url) { - let overrides = {} - - const legacyURLMatch = url.match(/\/fonts(\/(\d))?/) - - if (legacyURLMatch) { - overrides.url = url.slice(legacyURLMatch[0].length) - } - - return overrides -} - /** * Sets the base URL (excluding any recipe or route nodes) */ -DefaultHandler.prototype.setBaseUrl = function (baseUrl) { +DefaultHandler.prototype.setBaseUrl = function(baseUrl) { this.url = url.parse(baseUrl, true) } -module.exports = function (format, request, handlerData) { +module.exports = function(format, request, handlerData) { return new DefaultHandler(format, request, handlerData) } diff --git a/dadi/lib/handlers/factory.js b/dadi/lib/handlers/factory.js index 0886eee0..54707253 100644 --- a/dadi/lib/handlers/factory.js +++ b/dadi/lib/handlers/factory.js @@ -15,91 +15,65 @@ const PluginHandler = require(path.join(__dirname, '/plugin')) const Route = require(path.join(__dirname, '/../models/route')) const workspace = require(path.join(__dirname, '/../models/workspace')) -function parseUrl (req) { +function parseUrl(req) { return url.parse(req.url, true) } -function getFormat (req) { - let parsedPath = parseUrl(req).pathname +function getFormat(req) { + const parsedPath = parseUrl(req).pathname - // add default jpg extension - // if (path.extname(parsedPath) === '') { - // parsedPath += '.jpg' - // } - - if (req.__cdnLegacyURLSyntax) { - return parsedPath.split('/').find(Boolean) - } - - return path.extname(parsedPath).replace('.', '').toLowerCase() + return path + .extname(parsedPath) + .replace('.', '') + .toLowerCase() } -const HandlerFactory = function () {} +const HandlerFactory = function() {} -HandlerFactory.prototype.create = function (req, mimetype) { +HandlerFactory.prototype.create = function(req, mimetype) { const parsedUrl = url.parse(req.url, true) const pathComponents = parsedUrl.pathname.slice(1).split('/') + const format = mimetype ? mime.getExtension(mimetype) : null - let format = mimetype ? mime.getExtension(mimetype) : null + // ensure the querystring is decoded (removes for eg & entities introduced via XSLT) + if (parsedUrl.search) { + parsedUrl.search = he.decode(parsedUrl.search) + req.url = url.format(parsedUrl) + } - // version 1 matches a string like /jpg/80/0/0/640/480/ at the beginning of the url pathname - const v1pattern = /^\/[a-z]{3,4}\/[0-9]+\/[0-1]+\/[0-1]+\/[0-9]+\/[0-9]+\//gi + // Check if a workspace file matches the first part of the path. + const workspaceMatch = workspace.get(pathComponents[0], req.__domain) - if (v1pattern.test(parsedUrl.pathname) || /\/(fonts|css|js)/.test(pathComponents[0])) { - req.__cdnLegacyURLSyntax = true + switch (workspaceMatch && workspaceMatch.type) { + case 'plugins': + return this.createFromPlugin({ + plugin: require(workspaceMatch.path), + req + }) - logger.warn( - `'${parsedUrl.pathname}': this request uses a deprecated URL format which will be removed from future versions of DADI CDN. 
For more information, please visit https://docs.dadi.cloud/cdn/1.11#querystring-url-scheme.` - ) - } else { - // ensure the querystring is decoded (removes for eg & entities introduced via XSLT) - if (parsedUrl.search) { - parsedUrl.search = he.decode(parsedUrl.search) - req.url = url.format(parsedUrl) - } - } + case 'recipes': + return this.createFromRecipe({ + name: pathComponents[0], + req, + workspaceMatch + }) - // Create an image handler if the request uses a legacy URL. - if (req.__cdnLegacyURLSyntax) { - return this.createFromFormat({ - format, - req - }) - } else { - // Check if a workspace file matches the first part of the path. - const workspaceMatch = workspace.get(pathComponents[0], req.__domain) - - switch (workspaceMatch && workspaceMatch.type) { - case 'plugins': - return this.createFromPlugin({ - plugin: require(workspaceMatch.path), - req - }) - - case 'recipes': - return this.createFromRecipe({ - name: pathComponents[0], - req, - workspaceMatch - }) - - case 'routes': - return this.createFromRoute({ - name: pathComponents[0], - req, - workspaceMatch - }) + case 'routes': + return this.createFromRoute({ + name: pathComponents[0], + req, + workspaceMatch + }) - default: - return this.createFromFormat({ - format, - req - }) - } + default: + return this.createFromFormat({ + format, + req + }) } } -HandlerFactory.prototype.callErrorHandler = function (format, req) { +HandlerFactory.prototype.callErrorHandler = function(format, req) { const error = new Error('Unknown URI') error.statusCode = 404 @@ -108,8 +82,13 @@ HandlerFactory.prototype.callErrorHandler = function (format, req) { return Promise.reject(error) } -HandlerFactory.prototype.createFromFormat = function ({format, options, plugins, req}) { - let handlerData = { +HandlerFactory.prototype.createFromFormat = function({ + format, + options, + plugins, + req +}) { + const handlerData = { options, plugins } @@ -131,6 +110,7 @@ HandlerFactory.prototype.createFromFormat = function ({format, options, plugins, return resolve(new ImageHandler(format, req, handlerData)) case 'bin': format = 'jpg' + return resolve(new ImageHandler(format, req, handlerData)) default: return resolve(new DefaultHandler(format, req, handlerData)) @@ -138,11 +118,16 @@ HandlerFactory.prototype.createFromFormat = function ({format, options, plugins, }) } -HandlerFactory.prototype.createFromPlugin = function ({plugin, req}) { +HandlerFactory.prototype.createFromPlugin = function({plugin, req}) { return Promise.resolve(new PluginHandler(req, plugin)) } -HandlerFactory.prototype.createFromRecipe = function ({name, req, route, workspaceMatch}) { +HandlerFactory.prototype.createFromRecipe = function({ + name, + req, + route, + workspaceMatch +}) { const parsedUrl = url.parse(req.url, true) const source = workspaceMatch.source const recipeSettings = source.settings || {} @@ -181,7 +166,11 @@ HandlerFactory.prototype.createFromRecipe = function ({name, req, route, workspa }) } -HandlerFactory.prototype.createFromRoute = function ({name, req, workspaceMatch}) { +HandlerFactory.prototype.createFromRoute = function({ + name, + req, + workspaceMatch +}) { const route = new Route(workspaceMatch.source) route.setDomain(req.__domain) @@ -189,7 +178,7 @@ HandlerFactory.prototype.createFromRoute = function ({name, req, workspaceMatch} route.setUserAgent(req.headers['user-agent']) return route.getRecipe().then(recipeName => { - let workspaceMatch = workspace.get(recipeName, req.__domain) + const workspaceMatch = workspace.get(recipeName, req.__domain) if 
(workspaceMatch && workspaceMatch.type === 'recipes') { return this.createFromRecipe({ @@ -204,7 +193,7 @@ HandlerFactory.prototype.createFromRoute = function ({name, req, workspaceMatch} }) } -module.exports = function () { +module.exports = function() { return new HandlerFactory() } diff --git a/dadi/lib/handlers/image.js b/dadi/lib/handlers/image.js index a7b409c5..5afa56dd 100644 --- a/dadi/lib/handlers/image.js +++ b/dadi/lib/handlers/image.js @@ -3,7 +3,7 @@ const fs = require('fs-extra') const exifReader = require('exif-reader-paras20xx') const fit = require('aspect-fit') -const { BitmapImage, GifFrame, GifUtil } = require('gifwrap') +const {BitmapImage, GifFrame, GifUtil} = require('gifwrap') const help = require('./../help') const Jimp = require('jimp') const mkdirp = require('mkdirp') @@ -21,7 +21,9 @@ const Cache = require(path.join(__dirname, '/../cache')) const config = require(path.join(__dirname, '/../../../config')) const workspace = require(path.join(__dirname, '/../models/workspace')) -const tmpDirectory = path.resolve(path.join(__dirname, '/../../../workspace/_tmp')) +const tmpDirectory = path.resolve( + path.join(__dirname, '/../../../workspace/_tmp') +) mkdirp(tmpDirectory, (err, made) => { if (err) { @@ -43,27 +45,33 @@ const GRAVITY_TYPES = { } const IMAGE_PARAMETERS = [ - { name: 'format', aliases: ['fmt'] }, - { name: 'quality', aliases: ['q'], default: 75 }, - { name: 'sharpen', aliases: ['sh'], default: 0, allowZero: true, minimumValue: 1 }, - { name: 'saturate', aliases: ['sat'], default: 1, allowZero: true }, - { name: 'width', aliases: ['w'] }, - { name: 'height', aliases: ['h'] }, - { name: 'ratio', aliases: ['rx'] }, - { name: 'cropX', aliases: ['cx'] }, - { name: 'cropY', aliases: ['cy'] }, - { name: 'crop', aliases: ['coords'] }, - { name: 'resizeStyle', aliases: ['resize'] }, - { name: 'devicePixelRatio', aliases: ['dpr'] }, - { name: 'gravity', aliases: ['g'], default: 'None' }, - { name: 'filter', aliases: ['f'], default: 'lanczos', lowercase: true }, - { name: 'trim', aliases: ['t'] }, - { name: 'trimFuzz', aliases: ['tf'] }, - { name: 'blur', aliases: ['b'] }, - { name: 'strip', aliases: ['s'] }, - { name: 'rotate', aliases: ['r'] }, - { name: 'flip', aliases: ['fl'] }, - { name: 'progressive', aliases: ['pg'] } + {name: 'format', aliases: ['fmt']}, + {name: 'quality', aliases: ['q'], default: 75}, + { + name: 'sharpen', + aliases: ['sh'], + default: 0, + allowZero: true, + minimumValue: 1 + }, + {name: 'saturate', aliases: ['sat'], default: 1, allowZero: true}, + {name: 'width', aliases: ['w']}, + {name: 'height', aliases: ['h']}, + {name: 'ratio', aliases: ['rx']}, + {name: 'cropX', aliases: ['cx']}, + {name: 'cropY', aliases: ['cy']}, + {name: 'crop', aliases: ['coords']}, + {name: 'resizeStyle', aliases: ['resize']}, + {name: 'devicePixelRatio', aliases: ['dpr']}, + {name: 'gravity', aliases: ['g'], default: 'None'}, + {name: 'filter', aliases: ['f'], default: 'lanczos', lowercase: true}, + {name: 'trim', aliases: ['t']}, + {name: 'trimFuzz', aliases: ['tf']}, + {name: 'blur', aliases: ['b']}, + {name: 'strip', aliases: ['s']}, + {name: 'rotate', aliases: ['r']}, + {name: 'flip', aliases: ['fl']}, + {name: 'progressive', aliases: ['pg']} ] /** @@ -71,10 +79,7 @@ const IMAGE_PARAMETERS = [ * @param {String} format - the type of image requested * @param {Object} req - the original HTTP request */ -const ImageHandler = function (format, req, { - options = {}, - plugins = [] -} = {}) { +const ImageHandler = function(format, req, {options = {}, plugins 
= []} = {}) {
   this.req = req
   this.storageFactory = Object.create(StorageFactory)
   this.storageHandler = null
@@ -94,10 +99,11 @@ const ImageHandler = function (format, req, {
   }
 
   this.exifData = {}
-  this.isExternalUrl = !pathname.indexOf('http://') || !pathname.indexOf('https://')
+  this.isExternalUrl =
+    !pathname.indexOf('http://') || !pathname.indexOf('https://')
 
   this.plugins = Object.keys(workspace.get()).reduce((activePlugins, file) => {
-    if ((workspace.get(file).type === 'plugins') && plugins.includes(file)) {
+    if (workspace.get(file).type === 'plugins' && plugins.includes(file)) {
       try {
         return activePlugins.concat(require(workspace.get(file).path))
       } catch (err) {
@@ -115,22 +121,32 @@ const ImageHandler = function (format, req, {
  *
  * @return {Promise}
  */
-ImageHandler.prototype.checkCropRectangle = function () {
-  let options = this.options
+ImageHandler.prototype.checkCropRectangle = function() {
+  const options = this.options
 
   // sanity check on crop requests
   if (options.cropX !== undefined && options.cropY !== undefined) {
     if (
-      this.calculatedDimensions.width + parseInt(options.cropX) >= this.imageData.width ||
-      this.calculatedDimensions.height + parseInt(options.cropY) >= this.imageData.height
+      this.calculatedDimensions.width + parseInt(options.cropX) >=
+        this.imageData.width ||
+      this.calculatedDimensions.height + parseInt(options.cropY) >=
+        this.imageData.height
     ) {
-      let rectangle = (this.calculatedDimensions.width + parseInt(options.cropX)).toString() + 'x' + (this.calculatedDimensions.height + parseInt(options.cropY)).toString()
-      let original = this.imageData.width.toString() + 'x' + this.imageData.height.toString()
-      let message = 'The calculated crop rectangle is larger than (or one dimension is equal to) the original image size. Crop rectangle: ' + rectangle + ', Image size: ' + original
+      const rectangle =
+        (this.calculatedDimensions.width + parseInt(options.cropX)).toString() +
+        'x' +
+        (this.calculatedDimensions.height + parseInt(options.cropY)).toString()
+      const original =
+        this.imageData.width.toString() + 'x' + this.imageData.height.toString()
+      const message =
+        'The calculated crop rectangle is larger than (or one dimension is equal to) the original image size. Crop rectangle: ' +
+        rectangle +
+        ', Image size: ' +
+        original
 
       return Promise.reject({
         statusCode: 400,
-        message: message
+        message
       })
     }
   }
@@ -144,56 +160,31 @@ ImageHandler.prototype.checkCropRectangle = function () {
  * @param {number} width - Crop width
 * @param {number} height - Crop height
 */
-ImageHandler.prototype.extractEntropy = function (image, width, height) {
+ImageHandler.prototype.extractEntropy = function(image, width, height) {
   return new Promise((resolve, reject) => {
-    smartcrop.crop(image, {
-      width: width,
-      height: height
-    }).then(result => {
-      resolve({
-        x1: result.topCrop.x,
-        x2: result.topCrop.x + result.topCrop.width,
-        y1: result.topCrop.y,
-        y2: result.topCrop.y + result.topCrop.height
+    smartcrop
+      .crop(image, {
+        width,
+        height
+      })
+      .then(result => {
+        resolve({
+          x1: result.topCrop.x,
+          x2: result.topCrop.x + result.topCrop.width,
+          y1: result.topCrop.y,
+          y2: result.topCrop.y + result.topCrop.height
+        })
+      })
+      .catch(err => {
+        reject(err)
       })
-    }).catch(err => {
-      reject(err)
-    })
   })
 }
 
-ImageHandler.prototype.get = function () {
-  let assetPath = this.parsedUrl.asset.href
-
-  // (!) DEPRECATED
-  //
-  // Extend the options object with settings from the legacy URL syntax.
- if (this.req.__cdnLegacyURLSyntax) { - const pathParts = this.parsedUrl.cdn.pathname.split('/') - const urlSegments = pathParts.filter((segment, index) => { - if (index > 0 && segment === '') { - return true - } - - if (index < 13 || (index >= 13 && /^[0-1]$/.test(segment))) { - return Boolean(segment) - } - }) - - assetPath = pathParts.slice(urlSegments.length + 1).join('/') +ImageHandler.prototype.get = function() { + const assetPath = this.parsedUrl.asset.href - this.options = Object.assign( - {}, - this.options, - getImageOptionsFromLegacyURL(urlSegments) - ) - } else { - this.options = Object.assign( - {}, - this.options, - this.parsedUrl.cdn.query - ) - } + this.options = Object.assign({}, this.options, this.parsedUrl.cdn.query) // The image only needs processing if there are any manipulation parameters // applied. @@ -202,14 +193,13 @@ ImageHandler.prototype.get = function () { // Aborting the request if full remote URL is required and not enabled. if ( this.isExternalUrl && - ( - !config.get('images.remote.enabled', this.req.__domain) || - !config.get('images.remote.allowFullURL', this.req.__domain) - ) + (!config.get('images.remote.enabled', this.req.__domain) || + !config.get('images.remote.allowFullURL', this.req.__domain)) ) { - let err = { + const err = { statusCode: 403, - message: 'Loading images from a full remote URL is not supported by this instance of DADI CDN' + message: + 'Loading images from a full remote URL is not supported by this instance of DADI CDN' } return Promise.reject(err) @@ -230,11 +220,9 @@ ImageHandler.prototype.get = function () { } }) - this.storageHandler = this.storageFactory.create( - 'image', - assetPath, - {domain: this.req.__domain} - ) + this.storageHandler = this.storageFactory.create('image', assetPath, { + domain: this.req.__domain + }) // The cache key is formed by multiple parts which will be hashed // separately, so that they can be used as search parameters for @@ -250,138 +238,142 @@ ImageHandler.prototype.get = function () { ] const isJSONResponse = this.options.format === 'json' - return this.cache.getStream(cacheKey, { - ttl: config.get('caching.ttl', this.req.__domain) - }).then(cacheStream => { - if (cacheStream) { - this.isCached = true - - return this.cache.getMetadata(cacheKey).then(metadata => { - if (metadata && metadata.errorCode) { - this.storageHandler.notFound = true - this.contentType = metadata.contentType || 'application/json' - } + return this.cache + .getStream(cacheKey, { + ttl: config.get('caching.ttl', this.req.__domain) + }) + .then(cacheStream => { + if (cacheStream) { + this.isCached = true + + return this.cache.getMetadata(cacheKey).then(metadata => { + if (metadata && metadata.errorCode) { + this.storageHandler.notFound = true + this.contentType = metadata.contentType || 'application/json' + } - if (metadata && metadata.lastModified) { - this.storageHandler.lastModified = metadata.lastModified - } + if (metadata && metadata.lastModified) { + this.storageHandler.lastModified = metadata.lastModified + } - return help.streamToBuffer(cacheStream) - }) - } + return help.streamToBuffer(cacheStream) + }) + } - let stream = this.storageHandler.get() + const stream = this.storageHandler.get() - return stream.then(stream => { - return help.streamToBuffer(stream) - }).then(imageBuffer => { - // If the image does not need processing, we can return the buffer - // straight away. 
- if (!needsProcessing) { - return imageBuffer - } + return stream + .then(stream => { + return help.streamToBuffer(stream) + }) + .then(imageBuffer => { + // If the image does not need processing, we can return the buffer + // straight away. + if (!needsProcessing) { + return imageBuffer + } - let sharpImage = sharp(imageBuffer) + const sharpImage = sharp(imageBuffer) - return sharpImage.metadata().then(imageData => { - this.imageData = imageData + return sharpImage + .metadata() + .then(imageData => { + this.imageData = imageData - if (this.imageData.format === 'jpeg') { - this.imageData.format = 'jpg' - } + if (this.imageData.format === 'jpeg') { + this.imageData.format = 'jpg' + } - if (Buffer.isBuffer(this.imageData.exif)) { - this.exifData = exifReader(this.imageData.exif) - } + if (Buffer.isBuffer(this.imageData.exif)) { + this.exifData = exifReader(this.imageData.exif) + } - this.calculatedDimensions = this.getCalculatedDimensions({ - width: imageData.width, - height: imageData.height - }) + this.calculatedDimensions = this.getCalculatedDimensions({ + width: imageData.width, + height: imageData.height + }) - return this.process(sharpImage, imageBuffer) - }).then(result => { - return this.checkCropRectangle().then(() => { - return result - }) - }).then(result => { - // Return image info only, as JSON. - if (isJSONResponse) { - return sharpImage.toBuffer().then(sharpImageBuffer => { - return this.getImageInfo(imageBuffer, sharpImageBuffer).then(data => { - return JSON.stringify( - Object.assign({}, data, result.data) - ) + return this.process(sharpImage, imageBuffer) }) - }) - } + .then(result => { + return this.checkCropRectangle().then(() => { + return result + }) + }) + .then(result => { + // Return image info only, as JSON. + if (isJSONResponse) { + return sharpImage.toBuffer().then(sharpImageBuffer => { + return this.getImageInfo(imageBuffer, sharpImageBuffer).then( + data => { + return JSON.stringify( + Object.assign({}, data, result.data) + ) + } + ) + }) + } - return result - }) - }).then(result => { - // Cache the file if it's not already cached. - if (!this.isCached) { - let metadata = { - lastModified: this.storageHandler.getLastModified ? this.storageHandler.getLastModified() : null - } + return result + }) + }) + .then(result => { + // Cache the file if it's not already cached. + if (!this.isCached) { + let metadata = { + lastModified: this.storageHandler.getLastModified + ? this.storageHandler.getLastModified() + : null + } - if (this.storageHandler.notFound) { - metadata = { - contentType: this.getContentType(), - errorCode: 404 - } - } + if (this.storageHandler.notFound) { + metadata = { + contentType: this.getContentType(), + errorCode: 404 + } + } - // The only situation where we don't want to write the result to - // cache is when the response is a 404 and the config specifies - // that 404s should not be cached. - if ( - !this.storageHandler.notFound || - config.get('caching.cache404', this.req.__domain) - ) { - this.cache.set( - cacheKey, - result, - { - metadata, - ttl: config.get('caching.ttl', this.req.__domain) + // The only situation where we don't want to write the result to + // cache is when the response is a 404 and the config specifies + // that 404s should not be cached. 
+ if ( + !this.storageHandler.notFound || + config.get('caching.cache404', this.req.__domain) + ) { + this.cache.set(cacheKey, result, { + metadata, + ttl: config.get('caching.ttl', this.req.__domain) + }) } - ) - } - } + } - return result + return result + }) }) - }).catch(error => { - // If the response is a 404 and we want to cache 404s, we - // write the error to cache. - if ( - (error.statusCode === 404) && - config.get('caching.cache404', this.req.__domain) && - !this.isCached - ) { - this.cache.set( - cacheKey, - JSON.stringify(error), - { + .catch(error => { + // If the response is a 404 and we want to cache 404s, we + // write the error to cache. + if ( + error.statusCode === 404 && + config.get('caching.cache404', this.req.__domain) && + !this.isCached + ) { + this.cache.set(cacheKey, JSON.stringify(error), { metadata: { errorCode: error.statusCode } - } - ) - } + }) + } - return Promise.reject(error) - }) + return Promise.reject(error) + }) } -ImageHandler.prototype.getAvailablePlugins = function (files) { +ImageHandler.prototype.getAvailablePlugins = function(files) { return Object.keys(files).reduce((plugins, file) => { if (files[file].type === 'plugins') { try { - plugins.push( - require(files[file].path) - ) + plugins.push(require(files[file].path)) } catch (err) { console.log(`Plugin '${file}' failed to load:`, err) } @@ -391,7 +383,7 @@ ImageHandler.prototype.getAvailablePlugins = function (files) { }, []) } -ImageHandler.prototype.getContentType = function () { +ImageHandler.prototype.getContentType = function() { if (this.contentType) { return this.contentType } @@ -409,9 +401,7 @@ ImageHandler.prototype.getContentType = function () { this.storageHandler.notFound && config.get('notFound.images.enabled', this.req.__domain) ) { - outputFormat = path.extname( - config.get('notFound.images.path') - ).slice(1) + outputFormat = path.extname(config.get('notFound.images.path')).slice(1) } switch (outputFormat.toLowerCase()) { @@ -432,7 +422,11 @@ ImageHandler.prototype.getContentType = function () { /** * */ -ImageHandler.prototype.getCropOffsetsByGravity = function (gravity, croppedDimensions, scale) { +ImageHandler.prototype.getCropOffsetsByGravity = function( + gravity, + croppedDimensions, + scale +) { const originalWidth = this.imageData.width const originalHeight = this.imageData.height @@ -488,7 +482,7 @@ ImageHandler.prototype.getCropOffsetsByGravity = function (gravity, croppedDimen horizontalOffset = 0 } - function getMaxOfArray (numArray) { + function getMaxOfArray(numArray) { return Math.max.apply(null, numArray) } @@ -500,28 +494,29 @@ ImageHandler.prototype.getCropOffsetsByGravity = function (gravity, croppedDimen } } -ImageHandler.prototype.getCalculatedDimensions = function ({width, height}) { - let options = this.options - let dimensions = { +ImageHandler.prototype.getCalculatedDimensions = function({width, height}) { + const options = this.options + const dimensions = { width, height } let ratio = height / width - let ratioOverride = Boolean(options.ratio) && options.ratio.match(/^(\d+)-(\d+)$/) + const ratioOverride = + Boolean(options.ratio) && options.ratio.match(/^(\d+)-(\d+)$/) // Is there an explicit ratio defined? if (ratioOverride) { ratio = parseFloat(ratioOverride[2]) / parseFloat(ratioOverride[1]) // Scenario 1: Width override is defined, height override is not. 
- if ((options.width !== undefined) && (options.height === undefined)) { + if (options.width !== undefined && options.height === undefined) { dimensions.width = options.width dimensions.height = Math.ceil(options.width * ratio) - } else if ((options.width === undefined) && (options.height !== undefined)) { + } else if (options.width === undefined && options.height !== undefined) { // Scenario 2: Width override is not defined, height override is. dimensions.width = Math.ceil(options.height / ratio) dimensions.height = options.height - } else if ((options.width === undefined) && (options.height === undefined)) { + } else if (options.width === undefined && options.height === undefined) { // Scenario 3: Width and height overrides are not defined. dimensions.height = Math.ceil(dimensions.width * ratio) } else { @@ -541,7 +536,10 @@ ImageHandler.prototype.getCalculatedDimensions = function ({width, height}) { // Ensuring dimensions are within security bounds. dimensions.width = Math.min(dimensions.width, config.get('security.maxWidth')) - dimensions.height = Math.min(dimensions.height, config.get('security.maxHeight')) + dimensions.height = Math.min( + dimensions.height, + config.get('security.maxHeight') + ) if (options.devicePixelRatio && options.devicePixelRatio < 4) { dimensions.width = dimensions.width * options.devicePixelRatio @@ -555,12 +553,12 @@ ImageHandler.prototype.getCalculatedDimensions = function ({width, height}) { * Returns the filename including extension of the requested image * @returns {string} the filename of the image */ -ImageHandler.prototype.getFilename = function () { +ImageHandler.prototype.getFilename = function() { if (path.extname(this.fileName) === '') { return this.fileName + '.' + this.fileExt - } else { - return this.fileName } + + return this.fileName } /** @@ -573,13 +571,13 @@ ImageHandler.prototype.getFilename = function () { * which will use WebP if the requesting client supports it, or JPEG * otherwise). */ -ImageHandler.prototype.getFormat = function () { - let formats = (this.options.format || this.fileExt).split(',') +ImageHandler.prototype.getFormat = function() { + const formats = (this.options.format || this.fileExt).split(',') this.options.format = formats.find((format, index) => { // If this is the last format in the input string, that's // what we'll use. - if (index === (formats.length - 1)) { + if (index === formats.length - 1) { return true } @@ -588,8 +586,8 @@ ImageHandler.prototype.getFormat = function () { // if the client supports WebP, choosing it if it does, or // choosing the fallback if it doesn't. if (format === 'webp') { - let acceptHeader = (this.req.headers && this.req.headers.accept) || '' - let supportsWebP = acceptHeader.split(',').includes('image/webp') + const acceptHeader = (this.req.headers && this.req.headers.accept) || '' + const supportsWebP = acceptHeader.split(',').includes('image/webp') return supportsWebP } @@ -622,9 +620,9 @@ ImageHandler.prototype.getFormat = function () { "filter":"None", "blur":0, "strip":0, "rotate":0, "flip":0, "ratio":0, "devicePixelRatio":0 } */ -ImageHandler.prototype.getImageInfo = function (oldBuffer, newBuffer) { - let options = this.options - let data = { +ImageHandler.prototype.getImageInfo = function(oldBuffer, newBuffer) { + const options = this.options + const data = { fileName: this.fileName, cacheReference: sha1(this.fileName), quality: options.quality ? 
options.quality : 75,
@@ -640,7 +638,7 @@ ImageHandler.prototype.getImageInfo = function (oldBuffer, newBuffer) {
     ratio: options.ratio ? options.ratio : 0,
     devicePixelRatio: options.devicePixelRatio ? options.devicePixelRatio : 0
   }
-  let colourOptions = {
+  const colourOptions = {
     colorCount: options.maxColours || 64,
     quality: options.colourQuality || 1
   }
@@ -649,42 +647,52 @@ ImageHandler.prototype.getImageInfo = function (oldBuffer, newBuffer) {
     data.entropyCrop = this.entropy
   }
 
-  return getColours(oldBuffer, colourOptions).then(colours => {
-    data.format = this.imageData.format
-    data.fileSizePre = oldBuffer.byteLength
-    data.primaryColorPre = colours.primaryColour
-    data.palettePre = colours.palette
-
-    if (this.exifData.image && this.exifData.image.XResolution && this.exifData.image.YResolution) {
-      data.density = {
-        width: this.exifData.image.XResolution,
-        height: this.exifData.image.YResolution,
-        unit: (this.exifData.image.ResolutionUnit ? (this.exifData.image.ResolutionUnit === 2 ? 'dpi' : '') : '')
+  return getColours(oldBuffer, colourOptions)
+    .then(colours => {
+      data.format = this.imageData.format
+      data.fileSizePre = oldBuffer.byteLength
+      data.primaryColorPre = colours.primaryColour
+      data.palettePre = colours.palette
+
+      if (
+        this.exifData.image &&
+        this.exifData.image.XResolution &&
+        this.exifData.image.YResolution
+      ) {
+        data.density = {
+          width: this.exifData.image.XResolution,
+          height: this.exifData.image.YResolution,
+          unit: this.exifData.image.ResolutionUnit
+            ? this.exifData.image.ResolutionUnit === 2
+              ? 'dpi'
+              : ''
+            : ''
+        }
       }
-    }
-
-    return data
-  }).then(data => {
-    return getColours(newBuffer, colourOptions).then(colours => {
-      data.fileSizePost = newBuffer.byteLength
-      data.primaryColorPost = colours.primaryColour
-      data.palettePost = colours.palette
 
       return data
     })
-  })
+    .then(data => {
+      return getColours(newBuffer, colourOptions).then(colours => {
+        data.fileSizePost = newBuffer.byteLength
+        data.primaryColorPost = colours.primaryColour
+        data.palettePost = colours.palette
+
+        return data
+      })
+    })
 }
 
-ImageHandler.prototype.getLastModified = function () {
+ImageHandler.prototype.getLastModified = function() {
   if (!this.storageHandler || !this.storageHandler.getLastModified) return null
 
   return this.storageHandler.getLastModified()
 }
 
-ImageHandler.prototype.parseUrl = function (url) {
-  let parsedUrl = urlParser.parse(url, true)
-  let searchNodes = (parsedUrl.search && parsedUrl.search.split('?')) || []
-  let cdnUrl = `${parsedUrl.pathname}?${searchNodes.slice(-1)}`
+ImageHandler.prototype.parseUrl = function(url) {
+  const parsedUrl = urlParser.parse(url, true)
+  const searchNodes = (parsedUrl.search && parsedUrl.search.split('?')) || []
+  const cdnUrl = `${parsedUrl.pathname}?${searchNodes.slice(-1)}`
   let assetUrl = parsedUrl.pathname
 
   if (parsedUrl.protocol && parsedUrl.host) {
@@ -708,15 +716,13 @@ ImageHandler.prototype.parseUrl = function (url) {
   }
 }
 
-ImageHandler.prototype.process = function (sharpImage, imageBuffer) {
-  let options = this.options
+ImageHandler.prototype.process = function(sharpImage, imageBuffer) {
+  const options = this.options
 
   // Default values for resize style
   if (!options.resizeStyle) {
     if (options.width && options.height) {
-      options.resizeStyle = options.gravity
-        ? 'aspectfill'
-        : 'entropy'
+      options.resizeStyle = options.gravity ?
'aspectfill' : 'entropy' } else { options.resizeStyle = 'aspectfit' } @@ -730,9 +736,10 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { } const {width, height} = this.calculatedDimensions - const shouldExtractEntropy = ((options.resizeStyle === 'entropy') && width && height) - ? this.extractEntropy(imageBuffer, width, height) - : false + const shouldExtractEntropy = + options.resizeStyle === 'entropy' && width && height + ? this.extractEntropy(imageBuffer, width, height) + : false return Promise.resolve(shouldExtractEntropy).then(entropy => { this.entropy = entropy @@ -744,7 +751,12 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { centreSampling: config.get('engines.sharp.centreSampling') } - if (width && height && typeof options.cropX !== 'undefined' && typeof options.cropY !== 'undefined') { + if ( + width && + height && + typeof options.cropX !== 'undefined' && + typeof options.cropY !== 'undefined' + ) { sharpImage.extract({ left: parseInt(options.cropX), top: parseInt(options.cropY), @@ -757,19 +769,30 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { Aspect Fit: Will size your image until the whole image fits within your area. You are left with the extra space on top and bottom. */ - case 'aspectfit': - const size = fit(this.imageData.width, this.imageData.height, width, height) + case 'aspectfit': { + const size = fit( + this.imageData.width, + this.imageData.height, + width, + height + ) - sharpImage = sharpImage.resize(parseInt(size.width), parseInt(size.height), resizeOptions) + sharpImage = sharpImage.resize( + parseInt(size.width), + parseInt(size.height), + resizeOptions + ) break + } + /* Aspect Fill: Will size your image proportionally until the whole area is full of your image. Your image is clipped. It will size proportionally to make sure there is no blank space left in your area. */ - case 'aspectfill': - const scaleWidth = (width / this.imageData.width) - const scaleHeight = (height / this.imageData.height) + case 'aspectfill': { + const scaleWidth = width / this.imageData.width + const scaleHeight = height / this.imageData.height const scale = Math.max(scaleWidth, scaleHeight) const crops = this.getCropOffsetsByGravity( options.gravity, @@ -793,7 +816,8 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { // Only crop if the aspect ratio is not the same if ( - (width / height) !== (this.imageData.width / this.imageData.height) + width / height !== + this.imageData.width / this.imageData.height ) { sharpImage.extract({ left: crops.x1, @@ -804,25 +828,28 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { } break + } /* Fill: Will size your image to the exact dimensions provided. Aspect ratio will _not_ be preserved. */ - case 'fill': + case 'fill': { resizeOptions.fit = 'fill' - sharpImage = sharpImage - .resize(width, height, resizeOptions) + sharpImage = sharpImage.resize(width, height, resizeOptions) break + } /* Crop: Will crop the image using the coordinates provided. If dimensions are provided, the resulting image will also be resized accordingly. 
*/
-      case 'crop':
+      case 'crop': {
         if (options.crop) {
-          let coords = options.crop.split(',').map(coord => parseInt(coord))
+          const coords = options.crop
+            .split(',')
+            .map(coord => parseInt(coord))
 
           if (coords.length === 2) {
             coords.push(height - coords[0])
@@ -835,6 +862,7 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) {
             width: coords[3] - coords[1],
             height: coords[2] - coords[0]
           }
+
           sharpImage.extract(cropDimensions)
 
           // resize if options.width or options.height are explicitly set
@@ -844,19 +872,39 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) {
           }
 
           if (options.devicePixelRatio && options.devicePixelRatio < 4) {
-            const adjustedWidth = parseFloat(options.width) * parseFloat(options.devicePixelRatio)
-            const adjustedHeight = parseFloat(options.height) * parseFloat(options.devicePixelRatio)
-
-            sharpImage.resize(adjustedWidth || undefined, adjustedHeight || undefined, resizeOptions)
+            const adjustedWidth =
+              parseFloat(options.width) *
+              parseFloat(options.devicePixelRatio)
+            const adjustedHeight =
+              parseFloat(options.height) *
+              parseFloat(options.devicePixelRatio)
+
+            sharpImage.resize(
+              adjustedWidth || undefined,
+              adjustedHeight || undefined,
+              resizeOptions
+            )
           } else {
-            sharpImage.resize(options.width, options.height, resizeOptions)
+            sharpImage.resize(
+              options.width,
+              options.height,
+              resizeOptions
+            )
          }
        } else {
          if (options.devicePixelRatio && options.devicePixelRatio < 4) {
-            const adjustedWidth = parseFloat(cropDimensions.width) * parseFloat(options.devicePixelRatio)
-            const adjustedHeight = parseFloat(cropDimensions.height) * parseFloat(options.devicePixelRatio)
-
-            sharpImage.resize(adjustedWidth || undefined, adjustedHeight || undefined, resizeOptions)
+            const adjustedWidth =
+              parseFloat(cropDimensions.width) *
+              parseFloat(options.devicePixelRatio)
+            const adjustedHeight =
+              parseFloat(cropDimensions.height) *
+              parseFloat(options.devicePixelRatio)
+
+            sharpImage.resize(
+              adjustedWidth || undefined,
+              adjustedHeight || undefined,
+              resizeOptions
+            )
          }
        }
      } else {
@@ -867,19 +915,20 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) {
        sharpImage.extract({
          left: Math.round(excessWidth / 2),
          top: Math.round(excessHeight / 2),
-          width: width,
-          height: height
+          width,
+          height
        })
      }
 
      break
+      }
 
      /*
      Entropy: Will crop the image using the dimensions provided.
      The crop coordinates will be determined by analysing the
      image entropy using smartcrop.
      */
-      case 'entropy':
+      case 'entropy': {
        if (entropy) {
          sharpImage.extract({
            left: entropy.x1,
@@ -892,6 +941,7 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) {
        }
 
        break
+      }
    }
  } else if (width && !height) {
    sharpImage = sharpImage.resize(width, null, resizeOptions)
@@ -902,20 +952,23 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) {
 
  // @param {String} flip - flip the image on the x axis ('x'), y axis ('y') or both ('xy')
  switch (options.flip) {
-    case 'x':
+    case 'x': {
      sharpImage.flop()
      break
+    }
 
-    case 'y':
+    case 'y': {
      sharpImage.flip()
      break
+    }
 
-    case 'xy':
+    case 'xy': {
      sharpImage.flip().flop()
      break
+    }
  }
 
  // @param {Number} angle - angle of rotation, must be a multiple of 90
@@ -926,28 +979,35 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) {
 
  // Image format and parameters
  const format = (this.options.format === 'json'
    ?
this.imageData.format - : this.options.format).toLowerCase() + : this.options.format + ).toLowerCase() let outputFn - let outputOptions = {} + const outputOptions = {} switch (format) { case 'gif': case 'jpg': - case 'jpeg': + case 'jpeg': { outputFn = 'jpeg' outputOptions.quality = parseInt(options.quality) break + } - case 'png': + case 'png': { outputFn = 'png' // Map options.quality inversely to a compression level between 1 and 9 // Ignore compressionLevel=0 since this results in much larger file sizes - let compressionLevel = parseInt((options.quality * -0.09) + 9) - outputOptions.compressionLevel = Math.max(Math.min(compressionLevel, 9), 1) + const compressionLevel = parseInt(options.quality * -0.09 + 9) + + outputOptions.compressionLevel = Math.max( + Math.min(compressionLevel, 9), + 1 + ) break + } case 'webp': outputFn = 'webp' @@ -961,7 +1021,7 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { } try { - let jsonData = {} + const jsonData = {} let pluginQueue = Promise.resolve(null) sharpImage = sharpImage[outputFn](outputOptions) @@ -976,10 +1036,15 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { get: this.cache.getStream, set: this.cache.set }, - imageInfo: Object.assign({}, this.imageData, this.calculatedDimensions, { - naturalWidth: this.imageData.width, - naturalHeight: this.imageData.height - }), + imageInfo: Object.assign( + {}, + this.imageData, + this.calculatedDimensions, + { + naturalWidth: this.imageData.width, + naturalHeight: this.imageData.height + } + ), jsonData, options: this.options, processor: sharpImage, @@ -993,9 +1058,7 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { pluginQueue.then(pluginStream => { if (pluginStream) { - return resolve( - help.streamToBuffer(pluginStream) - ) + return resolve(help.streamToBuffer(pluginStream)) } sharpImage.toBuffer({}, (err, buffer, info) => { @@ -1007,7 +1070,10 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { processBuffer = this.processGif(buffer) } - if (options.progressive === 'true' && (format === 'jpeg' || format === 'jpg')) { + if ( + options.progressive === 'true' && + (format === 'jpeg' || format === 'jpg') + ) { processBuffer = this.progressiveJpeg(buffer) } @@ -1034,15 +1100,18 @@ ImageHandler.prototype.process = function (sharpImage, imageBuffer) { * processor after applying image manipulations * @returns {Buffer} a GIF encoded buffer */ -ImageHandler.prototype.processGif = function (buffer) { +ImageHandler.prototype.processGif = function(buffer) { return Jimp.read(buffer).then(image => { - let bitmap = new BitmapImage(image.bitmap) + const bitmap = new BitmapImage(image.bitmap) GifUtil.quantizeDekker(bitmap) - let frame = new GifFrame(bitmap) + const frame = new GifFrame(bitmap) - let tmpGifFile = `${path.join(tmpDirectory, sha1(this.parsedUrl.original.path))}.gif` + const tmpGifFile = `${path.join( + tmpDirectory, + sha1(this.parsedUrl.original.path) + )}.gif` return GifUtil.write(tmpGifFile, [frame]).then(gif => { return fs.unlink(tmpGifFile).then(() => { @@ -1059,15 +1128,13 @@ ImageHandler.prototype.processGif = function (buffer) { * processor after applying image manipulations * @returns {Buffer} a progressive JPEG encoded buffer */ -ImageHandler.prototype.progressiveJpeg = function (buffer) { +ImageHandler.prototype.progressiveJpeg = function(buffer) { return imagemin.buffer(buffer, { - plugins: [ - imageminJpegtran({progressive: true}) - ] + plugins: [imageminJpegtran({progressive: true})] }) } 
-ImageHandler.prototype.sanitiseOptions = function (options) { +ImageHandler.prototype.sanitiseOptions = function(options) { // check the options for aliases // e.g. "dpr" === "devicePixelRatio" @@ -1084,14 +1151,18 @@ ImageHandler.prototype.sanitiseOptions = function (options) { }) Object.keys(options).forEach(key => { - let settings = IMAGE_PARAMETERS.filter(setting => { + const settings = IMAGE_PARAMETERS.filter(setting => { return setting.name === key || setting.aliases.includes(key) }) if (settings && settings[0]) { let value = options[key] - if (options[key] !== '0' || settings[0].allowZero || settings[0].default) { + if ( + options[key] !== '0' || + settings[0].allowZero || + settings[0].default + ) { if (options[key] !== '0' || settings[0].allowZero) { if (settings[0].lowercase) { value = value.toLowerCase() @@ -1101,7 +1172,10 @@ ImageHandler.prototype.sanitiseOptions = function (options) { if (settings[0].minimumValue && value < settings[0].minimumValue) { value = settings[0].minimumValue - } else if (settings[0].maximumValue && value > settings[0].maximumValue) { + } else if ( + settings[0].maximumValue && + value > settings[0].maximumValue + ) { value = settings[0].maximumValue } @@ -1116,7 +1190,7 @@ ImageHandler.prototype.sanitiseOptions = function (options) { }) // ensure we have defaults for options not specified - let defaults = IMAGE_PARAMETERS.filter(setting => { + const defaults = IMAGE_PARAMETERS.filter(setting => { return setting.default }) @@ -1132,13 +1206,13 @@ ImageHandler.prototype.sanitiseOptions = function (options) { return imageOptions } -ImageHandler.prototype.setBaseUrl = function (baseUrl) { +ImageHandler.prototype.setBaseUrl = function(baseUrl) { this.parsedUrl = this.parseUrl(baseUrl) } -function getColours (buffer, options) { +function getColours(buffer, options) { return new Promise((resolve, reject) => { - let v = new Vibrant(buffer, options) + const v = new Vibrant(buffer, options) v.getSwatches((err, swatches) => { if (err) { @@ -1148,10 +1222,11 @@ function getColours (buffer, options) { // remove empty swatches and sort by population descending swatches = Object.values(swatches).sort((a, b) => { if (a.population === b.population) return 0 + return a.population > b.population ? -1 : 1 }) - let colourData = { + const colourData = { primaryColour: swatches[0].getHex(), palette: { rgb: [], @@ -1171,40 +1246,6 @@ function getColours (buffer, options) { }) } -/** - * Parses the request URL and returns an options object - * @param {Array} optionsArray - the options specified in the request URL - * @returns {object} - */ -function getImageOptionsFromLegacyURL (optionsArray) { - let superLegacyFormatOffset = optionsArray.length === 13 - ? 0 - : 4 - - let options = { - format: optionsArray[0], - quality: optionsArray[1], - trim: optionsArray[2], - trimFuzz: optionsArray[3], - width: optionsArray[4], - height: optionsArray[5], - cropX: (superLegacyFormatOffset === 0) ? '0' : optionsArray[6], - cropY: (superLegacyFormatOffset === 0) ? '0' : optionsArray[7], - ratio: (superLegacyFormatOffset === 0) ? '0' : optionsArray[8], - devicePixelRatio: (superLegacyFormatOffset === 0) ? 
'0' : optionsArray[9], - resizeStyle: optionsArray[6 + superLegacyFormatOffset], - gravity: optionsArray[7 + superLegacyFormatOffset], - filter: optionsArray[8 + superLegacyFormatOffset], - blur: optionsArray[9 + superLegacyFormatOffset], - strip: optionsArray[10 + superLegacyFormatOffset], - rotate: optionsArray[11 + superLegacyFormatOffset], - flip: optionsArray[12 + superLegacyFormatOffset], - progressive: optionsArray[13 + superLegacyFormatOffset] - } - - return options -} - module.exports = ImageHandler module.exports.ImageHandler = ImageHandler module.exports.parameters = IMAGE_PARAMETERS diff --git a/dadi/lib/handlers/js.js b/dadi/lib/handlers/js.js index 3d2ba984..47aed091 100644 --- a/dadi/lib/handlers/js.js +++ b/dadi/lib/handlers/js.js @@ -9,7 +9,8 @@ const url = require('url') const userAgent = require('useragent') const StorageFactory = require('./../storage/factory') -const DEFAULT_UA = 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)' +const DEFAULT_UA = + 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)' /** * Creates a new JSHandler instance. @@ -17,16 +18,10 @@ const DEFAULT_UA = 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4 * @param {String} format The extension of the file being handled * @param {Object} req The request instance */ -const JSHandler = function (format, req, { - options = {} -} = {}) { - this.legacyURLOverrides = this.getLegacyURLOverrides(req.url) - this.url = url.parse( - this.legacyURLOverrides.url || req.url, - true - ) +const JSHandler = function(format, req, {options = {}} = {}) { + this.url = url.parse(req.url, true) - const mergedOptions = Object.assign({}, this.url.query, this.legacyURLOverrides, options) + const mergedOptions = Object.assign({}, this.url.query, options) // Normalising boolean values (e.g. true vs. 1 vs. '1'). this.options = Object.keys(mergedOptions).reduce((result, key) => { @@ -56,7 +51,9 @@ const JSHandler = function (format, req, { return result }, {}) - this.isExternalUrl = this.url.pathname.indexOf('http://') > 0 || this.url.pathname.indexOf('https://') > 0 + this.isExternalUrl = + this.url.pathname.indexOf('http://') > 0 || + this.url.pathname.indexOf('https://') > 0 this.cache = Cache() this.cacheKey = [req.__domain, this.url.href] @@ -74,58 +71,63 @@ const JSHandler = function (format, req, { * * @return {Promise} A stream with the file */ -JSHandler.prototype.get = function () { +JSHandler.prototype.get = function() { if (this.isTransformEnabled()) { this.cacheKey.push(this.getBabelPluginsHash()) } - return this.cache.getStream(this.cacheKey, { - ttl: config.get('caching.ttl', this.req.__domain) - }).then(stream => { - if (stream) { - this.isCached = true - - return stream - } + return this.cache + .getStream(this.cacheKey, { + ttl: config.get('caching.ttl', this.req.__domain) + }) + .then(stream => { + if (stream) { + this.isCached = true - this.storageHandler = this.storageFactory.create( - 'asset', - this.url.pathname.slice(1), - {domain: this.req.__domain} - ) - - // Aborting the request if full remote URL is required and not enabled. 
- if ( - this.isExternalUrl && - ( - !config.get('assets.remote.enabled', this.req.__domain) || - !config.get('assets.remote.allowFullURL', this.req.__domain) - ) - ) { - let err = { - statusCode: 403, - message: 'Loading assets from a full remote URL is not supported by this instance of DADI CDN' + return stream } - return Promise.reject(err) - } - - return this.storageHandler.get().then(stream => { - const {compress, transform} = this.options + this.storageHandler = this.storageFactory.create( + 'asset', + this.url.pathname.slice(1), + {domain: this.req.__domain} + ) - if (compress === true || transform === true) { - return this.transform(stream) + // Aborting the request if full remote URL is required and not enabled. + if ( + this.isExternalUrl && + (!config.get('assets.remote.enabled', this.req.__domain) || + !config.get('assets.remote.allowFullURL', this.req.__domain)) + ) { + const err = { + statusCode: 403, + message: + 'Loading assets from a full remote URL is not supported by this instance of DADI CDN' + } + + return Promise.reject(err) } - return stream - }).then(stream => { - return this.cache.cacheFile(stream, this.cacheKey, { - ttl: config.get('caching.ttl', this.req.__domain) - }) + return this.storageHandler + .get() + .then(stream => { + const {compress, transform} = this.options + + if (compress === true || transform === true) { + return this.transform(stream) + } + + return stream + }) + .then(stream => { + return this.cache.cacheFile(stream, this.cacheKey, { + ttl: config.get('caching.ttl', this.req.__domain) + }) + }) + }) + .then(stream => { + return help.streamToBuffer(stream) }) - }).then(stream => { - return help.streamToBuffer(stream) - }) } /** @@ -133,10 +135,10 @@ JSHandler.prototype.get = function () { * * @return {Object} Babel configuration object */ -JSHandler.prototype.getBabelConfig = function () { +JSHandler.prototype.getBabelConfig = function() { const query = this.url.query - let options = { + const options = { babelrc: false, presets: [] } @@ -145,7 +147,7 @@ JSHandler.prototype.getBabelConfig = function () { options.presets.push(['env', this.getBabelEnvOptions(this.userAgent)]) } - if (this.legacyURLOverrides.compress || query.compress === '1' || this.options.compress) { + if (query.compress === '1' || this.options.compress) { options.presets.push('minify') } @@ -158,7 +160,7 @@ JSHandler.prototype.getBabelConfig = function () { * * @return {Object} Babel targets object */ -JSHandler.prototype.getBabelEnvOptions = function (userAgentString) { +JSHandler.prototype.getBabelEnvOptions = function(userAgentString) { const agent = userAgent.parse(userAgentString).toAgent() // If the agent is "Other", it means we don't have a valid browser @@ -176,9 +178,8 @@ JSHandler.prototype.getBabelEnvOptions = function (userAgentString) { return indexes }, []) - const sanitisedAgent = dotIndexes.length <= 1 - ? agent - : agent.slice(0, dotIndexes[1]) + const sanitisedAgent = + dotIndexes.length <= 1 ? 
agent : agent.slice(0, dotIndexes[1]) return { targets: { @@ -193,9 +194,11 @@ JSHandler.prototype.getBabelEnvOptions = function (userAgentString) { * * @return {String} A hash of all the plugins */ -JSHandler.prototype.getBabelPluginsHash = function () { +JSHandler.prototype.getBabelPluginsHash = function() { const babelOptions = this.getBabelEnvOptions(this.userAgent) - const functions = babelPresetEnv(null, babelOptions).plugins.map(plugin => plugin[0]) + const functions = babelPresetEnv(null, babelOptions).plugins.map( + plugin => plugin[0] + ) const hashSource = functions.reduce((result, functionSource) => { if (typeof functionSource === 'function') { return result + functionSource.toString() @@ -215,7 +218,7 @@ JSHandler.prototype.getBabelPluginsHash = function () { * * @return {String} The content type */ -JSHandler.prototype.getContentType = function () { +JSHandler.prototype.getContentType = function() { return 'application/javascript' } @@ -224,7 +227,7 @@ JSHandler.prototype.getContentType = function () { * * @return {String} The filename */ -JSHandler.prototype.getFilename = function () { +JSHandler.prototype.getFilename = function() { return this.url.pathname.split('/').slice(-1)[0] } @@ -233,53 +236,30 @@ JSHandler.prototype.getFilename = function () { * * @return {Number} The last modified timestamp */ -JSHandler.prototype.getLastModified = function () { +JSHandler.prototype.getLastModified = function() { if (!this.storageHandler || !this.storageHandler.getLastModified) return null return this.storageHandler.getLastModified() } -/** - * Looks for parameters in the URL using legacy syntax - * (e.g. /js/0/file.js) - * - * @param {String} url The URL - * @return {Object} A list of parameters and their value - */ -JSHandler.prototype.getLegacyURLOverrides = function (url) { - let overrides = {} - - const legacyURLMatch = url.match(/\/js(\/(\d))?/) - - if (legacyURLMatch) { - if (legacyURLMatch[2]) { - overrides.compress = legacyURLMatch[2] === '1' - } - - overrides.url = url.slice(legacyURLMatch[0].length) - } - - return overrides -} - /** * Returns true if transforms are enabled for this request. * * @return {Boolean} */ -JSHandler.prototype.isTransformEnabled = function () { +JSHandler.prototype.isTransformEnabled = function() { // Currently behind a feature flag. 
if (!config.get('experimental.jsTranspiling', this.req.__domain)) { return false } - return (this.url.query.transform || (this.options.transform === true)) + return this.url.query.transform || this.options.transform === true } /** * Sets the base URL (excluding any recipe or route nodes) */ -JSHandler.prototype.setBaseUrl = function (baseUrl) { +JSHandler.prototype.setBaseUrl = function(baseUrl) { this.url = url.parse(baseUrl, true) } @@ -289,7 +269,7 @@ JSHandler.prototype.setBaseUrl = function (baseUrl) { * @param {Stream} stream The input stream * @return {Promise} */ -JSHandler.prototype.transform = function (stream) { +JSHandler.prototype.transform = function(stream) { let inputCode = '' return new Promise((resolve, reject) => { @@ -300,7 +280,8 @@ JSHandler.prototype.transform = function (stream) { const outputStream = new Readable() try { - const outputCode = babel.transform(inputCode, this.getBabelConfig()).code + const outputCode = babel.transform(inputCode, this.getBabelConfig()) + .code outputStream.push(outputCode) } catch (err) { @@ -314,7 +295,7 @@ JSHandler.prototype.transform = function (stream) { }) } -module.exports = function (format, request, handlerData) { +module.exports = function(format, request, handlerData) { return new JSHandler(format, request, handlerData) } diff --git a/dadi/lib/handlers/plugin.js b/dadi/lib/handlers/plugin.js index 5b5f24fe..0c899139 100644 --- a/dadi/lib/handlers/plugin.js +++ b/dadi/lib/handlers/plugin.js @@ -2,14 +2,14 @@ const Cache = require('./../cache')() const help = require('./../help') const StorageFactory = require('./../storage/factory') -const Plugin = function (req, plugin) { +const Plugin = function(req, plugin) { this.headers = {} this.plugin = plugin this.req = req this.storageFactory = Object.create(StorageFactory) } -Plugin.prototype.get = function () { +Plugin.prototype.get = function() { try { return Promise.resolve( this.plugin({ @@ -25,7 +25,7 @@ Plugin.prototype.get = function () { return help.streamToBuffer(stream) }) } catch (err) { - let error = new Error(err) + const error = new Error(err) error.message = 'A plugin has thrown a fatal error.' 
error.statusCode = 500 @@ -34,15 +34,15 @@ Plugin.prototype.get = function () { } } -Plugin.prototype.getContentType = function () { +Plugin.prototype.getContentType = function() { return this.headers['content-type'] } -Plugin.prototype.getHeader = function (header) { +Plugin.prototype.getHeader = function(header) { return this.headers[header.toLowerCase()] } -Plugin.prototype.setHeader = function (header, value) { +Plugin.prototype.setHeader = function(header, value) { this.headers[header.toLowerCase()] = value } diff --git a/dadi/lib/help.js b/dadi/lib/help.js index 39fec2d3..f347b0b4 100755 --- a/dadi/lib/help.js +++ b/dadi/lib/help.js @@ -1,9 +1,10 @@ const cache = require('./cache') const concat = require('concat-stream') -module.exports.clearCache = function (pathname, callback) { - cache().delete(pathname, (err) => { +module.exports.clearCache = function(pathname, callback) { + cache().delete(pathname, err => { if (err) console.log(err) + return callback(null) }) } @@ -11,13 +12,13 @@ module.exports.clearCache = function (pathname, callback) { /** * Display Unauthorized Error */ -module.exports.displayUnauthorizedError = function (res) { +module.exports.displayUnauthorizedError = function(res) { res.statusCode = 401 res.setHeader('Cache-Control', 'private, no-cache, no-store, must-revalidate') res.setHeader('Content-Type', 'application/json') res.setHeader('Expires', '-1') - let errorMsg = { + const errorMsg = { Error: 'HTTP 401 Unauthorized' } @@ -25,7 +26,7 @@ module.exports.displayUnauthorizedError = function (res) { } // helper that sends json response -module.exports.sendBackJSON = function (successCode, results, res) { +module.exports.sendBackJSON = function(successCode, results, res) { res.statusCode = successCode let resBody @@ -52,9 +53,9 @@ module.exports.sendBackJSON = function (successCode, results, res) { * * @param {stream} stream */ -module.exports.streamToBuffer = function (stream) { +module.exports.streamToBuffer = function(stream) { return new Promise((resolve, reject) => { - let concatStream = concat(buffer => { + const concatStream = concat(buffer => { return resolve(buffer) }) diff --git a/dadi/lib/index.js b/dadi/lib/index.js index 31b0e441..ecba62ad 100755 --- a/dadi/lib/index.js +++ b/dadi/lib/index.js @@ -40,10 +40,7 @@ const devConfigPath = path.join( fs.stat(devConfigPath, (err, stats) => { if (err && err.code === 'ENOENT') { - fs.writeFileSync( - devConfigPath, - fs.readFileSync(devConfigPath + '.sample') - ) + fs.writeFileSync(devConfigPath, fs.readFileSync(devConfigPath + '.sample')) } }) @@ -52,7 +49,7 @@ const Controller = require(path.join(__dirname, '/controller')) const configPath = path.resolve(path.join(__dirname, '/../../config')) const config = require(configPath) -const Server = function () { +const Server = function() { this.crons = {} } @@ -63,69 +60,70 @@ const Server = function () { * @param {Function} listener * @return {http.Server} */ -Server.prototype.create = function (listener) { - let protocol = config.get('server.protocol') +Server.prototype.create = function(listener) { + const protocol = config.get('server.protocol') if (protocol === 'http') { return http.createServer(listener) - } else { - let readFileSyncSafe = (path) => { - try { - return fs.readFileSync(path) - } catch (ex) { - console.log(ex) - } + } - return null + const readFileSyncSafe = path => { + try { + return fs.readFileSync(path) + } catch (ex) { + console.log(ex) } - let passphrase = config.get('server.sslPassphrase') - let caPath = 
config.get('server.sslIntermediateCertificatePath') - let caPaths = config.get('server.sslIntermediateCertificatePaths') - let serverOptions = { - key: readFileSyncSafe(config.get('server.sslPrivateKeyPath')), - cert: readFileSyncSafe(config.get('server.sslCertificatePath')) - } + return null + } - if (passphrase && passphrase.length >= 4) { - serverOptions.passphrase = passphrase - } + const passphrase = config.get('server.sslPassphrase') + const caPath = config.get('server.sslIntermediateCertificatePath') + const caPaths = config.get('server.sslIntermediateCertificatePaths') + const serverOptions = { + key: readFileSyncSafe(config.get('server.sslPrivateKeyPath')), + cert: readFileSyncSafe(config.get('server.sslCertificatePath')) + } - if (caPaths && caPaths.length > 0) { - serverOptions.ca = [] - caPaths.forEach(path => { - let data = readFileSyncSafe(path) + if (passphrase && passphrase.length >= 4) { + serverOptions.passphrase = passphrase + } - if (data) { - serverOptions.ca.push(data) - } - }) - } else if (caPath && caPath.length > 0) { - serverOptions.ca = readFileSyncSafe(caPath) - } + if (caPaths && caPaths.length > 0) { + serverOptions.ca = [] + caPaths.forEach(path => { + const data = readFileSyncSafe(path) - // We need to catch any errors resulting from bad parameters, - // such as incorrect passphrase or no passphrase provided. - try { - if (config.get('server.enableHTTP2')) { - serverOptions['allowHTTP1'] = true // fallback to http1 - return http2.createSecureServer(serverOptions, listener) - } else { - return https.createServer(serverOptions, listener) + if (data) { + serverOptions.ca.push(data) } - } catch (ex) { - let exPrefix = 'error starting https server: ' + }) + } else if (caPath && caPath.length > 0) { + serverOptions.ca = readFileSyncSafe(caPath) + } - switch (ex.message) { - case 'error:06065064:digital envelope routines:EVP_DecryptFinal_ex:bad decrypt': - throw new Error(exPrefix + 'incorrect ssl passphrase') + // We need to catch any errors resulting from bad parameters, + // such as incorrect passphrase or no passphrase provided. + try { + if (config.get('server.enableHTTP2')) { + serverOptions['allowHTTP1'] = true // fallback to http1 - case 'error:0906A068:PEM routines:PEM_do_header:bad password read': - throw new Error(exPrefix + 'required ssl passphrase not provided') + return http2.createSecureServer(serverOptions, listener) + } - default: - throw new Error(exPrefix + ex.message) - } + return https.createServer(serverOptions, listener) + } catch (ex) { + const exPrefix = 'error starting https server: ' + + switch (ex.message) { + case 'error:06065064:digital envelope routines:EVP_DecryptFinal_ex:bad decrypt': + throw new Error(exPrefix + 'incorrect ssl passphrase') + + case 'error:0906A068:PEM routines:PEM_do_header:bad password read': + throw new Error(exPrefix + 'required ssl passphrase not provided') + + default: + throw new Error(exPrefix + ex.message) } } } @@ -133,7 +131,7 @@ Server.prototype.create = function (listener) { /** * Handler function for when the server is listening for requests. */ -Server.prototype.onListening = function () { +Server.prototype.onListening = function() { /* istanbul ignore next */ if (config.get('env') !== 'test') { dadiBoot.started({ @@ -158,16 +156,18 @@ Server.prototype.onListening = function () { * Handler function for when the HTTP->HTTPS redirect server * is listening for requests. 
*/ -Server.prototype.onRedirectListening = function () { - let address = this.address() - let env = config.get('env') +Server.prototype.onRedirectListening = function() { + const address = this.address() + const env = config.get('env') /* istanbul ignore next */ if (env !== 'test') { let startText = '\n ----------------------------\n' + startText += ' Started HTTP -> HTTPS Redirect\n' startText += ' ----------------------------\n' - startText += ' Server: '.green + address.address + ':' + address.port + '\n' + startText += + ' Server: '.green + address.address + ':' + address.port + '\n' startText += ' ----------------------------\n' console.log(startText) @@ -178,16 +178,18 @@ Server.prototype.onRedirectListening = function () { * Handler function for when the status endpoint server is * listening for requests. */ -Server.prototype.onStatusListening = function () { - var address = this.address() - let env = config.get('env') +Server.prototype.onStatusListening = function() { + const address = this.address() + const env = config.get('env') /* istanbul ignore next */ if (env !== 'test') { let startText = '\n ----------------------------\n' + startText += ' Started standalone status endpoint\n' startText += ' ----------------------------\n' - startText += ' Server: '.green + address.address + ':' + address.port + '\n' + startText += + ' Server: '.green + address.address + ':' + address.port + '\n' startText += ' ----------------------------\n' console.log(startText) @@ -200,7 +202,7 @@ Server.prototype.onStatusListening = function () { * * @param {Function} done - callback function */ -Server.prototype.start = function (done) { +Server.prototype.start = function(done) { router.use((req, res, next) => { const FAVICON_REGEX = /\/(favicon|(apple-)?touch-icon(-i(phone|pad))?(-\d{2,}x\d{2,})?(-precomposed)?)\.(jpe?g|png|ico|gif)$/i @@ -215,10 +217,14 @@ Server.prototype.start = function (done) { router.use(bodyParser.json({limit: '50mb'})) router.use((err, req, res, next) => { if (err) { - return help.sendBackJSON(400, { - success: false, - errors: ['Invalid JSON Syntax'] - }, res) + return help.sendBackJSON( + 400, + { + success: false, + errors: ['Invalid JSON Syntax'] + }, + res + ) } next() @@ -227,19 +233,19 @@ Server.prototype.start = function (done) { // Ensure that middleware runs in the correct order, // especially when running an integrated status page. 
if (config.get('status.standalone')) { - let statusRouter = Router() + const statusRouter = Router() config.get('status.requireAuthentication') && auth(statusRouter) statusRouter.use('/api/status', this.status) - let statusApp = http.createServer(function (req, res) { + const statusApp = http.createServer(function(req, res) { res.setHeader('Access-Control-Allow-Origin', '*') res.setHeader('Cache-Control', 'no-cache') statusRouter(req, res, finalhandler(req, res)) }) - let statusServer = statusApp.listen(config.get('status.port')) + const statusServer = statusApp.listen(config.get('status.port')) statusServer.on('listening', this.onStatusListening) @@ -260,13 +266,13 @@ Server.prototype.start = function (done) { let redirectInstance let redirectServer - let redirectPort = config.get('server.redirectPort') + const redirectPort = config.get('server.redirectPort') if (redirectPort > 0) { redirectInstance = http.createServer((req, res) => { - let port = config.get('server.port') - let hostname = req.headers.host.split(':')[0] - let location = `https://${hostname}:${port}${req.url}` + const port = config.get('server.port') + const hostname = req.headers.host.split(':')[0] + const location = `https://${hostname}:${port}${req.url}` res.setHeader('Location', location) res.statusCode = 301 @@ -277,18 +283,22 @@ Server.prototype.start = function (done) { redirectServer.on('listening', this.onRedirectListening) } - let app = this.create((req, res) => { + const app = this.create((req, res) => { if (config.get('multiDomain.enabled')) { - let domain = req.headers.host.split(':')[0] + const domain = req.headers.host.split(':')[0] if ( !config.get('dadiNetwork.enableConfigurationAPI') && !domainManager.getDomain(domain) ) { - return help.sendBackJSON(404, { - success: false, - message: `Domain not configured: ${domain}` - }, res) + return help.sendBackJSON( + 404, + { + success: false, + message: `Domain not configured: ${domain}` + }, + res + ) } req.__domain = domain @@ -303,7 +313,8 @@ Server.prototype.start = function (done) { router(req, res, finalhandler(req, res)) }) - let server = app.listen(config.get('server.port')) + const server = app.listen(config.get('server.port')) + server.on('listening', this.onListening) this.readyState = 1 @@ -322,39 +333,49 @@ Server.prototype.start = function (done) { /** * Starts the frequency cache flushing process. */ -Server.prototype.startFrequencyCache = function () { - let crons = {} +Server.prototype.startFrequencyCache = function() { + const crons = {} // If multi-domain is enabled, we'll set up a cron for each domain. if (config.get('multiDomain.enabled')) { domainManager.getDomains().forEach(({domain, path: domainPath}) => { - let cronString = config.get('caching.expireAt', domain) + const cronString = config.get('caching.expireAt', domain) if (typeof cronString !== 'string') return - crons[domain] = new CronJob(cronString, () => { - try { - // Flush cache for this domain. - cache().delete([domain]) - } catch (err) { - logger.error({module: 'expireAt-flush'}, err) - } - }, null, true) + crons[domain] = new CronJob( + cronString, + () => { + try { + // Flush cache for this domain. + cache().delete([domain]) + } catch (err) { + logger.error({module: 'expireAt-flush'}, err) + } + }, + null, + true + ) }) } else { - let cronString = config.get('caching.expireAt') + const cronString = config.get('caching.expireAt') if (typeof cronString !== 'string') return // Otherwise, we'll set a single cron to flush the cache globally. 
- crons.__global = new CronJob(cronString, () => { - try { - // Flush cache globally. - cache().delete() - } catch (err) { - logger.error({module: 'expireAt-flush'}, err) - } - }, null, true) + crons.__global = new CronJob( + cronString, + () => { + try { + // Flush cache globally. + cache().delete() + } catch (err) { + logger.error({module: 'expireAt-flush'}, err) + } + }, + null, + true + ) } this.crons = crons @@ -367,24 +388,26 @@ Server.prototype.startFrequencyCache = function () { * @param {http.ServerResponse} res * @param {Function} next */ -Server.prototype.status = function (req, res, next) { - let method = req.method && req.method.toLowerCase() - let authorization = req.headers.authorization +Server.prototype.status = function(req, res, next) { + const method = req.method && req.method.toLowerCase() + const authorization = req.headers.authorization if (method !== 'post' || config.get('status.enabled') === false) { return next() } - let baseUrl = config.get('publicUrl.host') - ? `${config.get('publicUrl.protocol')}://${config.get('publicUrl.host')}:${config.get('publicUrl.port')}` + const baseUrl = config.get('publicUrl.host') + ? `${config.get('publicUrl.protocol')}://${config.get( + 'publicUrl.host' + )}:${config.get('publicUrl.port')}` : `http://${config.get('server.host')}:${config.get('server.port')}` - let params = { - site: site, + const params = { + site, package: '@dadi/cdn', - version: version, + version, healthCheck: { - authorization: authorization, + authorization, baseUrl, routes: config.get('status.routes') } @@ -393,7 +416,7 @@ Server.prototype.status = function (req, res, next) { dadiStatus(params, (err, data) => { if (err) return next(err) - let responseMessages = { + const responseMessages = { Green: 'Service is responding within specified parameters', Amber: 'Service is responding, but outside of specified parameters' } @@ -401,11 +424,12 @@ Server.prototype.status = function (req, res, next) { data.status = { status: data.routes[0].status, healthStatus: data.routes[0].healthStatus, - message: responseMessages[data.routes[0].healthStatus] || + message: + responseMessages[data.routes[0].healthStatus] || 'Service is not responding correctly' } - let resBody = JSON.stringify(data, null, 2) + const resBody = JSON.stringify(data, null, 2) res.statusCode = 200 res.setHeader('Content-Type', 'application/json') @@ -421,7 +445,7 @@ Server.prototype.status = function (req, res, next) { * * @param {Function} done */ -Server.prototype.stop = function (done) { +Server.prototype.stop = function(done) { this.readyState = 3 this.stopFrequencyCache() @@ -451,7 +475,7 @@ Server.prototype.stop = function (done) { /** * Starts the frequency cache flushing process. 
*/ -Server.prototype.stopFrequencyCache = function () { +Server.prototype.stopFrequencyCache = function() { Object.keys(this.crons).forEach(id => { this.crons[id].stop() }) diff --git a/dadi/lib/models/domain-manager.js b/dadi/lib/models/domain-manager.js index 9fda7a16..aa6b1596 100644 --- a/dadi/lib/models/domain-manager.js +++ b/dadi/lib/models/domain-manager.js @@ -1,7 +1,7 @@ const fs = require('fs-extra') const path = require('path') -const DomainManager = function () { +const DomainManager = function() { this.domains = [] } @@ -11,8 +11,8 @@ const DomainManager = function () { * @param {String} domain * @param {Object} domainConfig */ -DomainManager.prototype.addDomain = function (domain, domainConfig) { - let config = require('./../../../config') +DomainManager.prototype.addDomain = function(domain, domainConfig) { + const config = require('./../../../config') if (!this.getDomain(domain)) { config.loadDomainConfig(domain, domainConfig) @@ -30,8 +30,8 @@ DomainManager.prototype.addDomain = function (domain, domainConfig) { * * @param {String} domain */ -DomainManager.prototype.removeDomain = function (domain) { - let config = require('./../../../config') +DomainManager.prototype.removeDomain = function(domain) { + const config = require('./../../../config') if (this.getDomain(domain)) { delete config.domainConfigs[domain] @@ -46,7 +46,7 @@ DomainManager.prototype.removeDomain = function (domain) { * @param {String} domain * @return {Object} */ -DomainManager.prototype.getDomain = function (domain) { +DomainManager.prototype.getDomain = function(domain) { if (typeof domain !== 'string') return null return this.domains.find(item => { @@ -59,7 +59,7 @@ DomainManager.prototype.getDomain = function (domain) { * * @return {Array} */ -DomainManager.prototype.getDomains = function () { +DomainManager.prototype.getDomains = function() { return this.domains } @@ -71,12 +71,12 @@ DomainManager.prototype.getDomains = function () { * @param {String} domainsDirectory - full path of the base domains directory * @return {DomainManager} */ -DomainManager.prototype.scanDomains = function (domainsDirectory) { - let domainsPath = path.resolve(domainsDirectory) +DomainManager.prototype.scanDomains = function(domainsDirectory) { + const domainsPath = path.resolve(domainsDirectory) try { this.domains = fs.readdirSync(domainsPath).reduce((domains, domain) => { - let domainPath = path.join(domainsPath, domain) + const domainPath = path.join(domainsPath, domain) if (fs.statSync(domainPath).isDirectory()) { domains.push({ diff --git a/dadi/lib/models/recipe.js b/dadi/lib/models/recipe.js index b2fd01c5..8a03668a 100644 --- a/dadi/lib/models/recipe.js +++ b/dadi/lib/models/recipe.js @@ -3,14 +3,14 @@ const fs = require('fs-extra') const path = require('path') const config = require(path.join(__dirname, '/../../../config')) -const Recipe = function (content) { +const Recipe = function(content) { this.recipe = content this.name = this.recipe.recipe } -Recipe.prototype.save = function (domainName) { - let domain = domainManager.getDomain(domainName) - let recipePath = path.resolve( +Recipe.prototype.save = function(domainName) { + const domain = domainManager.getDomain(domainName) + const recipePath = path.resolve( path.join( domain ? 
domain.path : '', config.get('paths.recipes', domainName), @@ -23,12 +23,12 @@ Recipe.prototype.save = function (domainName) { }) } -Recipe.prototype.validate = function () { - let required = ['recipe', 'settings'] - let errors = [] +Recipe.prototype.validate = function() { + const required = ['recipe', 'settings'] + const errors = [] - for (var key in required) { - if (!this.recipe.hasOwnProperty(required[key])) { + for (const key in required) { + if (this.recipe[required[key]] === undefined) { errors.push({ error: `Property "${required[key]}" not found in recipe` }) @@ -38,7 +38,8 @@ Recipe.prototype.validate = function () { // Validate name pattern. if (/^[A-Za-z-_]{5,}$/.test(this.recipe.recipe) === false) { errors.push({ - error: 'Recipe name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores' + error: + 'Recipe name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores' }) } diff --git a/dadi/lib/models/route.js b/dadi/lib/models/route.js index a4b79972..02ae53f4 100644 --- a/dadi/lib/models/route.js +++ b/dadi/lib/models/route.js @@ -10,32 +10,31 @@ const userAgentParser = require('ua-parser-js') const cache = require(path.join(__dirname, '/../cache'))() const config = require(path.join(__dirname, '/../../../config')) -const Route = function (config) { +const Route = function(config) { this.config = config } -Route.prototype._arrayIntersect = function (object, array) { +Route.prototype._arrayIntersect = function(object, array) { if (!object) return false if (!(object instanceof Array)) { object = [object] } - return array.some((element) => { - return object.some((objectPart) => { - return objectPart.toString().toLowerCase() === element.toString().toLowerCase() + return array.some(element => { + return object.some(objectPart => { + return ( + objectPart.toString().toLowerCase() === element.toString().toLowerCase() + ) }) }) } -Route.prototype._getCacheKey = function () { - return [ - this.domain, - this.ip + this.config.route - ] +Route.prototype._getCacheKey = function() { + return [this.domain, this.ip + this.config.route] } -Route.prototype._getPathInObject = function (path, object, breadcrumbs) { +Route.prototype._getPathInObject = function(path, object, breadcrumbs) { breadcrumbs = breadcrumbs || path.split('.') const head = breadcrumbs[0] @@ -43,21 +42,25 @@ Route.prototype._getPathInObject = function (path, object, breadcrumbs) { if (breadcrumbs.length === 1) { return object[head] } else if (object[head]) { - return this._getPathInObject(path, object[breadcrumbs[0]], breadcrumbs.slice(1)) + return this._getPathInObject( + path, + object[breadcrumbs[0]], + breadcrumbs.slice(1) + ) } } -Route.prototype._matchBranch = function (branch) { +Route.prototype._matchBranch = function(branch) { if (!branch.condition) return Promise.resolve(true) let match = true - let queue = [] + const queue = [] - Object.keys(branch.condition).every((type) => { + Object.keys(branch.condition).every(type => { let condition = branch.condition[type] switch (type) { - case 'device': + case 'device': { // Ensure the condition is in array format if (!(condition instanceof Array)) { condition = [condition] @@ -66,11 +69,14 @@ Route.prototype._matchBranch = function (branch) { match = match && this._arrayIntersect(this.getDevice(), condition) break + } - case 'language': - let minQuality = (branch.condition.languageMinQuality && parseFloat(branch.condition.languageMinQuality)) + case 'language': { + let 
minQuality = + branch.condition.languageMinQuality && + parseFloat(branch.condition.languageMinQuality) - if ((minQuality === undefined) || isNaN(minQuality)) { + if (minQuality === undefined || isNaN(minQuality)) { minQuality = 1 } @@ -79,37 +85,44 @@ Route.prototype._matchBranch = function (branch) { condition = [condition] } - const languageMatch = this.getLanguages(minQuality).some((language) => { + const languageMatch = this.getLanguages(minQuality).some(language => { return this._arrayIntersect(language, condition) }) match = match && languageMatch break + } - case 'country': + case 'country': { // Ensure the condition is in array format if (!(condition instanceof Array)) { condition = [condition] } - queue.push(this.getLocation().then((location) => { - match = match && this._arrayIntersect(location, condition) - })) + queue.push( + this.getLocation().then(location => { + match = match && this._arrayIntersect(location, condition) + }) + ) break + } - case 'network': + case 'network': { // Ensure the condition is in array format if (!(condition instanceof Array)) { condition = [condition] } - queue.push(this.getNetwork().then((network) => { - match = match && network && this._arrayIntersect(network, condition) - })) + queue.push( + this.getNetwork().then(network => { + match = match && network && this._arrayIntersect(network, condition) + }) + ) break + } } return match @@ -120,44 +133,47 @@ Route.prototype._matchBranch = function (branch) { }) } -Route.prototype._requestAndGetPath = function (uri, path) { +Route.prototype._requestAndGetPath = function(uri, path) { return request({ json: true, - uri: uri - }).then((response) => { + uri + }).then(response => { return response && this._getPathInObject(path, response) }) } -Route.prototype.evaluateBranches = function (branches, index) { +Route.prototype.evaluateBranches = function(branches, index) { index = index || 0 if (!branches[index]) { return Promise.resolve(false) } - return this._matchBranch(branches[index]).then((branchMatch) => { + return this._matchBranch(branches[index]).then(branchMatch => { if (branchMatch) { return branches[index] } - return this.evaluateBranches(branches, (index + 1)) + return this.evaluateBranches(branches, index + 1) }) } -Route.prototype.getDevice = function () { +Route.prototype.getDevice = function() { const ua = userAgentParser(this.userAgent) return ua.device.type || 'desktop' } -Route.prototype.getLanguages = function (minQuality) { +Route.prototype.getLanguages = function(minQuality) { const languages = languageParser.parse(this.language) - let result = [] + const result = [] - languages.forEach((language) => { - if ((result.indexOf(language.code) === -1) && (language.quality >= minQuality)) { + languages.forEach(language => { + if ( + result.indexOf(language.code) === -1 && + language.quality >= minQuality + ) { result.push(language.code) } }) @@ -165,7 +181,7 @@ Route.prototype.getLanguages = function (minQuality) { return result } -Route.prototype.getLocation = function () { +Route.prototype.getLocation = function() { if (!config.get('geolocation.enabled')) { return Promise.reject('Geolocation is not enabled') } @@ -182,29 +198,34 @@ Route.prototype.getLocation = function () { } } -Route.prototype.getMaxmindLocation = function () { +Route.prototype.getMaxmindLocation = function() { return new Promise((resolve, reject) => { - const dbPath = path.resolve(__dirname, config.get('geolocation.maxmind.countryDbPath')) + const dbPath = path.resolve( + __dirname, + 
config.get('geolocation.maxmind.countryDbPath') + ) - Maxmind.open(dbPath, { - cache: { - max: 1000, // max items in cache - maxAge: 1000 * 60 * 60 // life time in milliseconds - } - }, (err, db) => { - if (err) return reject(err) + Maxmind.open( + dbPath, + { + cache: { + max: 1000, // max items in cache + maxAge: 1000 * 60 * 60 // life time in milliseconds + } + }, + (err, db) => { + if (err) return reject(err) - const country = db.get(this.ip) + const country = db.get(this.ip) - return resolve( - country && country.country && country.country.iso_code - ) - }) + return resolve(country && country.country && country.country.iso_code) + } + ) }) } -Route.prototype.getNetwork = function () { - let path = config.get('network.path') +Route.prototype.getNetwork = function() { + const path = config.get('network.path') let uri = config.get('network.url') // Replace placeholders in uri @@ -212,37 +233,38 @@ Route.prototype.getNetwork = function () { uri = uri.replace('{key}', config.get('network.key')) uri = uri.replace('{secret}', config.get('network.secret')) - return this._requestAndGetPath(uri, path).then((network) => { - return network.split('/') - }).catch((err) => { - logger.error({module: 'routes'}, err) + return this._requestAndGetPath(uri, path) + .then(network => { + return network.split('/') + }) + .catch(err => { + logger.error({module: 'routes'}, err) - return Promise.resolve(null) - }) + return Promise.resolve(null) + }) } -Route.prototype.getRecipe = function () { - return cache.getStream(this._getCacheKey()).then(cachedRecipe => { - if (cachedRecipe) return cachedRecipe - - return this.processRoute().then(recipe => { - if (recipe) { - return cache.set(this._getCacheKey(), recipe).then(() => { +Route.prototype.getRecipe = function() { + return this.processRoute().then(recipe => { + if (recipe) { + return cache + .set(this._getCacheKey(), recipe) + .then(() => { return recipe - }).catch(err => { + }) + .catch(err => { logger.error({module: 'routes'}, err) return recipe }) - } + } - return recipe - }) + return recipe }) } -Route.prototype.getRemoteLocation = function () { - let countryPath = config.get('geolocation.remote.countryPath') +Route.prototype.getRemoteLocation = function() { + const countryPath = config.get('geolocation.remote.countryPath') let uri = config.get('geolocation.remote.url') // Replace placeholders @@ -250,26 +272,28 @@ Route.prototype.getRemoteLocation = function () { uri = uri.replace('{key}', config.get('geolocation.remote.key')) uri = uri.replace('{secret}', config.get('geolocation.remote.secret')) - return this._requestAndGetPath(uri, countryPath).catch((err) => { + return this._requestAndGetPath(uri, countryPath).catch(err => { logger.error({module: 'routes'}, err) return Promise.resolve(null) }) } -Route.prototype.processRoute = function () { - return this.evaluateBranches(this.config.branches).then((match) => { - if (match) return match.recipe - }).catch((err) => { - logger.error({module: 'routes'}, err) +Route.prototype.processRoute = function() { + return this.evaluateBranches(this.config.branches) + .then(match => { + if (match) return match.recipe + }) + .catch(err => { + logger.error({module: 'routes'}, err) - return Promise.resolve(null) - }) + return Promise.resolve(null) + }) } -Route.prototype.save = function (domainName) { - let domain = domainManager.getDomain(domainName) - let routePath = path.resolve( +Route.prototype.save = function(domainName) { + const domain = domainManager.getDomain(domainName) + const routePath = path.resolve( 
path.join( domain ? domain.path : '', config.get('paths.routes', domainName), @@ -282,24 +306,24 @@ Route.prototype.save = function (domainName) { }) } -Route.prototype.setDomain = function (domain) { +Route.prototype.setDomain = function(domain) { this.domain = domain } -Route.prototype.setIP = function (ip) { +Route.prototype.setIP = function(ip) { this.ip = ip } -Route.prototype.setLanguage = function (language) { +Route.prototype.setLanguage = function(language) { this.language = language } -Route.prototype.setUserAgent = function (userAgent) { +Route.prototype.setUserAgent = function(userAgent) { this.userAgent = userAgent } -Route.prototype.validate = function () { - let errors = [] +Route.prototype.validate = function() { + const errors = [] // Check for required fields if (!this.config.route) { @@ -308,10 +332,12 @@ Route.prototype.validate = function () { // Check for name pattern if (/^[A-Za-z-_]{5,}$/.test(this.config.route) === false) { - errors.push('Route name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores') + errors.push( + 'Route name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores' + ) } - if (this.config.branches && (this.config.branches instanceof Array)) { + if (this.config.branches && this.config.branches instanceof Array) { // Check for `recipe` in branches this.config.branches.forEach((branch, index) => { if (!branch.recipe) { diff --git a/dadi/lib/models/workspace.js b/dadi/lib/models/workspace.js index a86bba36..053090be 100644 --- a/dadi/lib/models/workspace.js +++ b/dadi/lib/models/workspace.js @@ -25,7 +25,7 @@ const domainManager = require('./domain-manager') } } */ -const Workspace = function () { +const Workspace = function() { this.TYPES = { plugins: '*.js', recipes: '*.json', @@ -42,7 +42,7 @@ const Workspace = function () { * * @return {Object} The new workspace */ -Workspace.prototype.build = function () { +Workspace.prototype.build = function() { return this.read().then(files => { this.workspace = files @@ -56,8 +56,8 @@ Workspace.prototype.build = function () { * * @return {Array} list of directories created */ -Workspace.prototype.createDirectories = function () { - let directories = Object.keys(this.TYPES).map(type => { +Workspace.prototype.createDirectories = function() { + const directories = Object.keys(this.TYPES).map(type => { return path.resolve(config.get(`paths.${type}`)) }) @@ -83,9 +83,9 @@ Workspace.prototype.createDirectories = function () { * @param {String} item * @return {Object} */ -Workspace.prototype.get = function (item, domain) { +Workspace.prototype.get = function(item, domain) { if (item !== undefined) { - let key = domain ? `${domain}:${item}` : item + const key = domain ? 
`${domain}:${item}` : item return this.workspace[key] } @@ -99,12 +99,10 @@ Workspace.prototype.get = function (item, domain) { * * @return {Object} */ -Workspace.prototype.read = function () { - let directories = [] +Workspace.prototype.read = function() { + const directories = [] let queue = Object.keys(this.TYPES).reduce((queue, type) => { - let directoryPath = path.resolve( - config.get(`paths.${type}`) - ) + const directoryPath = path.resolve(config.get(`paths.${type}`)) return queue.then(() => { return fs.readdir(directoryPath).then(items => { @@ -123,7 +121,7 @@ Workspace.prototype.read = function () { domainManager.getDomains().map(({domain, path: domainPath}) => { return Promise.all( Object.keys(this.TYPES).map(type => { - let typePath = path.resolve( + const typePath = path.resolve( domainPath, config.get(`paths.${type}`, domain) ) @@ -142,89 +140,89 @@ Workspace.prototype.read = function () { }) } - return this.createDirectories().then(() => { - return queue - }).then(() => { - return directories.reduce((files, {domain, items, type}) => { - items.forEach(file => { - const extension = path.extname(file) - const baseName = path.basename(file, extension) - const fullPath = path.resolve( - domain ? `${config.get('multiDomain.directory')}/${domain}` : '', - config.get(`paths.${type}`), - file - ) - - if (!['.js', '.json'].includes(extension)) return - - let source - let workspaceKey = baseName - - if (extension === '.json') { - delete require.cache[fullPath] - - source = require(fullPath) - } - - if (type === 'recipes') { - workspaceKey = source.recipe || workspaceKey - } else if (type === 'routes') { - workspaceKey = source.route || workspaceKey - } - - // Prepend workspace key with domain. - if (domain) { - workspaceKey = `${domain}:${workspaceKey}` - } - - if (files[workspaceKey] !== undefined) { - throw new Error(`Naming conflict: ${workspaceKey} exists in both '${files[workspaceKey].path}' and '${fullPath}'`) - } - - files[workspaceKey] = { - domain, - path: fullPath, - source: source, - type - } - }) + return this.createDirectories() + .then(() => { + return queue + }) + .then(() => { + return directories.reduce((files, {domain, items, type}) => { + items.forEach(file => { + const extension = path.extname(file) + const baseName = path.basename(file, extension) + const fullPath = path.resolve( + domain ? `${config.get('multiDomain.directory')}/${domain}` : '', + config.get(`paths.${type}`), + file + ) - return files - }, {}) - }) + if (!['.js', '.json'].includes(extension)) return + + let source + let workspaceKey = baseName + + if (extension === '.json') { + delete require.cache[fullPath] + + source = require(fullPath) + } + + if (type === 'recipes') { + workspaceKey = source.recipe || workspaceKey + } else if (type === 'routes') { + workspaceKey = source.route || workspaceKey + } + + // Prepend workspace key with domain. + if (domain) { + workspaceKey = `${domain}:${workspaceKey}` + } + + if (files[workspaceKey] !== undefined) { + throw new Error( + `Naming conflict: ${workspaceKey} exists in both '${files[workspaceKey].path}' and '${fullPath}'` + ) + } + + files[workspaceKey] = { + domain, + path: fullPath, + source, + type + } + }) + + return files + }, {}) + }) } /** * Starts watching workspace files for changes and rebuild the workspace * tree when something changes. */ -Workspace.prototype.startWatchingFiles = function () { - let watchers = {} +Workspace.prototype.startWatchingFiles = function() { + const watchers = {} // Watch each workspace type. 
Object.keys(this.TYPES).forEach(type => { - let directory = path.resolve( - config.get(`paths.${type}`) - ) - - watchers[type] = chokidar.watch( - `${directory}/${this.TYPES[type]}`, - {usePolling: true} - ).on('all', (event, filePath) => this.build()) + const directory = path.resolve(config.get(`paths.${type}`)) + + watchers[type] = chokidar + .watch(`${directory}/${this.TYPES[type]}`, {usePolling: true}) + .on('all', (event, filePath) => this.build()) }) // Watch files within domain-level workspace directories. domainManager.getDomains().forEach(({domain, path: domainPath}) => { Object.keys(this.TYPES).forEach(type => { - let directory = path.resolve( + const directory = path.resolve( domainPath, config.get(`paths.${type}`, domain) ) - watchers[`${domain}:${type}`] = chokidar.watch( - `${directory}/${this.TYPES[type]}`, - {usePolling: true} - ).on('all', (event, filePath) => this.build()) + watchers[`${domain}:${type}`] = chokidar + .watch(`${directory}/${this.TYPES[type]}`, {usePolling: true}) + .on('all', (event, filePath) => this.build()) }) }) @@ -234,7 +232,7 @@ Workspace.prototype.startWatchingFiles = function () { /** * Stop watching workspace files for changes. */ -Workspace.prototype.stopWatchingFiles = function () { +Workspace.prototype.stopWatchingFiles = function() { Object.keys(this.watchers).forEach(key => { this.watchers[key].close() }) diff --git a/dadi/lib/storage/disk.js b/dadi/lib/storage/disk.js index aded109e..5e6733e9 100644 --- a/dadi/lib/storage/disk.js +++ b/dadi/lib/storage/disk.js @@ -5,8 +5,8 @@ const path = require('path') const Missing = require(path.join(__dirname, '/missing')) -const DiskStorage = function ({assetType = 'assets', domain, url}) { - let assetPath = config.get(`${assetType}.directory.path`, domain) +const DiskStorage = function({assetType = 'assets', domain, url}) { + const assetPath = config.get(`${assetType}.directory.path`, domain) if (url !== '') { this.url = nodeUrl.parse(url, true).pathname @@ -18,11 +18,11 @@ const DiskStorage = function ({assetType = 'assets', domain, url}) { this.path = path.resolve(assetPath) } -DiskStorage.prototype.getFullUrl = function () { +DiskStorage.prototype.getFullUrl = function() { return decodeURIComponent(path.join(this.path, this.url)) } -DiskStorage.prototype.getLastModified = function () { +DiskStorage.prototype.getLastModified = function() { return this.lastModified } @@ -30,20 +30,22 @@ DiskStorage.prototype.getLastModified = function () { * Scans a directory for files and compares them to the config.defaultFiles array, * returning an array of file names that match the defaultFiles array. 
*/ -DiskStorage.prototype.getDefaultFile = function () { +DiskStorage.prototype.getDefaultFile = function() { return new Promise((resolve, reject) => { - let fullUrl = this.getFullUrl() + const fullUrl = this.getFullUrl() fs.lstat(fullUrl, (err, stats) => { if (err) return resolve() if (stats.isDirectory()) { - let defaultFiles = config.get('defaultFiles') + const defaultFiles = config.get('defaultFiles') fs.readdir(fullUrl, (err, files) => { if (err) return resolve() - files = files.filter(file => defaultFiles.includes(path.basename(file))) + files = files.filter(file => + defaultFiles.includes(path.basename(file)) + ) return resolve(files[0] || 'no-default-configured') }) @@ -52,13 +54,13 @@ DiskStorage.prototype.getDefaultFile = function () { }) } -DiskStorage.prototype.get = function () { +DiskStorage.prototype.get = function() { return new Promise((resolve, reject) => { let wait = Promise.resolve() // If we're looking at a directory (assumed because no extension), // attempt to get a configured default file from the directory - let isDirectory = path.parse(this.getFullUrl()).ext === '' + const isDirectory = path.parse(this.getFullUrl()).ext === '' if (isDirectory) { wait = this.getDefaultFile() @@ -71,17 +73,17 @@ DiskStorage.prototype.get = function () { } // attempt to open - let stream = fs.createReadStream(this.getFullUrl()) + const stream = fs.createReadStream(this.getFullUrl()) stream.on('open', () => { // check file size - let stats = fs.statSync(this.getFullUrl()) - let fileSize = parseInt(stats.size) + const stats = fs.statSync(this.getFullUrl()) + const fileSize = parseInt(stats.size) this.lastModified = stats.mtime if (fileSize === 0) { - let err = { + const err = { statusCode: 404, message: 'File size is 0 bytes' } @@ -93,21 +95,25 @@ DiskStorage.prototype.get = function () { }) stream.on('error', () => { - let err = { + const err = { statusCode: 404, message: 'File not found: ' + this.getFullUrl() } - return new Missing().get({ - domain: this.domain, - isDirectory: isDirectory - }).then(stream => { - this.notFound = true - this.lastModified = new Date() - return resolve(stream) - }).catch(e => { - return reject(err) - }) + return new Missing() + .get({ + domain: this.domain, + isDirectory + }) + .then(stream => { + this.notFound = true + this.lastModified = new Date() + + return resolve(stream) + }) + .catch(e => { + return reject(err) + }) }) }) }) diff --git a/dadi/lib/storage/factory.js b/dadi/lib/storage/factory.js index ac2df2ce..369a6932 100644 --- a/dadi/lib/storage/factory.js +++ b/dadi/lib/storage/factory.js @@ -20,38 +20,34 @@ const ADAPTERS = { } } -module.exports.create = function create (type, assetPath, {domain} = {}) { +module.exports.create = function create(type, assetPath, {domain} = {}) { if (assetPath.indexOf('/') === 0) { assetPath = assetPath.slice(1) } - let assetType = type === 'image' - ? 'images' - : 'assets' - let adapterFromPath = module.exports.extractAdapterFromPath(assetPath) + const assetType = type === 'image' ? 
'images' : 'assets' + const adapterFromPath = module.exports.extractAdapterFromPath(assetPath) let adapter if (adapterFromPath.adapter) { adapter = adapterFromPath.adapter assetPath = adapterFromPath.canonicalPath } else { - if ( - assetPath.indexOf('http:') === 0 || - assetPath.indexOf('https:') === 0 - ) { + if (assetPath.indexOf('http:') === 0 || assetPath.indexOf('https:') === 0) { adapter = 'http' } else { - let enabledStorage = Object.keys(config.get(assetType)).find(key => { + const enabledStorage = Object.keys(config.get(assetType)).find(key => { return config.get(`${assetType}.${key}.enabled`, domain) }) - adapter = Object.keys(ADAPTERS).find(key => { - return ADAPTERS[key].configBlock === enabledStorage - }) || 'disk' + adapter = + Object.keys(ADAPTERS).find(key => { + return ADAPTERS[key].configBlock === enabledStorage + }) || 'disk' } } - let Adapter = ADAPTERS[adapter].handler + const Adapter = ADAPTERS[adapter].handler return new Adapter({ assetType, @@ -60,18 +56,20 @@ module.exports.create = function create (type, assetPath, {domain} = {}) { }) } -module.exports.extractAdapterFromPath = function (assetPath) { +module.exports.extractAdapterFromPath = function(assetPath) { if (assetPath.indexOf('/') === 0) { assetPath = assetPath.slice(1) } let newAssetPath = assetPath - let adapter = Object.keys(ADAPTERS).find(key => { + const adapter = Object.keys(ADAPTERS).find(key => { if (assetPath.indexOf(`${key}/`) === 0) { newAssetPath = assetPath.slice(key.length + 1) return true } + + return false }) return { diff --git a/dadi/lib/storage/http.js b/dadi/lib/storage/http.js index 9275bba5..735928b7 100644 --- a/dadi/lib/storage/http.js +++ b/dadi/lib/storage/http.js @@ -9,11 +9,11 @@ const url = require('url') const urljoin = require('url-join') const Missing = require(path.join(__dirname, '/missing')) -const HTTPStorage = function ({assetType = 'assets', domain, url}) { - let isExternalURL = url.indexOf('http:') === 0 || - url.indexOf('https:') === 0 +const HTTPStorage = function({assetType = 'assets', domain, url}) { + const isExternalURL = + url.indexOf('http:') === 0 || url.indexOf('https:') === 0 - let remoteAddress = config.get(`${assetType}.remote.path`, domain) + const remoteAddress = config.get(`${assetType}.remote.path`, domain) if (!isExternalURL) { if (!remoteAddress) { @@ -27,120 +27,135 @@ const HTTPStorage = function ({assetType = 'assets', domain, url}) { this.url = url } -HTTPStorage.prototype.getFullUrl = function () { +HTTPStorage.prototype.getFullUrl = function() { if (this.baseUrl) { return urljoin(this.baseUrl, this.url) - } else { - return this.url } + + return this.url } -HTTPStorage.prototype.get = function ({ +HTTPStorage.prototype.get = function({ redirects = 0, requestUrl = this.getFullUrl() } = {}) { return new Promise((resolve, reject) => { - let parsedUrl = url.parse(requestUrl) - let requestFn = parsedUrl.protocol === 'https:' - ? https - : http - - requestFn.get({ - protocol: parsedUrl.protocol, - hostname: parsedUrl.hostname, - path: parsedUrl.path, - port: parsedUrl.port, - headers: { - 'User-Agent': 'DADI CDN' - } - }, res => { - let buffers = [] - - res.on('data', chunk => { - buffers.push(chunk) - }) - - res.on('end', () => { - let statusCode = res.statusCode - - // Successful response, return the data - if (statusCode === 200) { - return resolve(streamifier.createReadStream(Buffer.concat(buffers))) - } - - // Determine what to do with the response when not successful. 
If it's - // a redirect status code, we continue trying to get it until we reach the - // configured redirect limit. - if ( - [301, 302, 307].includes(statusCode) && - typeof res.headers.location === 'string' - ) { - let parsedRedirectUrl = url.parse(res.headers.location) - - parsedRedirectUrl.host = parsedRedirectUrl.host || parsedUrl.host - parsedRedirectUrl.port = parsedRedirectUrl.port || parsedUrl.port - parsedRedirectUrl.protocol = parsedRedirectUrl.protocol || parsedUrl.protocol - - if (redirects < config.get('http.followRedirects', this.domain)) { - return resolve( - this.get({ - redirects: redirects + 1, - requestUrl: url.format(parsedRedirectUrl) - }) - ) + const parsedUrl = url.parse(requestUrl) + const requestFn = parsedUrl.protocol === 'https:' ? https : http + + requestFn + .get( + { + protocol: parsedUrl.protocol, + hostname: parsedUrl.hostname, + path: parsedUrl.path, + port: parsedUrl.port, + headers: { + 'User-Agent': 'DADI CDN' } + }, + res => { + const buffers = [] - // We've hit the maximum number of redirects allowed, so we'll - // treat this as a 404. - statusCode = 404 - } - - // It's not a redirect, determine what to return - let httpError - - switch (statusCode) { - case 404: - httpError = new Error(`Not Found: ${this.getFullUrl()}`) - - break - case 403: - httpError = new Error(`Forbidden: ${this.getFullUrl()}`) + res.on('data', chunk => { + buffers.push(chunk) + }) - break - default: - httpError = new Error(`Remote server responded with error code ${statusCode} for URL: ${this.getFullUrl()}`) + res.on('end', () => { + let statusCode = res.statusCode + + // Successful response, return the data + if (statusCode === 200) { + return resolve( + streamifier.createReadStream(Buffer.concat(buffers)) + ) + } + + // Determine what to do with the response when not successful. If it's + // a redirect status code, we continue trying to get it until we reach the + // configured redirect limit. + if ( + [301, 302, 307].includes(statusCode) && + typeof res.headers.location === 'string' + ) { + const parsedRedirectUrl = url.parse(res.headers.location) + + parsedRedirectUrl.host = parsedRedirectUrl.host || parsedUrl.host + parsedRedirectUrl.port = parsedRedirectUrl.port || parsedUrl.port + parsedRedirectUrl.protocol = + parsedRedirectUrl.protocol || parsedUrl.protocol + + if (redirects < config.get('http.followRedirects', this.domain)) { + return resolve( + this.get({ + redirects: redirects + 1, + requestUrl: url.format(parsedRedirectUrl) + }) + ) + } + + // We've hit the maximum number of redirects allowed, so we'll + // treat this as a 404. 
+ statusCode = 404 + } + + // It's not a redirect, determine what to return + let httpError + + switch (statusCode) { + case 404: + httpError = new Error(`Not Found: ${this.getFullUrl()}`) + + break + case 403: + httpError = new Error(`Forbidden: ${this.getFullUrl()}`) + + break + default: + httpError = new Error( + `Remote server responded with error code ${statusCode} for URL: ${this.getFullUrl()}` + ) + } + + httpError.statusCode = statusCode + + if (statusCode === 404) { + new Missing() + .get({ + domain: this.domain, + isDirectory: path.parse(this.getFullUrl()).ext === '' + }) + .then(stream => { + this.notFound = true + this.lastModified = new Date() + resolve(stream) + }) + .catch(() => { + reject(httpError) + }) + } else { + reject(httpError) + } + }) } + ) + .on('error', err => { + let httpError - httpError.statusCode = statusCode - - if (statusCode === 404) { - new Missing().get({ - domain: this.domain, - isDirectory: path.parse(this.getFullUrl()).ext === '' - }).then(stream => { - this.notFound = true - this.lastModified = new Date() - resolve(stream) - }).catch(() => { - reject(httpError) - }) + if (err.code && err.code === 'ENOTFOUND') { + httpError = new Error( + `Remote server not found for URL: ${this.getFullUrl()} ${ + err.message + }` + ) } else { - reject(httpError) + httpError = new Error(`ERROR: ${err.message} CODE: ${err.code}`) } - }) - }).on('error', (err) => { - let httpError - if (err.code && err.code === 'ENOTFOUND') { - httpError = new Error(`Remote server not found for URL: ${this.getFullUrl()} ${err.message}`) - } else { - httpError = new Error(`ERROR: ${err.message} CODE: ${err.code}`) - } + httpError.statusCode = 500 - httpError.statusCode = 500 - - reject(httpError) - }) + reject(httpError) + }) }) } diff --git a/dadi/lib/storage/missing.js b/dadi/lib/storage/missing.js index 7b294ef9..c6baf8c3 100644 --- a/dadi/lib/storage/missing.js +++ b/dadi/lib/storage/missing.js @@ -2,24 +2,24 @@ const fs = require('fs') const path = require('path') const config = require(path.join(__dirname, '/../../../config')) -const Missing = function () {} +const Missing = function() {} -Missing.prototype.get = function ({domain, isDirectory = false}) { - let imagePath = config.get('notFound.images.enabled', domain) +Missing.prototype.get = function({domain, isDirectory = false}) { + const imagePath = config.get('notFound.images.enabled', domain) ? config.get('notFound.images.path', domain) : null return new Promise((resolve, reject) => { if (!imagePath || isDirectory) { - return reject({ statusCode: 404 }) + return reject({statusCode: 404}) } - let errorNotFound = { + const errorNotFound = { statusCode: 404, message: `File not found: ${imagePath}` } - let stream = fs.createReadStream(imagePath) + const stream = fs.createReadStream(imagePath) stream.on('open', () => { // Check file size. 
@@ -28,7 +28,7 @@ Missing.prototype.get = function ({domain, isDirectory = false}) { return reject(errorNotFound) } - let fileSize = parseInt(stats.size) + const fileSize = parseInt(stats.size) if (fileSize === 0) { return reject({ @@ -47,7 +47,7 @@ Missing.prototype.get = function ({domain, isDirectory = false}) { }) } -module.exports = function () { +module.exports = function() { return new Missing() } diff --git a/dadi/lib/storage/s3.js b/dadi/lib/storage/s3.js index c83021d7..c5ab7943 100644 --- a/dadi/lib/storage/s3.js +++ b/dadi/lib/storage/s3.js @@ -6,7 +6,7 @@ const stream = require('stream') const logger = require('@dadi/logger') const Missing = require(path.join(__dirname, '/missing')) -const S3Storage = function ({assetType = 'assets', domain, url}) { +const S3Storage = function({assetType = 'assets', domain, url}) { this.providerType = 'Amazon S3' AWS.config.update({ @@ -14,8 +14,8 @@ const S3Storage = function ({assetType = 'assets', domain, url}) { secretAccessKey: config.get(`${assetType}.s3.secretKey`) }) - let region = config.get(`${assetType}.s3.region`) - let endpoint = config.get(`${assetType}.s3.endpoint`) + const region = config.get(`${assetType}.s3.region`) + const endpoint = config.get(`${assetType}.s3.endpoint`) if (region !== '') { AWS.config.update({region}) @@ -35,58 +35,71 @@ const S3Storage = function ({assetType = 'assets', domain, url}) { this.s3 = new AWS.S3() } -S3Storage.prototype.get = function () { +S3Storage.prototype.get = function() { return new Promise((resolve, reject) => { - let requestData = { + const requestData = { Bucket: this.getBucket(), Key: this.getKey() } - logger.info(`${this.providerType} Request (${this.url}):${JSON.stringify(requestData)}`) + logger.info( + `${this.providerType} Request (${this.url}):${JSON.stringify( + requestData + )}` + ) if (requestData.Bucket === '' || requestData.Key === '') { - let err = { + const err = { statusCode: 400, - message: 'Either no Bucket or Key provided: ' + JSON.stringify(requestData) + message: + 'Either no Bucket or Key provided: ' + JSON.stringify(requestData) } + return reject(err) } // create the AWS.Request object - let request = this.s3.getObject(requestData) - - let promise = request.promise() - - promise.then(data => { - if (data.LastModified) { - this.lastModified = data.LastModified + const request = this.s3.getObject(requestData) + + const promise = request.promise() + + promise.then( + data => { + if (data.LastModified) { + this.lastModified = data.LastModified + } + + const bufferStream = new stream.PassThrough() + + bufferStream.push(data.Body) + bufferStream.push(null) + resolve(bufferStream) + }, + error => { + if (error.statusCode === 404) { + return new Missing() + .get({ + domain: this.domain, + isDirectory: path.parse(this.getFullUrl()).ext === '' + }) + .then(stream => { + this.notFound = true + this.lastModified = new Date() + + return resolve(stream) + }) + .catch(err => { + return reject(err) + }) + } + + return reject(error) } - - let bufferStream = new stream.PassThrough() - bufferStream.push(data.Body) - bufferStream.push(null) - resolve(bufferStream) - }, - (error) => { - if (error.statusCode === 404) { - return new Missing().get({ - domain: this.domain, - isDirectory: path.parse(this.getFullUrl()).ext === '' - }).then(stream => { - this.notFound = true - this.lastModified = new Date() - return resolve(stream) - }).catch(err => { - return reject(err) - }) - } - - return reject(error) - }) + ) }) } -S3Storage.prototype.getBucket = function () { 
+S3Storage.prototype.getBucket = function() { // If the URL starts with /s3, it means the second parameter // is the name of the bucket. if (this.url.indexOf('/s3') === 0) { @@ -96,29 +109,25 @@ S3Storage.prototype.getBucket = function () { return this.bucketName } -S3Storage.prototype.getFullUrl = function () { +S3Storage.prototype.getFullUrl = function() { return this.url } -S3Storage.prototype.getKey = function () { +S3Storage.prototype.getKey = function() { // If the URL start with /s3, it means the second parameter // is the name of the bucket. Let's strip that out. if (this.url.indexOf('/s3') === 0) { - return decodeURIComponent( - this.urlParts.slice(1).join('/') - ) + return decodeURIComponent(this.urlParts.slice(1).join('/')) } - return decodeURIComponent( - this.urlParts.join('/') - ) + return decodeURIComponent(this.urlParts.join('/')) } -S3Storage.prototype.getLastModified = function () { +S3Storage.prototype.getLastModified = function() { return this.lastModified } -S3Storage.prototype.getUrlParts = function (url) { +S3Storage.prototype.getUrlParts = function(url) { let canonicalUrl = url if (canonicalUrl.indexOf('/s3/') === 0) { diff --git a/dadi/lib/workQueue.js b/dadi/lib/workQueue.js index be212f71..afb60606 100644 --- a/dadi/lib/workQueue.js +++ b/dadi/lib/workQueue.js @@ -1,11 +1,11 @@ -const WorkQueue = function (multiplexFn) { +const WorkQueue = function(multiplexFn) { this.multiplexFn = multiplexFn || (i => i) this.jobs = {} } -WorkQueue.prototype.processJobResult = function (key, error, result) { - let job = this.jobs[key] - let subscribers = job.subscribers || [] +WorkQueue.prototype.processJobResult = function(key, error, result) { + const job = this.jobs[key] + const subscribers = job.subscribers || [] delete this.jobs[key] @@ -13,39 +13,39 @@ WorkQueue.prototype.processJobResult = function (key, error, result) { if (error) { subscriber(error) } else { - let subscriberResult = this.multiplexFn(result) + const subscriberResult = this.multiplexFn(result) subscriber(null, subscriberResult) } }) } -WorkQueue.prototype.run = function (key, jobFn) { +WorkQueue.prototype.run = function(key, jobFn) { if (!this.jobs[key]) { this.jobs[key] = { - fn: jobFn().then(result => { - this.processJobResult(key, null, result) - }).catch(error => { - this.processJobResult(key, error) - }) + fn: jobFn() + .then(result => { + this.processJobResult(key, null, result) + }) + .catch(error => { + this.processJobResult(key, error) + }) } } return this.subscribe(key) } -WorkQueue.prototype.subscribe = function (key) { +WorkQueue.prototype.subscribe = function(key) { return new Promise((resolve, reject) => { this.jobs[key].subscribers = this.jobs[key].subscribers || [] - this.jobs[key].subscribers.push( - (err, result) => { - if (err) { - return reject(err) - } - - resolve(result) + this.jobs[key].subscribers.push((err, result) => { + if (err) { + return reject(err) } - ) + + resolve(result) + }) }) } diff --git a/index.js b/index.js index 4b0bb4b0..8fa58b22 100755 --- a/index.js +++ b/index.js @@ -1,41 +1,50 @@ -var chokidar = require('chokidar') -var cluster = require('cluster') -var config = require('./config') -var fs = require('fs') -var path = require('path') +const chokidar = require('chokidar') +const cluster = require('cluster') +const config = require('./config') +const fs = require('fs') +const path = require('path') // Console start message const dadiBoot = require('@dadi/boot') + dadiBoot.start(require('./package.json')) require('console-stamp')(console, 'yyyy-mm-dd HH:MM:ss.l') 
if (config.get('cluster')) { if (cluster.isMaster) { - var numWorkers = require('os').cpus().length + const numWorkers = require('os').cpus().length + console.log('Master cluster setting up ' + numWorkers + ' workers...') - for (var i = 0; i < numWorkers; i++) { + for (let i = 0; i < numWorkers; i++) { cluster.fork() } - cluster.on('online', function (worker) { + cluster.on('online', function(worker) { console.log('Worker ' + worker.process.pid + ' is online') }) - cluster.on('exit', function (worker, code, signal) { - console.log('Worker ' + worker.process.pid + ' died with code: ' + code + ', and signal: ' + signal) + cluster.on('exit', function(worker, code, signal) { + console.log( + 'Worker ' + + worker.process.pid + + ' died with code: ' + + code + + ', and signal: ' + + signal + ) console.log('Starting a new worker') cluster.fork() }) - var watcher = chokidar.watch(process.cwd(), { + const watcher = chokidar.watch(process.cwd(), { depth: 0, - ignored: /[\/\\]\./, + ignored: /[/\\]\./, ignoreInitial: true }) - watcher.on('add', function (filePath) { + watcher.on('add', function(filePath) { if (path.basename(filePath) === 'restart.cdn') { console.log('Shutdown requested') fs.unlinkSync(filePath) @@ -43,11 +52,14 @@ if (config.get('cluster')) { } }) } else { - var app = module.exports = require('./dadi/lib') - app.start(function () { - console.log('Process ' + process.pid + ' is listening for incoming requests') + const app = (module.exports = require('./dadi/lib')) - process.on('message', function (message) { + app.start(function() { + console.log( + 'Process ' + process.pid + ' is listening for incoming requests' + ) + + process.on('message', function(message) { if (message.type === 'shutdown') { console.log('Process ' + process.pid + ' is shutting down...') process.exit(0) @@ -56,27 +68,31 @@ if (config.get('cluster')) { }) } } else { - var app = module.exports = require('./dadi/lib') - app.start(function () { - console.log('Process ' + process.pid + ' is listening for incoming requests') + const app = (module.exports = require('./dadi/lib')) + + app.start(function() { + console.log( + 'Process ' + process.pid + ' is listening for incoming requests' + ) }) } -function restartWorkers () { - var wid, workerIds = [] +function restartWorkers() { + let wid + const workerIds = [] for (wid in cluster.workers) { workerIds.push(wid) } - workerIds.forEach(function (wid) { + workerIds.forEach(function(wid) { if (cluster.workers[wid]) { cluster.workers[wid].send({ type: 'shutdown', from: 'master' }) - setTimeout(function () { + setTimeout(function() { if (cluster.workers[wid]) { cluster.workers[wid].kill('SIGKILL') } diff --git a/package.json b/package.json index b98fa709..a86dd089 100644 --- a/package.json +++ b/package.json @@ -1,12 +1,13 @@ { "name": "@dadi/cdn", - "version": "3.6.2", + "version": "4.0.0", "product": "DADI CDN", "description": "A high performance, just-in-time asset manipulation and delivery layer designed as a modern content distribution solution.", "scripts": { "init": "validate-commit-msg", "start": "node index.js --node_env=development", - "test": "rm -f config/config.test.json && standard 'dadi/**/*.js' && env NODE_ENV=test ./node_modules/.bin/nyc --reporter=lcov -x '**/workspace/**' -x 'test/**' ./node_modules/mocha/bin/_mocha test && ./node_modules/.bin/nyc report", + "test": "rm -f config/config.test.json && eslint --ext js,jsx . 
&& prettier --check '**/*.{js,jsx,md,html,css}' && npm run test:run && ./node_modules/.bin/nyc report", + "test:run": "rm -rf cache && env NODE_ENV=test ./node_modules/.bin/nyc --reporter=lcov -x '**/workspace/**' -x 'test/**' ./node_modules/mocha/bin/_mocha test", "precommit": "node scripts/precommit.js", "posttest": "./scripts/coverage.js", "snyk-protect": "snyk protect", @@ -41,11 +42,10 @@ "fs-extra": "^7.0.1", "gifwrap": "^0.7.5", "he": "^1.1.0", - "husky": "^1.0.1", "image-size-stream": "1.1.0", "imagemin": "^6.0.0", "imagemin-jpegtran": "^6.0.0", - "jimp": "^0.5.6", + "jimp": "^0.6.1", "jsonwebtoken": "^8.2.1", "length-stream": "~0.1.1", "maxmind": "^2.2.0", @@ -60,10 +60,10 @@ "request-promise": "^4.1.1", "router": "~1.3.0", "sha1": "~1.1.1", - "sharp": "^0.21.0", + "sharp": "^0.22.0", "simple-bufferstream": "^1.0.0", "smartcrop-sharp": "^2.0.2", - "snyk": "^1.103.4", + "snyk": "^1.192.4", "sqwish": "^0.2.2", "stream-length": "^1.0.2", "streamifier": "^0.1.1", @@ -75,22 +75,27 @@ "validate-commit-message": "^3.0.1" }, "devDependencies": { + "@dadi/eslint-config": "^1.1.0", + "@dadi/prettier-config": "^1.1.0", "aws-sdk-mock": "^1.5.0", "coveralls": "^3.0.1", "env-test": "^1.0.0", + "eslint": "^6.3.0", "fakeredis": "^2.0.0", "http-proxy": "^1.16.2", + "husky": "^1.3.1", "it-each": "^0.3.1", + "lint-staged": "^9.2.5", "mocha": "^5.2.0", "nock": "^9.0.2", "nyc": "^14.0.0", + "prettier": "^1.18.2", "proxyquire": "~2.0.0", "redis": "^2.6.3", "should": "~13.2.0", "sinon": "^4.0.2", - "standard": "8.x.x", - "superagent": "^4.1.0", - "supertest": "^3.4.2" + "superagent": "^5.0.2", + "supertest": "^4.0.0" }, "repository": { "type": "git", @@ -105,5 +110,16 @@ }, "author": "DADI ", "license": "SEE LICENSE IN GPL.md", - "snyk": true + "snyk": true, + "husky": { + "hooks": { + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.{js,jsx,md,html,css}": [ + "prettier --write", + "git add" + ] + } } diff --git a/scripts/coverage.js b/scripts/coverage.js index 6030a104..12921d60 100755 --- a/scripts/coverage.js +++ b/scripts/coverage.js @@ -3,7 +3,10 @@ const exec = require('child_process').exec if (process.env['CI']) { - exec('cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js', (err, out) => { - if (err) console.log(err) - }) + exec( + 'cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js', + (err, out) => { + if (err) console.log(err) + } + ) } diff --git a/scripts/precommit.js b/scripts/precommit.js index 97b0f85e..e26fb727 100644 --- a/scripts/precommit.js +++ b/scripts/precommit.js @@ -2,12 +2,12 @@ const exec = require('child_process').exec -function currentBranch () { +function currentBranch() { return new Promise((resolve, reject) => { exec('git branch --no-color', (err, out) => { if (err) return reject(err) - let branches = out.split('\n') + const branches = out.split('\n') let branch = branches.find(branch => { return /^\*/.test(branch) }) @@ -23,7 +23,8 @@ function currentBranch () { currentBranch().then(branch => { console.log('Checking valid branch name...') - if (branch !== 'master' && + if ( + branch !== 'master' && branch !== 'develop' && !/^feature\//.test(branch) && !/^patch\//.test(branch) && diff --git a/test/acceptance/auth.js b/test/acceptance/auth.js index ab78a502..1d4e8553 100755 --- a/test/acceptance/auth.js +++ b/test/acceptance/auth.js @@ -6,11 +6,13 @@ const help = require(__dirname + '/help') const app = require(__dirname + '/../../dadi/lib/') const fs = require('fs') -let cdnUrl = 
`http://${config.get('server.host')}:${config.get('server.port')}` -let configBackup = config.get() +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` +const configBackup = config.get() -describe('Authentication', function () { - let tokenRoute = config.get('auth.tokenUrl') +describe('Authentication', function() { + const tokenRoute = config.get('auth.tokenUrl') before(done => { app.start(err => { @@ -92,7 +94,9 @@ describe('Authentication', function () { }) .end((err, res) => { res.statusCode.should.eql(401) - res.headers['www-authenticate'].should.eql('Bearer, error="no_private_key", error_description="No private key configured in auth.privateKey"') + res.headers['www-authenticate'].should.eql( + 'Bearer, error="no_private_key", error_description="No private key configured in auth.privateKey"' + ) done() }) }) @@ -124,7 +128,7 @@ describe('Authentication', function () { it('should not allow `/api/flush` request with expired tokens', done => { config.set('auth.tokenTtl', 1) - let _done = err => { + const _done = err => { config.set('auth.tokenTtl', configBackup.auth.tokenTtl) done(err) @@ -135,7 +139,7 @@ describe('Authentication', function () { .post('/api/flush') .send({pattern: 'test'}) .set('Authorization', 'Bearer ' + token) - .expect(200, (err) => { + .expect(200, err => { if (err) return _done(err) setTimeout(() => { @@ -214,7 +218,7 @@ describe('Authentication', function () { .expect('Cache-Control', 'no-store') .expect(200) .end((err, res) => { - let token = res.body.accessToken + const token = res.body.accessToken request(cdnUrl) .post('/api/flush') @@ -314,7 +318,7 @@ describe('Authentication', function () { config.set('auth.privateKey', 'privateKey2', 'testdomain.com') config.set('auth.tokenTtl', 20000, 'testdomain.com') - let startTime = Math.floor(Date.now() / 1000) + const startTime = Math.floor(Date.now() / 1000) request(cdnUrl) .post(tokenRoute) @@ -328,42 +332,37 @@ describe('Authentication', function () { .expect('Cache-Control', 'no-store') .expect(200) .end((err, res) => { - jwt.verify( - res.body.accessToken, - 'privateKey1', - (err, decoded) => { - if (err) return done(err) - - (decoded.exp - startTime).should.eql(10000) - decoded.domain.should.eql('localhost') + jwt.verify(res.body.accessToken, 'privateKey1', (err, decoded) => { + if (err) return done(err)(decoded.exp - startTime).should.eql(10000) + decoded.domain.should.eql('localhost') - request(cdnUrl) - .post(tokenRoute) - .send({ - clientId: 'testClient2', - secret: 'superSecret2' - }) - .set('host', 'testdomain.com:80') - .expect('content-type', 'application/json') - .expect('pragma', 'no-cache') - .expect('Cache-Control', 'no-store') - .expect(200) - .end((err, res) => { - jwt.verify( - res.body.accessToken, - 'privateKey2', - (err, decoded) => { - if (err) return done(err) - - (decoded.exp - startTime).should.eql(20000) - decoded.domain.should.eql('testdomain.com') - - done() - } - ) - }) - } - ) + request(cdnUrl) + .post(tokenRoute) + .send({ + clientId: 'testClient2', + secret: 'superSecret2' + }) + .set('host', 'testdomain.com:80') + .expect('content-type', 'application/json') + .expect('pragma', 'no-cache') + .expect('Cache-Control', 'no-store') + .expect(200) + .end((err, res) => { + jwt.verify( + res.body.accessToken, + 'privateKey2', + (err, decoded) => { + if (err) + return done(err)(decoded.exp - startTime).should.eql( + 20000 + ) + decoded.domain.should.eql('testdomain.com') + + done() + } + ) + }) + }) }) }) }) diff --git a/test/acceptance/controller.js 
b/test/acceptance/controller.js index be0e23db..a434fcc5 100755 --- a/test/acceptance/controller.js +++ b/test/acceptance/controller.js @@ -9,17 +9,21 @@ const request = require('supertest') const cache = require(path.join(__dirname, '/../../dadi/lib/cache')) const help = require(path.join(__dirname, '/help')) const app = require(path.join(__dirname, '/../../dadi/lib/')) -const imageHandler = require(path.join(__dirname, '/../../dadi/lib/handlers/image')) +const imageHandler = require(path.join( + __dirname, + '/../../dadi/lib/handlers/image' +)) let config = require(path.join(__dirname, '/../../config')) -let configBackup = config.get() -let cdnUrl = 'http://' + config.get('server.host') + ':' + config.get('server.port') +const configBackup = config.get() +const cdnUrl = + 'http://' + config.get('server.host') + ':' + config.get('server.port') let testConfigString -describe('Controller', function () { +describe('Controller', function() { this.timeout(10000) - let tokenRoute = config.get('auth.tokenUrl') + const tokenRoute = config.get('auth.tokenUrl') before(done => { delete require.cache[__dirname + '/../../config'] @@ -43,82 +47,25 @@ describe('Controller', function () { help.clearCache() }) - describe('Options Discovery', function (done) { - describe('Legacy URL syntax', () => { - it('should extract options from url path if no querystring', function (done) { - // spy on the sanitiseOptions method to access the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg') - .expect(200) - .end((err, res) => { - imageHandler.ImageHandler.prototype.sanitiseOptions.restore() - var options = method.returnValues[0] - options.quality.should.eql(50) - options.width.should.eql(801) - options.height.should.eql(478) - options.resizeStyle.should.eql('aspectfit') - done() - }) - }) - - it('should extract options from url path if using legacyURLFormat', function (done) { - // spy on the sanitiseOptions method to access the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/aspectfit/North/0/0/0/0/0/test.jpg') - .expect(200) - .end((err, res) => { - imageHandler.ImageHandler.prototype.sanitiseOptions.restore() - var options = method.returnValues[0] - - options.quality.should.eql(50) - options.width.should.eql(801) - options.height.should.eql(478) - options.resizeStyle.should.eql('aspectfit') - - done() - }) - }) - - it('should extract options from url path if using legacyURLFormat with missing params', function (done) { - // spy on the sanitiseOptions method to access the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0//0/North/0/0/0/0/0/test.jpg') - .expect(200) - .end((err, res) => { - imageHandler.ImageHandler.prototype.sanitiseOptions.restore() - var options = method.returnValues[0] - - options.quality.should.eql(50) - options.width.should.eql(801) - options.height.should.eql(478) - options.gravity.should.eql('North') - - done() - }) - }) - }) - - it('should extract options from querystring if one is present', function (done) { + describe('Options Discovery', function(done) { + it('should extract options from querystring if one is present', function(done) { // spy on the sanitiseOptions method to access 
the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') + const method = sinon.spy( + imageHandler.ImageHandler.prototype, + 'sanitiseOptions' + ) + + const client = request(cdnUrl) - var client = request(cdnUrl) client - .get('/test.jpg?quality=50&width=801&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2') + .get( + '/test.jpg?quality=50&width=801&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2' + ) .end((err, res) => { imageHandler.ImageHandler.prototype.sanitiseOptions.restore() method.called.should.eql(true) - var options = method.returnValues[0] + const options = method.returnValues[0] options.quality.should.eql(50) options.width.should.eql(801) @@ -128,18 +75,22 @@ describe('Controller', function () { }) }) - it('should extract options from querystring using abbreviated params', function (done) { + it('should extract options from querystring using abbreviated params', function(done) { // spy on the sanitiseOptions method to access the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') + const method = sinon.spy( + imageHandler.ImageHandler.prototype, + 'sanitiseOptions' + ) + + const client = request(cdnUrl) - var client = request(cdnUrl) client .get('/test.jpg?q=50&w=801&h=478&g=North&resize=aspectfit&dpr=2') .end((err, res) => { imageHandler.ImageHandler.prototype.sanitiseOptions.restore() method.called.should.eql(true) - var options = method.returnValues[0] + const options = method.returnValues[0] options.quality.should.eql(50) options.width.should.eql(801) @@ -150,18 +101,24 @@ describe('Controller', function () { }) }) - it('should extract options from querystring when it is encoded', function (done) { + it('should extract options from querystring when it is encoded', function(done) { // spy on the sanitiseOptions method to access the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') + const method = sinon.spy( + imageHandler.ImageHandler.prototype, + 'sanitiseOptions' + ) + + const client = request(cdnUrl) - var client = request(cdnUrl) client - .get('/test.jpg?q=50&w=801&h=478&g=North&resize=aspectfit&dpr=2') + .get( + '/test.jpg?q=50&w=801&h=478&g=North&resize=aspectfit&dpr=2' + ) .end((err, res) => { imageHandler.ImageHandler.prototype.sanitiseOptions.restore() method.called.should.eql(true) - var options = method.returnValues[0] + const options = method.returnValues[0] options.quality.should.eql(50) options.width.should.eql(801) @@ -172,46 +129,60 @@ describe('Controller', function () { }) }) - it('should extract output format from querystring if present', function (done) { + it('should extract output format from querystring if present', function(done) { // spy on the sanitiseOptions method to access the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') + const method = sinon.spy( + imageHandler.ImageHandler.prototype, + 'sanitiseOptions' + ) + + const client = request(cdnUrl) - var client = request(cdnUrl) client - .get('/test.jpg?format=png&quality=50&width=801&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2') + .get( + '/test.jpg?format=png&quality=50&width=801&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2' + ) .end((err, res) => { imageHandler.ImageHandler.prototype.sanitiseOptions.restore() method.called.should.eql(true) - var options = method.returnValues[0] + const options = method.returnValues[0] + 
options.format.should.eql('png') done() }) }) - it('should extract options from querystring if an external URL is provided', function (done) { - let server = nock('https://cdn.somedomain.tech') + it('should extract options from querystring if an external URL is provided', function(done) { + const server = nock('https://cdn.somedomain.tech') .get('/images/mock/logo.png') .replyWithFile(200, 'test/images/visual/measure1.png', { 'Content-Type': 'image/png' }) // spy on the sanitiseOptions method to access the provided arguments - let method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') + const method = sinon.spy( + imageHandler.ImageHandler.prototype, + 'sanitiseOptions' + ) + + const configStub = sinon.stub(config, 'get') - let configStub = sinon.stub(config, 'get') configStub.withArgs('images.remote.enabled').returns(true) configStub.withArgs('images.remote.allowFullURL').returns(true) configStub.callThrough() - let client = request(cdnUrl) + const client = request(cdnUrl) + client - .get('/https://cdn.somedomain.tech/images/mock/logo.png?quality=50&width=80&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2') + .get( + '/https://cdn.somedomain.tech/images/mock/logo.png?quality=50&width=80&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2' + ) .end((err, res) => { imageHandler.ImageHandler.prototype.sanitiseOptions.restore() method.called.should.eql(true) - var options = method.returnValues[0] + const options = method.returnValues[0] options.quality.should.eql(50) options.width.should.eql(80) @@ -225,8 +196,8 @@ describe('Controller', function () { }) }) - it('should extract options from querystring if an external URL with URL params is provided', function (done) { - let server = nock('https://cdn.somedomain.tech') + it('should extract options from querystring if an external URL with URL params is provided', function(done) { + const server = nock('https://cdn.somedomain.tech') .get('/images/mock/logo.png') .query({height: '100', width: '500'}) .replyWithFile(200, 'test/images/visual/measure1.png', { @@ -234,21 +205,28 @@ describe('Controller', function () { }) // spy on the sanitiseOptions method to access the provided arguments - var method = sinon.spy(imageHandler.ImageHandler.prototype, 'sanitiseOptions') + const method = sinon.spy( + imageHandler.ImageHandler.prototype, + 'sanitiseOptions' + ) + + const configStub = sinon.stub(config, 'get') - var configStub = sinon.stub(config, 'get') configStub.withArgs('images.remote.enabled').returns(true) configStub.withArgs('images.remote.allowFullURL').returns(true) configStub.callThrough() - var client = request(cdnUrl) + const client = request(cdnUrl) + client - .get('/https://cdn.somedomain.tech/images/mock/logo.png?height=100&width=500?quality=50&width=80&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2') + .get( + '/https://cdn.somedomain.tech/images/mock/logo.png?height=100&width=500?quality=50&width=80&height=478&gravity=North&resizeStyle=aspectfit&devicePixelRatio=2' + ) .end((err, res) => { imageHandler.ImageHandler.prototype.sanitiseOptions.restore() method.called.should.eql(true) - var options = method.returnValues[0] + const options = method.returnValues[0] options.quality.should.eql(50) options.width.should.eql(80) @@ -265,10 +243,10 @@ describe('Controller', function () { describe('cache control header', () => { it('should set the cache-control header according to the mimetype configuration in headers.cacheControl', done => { - let cacheControl = { - 'default': 
'public, max-age=3600', - 'paths': [], - 'mimetypes': [ + const cacheControl = { + default: 'public, max-age=3600', + paths: [], + mimetypes: [ {'text/css': 'public, max-age=86400'}, {'text/javascript': 'public, max-age=86400'}, {'application/javascript': 'public, max-age=86400'} @@ -286,9 +264,14 @@ describe('Controller', function () { request(cdnUrl) .get('/test.css') .expect(200, (err, res) => { - res.headers['cache-control'].should.eql(cacheControl.mimetypes[0]['text/css']) + res.headers['cache-control'].should.eql( + cacheControl.mimetypes[0]['text/css'] + ) - config.set('headers.cacheControl', configBackup.headers.cacheControl) + config.set( + 'headers.cacheControl', + configBackup.headers.cacheControl + ) done() }) @@ -296,19 +279,15 @@ describe('Controller', function () { }) it('should respect the value of headers.cacheControl defined at domain level', done => { - let cacheControl1 = { - 'default': 'public, max-age=3600', - 'paths': [], - 'mimetypes': [ - {'text/css': 'public, max-age=86400'} - ] + const cacheControl1 = { + default: 'public, max-age=3600', + paths: [], + mimetypes: [{'text/css': 'public, max-age=86400'}] } - let cacheControl2 = { - 'default': 'public, max-age=3600', - 'paths': [], - 'mimetypes': [ - {'text/css': 'public, max-age=172800'} - ] + const cacheControl2 = { + default: 'public, max-age=3600', + paths: [], + mimetypes: [{'text/css': 'public, max-age=172800'}] } config.set('multiDomain.enabled', true) @@ -321,16 +300,26 @@ describe('Controller', function () { .get('/test.css') .set('Host', 'localhost:80') .expect(200, (err, res) => { - res.headers['cache-control'].should.eql(cacheControl1.mimetypes[0]['text/css']) + res.headers['cache-control'].should.eql( + cacheControl1.mimetypes[0]['text/css'] + ) request(cdnUrl) .get('/test.css') .set('Host', 'testdomain.com:80') .expect(200, (err, res) => { - res.headers['cache-control'].should.eql(cacheControl2.mimetypes[0]['text/css']) + res.headers['cache-control'].should.eql( + cacheControl2.mimetypes[0]['text/css'] + ) - config.set('headers.cacheControl', configBackup.headers.cacheControl) - config.set('multiDomain.enabled', configBackup.multiDomain.enabled) + config.set( + 'headers.cacheControl', + configBackup.headers.cacheControl + ) + config.set( + 'multiDomain.enabled', + configBackup.multiDomain.enabled + ) done() }) @@ -338,37 +327,37 @@ describe('Controller', function () { }) }) - describe('Assets', function () { + describe('Assets', function() { this.timeout(10000) - it('should handle uncompressed CSS file if uri is valid', function (done) { - var client = request(cdnUrl) - client - .get('/css/0/test.css') - .expect(200, done) + it('should handle uncompressed CSS file if uri is valid', function(done) { + const client = request(cdnUrl) + + client.get('/test.css').expect(200, done) }) - it('should handle compressed CSS file if uri is valid', function (done) { - var client = request(cdnUrl) - client - .get('/css/1/test.css') - .expect(200, done) + it('should handle compressed CSS file if uri is valid', function(done) { + const client = request(cdnUrl) + + client.get('/test.css?compress=1').expect(200, done) }) - it('should handle TTF file if uri is valid', function (done) { - var client = request(cdnUrl) + it('should handle TTF file if uri is valid', function(done) { + const client = request(cdnUrl) + client - .get('/fonts/test.ttf') - .expect('Content-Type', 'font/ttf') - .expect(200, done) + .get('/test.ttf') + .expect('Content-Type', 'font/ttf') + .expect(200, done) }) - it('should handle TTF file in 
subfolder if uri is valid', function (done) { - var client = request(cdnUrl) + it('should handle TTF file in subfolder if uri is valid', function(done) { + const client = request(cdnUrl) + client - .get('/fonts/next-level/test.ttf') - .expect('Content-Type', 'font/ttf') - .expect(200, done) + .get('/next-level/test.ttf') + .expect('Content-Type', 'font/ttf') + .expect(200, done) }) describe('gzip encoding', () => { @@ -376,7 +365,7 @@ describe('Controller', function () { config.set('headers.useGzipCompression', false) request(cdnUrl) - .get('/css/0/test.css') + .get('/test.css') .end((err, res) => { res.statusCode.should.eql(200) should.not.exist(res.headers['content-encoding']) @@ -384,13 +373,16 @@ describe('Controller', function () { config.set('headers.useGzipCompression', true) request(cdnUrl) - .get('/css/0/test.css') + .get('/test.css') .set('Accept-Encoding', 'gzip, deflate') .end((err, res) => { res.statusCode.should.eql(200) res.headers['content-encoding'].should.eql('gzip') - config.set('headers.useGzipCompression', configBackup.headers.useGzipCompression) + config.set( + 'headers.useGzipCompression', + configBackup.headers.useGzipCompression + ) done() }) @@ -406,7 +398,7 @@ describe('Controller', function () { config.set('headers.useGzipCompression', true, 'testdomain.com') request(cdnUrl) - .get('/css/0/test.css?cache=false') + .get('/test.css?cache=false') .set('Host', 'localhost') .end((err, res) => { res.statusCode.should.eql(200) @@ -415,14 +407,20 @@ describe('Controller', function () { config.set('headers.useGzipCompression', true) request(cdnUrl) - .get('/css/0/test.css?cache=false') + .get('/test.css?cache=false') .set('Host', 'testdomain.com') .end((err, res) => { res.statusCode.should.eql(200) res.headers['content-encoding'].should.eql('gzip') - config.set('headers.useGzipCompression', configBackup.headers.useGzipCompression) - config.set('multiDomain.enabled', configBackup.multiDomain.enabled) + config.set( + 'headers.useGzipCompression', + configBackup.headers.useGzipCompression + ) + config.set( + 'multiDomain.enabled', + configBackup.multiDomain.enabled + ) done() }) @@ -431,9 +429,9 @@ describe('Controller', function () { }) }) - describe('HTML passthrough', function () { + describe('HTML passthrough', function() { let server - let remoteUrl = 'http://localhost:8888' + const remoteUrl = 'http://localhost:8888' before(() => { config.set('images.directory.enabled', false) @@ -452,20 +450,28 @@ describe('Controller', function () { server = require('http').createServer((req, res) => { switch (req.url) { case '/': - fs.readFile(path.join(__dirname, '../assets/test.html'), 'utf8', (_err, data) => { - res.statusCode = 200 - res.setHeader('Content-Type', 'text/html') - res.end(data) - }) + fs.readFile( + path.join(__dirname, '../assets/test.html'), + 'utf8', + (_err, data) => { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/html') + res.end(data) + } + ) break case '/test.jpg': - fs.readFile(path.join(__dirname, '../images/test.jpg'), null, (_err, data) => { - res.statusCode = 200 - res.setHeader('Content-Type', 'image/jpeg') - res.end(data) - }) + fs.readFile( + path.join(__dirname, '../images/test.jpg'), + null, + (_err, data) => { + res.statusCode = 200 + res.setHeader('Content-Type', 'image/jpeg') + res.end(data) + } + ) break @@ -481,55 +487,60 @@ describe('Controller', function () { }) after(done => { - config.set('images.directory.enabled', configBackup.images.directory.enabled) + config.set( + 'images.directory.enabled', + 
configBackup.images.directory.enabled + ) config.set('images.remote.enabled', configBackup.images.remote.enabled) config.set('images.remote.path', configBackup.images.remote.path) - config.set('images.remote.allowFullURL', configBackup.images.remote.allowFullURL) + config.set( + 'images.remote.allowFullURL', + configBackup.images.remote.allowFullURL + ) config.set('images.s3.enabled', configBackup.images.s3.enabled) - config.set('assets.directory.enabled', configBackup.assets.directory.enabled) + config.set( + 'assets.directory.enabled', + configBackup.assets.directory.enabled + ) config.set('assets.remote.enabled', configBackup.assets.remote.enabled) config.set('assets.remote.path', configBackup.assets.remote.path) - config.set('assets.remote.allowFullURL', configBackup.assets.remote.allowFullURL) + config.set( + 'assets.remote.allowFullURL', + configBackup.assets.remote.allowFullURL + ) config.set('assets.s3.enabled', configBackup.assets.s3.enabled) - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) server = null done() }) - it('should pass an html request through from the configured remote origin', function (done) { - let client = request(cdnUrl) + it('should pass an html request through from the configured remote origin', function(done) { + const client = request(cdnUrl) client - .get('/') - .expect(200) - .end((_err, res) => { - res.headers['content-type'].should.exist - res.headers['content-type'].should.eql('text/html') + .get('/') + .expect(200) + .end((_err, res) => { + res.headers['content-type'].should.exist + res.headers['content-type'].should.eql('text/html') - res.headers['x-cache'].should.exist - res.headers['x-cache'].should.eql('MISS') + res.headers['x-cache'].should.exist + res.headers['x-cache'].should.eql('MISS') - done() - }) + done() + }) }) - it('should cache and return an html request from the configured remote origin', function (done) { - let client = request(cdnUrl) + it('should cache and return an html request from the configured remote origin', function(done) { + const client = request(cdnUrl) client - .get('/') - .expect(200) - .end((_err, res) => { - res.headers['content-type'].should.exist - res.headers['content-type'].should.eql('text/html') - - res.headers['x-cache'].should.exist - res.headers['x-cache'].should.eql('MISS') - - client .get('/') .expect(200) .end((_err, res) => { @@ -537,76 +548,76 @@ describe('Controller', function () { res.headers['content-type'].should.eql('text/html') res.headers['x-cache'].should.exist - res.headers['x-cache'].should.eql('HIT') + res.headers['x-cache'].should.eql('MISS') - done() + client + .get('/') + .expect(200) + .end((_err, res) => { + res.headers['content-type'].should.exist + res.headers['content-type'].should.eql('text/html') + + res.headers['x-cache'].should.exist + res.headers['x-cache'].should.eql('HIT') + + done() + }) }) - }) }) - it('should pass an image request through from the configured remote origin', function (done) { - let client = request(cdnUrl) + it('should pass an image request through from the configured remote origin', function(done) { + const client = request(cdnUrl) client - .get('/test.jpg') - .expect(200) - .end((_err, res) => { - res.body.should.be.an.instanceOf(Buffer) + .get('/test.jpg') + .expect(200) + .end((_err, res) => { + res.body.should.be.an.instanceOf(Buffer) - res.headers['content-type'].should.exist - res.headers['content-type'].should.eql('image/jpeg') + 
res.headers['content-type'].should.exist + res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.exist - res.headers['x-cache'].should.eql('MISS') + res.headers['x-cache'].should.exist + res.headers['x-cache'].should.eql('MISS') - done() - }) + done() + }) }) }) - describe('JavaScript', function () { + describe('JavaScript', function() { this.timeout(10000) - describe('legacy URL syntax', () => { - it('should handle uncompressed JS file if uri is valid', function (done) { - var client = request(cdnUrl) - client - .get('/js/0/test.js') - .expect(200, done) - }) - - it('should handle compressed JS file if uri is valid', function (done) { - var client = request(cdnUrl) - client - .get('/js/1/test.js') - .expect(200, done) - }) - }) + it('should return JS file', function(done) { + const client = request(cdnUrl) - it('should return JS file', function (done) { - var client = request(cdnUrl) - client - .get('/test.js') - .expect(200, done) + client.get('/test.js').expect(200, done) }) describe('transpiling', () => { - let originalJs = fs.readFileSync( + const originalJs = fs.readFileSync( path.join(__dirname, '/../assets/test-es6.js'), 'utf8' ) - let transpiledJs = '"use strict";\n\nvar makeFoo = function makeFoo(bar) {\n return "I foo, you " + bar;\n};' + const transpiledJs = + '"use strict";\n\nvar makeFoo = function makeFoo(bar) {\n return "I foo, you " + bar;\n};' it('should deliver original JS file if experimental.jsTranspiling is disabled', done => { config.set('experimental.jsTranspiling', false) request(cdnUrl) .get('/test-es6.js?transform=1') - .set('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)') + .set( + 'User-Agent', + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + ) .expect(200, (err, res) => { res.text.should.eql(originalJs) - config.set('experimental.jsTranspiling', configBackup.experimental.jsTranspiling) + config.set( + 'experimental.jsTranspiling', + configBackup.experimental.jsTranspiling + ) done() }) @@ -617,10 +628,16 @@ describe('Controller', function () { request(cdnUrl) .get('/test-es6.js?transform=1') - .set('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)') + .set( + 'User-Agent', + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + ) .expect(200, (err, res) => { res.text.should.eql(transpiledJs) - config.set('experimental.jsTranspiling', configBackup.experimental.jsTranspiling) + config.set( + 'experimental.jsTranspiling', + configBackup.experimental.jsTranspiling + ) done() }) @@ -643,19 +660,28 @@ describe('Controller', function () { request(cdnUrl) .get('/test-es6.js?transform=1') - .set('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)') + .set( + 'User-Agent', + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + ) .set('Host', 'localhost:80') .expect(200, (err, res) => { res.text.should.eql(originalJs) request(cdnUrl) .get('/test-es6.js?transform=1') - .set('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)') + .set( + 'User-Agent', + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + ) .set('Host', 'testdomain.com:80') .expect(200, (err, res) => { res.text.should.eql(transpiledJs) - config.set('experimental.jsTranspiling', configBackup.experimental.jsTranspiling) + config.set( + 'experimental.jsTranspiling', + configBackup.experimental.jsTranspiling + ) done() }) @@ -669,19 +695,28 @@ describe('Controller', function () { request(cdnUrl) 
.get('/test-es6.js?transform=1') - .set('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)') + .set( + 'User-Agent', + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + ) .set('Host', 'localhost:80') .expect(200, (err, res) => { res.text.should.eql(originalJs) request(cdnUrl) .get('/test-es6.js?transform=1') - .set('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)') + .set( + 'User-Agent', + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + ) .set('Host', 'testdomain.com:80') .expect(200, (err, res) => { res.text.should.eql(transpiledJs) - config.set('experimental.jsTranspiling', configBackup.experimental.jsTranspiling) + config.set( + 'experimental.jsTranspiling', + configBackup.experimental.jsTranspiling + ) done() }) @@ -691,16 +726,21 @@ describe('Controller', function () { }) }) - describe('Video', function () { - it('should respond with 206 Partial Content if uri is valid', function (done) { - var newTestConfig = JSON.parse(testConfigString) + describe('Video', function() { + it('should respond with 206 Partial Content if uri is valid', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) + const client = request(cdnUrl) + client .get('/video.mp4') .set('Range', 'bytes=0-') @@ -710,15 +750,20 @@ describe('Controller', function () { }) }) - it('should respond with 416 if range header is invalid', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should respond with 416 if range header is invalid', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) + const client = request(cdnUrl) + client .get('/video.mp4') .set('Range', 'bytes=a') @@ -728,15 +773,20 @@ describe('Controller', function () { }) }) - it('should respond with 400 if range header is malformed', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should respond with 400 if range header is malformed', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) + const client = request(cdnUrl) + client .get('/video.mp4') .set('Range', 'bytes') @@ -747,313 +797,78 @@ describe('Controller', function () { }) }) - describe('Images', function () { - describe('Legacy URL syntax', () => { - it('should handle test image if image uri is valid', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), 
JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - done() - }) - }) - - it('should handle deep nested test image', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/aspectfit/North/0/0/0/0/0/next-level/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - done() - }) - }) - - it('should handle test image with missing params', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0//0/North/0/0/0/0/0/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - done() - }) - }) - - it('should handle deep nested test image with missing params', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0//0/North/0/0/0/0/0/next-level/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - done() - }) - }) - - it('should handle image uri with spaces', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test%20copy.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - done() - }) - }) - - it('should handle image uri with special characters', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/700/700/0/0/0/1/aspectfit/North/0/0/0/0/0/768px-Rotating_earth_%28huge%29.gif') - .end((err, res) => { - res.statusCode.should.eql(200) - done() - }) - }) - - it('should return a placeholder image if image is not found', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - - newTestConfig.notFound = { - images: { - enabled: true, - path: './test/images/missing.png' - } - } - - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - 
.get('/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/testxxx.jpg') - .end((err, res) => { - res.body.should.be.instanceof(Buffer) - res.headers['content-type'].should.eql('image/png') - res.statusCode.should.eql(404) - done() - }) - }) - - it('should return configured statusCode if image is not found', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - - newTestConfig.notFound = { - statusCode: 410, - images: { - enabled: true, - path: './test/images/missing.png' - } - } - - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/testxxx.jpg') - .end((err, res) => { - res.statusCode.should.eql(410) + describe('Images', function() { + it('should return lastModified header for cached items using disk storage', function(done) { + this.timeout(4000) - newTestConfig.notFound.statusCode = 404 - newTestConfig.notFound.images.enabled = false - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) - config.loadFile(config.configPath()) + help.clearCache() - done() - }) - }) + const newTestConfig = JSON.parse(testConfigString) - it('should return image info when format = JSON', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + newTestConfig.caching.directory.enabled = true + newTestConfig.images.directory.enabled = true + newTestConfig.images.directory.path = './test/images' + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) - config.loadFile(config.configPath()) + config.loadFile(config.configPath()) - var client = request(cdnUrl) - client - .get('/test.jpg?format=json') - .end((err, res) => { - res.statusCode.should.eql(200) - var info = res.body + cache.reset() - info.fileName.should.eql('test.jpg') - info.format.should.eql('jpg') - done() - }) - }) + const client = request(cdnUrl) - it('should include EXIF data when format = JSON', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + client.get('/test.jpg').end((err, res) => { + res.statusCode.should.eql(200) - config.loadFile(config.configPath()) + res.headers['last-modified'].should.exist - var client = request(cdnUrl) - client - .get('/dm.jpg?format=json') - .end((err, res) => { + setTimeout(function() { + client.get('/test.jpg').end((err, res) => { res.statusCode.should.eql(200) - res.body.density.should.be.Object - res.body.density.width.should.be.Number - res.body.density.height.should.be.Number - res.body.density.unit.should.be.String + res.headers['last-modified'].should.exist done() }) - }) - - it('should get image from cache if cache is enabled and cached item exists', function (done) { - this.timeout(4000) - - help.clearCache() - - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.caching.directory.enabled = true - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), 
JSON.stringify(newTestConfig, null, 2)) - - config.loadFile(config.configPath()) - - cache.reset() - - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0/1/aspectfit/North/0/0/0/0/0/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - - res.headers['x-cache'].should.exist - res.headers['x-cache'].should.eql('MISS') - - setTimeout(function () { - client - .get('/jpg/50/0/0/801/478/0/0/0/1/aspectfit/North/0/0/0/0/0/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - - res.headers['x-cache'].should.exist - res.headers['x-cache'].should.eql('HIT') - done() - }) - }, 1000) - }) + }, 1000) }) }) - it('should return lastModified header for cached items using disk storage', function (done) { - this.timeout(4000) - - help.clearCache() + it('should handle deep nested test image', function(done) { + const newTestConfig = JSON.parse(testConfigString) - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.caching.directory.enabled = true newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - cache.reset() + const client = request(cdnUrl) - var client = request(cdnUrl) - client - .get('/jpg/50/0/0/801/478/0/0/0/1/aspectfit/North/0/0/0/0/0/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - - res.headers['last-modified'].should.exist - - setTimeout(function () { - client - .get('/jpg/50/0/0/801/478/0/0/0/1/aspectfit/North/0/0/0/0/0/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - - res.headers['last-modified'].should.exist - done() - }) - }, 1000) - }) + client.get('/next-level/test.jpg').end((err, res) => { + res.statusCode.should.eql(200) + done() + }) }) - it('should handle deep nested test image', function (done) { - var newTestConfig = JSON.parse(testConfigString) - newTestConfig.images.directory.enabled = true - newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + it('should handle image uri with uppercase extension', function(done) { + const newTestConfig = JSON.parse(testConfigString) - config.loadFile(config.configPath()) - - var client = request(cdnUrl) - client - .get('/next-level/test.jpg') - .end((err, res) => { - res.statusCode.should.eql(200) - done() - }) - }) - - it('should handle image uri with uppercase extension', function (done) { - var newTestConfig = JSON.parse(testConfigString) newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) + const client = request(cdnUrl) + client .get('/shane%20long%20new%20contract.JPG?quality=100') .end((err, res) => { @@ -1062,17 +877,24 @@ describe('Controller', function () { }) }) - it('should extract entropy data from an image', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should extract entropy data from an image', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - 
fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) + const client = request(cdnUrl) + client - .get('/test.jpg?quality=100&width=180&height=180&resizeStyle=entropy&format=json') + .get( + '/test.jpg?quality=100&width=180&height=180&resizeStyle=entropy&format=json' + ) .end((err, res) => { res.statusCode.should.eql(200) @@ -1085,72 +907,90 @@ describe('Controller', function () { }) }) - it('should return pre and post image details', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should return pre and post image details', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) + const client = request(cdnUrl) + client - .get('/test.jpg?quality=100&width=180&height=180&resizeStyle=entropy&format=json') + .get( + '/test.jpg?quality=100&width=180&height=180&resizeStyle=entropy&format=json' + ) .end((err, res) => { res.statusCode.should.eql(200) - let fileSizePre = res.body.fileSizePre + const fileSizePre = res.body.fileSizePre + res.body.fileSizePost.should.be.below(fileSizePre) - let primaryColorPre = res.body.primaryColorPre + const primaryColorPre = res.body.primaryColorPre + res.body.primaryColorPost.should.not.eql(primaryColorPre) done() }) }) - it('should return 400 when requested crop dimensions are larger than the original image', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should return 400 when requested crop dimensions are larger than the original image', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) - client - .get('/test.jpg?resize=crop&crop=0,0,3000,3000') - .end((err, res) => { - res.statusCode.should.eql(400) - res.body.message.should.exist - - done() - }) + const client = request(cdnUrl) + + client.get('/test.jpg?resize=crop&crop=0,0,3000,3000').end((err, res) => { + res.statusCode.should.eql(400) + res.body.message.should.exist + + done() + }) }) describe('comma-separated conditional formats', () => { it('should return an image as WebP if format is `webp,jpg` and the requesting browser indicates support for WebP', done => { request(cdnUrl) - .get('/test.jpg?format=webp,jpg') - .set('accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8') - .end((err, res) => { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/webp') + .get('/test.jpg?format=webp,jpg') + .set( + 'accept', + 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8' + ) + .end((err, res) => { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql('image/webp') - done() - }) + done() + }) }) it('should return an image as JPEG if format is `webp,jpg` and the 
requesting browser does not indicate support for WebP', done => { request(cdnUrl) - .get('/test.jpg?format=webp,jpg') - .set('accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8') - .end((err, res) => { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') + .get('/test.jpg?format=webp,jpg') + .set( + 'accept', + 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8' + ) + .end((err, res) => { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql('image/jpeg') - done() - }) + done() + }) }) }) @@ -1158,7 +998,7 @@ describe('Controller', function () { it('should return "404 Not Found" when the remote image returns 404', done => { config.set('notFound.images.enabled', false) - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/images/not-found.jpg') .expect(404) .end((err, res) => { @@ -1170,30 +1010,30 @@ describe('Controller', function () { }) describe('default files', () => { - it('should return a configured default file if no path is specified', function (done) { + it('should return a configured default file if no path is specified', function(done) { config.set('defaultFiles', ['test.css']) request(cdnUrl) - .get('/') - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql('text/css') + .get('/') + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('text/css') - config.set('defaultFiles', []) - done() - }) + config.set('defaultFiles', []) + done() + }) }) - it('should return 404 if no default file is found', function (done) { + it('should return 404 if no default file is found', function(done) { config.set('defaultFiles', ['index.html']) request(cdnUrl) - .get('/') - .expect(404) - .end((err, res) => { - config.set('defaultFiles', []) - done() - }) + .get('/') + .expect(404) + .end((err, res) => { + config.set('defaultFiles', []) + done() + }) }) }) @@ -1202,20 +1042,22 @@ describe('Controller', function () { config.set('notFound.images.enabled', true) config.set('notFound.images.path', './test/images/missing.png') - return help.imagesEqual({ - base: 'test/images/missing.png', - test: `${cdnUrl}/not-found.jpg` - }).then(match => { - match.should.eql(true) - }) + return help + .imagesEqual({ + base: 'test/images/missing.png', + test: `${cdnUrl}/not-found.jpg` + }) + .then(match => { + match.should.eql(true) + }) }) - it('should return configured statusCode if image is not found', function (done) { + it('should return configured statusCode if image is not found', function(done) { config.set('notFound.images.enabled', true) config.set('notFound.images.path', './test/images/missing.png') config.set('notFound.statusCode', 410) - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/not-found.jpg') .expect(410) .end((err, res) => { @@ -1223,26 +1065,32 @@ describe('Controller', function () { res.headers['content-type'].should.eql('image/png') res.statusCode.should.eql(410) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) config.set('notFound.statusCode', configBackup.notFound.statusCode) done() }) }) - it('should return a json response when a directory is requested', function (done) { + it('should return a json response when a directory is requested', function(done) { config.set('notFound.images.enabled', true) config.set('notFound.images.path', './test/images/missing.png') config.set('notFound.statusCode', 410) - 
let client = request(cdnUrl) + const client = request(cdnUrl) .get('/path/to/missing/') .expect(410) .end((err, res) => { res.body.message.includes('File not found:').should.eql(true) res.statusCode.should.eql(404) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) config.set('notFound.statusCode', configBackup.notFound.statusCode) done() @@ -1250,7 +1098,7 @@ describe('Controller', function () { }) describe('when multi-domain is enabled', () => { - let fallbackImages = { + const fallbackImages = { localhost: 'test/images/original.jpg', 'testdomain.com': 'test/images/dog-w600.jpeg' } @@ -1263,11 +1111,19 @@ describe('Controller', function () { config.set('notFound.statusCode', 418, 'localhost') config.set('notFound.images.enabled', true, 'localhost') - config.set('notFound.images.path', fallbackImages.localhost, 'localhost') + config.set( + 'notFound.images.path', + fallbackImages.localhost, + 'localhost' + ) config.set('notFound.statusCode', 451, 'testdomain.com') config.set('notFound.images.enabled', true, 'testdomain.com') - config.set('notFound.images.path', fallbackImages['testdomain.com'], 'testdomain.com') + config.set( + 'notFound.images.path', + fallbackImages['testdomain.com'], + 'testdomain.com' + ) return help.proxyStart() }) @@ -1279,54 +1135,62 @@ describe('Controller', function () { }) it('returns the fallback image and status code defined by each domain if the image is not found', done => { - help.imagesEqual({ - base: fallbackImages.localhost, - test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` - }).then(match => { - match.should.eql(true) + help + .imagesEqual({ + base: fallbackImages.localhost, + test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` + }) + .then(match => { + match.should.eql(true) - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(418) - .end((err, res) => { - help.imagesEqual({ - base: fallbackImages['testdomain.com'], - test: `${help.proxyUrl}/not-found.jpg?mockdomain=testdomain.com` - }).then(match => { - match.should.eql(true) + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + .expect(418) + .end((err, res) => { + help + .imagesEqual({ + base: fallbackImages['testdomain.com'], + test: `${help.proxyUrl}/not-found.jpg?mockdomain=testdomain.com` + }) + .then(match => { + match.should.eql(true) - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(451) - .end(done) + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + .expect(451) + .end(done) + }) }) - }) - }) + }) }) it('returns an error message if the fallback image is disabled for the domain', done => { config.set('notFound.images.enabled', false, 'testdomain.com') - help.imagesEqual({ - base: fallbackImages.localhost, - test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` - }).then(match => { - match.should.eql(true) + help + .imagesEqual({ + base: fallbackImages.localhost, + test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` + }) + .then(match => { + match.should.eql(true) - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(418) - .end((err, res) => { - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(451) - .end((err, res) => { - res.body.message.includes('File not found:').should.eql(true) + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + 
.expect(418) + .end((err, res) => { + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + .expect(451) + .end((err, res) => { + res.body.message + .includes('File not found:') + .should.eql(true) - done() - }) - }) - }) + done() + }) + }) + }) }) }) }) @@ -1339,15 +1203,21 @@ describe('Controller', function () { }) afterEach(() => { - config.set('images.directory.enabled', configBackup.images.directory.enabled) + config.set( + 'images.directory.enabled', + configBackup.images.directory.enabled + ) config.set('images.remote.enabled', configBackup.images.remote.enabled) config.set('images.remote.path', configBackup.images.remote.path) - config.set('images.remote.allowFullURL', configBackup.images.remote.allowFullURL) + config.set( + 'images.remote.allowFullURL', + configBackup.images.remote.allowFullURL + ) config.set('images.s3.enabled', configBackup.images.s3.enabled) }) it('should retrieve image from remote URL using `images.remote.path` as base URL', () => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .replyWithFile(200, 'test/images/visual/measure1.png', { 'Content-Type': 'image/png' @@ -1355,18 +1225,20 @@ describe('Controller', function () { config.set('images.remote.path', 'https://one.somedomain.tech') - return help.imagesEqual({ - base: 'test/images/visual/measure1.png', - test: `${cdnUrl}/images/mock/logo.png` - }).then(match => { - match.should.eql(true) + return help + .imagesEqual({ + base: 'test/images/visual/measure1.png', + test: `${cdnUrl}/images/mock/logo.png` + }) + .then(match => { + match.should.eql(true) - server.isDone().should.eql(true) - }) + server.isDone().should.eql(true) + }) }) it('should retrieve image from remote URL and follow redirects', done => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(301, undefined, { Location: 'https://one.somedomain.tech/images/mock/logo2.png' @@ -1395,7 +1267,7 @@ describe('Controller', function () { }) it('should retrieve image from remote URL and follow redirects with relative paths', done => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(301, undefined, { Location: '/images/mock/logo2.png' @@ -1424,7 +1296,7 @@ describe('Controller', function () { }) it('should return a 404 when retrieving a remote asset that includes more redirects than the ones allowed in `http.followRedirects`', done => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(301, undefined, { Location: 'https://one.somedomain.tech/images/mock/logo2.png' @@ -1447,14 +1319,17 @@ describe('Controller', function () { .end((err, res) => { server.pendingMocks().length.should.eql(1) - config.set('http.followRedirects', configBackup.http.followRedirects) + config.set( + 'http.followRedirects', + configBackup.http.followRedirects + ) done() }) }) it('should return a 404 when retrieving a remote asset that includes more redirects than the ones allowed in `http.followRedirects` at domain level', done => { - let server1 = nock('https://one.somedomain.tech') + const server1 = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(301, undefined, { Location: 'https://one.somedomain.tech/images/mock/logo2.png' @@ -1468,7 +1343,7 @@ describe('Controller', function () { 
'Content-Type': 'image/png' }) - let server2 = nock('https://two.somedomain.tech') + const server2 = nock('https://two.somedomain.tech') .get('/images/mock/logo.png') .reply(301, undefined, { Location: 'https://two.somedomain.tech/images/mock/logo2.png' @@ -1487,12 +1362,20 @@ describe('Controller', function () { config.set('images.directory.enabled', false, 'localhost') config.set('images.remote.enabled', true, 'localhost') - config.set('images.remote.path', 'https://one.somedomain.tech', 'localhost') + config.set( + 'images.remote.path', + 'https://one.somedomain.tech', + 'localhost' + ) config.set('http.followRedirects', 1, 'localhost') config.set('images.directory.enabled', false, 'testdomain.com') config.set('images.remote.enabled', true, 'testdomain.com') - config.set('images.remote.path', 'https://two.somedomain.tech', 'testdomain.com') + config.set( + 'images.remote.path', + 'https://two.somedomain.tech', + 'testdomain.com' + ) config.set('http.followRedirects', 10, 'testdomain.com') request(cdnUrl) @@ -1513,7 +1396,10 @@ describe('Controller', function () { server2.isDone().should.eql(true) - config.set('multiDomain.enabled', configBackup.multiDomain.enabled) + config.set( + 'multiDomain.enabled', + configBackup.multiDomain.enabled + ) done() }) @@ -1523,7 +1409,7 @@ describe('Controller', function () { it('should return 400 when requesting a relative remote URL and `image.remote.path` is not set', done => { config.set('images.remote.path', null) - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/images/mock/logo.png') .expect(400) .end((err, res) => { @@ -1534,7 +1420,7 @@ describe('Controller', function () { }) it('should retrieve image from remote URL using a full URL', () => { - let server = nock('https://two.somedomain.tech') + const server = nock('https://two.somedomain.tech') .get('/images/mock/logo.png') .replyWithFile(200, 'test/images/visual/measure1.png', { 'Content-Type': 'image/png' @@ -1543,21 +1429,23 @@ describe('Controller', function () { config.set('images.remote.allowFullURL', true) config.set('images.remote.path', 'https://one.somedomain.tech') - return help.imagesEqual({ - base: 'test/images/visual/measure1.png', - test: `${cdnUrl}/https://two.somedomain.tech/images/mock/logo.png` - }).then(match => { - match.should.eql(true) + return help + .imagesEqual({ + base: 'test/images/visual/measure1.png', + test: `${cdnUrl}/https://two.somedomain.tech/images/mock/logo.png` + }) + .then(match => { + match.should.eql(true) - server.isDone().should.eql(true) - }) + server.isDone().should.eql(true) + }) }) it('should return 403 when requesting a full remote URL and `image.remote.enabled` is false', done => { config.set('images.remote.enabled', false) config.set('images.remote.allowFullURL', true) - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/https://two.somedomain.tech/images/mock/logo.png') .expect(403) .end((err, res) => { @@ -1572,7 +1460,7 @@ describe('Controller', function () { it('should return 403 when requesting a full remote URL and `image.remote.allowFullURL` is false', done => { config.set('images.remote.allowFullURL', false) - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/https://two.somedomain.tech/images/mock/logo.png') .expect(403) .end((err, res) => { @@ -1586,14 +1474,14 @@ describe('Controller', function () { describe('placeholder image is disabled', () => { it('should return "404 Not Found" when the remote image returns 404', done => { - let server = nock('https://one.somedomain.tech') 
+ const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(404) config.set('images.remote.path', 'https://one.somedomain.tech') config.set('notFound.images.enabled', false) - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/images/mock/logo.png') .expect(404) .end((err, res) => { @@ -1608,7 +1496,7 @@ describe('Controller', function () { describe('placeholder image is enabled', () => { it('should return a placeholder image when the remote image returns 404', () => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(404) @@ -1616,18 +1504,20 @@ describe('Controller', function () { config.set('notFound.images.enabled', true) config.set('notFound.images.path', './test/images/missing.png') - return help.imagesEqual({ - base: 'test/images/missing.png', - test: `${cdnUrl}/images/mock/logo.png` - }).then(match => { - match.should.eql(true) + return help + .imagesEqual({ + base: 'test/images/missing.png', + test: `${cdnUrl}/images/mock/logo.png` + }) + .then(match => { + match.should.eql(true) - server.isDone().should.eql(true) - }) + server.isDone().should.eql(true) + }) }) - it('should return configured statusCode if image is not found', function (done) { - let server = nock('https://one.somedomain.tech') + it('should return configured statusCode if image is not found', function(done) { + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(404) @@ -1636,7 +1526,7 @@ describe('Controller', function () { config.set('notFound.images.path', './test/images/missing.png') config.set('notFound.statusCode', 410) - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/images/mock/logo.png') .expect(410) .end((err, res) => { @@ -1644,15 +1534,21 @@ describe('Controller', function () { res.headers['content-type'].should.eql('image/png') res.statusCode.should.eql(410) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) - config.set('notFound.statusCode', configBackup.notFound.statusCode) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) + config.set( + 'notFound.statusCode', + configBackup.notFound.statusCode + ) done() }) }) describe('when multi-domain is enabled', () => { - let fallbackImages = { + const fallbackImages = { localhost: 'test/images/original.jpg', 'testdomain.com': 'test/images/dog-w600.jpeg' } @@ -1665,11 +1561,19 @@ describe('Controller', function () { config.set('notFound.statusCode', 418, 'localhost') config.set('notFound.images.enabled', true, 'localhost') - config.set('notFound.images.path', fallbackImages.localhost, 'localhost') + config.set( + 'notFound.images.path', + fallbackImages.localhost, + 'localhost' + ) config.set('notFound.statusCode', 451, 'testdomain.com') config.set('notFound.images.enabled', true, 'testdomain.com') - config.set('notFound.images.path', fallbackImages['testdomain.com'], 'testdomain.com') + config.set( + 'notFound.images.path', + fallbackImages['testdomain.com'], + 'testdomain.com' + ) return help.proxyStart() }) @@ -1681,78 +1585,86 @@ describe('Controller', function () { }) it('returns the fallback image and status code defined by each domain if the image is not found', done => { - let server1 = nock('https://one.somedomain.tech') + const server1 = nock('https://one.somedomain.tech') .get('/not-found.jpg') .reply(404) - let server2 = nock('https://two.somedomain.tech') + const server2 = 
nock('https://two.somedomain.tech') .get('/not-found.jpg') .reply(404) - help.imagesEqual({ - base: fallbackImages.localhost, - test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` - }).then(match => { - match.should.eql(true) - - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(418) - .end((err, res) => { - help.imagesEqual({ - base: fallbackImages['testdomain.com'], - test: `${help.proxyUrl}/not-found.jpg?mockdomain=testdomain.com` - }).then(match => { - match.should.eql(true) + help + .imagesEqual({ + base: fallbackImages.localhost, + test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` + }) + .then(match => { + match.should.eql(true) - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(451) - .end(done) + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + .expect(418) + .end((err, res) => { + help + .imagesEqual({ + base: fallbackImages['testdomain.com'], + test: `${help.proxyUrl}/not-found.jpg?mockdomain=testdomain.com` + }) + .then(match => { + match.should.eql(true) + + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + .expect(451) + .end(done) + }) }) - }) - }) + }) }) it('returns an error message if the fallback image is disabled for the domain', done => { config.set('notFound.images.enabled', false, 'testdomain.com') - let server1 = nock('https://one.somedomain.tech') + const server1 = nock('https://one.somedomain.tech') .get('/not-found.jpg') .reply(404) - help.imagesEqual({ - base: fallbackImages.localhost, - test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` - }).then(match => { - match.should.eql(true) + help + .imagesEqual({ + base: fallbackImages.localhost, + test: `${help.proxyUrl}/not-found.jpg?mockdomain=localhost` + }) + .then(match => { + match.should.eql(true) - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(418) - .end((err, res) => { - request(help.proxyUrl) - .get('/not-found.jpg?mockdomain=testdomain.com') - .expect(451) - .end((err, res) => { - res.body.message.includes('File not found:').should.eql(true) + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + .expect(418) + .end((err, res) => { + request(help.proxyUrl) + .get('/not-found.jpg?mockdomain=testdomain.com') + .expect(451) + .end((err, res) => { + res.body.message + .includes('File not found:') + .should.eql(true) - done() - }) - }) - }) + done() + }) + }) + }) }) }) }) it('should return "403 Forbidden" when the remote image returns 403', done => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(403) config.set('images.remote.path', 'https://one.somedomain.tech') - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/images/mock/logo.png') .expect(403) .end((err, res) => { @@ -1765,13 +1677,13 @@ describe('Controller', function () { }) it('should return whatever error code the remote server sends back, along with a generic error message', done => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/images/mock/logo.png') .reply(418) config.set('images.remote.path', 'https://one.somedomain.tech') - let client = request(cdnUrl) + const client = request(cdnUrl) .get('/images/mock/logo.png') .expect(418) .end((err, res) => { @@ -1792,26 +1704,39 @@ describe('Controller', function () { }) afterEach(() => { - 
config.set('images.directory.enabled', configBackup.images.directory.enabled) + config.set( + 'images.directory.enabled', + configBackup.images.directory.enabled + ) config.set('images.remote.enabled', configBackup.images.remote.enabled) config.set('images.remote.path', configBackup.images.remote.path) - config.set('images.remote.allowFullURL', configBackup.images.remote.allowFullURL) + config.set( + 'images.remote.allowFullURL', + configBackup.images.remote.allowFullURL + ) config.set('images.s3.enabled', configBackup.images.s3.enabled) }) - it('should return 200 when image is returned', function (done) { + it('should return 200 when image is returned', function(done) { // return a buffer from the S3 request - let stream = fs.createReadStream('./test/images/missing.png') - let buffers = [] - stream - .on('data', function (data) { buffers.push(data) }) - .on('end', function () { - let buffer = Buffer.concat(buffers) + const stream = fs.createReadStream('./test/images/missing.png') + const buffers = [] - AWS.mock('S3', 'getObject', Promise.resolve({ - LastModified: Date.now(), - Body: buffer - })) + stream + .on('data', function(data) { + buffers.push(data) + }) + .on('end', function() { + const buffer = Buffer.concat(buffers) + + AWS.mock( + 'S3', + 'getObject', + Promise.resolve({ + LastModified: Date.now(), + Body: buffer + }) + ) config.set('images.s3.bucketName', 'test-bucket') config.set('images.s3.accessKey', 'xxx') @@ -1820,38 +1745,45 @@ describe('Controller', function () { config.set('notFound.images.enabled', true) config.set('notFound.images.path', './test/images/missing.png') - let client = request(cdnUrl) - .get('/images/mock/logo.png') - .expect(200) - .end((err, res) => { - AWS.restore() + const client = request(cdnUrl) + .get('/images/mock/logo.png') + .expect(200) + .end((err, res) => { + AWS.restore() - res.body.should.be.instanceof(Buffer) - res.headers['content-type'].should.eql('image/png') - res.statusCode.should.eql(200) + res.body.should.be.instanceof(Buffer) + res.headers['content-type'].should.eql('image/png') + res.statusCode.should.eql(200) - done() - }) + done() + }) }) }) - it('should return lastModified header for cached items using S3 storage', function (done) { + it('should return lastModified header for cached items using S3 storage', function(done) { this.timeout(4000) help.clearCache() cache.reset() - let stream = fs.createReadStream('./test/images/missing.png') - let buffers = [] - stream - .on('data', function (data) { buffers.push(data) }) - .on('end', function () { - let buffer = Buffer.concat(buffers) + const stream = fs.createReadStream('./test/images/missing.png') + const buffers = [] - AWS.mock('S3', 'getObject', Promise.resolve({ - LastModified: new Date().toLocaleString(), - Body: buffer - })) + stream + .on('data', function(data) { + buffers.push(data) + }) + .on('end', function() { + const buffer = Buffer.concat(buffers) + + AWS.mock( + 'S3', + 'getObject', + Promise.resolve({ + LastModified: new Date().toLocaleString(), + Body: buffer + }) + ) config.set('images.s3.bucketName', 'test-bucket') config.set('images.s3.accessKey', 'xxx') @@ -1860,32 +1792,32 @@ describe('Controller', function () { config.set('notFound.images.enabled', true) config.set('notFound.images.path', './test/images/missing.png') - let client = request(cdnUrl) - .get('/images/mock/logo.png') - .end((err, res) => { - res.body.should.be.instanceof(Buffer) - res.headers['content-type'].should.eql('image/png') - res.statusCode.should.eql(200) + const client = request(cdnUrl) + 
.get('/images/mock/logo.png') + .end((err, res) => { + res.body.should.be.instanceof(Buffer) + res.headers['content-type'].should.eql('image/png') + res.statusCode.should.eql(200) - setTimeout(function () { - request(cdnUrl) - .get('/images/mock/logo.png') - .end((err, res) => { - AWS.restore() + setTimeout(function() { + request(cdnUrl) + .get('/images/mock/logo.png') + .end((err, res) => { + AWS.restore() - res.statusCode.should.eql(200) + res.statusCode.should.eql(200) - res.headers['last-modified'].should.exist - done() - }) - }, 1000) - }) + res.headers['last-modified'].should.exist + done() + }) + }, 1000) + }) }) }) - it('should return a placeholder image when the S3 image returns 404', function (done) { + it('should return a placeholder image when the S3 image returns 404', function(done) { // return 404 from the S3 request - AWS.mock('S3', 'getObject', Promise.reject({ statusCode: 404 })) + AWS.mock('S3', 'getObject', Promise.reject({statusCode: 404})) config.set('images.s3.bucketName', 'test-bucket') config.set('images.s3.accessKey', 'xxx') @@ -1895,22 +1827,22 @@ describe('Controller', function () { config.set('notFound.images.path', './test/images/missing.png') request(cdnUrl) - .get('/images/mock/logo.png') - .expect(404) - .end((err, res) => { - AWS.restore() + .get('/images/mock/logo.png') + .expect(404) + .end((err, res) => { + AWS.restore() - res.body.should.be.instanceof(Buffer) - res.headers['content-type'].should.eql('image/png') - res.statusCode.should.eql(404) + res.body.should.be.instanceof(Buffer) + res.headers['content-type'].should.eql('image/png') + res.statusCode.should.eql(404) - done() - }) + done() + }) }) - it('should return a json response when a directory is requested', function (done) { + it('should return a json response when a directory is requested', function(done) { // return 404 from the S3 request - AWS.mock('S3', 'getObject', Promise.reject({ statusCode: 404 })) + AWS.mock('S3', 'getObject', Promise.reject({statusCode: 404})) config.set('images.s3.bucketName', 'test-bucket') config.set('images.s3.accessKey', 'xxx') @@ -1932,9 +1864,9 @@ describe('Controller', function () { }) }) - it('should return configured statusCode if image is not found', function (done) { + it('should return configured statusCode if image is not found', function(done) { // return 404 from the S3 request - AWS.mock('S3', 'getObject', Promise.reject({ statusCode: 404 })) + AWS.mock('S3', 'getObject', Promise.reject({statusCode: 404})) config.set('images.s3.bucketName', 'test-bucket') config.set('images.s3.accessKey', 'xxx') @@ -1944,92 +1876,102 @@ describe('Controller', function () { config.set('notFound.images.path', './test/images/missing.png') request(cdnUrl) - .get('/images/mock/logo.png') - .expect(410) - .end((err, res) => { - AWS.restore() + .get('/images/mock/logo.png') + .expect(410) + .end((err, res) => { + AWS.restore() - res.body.should.be.instanceof(Buffer) - res.headers['content-type'].should.eql('image/png') - res.statusCode.should.eql(410) + res.body.should.be.instanceof(Buffer) + res.headers['content-type'].should.eql('image/png') + res.statusCode.should.eql(410) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) - config.set('notFound.statusCode', configBackup.notFound.statusCode) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) + config.set('notFound.statusCode', configBackup.notFound.statusCode) - done() - }) + done() + }) }) }) - describe('Other', function () { - it('should respond to the 
hello endpoint', function (done) { - var client = request(cdnUrl) - client - .get('/hello') - .end((err, res) => { - res.statusCode.should.eql(200) - res.text.should.eql('Welcome to DADI CDN') - done() - }) + describe('Other', function() { + it('should respond to the hello endpoint', function(done) { + const client = request(cdnUrl) + + client.get('/hello').end((err, res) => { + res.statusCode.should.eql(200) + res.text.should.eql('Welcome to DADI CDN') + done() + }) }) - it('should return 404 if there is no configured robots.txt file', function (done) { - var client = request(cdnUrl) - client - .get('/robots.txt') - .end((err, res) => { - res.statusCode.should.eql(404) - res.text.should.eql('File not found') - done() - }) + it('should return 404 if there is no configured robots.txt file', function(done) { + const client = request(cdnUrl) + + client.get('/robots.txt').end((err, res) => { + res.statusCode.should.eql(404) + res.text.should.eql('File not found') + done() + }) }) - it('should return a configured robots.txt file', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should return a configured robots.txt file', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.robots = 'test/robots.txt' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) - client - .get('/robots.txt') - .end((err, res) => { - res.statusCode.should.eql(200) - res.text.should.eql('User-Agent: *\nDisallow: /') - done() - }) + const client = request(cdnUrl) + + client.get('/robots.txt').end((err, res) => { + res.statusCode.should.eql(200) + res.text.should.eql('User-Agent: *\nDisallow: /') + done() + }) }) - it('should return a 204 for favicons', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should return a 204 for favicons', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) - client - .get('/favicon.ico') - .end((err, res) => { + const client = request(cdnUrl) + + client.get('/favicon.ico').end((err, res) => { res.statusCode.should.eql(204) done() }) }) - it('should handle requests for unknown formats', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should handle requests for unknown formats', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var client = request(cdnUrl) - client - .get('/something-else.zip') - .end((err, res) => { + const client = request(cdnUrl) + + client.get('/something-else.zip').end((err, res) => { res.statusCode.should.eql(404) done() }) diff --git a/test/acceptance/file-change-monitor.js b/test/acceptance/file-change-monitor.js index 1b42ab5c..a7ced6dc 100644 --- a/test/acceptance/file-change-monitor.js +++ 
b/test/acceptance/file-change-monitor.js @@ -10,9 +10,11 @@ const app = require('./../../dadi/lib/') const workspace = require('./../../dadi/lib/models/workspace') let config = require('./../../config') -let configBackup = config.get() -let cdnUrl = `http://${config.get('server.host')}:${config.get('server.port')}` -let newDomainSubdirectory = path.join(__dirname, '../../domains/xxx-domain') +const configBackup = config.get() +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` +const newDomainSubdirectory = path.join(__dirname, '../../domains/xxx-domain') let testConfig const cleanup = (dir, done) => { @@ -23,14 +25,12 @@ const cleanup = (dir, done) => { }) } -describe('File change monitor', function () { +describe('File change monitor', function() { this.timeout(15000) describe('Config', () => { before(done => { - testConfig = JSON.parse( - fs.readFileSync(config.configPath()).toString() - ) + testConfig = JSON.parse(fs.readFileSync(config.configPath()).toString()) app.start(err => { if (err) return done(err) @@ -51,7 +51,7 @@ describe('File change monitor', function () { }) it('should reload the config when the current config file changes', done => { - let configContent = JSON.parse( + const configContent = JSON.parse( fs.readFileSync(config.configPath()).toString() ) @@ -76,7 +76,7 @@ describe('File change monitor', function () { config.set('multiDomain.enabled', true) // Get initial domain list - let domains = domainManager.getDomains() + const domains = domainManager.getDomains() // Make a new domain directory fs.mkdir(newDomainSubdirectory, err => { @@ -102,52 +102,47 @@ describe('File change monitor', function () { describe('Workspace', () => { it('should reload a recipe when the file changes', done => { - let recipePath = path.resolve( - 'workspace', 'recipes', 'sample-image-recipe.json' + const recipePath = path.resolve( + 'workspace', + 'recipes', + 'sample-image-recipe.json' ) - let recipe = require(recipePath) + const recipe = require(recipePath) app.start(err => { if (err) return done(err) setTimeout(() => { request(cdnUrl) - .get('/sample-image-recipe/test.jpg') - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql( - 'image/png' - ) - - let newRecipe = Object.assign({}, recipe, { - settings: { - format: 'jpg' - } - }) + .get('/sample-image-recipe/test.jpg') + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('image/png') + + const newRecipe = Object.assign({}, recipe, { + settings: { + format: 'jpg' + } + }) - fs.writeFileSync( - recipePath, - JSON.stringify(newRecipe, null, 2) - ) + fs.writeFileSync(recipePath, JSON.stringify(newRecipe, null, 2)) - setTimeout(() => { - request(cdnUrl) - .get('/sample-image-recipe/test.jpg') - .expect(200) - .end((err, res) => { - fs.writeFileSync( - recipePath, - JSON.stringify(recipe, null, 2) - ) - - res.headers['content-type'].should.eql( - 'image/jpeg' - ) - - app.stop(done) - }) - }, 500) - }) + setTimeout(() => { + request(cdnUrl) + .get('/sample-image-recipe/test.jpg') + .expect(200) + .end((err, res) => { + fs.writeFileSync( + recipePath, + JSON.stringify(recipe, null, 2) + ) + + res.headers['content-type'].should.eql('image/jpeg') + + app.stop(done) + }) + }, 500) + }) }, 500) }) }) @@ -156,7 +151,7 @@ describe('File change monitor', function () { config.set('multiDomain.enabled', true) config.set('multiDomain.directory', 'domains') - let recipePath = path.resolve( + const recipePath = path.resolve( 
'domains/testdomain.com/workspace/recipes/foobar-recipe-one.json' ) @@ -178,44 +173,46 @@ describe('File change monitor', function () { setTimeout(() => { request(cdnUrl) - .get('/foobar-recipe-one/test.jpg') - .set('host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql( - 'image/png' - ) - - let newRecipe = Object.assign({}, recipe, { - settings: { - format: 'jpg' - } - }) + .get('/foobar-recipe-one/test.jpg') + .set('host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('image/png') + + const newRecipe = Object.assign({}, recipe, { + settings: { + format: 'jpg' + } + }) + + fs.writeFileSync( + recipePath, + JSON.stringify(newRecipe, null, 2) + ) - fs.writeFileSync( - recipePath, - JSON.stringify(newRecipe, null, 2) - ) - - setTimeout(() => { - request(cdnUrl) - .get('/foobar-recipe-one/test.jpg') - .set('host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql( - 'image/jpeg' - ) - - config.set('multiDomain.enabled', configBackup.multiDomain.enabled) - config.set('multiDomain.directory', configBackup.multiDomain.directory) - - done1() - - app.stop(done) - }) - }, 500) - }) + setTimeout(() => { + request(cdnUrl) + .get('/foobar-recipe-one/test.jpg') + .set('host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('image/jpeg') + + config.set( + 'multiDomain.enabled', + configBackup.multiDomain.enabled + ) + config.set( + 'multiDomain.directory', + configBackup.multiDomain.directory + ) + + done1() + + app.stop(done) + }) + }, 500) + }) }, 500) }) }) @@ -224,7 +221,8 @@ describe('File change monitor', function () { }).timeout(5000) it('should reload a route when the file changes', done => { - let mobileUA = 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B72 Safari/602.1' + const mobileUA = + 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B72 Safari/602.1' help.createTempFile( 'workspace/recipes/test-recipe-one.json', @@ -256,13 +254,13 @@ describe('File change monitor', function () { route: 'route-one', branches: [ { - 'condition': { - 'device': 'mobile', + condition: { + device: 'mobile' }, - 'recipe': 'recipe-one' + recipe: 'recipe-one' }, { - 'recipe': 'recipe-two' + recipe: 'recipe-two' } ] }, @@ -275,40 +273,40 @@ describe('File change monitor', function () { setTimeout(() => { request(cdnUrl) - .get('/route-one/test.jpg') - .set('user-agent', mobileUA) - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql( - 'image/jpeg' - ) - - routeContent.branches[0].recipe = 'recipe-two' - routeContent.branches[1].recipe = 'recipe-one' - - fs.writeFileSync( - path.resolve('workspace/routes/test-route-one.json'), - JSON.stringify(routeContent, null, 2) - ) - - setTimeout(() => { - request(cdnUrl) - .get('/route-one/test.jpg') - .set('user-agent', mobileUA) - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql( - 'image/png' - ) - - done3() - done2() - done1() - - app.stop(done) - }) - }, 500) - }) + .get('/route-one/test.jpg') + .set('user-agent', mobileUA) + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('image/jpeg') + + routeContent.branches[0].recipe = 'recipe-two' + routeContent.branches[1].recipe = 'recipe-one' + + fs.writeFileSync( + path.resolve( + 
'workspace/routes/test-route-one.json' + ), + JSON.stringify(routeContent, null, 2) + ) + + setTimeout(() => { + request(cdnUrl) + .get('/route-one/test.jpg') + .set('user-agent', mobileUA) + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql( + 'image/png' + ) + + done3() + done2() + done1() + + app.stop(done) + }) + }, 500) + }) }, 500) }) } @@ -320,7 +318,8 @@ describe('File change monitor', function () { }).timeout(5000) it('should reload a route at domain level when the file changes', done => { - let mobileUA = 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B72 Safari/602.1' + const mobileUA = + 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B72 Safari/602.1' config.set('multiDomain.enabled', true) config.set('multiDomain.directory', 'domains') @@ -355,13 +354,13 @@ describe('File change monitor', function () { route: 'route-one', branches: [ { - 'condition': { - 'device': 'mobile', + condition: { + device: 'mobile' }, - 'recipe': 'recipe-one' + recipe: 'recipe-one' }, { - 'recipe': 'recipe-two' + recipe: 'recipe-two' } ] }, @@ -374,45 +373,51 @@ describe('File change monitor', function () { setTimeout(() => { request(cdnUrl) - .get('/route-one/test.jpg') - .set('host', 'testdomain.com:80') - .set('user-agent', mobileUA) - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql( - 'image/jpeg' - ) - - routeContent.branches[0].recipe = 'recipe-two' - routeContent.branches[1].recipe = 'recipe-one' - - fs.writeFileSync( - path.resolve('domains/testdomain.com/workspace/routes/test-route-one.json'), - JSON.stringify(routeContent, null, 2) - ) - - setTimeout(() => { - request(cdnUrl) - .get('/route-one/test.jpg') - .set('host', 'testdomain.com:80') - .set('user-agent', mobileUA) - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql( - 'image/png' - ) - - config.set('multiDomain.enabled', configBackup.multiDomain.enabled) - config.set('multiDomain.directory', configBackup.multiDomain.directory) - - done3() - done2() - done1() - - app.stop(done) - }) - }, 500) - }) + .get('/route-one/test.jpg') + .set('host', 'testdomain.com:80') + .set('user-agent', mobileUA) + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('image/jpeg') + + routeContent.branches[0].recipe = 'recipe-two' + routeContent.branches[1].recipe = 'recipe-one' + + fs.writeFileSync( + path.resolve( + 'domains/testdomain.com/workspace/routes/test-route-one.json' + ), + JSON.stringify(routeContent, null, 2) + ) + + setTimeout(() => { + request(cdnUrl) + .get('/route-one/test.jpg') + .set('host', 'testdomain.com:80') + .set('user-agent', mobileUA) + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql( + 'image/png' + ) + + config.set( + 'multiDomain.enabled', + configBackup.multiDomain.enabled + ) + config.set( + 'multiDomain.directory', + configBackup.multiDomain.directory + ) + + done3() + done2() + done1() + + app.stop(done) + }) + }, 500) + }) }, 500) }) } @@ -421,6 +426,6 @@ describe('File change monitor', function () { ) } ) - }).timeout(5000) + }).timeout(5000) }) }) diff --git a/test/acceptance/flush.js b/test/acceptance/flush.js index e38510ea..24308cae 100755 --- a/test/acceptance/flush.js +++ b/test/acceptance/flush.js @@ -9,13 +9,15 @@ const request = require('supertest') const should = require('should') let bearerToken -let cdnUrl = 
`http://${config.get('server.host')}:${config.get('server.port')}` -let configBackup = config.get() +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` +const configBackup = config.get() -describe('Cache', function () { +describe('Cache', function() { this.timeout(10000) - describe('Flush', function () { + describe('Flush', function() { describe('when multi-domain is not enabled', () => { beforeEach(done => { config.set('caching.directory.enabled', true) @@ -31,17 +33,19 @@ describe('Cache', function () { bearerToken = token help.clearCache() - request(`http://${config.get('server.host')}:${config.get('server.port')}`) - .get('/test.jpg?q=50') - .expect(200) - .end((err, res) => { - if (err) return done(err) + request( + `http://${config.get('server.host')}:${config.get('server.port')}` + ) + .get('/test.jpg?q=50') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['x-cache'].should.exist - res.headers['x-cache'].should.eql('MISS') + res.headers['x-cache'].should.exist + res.headers['x-cache'].should.eql('MISS') - done() - }) + done() + }) }) }) }) @@ -51,12 +55,15 @@ describe('Cache', function () { app.stop(done) - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) config.set('caching.redis.enabled', configBackup.caching.redis.enabled) - config.set('multiDomain.enabled', configBackup.multiDomain.enabled) + config.set('multiDomain.enabled', configBackup.multiDomain.enabled) }) - it('should not flush cached items that don\'t match the specified path', done => { + it("should not flush cached items that don't match the specified path", done => { request(cdnUrl) .get('/test.jpg?q=70') .expect(200) @@ -199,9 +206,12 @@ describe('Cache', function () { app.stop(done) - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) config.set('caching.redis.enabled', configBackup.caching.redis.enabled) - config.set('multiDomain.enabled', configBackup.multiDomain.enabled) + config.set('multiDomain.enabled', configBackup.multiDomain.enabled) }) it('should only flush cached items for the target domain', done => { @@ -256,14 +266,13 @@ describe('Cache', function () { done() }) - }) }, 500) }) }) }, 500) }) - }) + }) }) }) @@ -319,7 +328,6 @@ describe('Cache', function () { done() }) - }) }, 500) }) @@ -330,7 +338,7 @@ describe('Cache', function () { }) }) - it('should not flush cached items that don\'t match the specified path', done => { + it("should not flush cached items that don't match the specified path", done => { help.getBearerToken('localhost', (err, bearerToken) => { request(cdnUrl) .get('/test.jpg?q=70') @@ -375,7 +383,7 @@ describe('Cache', function () { }, 500) }) }) - }) + }) }) }) diff --git a/test/acceptance/help.js b/test/acceptance/help.js index a30d3442..bfac79e3 100755 --- a/test/acceptance/help.js +++ b/test/acceptance/help.js @@ -9,50 +9,53 @@ const request = require('supertest') const req = require('request') const url = require('url') -let cdnUrl = `http://${config.get('server.host')}:${config.get('server.port')}` +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` module.exports.cdnUrl = cdnUrl -module.exports.createTempFile = function (filePath, content, options, callback) { - return fs.ensureDir( - path.dirname(path.resolve(filePath)) - ).then(() => { - 
if (typeof options === 'function') { - callback = options - options = {} - } +module.exports.createTempFile = function(filePath, content, options, callback) { + return fs + .ensureDir(path.dirname(path.resolve(filePath))) + .then(() => { + if (typeof options === 'function') { + callback = options + options = {} + } - let serialisedContent = typeof content === 'string' - ? content - : JSON.stringify(content, null, 2) + const serialisedContent = + typeof content === 'string' ? content : JSON.stringify(content, null, 2) - return fs.writeFile(filePath, serialisedContent) - }).then(() => { - let removeFn = () => fs.removeSync(filePath) + return fs.writeFile(filePath, serialisedContent) + }) + .then(() => { + const removeFn = () => fs.removeSync(filePath) - return new Promise((resolve, reject) => { - setTimeout(() => { - callback(removeFn, content) + return new Promise((resolve, reject) => { + setTimeout(() => { + callback(removeFn, content) - resolve() - }, (options.interval || 0)) + resolve() + }, options.interval || 0) + }) }) - }) } -module.exports.imagesEqual = function ({base, headers, test}) { - let fullBasePath = path.resolve(base) +module.exports.imagesEqual = function({base, headers, test}) { + const fullBasePath = path.resolve(base) if (test.indexOf('/') === 0) { - test = `http://${config.get('server.host')}:${config.get('server.port')}${test}` + test = `http://${config.get('server.host')}:${config.get( + 'server.port' + )}${test}` } - return Jimp - .read(fullBasePath) + return Jimp.read(fullBasePath) .then(baselineImage => { return Jimp.read(test).then(testImage => { - let diff = Jimp.diff(baselineImage, testImage, 0.1) - let distance = Jimp.distance(baselineImage, testImage) + const diff = Jimp.diff(baselineImage, testImage, 0.1) + const distance = Jimp.distance(baselineImage, testImage) if (distance < 0.15 || diff.percent < 0.15) { return Promise.resolve(true) @@ -60,19 +63,22 @@ module.exports.imagesEqual = function ({base, headers, test}) { return Promise.resolve(false) }) - }).catch(err => { + }) + .catch(err => { console.error(err) }) } -module.exports.filesEqual = function ({base, headers, test}) { - let fullBasePath = path.resolve(base) +module.exports.filesEqual = function({base, headers, test}) { + const fullBasePath = path.resolve(base) if (test.indexOf('/') === 0) { - test = `http://${config.get('server.host')}:${config.get('server.port')}${test}` + test = `http://${config.get('server.host')}:${config.get( + 'server.port' + )}${test}` } - let getFileContents = fileName => { + const getFileContents = fileName => { return new Promise((resolve, reject) => { fs.readFile(fileName, (err, data) => { return err ? 
reject(err) : resolve(data.toString()) @@ -80,11 +86,12 @@ module.exports.filesEqual = function ({base, headers, test}) { }) } - let getRemoteFileContents = url => { + const getRemoteFileContents = url => { return new Promise((resolve, reject) => { - require('http').get(url, (res) => { + require('http').get(url, res => { let string = '' - res.on('data', (chunk) => { + + res.on('data', chunk => { string += chunk.toString() }) @@ -100,18 +107,21 @@ module.exports.filesEqual = function ({base, headers, test}) { return getRemoteFileContents(test).then(testFile => { return testFile === baselineFile }) - }).catch(err => { + }) + .catch(err => { console.error(err) }) } -module.exports.getBearerToken = function (domain, done) { +module.exports.getBearerToken = function(domain, done) { if (typeof domain === 'function') { done = domain domain = 'localhost' } - request('http://' + config.get('server.host') + ':' + config.get('server.port')) + request( + 'http://' + config.get('server.host') + ':' + config.get('server.port') + ) .post(config.get('auth.tokenUrl')) .set('host', `${domain}:80`) .send({ @@ -120,27 +130,31 @@ module.exports.getBearerToken = function (domain, done) { }) .expect(200) // .expect('content-type', 'application/json') - .end(function (err, res) { + .end(function(err, res) { if (err) return done(err) - var bearerToken = res.body.accessToken + const bearerToken = res.body.accessToken + should.exist(bearerToken) done(null, bearerToken) }) } -module.exports.clearCache = function () { - var deleteFolderRecursive = function (filepath) { +module.exports.clearCache = function() { + const deleteFolderRecursive = function(filepath) { if (fs.existsSync(filepath) && fs.lstatSync(filepath).isDirectory()) { - fs.readdirSync(filepath).forEach(function (file, index) { - var curPath = filepath + '/' + file - if (fs.lstatSync(curPath).isDirectory()) { // recurse + fs.readdirSync(filepath).forEach(function(file, index) { + const curPath = filepath + '/' + file + + if (fs.lstatSync(curPath).isDirectory()) { + // recurse deleteFolderRecursive(curPath) - } else { // delete file + } else { + // delete file try { fs.unlinkSync(path.resolve(curPath)) } catch (err) { - + // no-op } } }) @@ -149,17 +163,21 @@ module.exports.clearCache = function () { try { fs.unlinkSync(filepath) } catch (err) { - + // no-op } } } // for each directory in the cache folder, remove all files then // delete the folder - fs.stat(config.get('caching.directory.path'), function (err, stats) { + fs.stat(config.get('caching.directory.path'), function(err, stats) { if (!err) { - fs.readdirSync(config.get('caching.directory.path')).forEach(function (dirname) { - deleteFolderRecursive(path.join(config.get('caching.directory.path'), dirname)) + fs.readdirSync(config.get('caching.directory.path')).forEach(function( + dirname + ) { + deleteFolderRecursive( + path.join(config.get('caching.directory.path'), dirname) + ) }) } }) @@ -171,13 +189,13 @@ module.exports.clearCache = function () { // // Example: http://{proxyUrl}/test.jpg?mockdomain=testdomain.com will // be forwarded to http://{cdnUrl}/test.jpg with `Host: testdomain.com`. 
-let proxyPort = config.get('server.port') + 1 -let proxyUrl = `http://localhost:${proxyPort}` -let proxy = httpProxy.createProxyServer({}) +const proxyPort = config.get('server.port') + 1 +const proxyUrl = `http://localhost:${proxyPort}` +const proxy = httpProxy.createProxyServer({}) proxy.on('proxyReq', (proxyReq, req, res, options) => { - let parsedUrl = url.parse(req.url, true) - let mockDomain = parsedUrl.query.mockdomain + const parsedUrl = url.parse(req.url, true) + const mockDomain = parsedUrl.query.mockdomain parsedUrl.search = null delete parsedUrl.query.mockdomain @@ -186,7 +204,7 @@ proxy.on('proxyReq', (proxyReq, req, res, options) => { proxyReq.setHeader('Host', mockDomain) }) -let proxyServer = http.createServer((req, res) => { +const proxyServer = http.createServer((req, res) => { proxy.web(req, res, { target: cdnUrl }) @@ -203,4 +221,5 @@ module.exports.proxyStop = () => { proxyServer.close(resolve) }) } + module.exports.proxyUrl = proxyUrl diff --git a/test/acceptance/multi-domain.js b/test/acceptance/multi-domain.js index 854c0758..6abd7bbf 100644 --- a/test/acceptance/multi-domain.js +++ b/test/acceptance/multi-domain.js @@ -9,104 +9,91 @@ const request = require('supertest') const app = require(__dirname + '/../../dadi/lib/') const Cache = require(__dirname + '/../../dadi/lib/cache') const config = require(__dirname + '/../../config') -const domainManager = require(__dirname + '/../../dadi/lib/models/domain-manager') +const domainManager = require(__dirname + + '/../../dadi/lib/models/domain-manager') const help = require(__dirname + '/help') -const cdnUrl = `http://${config.get('server.host')}:${config.get('server.port')}` +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` const images = { - 'localhost': 'test/images/test.jpg', + localhost: 'test/images/test.jpg', 'testdomain.com': 'test/images/dog-w600.jpeg' } const stylesheets = { - 'localhost': 'test/assets/test.css', + localhost: 'test/assets/test.css', 'testdomain.com': 'test/assets/test.css' } const jsFiles = { - 'localhost': 'test/assets/test.js', + localhost: 'test/assets/test.js', 'testdomain.com': 'test/assets/test.js' } const txtFiles = { - 'localhost': 'test/assets/test.txt', + localhost: 'test/assets/test.txt', 'testdomain.com': 'test/assets/test.txt' } -let configBackup = config.get() -let server1 = nock('http://one.somedomain.tech') +const configBackup = config.get() +const server1 = nock('http://one.somedomain.tech') .get('/test.jpg') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - path.resolve(images['localhost']) - ) + return fs.createReadStream(path.resolve(images['localhost'])) }) -let server2 = nock('http://two.somedomain.tech') +const server2 = nock('http://two.somedomain.tech') .get('/test.jpg') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - path.resolve(images['testdomain.com']) - ) + return fs.createReadStream(path.resolve(images['testdomain.com'])) }) -let cssScope1 = nock('http://one.somedomain.tech') +const cssScope1 = nock('http://one.somedomain.tech') .get('/test.css') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - path.resolve(stylesheets['localhost']) - ) + return fs.createReadStream(path.resolve(stylesheets['localhost'])) }) -let cssScope2 = nock('http://two.somedomain.tech') +const cssScope2 = nock('http://two.somedomain.tech') .get('/test.css') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - 
path.resolve(stylesheets['testdomain.com']) - ) + return fs.createReadStream(path.resolve(stylesheets['testdomain.com'])) }) -let jsScope1 = nock('http://one.somedomain.tech') +const jsScope1 = nock('http://one.somedomain.tech') .get('/test.js') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - path.resolve(jsFiles['localhost']) - ) + return fs.createReadStream(path.resolve(jsFiles['localhost'])) }) -let jsScope2 = nock('http://two.somedomain.tech') +const jsScope2 = nock('http://two.somedomain.tech') .get('/test.js') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - path.resolve(jsFiles['testdomain.com']) - ) + return fs.createReadStream(path.resolve(jsFiles['testdomain.com'])) }) -let txtScope1 = nock('http://one.somedomain.tech') +const txtScope1 = nock('http://one.somedomain.tech') .get('/test.txt') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - path.resolve(txtFiles['localhost']) - ) + return fs.createReadStream(path.resolve(txtFiles['localhost'])) }) -let txtScope2 = nock('http://two.somedomain.tech') +const txtScope2 = nock('http://two.somedomain.tech') .get('/test.txt') .times(Infinity) .reply(200, (uri, requestBody) => { - return fs.createReadStream( - path.resolve(txtFiles['testdomain.com']) - ) + return fs.createReadStream(path.resolve(txtFiles['testdomain.com'])) }) -describe('Multi-domain', function () { +describe('Multi-domain', function() { describe('if multi-domain is disabled', () => { before(done => { config.set('multiDomain.enabled', false) @@ -116,7 +103,7 @@ describe('Multi-domain', function () { if (err) return done(err) setTimeout(done, 500) - }) + }) }) }) @@ -124,41 +111,50 @@ describe('Multi-domain', function () { config.set('multiDomain.enabled', false) help.proxyStop().then(() => { - app.stop(done) + app.stop(done) }) }) it('should retrieve a remote image from a path specified by a recipe regardless of whether the domain is configured', () => { - return help.imagesEqual({ - base: images['localhost'], - test: `${cdnUrl}/sample-image-recipe/test.jpg` - }).then(match => { - match.should.eql(true) - console.log('match :', match); - return help.imagesEqual({ + return help + .imagesEqual({ base: images['localhost'], - test: `${help.proxyUrl}/sample-image-recipe/test.jpg?mockdomain=unknowndomain.com` - }).then(match => { - console.log('match :', match); + test: `${cdnUrl}/sample-image-recipe/test.jpg` + }) + .then(match => { match.should.eql(true) + console.log('match :', match) + + return help + .imagesEqual({ + base: images['localhost'], + test: `${help.proxyUrl}/sample-image-recipe/test.jpg?mockdomain=unknowndomain.com` + }) + .then(match => { + console.log('match :', match) + match.should.eql(true) + }) }) - }) }).timeout(20000) it('should retrieve a remote image regardless of whether the domain is configured', () => { - return help.imagesEqual({ - base: images['localhost'], - test: `${cdnUrl}/test.jpg` - }).then(match => { - match.should.eql(true) - - return help.imagesEqual({ + return help + .imagesEqual({ base: images['localhost'], - test: `${help.proxyUrl}/test.jpg?mockdomain=unknowndomain.com` - }).then(match => { + test: `${cdnUrl}/test.jpg` + }) + .then(match => { match.should.eql(true) + + return help + .imagesEqual({ + base: images['localhost'], + test: `${help.proxyUrl}/test.jpg?mockdomain=unknowndomain.com` + }) + .then(match => { + match.should.eql(true) + }) }) - }) }).timeout(20000) describe('Caching', () => { @@ -174,14 +170,14 @@ 
describe('Multi-domain', function () { Cache.reset() config.set('caching.redis.enabled', configBackup.caching.redis.enabled) - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) }) it('should not include domain name as part of cache key', done => { - let cacheSet = sinon.spy( - Cache.Cache.prototype, - 'set' - ) + const cacheSet = sinon.spy(Cache.Cache.prototype, 'set') request(cdnUrl) .get('/test.jpg') @@ -197,7 +193,10 @@ describe('Multi-domain', function () { .expect(200) .end((err, res) => { res.headers['x-cache'].should.eql('HIT') - cacheSet.getCall(0).args[0].includes('testdomain.com').should.eql(false) + cacheSet + .getCall(0) + .args[0].includes('testdomain.com') + .should.eql(false) cacheSet.restore() @@ -206,7 +205,7 @@ describe('Multi-domain', function () { }, 1000) }) }).timeout(20000) - }) + }) }) describe('if multi-domain is enabled', () => { @@ -240,13 +239,37 @@ describe('Multi-domain', function () { afterEach(done => { config.set('images.s3.enabled', configBackup.images.s3.enabled) - config.set('images.directory.enabled', configBackup.images.directory.enabled, 'localhost') - config.set('images.remote.enabled', configBackup.images.remote.enabled, 'localhost') - config.set('images.remote.path', configBackup.images.remote.path, 'localhost') + config.set( + 'images.directory.enabled', + configBackup.images.directory.enabled, + 'localhost' + ) + config.set( + 'images.remote.enabled', + configBackup.images.remote.enabled, + 'localhost' + ) + config.set( + 'images.remote.path', + configBackup.images.remote.path, + 'localhost' + ) - config.set('assets.directory.enabled', configBackup.assets.directory.enabled, 'localhost') - config.set('assets.remote.enabled', configBackup.assets.remote.enabled, 'localhost') - config.set('assets.remote.path', configBackup.assets.remote.path, 'localhost') + config.set( + 'assets.directory.enabled', + configBackup.assets.directory.enabled, + 'localhost' + ) + config.set( + 'assets.remote.enabled', + configBackup.assets.remote.enabled, + 'localhost' + ) + config.set( + 'assets.remote.path', + configBackup.assets.remote.path, + 'localhost' + ) config.set('multiDomain.enabled', configBackup.multiDomain.enabled) config.set('dadiNetwork.enableConfigurationAPI', false) @@ -257,19 +280,23 @@ describe('Multi-domain', function () { }) it('should retrieve a remote image from the path specified by a recipe at domain level', () => { - return help.imagesEqual({ - base: images['localhost'], - test: `${help.proxyUrl}/test-recipe/test.jpg?mockdomain=localhost` - }).then(match => { - match.should.eql(true) - - return help.imagesEqual({ - base: images['testdomain.com'], - test: `${help.proxyUrl}/test-recipe/test.jpg?mockdomain=testdomain.com` - }).then(match => { + return help + .imagesEqual({ + base: images['localhost'], + test: `${help.proxyUrl}/test-recipe/test.jpg?mockdomain=localhost` + }) + .then(match => { match.should.eql(true) + + return help + .imagesEqual({ + base: images['testdomain.com'], + test: `${help.proxyUrl}/test-recipe/test.jpg?mockdomain=testdomain.com` + }) + .then(match => { + match.should.eql(true) + }) }) - }) }).timeout(20000) it('should retrieve a local image from the path specified by the domain config', () => { @@ -277,87 +304,111 @@ describe('Multi-domain', function () { config.set('images.directory.path', 'test/images/next-level', 'localhost') config.set('images.remote.enabled', false, 'localhost') - let DiskStorage = 
require(path.join(__dirname, '../../dadi/lib/storage/disk')) - let diskStorage = new DiskStorage({ + const DiskStorage = require(path.join( + __dirname, + '../../dadi/lib/storage/disk' + )) + const diskStorage = new DiskStorage({ assetType: 'images', domain: 'localhost', - url: '/test.jpg'} - ) + url: '/test.jpg' + }) diskStorage.path.should.eql(path.resolve('./test/images/next-level')) - return help.imagesEqual({ - base: images['localhost'], - test: `${help.proxyUrl}/test.jpg?mockdomain=localhost` - }).then(match => { - match.should.eql(true) - }) + return help + .imagesEqual({ + base: images['localhost'], + test: `${help.proxyUrl}/test.jpg?mockdomain=localhost` + }) + .then(match => { + match.should.eql(true) + }) }).timeout(10000) it('should retrieve a remote image from the path specified by the domain config', () => { - return help.imagesEqual({ - base: images['localhost'], - test: `${help.proxyUrl}/test.jpg?mockdomain=localhost` - }).then(match => { - match.should.eql(true) - - return help.imagesEqual({ - base: images['testdomain.com'], - test: `${help.proxyUrl}/test.jpg?mockdomain=testdomain.com` - }).then(match => { + return help + .imagesEqual({ + base: images['localhost'], + test: `${help.proxyUrl}/test.jpg?mockdomain=localhost` + }) + .then(match => { match.should.eql(true) + + return help + .imagesEqual({ + base: images['testdomain.com'], + test: `${help.proxyUrl}/test.jpg?mockdomain=testdomain.com` + }) + .then(match => { + match.should.eql(true) + }) }) - }) }).timeout(10000) it('should retrieve a remote CSS file from the path specified by the domain config', () => { - return help.filesEqual({ - base: stylesheets['localhost'], - test: `${help.proxyUrl}/test.css?mockdomain=localhost` - }).then(match => { - match.should.eql(true) - - return help.filesEqual({ - base: stylesheets['testdomain.com'], - test: `${help.proxyUrl}/test.css?mockdomain=testdomain.com` - }).then(match => { + return help + .filesEqual({ + base: stylesheets['localhost'], + test: `${help.proxyUrl}/test.css?mockdomain=localhost` + }) + .then(match => { match.should.eql(true) + + return help + .filesEqual({ + base: stylesheets['testdomain.com'], + test: `${help.proxyUrl}/test.css?mockdomain=testdomain.com` + }) + .then(match => { + match.should.eql(true) + }) }) - }) }).timeout(10000) it('should retrieve a remote TXT file from the path specified by the domain config', () => { - return help.filesEqual({ - base: txtFiles['localhost'], - test: `${help.proxyUrl}/test.txt?mockdomain=localhost` - }).then(match => { - match.should.eql(true) - - return help.filesEqual({ - base: txtFiles['testdomain.com'], - test: `${help.proxyUrl}/test.txt?mockdomain=testdomain.com` - }).then(match => { - match.should.eql(true) + return help + .filesEqual({ + base: txtFiles['localhost'], + test: `${help.proxyUrl}/test.txt?mockdomain=localhost` }) - }) - }).timeout(10000) - - it.skip('should retrieve a remote JS file from the path specified by the domain config', () => { - return help.filesEqual({ - base: jsFiles['localhost'], - test: `${help.proxyUrl}/test.js?mockdomain=localhost` - }).then(match => { - match.should.eql(true) - - return help.filesEqual({ - base: jsFiles['testdomain.com'], - test: `${help.proxyUrl}/test.js?mockdomain=testdomain.com` - }).then(match => { + .then(match => { match.should.eql(true) + + return help + .filesEqual({ + base: txtFiles['testdomain.com'], + test: `${help.proxyUrl}/test.txt?mockdomain=testdomain.com` + }) + .then(match => { + match.should.eql(true) + }) }) - }) }).timeout(10000) + it.skip( + 'should 
retrieve a remote JS file from the path specified by the domain config', + () => { + return help + .filesEqual({ + base: jsFiles['localhost'], + test: `${help.proxyUrl}/test.js?mockdomain=localhost` + }) + .then(match => { + match.should.eql(true) + + return help + .filesEqual({ + base: jsFiles['testdomain.com'], + test: `${help.proxyUrl}/test.js?mockdomain=testdomain.com` + }) + .then(match => { + match.should.eql(true) + }) + }) + } + ).timeout(10000) + it('should use the images.allowFullURL setting defined at domain level to determine whether or not a request with a full remote URL will be served', done => { config.set('images.remote.allowFullURL', true, 'localhost') config.set('images.remote.allowFullURL', false, 'testdomain.com') @@ -512,7 +563,7 @@ describe('Multi-domain', function () { it('should return 201 when adding a single domain', done => { config.set('dadiNetwork.enableConfigurationAPI', true) - let domains = [ + const domains = [ { domain: 'api-added-domain.com', data: { @@ -531,10 +582,15 @@ describe('Multi-domain', function () { .send(domains) .end((_err, res) => { res.statusCode.should.eql(201) - let domainAdded = res.body.domains.includes('api-added-domain.com') + const domainAdded = res.body.domains.includes( + 'api-added-domain.com' + ) + domainAdded.should.eql(true) - config.get('images.remote.path', 'api-added-domain.com').should.eql('https://google.com') + config + .get('images.remote.path', 'api-added-domain.com') + .should.eql('https://google.com') done() }) }) @@ -542,14 +598,14 @@ describe('Multi-domain', function () { it('should return 201 when adding multiple domains', done => { config.set('dadiNetwork.enableConfigurationAPI', true) - let domains = [ + const domains = [ { domain: 'api-added-domain-one.com', data: { images: { remote: { path: 'https://google.com' - } + } } } }, @@ -572,27 +628,35 @@ describe('Multi-domain', function () { .end((_err, res) => { res.statusCode.should.eql(201) - let domainsAdded = res.body.domains.includes('api-added-domain-one.com') && - res.body.domains.includes('api-added-domain-two.com') + const domainsAdded = + res.body.domains.includes('api-added-domain-one.com') && + res.body.domains.includes('api-added-domain-two.com') domainsAdded.should.eql(true) - let addedPath = config.get('images.remote.path', 'api-added-domain-one.com') + let addedPath = config.get( + 'images.remote.path', + 'api-added-domain-one.com' + ) + addedPath.should.eql('https://google.com') - addedPath = config.get('images.remote.path', 'api-added-domain-two.com') + addedPath = config.get( + 'images.remote.path', + 'api-added-domain-two.com' + ) addedPath.should.eql('https://google.co.uk') done() }) }) - it('should return 404 when modifying a domain that doesn\'t exist', done => { + it("should return 404 when modifying a domain that doesn't exist", done => { config.set('dadiNetwork.enableConfigurationAPI', true) - let domain = 'api-added-domain.com' - let domains = [ + const domain = 'api-added-domain.com' + const domains = [ { - domain: domain, + domain, data: { images: { remote: { @@ -603,7 +667,7 @@ describe('Multi-domain', function () { } ] - let update = { + const update = { data: { images: { remote: { @@ -619,7 +683,8 @@ describe('Multi-domain', function () { .send(domains) .end((_err, res) => { res.statusCode.should.eql(201) - let domainAdded = res.body.domains.includes(domain) + const domainAdded = res.body.domains.includes(domain) + domainAdded.should.eql(true) request(cdnUrl) @@ -636,10 +701,10 @@ describe('Multi-domain', function () { it('should 
return 200 when modifying a domain', done => { config.set('dadiNetwork.enableConfigurationAPI', true) - let domain = 'api-added-domain.com' - let domains = [ + const domain = 'api-added-domain.com' + const domains = [ { - domain: domain, + domain, data: { images: { remote: { @@ -650,7 +715,7 @@ describe('Multi-domain', function () { } ] - let update = { + const update = { data: { images: { remote: { @@ -666,10 +731,12 @@ describe('Multi-domain', function () { .send(domains) .end((_err, res) => { res.statusCode.should.eql(201) - let domainAdded = res.body.domains.includes(domain) + const domainAdded = res.body.domains.includes(domain) + domainAdded.should.eql(true) let configuredPath = config.get('images.remote.path', domain) + configuredPath.should.eql(domains[0].data.images.remote.path) request(cdnUrl) @@ -678,7 +745,8 @@ describe('Multi-domain', function () { .send(update) .end((_err, res) => { res.statusCode.should.eql(200) - let domainAdded = res.body.domains.includes(domain) + const domainAdded = res.body.domains.includes(domain) + domainAdded.should.eql(true) configuredPath = config.get('images.remote.path', domain) @@ -688,10 +756,10 @@ describe('Multi-domain', function () { }) }) - it('should return 404 when deleting a domain that doesn\'t exist', done => { + it("should return 404 when deleting a domain that doesn't exist", done => { config.set('dadiNetwork.enableConfigurationAPI', true) - let domain = 'api-added-domain.com' + const domain = 'api-added-domain.com' request(cdnUrl) .delete('/_dadi/domains/not-a-domain') @@ -705,10 +773,10 @@ describe('Multi-domain', function () { it('should return 200 when deleting a domain', done => { config.set('dadiNetwork.enableConfigurationAPI', true) - let domain = 'api-added-domain.com' - let domains = [ + const domain = 'api-added-domain.com' + const domains = [ { - domain: domain, + domain, data: { images: { remote: { @@ -725,12 +793,13 @@ describe('Multi-domain', function () { .send(domains) .end((_err, res) => { res.statusCode.should.eql(201) - let domainAdded = res.body.domains.includes(domain) + const domainAdded = res.body.domains.includes(domain) + domainAdded.should.eql(true) - let configuredPath = config.get('images.remote.path', domain) - configuredPath.should.eql(domains[0].data.images.remote.path) + const configuredPath = config.get('images.remote.path', domain) + configuredPath.should.eql(domains[0].data.images.remote.path) ;(typeof domainManager.getDomain(domain)).should.eql('object') request(cdnUrl) @@ -738,10 +807,12 @@ describe('Multi-domain', function () { .set('Host', 'testdomain.com:80') .end((_err, res) => { res.statusCode.should.eql(200) - let domainAdded = res.body.domains.includes(domain) - domainAdded.should.eql(false) + const domainAdded = res.body.domains.includes(domain) - ;(typeof domainManager.getDomain(domain)).should.eql('undefined') + domainAdded.should.eql(false) + ;(typeof domainManager.getDomain(domain)).should.eql( + 'undefined' + ) done() }) @@ -750,7 +821,7 @@ describe('Multi-domain', function () { }) describe('when the target domain is not configured', () => { - let testDomain = 'unknowndomain.com' + const testDomain = 'unknowndomain.com' it('should return 404 when trying to retrieve a remote image', done => { request(cdnUrl) @@ -758,9 +829,7 @@ describe('Multi-domain', function () { .set('Host', `${testDomain}:80`) .expect(404) .end((err, res) => { - res.body.message.should.eql( - `Domain not configured: ${testDomain}` - ) + res.body.message.should.eql(`Domain not configured: ${testDomain}`) done() }) 
@@ -772,9 +841,7 @@ describe('Multi-domain', function () { .set('Host', `${testDomain}:80`) .expect(404) .end((err, res) => { - res.body.message.should.eql( - `Domain not configured: ${testDomain}` - ) + res.body.message.should.eql(`Domain not configured: ${testDomain}`) done() }) @@ -786,9 +853,7 @@ describe('Multi-domain', function () { .set('Host', `${testDomain}:80`) .expect(404) .end((err, res) => { - res.body.message.should.eql( - `Domain not configured: ${testDomain}` - ) + res.body.message.should.eql(`Domain not configured: ${testDomain}`) request(cdnUrl) .get('/test.css') @@ -818,14 +883,14 @@ describe('Multi-domain', function () { Cache.reset() config.set('caching.redis.enabled', configBackup.caching.redis.enabled) - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) }) it('should include domain name as part of cache key', done => { - let cacheSet = sinon.spy( - Cache.Cache.prototype, - 'set' - ) + const cacheSet = sinon.spy(Cache.Cache.prototype, 'set') request(cdnUrl) .get('/test.jpg') @@ -841,7 +906,10 @@ describe('Multi-domain', function () { .expect(200) .end((err, res) => { res.headers['x-cache'].should.eql('HIT') - cacheSet.getCall(0).args[0].includes('testdomain.com').should.eql(true) + cacheSet + .getCall(0) + .args[0].includes('testdomain.com') + .should.eql(true) cacheSet.restore() request(cdnUrl) diff --git a/test/acceptance/plugins.js b/test/acceptance/plugins.js index 989c7742..9e24e8d1 100644 --- a/test/acceptance/plugins.js +++ b/test/acceptance/plugins.js @@ -8,7 +8,9 @@ const should = require('should') const sinon = require('sinon') const stream = require('stream') -const cdnUrl = `http://${config.get('server.host')}:${config.get('server.port')}` +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` const ImageHandler = require(__dirname + '/../../dadi/lib/handlers/image') const workspace = require(__dirname + '/../../dadi/lib/models/workspace') @@ -24,7 +26,7 @@ const appActions = { stop: done => app.stop(done) } -describe('Plugins', function (done) { +describe('Plugins', function(done) { this.timeout(15000) describe('pre-processing', () => { @@ -41,18 +43,21 @@ describe('Plugins', function (done) { }, done2 => { appActions.start(() => { - help.imagesEqual({ - base: 'test/images/visual/baseline/measure.png?saturate=0.png', - test: `${cdnUrl}/test-recipe-with-plugin/visual/measure1.png` - }).then(match => { - match.should.eql(true) + help + .imagesEqual({ + base: + 'test/images/visual/baseline/measure.png?saturate=0.png', + test: `${cdnUrl}/test-recipe-with-plugin/visual/measure1.png` + }) + .then(match => { + match.should.eql(true) - appActions.stop(() => { - done2() - done1() - done() + appActions.stop(() => { + done2() + done1() + done() + }) }) - }) }) } ) @@ -62,7 +67,7 @@ describe('Plugins', function (done) { }) describe('post-processing', () => { - it('should modify the image before it\'s sent to the client', done => { + it("should modify the image before it's sent to the client", done => { help.createTempFile( 'workspace/plugins/test-plugin-one.js', `module.exports.post = parameters => { parameters.processor.greyscale() }`, @@ -75,18 +80,21 @@ describe('Plugins', function (done) { }, done2 => { appActions.start(() => { - help.imagesEqual({ - base: 'test/images/visual/baseline/measure.png?saturate=0.png', - test: `${cdnUrl}/test-recipe-with-plugin/visual/measure1.png` - }).then(match => { - 
match.should.eql(true) + help + .imagesEqual({ + base: + 'test/images/visual/baseline/measure.png?saturate=0.png', + test: `${cdnUrl}/test-recipe-with-plugin/visual/measure1.png` + }) + .then(match => { + match.should.eql(true) - appActions.stop(() => { - done2() - done1() - done() + appActions.stop(() => { + done2() + done1() + done() + }) }) - }) }) } ) @@ -104,17 +112,19 @@ describe('Plugins', function (done) { }`, done1 => { appActions.start(() => { - help.imagesEqual({ - base: 'test/images/test.jpg', - test: `${cdnUrl}/test-controller-plugin-one` - }).then(match => { - match.should.eql(true) + help + .imagesEqual({ + base: 'test/images/test.jpg', + test: `${cdnUrl}/test-controller-plugin-one` + }) + .then(match => { + match.should.eql(true) - appActions.stop(() => { - done1() - done() + appActions.stop(() => { + done1() + done() + }) }) - }) }) } ) @@ -146,7 +156,7 @@ describe('Plugins', function (done) { }) } ) - }) + }) it('should respond with a 500 if the plugin throws any errors', done => { help.createTempFile( @@ -167,6 +177,6 @@ describe('Plugins', function (done) { }) } ) - }) + }) }) }) diff --git a/test/acceptance/recipes.js b/test/acceptance/recipes.js index e22e991a..947a481b 100644 --- a/test/acceptance/recipes.js +++ b/test/acceptance/recipes.js @@ -6,78 +6,92 @@ const sinon = require('sinon') const request = require('supertest') const cache = require(__dirname + '/../../dadi/lib/cache') -const domainManager = require(__dirname + '/../../dadi/lib/models/domain-manager') +const domainManager = require(__dirname + + '/../../dadi/lib/models/domain-manager') const help = require(__dirname + '/help') const app = require(__dirname + '/../../dadi/lib/') const imageHandler = require(__dirname + '/../../dadi/lib/handlers/image') let config = require(__dirname + '/../../config') -let configBackup = config.get() -let cdnUrl = `http://${config.get('server.host')}:${config.get('server.port')}` +const configBackup = config.get() +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` let testConfigString -describe('Recipes', function () { +describe('Recipes', function() { this.timeout(8000) - let tokenRoute = config.get('auth.tokenUrl') + const tokenRoute = config.get('auth.tokenUrl') let sample = {} - beforeEach(function (done) { + beforeEach(function(done) { delete require.cache[__dirname + '/../../config'] config = require(__dirname + '/../../config') testConfigString = fs.readFileSync(config.configPath()) sample = { - 'recipe': 'test-recipe', - 'path': '/test', - 'settings': { - 'format': 'jpg', - 'quality': '80', - 'trim': '0', - 'trimFuzz': '0', - 'width': '1024', - 'height': '768', - 'cropX': '0', - 'cropY': '0', - 'ratio': '0', - 'devicePixelRatio': '0', - 'resizeStyle': '0', - 'gravity': '0', - 'filter': '0', - 'blur': '0', - 'strip': '0', - 'rotate': '0', - 'flip': '0', - 'transform': '1' + recipe: 'test-recipe', + path: '/test', + settings: { + format: 'jpg', + quality: '80', + trim: '0', + trimFuzz: '0', + width: '1024', + height: '768', + cropX: '0', + cropY: '0', + ratio: '0', + devicePixelRatio: '0', + resizeStyle: '0', + gravity: '0', + filter: '0', + blur: '0', + strip: '0', + rotate: '0', + flip: '0', + transform: '1' } } - app.start(function (err) { + app.start(function(err) { if (err) return done(err) // give it a moment for http.Server to finish starting - setTimeout(function () { + setTimeout(function() { done() }, 500) }) }) - afterEach(function (done) { + afterEach(function(done) { help.clearCache() app.stop(done) try { - 
fs.unlinkSync(path.join(path.resolve(config.get('paths.recipes')), 'test-recipe.json')) - } catch (err) {} + fs.unlinkSync( + path.join(path.resolve(config.get('paths.recipes')), 'test-recipe.json') + ) + } catch (err) { + // no-op + } try { - fs.unlinkSync(path.join(path.resolve(config.get('paths.recipes')), 'test-recipe-two.json')) - } catch (err) {} + fs.unlinkSync( + path.join( + path.resolve(config.get('paths.recipes')), + 'test-recipe-two.json' + ) + ) + } catch (err) { + // no-op + } }) - describe('Create', function () { - it('should not allow recipe create request without a valid token', function (done) { + describe('Create', function() { + it('should not allow recipe create request without a valid token', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) .post('/api/recipes') @@ -87,7 +101,7 @@ describe('Recipes', function () { }) }) - it('should return error if no data was sent', function (done) { + it('should return error if no data was sent', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) .post('/api/recipes') @@ -101,7 +115,7 @@ describe('Recipes', function () { }) }) - it('should return error if recipe body is not valid JSON', function (done) { + it('should return error if recipe body is not valid JSON', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) .post('/api/recipes') @@ -116,220 +130,240 @@ describe('Recipes', function () { }) }) - it('should return error if recipe name is missing', function (done) { + it('should return error if recipe name is missing', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(Object.assign({}, sample, {recipe: undefined})) - .set('Authorization', 'Bearer ' + token) - .expect(400) - .end(function (err, res) { - res.body.success.should.eql(false) - res.body.errors.should.be.Array - res.body.errors[0].error.should.eql('Property "recipe" not found in recipe') + .post('/api/recipes') + .send(Object.assign({}, sample, {recipe: undefined})) + .set('Authorization', 'Bearer ' + token) + .expect(400) + .end(function(err, res) { + res.body.success.should.eql(false) + res.body.errors.should.be.Array + res.body.errors[0].error.should.eql( + 'Property "recipe" not found in recipe' + ) - done() - }) + done() + }) }) }) - it('should return error if recipe name is too short', function (done) { + it('should return error if recipe name is too short', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(Object.assign({}, sample, {recipe: 'xxxx'})) - .set('Authorization', 'Bearer ' + token) - .expect(400) - .end(function (err, res) { - res.body.success.should.eql(false) - res.body.errors.should.be.Array - res.body.errors[0].error.should.eql('Recipe name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores') - done() - }) + .post('/api/recipes') + .send(Object.assign({}, sample, {recipe: 'xxxx'})) + .set('Authorization', 'Bearer ' + token) + .expect(400) + .end(function(err, res) { + res.body.success.should.eql(false) + res.body.errors.should.be.Array + res.body.errors[0].error.should.eql( + 'Recipe name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores' + ) + done() + }) }) }) - it('should return error if recipe settings are missing', function (done) { + it('should return error if recipe settings are missing', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) - 
.post('/api/recipes') - .send(Object.assign({}, sample, {settings: undefined})) - .set('Authorization', 'Bearer ' + token) - .expect(400) - .end(function (err, res) { - res.body.success.should.eql(false) - res.body.errors.should.be.Array - res.body.errors[0].error.should.eql('Property "settings" not found in recipe') - done() - }) + .post('/api/recipes') + .send(Object.assign({}, sample, {settings: undefined})) + .set('Authorization', 'Bearer ' + token) + .expect(400) + .end(function(err, res) { + res.body.success.should.eql(false) + res.body.errors.should.be.Array + res.body.errors[0].error.should.eql( + 'Property "settings" not found in recipe' + ) + done() + }) }) }) - it('should return error if recipe already exists', function (done) { + it('should return error if recipe already exists', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(201) - setTimeout(() => { - request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(400) - res.body.errors[0].should.eql(`Route ${sample.recipe} already exists`) - - done() - }) - }, 300) - }) + setTimeout(() => { + request(cdnUrl) + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(400) + res.body.errors[0].should.eql( + `Route ${sample.recipe} already exists` + ) + + done() + }) + }, 300) + }) }) }) - it('should return error if recipe save fails', function (done) { - let mockWriteJson = sinon.stub(fs, 'writeJson').rejects( - new Error() - ) + it('should return error if recipe save fails', function(done) { + const mockWriteJson = sinon.stub(fs, 'writeJson').rejects(new Error()) help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(400) - res.body.errors[0].should.eql('Error when saving recipe') + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(400) + res.body.errors[0].should.eql('Error when saving recipe') - mockWriteJson.restore() + mockWriteJson.restore() - done() - }) + done() + }) }) - }) + }) - it('should set the correct recipe filepath', function (done) { + it('should set the correct recipe filepath', function(done) { help.getBearerToken((err, token) => { - let stub = sinon.stub(fs, 'writeJson').resolves(true) + const stub = sinon.stub(fs, 'writeJson').resolves(true) request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - stub.called.should.eql(true) - stub.getCall(0).args[0].should.eql( - path.join(path.resolve(config.get('paths.recipes')), 'test-recipe.json') - ) - fs.writeJson.restore() + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + stub.called.should.eql(true) + stub + .getCall(0) + .args[0].should.eql( + path.join( + path.resolve(config.get('paths.recipes')), + 'test-recipe.json' + ) + ) + fs.writeJson.restore() - done() - }) + done() + }) }) }) - it('should save valid recipe to filesystem', 
function (done) { + it('should save valid recipe to filesystem', function(done) { help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) - done() - }) + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(201) + done() + }) }) }) }) - describe('Apply', function () { - it('should apply the new recipe', function (done) { + describe('Apply', function() { + it('should apply the new recipe', function(done) { // set some config values - let newTestConfig = JSON.parse(testConfigString) + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = false newTestConfig.caching.redis.enabled = false cache.reset() newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(201) - setTimeout(() => { - request(cdnUrl) - .get('/test-recipe/inside-test.jpg') - .end(function (err, res) { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') + setTimeout(() => { + request(cdnUrl) + .get('/test-recipe/inside-test.jpg') + .end(function(err, res) { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql('image/jpeg') - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) }) it('should prepend the contents of the `path` property to the image path, if `path` is a relative path', done => { - let server = nock('https://one.somedomain.tech') + const server = nock('https://one.somedomain.tech') .get('/test/images/mock.png') .replyWithFile(200, 'test/images/visual/measure1.png', { 'Content-Type': 'image/png' }) // set some config values - let newTestConfig = JSON.parse(testConfigString) + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = false newTestConfig.caching.redis.enabled = false cache.reset() newTestConfig.images.directory.enabled = false newTestConfig.images.remote.enabled = true newTestConfig.images.remote.path = 'https://one.somedomain.tech' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end((err, res) => { - res.statusCode.should.eql(201) + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end((err, res) => { + res.statusCode.should.eql(201) - setTimeout(() => { - request(cdnUrl) - .get('/test-recipe/images/mock.png') - .expect(200) - .end((err, res) => { - server.isDone().should.eql(true) + setTimeout(() => { + request(cdnUrl) + .get('/test-recipe/images/mock.png') + .expect(200) + .end((err, res) => { + server.isDone().should.eql(true) - done() - 
}) - }, 500) - }) + done() + }) + }, 500) + }) }) }) it('should use the value of the `path` property as the base URL if `path` is a full URL, replacing the one defined in the config', done => { - let server = nock('https://two.somedomain.tech') + const server = nock('https://two.somedomain.tech') .get('/test/images/mock.png') .reply(200, 'test/images/visual/measure1.png', { 'Content-Type': 'image/png' }) // set some config values - let newTestConfig = JSON.parse(testConfigString) + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = false newTestConfig.caching.redis.enabled = false cache.reset() @@ -337,34 +371,41 @@ describe('Recipes', function () { newTestConfig.images.s3.enabled = false newTestConfig.images.remote.enabled = true newTestConfig.images.remote.path = 'https://one.somedomain.tech' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(Object.assign({}, sample, {path: 'https://two.somedomain.tech/test'})) - .set('Authorization', 'Bearer ' + token) - .end((err, res) => { - res.statusCode.should.eql(201) + .post('/api/recipes') + .send( + Object.assign({}, sample, { + path: 'https://two.somedomain.tech/test' + }) + ) + .set('Authorization', 'Bearer ' + token) + .end((err, res) => { + res.statusCode.should.eql(201) - setTimeout(() => { - request(cdnUrl) - .get('/test-recipe/images/mock.png') - .expect(200) - .end((err, res) => { - server.isDone().should.eql(true) + setTimeout(() => { + request(cdnUrl) + .get('/test-recipe/images/mock.png') + .expect(200) + .end((err, res) => { + server.isDone().should.eql(true) - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) }) - it('should return error if the recipe is not found', function (done) { - let server = nock('https://one.somedomain.tech') + it('should return error if the recipe is not found', function(done) { + const server = nock('https://one.somedomain.tech') .get('/thumbxx/test.jpg') .reply(404) @@ -372,68 +413,90 @@ describe('Recipes', function () { help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(201) - setTimeout(() => { - request(cdnUrl) - .get('/thumbxx/test.jpg') - .end(function (err, res) { - res.statusCode.should.eql(404) - res.body.statusCode.should.eql(404) + setTimeout(() => { + request(cdnUrl) + .get('/thumbxx/test.jpg') + .end(function(err, res) { + res.statusCode.should.eql(404) + res.body.statusCode.should.eql(404) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) }) it('should handle image if recipe is valid', () => { - let newTestConfig = JSON.parse(testConfigString) + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, 
null, 2) + ) config.loadFile(config.configPath()) - return help.imagesEqual({ - base: 'test/images/test.jpg', - test: '/sample-image-recipe/test.jpg' - }).then(match => { - match.should.eql(true) - }) + return help + .imagesEqual({ + base: 'test/images/test.jpg', + test: '/sample-image-recipe/test.jpg' + }) + .then(match => { + match.should.eql(true) + }) }) - it('should handle JS file if recipe is valid', function (done) { - let newTestConfig = JSON.parse(testConfigString) + it('should handle JS file if recipe is valid', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.assets.directory.enabled = true newTestConfig.assets.directory.path = './test/assets' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) request(cdnUrl) .get('/sample-js-recipe/test-es6.js') - .set('user-agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)') + .set( + 'user-agent', + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + ) .expect(200) - .end(function (err, res) { - res.text.should.eql('"use strict";var makeFoo=function(a){return"I foo, you "+a};') + .end(function(err, res) { + res.text.should.eql( + '"use strict";var makeFoo=function(a){return"I foo, you "+a};' + ) done() }) }) - it('should return error if recipe is invalid ', function (done) { - let newTestConfig = JSON.parse(testConfigString) + it('should return error if recipe is invalid ', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) @@ -442,196 +505,228 @@ describe('Recipes', function () { .expect(404, done) }) - it('should not return the same cached result for an image obtained with and without a recipe', function (done) { + it('should not return the same cached result for an image obtained with and without a recipe', function(done) { // set some config values - let newTestConfig = JSON.parse(testConfigString) + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = true newTestConfig.caching.redis.enabled = false cache.reset() newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(Object.assign({}, sample, {path: undefined})) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) - - setTimeout(() => { - request(cdnUrl) - .get('/test-recipe/original.jpg') - .end(function (err, res) { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('MISS') + .post('/api/recipes') + .send(Object.assign({}, sample, {path: undefined})) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(201) + setTimeout(() => { request(cdnUrl) - .get('/original.jpg') - .end(function (err, res) { - 
res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('MISS') - - request(cdnUrl) .get('/test-recipe/original.jpg') - .end(function (err, res) { + .end(function(err, res) { res.statusCode.should.eql(200) res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('HIT') + res.headers['x-cache'].should.eql('MISS') request(cdnUrl) - .get('/original.jpg') - .end(function (err, res) { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('HIT') + .get('/original.jpg') + .end(function(err, res) { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('MISS') - done() - }) + request(cdnUrl) + .get('/test-recipe/original.jpg') + .end(function(err, res) { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('HIT') + + request(cdnUrl) + .get('/original.jpg') + .end(function(err, res) { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql( + 'image/jpeg' + ) + res.headers['x-cache'].should.eql('HIT') + + done() + }) + }) + }) }) - }) - }) - }, 500) - }) + }, 500) + }) }) }) - it('should not return the same cached result for an image obtained via two recipes with different options', function (done) { + it('should not return the same cached result for an image obtained via two recipes with different options', function(done) { // set some config values - let newTestConfig = JSON.parse(testConfigString) + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = true newTestConfig.caching.redis.enabled = false cache.reset() newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(Object.assign({}, sample, {path: undefined})) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) - - request(cdnUrl) .post('/api/recipes') - .send(Object.assign({}, sample, { - recipe: 'test-recipe-two', - path: undefined, - settings: Object.assign({}, sample.settings, { - quality: 70 - }) - })) + .send(Object.assign({}, sample, {path: undefined})) .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) + .end(function(err, res) { + res.statusCode.should.eql(201) - setTimeout(() => { - request(cdnUrl) - .get('/test-recipe/original.jpg') - .end(function (err, res) { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('MISS') + request(cdnUrl) + .post('/api/recipes') + .send( + Object.assign({}, sample, { + recipe: 'test-recipe-two', + path: undefined, + settings: Object.assign({}, sample.settings, { + quality: 70 + }) + }) + ) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(201) setTimeout(() => { request(cdnUrl) - .get('/test-recipe/original.jpg') - .end(function (err, res) { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('HIT') - - 
request(cdnUrl) - .get('/test-recipe-two/original.jpg') - .end(function (err, res) { + .get('/test-recipe/original.jpg') + .end(function(err, res) { res.statusCode.should.eql(200) res.headers['content-type'].should.eql('image/jpeg') res.headers['x-cache'].should.eql('MISS') setTimeout(() => { request(cdnUrl) - .get('/test-recipe-two/original.jpg') - .end(function (err, res) { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('HIT') - - done() - }) + .get('/test-recipe/original.jpg') + .end(function(err, res) { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('HIT') + + request(cdnUrl) + .get('/test-recipe-two/original.jpg') + .end(function(err, res) { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql( + 'image/jpeg' + ) + res.headers['x-cache'].should.eql('MISS') + + setTimeout(() => { + request(cdnUrl) + .get('/test-recipe-two/original.jpg') + .end(function(err, res) { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql( + 'image/jpeg' + ) + res.headers['x-cache'].should.eql('HIT') + + done() + }) + }, 600) + }) + }) }, 600) - }) - }) + }) }, 600) }) - }, 600) }) - }) }) }) }) - describe('File change monitor', function () { - it('should reload the recipe when the file changes', function (done) { + describe('File change monitor', function() { + it('should reload the recipe when the file changes', function(done) { // set some config values - let newTestConfig = JSON.parse(testConfigString) + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = false newTestConfig.caching.redis.enabled = false cache.reset() newTestConfig.images.directory.enabled = true newTestConfig.images.directory.path = './test/images' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) help.getBearerToken((err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.statusCode.should.eql(201) - - setTimeout(() => { - request(cdnUrl) - .get('/test-recipe/inside-test.jpg') - .end(function (err, res) { - res.headers['content-type'].should.eql('image/jpeg') - - // Change the format within the recipe - let recipeContent = fs.readFileSync(path.join(path.resolve(config.get('paths.recipes')), 'test-recipe.json')) - let recipe = JSON.parse(recipeContent.toString()) - recipe.settings.format = 'png' - - fs.writeFileSync(path.join(path.resolve(config.get('paths.recipes')), 'test-recipe.json'), JSON.stringify(recipe)) + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.statusCode.should.eql(201) - setTimeout(function () { - request(cdnUrl) + setTimeout(() => { + request(cdnUrl) .get('/test-recipe/inside-test.jpg') - .end(function (err, res) { - res.headers['content-type'].should.eql('image/png') + .end(function(err, res) { + res.headers['content-type'].should.eql('image/jpeg') - done() + // Change the format within the recipe + const recipeContent = fs.readFileSync( + path.join( + path.resolve(config.get('paths.recipes')), + 'test-recipe.json' + ) + ) + const recipe = JSON.parse(recipeContent.toString()) + + recipe.settings.format = 'png' + + fs.writeFileSync( + path.join( + 
path.resolve(config.get('paths.recipes')), + 'test-recipe.json' + ), + JSON.stringify(recipe) + ) + + setTimeout(function() { + request(cdnUrl) + .get('/test-recipe/inside-test.jpg') + .end(function(err, res) { + res.headers['content-type'].should.eql('image/png') + + done() + }) + }, 2500) }) - }, 2500) - }) - }, 500) - }) + }, 500) + }) }) }) }) }) describe('Recipes (with multi-domain)', () => { - let sample = { + const sample = { recipe: 'test-domain-recipe', settings: { format: 'png' @@ -663,38 +758,38 @@ describe('Recipes (with multi-domain)', () => { it('should create a recipe for the given domain only', done => { help.getBearerToken('localhost', (err, token) => { request(cdnUrl) - .post('/api/recipes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .set('host', 'localhost:80') - .expect(201) - .end((err, res) => { - setTimeout(() => { - request(cdnUrl) - .get('/test-domain-recipe/test.jpg') - .set('host', 'localhost:80') - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql('image/png') - + .post('/api/recipes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .set('host', 'localhost:80') + .expect(201) + .end((err, res) => { + setTimeout(() => { request(cdnUrl) - .get('/test-domain-recipe/test.jpg') - .set('host', 'testdomain.com:80') - .expect(404) - .end((err, res) => { - let recipePath = path.resolve( - path.join( - domainManager.getDomain('localhost').path, - config.get('paths.recipes', 'localhost'), - 'test-domain-recipe.json' - ) - ) + .get('/test-domain-recipe/test.jpg') + .set('host', 'localhost:80') + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('image/png') - fs.remove(recipePath).then(done) - }) - }) - }, 500) - }) + request(cdnUrl) + .get('/test-domain-recipe/test.jpg') + .set('host', 'testdomain.com:80') + .expect(404) + .end((err, res) => { + const recipePath = path.resolve( + path.join( + domainManager.getDomain('localhost').path, + config.get('paths.recipes', 'localhost'), + 'test-domain-recipe.json' + ) + ) + + fs.remove(recipePath).then(done) + }) + }) + }, 500) + }) }) }) }) diff --git a/test/acceptance/routes.js b/test/acceptance/routes.js index 00a2aa33..92dd0264 100644 --- a/test/acceptance/routes.js +++ b/test/acceptance/routes.js @@ -5,17 +5,20 @@ const sinon = require('sinon') const request = require('supertest') const cache = require(__dirname + '/../../dadi/lib/cache') -const domainManager = require(__dirname + '/../../dadi/lib/models/domain-manager') +const domainManager = require(__dirname + + '/../../dadi/lib/models/domain-manager') const help = require(__dirname + '/help') const app = require(__dirname + '/../../dadi/lib/') const Route = require(__dirname + '/../../dadi/lib/models/route') let config = require(__dirname + '/../../config') -let cdnUrl = `http://${config.get('server.host')}:${config.get('server.port')}` +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` -describe('Routes', function () { +describe('Routes', function() { this.timeout(8000) - let tokenRoute = config.get('auth.tokenUrl') + const tokenRoute = config.get('auth.tokenUrl') beforeEach(done => { delete require.cache[__dirname + '/../../config'] @@ -30,26 +33,26 @@ describe('Routes', function () { afterEach(done => { app.stop(done) - }) + }) - describe('Create', function () { + describe('Create', function() { let sample = {} beforeEach(() => { sample = { - 'route': 'sample-route', - 'branches': [ + route: 'sample-route', + branches: [ { - 'condition': { - 
'device': 'desktop', - 'language': 'en', + condition: { + device: 'desktop', + language: 'en', // "country": ["GB", "US"], - 'network': 'cable' + network: 'cable' }, - 'recipe': 'thumbnail' + recipe: 'thumbnail' }, { - 'recipe': 'default-recipe' + recipe: 'default-recipe' } ] } @@ -63,11 +66,13 @@ describe('Routes', function () { sample.route + '.json' ) ) - } catch (err) {} + } catch (err) { + // no-op + } }) it('should not allow route create request without a valid token', done => { - help.getBearerToken(function (err, token) { + help.getBearerToken(function(err, token) { request(cdnUrl) .post('/api/routes/new') .set('Authorization', 'Bearer ' + token.toString() + '1') @@ -77,8 +82,7 @@ describe('Routes', function () { }) it('should return error if no data was sent', done => { - help.getBearerToken(function (err, token) { - + help.getBearerToken(function(err, token) { request(cdnUrl) .post('/api/routes') .send({}) @@ -88,99 +92,103 @@ describe('Routes', function () { }) it('should return error if route name is missing', done => { - help.getBearerToken(function (err, token) { - var routeName = sample.route + help.getBearerToken(function(err, token) { + const routeName = sample.route delete sample.route request(cdnUrl) - .post('/api/routes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .expect(400) - .end(function (err, res) { - res.body.success.should.eql(false) - res.body.errors.should.be.Array - res.body.errors.should.containEql('Route name is missing') + .post('/api/routes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .expect(400) + .end(function(err, res) { + res.body.success.should.eql(false) + res.body.errors.should.be.Array + res.body.errors.should.containEql('Route name is missing') - // Restore route name - sample.route = routeName + // Restore route name + sample.route = routeName - done() - }) + done() + }) }) }) it('should return error if route name is too short', done => { - help.getBearerToken(function (err, token) { - var routeName = sample.route + help.getBearerToken(function(err, token) { + const routeName = sample.route sample.route = 'xxxx' request(cdnUrl) - .post('/api/routes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .expect(400) - .end(function (err, res) { - res.body.success.should.eql(false) - res.body.errors.should.be.Array - res.body.errors.should.containEql('Route name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores') - - // Restore route name - sample.route = routeName - - done() - }) + .post('/api/routes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .expect(400) + .end(function(err, res) { + res.body.success.should.eql(false) + res.body.errors.should.be.Array + res.body.errors.should.containEql( + 'Route name must be 5 characters or longer and contain only uppercase and lowercase letters, dashes and underscores' + ) + + // Restore route name + sample.route = routeName + + done() + }) }) }) it('should save route to filesystem', done => { help.getBearerToken((err, token) => { - request(cdnUrl) - .post('/api/routes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - setTimeout(function () { - var expectedPath = path.join(path.resolve(config.get('paths.routes')), sample.route + '.json') - fs.stat(expectedPath, (err, stats) => { - should.not.exist(err) - res.statusCode.should.eql(200) - res.body.success.should.eql(true) - - done() - }) - }, 1000) - }) + .post('/api/routes') + .send(sample) + 
.set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + setTimeout(function() { + const expectedPath = path.join( + path.resolve(config.get('paths.routes')), + sample.route + '.json' + ) + + fs.stat(expectedPath, (err, stats) => { + should.not.exist(err) + res.statusCode.should.eql(200) + res.body.success.should.eql(true) + + done() + }) + }, 1000) + }) }) }) it('should return error when trying to create route with existing name', done => { help.getBearerToken((err, token) => { - request(cdnUrl) - .post('/api/routes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - setTimeout(function () { - request(cdnUrl) - .post('/api/routes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - res.body.success.should.eql(false) - done() - }) - }, 1000) - }) + .post('/api/routes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + setTimeout(function() { + request(cdnUrl) + .post('/api/routes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + res.body.success.should.eql(false) + done() + }) + }, 1000) + }) }) }) }) - describe('Apply', function () { + describe('Apply', function() { const jpgRecipe = { recipe: 'jpg-recipe', settings: { @@ -200,9 +208,9 @@ describe('Routes', function () { ) } - let testRoute = { - 'route': 'test-route', - 'branches': [] + const testRoute = { + route: 'test-route', + branches: [] } const testRoutePath = path.join( @@ -211,208 +219,220 @@ describe('Routes', function () { ) before(() => { - fs.writeFileSync(getRecipePath('jpg-recipe'), JSON.stringify(jpgRecipe, null, 2)) - fs.writeFileSync(getRecipePath('png-recipe'), JSON.stringify(pngRecipe, null, 2)) + fs.writeFileSync( + getRecipePath('jpg-recipe'), + JSON.stringify(jpgRecipe, null, 2) + ) + fs.writeFileSync( + getRecipePath('png-recipe'), + JSON.stringify(pngRecipe, null, 2) + ) }) after(() => { try { fs.unlinkSync(getRecipePath('jpg-recipe')) fs.unlinkSync(getRecipePath('png-recipe')) - } catch (err) {} + } catch (err) { + // no-op + } }) afterEach(done => { try { fs.unlinkSync(testRoutePath) - } catch (err) {} + } catch (err) { + // no-op + } setTimeout(done, 500) - }) + }) it('should choose a route branch if the "device" condition matches', done => { - let userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B72 Safari/602.1' + const userAgent = + 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B72 Safari/602.1' testRoute.branches = [ { - 'condition': { - 'device': 'mobile', + condition: { + device: 'mobile' }, - 'recipe': 'png-recipe' + recipe: 'png-recipe' }, { - 'recipe': 'jpg-recipe' + recipe: 'jpg-recipe' } ] - help.getBearerToken(function (err, token) { + help.getBearerToken(function(err, token) { request(cdnUrl) - .post('/api/routes') - .send(testRoute) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - setTimeout(() => { - request(cdnUrl) - .get('/' + testRoute.route + '/test.jpg') - .set('user-agent', userAgent) - .end(function (err, res) { - res.headers['content-type'].should.eql('image/png') - - done() - }) - }, 500) - }) + .post('/api/routes') + .send(testRoute) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + setTimeout(() => { + request(cdnUrl) + .get('/' + testRoute.route + '/test.jpg') + .set('user-agent', userAgent) + .end(function(err, 
res) { + res.headers['content-type'].should.eql('image/png') + + done() + }) + }, 500) + }) }) }) it('should skip a route branch if the "device" condition does not match the device type', done => { - let userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 Safari/603.3.8' + const userAgent = + 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 Safari/603.3.8' testRoute.branches = [ { - 'condition': { - 'device': 'mobile', + condition: { + device: 'mobile' }, - 'recipe': 'png-recipe' + recipe: 'png-recipe' }, { - 'recipe': 'jpg-recipe' + recipe: 'jpg-recipe' } ] - help.getBearerToken(function (err, token) { + help.getBearerToken(function(err, token) { request(cdnUrl) - .post('/api/routes') - .send(testRoute) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - setTimeout(() => { - request(cdnUrl) - .get('/' + testRoute.route + '/test.jpg') - .set('user-agent', userAgent) - .end(function (err, res) { - res.headers['content-type'].should.eql('image/jpeg') - - done() - }) - }, 500) - }) + .post('/api/routes') + .send(testRoute) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + setTimeout(() => { + request(cdnUrl) + .get('/' + testRoute.route + '/test.jpg') + .set('user-agent', userAgent) + .end(function(err, res) { + res.headers['content-type'].should.eql('image/jpeg') + + done() + }) + }, 500) + }) }) }) it('should choose a route branch if the "language" condition matches', done => { - let userLanguage = 'en-GB,en;q=0.8' + const userLanguage = 'en-GB,en;q=0.8' testRoute.branches = [ { - 'condition': { - 'language': ['en', 'pt'], - 'languageMinQuality': 0.5 + condition: { + language: ['en', 'pt'], + languageMinQuality: 0.5 }, - 'recipe': 'png-recipe' + recipe: 'png-recipe' }, { - 'recipe': 'jpg-recipe' + recipe: 'jpg-recipe' } ] - help.getBearerToken(function (err, token) { + help.getBearerToken(function(err, token) { request(cdnUrl) - .post('/api/routes') - .send(testRoute) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - setTimeout(() => { - request(cdnUrl) - .get('/' + testRoute.route + '/test.jpg') - .set('accept-language', userLanguage) - .end(function (err, res) { - res.headers['content-type'].should.eql('image/png') - - done() - }) - }, 500) - }) + .post('/api/routes') + .send(testRoute) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + setTimeout(() => { + request(cdnUrl) + .get('/' + testRoute.route + '/test.jpg') + .set('accept-language', userLanguage) + .end(function(err, res) { + res.headers['content-type'].should.eql('image/png') + + done() + }) + }, 500) + }) }) }) it('should skip a route branch if the "language" condition does not match the client\'s language', done => { - let userLanguage = 'en-GB,en;q=0.8' + const userLanguage = 'en-GB,en;q=0.8' testRoute.branches = [ { - 'condition': { - 'language': ['pt'], - 'languageMinQuality': 0.5 + condition: { + language: ['pt'], + languageMinQuality: 0.5 }, - 'recipe': 'png-recipe' + recipe: 'png-recipe' }, { - 'recipe': 'jpg-recipe' + recipe: 'jpg-recipe' } ] - help.getBearerToken(function (err, token) { + help.getBearerToken(function(err, token) { request(cdnUrl) - .post('/api/routes') - .send(testRoute) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - setTimeout(() => { - request(cdnUrl) - .get('/' + testRoute.route + '/test.jpg') - .set('accept-language', userLanguage) - .end(function (err, res) { - 
res.headers['content-type'].should.eql('image/jpeg') - - done() - }) - }, 500) - }) + .post('/api/routes') + .send(testRoute) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + setTimeout(() => { + request(cdnUrl) + .get('/' + testRoute.route + '/test.jpg') + .set('accept-language', userLanguage) + .end(function(err, res) { + res.headers['content-type'].should.eql('image/jpeg') + + done() + }) + }, 500) + }) }) }) it('should skip a route branch if the "language" condition matches the client\'s language but with a non-sufficient quality parameter', done => { - let userLanguage = 'pt,en;q=0.3' + const userLanguage = 'pt,en;q=0.3' testRoute.branches = [ { - 'condition': { - 'language': ['en'], - 'languageMinQuality': 0.5 + condition: { + language: ['en'], + languageMinQuality: 0.5 }, - 'recipe': 'png-recipe' + recipe: 'png-recipe' }, { - 'recipe': 'jpg-recipe' + recipe: 'jpg-recipe' } ] - help.getBearerToken(function (err, token) { + help.getBearerToken(function(err, token) { request(cdnUrl) - .post('/api/routes') - .send(testRoute) - .set('Authorization', 'Bearer ' + token) - .end(function (err, res) { - setTimeout(() => { - request(cdnUrl) - .get('/' + testRoute.route + '/test.jpg') - .set('accept-language', userLanguage) - .end(function (err, res) { - res.headers['content-type'].should.eql('image/jpeg') - - done() - }) - }, 500) - }) + .post('/api/routes') + .send(testRoute) + .set('Authorization', 'Bearer ' + token) + .end(function(err, res) { + setTimeout(() => { + request(cdnUrl) + .get('/' + testRoute.route + '/test.jpg') + .set('accept-language', userLanguage) + .end(function(err, res) { + res.headers['content-type'].should.eql('image/jpeg') + + done() + }) + }, 500) + }) }) }) }) }) describe('Routes (with multi-domain)', () => { - let configBackup = config.get() - let sample = { + const configBackup = config.get() + const sample = { route: 'test-domain-route', branches: [ { @@ -450,38 +470,38 @@ describe('Routes (with multi-domain)', () => { it('should create a route for the given domain only', done => { help.getBearerToken('localhost', (err, token) => { request(cdnUrl) - .post('/api/routes') - .send(sample) - .set('Authorization', 'Bearer ' + token) - .set('host', 'localhost:80') - .expect(201) - .end((err, res) => { - setTimeout(() => { - request(cdnUrl) - .get('/test-domain-route/test.jpg') - .set('host', 'localhost:80') - .expect(200) - .end((err, res) => { - res.headers['content-type'].should.eql('image/png') - + .post('/api/routes') + .send(sample) + .set('Authorization', 'Bearer ' + token) + .set('host', 'localhost:80') + .expect(201) + .end((err, res) => { + setTimeout(() => { request(cdnUrl) - .get('/test-domain-recipe/test.jpg') - .set('host', 'testdomain.com:80') - .expect(404) - .end((err, res) => { - let routePath = path.resolve( - path.join( - domainManager.getDomain('localhost').path, - config.get('paths.routes', 'localhost'), - 'test-domain-route.json' - ) - ) - - fs.remove(routePath).then(done) - }) - }) - }, 500) - }) + .get('/test-domain-route/test.jpg') + .set('host', 'localhost:80') + .expect(200) + .end((err, res) => { + res.headers['content-type'].should.eql('image/png') + + request(cdnUrl) + .get('/test-domain-recipe/test.jpg') + .set('host', 'testdomain.com:80') + .expect(404) + .end((err, res) => { + const routePath = path.resolve( + path.join( + domainManager.getDomain('localhost').path, + config.get('paths.routes', 'localhost'), + 'test-domain-route.json' + ) + ) + + fs.remove(routePath).then(done) + }) + }) + }, 500) + }) }) }) }) 
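Reviewer note: the route fixtures in this file are reshaped heavily by the reindentation above, so the structure they exercise is summarised below. This is a sketch assembled from the fixtures and assertions in routes.js, not a full schema; the inline comments describe only the behaviour these tests assert (device matched against the user-agent header, language matched against Accept-Language with a minimum quality).

// Shape of the route document the branch-matching tests POST to /api/routes.
const testRoute = {
  route: 'test-route',
  branches: [
    {
      condition: {
        device: 'mobile',        // matched against the request's user-agent
        language: ['en', 'pt'],  // matched against the Accept-Language header
        languageMinQuality: 0.5  // minimum q value for a language to count as a match
      },
      recipe: 'png-recipe'       // recipe applied when every condition matches
    },
    {
      recipe: 'jpg-recipe'       // unconditional fallback branch
    }
  ]
}

// The referenced recipes are recipe documents (for example, the
// test-domain-recipe fixture earlier in this diff uses settings: { format: 'png' }),
// which is why these tests assert on the returned content-type to tell which
// branch was chosen.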
diff --git a/test/acceptance/ssl.js b/test/acceptance/ssl.js index 0abbb022..c783d971 100644 --- a/test/acceptance/ssl.js +++ b/test/acceptance/ssl.js @@ -1,29 +1,31 @@ -var should = require('should') -var request = require('supertest') -var app = require(__dirname + '/../../dadi/lib/') -var config = require(__dirname + '/../../config') +const should = require('should') +const request = require('supertest') +let app = require(__dirname + '/../../dadi/lib/') +const config = require(__dirname + '/../../config') -var clientHost = 'http://' + config.get('server.host') + ':' + config.get('server.port') -var secureClientHost = 'https://' + config.get('server.host') + ':' + config.get('server.port') +const clientHost = + 'http://' + config.get('server.host') + ':' + config.get('server.port') +const secureClientHost = + 'https://' + config.get('server.host') + ':' + config.get('server.port') -var client = request(clientHost) -var secureClient = request(secureClientHost) +const client = request(clientHost) +const secureClient = request(secureClientHost) describe('http2', () => { - before((done) => { + before(done => { // avoid [Error: self signed certificate] code: 'DEPTH_ZERO_SELF_SIGNED_CERT' process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0' done() }) - beforeEach((done) => { + beforeEach(done => { delete require.cache[require.resolve(__dirname + '/../../dadi/lib/')] app = require(__dirname + '/../../dadi/lib/') done() }) - afterEach((done) => { + afterEach(done => { config.set('server.protocol', 'http') config.set('server.redirectPort', '') config.set('server.sslPassphrase', '') @@ -38,45 +40,43 @@ describe('http2', () => { } }) - it('should respond to a http1 request even if http2 is enabled', (done) => { + it('should respond to a http1 request even if http2 is enabled', done => { config.set('server.protocol', 'https') config.set('server.sslPrivateKeyPath', 'test/ssl/unprotected/key.pem') config.set('server.sslCertificatePath', 'test/ssl/unprotected/cert.pem') - app.start(function (err) { + app.start(function(err) { if (err) return done(err) - secureClient - .get('/hello') - .end((err, res) => { - if (err) throw err + secureClient.get('/hello').end((err, res) => { + if (err) throw err - // We're assuming here that the 'supertest' module doesn't support http2 - // If they ever add it this test might need to be changed! + // We're assuming here that the 'supertest' module doesn't support http2 + // If they ever add it this test might need to be changed! 
- res.res.httpVersion.should.eql('1.1') + res.res.httpVersion.should.eql('1.1') - done() - }) + done() + }) }) }) }) describe('SSL', () => { - before((done) => { + before(done => { // avoid [Error: self signed certificate] code: 'DEPTH_ZERO_SELF_SIGNED_CERT' process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0' done() }) - beforeEach((done) => { + beforeEach(done => { delete require.cache[require.resolve(__dirname + '/../../dadi/lib/')] app = require(__dirname + '/../../dadi/lib/') done() }) - afterEach((done) => { + afterEach(done => { config.set('server.protocol', 'http') config.set('server.redirectPort', '') config.set('server.sslPassphrase', '') @@ -91,30 +91,31 @@ describe('SSL', () => { } }) - it('should respond to a http request when ssl is disabled', (done) => { - app.start(function (err) { + it('should respond to a http request when ssl is disabled', done => { + app.start(function(err) { if (err) return done(err) - client - .get('/hello') - .end((err, res) => { - if (err) throw err - res.statusCode.should.eql(200) - done() - }) + client.get('/hello').end((err, res) => { + if (err) throw err + res.statusCode.should.eql(200) + done() + }) }) }) - it('should redirect http request to https when redirectPort is set', function (done) { + it('should redirect http request to https when redirectPort is set', function(done) { config.set('server.protocol', 'https') config.set('server.redirectPort', '9999') config.set('server.sslPrivateKeyPath', 'test/ssl/unprotected/key.pem') config.set('server.sslCertificatePath', 'test/ssl/unprotected/cert.pem') - app.start(function (err) { + app.start(function(err) { if (err) return done(err) - var httpClient = request('http://' + config.get('server.host') + ':9999') + const httpClient = request( + 'http://' + config.get('server.host') + ':9999' + ) + httpClient .get('/') .expect(301) @@ -127,26 +128,24 @@ describe('SSL', () => { }) }) - it('should respond to a https request when using protected ssl key with a passphrase', (done) => { + it('should respond to a https request when using protected ssl key with a passphrase', done => { config.set('server.protocol', 'https') config.set('server.sslPrivateKeyPath', 'test/ssl/protected/key.pem') config.set('server.sslCertificatePath', 'test/ssl/protected/cert.pem') config.set('server.sslPassphrase', 'changeme') - app.start(function (err) { + app.start(function(err) { if (err) return done(err) - secureClient - .get('/hello') - .end((err, res) => { - if (err) throw err - res.statusCode.should.eql(200) - done() - }) + secureClient.get('/hello').end((err, res) => { + if (err) throw err + res.statusCode.should.eql(200) + done() + }) }) }) - it('should throw a bad password read exception when using protected ssl key with the wrong passphrase', (done) => { + it('should throw a bad password read exception when using protected ssl key with the wrong passphrase', done => { config.set('server.protocol', 'https') config.set('server.sslPrivateKeyPath', 'test/ssl/protected/key.pem') config.set('server.sslCertificatePath', 'test/ssl/protected/cert.pem') @@ -161,7 +160,7 @@ describe('SSL', () => { done() }) - it('should throw a bad password read exception when using protected ssl key without a passphrase', (done) => { + it('should throw a bad password read exception when using protected ssl key without a passphrase', done => { config.set('server.protocol', 'https') config.set('server.sslPrivateKeyPath', 'test/ssl/protected/key.pem') config.set('server.sslCertificatePath', 'test/ssl/protected/cert.pem') diff --git 
a/test/acceptance/status.js b/test/acceptance/status.js index 1f176e3a..f9f96b87 100644 --- a/test/acceptance/status.js +++ b/test/acceptance/status.js @@ -2,27 +2,30 @@ const nock = require('nock') const should = require('should') const request = require('supertest') -let app = require(__dirname + '/../../dadi/lib/') -let config = require(__dirname + '/../../config') +const app = require(__dirname + '/../../dadi/lib/') +const config = require(__dirname + '/../../config') const help = require(__dirname + '/help') -describe('Status', function () { - var statusRoute = '/api/status' // TODO move to config - var bearerToken - let statusConfigBackup = config.get('status') - let urlConfigBackup = config.get('publicUrl') +describe('Status', function() { + const statusRoute = '/api/status' // TODO move to config + let bearerToken + const statusConfigBackup = config.get('status') + const urlConfigBackup = config.get('publicUrl') this.timeout(10000) - before(function (done) { + before(function(done) { done() }) - after(function (done) { + after(function(done) { // make sure config is reset properly so other tests run ok // it's essential that status.standalone is disabled config.set('status.standalone', false) - config.set('status.requireAuthentication', statusConfigBackup.requireAuthentication) + config.set( + 'status.requireAuthentication', + statusConfigBackup.requireAuthentication + ) config.set('publicUrl.host', urlConfigBackup.host) config.set('publicUrl.port', urlConfigBackup.port) @@ -30,22 +33,22 @@ describe('Status', function () { done() }) - describe('Base URL', function () { - beforeEach(function (done) { + describe('Base URL', function() { + beforeEach(function(done) { config.set('publicUrl.host', 'www.example.com') config.set('publicUrl.port', 80) - let statusScope = nock('http://www.example.com') + const statusScope = nock('http://www.example.com') .get('/test.jpg?format=png&quality=50&width=800&height=600') .reply(200) - app.start(function (err) { + app.start(function(err) { if (err) return done(err) // give http.Server a moment to finish starting up // then grab a bearer token from it - setTimeout(function () { - help.getBearerToken(function (err, token) { + setTimeout(function() { + help.getBearerToken(function(err, token) { if (err) return done(err) bearerToken = token done() @@ -54,13 +57,16 @@ describe('Status', function () { }) }) - afterEach(function (done) { + afterEach(function(done) { help.clearCache() app.stop(done) }) - - it('should use publicUrl as base for status checks, if configured', function (done) { - var client = request('http://' + config.get('server.host') + ':' + config.get('server.port')) + + it('should use publicUrl as base for status checks, if configured', function(done) { + const client = request( + 'http://' + config.get('server.host') + ':' + config.get('server.port') + ) + client .post(statusRoute) .set('Authorization', 'Bearer ' + bearerToken) @@ -68,17 +74,18 @@ describe('Status', function () { .expect(200) .end((err, res) => { console.log('nock :', nock) - let statusResponse = res.body + const statusResponse = res.body + statusResponse.status.status.should.eql(200) done() }) }) }) - describe('Integrated', function () { - describe('Authenticated', function () { - beforeEach(function (done) { - app.start(function (err) { + describe('Integrated', function() { + describe('Authenticated', function() { + beforeEach(function(done) { + app.start(function(err) { if (err) return done(err) config.set('status.standalone', false) @@ -86,8 +93,8 @@ 
describe('Status', function () { // give http.Server a moment to finish starting up // then grab a bearer token from it - setTimeout(function () { - help.getBearerToken(function (err, token) { + setTimeout(function() { + help.getBearerToken(function(err, token) { if (err) return done(err) bearerToken = token done() @@ -96,24 +103,39 @@ describe('Status', function () { }) }) - afterEach(function (done) { + afterEach(function(done) { config.set('status.standalone', statusConfigBackup.standalone) - config.set('status.requireAuthentication', statusConfigBackup.requireAuthentication) + config.set( + 'status.requireAuthentication', + statusConfigBackup.requireAuthentication + ) help.clearCache() app.stop(done) }) - it('should return error if no token is given', function (done) { - var client = request('http://' + config.get('server.host') + ':' + config.get('server.port')) + it('should return error if no token is given', function(done) { + const client = request( + 'http://' + + config.get('server.host') + + ':' + + config.get('server.port') + ) + client .post(statusRoute) .expect('content-type', 'application/json') .expect(401, done) }) - it('should return ok if token is given', function (done) { - var client = request('http://' + config.get('server.host') + ':' + config.get('server.port')) + it('should return ok if token is given', function(done) { + const client = request( + 'http://' + + config.get('server.host') + + ':' + + config.get('server.port') + ) + client .post(statusRoute) .set('Authorization', 'Bearer ' + bearerToken) @@ -122,54 +144,63 @@ describe('Status', function () { }) }) - describe('Unauthenticated', function () { - beforeEach(function (done) { - app.start(function (err) { + describe('Unauthenticated', function() { + beforeEach(function(done) { + app.start(function(err) { if (err) return done(err) config.set('status.standalone', false) config.set('status.requireAuthentication', false) // give http.Server a moment to finish starting up - setTimeout(function () { + setTimeout(function() { done() }, 500) }) }) - afterEach(function (done) { + afterEach(function(done) { config.set('status.standalone', statusConfigBackup.standalone) - config.set('status.requireAuthentication', statusConfigBackup.requireAuthentication) + config.set( + 'status.requireAuthentication', + statusConfigBackup.requireAuthentication + ) help.clearCache() app.stop(done) }) - it('should return ok even if no token is given', function (done) { - var client = request('http://' + config.get('server.host') + ':' + config.get('server.port')) + it('should return ok even if no token is given', function(done) { + const client = request( + 'http://' + + config.get('server.host') + + ':' + + config.get('server.port') + ) + client - .post(statusRoute) - .expect(200) - .end((err, res) => { - done() - }) + .post(statusRoute) + .expect(200) + .end((err, res) => { + done() + }) }) }) }) - describe('Standalone', function () { - describe('Authenticated', function () { - beforeEach(function (done) { + describe('Standalone', function() { + describe('Authenticated', function() { + beforeEach(function(done) { config.set('status.standalone', true) config.set('status.requireAuthentication', true) - app.start(function (err) { + app.start(function(err) { if (err) return done(err) // give http.Server a moment to finish starting up // then grab a bearer token from it - setTimeout(function () { - help.getBearerToken(function (err, token) { + setTimeout(function() { + help.getBearerToken(function(err, token) { if (err) return done(err) 
bearerToken = token done() @@ -178,27 +209,42 @@ describe('Status', function () { }) }) - afterEach(function (done) { + afterEach(function(done) { config.set('status.standalone', statusConfigBackup.standalone) - config.set('status.requireAuthentication', statusConfigBackup.requireAuthentication) + config.set( + 'status.requireAuthentication', + statusConfigBackup.requireAuthentication + ) help.clearCache() app.stop(done) }) - it('should return error if no token is given', function (done) { - var client = request('http://' + config.get('server.host') + ':' + config.get('status.port')) + it('should return error if no token is given', function(done) { + const client = request( + 'http://' + + config.get('server.host') + + ':' + + config.get('status.port') + ) + client .post(statusRoute) .expect('content-type', 'application/json') - .end(function (err, res) { + .end(function(err, res) { res.statusCode.should.eql(401) done() }) }) - it('should return ok if token is given', function (done) { - var client = request('http://' + config.get('server.host') + ':' + config.get('status.port')) + it('should return ok if token is given', function(done) { + const client = request( + 'http://' + + config.get('server.host') + + ':' + + config.get('status.port') + ) + client .post(statusRoute) .set('Authorization', 'Bearer ' + bearerToken) @@ -207,31 +253,40 @@ describe('Status', function () { }) }) - describe('Unauthenticated', function () { - beforeEach(function (done) { + describe('Unauthenticated', function() { + beforeEach(function(done) { config.set('status.standalone', true) config.set('status.requireAuthentication', false) - app.start(function (err) { + app.start(function(err) { if (err) return done(err) // give http.Server a moment to finish starting up - setTimeout(function () { + setTimeout(function() { done() }, 500) }) }) - afterEach(function (done) { + afterEach(function(done) { config.set('status.standalone', statusConfigBackup.standalone) - config.set('status.requireAuthentication', statusConfigBackup.requireAuthentication) + config.set( + 'status.requireAuthentication', + statusConfigBackup.requireAuthentication + ) help.clearCache() app.stop(done) }) - it('should return ok even if no token is given', function (done) { - var client = request('http://' + config.get('server.host') + ':' + config.get('status.port')) + it('should return ok even if no token is given', function(done) { + const client = request( + 'http://' + + config.get('server.host') + + ':' + + config.get('status.port') + ) + client .post(statusRoute) .expect(200) diff --git a/test/acceptance/visual.js b/test/acceptance/visual.js index ca5f45e3..faf0a8e2 100644 --- a/test/acceptance/visual.js +++ b/test/acceptance/visual.js @@ -3,78 +3,92 @@ const path = require('path') const querystring = require('querystring') const request = require('supertest') -const baselineFilePath = path.resolve(path.join(__dirname, '../images/visual/baseline')) +const baselineFilePath = path.resolve( + path.join(__dirname, '../images/visual/baseline') +) const config = require(__dirname + '/../../config') -const testManifest = require(path.resolve(path.join(__dirname, 'visual_manifest.json'))) +const testManifest = require(path.resolve( + path.join(__dirname, 'visual_manifest.json') +)) -const cdnUrl = 'http://' + config.get('server.host') + ':' + config.get('server.port') +const cdnUrl = + 'http://' + config.get('server.host') + ':' + config.get('server.port') const cdnClient = request(cdnUrl) let app -require('it-each')({ testPerIteration: true }) 
+require('it-each')({testPerIteration: true}) -describe('Visual Regression', function (done) { +describe('Visual Regression', function(done) { this.timeout(15000) - before(function (done) { + before(function(done) { delete require.cache[require.resolve(__dirname + '/../../dadi/lib/')] app = require(__dirname + '/../../dadi/lib/') - app.start(function (err) { + app.start(function(err) { if (err) { return done() } // give it a moment for http.Server to finish starting - setTimeout(function () { + setTimeout(function() { done() }, 500) }) }) - after(function (done) { + after(function(done) { app.stop(done) }) - it.each(testManifest.tests, 'Test', ['baselineFilename'], function (element, next) { + it.each(testManifest.tests, 'Test', ['baselineFilename'], function( + element, + next + ) { requestTestImage(element) - .then(() => { - next() - }) - .catch(err => { - console.log(err) - - next(err) - }) + .then(() => { + next() + }) + .catch(err => { + console.log(err) + + next(err) + }) }) }) -function requestTestImage (test) { - let testFilePath = path.join(test.recipeRoute || '', testManifest.path, test.image || '') - let outputPath = path.join(__dirname, '../', test.baselineFilename.replace('baseline', 'failed')) - let requestPath = test.url || ('/' + testFilePath + '?' + querystring.encode(test.params)) - let baselineImagePath = path.join(__dirname, '../', test.baselineFilename) - - return Jimp - .read(baselineImagePath) - .then(baselineImage => { - return Jimp - .read(cdnUrl + requestPath) - .then(testImage => { - let diff = Jimp.diff(baselineImage, testImage, 0.1) // threshold ranges 0-1 (default: 0.1) - let distance = Jimp.distance(baselineImage, testImage) // perceived distance - - if (distance < 0.15 || diff.percent < 0.15) { - return - } - - let error = new Error( - `Image mismatch percentage: ${diff.percent * 100}. Saving diff image to ${outputPath}.` - ) - - diff.image.write(outputPath) - - return Promise.reject(error) - }) +function requestTestImage(test) { + const testFilePath = path.join( + test.recipeRoute || '', + testManifest.path, + test.image || '' + ) + const outputPath = path.join( + __dirname, + '../', + test.baselineFilename.replace('baseline', 'failed') + ) + const requestPath = + test.url || '/' + testFilePath + '?' + querystring.encode(test.params) + const baselineImagePath = path.join(__dirname, '../', test.baselineFilename) + + return Jimp.read(baselineImagePath).then(baselineImage => { + return Jimp.read(cdnUrl + requestPath).then(testImage => { + const diff = Jimp.diff(baselineImage, testImage, 0.1) // threshold ranges 0-1 (default: 0.1) + const distance = Jimp.distance(baselineImage, testImage) // perceived distance + + if (distance < 0.15 || diff.percent < 0.15) { + return + } + + const error = new Error( + `Image mismatch percentage: ${diff.percent * + 100}. 
Saving diff image to ${outputPath}.` + ) + + diff.image.write(outputPath) + + return Promise.reject(error) }) + }) } diff --git a/test/acceptance/wcache.js b/test/acceptance/wcache.js index 81963cd5..ca245a1d 100644 --- a/test/acceptance/wcache.js +++ b/test/acceptance/wcache.js @@ -9,18 +9,24 @@ const cache = require(__dirname + '/../../dadi/lib/cache') const config = require(__dirname + '/../../config') let bearerToken -let client = request('http://' + config.get('server.host') + ':' + config.get('server.port')) -let configBackup = config.get() +const client = request( + 'http://' + config.get('server.host') + ':' + config.get('server.port') +) +const configBackup = config.get() const USER_AGENTS = { - chrome64: 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.167 Safari/537.36', - chrome41: 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', - firefox40_1: 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1', - firefox54: 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0', + chrome64: + 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.167 Safari/537.36', + chrome41: + 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', + firefox40_1: + 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1', + firefox54: + 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0', ie9: 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' } -describe('Cache', function () { +describe('Cache', function() { this.timeout(10000) before(() => { @@ -29,12 +35,15 @@ describe('Cache', function () { }) after(() => { - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) config.set('caching.redis.enabled', configBackup.caching.redis.enabled) }) beforeEach(done => { - app.start(function () { + app.start(function() { help.getBearerToken((err, token) => { if (err) return done(err) @@ -52,54 +61,54 @@ describe('Cache', function () { describe('Images', () => { it('should get image from cache when available', done => { client - .get('/test.jpg') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.jpg') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client - .get('/test.jpg') - .expect(200) - .end((err, res) => { - if (err) return done(err) + setTimeout(() => { + client + .get('/test.jpg') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('HIT') - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) it('should get image JSON data from cache when available', done => { client - .get('/test.jpg?format=json') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.jpg?format=json') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/json') - 
res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('application/json') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client - .get('/test.jpg?format=json') - .expect(200) - .end((err, res) => { - if (err) return done(err) + setTimeout(() => { + client + .get('/test.jpg?format=json') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/json') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('application/json') + res.headers['x-cache'].should.eql('HIT') - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) it('should return a vary accept encoding header', done => { @@ -120,31 +129,34 @@ describe('Cache', function () { config.set('notFound.images.enabled', false) client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/json') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/json') - res.headers['x-cache'].should.eql('MISS') + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) - config.set('caching.cache404', configBackup.caching.cache404) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) + res.headers['content-type'].should.eql('application/json') + res.headers['x-cache'].should.eql('MISS') - done() - }) - }, 500) - }) + setTimeout(() => { + client + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('application/json') + res.headers['x-cache'].should.eql('MISS') + + config.set('caching.cache404', configBackup.caching.cache404) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) + + done() + }) + }, 500) + }) }) it('should not cache a 404 if caching.cache404 is false and the image fallback is enabled', done => { @@ -153,32 +165,38 @@ describe('Cache', function () { config.set('notFound.images.path', 'test/images/missing.png') client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('image/png') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('image/png') - res.headers['x-cache'].should.eql('MISS') + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) - config.set('caching.cache404', configBackup.caching.cache404) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) - config.set('notFound.images.path', configBackup.notFound.images.path) + res.headers['content-type'].should.eql('image/png') + res.headers['x-cache'].should.eql('MISS') - done() - }) - }, 500) - }) + setTimeout(() => { + client + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('image/png') + res.headers['x-cache'].should.eql('MISS') + + config.set('caching.cache404', configBackup.caching.cache404) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) 
+ config.set( + 'notFound.images.path', + configBackup.notFound.images.path + ) + + done() + }) + }, 500) + }) }) it('should cache a 404 if caching.cache404 is true and the image fallback is disabled', done => { @@ -186,31 +204,34 @@ describe('Cache', function () { config.set('notFound.images.enabled', false) client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/json') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/json') - res.headers['x-cache'].should.eql('HIT') + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) - config.set('caching.cache404', configBackup.caching.cache404) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) + res.headers['content-type'].should.eql('application/json') + res.headers['x-cache'].should.eql('MISS') - done() - }) - }, 500) - }) + setTimeout(() => { + client + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('application/json') + res.headers['x-cache'].should.eql('HIT') + + config.set('caching.cache404', configBackup.caching.cache404) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) + + done() + }) + }, 500) + }) }) it('should cache a 404 if caching.cache404 is true and the image fallback is enabled', done => { @@ -219,36 +240,42 @@ describe('Cache', function () { config.set('notFound.images.path', 'test/images/missing.png') client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('image/png') - res.headers['x-cache'].should.eql('MISS') - - // Setting a new fallback image to ensure that the content-type returned matches the - // content-type of the image that was cached, not the one that is currently set. - config.set('notFound.images.path', 'test/images/original.jpg') - - setTimeout(() => { - client - .get('/not-a-valid-image.jpg') - .expect(404) - .end((err, res) => { - if (err) return done(err) + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('image/png') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('image/png') + res.headers['x-cache'].should.eql('MISS') - config.set('caching.cache404', configBackup.caching.cache404) - config.set('notFound.images.enabled', configBackup.notFound.images.enabled) - config.set('notFound.images.path', configBackup.notFound.images.path) + // Setting a new fallback image to ensure that the content-type returned matches the + // content-type of the image that was cached, not the one that is currently set. 
+ config.set('notFound.images.path', 'test/images/original.jpg') - done() - }) - }, 500) - }) + setTimeout(() => { + client + .get('/not-a-valid-image.jpg') + .expect(404) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('image/png') + res.headers['x-cache'].should.eql('HIT') + + config.set('caching.cache404', configBackup.caching.cache404) + config.set( + 'notFound.images.enabled', + configBackup.notFound.images.enabled + ) + config.set( + 'notFound.images.path', + configBackup.notFound.images.path + ) + + done() + }) + }, 500) + }) }) }) @@ -268,54 +295,54 @@ describe('Cache', function () { it('should cache as different items requests with identical paths but different domains', done => { client - .get('/test.jpg') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/test.jpg') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.jpg') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('MISS') + setTimeout(() => { client - .get('/test.jpg') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.jpg') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('HIT') - setTimeout(() => { client - .get('/test.jpg') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('image/jpeg') - res.headers['x-cache'].should.eql('HIT') - - done() - }) - }, 150) - }) - }) - }, 150) - }) + .get('/test.jpg') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('MISS') + + setTimeout(() => { + client + .get('/test.jpg') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('image/jpeg') + res.headers['x-cache'].should.eql('HIT') + + done() + }) + }, 150) + }) + }) + }, 150) + }) }) }) }) @@ -323,71 +350,73 @@ describe('Cache', function () { describe('JavaScript', () => { it('should get untranspiled JS from cache when available, not dependent on user agent', done => { client - .get('/test.js') - .set('user-agent', USER_AGENTS.chrome41) - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.js') + .set('user-agent', USER_AGENTS.chrome41) + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('application/javascript') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client - .get('/test.js') - .set('user-agent', USER_AGENTS.ie9) - .expect(200) - .end((err, res) => { - if (err) return 
done(err) + setTimeout(() => { + client + .get('/test.js') + .set('user-agent', USER_AGENTS.ie9) + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('application/javascript') + res.headers['x-cache'].should.eql('HIT') - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) it('should get transpiled JS from cache when available, based on user agent', done => { client - .get('/test-es6.js?transform=1&compress=1') - .set('user-agent', USER_AGENTS.chrome64) - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/test-es6.js?transform=1&compress=1') - .set('user-agent', USER_AGENTS.firefox54) - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test-es6.js?transform=1&compress=1') + .set('user-agent', USER_AGENTS.chrome64) + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('application/javascript') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client + setTimeout(() => { + client .get('/test-es6.js?transform=1&compress=1') - .set('user-agent', USER_AGENTS.ie9) + .set('user-agent', USER_AGENTS.firefox54) .expect(200) .end((err, res) => { if (err) return done(err) res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('MISS') + res.headers['x-cache'].should.eql('HIT') - done() + setTimeout(() => { + client + .get('/test-es6.js?transform=1&compress=1') + .set('user-agent', USER_AGENTS.ie9) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql( + 'application/javascript' + ) + res.headers['x-cache'].should.eql('MISS') + + done() + }) + }, 500) }) - }, 500) - }) - }, 500) - }) + }, 500) + }) }) describe('when multi-domain is enabled', () => { @@ -402,54 +431,60 @@ describe('Cache', function () { it('should cache as different items requests with identical paths but different domains', done => { client - .get('/test.js') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/test.js') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.js') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('application/javascript') + res.headers['x-cache'].should.eql('MISS') + setTimeout(() => { client - .get('/test.js') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.js') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql( + 'application/javascript' + ) + 
res.headers['x-cache'].should.eql('HIT') - setTimeout(() => { client - .get('/test.js') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/javascript') - res.headers['x-cache'].should.eql('HIT') - - done() - }) - }, 150) - }) - }) - }, 150) - }) + .get('/test.js') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql( + 'application/javascript' + ) + res.headers['x-cache'].should.eql('MISS') + + setTimeout(() => { + client + .get('/test.js') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql( + 'application/javascript' + ) + res.headers['x-cache'].should.eql('HIT') + + done() + }) + }, 150) + }) + }) + }, 150) + }) }) }) }) @@ -457,78 +492,78 @@ describe('Cache', function () { describe('CSS', () => { it('should get CSS from cache when available', done => { client - .get('/test.css') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.css') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client - .get('/test.css') - .expect(200) - .end((err, res) => { - if (err) return done(err) + setTimeout(() => { + client + .get('/test.css') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('HIT') - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) it('should get compressed CSS from cache, independently from uncompressed version', done => { client - .get('/test.css') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/test.css') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.css') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client - .get('/test.css?compress=1') + setTimeout(() => { + client + .get('/test.css') .expect(200) .end((err, res) => { if (err) return done(err) res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('MISS') + res.headers['x-cache'].should.eql('HIT') setTimeout(() => { client - .get('/test.css?compress=1') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('HIT') - - done() - }) + .get('/test.css?compress=1') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('MISS') + + setTimeout(() => { + client + .get('/test.css?compress=1') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('text/css') + 
res.headers['x-cache'].should.eql('HIT') + + done() + }) + }, 500) + }) }, 500) }) - }, 500) - }) - }, 500) - }) + }, 500) + }) }) describe('when multi-domain is enabled', () => { @@ -543,54 +578,54 @@ describe('Cache', function () { it('should cache as different items requests with identical paths but different domains', done => { client - .get('/test.css') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/test.css') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.css') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('MISS') + setTimeout(() => { client - .get('/test.css') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.css') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('HIT') - setTimeout(() => { client - .get('/test.css') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('text/css') - res.headers['x-cache'].should.eql('HIT') - - done() - }) - }, 150) - }) - }) - }, 150) - }) + .get('/test.css') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('MISS') + + setTimeout(() => { + client + .get('/test.css') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('text/css') + res.headers['x-cache'].should.eql('HIT') + + done() + }) + }, 150) + }) + }) + }, 150) + }) }) }) }) @@ -598,54 +633,54 @@ describe('Cache', function () { describe('Other assets', () => { it('should get TTF from cache when available', done => { client - .get('/test.ttf') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.ttf') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('font/ttf') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('font/ttf') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client - .get('/test.ttf') - .expect(200) - .end((err, res) => { - if (err) return done(err) + setTimeout(() => { + client + .get('/test.ttf') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('font/ttf') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('font/ttf') + res.headers['x-cache'].should.eql('HIT') - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) it('should get PDF from cache when available', done => { client - .get('/test.pdf') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.pdf') + .expect(200) + .end((err, res) => { + if (err) return done(err) - 
res.headers['content-type'].should.eql('application/pdf') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('application/pdf') + res.headers['x-cache'].should.eql('MISS') - setTimeout(() => { - client - .get('/test.pdf') - .expect(200) - .end((err, res) => { - if (err) return done(err) + setTimeout(() => { + client + .get('/test.pdf') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/pdf') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('application/pdf') + res.headers['x-cache'].should.eql('HIT') - done() - }) - }, 500) - }) + done() + }) + }, 500) + }) }) describe('when multi-domain is enabled', () => { @@ -660,62 +695,64 @@ describe('Cache', function () { it('should cache as different items requests with identical paths but different domains', done => { client - .get('/test.pdf') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/pdf') - res.headers['x-cache'].should.eql('MISS') - - setTimeout(() => { - client - .get('/test.pdf') - .set('Host', 'localhost:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.pdf') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/pdf') - res.headers['x-cache'].should.eql('HIT') + res.headers['content-type'].should.eql('application/pdf') + res.headers['x-cache'].should.eql('MISS') + setTimeout(() => { client - .get('/test.pdf') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.pdf') + .set('Host', 'localhost:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/pdf') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('application/pdf') + res.headers['x-cache'].should.eql('HIT') - setTimeout(() => { client - .get('/test.pdf') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) - - res.headers['content-type'].should.eql('application/pdf') - res.headers['x-cache'].should.eql('HIT') - - done() - }) - }, 150) - }) - }) - }, 150) - }) + .get('/test.pdf') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql('application/pdf') + res.headers['x-cache'].should.eql('MISS') + + setTimeout(() => { + client + .get('/test.pdf') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) + + res.headers['content-type'].should.eql( + 'application/pdf' + ) + res.headers['x-cache'].should.eql('HIT') + + done() + }) + }, 150) + }) + }) + }, 150) + }) }) }) }) describe('TTL', () => { it('should keep cached items for the period of time defined in caching.ttl', done => { - let mockCacheGet = sinon.spy(cache.Cache.prototype, 'getStream') - let mockCacheSet = sinon.spy(cache.Cache.prototype, 'cacheFile') + const mockCacheGet = sinon.spy(cache.Cache.prototype, 'getStream') + const mockCacheSet = sinon.spy(cache.Cache.prototype, 'cacheFile') config.set('caching.ttl', 3) @@ -741,8 +778,8 @@ describe('Cache', function () { }) it('when multi-domain is enabled, cached items should be kept for the period of time defined in each domain config', done => { - let 
mockCacheGet = sinon.spy(cache.Cache.prototype, 'getStream') - let mockCacheSet = sinon.spy(cache.Cache.prototype, 'cacheFile') + const mockCacheGet = sinon.spy(cache.Cache.prototype, 'getStream') + const mockCacheSet = sinon.spy(cache.Cache.prototype, 'cacheFile') config.set('multiDomain.enabled', true) config.loadDomainConfigs() @@ -763,26 +800,26 @@ describe('Cache', function () { res.headers['x-cache'].should.eql('MISS') client - .get('/test.pdf') - .set('Host', 'testdomain.com:80') - .expect(200) - .end((err, res) => { - if (err) return done(err) + .get('/test.pdf') + .set('Host', 'testdomain.com:80') + .expect(200) + .end((err, res) => { + if (err) return done(err) - res.headers['content-type'].should.eql('application/pdf') - res.headers['x-cache'].should.eql('MISS') + res.headers['content-type'].should.eql('application/pdf') + res.headers['x-cache'].should.eql('MISS') - mockCacheGet.firstCall.args[1].ttl.should.eql(3) - mockCacheSet.firstCall.args[2].ttl.should.eql(3) + mockCacheGet.firstCall.args[1].ttl.should.eql(3) + mockCacheSet.firstCall.args[2].ttl.should.eql(3) - mockCacheGet.secondCall.args[1].ttl.should.eql(5) - mockCacheSet.secondCall.args[2].ttl.should.eql(5) + mockCacheGet.secondCall.args[1].ttl.should.eql(5) + mockCacheSet.secondCall.args[2].ttl.should.eql(5) - mockCacheGet.restore() - mockCacheSet.restore() + mockCacheGet.restore() + mockCacheSet.restore() - done() - }) + done() + }) }) }, 500) }) @@ -802,12 +839,14 @@ describe('Frequency cache flush', () => { config.set('multiDomain.enabled', false) app.start(() => { - let mockCacheDelete = sinon.spy(cache.Cache.prototype, 'delete') + const mockCacheDelete = sinon.spy(cache.Cache.prototype, 'delete') setTimeout(() => { - mockCacheDelete.args.every(callArgs => { - return callArgs.length === 0 - }).should.eql(true) + mockCacheDelete.args + .every(callArgs => { + return callArgs.length === 0 + }) + .should.eql(true) mockCacheDelete.callCount.should.be.above(4) mockCacheDelete.restore() @@ -830,20 +869,26 @@ describe('Frequency cache flush', () => { config.set('caching.expireAt', '* * * * * *', 'testdomain.com') app.start(() => { - let mockCacheDelete = sinon.spy(cache.Cache.prototype, 'delete') + const mockCacheDelete = sinon.spy(cache.Cache.prototype, 'delete') setTimeout(() => { - mockCacheDelete.args.every(callArgs => { - callArgs.length.should.eql(1) - callArgs[0].should.eql(['testdomain.com']) + mockCacheDelete.args + .every(callArgs => { + callArgs.length.should.eql(1) + callArgs[0].should.eql(['testdomain.com']) - return true - }).should.eql(true) + return true + }) + .should.eql(true) mockCacheDelete.callCount.should.be.above(4) mockCacheDelete.restore() - config.set('caching.expireAt', configBackup.caching.expireAt, 'testdomain.com') + config.set( + 'caching.expireAt', + configBackup.caching.expireAt, + 'testdomain.com' + ) config.set('multiDomain.enabled', configBackup.multiDomain.enabled) app.stop(done) diff --git a/test/acceptance/work-queue.js b/test/acceptance/work-queue.js index 9a8f5e97..84c44009 100644 --- a/test/acceptance/work-queue.js +++ b/test/acceptance/work-queue.js @@ -9,11 +9,13 @@ const request = require('supertest') const sinon = require('sinon') const should = require('should') -let cdnUrl = `http://${config.get('server.host')}:${config.get('server.port')}` -let client = request(cdnUrl) -let configBackup = config.get() +const cdnUrl = `http://${config.get('server.host')}:${config.get( + 'server.port' +)}` +const client = request(cdnUrl) +const configBackup = config.get() -describe('Work 
queue', function () { +describe('Work queue', function() { this.timeout(100000) beforeEach(done => { @@ -22,30 +24,31 @@ describe('Work queue', function () { config.set('images.directory.enabled', true) config.set('images.remote.enabled', false) - app.start(function () { - help.getBearerToken((err, token) => { - if (err) return done(err) - - bearerToken = token - done() - }) - }) + app.start(function() { + done() + }) }) afterEach(done => { help.clearCache() app.stop(done) - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) config.set('caching.redis.enabled', configBackup.caching.redis.enabled) - config.set('images.directory.enabled', configBackup.images.directory.enabled) + config.set( + 'images.directory.enabled', + configBackup.images.directory.enabled + ) config.set('images.remote.enabled', configBackup.images.remote.enabled) }) it('should process the image just once on subsequent requests and render the correct result (5 requests)', () => { - let processorSpy = sinon.spy(ImageHandler.prototype, 'process') - let numberOfRequests = 5 - let ops = Array.apply(null, { + const processorSpy = sinon.spy(ImageHandler.prototype, 'process') + const numberOfRequests = 5 + const ops = Array.apply(null, { length: numberOfRequests }).map(() => { return help.imagesEqual({ @@ -62,36 +65,36 @@ describe('Work queue', function () { }) it('should process the image just once on subsequent requests (50 requests)', () => { - let processorSpy = sinon.spy(ImageHandler.prototype, 'process') - let numberOfRequests = 50 - let ops = Array.apply(null, { + const processorSpy = sinon.spy(ImageHandler.prototype, 'process') + const numberOfRequests = 50 + const ops = Array.apply(null, { length: numberOfRequests }).map(() => { return new Promise((resolve, reject) => { client - .get('/original.jpg?format=jpg') - .expect(200) - .end((err, res) => { - if (err) { - return reject(err) - } + .get('/original.jpg?format=jpg') + .expect(200) + .end((err, res) => { + if (err) { + return reject(err) + } - resolve(res) - }) + resolve(res) + }) }) }) return Promise.all(ops).then(results => { - results.every(res => { - res.statusCode.should.eql(200) - res.headers['content-type'].should.eql( - 'image/jpeg' - ) + results + .every(res => { + res.statusCode.should.eql(200) + res.headers['content-type'].should.eql('image/jpeg') - return true - }).should.eql(true) + return true + }) + .should.eql(true) processorSpy.callCount.should.eql(1) processorSpy.restore() }) }) -}) \ No newline at end of file +}) diff --git a/test/assets/test.css b/test/assets/test.css index 2027e75b..5c6d8383 100755 --- a/test/assets/test.css +++ b/test/assets/test.css @@ -7,18 +7,82 @@ */ /* DEFAULT DEFINITION */ -html { font-size:100%; } -body { margin:0; padding:0; font:0.75em Tahoma, Arial, Verdana, Helvetica, sans-serif; color:#51575c; background:#fefefe; } -img, table, td, fieldset, form, legend, h1, h2, h3, h4, h5, h6, p, ul, ol, li, dl, dt, dd { margin:0; padding:0; border:0; font-size:1em; } -ul, li { list-style:none; } -table { border-collapse:collapse; } -a { outline:0 none; text-decoration:none; color:#777; } -a:hover { text-decoration:underline; } -.offleft { position:absolute; left:-5000px; width:2000px; } -.hidden { display:none !important; } -.show { display:block !important; } -object { outline:none; } /* FF3 */ -.clearfix:after { content:"."; display:block; height:0; clear:both; visibility:hidden; } -.clearfix 
{display:inline-block;}/*ie7*/ -.clearfix {display:block;} -* html .clearfix {height:1%;} +html { + font-size: 100%; +} +body { + margin: 0; + padding: 0; + font: 0.75em Tahoma, Arial, Verdana, Helvetica, sans-serif; + color: #51575c; + background: #fefefe; +} +img, +table, +td, +fieldset, +form, +legend, +h1, +h2, +h3, +h4, +h5, +h6, +p, +ul, +ol, +li, +dl, +dt, +dd { + margin: 0; + padding: 0; + border: 0; + font-size: 1em; +} +ul, +li { + list-style: none; +} +table { + border-collapse: collapse; +} +a { + outline: 0 none; + text-decoration: none; + color: #777; +} +a:hover { + text-decoration: underline; +} +.offleft { + position: absolute; + left: -5000px; + width: 2000px; +} +.hidden { + display: none !important; +} +.show { + display: block !important; +} +object { + outline: none; +} /* FF3 */ +.clearfix:after { + content: '.'; + display: block; + height: 0; + clear: both; + visibility: hidden; +} +.clearfix { + display: inline-block; +} /*ie7*/ +.clearfix { + display: block; +} +* html .clearfix { + height: 1%; +} diff --git a/test/assets/test.html b/test/assets/test.html index 2c20cdd1..fb288d5b 100644 --- a/test/assets/test.html +++ b/test/assets/test.html @@ -1,66 +1,65 @@
[test.html hunk: the HTML markup was lost in extraction, so the line-by-line diff cannot be reproduced here. Recoverable content: the hunk replaces all 66 lines of the old file with 65 new lines; the asset is a "DADI"-titled status page with an #app style rule (min-height: 100%; width: 100%; margin: auto) and a table whose columns are Status, ID, IP, Location, User, CPUs, Go routines and Last active, populated with template bindings such as {{ i.state }}, {{ i.id }}, {{ i.address }}, {{ i.location }}, {{ i.user }}, {{ i.num_cpu }}, {{ i.num_goroutine }} and {{ moment(i.last_active).fromNow() }}, plus Connected/Ready status labels; the old file had no trailing newline, the new one does.]
+ + diff --git a/test/assets/test.js b/test/assets/test.js index 155c4e8b..b7c6b47f 100755 --- a/test/assets/test.js +++ b/test/assets/test.js @@ -16,4 +16,4 @@ cb.tools.substitute = function (str, arr) { str = str.replace(re, arr[i]) } return str -} +} \ No newline at end of file diff --git a/test/pretest.js b/test/pretest.js index b3c97d83..8a83dc44 100755 --- a/test/pretest.js +++ b/test/pretest.js @@ -1,15 +1,18 @@ -var fs = require('fs') -var path = require('path') -var colors = require('colors') +const fs = require('fs') +const path = require('path') +const colors = require('colors') -var testConfigPath = './config/config.test.json' -var testConfigSamplePath = './config/config.test.json.sample' +const testConfigPath = './config/config.test.json' +const testConfigSamplePath = './config/config.test.json.sample' -var testConfigSample = fs.readFileSync(testConfigSamplePath, { encoding: 'utf-8'}) +const testConfigSample = fs.readFileSync(testConfigSamplePath, { + encoding: 'utf-8' +}) -function loadConfig () { +function loadConfig() { try { - var testConfig = fs.readFileSync(testConfigPath, { encoding: 'utf-8'}) + const testConfig = fs.readFileSync(testConfigPath, {encoding: 'utf-8'}) + return JSON.parse(testConfig) } catch (err) { if (err.code === 'ENOENT') { diff --git a/test/unit/cache.js b/test/unit/cache.js index a7a509e1..ef2cb66a 100755 --- a/test/unit/cache.js +++ b/test/unit/cache.js @@ -1,22 +1,22 @@ -var should = require('should') -var fs = require('fs') -var path = require('path') -var sinon = require('sinon') -var proxyquire = require('proxyquire') -var redis = require('redis') -var fakeredis = require('fakeredis') +const should = require('should') +const fs = require('fs') +const path = require('path') +const sinon = require('sinon') +const proxyquire = require('proxyquire') +const redis = require('redis') +const fakeredis = require('fakeredis') -var Router = require('router') -var router = Router() +const Router = require('router') +const router = Router() -var config -var cache -var imageHandler = require(__dirname + '/../../dadi/lib/handlers/image') +let config +let cache +const imageHandler = require(__dirname + '/../../dadi/lib/handlers/image') -var testConfigString +let testConfigString -describe('Cache', function (done) { - beforeEach(function (done) { +describe('Cache', function(done) { + beforeEach(function(done) { delete require.cache[__dirname + '/../../dadi/lib/cache'] cache = require(__dirname + '/../../dadi/lib/cache') @@ -28,92 +28,109 @@ describe('Cache', function (done) { done() }) - afterEach(function (done) { + afterEach(function(done) { delete require.cache[__dirname + '/../../dadi/lib/cache'] fs.writeFileSync(config.configPath(), testConfigString) done() }) - it('should export an instance', function (done) { + it('should export an instance', function(done) { cache.should.be.Function done() }) - it("should cache if the app's directory config settings allow", function (done) { - var newTestConfig = JSON.parse(testConfigString) + it("should cache if the app's directory config settings allow", function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = true newTestConfig.caching.redis.enabled = false - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) cache.reset() - var req = { + const req = { url: 
'/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' } - var im = new imageHandler('jpg', req) + const im = new imageHandler('jpg', req) im.cache.isEnabled().should.eql(true) done() }) - it("should not cache if the app's config settings don't allow", function (done) { - var newTestConfig = JSON.parse(testConfigString) + it("should not cache if the app's config settings don't allow", function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = false newTestConfig.caching.redis.enabled = false - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) cache.reset() - var req = { + const req = { url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' } - var imageHandler = proxyquire('../../dadi/lib/handlers/image', {'Cache': cache}) - var im = new imageHandler('jpg', req) + const imageHandler = proxyquire('../../dadi/lib/handlers/image', { + Cache: cache + }) + const im = new imageHandler('jpg', req) im.cache.isEnabled().should.eql(false) done() }) - it('should receive null from cache.getStream() if the caching is disabled', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should receive null from cache.getStream() if the caching is disabled', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.caching.directory.enabled = false newTestConfig.caching.directory.path = './cache' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) cache.reset() - var req = { - __cdnLegacyURLSyntax: true, - url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' + const req = { + url: '/test.jpg' } - var im = new imageHandler('jpg', req) + const im = new imageHandler('jpg', req) + + const getStream = sinon.spy(im.cache, 'getStream') + + im.get() + .then(function(stream) { + getStream.restore() - var getStream = sinon.spy(im.cache, 'getStream') + const args = getStream.firstCall.args - im.get().then(function (stream) { - getStream.restore() + args[0].includes(req.url).should.eql(true) - var args = getStream.firstCall.args - args[0].includes(req.url).should.eql(true) + const returnValue = getStream.firstCall.returnValue - var returnValue = getStream.firstCall.returnValue - returnValue.then(err => { - should.not.exist(err) + returnValue.then(err => { + should.not.exist(err) - done() + done() + }) }) - }).catch(console.log) + .catch(console.log) }) }) diff --git a/test/unit/config.js b/test/unit/config.js index 28a08430..03ab9e77 100644 --- a/test/unit/config.js +++ b/test/unit/config.js @@ -7,12 +7,8 @@ const should = require('should') const sinon = require('sinon') describe('Config', done => { - let rawConfig = require( - './../../config/config.test.json' - ) - let domainConfig = require( - './../../domains/testdomain.com/config/config.test.json' - ) + const rawConfig = require('./../../config/config.test.json') + const domainConfig = require('./../../domains/testdomain.com/config/config.test.json') it('should create config object', () => { config.should.be.Function @@ -20,30 +16,28 @@ describe('Config', done => { describe('when not given a domain', () => { it('should return values from the main config', () => { - config.get('server.port').should.eql( - 
rawConfig.server.port - ) - }) + config.get('server.port').should.eql(rawConfig.server.port) + }) }) describe('when given a domain', () => { it('should return values from the main config if the value is not specified in the domain config', () => { should.not.exist(domainConfig.paths && domainConfig.paths.plugins) - config.get('paths.plugins', 'testdomain.com').should.eql( - config.get('paths.plugins') - ) + config + .get('paths.plugins', 'testdomain.com') + .should.eql(config.get('paths.plugins')) }) - it('should return values from the main config if the value specified in the domain config isn\'t overridable', () => { + it("should return values from the main config if the value specified in the domain config isn't overridable", () => { should.exist(domainConfig.server.host) Boolean( objectPath.get(config.schema, 'server.host.allowDomainOverride') ).should.eql(false) - config.get('server.host', 'testdomain.com').should.eql( - rawConfig.server.host - ) + config + .get('server.host', 'testdomain.com') + .should.eql(rawConfig.server.host) }) it('should return values from the domain config if the value is overridable and is specified in the domain config', () => { @@ -52,9 +46,9 @@ describe('Config', done => { objectPath.get(config.schema, 'images.remote.path.allowDomainOverride') ).should.eql(true) - config.get('images.remote.path', 'testdomain.com').should.eql( - domainConfig.images.remote.path - ) - }) + config + .get('images.remote.path', 'testdomain.com') + .should.eql(domainConfig.images.remote.path) + }) }) }) diff --git a/test/unit/domain-manager.js b/test/unit/domain-manager.js index 26fda989..92a49016 100644 --- a/test/unit/domain-manager.js +++ b/test/unit/domain-manager.js @@ -1,24 +1,20 @@ const config = require(__dirname + '/../../config') -const domainManager = require(__dirname + '/../../dadi/lib/models/domain-manager') +const domainManager = require(__dirname + + '/../../dadi/lib/models/domain-manager') const fs = require('fs-extra') const path = require('path') +const should = require('should') describe('Domain manager', () => { describe('`scanDomains()` method', () => { it('should build an array of domains and paths', () => { - let domainsDirectory = path.resolve( - config.get('multiDomain.directory') - ) + const domainsDirectory = path.resolve(config.get('multiDomain.directory')) return Promise.all([ - fs.ensureDir( - path.join(domainsDirectory, 'localhost') - ), - fs.ensureDir( - path.join(domainsDirectory, 'testdomain.com') - ) + fs.ensureDir(path.join(domainsDirectory, 'localhost')), + fs.ensureDir(path.join(domainsDirectory, 'testdomain.com')) ]).then(() => { - let domains = new domainManager.DomainManager() + const domains = new domainManager.DomainManager() domains.scanDomains(domainsDirectory) @@ -36,37 +32,39 @@ describe('Domain manager', () => { }) it('should ignore any files and only include directories', () => { - let domainsDirectory = path.resolve( - config.get('multiDomain.directory') - ) - - let mockFile1 = path.join(domainsDirectory, 'not-a-domain') - let mockFile2 = path.join(domainsDirectory, 'definitely-not-a-domain.js') - - return Promise.all([ - fs.ensureFile(mockFile1), - fs.ensureFile(mockFile2) - ]).then(() => { - let domains = new domainManager.DomainManager() - - domains.scanDomains(domainsDirectory) + const domainsDirectory = path.resolve(config.get('multiDomain.directory')) - should.not.exist( - domains.domains.find(item => { - return ['not-a-domain', 'definitely-not-a-domain.js'].includes(item.domain) - }) - ) + const mockFile1 = 
path.join(domainsDirectory, 'not-a-domain') + const mockFile2 = path.join( + domainsDirectory, + 'definitely-not-a-domain.js' + ) - return fs.remove(mockFile1) - }).then(() => { - return fs.remove(mockFile2) - }) + return Promise.all([fs.ensureFile(mockFile1), fs.ensureFile(mockFile2)]) + .then(() => { + const domains = new domainManager.DomainManager() + + domains.scanDomains(domainsDirectory) + + should.not.exist( + domains.domains.find(item => { + return ['not-a-domain', 'definitely-not-a-domain.js'].includes( + item.domain + ) + }) + ) + + return fs.remove(mockFile1) + }) + .then(() => { + return fs.remove(mockFile2) + }) }) }) describe('`addDomain()` method', () => { it('should add the specified domain to the internal map of domains', () => { - let domains = new domainManager.DomainManager() + const domains = new domainManager.DomainManager() domains.addDomain('test-domain', {}) domains.getDomain('test-domain').should.eql({domain: 'test-domain'}) @@ -75,30 +73,24 @@ describe('Domain manager', () => { describe('`removeDomain()` method', () => { it('should remove the specified domain from the internal map of domains', () => { - let domains = new domainManager.DomainManager() + const domains = new domainManager.DomainManager() domains.removeDomain('test-domain') - let domain = domains.getDomain('test-domain'); + const domain = domains.getDomain('test-domain') - (typeof domain).should.eql('undefined') + ;(typeof domain).should.eql('undefined') }) }) describe('`getDomains()` method', () => { it('should return the full array of domains and paths', () => { - let domainsDirectory = path.resolve( - config.get('multiDomain.directory') - ) + const domainsDirectory = path.resolve(config.get('multiDomain.directory')) return Promise.all([ - fs.ensureDir( - path.join(domainsDirectory, 'localhost') - ), - fs.ensureDir( - path.join(domainsDirectory, 'testdomain.com') - ) + fs.ensureDir(path.join(domainsDirectory, 'localhost')), + fs.ensureDir(path.join(domainsDirectory, 'testdomain.com')) ]).then(() => { - let domains = new domainManager.DomainManager() + const domains = new domainManager.DomainManager() domains.scanDomains(domainsDirectory) domains.getDomains().should.eql(domains.domains) @@ -108,19 +100,13 @@ describe('Domain manager', () => { describe('`getDomain()` method', () => { it('should return the name and path of a matching domain', () => { - let domainsDirectory = path.resolve( - config.get('multiDomain.directory') - ) + const domainsDirectory = path.resolve(config.get('multiDomain.directory')) return Promise.all([ - fs.ensureDir( - path.join(domainsDirectory, 'localhost') - ), - fs.ensureDir( - path.join(domainsDirectory, 'testdomain.com') - ) + fs.ensureDir(path.join(domainsDirectory, 'localhost')), + fs.ensureDir(path.join(domainsDirectory, 'testdomain.com')) ]).then(() => { - let domains = new domainManager.DomainManager() + const domains = new domainManager.DomainManager() domains.scanDomains(domainsDirectory) domains.getDomain('localhost').should.eql(domains.domains[0]) @@ -129,13 +115,11 @@ describe('Domain manager', () => { }) it('should return `undefined` when given a domain that is not configured', () => { - let domainsDirectory = path.resolve( - config.get('multiDomain.directory') - ) - let domains = new domainManager.DomainManager() + const domainsDirectory = path.resolve(config.get('multiDomain.directory')) + const domains = new domainManager.DomainManager() domains.scanDomains(domainsDirectory) should.not.exist(domains.getDomain('lolcathost')) - }) - }) + }) + }) }) diff 
--git a/test/unit/imagehandler.js b/test/unit/imagehandler.js index 848398bc..83baeeb7 100644 --- a/test/unit/imagehandler.js +++ b/test/unit/imagehandler.js @@ -11,106 +11,125 @@ const HTTPStorage = require('./../../dadi/lib/storage/http') const S3Storage = require('./../../dadi/lib/storage/s3') const config = require('./../../config') -let configBackup = config.get() - -describe('ImageHandler', function (done) { - beforeEach(function (done) { +const configBackup = config.get() +describe('ImageHandler', function(done) { + beforeEach(function(done) { done() }) - afterEach(function (done) { - config.set('caching.directory.enabled', configBackup.caching.directory.enabled) + afterEach(function(done) { + config.set( + 'caching.directory.enabled', + configBackup.caching.directory.enabled + ) config.set('caching.redis.enabled', configBackup.caching.redis.enabled) - - config.set('images.directory.enabled', configBackup.images.directory.enabled) + + config.set( + 'images.directory.enabled', + configBackup.images.directory.enabled + ) config.set('images.s3.enabled', configBackup.images.s3.enabled) config.set('images.remote.enabled', configBackup.images.remote.enabled) config.set('images.directory.path', configBackup.images.directory.path) done() }) - it('should use Disk Storage adapter when nothing else is configured', function (done) { + it('should use Disk Storage adapter when nothing else is configured', function(done) { config.set('caching.directory.enabled', false) config.set('caching.redis.enabled', false) - + config.set('images.directory.enabled', false) config.set('images.s3.enabled', false) config.set('images.remote.enabled', false) config.set('images.directory.path', './test/images') - let spy = sinon.spy(factory, 'create') + const spy = sinon.spy(factory, 'create') - let req = { - __cdnLegacyURLSyntax: true, - url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' + const req = { + url: '/test.jpg' } // set some expected values - let expected = path.join(path.resolve(config.get('images.directory.path')), '/test.jpg') + const expected = path.join( + path.resolve(config.get('images.directory.path')), + '/test.jpg' + ) // stub the get method so it doesn't do anything - let get = sinon.stub(DiskStorage.DiskStorage.prototype, 'get').callsFake(function () { - return new Promise(function (resolve, reject) { - let readable = new fs.createReadStream(expected) - return resolve(readable) + const get = sinon + .stub(DiskStorage.DiskStorage.prototype, 'get') + .callsFake(function() { + return new Promise(function(resolve, reject) { + const readable = new fs.createReadStream(expected) + + return resolve(readable) + }) }) - }) // this is the test - let im = new imageHandler('jpg', req) - im.get().then(function (stream) { + const im = new imageHandler('jpg', req) + + im.get().then(function(stream) { factory.create.restore() DiskStorage.DiskStorage.prototype.get.restore() spy.called.should.eql(true) get.called.should.eql(true) - let returnValue = spy.firstCall.returnValue + const returnValue = spy.firstCall.returnValue + returnValue.getFullUrl().should.eql(expected) done() }) }) - it('should use Disk Storage adapter when configured', function (done) { + it('should use Disk Storage adapter when configured', function(done) { config.set('caching.directory.enabled', false) config.set('caching.redis.enabled', false) - + config.set('images.directory.enabled', true) config.set('images.s3.enabled', false) config.set('images.remote.enabled', false) config.set('images.directory.path', 
'./test/images') - let spy = sinon.spy(factory, 'create') + const spy = sinon.spy(factory, 'create') - let req = { - __cdnLegacyURLSyntax: true, + const req = { headers: {}, - url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' + url: '/test.jpg' } // set some expected values - let expected = path.join(path.resolve(config.get('images.directory.path')), '/test.jpg') + const expected = path.join( + path.resolve(config.get('images.directory.path')), + '/test.jpg' + ) // stub the get method so it doesn't do anything - let get = sinon.stub(DiskStorage.DiskStorage.prototype, 'get').callsFake(function () { - return new Promise(function (resolve, reject) { - let readable = new fs.createReadStream(expected) - resolve(readable) + const get = sinon + .stub(DiskStorage.DiskStorage.prototype, 'get') + .callsFake(function() { + return new Promise(function(resolve, reject) { + const readable = new fs.createReadStream(expected) + + resolve(readable) + }) }) - }) // this is the test - let im = new imageHandler('jpg', req) - im.get().then(function (stream) { + const im = new imageHandler('jpg', req) + + im.get().then(function(stream) { factory.create.restore() DiskStorage.DiskStorage.prototype.get.restore() spy.called.should.eql(true) get.called.should.eql(true) - let returnValue = spy.firstCall.returnValue + const returnValue = spy.firstCall.returnValue + returnValue.getFullUrl().should.eql(expected) done() @@ -122,112 +141,124 @@ describe('ImageHandler', function (done) { config.set('caching.directory.enabled', false) config.set('caching.redis.enabled', false) - + config.set('images.directory.enabled', false) config.set('images.s3.enabled', false) config.set('images.remote.enabled', true) config.set('images.remote.path', 'https://nodejs.org') - let spy = sinon.spy(factory, 'create') + const spy = sinon.spy(factory, 'create') - let req = { - __cdnLegacyURLSyntax: false, + const req = { headers: {}, url: 'static/images/logos/nodejs-new-white-pantone.png' } // set some expected values - let expected = 'https://nodejs.org/static/images/logos/nodejs-new-white-pantone.png' + const expected = + 'https://nodejs.org/static/images/logos/nodejs-new-white-pantone.png' // stub the get method so it doesn't do anything - let get = sinon.stub(HTTPStorage.HTTPStorage.prototype, 'get').callsFake(function () { - return new Promise(function (resolve, reject) { - let s = new stream.PassThrough() - - request - .get(expected) - .on('response', response => {}) - .on('error', err => {}) - .pipe(s) - resolve(s) + const get = sinon + .stub(HTTPStorage.HTTPStorage.prototype, 'get') + .callsFake(function() { + return new Promise(function(resolve, reject) { + const s = new stream.PassThrough() + + request + .get(expected) + .on('response', response => {}) + .on('error', err => {}) + .pipe(s) + resolve(s) + }) }) - }) // this is the test - let im = new imageHandler('jpg', req) - return im.get().then(function (stream) { + const im = new imageHandler('jpg', req) + + return im.get().then(function(stream) { factory.create.restore() HTTPStorage.HTTPStorage.prototype.get.restore() spy.called.should.eql(true) get.called.should.eql(true) - let returnValue = spy.firstCall.returnValue + const returnValue = spy.firstCall.returnValue + returnValue.getFullUrl().should.eql(expected) }) }) - it('should use S3 Storage adapter when configured', function (done) { + it('should use S3 Storage adapter when configured', function(done) { config.set('caching.directory.enabled', false) config.set('caching.redis.enabled', false) - + 
config.set('images.directory.enabled', false) config.set('images.s3.enabled', true) config.set('images.remote.enabled', false) - let spy = sinon.spy(factory, 'create') + const spy = sinon.spy(factory, 'create') - let req = { - __cdnLegacyURLSyntax: true, - url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' + const req = { + url: '/test.jpg' } // set some expected values - let expected = ['test.jpg'] + const expected = ['test.jpg'] - let testImage = path.join(path.resolve(config.get('images.directory.path')), '/test.jpg') + const testImage = path.join( + path.resolve(config.get('images.directory.path')), + '/test.jpg' + ) // stub the get method so it doesn't do anything - let get = sinon.stub(S3Storage.S3Storage.prototype, 'get').callsFake(function () { - return new Promise(function (resolve, reject) { - let readable = new fs.createReadStream(testImage) - resolve(readable) + const get = sinon + .stub(S3Storage.S3Storage.prototype, 'get') + .callsFake(function() { + return new Promise(function(resolve, reject) { + const readable = new fs.createReadStream(testImage) + + resolve(readable) + }) }) - }) // this is the test - let im = new imageHandler('jpg', req) - im.get().then(function (stream) { + const im = new imageHandler('jpg', req) + + im.get().then(function(stream) { factory.create.restore() S3Storage.S3Storage.prototype.get.restore() spy.called.should.eql(true) get.called.should.eql(true) - let returnValue = spy.firstCall.returnValue + const returnValue = spy.firstCall.returnValue + returnValue.urlParts.should.eql(expected) done() }) }) - it('should return filename with jpg extension when a URL has no extension', function (done) { + it('should return filename with jpg extension when a URL has no extension', function(done) { config.set('caching.directory.enabled', false) config.set('caching.redis.enabled', false) - + config.set('images.directory.enabled', false) config.set('images.s3.enabled', false) config.set('images.remote.enabled', true) - let req = { + const req = { headers: {}, url: '/test' } // set some expected values - let expected = 'test.jpg' + const expected = 'test.jpg' + + const im = new imageHandler('jpg', req) - let im = new imageHandler('jpg', req) im.getFilename().should.eql(expected) done() diff --git a/test/unit/jshandler.js b/test/unit/jshandler.js index 76e5ba7a..535ca39b 100644 --- a/test/unit/jshandler.js +++ b/test/unit/jshandler.js @@ -5,23 +5,26 @@ const should = require('should') const sinon = require('sinon') const Cache = require(__dirname + '/../../dadi/lib/cache') -const DiskStorage = require(__dirname + '/../../dadi/lib/storage/disk').DiskStorage +const DiskStorage = require(__dirname + '/../../dadi/lib/storage/disk') + .DiskStorage const JSHandler = require(__dirname + '/../../dadi/lib/handlers/js') const mockRequest = (url, browser) => { - let request = { + const request = { headers: {}, url } switch (browser) { case 'chrome-65': - request.headers['user-agent'] = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36' + request.headers['user-agent'] = + 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36' break case 'ie-9': - request.headers['user-agent'] = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' + request.headers['user-agent'] = + 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Trident/5.0)' break } @@ -40,7 +43,7 @@ const makeStream = string => { return stream } 
-describe('JS handler', function () { +describe('JS handler', function() { let mockCacheGet let mockDiskStorageGet @@ -60,183 +63,186 @@ describe('JS handler', function () { Cache.reset() }) - describe('legacy URL syntax', () => { - it('reads the correct file from the URL path', () => { - const mockJsFile = [ - 'const greeter = name => {', - ' return \`Hello, \${name}\`;', - '};' - ].join('\n') - - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) - - const jsHandler = new JSHandler('.js', mockRequest('/js/0/foo.js')) - - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - mockCacheGet.getCall(0).args[0].includes('/foo.js').should.eql(true) - - out.should.eql(mockJsFile) - }) - }) - - it('delivers the compressed JS file', () => { - const mockJsFile = [ - 'const greeter = name => {', - ' return \`Hello, \${name}\`;', - '};' - ].join('\n') - - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) - - const jsHandler = new JSHandler('.js', mockRequest('/js/1/foo.js')) - - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - mockCacheGet.getCall(0).args[0].includes('/foo.js').should.eql(true) - - out.should.eql('const greeter=a=>`Hello, ${a}`;') - }) - }) - }) - it('reads the correct file from the URL path', () => { const mockJsFile = [ 'const greeter = name => {', - ' return \`Hello, \${name}\`;', + ' return `Hello, ${name}`;', '};' ].join('\n') - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) + mockDiskStorageGet = sinon + .stub(DiskStorage.prototype, 'get') + .resolves(makeStream(mockJsFile)) const jsHandler = new JSHandler('.js', mockRequest('/foo.js')) - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - mockCacheGet.getCall(0).args[0].includes('/foo.js').should.eql(true) + return jsHandler + .get() + .then(out => { + return out.toString('utf8') + }) + .then(out => { + mockCacheGet + .getCall(0) + .args[0].includes('/foo.js') + .should.eql(true) - out.should.eql(mockJsFile) - }) + out.should.eql(mockJsFile) + }) }) it('delivers the compressed JS file', () => { const mockJsFile = [ 'const greeter = name => {', - ' return \`Hello, \${name}\`;', + ' return `Hello, ${name}`;', '};' ].join('\n') - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) + mockDiskStorageGet = sinon + .stub(DiskStorage.prototype, 'get') + .resolves(makeStream(mockJsFile)) const jsHandler = new JSHandler('.js', mockRequest('/foo.js?compress=1')) - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - mockCacheGet.getCall(0).args[0].includes('/foo.js?compress=1').should.eql(true) + return jsHandler + .get() + .then(out => { + return out.toString('utf8') + }) + .then(out => { + mockCacheGet + .getCall(0) + .args[0].includes('/foo.js?compress=1') + .should.eql(true) - out.should.eql('const greeter=a=>`Hello, ${a}`;') - }) + out.should.eql('const greeter=a=>`Hello, ${a}`;') + }) }) describe('transpiling', () => { it('delivers transpiled JS for browsers that do not support original features', () => { const mockJsFile = [ 'const greeter = name => {', - ' return \`Hello, \${name}\`;', + ' return `Hello, ${name}`;', '};' ].join('\n') - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) - - const jsHandler = new JSHandler('.js', mockRequest('/foo.js?transform=1', 
'ie-9')) - - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - out.should.eql( - [ - '"use strict";', - '', - 'var greeter = function greeter(name) {', - ' return "Hello, " + name;', - '};' - ].join('\n') - ) - }) + mockDiskStorageGet = sinon + .stub(DiskStorage.prototype, 'get') + .resolves(makeStream(mockJsFile)) + + const jsHandler = new JSHandler( + '.js', + mockRequest('/foo.js?transform=1', 'ie-9') + ) + + return jsHandler + .get() + .then(out => { + return out.toString('utf8') + }) + .then(out => { + out.should.eql( + [ + '"use strict";', + '', + 'var greeter = function greeter(name) {', + ' return "Hello, " + name;', + '};' + ].join('\n') + ) + }) }) it('delivers transpiled JS when the user agent header is missing', () => { const mockJsFile = [ 'const greeter = name => {', - ' return \`Hello, \${name}\`;', + ' return `Hello, ${name}`;', '};' ].join('\n') - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) + mockDiskStorageGet = sinon + .stub(DiskStorage.prototype, 'get') + .resolves(makeStream(mockJsFile)) const jsHandler = new JSHandler('.js', mockRequest('/foo.js?transform=1')) - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - out.should.eql( - [ - '"use strict";', - '', - 'var greeter = function greeter(name) {', - ' return "Hello, " + name;', - '};' - ].join('\n') - ) - }) + return jsHandler + .get() + .then(out => { + return out.toString('utf8') + }) + .then(out => { + out.should.eql( + [ + '"use strict";', + '', + 'var greeter = function greeter(name) {', + ' return "Hello, " + name;', + '};' + ].join('\n') + ) + }) }) it('delivers transpiled JS when the user agent has not been matched to a valid browser target', () => { const mockJsFile = [ 'const greeter = name => {', - ' return \`Hello, \${name}\`;', + ' return `Hello, ${name}`;', '};' ].join('\n') - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) - - const jsHandler = new JSHandler('.js', mockRequest('/foo.js?transform=1', 'some funky user agent')) - - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - out.should.eql( - [ - '"use strict";', - '', - 'var greeter = function greeter(name) {', - ' return "Hello, " + name;', - '};' - ].join('\n') - ) - }) + mockDiskStorageGet = sinon + .stub(DiskStorage.prototype, 'get') + .resolves(makeStream(mockJsFile)) + + const jsHandler = new JSHandler( + '.js', + mockRequest('/foo.js?transform=1', 'some funky user agent') + ) + + return jsHandler + .get() + .then(out => { + return out.toString('utf8') + }) + .then(out => { + out.should.eql( + [ + '"use strict";', + '', + 'var greeter = function greeter(name) {', + ' return "Hello, " + name;', + '};' + ].join('\n') + ) + }) }) it('delivers untouched JS for browsers that support original features', () => { const mockJsFile = [ 'const greeter = name => {', - ' return \`Hello, \${name}\`;', + ' return `Hello, ${name}`;', '};' ].join('\n') - mockDiskStorageGet = sinon.stub(DiskStorage.prototype, 'get').resolves(makeStream(mockJsFile)) - - const jsHandler = new JSHandler('.js', mockRequest('/foo.js?transform=1', 'chrome-65')) - - return jsHandler.get().then(out => { - return out.toString('utf8') - }).then(out => { - out.should.eql('"use strict";\n\n' + mockJsFile) - }) + mockDiskStorageGet = sinon + .stub(DiskStorage.prototype, 'get') + .resolves(makeStream(mockJsFile)) + + const jsHandler = new JSHandler( + '.js', + 
mockRequest('/foo.js?transform=1', 'chrome-65') + ) + + return jsHandler + .get() + .then(out => { + return out.toString('utf8') + }) + .then(out => { + out.should.eql('"use strict";\n\n' + mockJsFile) + }) }) }) }) diff --git a/test/unit/plugins.js b/test/unit/plugins.js index 7d21cb1e..f967374b 100644 --- a/test/unit/plugins.js +++ b/test/unit/plugins.js @@ -18,21 +18,27 @@ describe('Plugins', done => { const pluginPost = parameters => { pluginParameters = parameters } + const handler = new ImageHandler('jpg', req) sinon.stub(handler.storageFactory, 'create').returns({ get: () => { const readable = new fs.createReadStream( - path.join(path.resolve(config.get('images.directory.path')), '/test.jpg') + path.join( + path.resolve(config.get('images.directory.path')), + '/test.jpg' + ) ) - return Promise.resolve(readable) + return Promise.resolve(readable) } }) - handler.plugins = [{ - post: pluginPost - }] + handler.plugins = [ + { + post: pluginPost + } + ] return handler.get().then(response => { handler.storageFactory.create.restore() diff --git a/test/unit/routes.js b/test/unit/routes.js index 4c0e21ca..db66eba5 100644 --- a/test/unit/routes.js +++ b/test/unit/routes.js @@ -49,7 +49,7 @@ describe('Routes model', () => { route._arrayIntersect(array4, array5).should.eql(true) }) - it('should transform the first argument in an array if it isn\'t one', () => { + it("should transform the first argument in an array if it isn't one", () => { const route = new Route(sampleRoute) route._arrayIntersect('one', ['one', 'two']).should.eql(true) @@ -62,10 +62,7 @@ describe('Routes model', () => { route.ip = '123.456.78.9' - route._getCacheKey().should.eql([ - undefined, - route.ip + sampleRoute.route - ]) + route._getCacheKey().should.eql([undefined, route.ip + sampleRoute.route]) }) }) @@ -73,10 +70,9 @@ describe('Routes model', () => { it('should return a value located at the path described by the `path` argument', () => { const route = new Route(sampleRoute) - route._getPathInObject( - 'branches.0.condition.network', - sampleRoute - ).should.eql(sampleRoute.branches[0].condition.network) + route + ._getPathInObject('branches.0.condition.network', sampleRoute) + .should.eql(sampleRoute.branches[0].condition.network) }) }) @@ -84,10 +80,9 @@ describe('Routes model', () => { it('should return true if the branch has no conditions', () => { const route = new Route(sampleRoute) - return route._matchBranch(sampleRoute.branches[1]) - .then(match => { - match.should.eql(true) - }) + return route._matchBranch(sampleRoute.branches[1]).then(match => { + match.should.eql(true) + }) }) it('should return true/false based on whether the branch matches a `device` condition', () => { @@ -109,7 +104,8 @@ describe('Routes model', () => { sinon.stub(route1, 'getDevice').returns('mobile') sinon.stub(route2, 'getDevice').returns('desktop') - return route1._matchBranch(sampleRoute.branches[0]) + return route1 + ._matchBranch(sampleRoute.branches[0]) .then(match => { route1.getDevice.restore() @@ -295,6 +291,7 @@ describe('Routes model', () => { describe('`getLocation` method', () => { it('should return a rejected Promise if Geolocation is not enabled in config', () => { const configBackup = config.get('geolocation.enabled') + config.set('geolocation.enabled', false) const route = new Route(sampleRoute) @@ -399,9 +396,15 @@ describe('Routes model', () => { const route2 = new Route(sampleRoute) const route3 = new Route(sampleRoute) - sinon.stub(route1, 'evaluateBranches').returns(Promise.resolve(branches[0])) - 
sinon.stub(route2, 'evaluateBranches').returns(Promise.resolve(branches[1])) - sinon.stub(route3, 'evaluateBranches').returns(Promise.resolve(branches[2])) + sinon + .stub(route1, 'evaluateBranches') + .returns(Promise.resolve(branches[0])) + sinon + .stub(route2, 'evaluateBranches') + .returns(Promise.resolve(branches[1])) + sinon + .stub(route3, 'evaluateBranches') + .returns(Promise.resolve(branches[2])) return Promise.all([ route1.processRoute(), @@ -420,7 +423,7 @@ describe('Routes model', () => { }) describe('`setIP` method', () => { - it('should save an internal reference of the client\'s IP', () => { + it("should save an internal reference of the client's IP", () => { const route = new Route(sampleRoute) const ip = '123.456.78.9' @@ -433,7 +436,7 @@ describe('Routes model', () => { }) describe('`setLanguage` method', () => { - it('should save an internal reference of the client\'s language header', () => { + it("should save an internal reference of the client's language header", () => { const route = new Route(sampleRoute) const language = 'de,pt;q=0.8' @@ -446,9 +449,10 @@ describe('Routes model', () => { }) describe('`setUserAgent` method', () => { - it('should save an internal reference of the client\'s user agent', () => { + it("should save an internal reference of the client's user agent", () => { const route = new Route(sampleRoute) - const userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 Safari/603.3.8' + const userAgent = + 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 Safari/603.3.8' should.not.exist(route.userAgent) diff --git a/test/unit/server.js b/test/unit/server.js index 1475223e..2cbf304b 100755 --- a/test/unit/server.js +++ b/test/unit/server.js @@ -1,30 +1,32 @@ -var path = require('path') -var should = require('should') -var sinon = require('sinon') -var server = require(__dirname + '/../../dadi/lib') -var Server = require(__dirname + '/../../dadi/lib').Server -var fs = require('fs') - -describe('Server', function () { - it('should export an instance', function (done) { +const path = require('path') +const should = require('should') +const sinon = require('sinon') +const server = require(__dirname + '/../../dadi/lib') +const Server = require(__dirname + '/../../dadi/lib').Server +const fs = require('fs') + +describe('Server', function() { + it('should export an instance', function(done) { server.start.should.be.Function server.stop.should.be.Function done() }) - it('should export the Server prototype', function (done) { + it('should export the Server prototype', function(done) { server.Server.should.be.Function done() }) - it('should export the app config', function (done) { + it('should export the app config', function(done) { server.config.should.be.Function done() }) - describe('start', function () { - it('should set readyState', function (done) { - var stub = sinon.stub(fs, 'readdirSync').callsFake(function () { return [] }) + describe('start', function() { + it('should set readyState', function(done) { + const stub = sinon.stub(fs, 'readdirSync').callsFake(function() { + return [] + }) server.start() @@ -36,11 +38,13 @@ describe('Server', function () { }) }) - describe('stop', function () { - it('should set readyState', function (done) { - var stub = sinon.stub(server.server, 'close').callsFake(function (cb) { cb() }) + describe('stop', function() { + it('should set readyState', function(done) { + const stub = sinon.stub(server.server, 
'close').callsFake(function(cb) { + cb() + }) - server.stop(function (err) { + server.stop(function(err) { if (err) return done(err) server.readyState.should.equal(0) diff --git a/test/unit/storage.http.js b/test/unit/storage.http.js index 1c3ff081..3f0c7a1a 100644 --- a/test/unit/storage.http.js +++ b/test/unit/storage.http.js @@ -1,23 +1,23 @@ -var AWS = require('aws-sdk-mock') -var fs = require('fs') -var nock = require('nock') -var path = require('path') -var request = require('request') -var should = require('should') -var sinon = require('sinon') -var Promise = require('bluebird') -var stream = require('stream') -var imageHandler = require(__dirname + '/../../dadi/lib/handlers/image') -var factory = require(__dirname + '/../../dadi/lib/storage/factory') -var HTTPStorage = require(__dirname + '/../../dadi/lib/storage/http') -var cache = require(__dirname + '/../../dadi/lib/cache') - -var config -var stub -var testConfigString - -describe('Storage', function (done) { - beforeEach(function (done) { +const AWS = require('aws-sdk-mock') +const fs = require('fs') +const nock = require('nock') +const path = require('path') +const request = require('request') +const should = require('should') +const sinon = require('sinon') +const Promise = require('bluebird') +const stream = require('stream') +const imageHandler = require(__dirname + '/../../dadi/lib/handlers/image') +const factory = require(__dirname + '/../../dadi/lib/storage/factory') +const HTTPStorage = require(__dirname + '/../../dadi/lib/storage/http') +const cache = require(__dirname + '/../../dadi/lib/cache') + +let config +let stub +let testConfigString + +describe('Storage', function(done) { + beforeEach(function(done) { delete require.cache[__dirname + '/../../config'] config = require(__dirname + '/../../config') @@ -27,114 +27,145 @@ describe('Storage', function (done) { done() }) - afterEach(function (done) { - setTimeout(function () { + afterEach(function(done) { + setTimeout(function() { fs.writeFileSync(config.configPath(), testConfigString) done() }, 1000) }) - describe('HTTP', function (done) { - it('should use specified URL when passing external URL in request', function () { - var newTestConfig = JSON.parse(testConfigString) + describe('HTTP', function(done) { + it('should use specified URL when passing external URL in request', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = false newTestConfig.images.remote.enabled = true - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { - url: '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' + const req = { + url: + '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' } - var httpStorage = new HTTPStorage({ + const httpStorage = new HTTPStorage({ assetType: 'images', url: req.url.substring(1) }) - httpStorage.getFullUrl().should.eql('https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png') + httpStorage + .getFullUrl() + .should.eql( + 'https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' + ) }) - it('should use specified URL with URL parameters when passing external URL in request', function () { - var newTestConfig = JSON.parse(testConfigString) + it('should use specified URL with 
URL parameters when passing external URL in request', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = false newTestConfig.images.remote.enabled = true - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { - url: '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png?h=32' + const req = { + url: + '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png?h=32' } - var httpStorage = new HTTPStorage({ + const httpStorage = new HTTPStorage({ assetType: 'images', url: req.url.substring(1) }) - httpStorage.getFullUrl().should.eql('https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png?h=32') + httpStorage + .getFullUrl() + .should.eql( + 'https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png?h=32' + ) }) - it('should block a request for the specified external URL if allowFullURL is false', function () { - var newTestConfig = JSON.parse(testConfigString) + it('should block a request for the specified external URL if allowFullURL is false', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = false newTestConfig.images.remote.enabled = true newTestConfig.images.remote.allowFullURL = false - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { - url: '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' + const req = { + url: + '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' } - var im = new imageHandler('png', req) - return im.get().catch(function (err) { + const im = new imageHandler('png', req) + + return im.get().catch(function(err) { err.statusCode.should.eql(403) return Promise.resolve(true) }) }) - it('should make a request for the specified external URL if allowFullURL is true', function () { - var newTestConfig = JSON.parse(testConfigString) + it('should make a request for the specified external URL if allowFullURL is true', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = false newTestConfig.images.remote.enabled = true newTestConfig.images.remote.allowFullURL = true - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { - url: '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' + const req = { + url: + '/https://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' } // this is the test - var im = new imageHandler('png', req) + const im = new imageHandler('png', req) // fake the http request so it doesn't do anything - var scope = nock('https://www.google.co.uk') + const scope = nock('https://www.google.co.uk') .get('/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png') - .reply(200, function (uri, requestBody) { - var testImage = 
'http://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' - var s = new stream.PassThrough() - - request.get(testImage) - .on('response', response => { - // console.log(response.statusCode) // 200 - // console.log(response.headers['content-type']) // 'image/png' - }) - .on('error', err => {}) - .pipe(s) + .reply(200, function(uri, requestBody) { + const testImage = + 'http://www.google.co.uk/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png' + const s = new stream.PassThrough() + + request + .get(testImage) + .on('response', response => { + // console.log(response.statusCode) // 200 + // console.log(response.headers['content-type']) // 'image/png' + }) + .on('error', err => {}) + .pipe(s) return s }) - return im.get().then(function (stream) { + return im.get().then(function(stream) { // was our faked http request called? scope.isDone().should.eql(true) }) diff --git a/test/unit/storage.s3.js b/test/unit/storage.s3.js index 795b98e0..2dd4a5c7 100644 --- a/test/unit/storage.s3.js +++ b/test/unit/storage.s3.js @@ -1,23 +1,23 @@ -var AWS = require('aws-sdk-mock') -var fs = require('fs') -var path = require('path') -var should = require('should') -var sinon = require('sinon') -var Promise = require('bluebird') -var stream = require('stream') -var imageHandler = require(__dirname + '/../../dadi/lib/handlers/image') -var factory = require(__dirname + '/../../dadi/lib/storage/factory') -var DiskStorage = require(__dirname + '/../../dadi/lib/storage/disk') -var HTTPStorage = require(__dirname + '/../../dadi/lib/storage/http') -var S3Storage = require(__dirname + '/../../dadi/lib/storage/s3') -var cache = require(__dirname + '/../../dadi/lib/cache') - -var config -var stub -var testConfigString - -describe('Storage', function (done) { - beforeEach(function (done) { +const AWS = require('aws-sdk-mock') +const fs = require('fs') +const path = require('path') +const should = require('should') +const sinon = require('sinon') +const Promise = require('bluebird') +const stream = require('stream') +const imageHandler = require(__dirname + '/../../dadi/lib/handlers/image') +const factory = require(__dirname + '/../../dadi/lib/storage/factory') +const DiskStorage = require(__dirname + '/../../dadi/lib/storage/disk') +const HTTPStorage = require(__dirname + '/../../dadi/lib/storage/http') +const S3Storage = require(__dirname + '/../../dadi/lib/storage/s3') +const cache = require(__dirname + '/../../dadi/lib/cache') + +let config +let stub +let testConfigString + +describe('Storage', function(done) { + beforeEach(function(done) { delete require.cache[__dirname + '/../../config'] config = require(__dirname + '/../../config') @@ -27,31 +27,34 @@ describe('Storage', function (done) { done() }) - afterEach(function (done) { - setTimeout(function () { + afterEach(function(done) { + setTimeout(function() { fs.writeFileSync(config.configPath(), testConfigString) done() }, 1000) }) - describe('S3', function (done) { - it('should use bucket name from config when not specified in path', function () { - var newTestConfig = JSON.parse(testConfigString) + describe('S3', function(done) { + it('should use bucket name from config when not specified in path', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = true newTestConfig.images.s3.bucketName = 'test' newTestConfig.images.s3.region = 'eu-east-1' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + 
fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { - __cdnLegacyURLSyntax: true, - url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' + const req = { + url: '/test.jpg' } - var settings = config.get('images') - var s3Storage = new S3Storage({ + const settings = config.get('images') + const s3Storage = new S3Storage({ assetType: 'images', url: req.url }) @@ -59,64 +62,79 @@ describe('Storage', function (done) { s3Storage.getBucket().should.eql(settings.s3.bucketName) }) - it('should use correct key when s3 adapter is specified in config', function () { - var newTestConfig = JSON.parse(testConfigString) + it('should use correct key when s3 adapter is specified in config', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = true newTestConfig.images.s3.bucketName = 'test' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var spy = sinon.spy(factory, 'create') + const spy = sinon.spy(factory, 'create') - var req = { - __cdnLegacyURLSyntax: true, - url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' + const req = { + url: '/test.jpg' } // set expected key value - var expected = 'test.jpg' + const expected = 'test.jpg' - var testImage = path.join(path.resolve(config.get('images.directory.path')), '/test.jpg') + const testImage = path.join( + path.resolve(config.get('images.directory.path')), + '/test.jpg' + ) // stub the get method so it doesn't do anything - var get = sinon.stub(S3Storage.S3Storage.prototype, 'get').callsFake(function () { - return new Promise(function (resolve, reject) { - var readable = new fs.createReadStream(testImage) - resolve(readable) + const get = sinon + .stub(S3Storage.S3Storage.prototype, 'get') + .callsFake(function() { + return new Promise(function(resolve, reject) { + const readable = new fs.createReadStream(testImage) + + resolve(readable) + }) }) - }) // this is the test - var im = new imageHandler('jpg', req) - return im.get().then(function (stream) { + const im = new imageHandler('jpg', req) + + return im.get().then(function(stream) { factory.create.restore() S3Storage.S3Storage.prototype.get.restore() spy.called.should.eql(true) get.called.should.eql(true) - var returnValue = spy.firstCall.returnValue + const returnValue = spy.firstCall.returnValue + returnValue.getKey().should.eql(expected) }) }) - it('should use bucket name from path when specified', function () { - var newTestConfig = JSON.parse(testConfigString) + it('should use bucket name from path when specified', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = true newTestConfig.images.s3.bucketName = 'test' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { + const req = { url: '/s3/testBucket/test.jpg' } - var settings = config.get('images') - var s3Storage = new S3Storage({ + const settings = config.get('images') + const s3Storage = new S3Storage({ assetType: 'images', url: req.url }) @@ -124,21 +142,25 @@ describe('Storage', function (done) { 
s3Storage.getBucket().should.eql('testBucket') }) - it('should use correct key when s3 adapter is specified in path', function () { - var newTestConfig = JSON.parse(testConfigString) + it('should use correct key when s3 adapter is specified in path', function() { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = true newTestConfig.images.s3.bucketName = 'test' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { + const req = { url: '/s3/testBucket/test.jpg' } - var settings = config.get('images') - var s3Storage = new S3Storage({ + const settings = config.get('images') + const s3Storage = new S3Storage({ assetType: 'images', url: req.url }) @@ -146,36 +168,39 @@ describe('Storage', function (done) { s3Storage.getKey().should.eql('test.jpg') }) - it('should call AWS with the correct parameters', function (done) { - var newTestConfig = JSON.parse(testConfigString) + it('should call AWS with the correct parameters', function(done) { + const newTestConfig = JSON.parse(testConfigString) + newTestConfig.images.directory.enabled = false newTestConfig.images.s3.enabled = true newTestConfig.images.s3.bucketName = 'test' - fs.writeFileSync(config.configPath(), JSON.stringify(newTestConfig, null, 2)) + fs.writeFileSync( + config.configPath(), + JSON.stringify(newTestConfig, null, 2) + ) config.loadFile(config.configPath()) - var req = { - __cdnLegacyURLSyntax: true, - url: '/jpg/50/0/0/801/478/0/0/0/2/aspectfit/North/0/0/0/0/0/test.jpg' + const req = { + url: '/test.jpg' } // set expected key value - var expected = 'test.jpg' + const expected = 'test.jpg' // mock the s3 request - AWS.mock('S3', 'getObject', function (data) { + AWS.mock('S3', 'getObject', function(data) { AWS.restore() // here's the test // "data" contains the parameters passed to getObject - + data.Key.should.eql(expected) data.Bucket.should.eql(newTestConfig.images.s3.bucketName) done() }) - var im = new imageHandler('jpg', req) + const im = new imageHandler('jpg', req) im.get() }) diff --git a/test/unit/workspace.js b/test/unit/workspace.js index 1ea735ec..3cb410cc 100644 --- a/test/unit/workspace.js +++ b/test/unit/workspace.js @@ -3,9 +3,8 @@ const fs = require('fs-extra') const path = require('path') const should = require('should') const sinon = require('sinon') -const workspaceFactory = require( - __dirname + '/../../dadi/lib/models/workspace' -).factory +const workspaceFactory = require(__dirname + '/../../dadi/lib/models/workspace') + .factory /** * Generates a workspace file with the given type and content. @@ -16,35 +15,30 @@ const workspaceFactory = require( * @param {String} content * @return {String} The full path to the generated file */ -const mockWorkspaceFile = function ({ +const mockWorkspaceFile = function({ content = {}, delete: isDelete = false, domain, name, type }) { - let domainSubPath = domain + const domainSubPath = domain ? 
path.join(config.get('multiDomain.directory'), domain) : '' - let directory = path.resolve( - domainSubPath, - config.get(`paths.${type}`) - ) - let fullPath = path.join( - directory, - name - ) + const directory = path.resolve(domainSubPath, config.get(`paths.${type}`)) + const fullPath = path.join(directory, name) if (isDelete) { try { fs.unlinkSync(fullPath) return fullPath - } catch (err) {} + } catch (err) { + // no-op + } } else { - const serialisedContent = typeof content === 'string' - ? content - : JSON.stringify(content, null, 2) + const serialisedContent = + typeof content === 'string' ? content : JSON.stringify(content, null, 2) fs.ensureDirSync(directory) fs.writeFileSync(fullPath, serialisedContent) @@ -55,7 +49,7 @@ const mockWorkspaceFile = function ({ let workspace -describe('Workspace', function () { +describe('Workspace', function() { beforeEach(() => { workspace = workspaceFactory() }) @@ -99,12 +93,14 @@ describe('Workspace', function () { files['my-plugin'].should.be.Object files['my-plugin'].path.should.eql(samplePluginPath) files['my-plugin'].type.should.eql('plugins') - + // Recipe const source = require(sampleRecipePath) files['my-recipe'].should.be.Object - JSON.stringify(files['my-recipe'].source).should.eql(JSON.stringify(sampleRecipe)) + JSON.stringify(files['my-recipe'].source).should.eql( + JSON.stringify(sampleRecipe) + ) files['my-recipe'].path.should.eql(sampleRecipePath) files['my-recipe'].type.should.eql('recipes') @@ -117,12 +113,12 @@ describe('Workspace', function () { type: 'recipes', name: 'my-recipe.json', delete: true - }) + }) }) }) it('should read files from domain-specific workspace directories', () => { - let configBackup = config.get('multiDomain') + const configBackup = config.get('multiDomain') config.set('multiDomain.enabled', true) config.set('multiDomain.directory', 'domains') @@ -156,12 +152,16 @@ describe('Workspace', function () { const source = require(sampleRecipePath) files['testdomain.com:my-domain-recipe'].should.be.Object - JSON.stringify(files['testdomain.com:my-domain-recipe'].source).should.eql( - JSON.stringify(sampleRecipe) + JSON.stringify( + files['testdomain.com:my-domain-recipe'].source + ).should.eql(JSON.stringify(sampleRecipe)) + files['testdomain.com:my-domain-recipe'].path.should.eql( + sampleRecipePath ) - files['testdomain.com:my-domain-recipe'].path.should.eql(sampleRecipePath) files['testdomain.com:my-domain-recipe'].type.should.eql('recipes') - files['testdomain.com:my-domain-recipe'].domain.should.eql('testdomain.com') + files['testdomain.com:my-domain-recipe'].domain.should.eql( + 'testdomain.com' + ) mockWorkspaceFile({ type: 'plugins', @@ -175,7 +175,7 @@ describe('Workspace', function () { delete: true }) - config.set('multiDomain', configBackup) + config.set('multiDomain', configBackup) }) }) @@ -279,37 +279,34 @@ describe('Workspace', function () { content: sampleRecipe }) - return workspace.read().then(files => { - files['my-recipe'].should.be.Object - files['my-recipe'].source.settings.format.should.eql('png') + return workspace + .read() + .then(files => { + files['my-recipe'].should.be.Object + files['my-recipe'].source.settings.format.should.eql('png') - sampleRecipePath = mockWorkspaceFile({ - type: 'recipes', - name: 'my-recipe.json', - content: Object.assign( - {}, - sampleRecipe, - { - settings: Object.assign( - {}, - sampleRecipe.settings, - {format: 'jpg'} - ) - } - ) - }) + sampleRecipePath = mockWorkspaceFile({ + type: 'recipes', + name: 'my-recipe.json', + content: Object.assign({}, 
sampleRecipe, { + settings: Object.assign({}, sampleRecipe.settings, { + format: 'jpg' + }) + }) + }) - return workspace.read() - }).then(files => { - files['my-recipe'].should.be.Object - files['my-recipe'].source.settings.format.should.eql('jpg') + return workspace.read() + }) + .then(files => { + files['my-recipe'].should.be.Object + files['my-recipe'].source.settings.format.should.eql('jpg') - mockWorkspaceFile({ - type: 'recipes', - name: 'my-recipe.json', - delete: true + mockWorkspaceFile({ + type: 'recipes', + name: 'my-recipe.json', + delete: true + }) }) - }) }) }) @@ -317,20 +314,20 @@ describe('Workspace', function () { it('should generate a tree structure of the workspace files and save it internally', () => { return workspace.read().then(tree1 => { workspace.workspace.should.eql({}) - + return workspace.build().then(tree2 => { workspace.workspace.should.eql(tree1) workspace.workspace.should.eql(tree2) }) }) - }) + }) }) describe('get()', () => { it('should return the entire workspace tree when given no arguments', () => { return workspace.read().then(files => { return workspace.build().then(() => { - workspace.get().should.eql(files) + workspace.get().should.eql(files) }) }) }) @@ -362,12 +359,12 @@ describe('Workspace', function () { type: 'recipes', name: 'my-recipe.json', delete: true - }) + }) }) }) describe('by domain', () => { - let configBackup = config.get('multiDomain') + const configBackup = config.get('multiDomain') beforeEach(() => { config.set('multiDomain.enabled', true) @@ -396,7 +393,7 @@ describe('Workspace', function () { return workspace.build().then(() => { should.not.exist(workspace.get('my-recipe')) - let workspaceItem = workspace.get('my-recipe', 'testdomain.com') + const workspaceItem = workspace.get('my-recipe', 'testdomain.com') workspaceItem.should.be.Object workspaceItem.source.should.eql(sampleRecipe) @@ -432,7 +429,7 @@ describe('Workspace', function () { type: 'recipes', name: 'my-recipe.json', delete: true - }) + }) }) }) @@ -465,7 +462,9 @@ describe('Workspace', function () { return workspace.build().then(() => { workspace.get('my-recipe').source.settings.format.should.eql('png') - workspace.get('my-recipe', 'testdomain.com').source.settings.format.should.eql('jpg') + workspace + .get('my-recipe', 'testdomain.com') + .source.settings.format.should.eql('jpg') mockWorkspaceFile({ type: 'recipes', diff --git a/workspace/plugins/duotone-plugin.js b/workspace/plugins/duotone-plugin.js index dc1a42d3..90933a83 100644 --- a/workspace/plugins/duotone-plugin.js +++ b/workspace/plugins/duotone-plugin.js @@ -19,7 +19,7 @@ function hexToRgb(hex) { ? 
[ parseInt(result[1], 16), parseInt(result[2], 16), - parseInt(result[3], 16), + parseInt(result[3], 16) ] : null } @@ -72,11 +72,13 @@ module.exports.post = ({jsonData, options, processor, sharp, stream, url}) => { } return sharp(data, { - raw: info, - }).toFormat(options.format).toBuffer() + raw: info + }) + .toFormat(options.format) + .toBuffer() }) .then(buffer => { - let bufferStream = new PassThrough() + const bufferStream = new PassThrough() bufferStream.end(buffer) diff --git a/workspace/plugins/layout.js b/workspace/plugins/layout.js index 74a89eae..46d27a40 100644 --- a/workspace/plugins/layout.js +++ b/workspace/plugins/layout.js @@ -11,12 +11,12 @@ const TILE_TYPES = { OUTPUT: 'o:' } -function getValue (input) { +function getValue(input) { return parseInt(input.substring(2)) } -const ImageLayoutProcessor = function ({assetStore, cache, req, setHeader}) { - let parsedUrl = url.parse(req.url, true) +const ImageLayoutProcessor = function({assetStore, cache, req, setHeader}) { + const parsedUrl = url.parse(req.url, true) this.cache = cache this.assetStore = assetStore @@ -28,8 +28,8 @@ const ImageLayoutProcessor = function ({assetStore, cache, req, setHeader}) { this.newImage = null } -ImageLayoutProcessor.prototype.get = function () { - let cacheKey = this.req.url +ImageLayoutProcessor.prototype.get = function() { + const cacheKey = this.req.url return this.cache.get(cacheKey).then(cachedLayout => { if (cachedLayout) { @@ -59,18 +59,30 @@ ImageLayoutProcessor.prototype.get = function () { let resizedHeight if (input.fileName) { - let scaleWidth = (600 / input.originalImageSize.naturalWidth) - let scaleHeight = (600 / input.originalImageSize.naturalHeight) - let scale = Math.max(scaleWidth, scaleHeight) + const scaleWidth = 600 / input.originalImageSize.naturalWidth + const scaleHeight = 600 / input.originalImageSize.naturalHeight + const scale = Math.max(scaleWidth, scaleHeight) + + const calculatedWidth = + input.originalImageSize.naturalWidth * scale + const calculatedHeight = + input.originalImageSize.naturalHeight * scale + const sc = Math.max( + input.width / calculatedWidth, + input.height / calculatedHeight + ) - let calculatedWidth = input.originalImageSize.naturalWidth * scale - let calculatedHeight = input.originalImageSize.naturalHeight * scale - let sc = Math.max(input.width / calculatedWidth, input.height / calculatedHeight) resizedWidth = calculatedWidth * sc resizedHeight = calculatedHeight * sc - input.l = resizedWidth === input.width ? 0 : (resizedWidth - input.width) / 2 - input.t = resizedHeight === input.height ? 0 : (resizedHeight - input.height) / 2 + input.l = + resizedWidth === input.width + ? 0 + : (resizedWidth - input.width) / 2 + input.t = + resizedHeight === input.height + ? 0 + : (resizedHeight - input.height) / 2 } else { resizedWidth = input.width resizedHeight = input.height @@ -79,7 +91,10 @@ ImageLayoutProcessor.prototype.get = function () { // Read the overlay image, resive and composite it on the original Jimp.read(obj) .then(inputImage => { - inputImage.resize(Math.floor(resizedWidth), Math.floor(resizedHeight)) + inputImage.resize( + Math.floor(resizedWidth), + Math.floor(resizedHeight) + ) this.newImage.blit( inputImage, @@ -92,66 +107,79 @@ ImageLayoutProcessor.prototype.get = function () { ) cb() - }).catch(err => { + }) + .catch(err => { cb(err) }) } } - let instance = this + const instance = this // Create a blank canvas using the output file dimensions. 
- new Jimp(this.outputFile.width, this.outputFile.height, 0xff0000ff, (_err, image) => { - this.newImage = image - - let i = 0 - - this.inputs.forEach((input, index) => { - if (input.fileName) { - let imageSizeStream = new PassThrough() - let imageStream = new PassThrough() - - let concatStream = concat(obj => { - return addImage(input, obj, (err) => { - if (err) { - console.log(err) - } - - if (++i === this.inputs.length) { - return returnImage(instance) - } - }) - }) - - streams[index].pipe(imageSizeStream) - streams[index].pipe(imageStream) + new Jimp( + this.outputFile.width, + this.outputFile.height, + 0xff0000ff, + (_err, image) => { + this.newImage = image + + let i = 0 + + this.inputs.forEach((input, index) => { + if (input.fileName) { + const imageSizeStream = new PassThrough() + const imageStream = new PassThrough() + + const concatStream = concat(obj => { + return addImage(input, obj, err => { + if (err) { + console.log(err) + } - return this.getImageSize(imageSizeStream).then(imageInfo => { - input.originalImageSize = imageInfo - imageStream.pipe(concatStream) - }) - } else if (input.colour) { - // Create a colour tile. - new Jimp(input.width, input.height, `#${input.colour}`, (_err, image) => { - image.getBuffer(Jimp.MIME_PNG, (_err, buffer) => { - addImage(input, buffer, () => { if (++i === this.inputs.length) { return returnImage(instance) } }) }) - }) - } - }) - }) - function returnImage (instance) { - return instance.newImage - .getBuffer(instance.getContentType(), (err, outBuffer) => { - let cacheStream = new PassThrough() - let responseStream = new PassThrough() + streams[index].pipe(imageSizeStream) + streams[index].pipe(imageStream) + + return this.getImageSize(imageSizeStream).then(imageInfo => { + input.originalImageSize = imageInfo + imageStream.pipe(concatStream) + }) + } else if (input.colour) { + // Create a colour tile. 
+ new Jimp( + input.width, + input.height, + `#${input.colour}`, + (_err, image) => { + image.getBuffer(Jimp.MIME_PNG, (_err, buffer) => { + addImage(input, buffer, () => { + if (++i === this.inputs.length) { + return returnImage(instance) + } + }) + }) + } + ) + } + }) + } + ) + + function returnImage(instance) { + return instance.newImage.getBuffer( + instance.getContentType(), + (err, outBuffer) => { + const cacheStream = new PassThrough() + const responseStream = new PassThrough() + + const bufferStream = new PassThrough() - let bufferStream = new PassThrough() bufferStream.end(outBuffer) bufferStream.pipe(cacheStream) @@ -161,14 +189,15 @@ ImageLayoutProcessor.prototype.get = function () { instance.cache.set(cacheStream, cacheKey) return resolve(responseStream) - }) + } + ) } }) }) }) } -ImageLayoutProcessor.prototype.getContentType = function () { +ImageLayoutProcessor.prototype.getContentType = function() { switch (this.format.toLowerCase()) { case 'png': return 'image/png' @@ -182,7 +211,7 @@ ImageLayoutProcessor.prototype.getContentType = function () { } } -ImageLayoutProcessor.prototype.getImageSize = function (stream) { +ImageLayoutProcessor.prototype.getImageSize = function(stream) { return new Promise((resolve, reject) => { const size = imagesize() @@ -200,14 +229,14 @@ ImageLayoutProcessor.prototype.getImageSize = function (stream) { }) } -ImageLayoutProcessor.prototype.getFilename = function () { +ImageLayoutProcessor.prototype.getFilename = function() { return this.outputFile.fileName } -ImageLayoutProcessor.prototype.getInput = function (type, inputStr) { +ImageLayoutProcessor.prototype.getInput = function(type, inputStr) { const parts = inputStr.split(',') - let input = {} + const input = {} switch (type) { case TILE_TYPES.IMAGE: @@ -248,13 +277,13 @@ ImageLayoutProcessor.prototype.getInput = function (type, inputStr) { return input } -ImageLayoutProcessor.prototype.getLastModified = function () { +ImageLayoutProcessor.prototype.getLastModified = function() { if (!this.storageHandler || !this.storageHandler.getLastModified) return null return this.storageHandler.getLastModified() } -ImageLayoutProcessor.prototype.getOutputFile = function (inputStr) { +ImageLayoutProcessor.prototype.getOutputFile = function(inputStr) { const parts = inputStr.split(',') const output = { fileName: parts[0] @@ -278,11 +307,13 @@ ImageLayoutProcessor.prototype.getOutputFile = function (inputStr) { return output } -ImageLayoutProcessor.prototype.processUrl = function (requestPath) { - const pathParts = decodeURIComponent(requestPath).replace('/layout/', '').split('|') +ImageLayoutProcessor.prototype.processUrl = function(requestPath) { + const pathParts = decodeURIComponent(requestPath) + .replace('/layout/', '') + .split('|') pathParts.forEach(part => { - var type = part.substring(0, 2) + const type = part.substring(0, 2) switch (type) { case TILE_TYPES.IMAGE: @@ -298,5 +329,6 @@ ImageLayoutProcessor.prototype.processUrl = function (requestPath) { module.exports = options => { const layoutProcessor = new ImageLayoutProcessor(options) + return layoutProcessor.get() }