From be28b9f6f9b4e206f319797b4918b8a90648a4ec Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:06:09 +0100 Subject: [PATCH 01/24] removed unneeded default function arg preventing from decaffeination --- app/coffee/DockerLockManager.coffee | 2 +- app/coffee/LockManager.coffee | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/coffee/DockerLockManager.coffee b/app/coffee/DockerLockManager.coffee index 739f2cd..bf90f02 100644 --- a/app/coffee/DockerLockManager.coffee +++ b/app/coffee/DockerLockManager.coffee @@ -46,7 +46,7 @@ module.exports = LockManager = logger.error {key:key, lock: existingLock}, "tried to release lock that has gone" callback() - runWithLock: (key, runner = ( (releaseLock = (error) ->) -> ), callback = ( (error) -> )) -> + runWithLock: (key, runner, callback = ( (error) -> )) -> LockManager.getLock key, (error, lockValue) -> return callback(error) if error? runner (error1, args...) -> diff --git a/app/coffee/LockManager.coffee b/app/coffee/LockManager.coffee index afa3cca..5d9fe26 100644 --- a/app/coffee/LockManager.coffee +++ b/app/coffee/LockManager.coffee @@ -9,7 +9,7 @@ module.exports = LockManager = MAX_LOCK_WAIT_TIME: 15000 # 10s maximum time to spend trying to get the lock LOCK_STALE: 5*60*1000 # 5 mins time until lock auto expires - runWithLock: (path, runner = ((releaseLock = (error) ->) ->), callback = ((error) ->)) -> + runWithLock: (path, runner, callback = ((error) ->)) -> lockOpts = wait: @MAX_LOCK_WAIT_TIME pollPeriod: @LOCK_TEST_INTERVAL From 725074c09dd7443e42f57148ec0da4a70bf3a2b0 Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:10:00 +0100 Subject: [PATCH 02/24] decaffeinate: update build scripts to es --- .dockerignore | 2 -- .eslintrc | 65 +++++++++++++++++++++++++++++++++++++++++++++++++ .prettierrc | 8 ++++++ Dockerfile | 1 - Jenkinsfile | 7 ++++++ Makefile | 15 ++++++++---- buildscript.txt | 2 +- nodemon.json | 7 +++--- package.json | 18 ++++++-------- 9 files changed, 102 insertions(+), 23 deletions(-) create mode 100644 .eslintrc create mode 100644 .prettierrc diff --git a/.dockerignore b/.dockerignore index 386f26d..ba1c344 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,5 +5,3 @@ gitrev .npm .nvmrc nodemon.json -app.js -**/js/* diff --git a/.eslintrc b/.eslintrc new file mode 100644 index 0000000..42a4b5c --- /dev/null +++ b/.eslintrc @@ -0,0 +1,65 @@ +// this file was auto-generated, do not edit it directly. 
+// instead run bin/update_build_scripts from +// https://github.com/sharelatex/sharelatex-dev-environment +// Version: 1.3.5 +{ + "extends": [ + "standard", + "prettier", + "prettier/standard" + ], + "parserOptions": { + "ecmaVersion": 2017 + }, + "plugins": [ + "mocha", + "chai-expect", + "chai-friendly" + ], + "env": { + "node": true, + "mocha": true + }, + "rules": { + // Swap the no-unused-expressions rule with a more chai-friendly one + "no-unused-expressions": 0, + "chai-friendly/no-unused-expressions": "error" + }, + "overrides": [ + { + // Test specific rules + "files": ["test/**/*.js"], + "globals": { + "expect": true + }, + "rules": { + // mocha-specific rules + "mocha/handle-done-callback": "error", + "mocha/no-exclusive-tests": "error", + "mocha/no-global-tests": "error", + "mocha/no-identical-title": "error", + "mocha/no-nested-tests": "error", + "mocha/no-pending-tests": "error", + "mocha/no-skipped-tests": "error", + "mocha/no-mocha-arrows": "error", + + // chai-specific rules + "chai-expect/missing-assertion": "error", + "chai-expect/terminating-properties": "error", + + // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests. + // we don't enforce this at the top-level - just in tests to manage `this` scope + // based on mocha's context mechanism + "mocha/prefer-arrow-callback": "error" + } + }, + { + // Backend specific rules + "files": ["app/**/*.js", "app.js", "index.js"], + "rules": { + // don't allow console.log in backend code + "no-console": "error" + } + } + ] +} diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000..5845b82 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,8 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/sharelatex/sharelatex-dev-environment +# Version: 1.3.5 +{ + "semi": false, + "singleQuote": true +} diff --git a/Dockerfile b/Dockerfile index 9dcf70c..9faccd4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,7 +21,6 @@ RUN npm install --quiet COPY . /app -RUN npm run compile:all FROM base diff --git a/Jenkinsfile b/Jenkinsfile index 47a6638..c7b961e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -37,6 +37,13 @@ pipeline { } } + stage('Linting') { + steps { + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format' + sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint' + } + } + stage('Unit Tests') { steps { sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit' diff --git a/Makefile b/Makefile index 13785bc..88234f2 100644 --- a/Makefile +++ b/Makefile @@ -16,12 +16,17 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ clean: docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) - rm -f app.js - rm -rf app/js - rm -rf test/unit/js - rm -rf test/acceptance/js -test: test_unit test_acceptance +format: + $(DOCKER_COMPOSE) run --rm test_unit npm run format + +format_fix: + $(DOCKER_COMPOSE) run --rm test_unit npm run format:fix + +lint: + $(DOCKER_COMPOSE) run --rm test_unit npm run lint + +test: format lint test_unit test_acceptance test_unit: @[ ! 
-d test/unit ] && echo "clsi has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit diff --git a/buildscript.txt b/buildscript.txt index e6e202c..9cccf33 100644 --- a/buildscript.txt +++ b/buildscript.txt @@ -1,6 +1,6 @@ clsi --public-repo=True ---language=coffeescript +--language=es --env-add= --node-version=10.19.0 --acceptance-creds=None diff --git a/nodemon.json b/nodemon.json index 98db38d..5826281 100644 --- a/nodemon.json +++ b/nodemon.json @@ -10,10 +10,9 @@ }, "watch": [ - "app/coffee/", - "app.coffee", + "app/js/", + "app.js", "config/" ], - "ext": "coffee" - + "ext": "js" } diff --git a/package.json b/package.json index 24984d1..5539538 100644 --- a/package.json +++ b/package.json @@ -7,17 +7,15 @@ "url": "https://github.com/sharelatex/clsi-sharelatex.git" }, "scripts": { - "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", - "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", - "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js", - "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP", - "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js", - "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP", - "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee", - "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee", - "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests", + "start": "node $NODE_APP_OPTIONS app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", "nodemon": "nodemon --config nodemon.json", - "compile:smoke_tests": "[ ! 
-e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee" + "lint": "node_modules/.bin/eslint .", + "format": "node_modules/.bin/prettier-eslint '**/*.js' --list-different", + "format:fix": "node_modules/.bin/prettier-eslint '**/*.js' --write" }, "author": "James Allen ", "dependencies": { From e14da0f9a61f9cac49ce9872922122c2a786c864 Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:11:31 +0100 Subject: [PATCH 03/24] decaffeinate: update .gitignore --- .gitignore | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.gitignore b/.gitignore index 048a75b..912e380 100644 --- a/.gitignore +++ b/.gitignore @@ -1,13 +1,7 @@ **.swp node_modules -app/js -test/unit/js -test/smoke/js -test/acceptance/js test/acceptance/fixtures/tmp compiles -app.js -**/*.map .DS_Store *~ cache From f8fff476dd21c3b375614432b61904eb0383e326 Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:13:44 +0100 Subject: [PATCH 04/24] decaffeinate: add eslint and prettier packages --- npm-shrinkwrap.json | 3088 +++++++++++++++++++++++++++++++++++++++++++ package.json | 18 + 2 files changed, 3106 insertions(+) diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json index ac2f706..40fed3c 100644 --- a/npm-shrinkwrap.json +++ b/npm-shrinkwrap.json @@ -4,6 +4,166 @@ "lockfileVersion": 1, "requires": true, "dependencies": { + "@babel/code-frame": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", + "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "dev": true, + "requires": { + "@babel/highlight": "^7.8.3" + } + }, + "@babel/generator": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.8.4.tgz", + "integrity": "sha512-PwhclGdRpNAf3IxZb0YVuITPZmmrXz9zf6fH8lT4XbrmfQKr6ryBzhv593P5C6poJRciFCL/eHGW2NuGrgEyxA==", + "dev": true, + "requires": { + "@babel/types": "^7.8.3", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true + } + } + }, + "@babel/helper-function-name": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz", + "integrity": "sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==", + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.8.3", + "@babel/template": "^7.8.3", + "@babel/types": "^7.8.3" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", + "integrity": "sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", + "dev": true, + "requires": { + "@babel/types": "^7.8.3" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", + "integrity": 
"sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", + "dev": true, + "requires": { + "@babel/types": "^7.8.3" + } + }, + "@babel/highlight": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz", + "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==", + "dev": true, + "requires": { + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.8.4.tgz", + "integrity": "sha512-0fKu/QqildpXmPVaRBoXOlyBb3MC+J0A66x97qEfLOMkn3u6nfY5esWogQwi/K0BjASYy4DbnsEWnpNL6qT5Mw==", + "dev": true + }, + "@babel/runtime": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.8.4.tgz", + "integrity": "sha512-neAp3zt80trRVBI1x0azq6c57aNBqYZH8KhMm3TaB7wEI5Q4A2SHfBHE8w9gOhI/lrqxtEbXZgQIrHP+wvSGwQ==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.2" + } + }, + "@babel/template": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.3.tgz", + "integrity": "sha512-04m87AcQgAFdvuoyiQ2kgELr2tV8B4fP/xJAVUL3Yb3bkNdMedD3d0rlSQr3PegP0cms3eHjl1F7PWlvWbU8FQ==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.8.3", + "@babel/parser": "^7.8.3", + "@babel/types": "^7.8.3" + } + }, + "@babel/traverse": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.8.4.tgz", + "integrity": "sha512-NGLJPZwnVEyBPLI+bl9y9aSnxMhsKz42so7ApAv9D+b4vAFPpY013FTS9LdKxcABoIYFU52HcYga1pPlx454mg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.8.3", + "@babel/generator": "^7.8.4", + "@babel/helper-function-name": "^7.8.3", + "@babel/helper-split-export-declaration": "^7.8.3", + "@babel/parser": "^7.8.4", + "@babel/types": "^7.8.3", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "@babel/types": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.8.3.tgz", + "integrity": "sha512-jBD+G8+LWpMBBWvVcdr4QysjUE4mU/syrhN17o1u3gx0/WzJB1kwiVZAXRtWbsIPOwW8pF/YJV5+nmetPzepXg==", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + }, + "dependencies": { + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + } + } + }, "@google-cloud/common": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.2.3.tgz", @@ -704,11 +864,23 @@ 
"@types/node": "*" } }, + "@types/eslint-visitor-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", + "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", + "dev": true + }, "@types/geojson": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-1.0.6.tgz", "integrity": "sha1-PgKXJyjGkkjCrwjWCkjLuGgP/98=" }, + "@types/json-schema": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", + "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==", + "dev": true + }, "@types/long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz", @@ -752,6 +924,59 @@ "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz", "integrity": "sha512-SCcK7mvGi3+ZNz833RRjFIxrn4gI1PPR3NtuIS+6vMkvmsGjosqTJwRt5bAEFLRz+wtJMWv8+uOnZf2hi2QXTg==" }, + "@typescript-eslint/experimental-utils": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-1.13.0.tgz", + "integrity": "sha512-zmpS6SyqG4ZF64ffaJ6uah6tWWWgZ8m+c54XXgwFtUv0jNz8aJAVx8chMCvnk7yl6xwn8d+d96+tWp7fXzTuDg==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.3", + "@typescript-eslint/typescript-estree": "1.13.0", + "eslint-scope": "^4.0.0" + }, + "dependencies": { + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + } + } + }, + "@typescript-eslint/parser": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-1.13.0.tgz", + "integrity": "sha512-ITMBs52PCPgLb2nGPoeT4iU3HdQZHcPaZVw+7CsFagRJHUhyeTgorEwHXhFf3e7Evzi8oujKNpHc8TONth8AdQ==", + "dev": true, + "requires": { + "@types/eslint-visitor-keys": "^1.0.0", + "@typescript-eslint/experimental-utils": "1.13.0", + "@typescript-eslint/typescript-estree": "1.13.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "@typescript-eslint/typescript-estree": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-1.13.0.tgz", + "integrity": "sha512-b5rCmd2e6DCC6tCTN9GSUAuxdYwCM/k/2wdjHGrIRGPSJotWMCe/dGpi66u42bhuh8q3QBzqM4TMA1GUUCJvdw==", + "dev": true, + "requires": { + "lodash.unescape": "4.0.1", + "semver": "5.5.0" + }, + "dependencies": { + "semver": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==", + "dev": true + } + } + }, "JSONStream": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.2.tgz", @@ -788,6 +1013,12 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.3.0.tgz", "integrity": "sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA==" }, + "acorn-jsx": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz", + "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==", 
+ "dev": true + }, "agent-base": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", @@ -807,11 +1038,29 @@ "uri-js": "^4.2.2" } }, + "ansi-escapes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", + "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } + }, "ansi-regex": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, "aproba": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", @@ -826,11 +1075,59 @@ "readable-stream": "^2.0.6" } }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "aria-query": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", + "integrity": "sha1-ZbP8wcoRVajJrmTW7uKX8V1RM8w=", + "dev": true, + "requires": { + "ast-types-flow": "0.0.7", + "commander": "^2.11.0" + }, + "dependencies": { + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + } + } + }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" }, + "array-includes": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", + "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0", + "is-string": "^1.0.5" + } + }, + "array.prototype.flat": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz", + "integrity": "sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, "arrify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", @@ -855,6 +1152,18 @@ "integrity": "sha1-x/hUOP3UZrx8oWq5DIFRN5el0js=", "dev": true }, + "ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0=", + "dev": true + }, + "astral-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", + "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", + "dev": true + }, "async": { "version": "0.2.9", "resolved": 
"https://registry.npmjs.org/async/-/async-0.2.9.tgz", @@ -893,6 +1202,26 @@ "is-buffer": "^2.0.2" } }, + "axobject-query": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.1.2.tgz", + "integrity": "sha512-ICt34ZmrVt8UQnvPl6TVyDTkmhXmAyAT4Jh5ugfGUX4MOrZ+U/ZY6/sdylRw3qGNr9Ub5AJsaHeDMzNLehRdOQ==", + "dev": true + }, + "babel-eslint": { + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.0.3.tgz", + "integrity": "sha512-z3U7eMY6r/3f3/JB9mTsLjyxrv0Yb1zb8PCWCLpguxfCzBIZUwy23R1t/XKewP+8mEN2Ck8Dtr4q20z6ce6SoA==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.0.0", + "@babel/traverse": "^7.0.0", + "@babel/types": "^7.0.0", + "eslint-visitor-keys": "^1.0.0", + "resolve": "^1.12.0" + } + }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -960,6 +1289,12 @@ "type-is": "~1.6.16" } }, + "boolify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/boolify/-/boolify-1.0.1.tgz", + "integrity": "sha1-tcCeF8rNET0Rt7s+04TMASmU2Gs=", + "dev": true + }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1039,6 +1374,29 @@ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "camelcase-keys": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.1.2.tgz", + "integrity": "sha512-QfFrU0CIw2oltVvpndW32kuJ/9YOJwUnmWrjlXt1nnJZHCaS9i6bfOpg9R4Lw8aZjStkJWM+jc0cdXjWBgVJSw==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + } + }, "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", @@ -1054,11 +1412,111 @@ "deep-eql": "0.1.3" } }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "dependencies": { + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": 
true + }, "chownr": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz", "integrity": "sha1-VHJri4//TfBTxCGH6AH7RBLfFJQ=" }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-width": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", + "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, "cls-bluebird": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cls-bluebird/-/cls-bluebird-2.1.0.tgz", @@ -1084,6 +1542,21 @@ "integrity": "sha1-bdTeHrYveE2MjYCWdVLLpUf/2d4=", "dev": true }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, "combined-stream": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz", @@ -1097,6 +1570,12 @@ "resolved": "http://registry.npmjs.org/commander/-/commander-2.0.0.tgz", "integrity": "sha1-0bhvkB+LZL2UG96tr5JFMDk76Sg=" }, + "common-tags": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", + "integrity": 
"sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", + "dev": true + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -1123,6 +1602,12 @@ "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" }, + "contains-path": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", + "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", + "dev": true + }, "content-disposition": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", @@ -1152,16 +1637,41 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, + "core-js": { + "version": "3.6.4", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.4.tgz", + "integrity": "sha512-4paDGScNgZP2IXXilaffL9X7968RuvwlkK3xWtZRVqgd8SYNiVKRJvkFd1aqqEuPfN7E68ZHEp9hDj6lHj4Hyw==", + "dev": true + }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, "d64": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz", "integrity": "sha1-QAKofoUMv8n52XBrYPymE6MzbpA=" }, + "damerau-levenshtein": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.6.tgz", + "integrity": "sha512-JVrozIeElnj3QzfUIt8tB8YMluBJom4Vw9qTPpjGYQ9fYlB3D/rb6OordUxf3xeFB35LKWs0xqcO5U6ySvBtug==", + "dev": true + }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", @@ -1178,6 +1688,12 @@ "ms": "2.0.0" } }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, "deep-eql": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", @@ -1192,6 +1708,21 @@ "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", "integrity": "sha1-xPp8lUBKF6nD6Mp+FTcxK3NjMKw=" }, + "deep-is": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", + "dev": true + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "requires": { + "object-keys": "^1.0.12" + } + }, "delay": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/delay/-/delay-4.3.0.tgz", @@ -1227,6 +1758,12 @@ "resolved": "https://registry.npmjs.org/diff/-/diff-1.0.7.tgz", "integrity": "sha1-JLuwAcSn1VIhaefKvbLCgU7ZHPQ=" }, + "dlv": { + "version": "1.1.3", 
+ "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true + }, "docker-modem": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-1.0.7.tgz", @@ -1284,6 +1821,15 @@ "tar-fs": "~1.16.3" } }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, "dot-prop": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.1.1.tgz", @@ -1345,6 +1891,12 @@ "shimmer": "^1.2.0" } }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -1363,6 +1915,45 @@ "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "es-abstract": { + "version": "1.17.4", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.4.tgz", + "integrity": "sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, "es6-promise": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", @@ -1387,6 +1978,470 @@ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", "dev": true }, + "eslint": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.6.0.tgz", + "integrity": "sha512-PpEBq7b6qY/qrOmpYQ/jTMDYfuQMELR4g4WI1M/NaSDDD/bdcMb+dj4Hgks7p41kW2caXsPsEZAEAyAgjVVC0g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + 
"is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "ajv": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.11.0.tgz", + "integrity": "sha512-nCprB/0syFYy9fVYU1ox1l2KN8S9I+tziH8D4zdZuLT3N6RMlGSGt5FSTpAiHB/Whv8Qs1cWHma1aMKZyaHRKA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "fast-deep-equal": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", + "dev": true + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "0.0.8" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "dev": true + } + } + }, + "eslint-config-prettier": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.10.0.tgz", + "integrity": "sha512-AtndijGte1rPILInUdHjvKEGbIV06NuvPrqlIEaEaWtbtvJh464mDeyGMdZEQMsGvC0ZVkiex1fSNcC4HAbRGg==", + "dev": true, + "requires": { + "get-stdin": "^6.0.0" + } + }, + "eslint-config-standard": { + "version": "14.1.0", + 
"resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-14.1.0.tgz", + "integrity": "sha512-EF6XkrrGVbvv8hL/kYa/m6vnvmUT+K82pJJc4JJVMM6+Qgqh0pnwprSxdduDLB9p/7bIxD+YV5O0wfb8lmcPbA==", + "dev": true + }, + "eslint-config-standard-jsx": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-8.1.0.tgz", + "integrity": "sha512-ULVC8qH8qCqbU792ZOO6DaiaZyHNS/5CZt3hKqHkEhVlhPEPN3nfBqqxJCyp59XrjIBZPu1chMYe9T2DXZ7TMw==", + "dev": true + }, + "eslint-config-standard-react": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-react/-/eslint-config-standard-react-9.2.0.tgz", + "integrity": "sha512-u+KRP2uCtthZ/W4DlLWCC59GZNV/y9k9yicWWammgTs/Omh8ZUUPF3EnYm81MAcbkYQq2Wg0oxutAhi/FQ8mIw==", + "dev": true, + "requires": { + "eslint-config-standard-jsx": "^8.0.0" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.3.tgz", + "integrity": "sha512-b8crLDo0M5RSe5YG8Pu2DYBj71tSB6OvXkfzwbJU2w7y8P4/yo0MyF8jU26IEuEuHF2K5/gcAJE3LhQGqBBbVg==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "resolve": "^1.13.1" + }, + "dependencies": { + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + } + } + }, + "eslint-module-utils": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.5.2.tgz", + "integrity": "sha512-LGScZ/JSlqGKiT8OC+cYRxseMjyqt6QO54nl281CK93unD89ijSeRV6An8Ci/2nvWVKe8K/Tqdm75RQoIOCr+Q==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "pkg-dir": "^2.0.0" + } + }, + "eslint-plugin-chai-expect": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-2.1.0.tgz", + "integrity": "sha512-rd0/4mjMV6c3i0o4DKkWI4uaFN9DK707kW+/fDphaDI6HVgxXnhML9Xgt5vHnTXmSSnDhupuCFBgsEAEpchXmQ==", + "dev": true + }, + "eslint-plugin-chai-friendly": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.5.0.tgz", + "integrity": "sha512-Pxe6z8C9fP0pn2X2nGFU/b3GBOCM/5FVus1hsMwJsXP3R7RiXFl7g0ksJbsc0GxiLyidTW4mEFk77qsNn7Tk7g==", + "dev": true + }, + "eslint-plugin-es": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.0.tgz", + "integrity": "sha512-6/Jb/J/ZvSebydwbBJO1R9E5ky7YeElfK56Veh7e4QGFHCXoIXGH9HhVz+ibJLM3XJ1XjP+T7rKBLUa/Y7eIng==", + "dev": true, + "requires": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "regexpp": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.0.0.tgz", + "integrity": "sha512-Z+hNr7RAVWxznLPuA7DIh8UNX1j9CDrUQxskw9IrBE1Dxue2lyXT+shqEIeLUjrokxIP8CMy1WkjgG3rTsd5/g==", + "dev": true + } + } + }, + "eslint-plugin-import": { + "version": "2.20.1", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", + "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", + "dev": true, + "requires": { + "array-includes": "^3.0.3", + "array.prototype.flat": "^1.2.1", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.1", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + } + } + } + }, + "eslint-plugin-jsx-a11y": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", + "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", + "dev": true, + "requires": { + "@babel/runtime": "^7.4.5", + "aria-query": "^3.0.0", + "array-includes": "^3.0.3", + "ast-types-flow": "^0.0.7", + "axobject-query": "^2.0.2", + "damerau-levenshtein": "^1.0.4", + "emoji-regex": "^7.0.2", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.1" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + } + } + }, + "eslint-plugin-mocha": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-6.2.2.tgz", + "integrity": "sha512-oNhPzfkT6Q6CJ0HMVJ2KLxEWG97VWGTmuHOoRcDLE0U88ugUyFNV9wrT2XIt5cGtqc5W9k38m4xTN34L09KhBA==", + "dev": true, + "requires": { + "ramda": "^0.26.1" + } + }, + "eslint-plugin-node": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.0.0.tgz", + "integrity": "sha512-chUs/NVID+sknFiJzxoN9lM7uKSOEta8GC8365hw1nDfwIPIjjpRSwwPvQanWv8dt/pDe9EV4anmVSwdiSndNg==", + "dev": true, + "requires": { + "eslint-plugin-es": "^3.0.0", + "eslint-utils": "^2.0.0", + "ignore": "^5.1.1", + "minimatch": "^3.0.4", + "resolve": "^1.10.1", + "semver": "^6.1.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "ignore": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", + "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "eslint-plugin-prettier": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.2.tgz", + "integrity": "sha512-GlolCC9y3XZfv3RQfwGew7NnuFDKsfI4lbvRK+PIIo23SFH+LemGs4cKwzAaRa+Mdb+lQO/STaIayno8T5sJJA==", + "dev": 
true, + "requires": { + "prettier-linter-helpers": "^1.0.0" + } + }, + "eslint-plugin-promise": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-4.2.1.tgz", + "integrity": "sha512-VoM09vT7bfA7D+upt+FjeBO5eHIJQBUWki1aPvB+vbNiHS3+oGIJGIeyBtKQTME6UPXXy3vV07OL1tHd3ANuDw==", + "dev": true + }, + "eslint-plugin-react": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.18.3.tgz", + "integrity": "sha512-Bt56LNHAQCoou88s8ViKRjMB2+36XRejCQ1VoLj716KI1MoE99HpTVvIThJ0rvFmG4E4Gsq+UgToEjn+j044Bg==", + "dev": true, + "requires": { + "array-includes": "^3.1.1", + "doctrine": "^2.1.0", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.3", + "object.entries": "^1.1.1", + "object.fromentries": "^2.0.2", + "object.values": "^1.1.1", + "prop-types": "^15.7.2", + "resolve": "^1.14.2", + "string.prototype.matchall": "^4.0.2" + }, + "dependencies": { + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + } + } + }, + "eslint-plugin-standard": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-4.0.1.tgz", + "integrity": "sha512-v/KBnfyaOMPmZc/dmc6ozOdWqekGp7bBGq4jLAecEfPGmfKiWS4sA8sC0LqiV9w5qmXAtXVn4M3p1jSyhY85SQ==", + "dev": true + }, + "eslint-scope": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", + "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", + "dev": true + }, + "espree": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", + "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", + "dev": true, + "requires": { + "acorn": "^7.1.0", + "acorn-jsx": "^5.1.0", + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "acorn": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", + "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "dev": true + } + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": 
"sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "esquery": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.1.0.tgz", + "integrity": "sha512-MxYW9xKmROWF672KqjO75sszsA8Mxhw06YFeS5VHlB98KDHbOSurm3ArsjO60Eaf3QmGMCP1yn+0JQkNLo/97Q==", + "dev": true, + "requires": { + "estraverse": "^4.0.0" + } + }, + "esrecurse": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", + "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "dev": true, + "requires": { + "estraverse": "^4.1.0" + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, "etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", @@ -1455,6 +2510,28 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha1-+LETa0Bx+9jrFAr/hYsQGewpFfo=" }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "dependencies": { + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + } + } + }, "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -1465,16 +2542,46 @@ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" }, + "fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, "fast-json-stable-stringify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true + }, "fast-text-encoding": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", "integrity": "sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==" }, + "figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + 
"requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, + "requires": { + "flat-cache": "^2.0.1" + } + }, "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", @@ -1501,11 +2608,48 @@ } } }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, "findit2": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", "integrity": "sha1-WKRmaX34piBc39vzlVNri9d3pfY=" }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + }, + "dependencies": { + "rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + } + } + }, + "flatted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", + "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==", + "dev": true + }, "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", @@ -1577,6 +2721,18 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true + }, "gauge": { "version": "2.7.4", "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", @@ -1618,6 +2774,18 @@ "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.4.2.tgz", "integrity": "sha1-kv9xllINZwg5pnMICSoSqt8valk=" }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-stdin": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", + "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", + "dev": true + }, "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", @@ -1639,6 +2807,21 @@ "path-is-absolute": "^1.0.0" } }, + "glob-parent": { + "version": "5.1.0", + 
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", + "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, "google-auth-library": { "version": "5.5.1", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.5.1.tgz", @@ -1734,12 +2917,36 @@ "har-schema": "^2.0.0" } }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + }, "has-flag": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=", "dev": true }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", @@ -1764,6 +2971,12 @@ "resolved": "https://registry.npmjs.org/hex2dec/-/hex2dec-1.1.2.tgz", "integrity": "sha512-Yu+q/XWr2fFQ11tHxPq4p4EiNkb2y+lAacJNhAdRXVfRIcDH6gi7htWFnnlIzvqHMHoWeIsfXlNAjZInpAOJDA==" }, + "hosted-git-info": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", + "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==", + "dev": true + }, "http-errors": { "version": "1.6.3", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", @@ -1836,6 +3049,12 @@ "safer-buffer": ">= 2.1.2 < 3" } }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + }, "ignore-walk": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.1.tgz", @@ -1844,6 +3063,28 @@ "minimatch": "^3.0.4" } }, + "import-fresh": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", + "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": 
"sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true + }, "inflection": { "version": "1.12.0", "resolved": "https://registry.npmjs.org/inflection/-/inflection-1.12.0.tgz", @@ -1868,6 +3109,97 @@ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", "integrity": "sha1-7uJfVtscnsYIXgwid4CD9Zar+Sc=" }, + "inquirer": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", + "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^2.4.2", + "cli-cursor": "^3.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.2.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + } + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + } + } + } + } + }, + "internal-slot": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.2.tgz", + "integrity": "sha512-2cQNfwhAfJIkU4KZPkDI+Gj5yNNnbqi40W9Gge6dfnk4TocEVm00B3bdiL+JINrbGJil2TeHvM4rETGzk/f/0g==", + "dev": true, + "requires": { + "es-abstract": "^1.17.0-next.1", + "has": "^1.0.3", + "side-channel": "^1.0.2" + } + }, "ipaddr.js": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz", @@ -1878,6 +3210,12 @@ "resolved": 
"https://registry.npmjs.org/is/-/is-3.3.0.tgz", "integrity": "sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==" }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, "is-bluebird": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-bluebird/-/is-bluebird-1.0.2.tgz", @@ -1888,6 +3226,24 @@ "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "dev": true + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, "is-fullwidth-code-point": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", @@ -1896,11 +3252,35 @@ "number-is-nan": "^1.0.0" } }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, "is-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" }, + "is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", + "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, "is-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", @@ -1911,6 +3291,21 @@ "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.1" + } + }, "is-typedarray": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -1921,6 +3316,12 @@ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", @@ -1947,11 +3348,33 @@ } } }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true + }, "json-bigint": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", @@ -1970,6 +3393,12 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha1-afaofZUTq4u4/mO9sJecRI5oRmA=" }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", @@ -2007,6 +3436,16 @@ "verror": "1.10.0" } }, + "jsx-ast-utils": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.2.3.tgz", + "integrity": "sha512-EdIHFMm+1BPynpKOpdPqiOsvnIrInRGJD7bzPZdPkjitQEqpdpUuFpq4T0npZFKTiB3RhWFdGN+oqOJIdhDhQA==", + "dev": true, + "requires": { + "array-includes": "^3.0.3", + "object.assign": "^4.1.0" + } + }, "jwa": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", @@ -2026,6 +3465,52 @@ "safe-buffer": "^5.0.1" } }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + }, + "dependencies": { + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true + }, + "pify": { + "version": 
"2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, "lockfile": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz", @@ -2054,11 +3539,29 @@ "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=" }, + "lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", + "dev": true + }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, "lodash.pickby": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/lodash.pickby/-/lodash.pickby-4.6.0.tgz", "integrity": "sha1-feoh2MGNdwOifHBMFdO4SmfjOv8=" }, + "lodash.unescape": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", + "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=", + "dev": true + }, "log-driver": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", @@ -2105,11 +3608,63 @@ } } }, + "loglevel": { + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.7.tgz", + "integrity": "sha512-cY2eLFrQSAfVPhCgH1s7JI73tMbg9YC3v3+ZHVW67sBS7UxWzNEk/ZBbSfLykBWHp33dqqtOv82gjhKEi81T/A==", + "dev": true + }, + "loglevel-colored-level-prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/loglevel-colored-level-prefix/-/loglevel-colored-level-prefix-1.0.0.tgz", + "integrity": "sha1-akAhj9x64V/HbD0PPmdsRlOIYD4=", + "dev": true, + "requires": { + "chalk": "^1.1.3", + "loglevel": "^1.4.1" + }, + "dependencies": { + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "dev": true + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "dev": true, + "requires": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "dev": true + } + } + }, "long": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" }, + "loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dev": true, + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, "lru-cache": { "version": "5.1.1", "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", @@ -2132,6 +3687,24 @@ "statsd-parser": "~0.0.4" } }, + "make-plural": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/make-plural/-/make-plural-4.3.0.tgz", + "integrity": "sha512-xTYd4JVHpSCW+aqDof6w/MebaMVNTVYBZhbB/vi513xXdiPT92JMVCo0Jq8W2UZnzYRFeVbQiQ+I25l13JuKvA==", + "dev": true, + "requires": { + "minimist": "^1.2.0" + }, + "dependencies": { + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "dev": true, + "optional": true + } + } + }, "map-obj": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz", @@ -2152,6 +3725,29 @@ "resolved": "https://registry.npmjs.org/mersenne/-/mersenne-0.0.4.tgz", "integrity": "sha1-QB/ex+whzbngPNPTAhOY2iGycIU=" }, + "messageformat": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/messageformat/-/messageformat-2.3.0.tgz", + "integrity": "sha512-uTzvsv0lTeQxYI2y1NPa1lItL5VRI8Gb93Y2K2ue5gBPyrbJxfDi/EYWxh2PKv5yO42AJeeqblS9MJSh/IEk4w==", + "dev": true, + "requires": { + "make-plural": "^4.3.0", + "messageformat-formatters": "^2.0.1", + "messageformat-parser": "^4.1.2" + } + }, + "messageformat-formatters": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/messageformat-formatters/-/messageformat-formatters-2.0.1.tgz", + "integrity": "sha512-E/lQRXhtHwGuiQjI7qxkLp8AHbMD5r2217XNe/SREbBlSawe0lOqsFb7rflZJmlQFSULNLIqlcjjsCPlB3m3Mg==", + "dev": true + }, + "messageformat-parser": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/messageformat-parser/-/messageformat-parser-4.1.2.tgz", + "integrity": "sha512-7dWuifeyldz7vhEuL96Kwq1fhZXBW+TUfbnHN4UCrCxoXQTYjHnR78eI66Gk9LaLLsAvzPNVJBaa66DRfFNaiA==", + "dev": true + }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -2206,6 +3802,12 @@ "mime-db": "~1.37.0" } }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", @@ -2334,6 +3936,12 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, + "mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", @@ -2426,6 +4034,12 @@ "resolved": "https://registry.npmjs.org/natives/-/natives-1.1.6.tgz", "integrity": "sha1-pgO0pJirdxc2ErnqGs3sTZgPALs=" }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, "ncp": { "version": "2.0.0", "resolved": "http://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", @@ -2447,6 +4061,12 @@ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": 
"sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, "node-fetch": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", @@ -2493,6 +4113,18 @@ "osenv": "^0.1.4" } }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, "npm-bundled": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.5.tgz", @@ -2533,6 +4165,66 @@ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "object.entries": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.1.tgz", + "integrity": "sha512-ilqR7BgdyZetJutmDPfXCDffGa0/Yzl2ivVNpbx/g4UeWrCdRnFDUBrKJGLhGieRHDATnyZXWBeCb29k9CJysQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.fromentries": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.2.tgz", + "integrity": "sha512-r3ZiBH7MQppDJVLx6fhD618GKNG40CZYH9wgwdhKxBDDbQgjeWGGd4AtkZad84d291YxvWe7bJGuE65Anh0dxQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.values": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", + "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, "on-finished": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", @@ -2549,6 +4241,29 @@ "wrappy": "1" } }, + "onetime": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + 
"optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, "os-homedir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", @@ -2576,16 +4291,60 @@ "p-try": "^2.0.0" } }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + }, + "dependencies": { + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + } + } + }, "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } + }, "parse-duration": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-0.1.1.tgz", "integrity": "sha1-ExFN3JiRwezSgANiRFVN5DZHoiY=" }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "^1.2.0" + } + }, "parse-ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", @@ -2596,11 +4355,29 @@ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, + "path-is-inside": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", + "dev": true + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true + }, "path-parse": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", @@ -2611,6 +4388,23 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, + "path-type": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "requires": { + "pify": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -2621,6 +4415,646 @@ "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" }, + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "dev": true, + "requires": { + "find-up": "^2.1.0" + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true + }, + "prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true + }, + "prettier-eslint": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/prettier-eslint/-/prettier-eslint-9.0.1.tgz", + "integrity": "sha512-KZT65QTosSAqBBqmrC+RpXbsMRe7Os2YSR9cAfFbDlyPAopzA/S5bioiZ3rpziNQNSJaOxmtXSx07EQ+o2Dlug==", + "dev": true, + "requires": { + "@typescript-eslint/parser": "^1.10.2", + "common-tags": "^1.4.0", + "core-js": "^3.1.4", + "dlv": "^1.1.0", + "eslint": "^5.0.0", + "indent-string": "^4.0.0", + "lodash.merge": "^4.6.0", + "loglevel-colored-level-prefix": "^1.0.0", + "prettier": "^1.7.0", + "pretty-format": "^23.0.1", + "require-relative": "^0.8.7", + "typescript": "^3.2.1", + "vue-eslint-parser": "^2.0.2" + }, + "dependencies": { + "ajv": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.11.0.tgz", + "integrity": "sha512-nCprB/0syFYy9fVYU1ox1l2KN8S9I+tziH8D4zdZuLT3N6RMlGSGt5FSTpAiHB/Whv8Qs1cWHma1aMKZyaHRKA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": 
"sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + } + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "requires": { + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "fast-deep-equal": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", + "dev": true + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": 
"sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "0.0.8" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "prettier-eslint-cli": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/prettier-eslint-cli/-/prettier-eslint-cli-5.0.0.tgz", + "integrity": "sha512-cei9UbN1aTrz3sQs88CWpvY/10PYTevzd76zoG1tdJ164OhmNTFRKPTOZrutVvscoQWzbnLKkviS3gu5JXwvZg==", + "dev": true, + "requires": { + "arrify": "^2.0.1", + "boolify": "^1.0.0", + "camelcase-keys": "^6.0.0", + "chalk": "^2.4.2", + "common-tags": "^1.8.0", + "core-js": "^3.1.4", + "eslint": "^5.0.0", + "find-up": "^4.1.0", + "get-stdin": "^7.0.0", + "glob": "^7.1.4", + "ignore": "^5.1.2", + "lodash.memoize": "^4.1.2", + "loglevel-colored-level-prefix": "^1.0.0", + "messageformat": "^2.2.1", + "prettier-eslint": "^9.0.0", + "rxjs": "^6.5.2", + "yargs": "^13.2.4" + }, + "dependencies": { + "ajv": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.11.0.tgz", + "integrity": "sha512-nCprB/0syFYy9fVYU1ox1l2KN8S9I+tziH8D4zdZuLT3N6RMlGSGt5FSTpAiHB/Whv8Qs1cWHma1aMKZyaHRKA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-escapes": { + "version": "3.2.0", + "resolved": 
"https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "eslint": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", + "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.9.1", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^4.0.3", + "eslint-utils": "^1.3.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^5.0.1", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob": "^7.1.2", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^6.2.2", + "js-yaml": "^3.13.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.11", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "path-is-inside": "^1.0.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^5.5.1", + "strip-ansi": "^4.0.0", + "strip-json-comments": "^2.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0" + }, + "dependencies": { + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + } + } + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", + "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", + "dev": true, + "requires": { + "acorn": "^6.0.7", + "acorn-jsx": "^5.0.0", + "eslint-visitor-keys": "^1.0.0" + } + }, + "fast-deep-equal": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", + "dev": true + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + 
"find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "get-stdin": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-7.0.0.tgz", + "integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==", + "dev": true + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", + "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==", + "dev": true + }, + "inquirer": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.2.tgz", + "integrity": "sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ==", + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "chalk": "^2.4.2", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^2.0.0", + "lodash": "^4.17.12", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^2.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + 
"requires": { + "minimist": "0.0.8" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "dev": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "requires": { + "fast-diff": "^1.1.2" + } + }, + "pretty-format": { + "version": "23.6.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-23.6.0.tgz", + "integrity": "sha512-zf9NV1NSlDLDjycnwm6hpFATCGl/K1lt0R/GdkAK2O5LN/rwJoB+Mh93gGJjut4YbmecbfgLWVGSTCr0Ewvvbw==", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0", + "ansi-styles": "^3.2.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + } + } + }, "pretty-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-4.0.0.tgz", @@ -2634,6 +5068,12 @@ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", "integrity": "sha1-o31zL0JxtKsa0HDTVQjoKQeI/6o=" }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true + }, "prom-client": { "version": "11.5.3", "resolved": 
"https://registry.npmjs.org/prom-client/-/prom-client-11.5.3.tgz", @@ -2642,6 +5082,17 @@ "tdigest": "^0.1.1" } }, + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, "protobufjs": { "version": "6.8.8", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", @@ -2737,6 +5188,18 @@ "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", "integrity": "sha1-yzroBuh0BERYTvFUzo7pjUA/PjY=" }, + "quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true + }, + "ramda": { + "version": "0.26.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.26.1.tgz", + "integrity": "sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ==", + "dev": true + }, "range-parser": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", @@ -2790,6 +5253,33 @@ } } }, + "react-is": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", + "integrity": "sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==", + "dev": true + }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "dev": true, + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "dev": true, + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + } + }, "readable-stream": { "version": "2.3.6", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", @@ -2804,6 +5294,28 @@ "util-deprecate": "~1.0.1" } }, + "regenerator-runtime": { + "version": "0.13.3", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", + "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==", + "dev": true + }, + "regexp.prototype.flags": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz", + "integrity": "sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true + }, "request": { "version": "2.88.0", "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", @@ -2843,6 +5355,12 @@ "resolved": "https://registry.npmjs.org/require-all/-/require-all-1.0.0.tgz", "integrity": "sha1-hINwjnzkxt+tmItQgPl4KbktIic=" }, + "require-directory": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, "require-in-the-middle": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-4.0.1.tgz", @@ -2874,6 +5392,18 @@ "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", "dev": true }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "require-relative": { + "version": "0.8.7", + "resolved": "https://registry.npmjs.org/require-relative/-/require-relative-0.8.7.tgz", + "integrity": "sha1-eZlTn8ngR6N5KPoZb44VY9q9Nt4=", + "dev": true + }, "resolve": { "version": "1.12.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.12.0.tgz", @@ -2882,6 +5412,22 @@ "path-parse": "^1.0.6" } }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, "retry-as-promised": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/retry-as-promised/-/retry-as-promised-2.3.2.tgz", @@ -2928,6 +5474,24 @@ "glob": "^7.0.5" } }, + "run-async": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", + "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "dev": true, + "requires": { + "is-promise": "^2.1.0" + } + }, + "rxjs": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz", + "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", @@ -3071,11 +5635,36 @@ "coffee-script": "1.6.0" } }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true + }, "shimmer": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.0.tgz", "integrity": "sha1-+Wb3VVeJdj502IQRk2haXnhzZmU=" }, + "side-channel": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.2.tgz", + "integrity": "sha512-7rL9YlPHg7Ancea1S96Pa8/QWb4BtXL/TZvS6B8XFetGBeuhAsfmUspK6DokBeZ64+Kj9TCNRD/30pVz1BvQNA==", + "dev": true, + "requires": { + "es-abstract": "^1.17.0-next.1", + "object-inspect": "^1.7.0" + } + }, "sigmund": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", @@ -3095,6 +5684,25 @@ 
"buster-format": "~0.5" } }, + "slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + } + } + }, "smoke-test-sharelatex": { "version": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#bc3e93d18ccee219c0d99e8b02c984ccdd842e1c", "from": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0", @@ -3161,6 +5769,38 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, + "spdx-correct": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", + "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "dev": true + }, "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", @@ -3174,6 +5814,12 @@ "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, "sqlite3": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/sqlite3/-/sqlite3-4.0.6.tgz", @@ -3245,6 +5891,40 @@ "strip-ansi": "^3.0.0" } }, + "string.prototype.matchall": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.2.tgz", + "integrity": "sha512-N/jp6O5fMf9os0JU3E72Qhf590RSRZU/ungsL/qJUYVTNv7hTG0P/dbPjxINVN9jpscu3nzYwKESU3P3RY5tOg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0", + "has-symbols": "^1.0.1", + "internal-slot": "^1.0.2", + "regexp.prototype.flags": "^1.3.0", + "side-channel": "^1.0.2" + } + }, + "string.prototype.trimleft": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", + "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimright": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", + "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", @@ -3261,6 +5941,12 @@ "ansi-regex": "^2.0.0" } }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true + }, "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -3280,6 +5966,82 @@ "has-flag": "^2.0.0" } }, + "table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "dependencies": { + "ajv": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.11.0.tgz", + "integrity": "sha512-nCprB/0syFYy9fVYU1ox1l2KN8S9I+tziH8D4zdZuLT3N6RMlGSGt5FSTpAiHB/Whv8Qs1cWHma1aMKZyaHRKA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "fast-deep-equal": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + 
"is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, "tar": { "version": "4.4.8", "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.8.tgz", @@ -3376,6 +6138,12 @@ "terraformer": "~1.0.5" } }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", @@ -3395,11 +6163,26 @@ "integrity": "sha1-kNt58X2Ni1NiFUOJSSuXJ2LP0nY=", "dev": true }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, "to-buffer": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", "integrity": "sha1-STvUj2LXxD/N7TE6A9ytsuEhOoA=" }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true + }, "to-no-case": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", @@ -3442,6 +6225,12 @@ } } }, + "tslib": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", + "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", + "dev": true + }, "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -3455,6 +6244,15 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, + "requires": { + "prelude-ls": "~1.1.2" + } + }, "type-detect": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", @@ -3480,6 +6278,12 @@ "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" }, + "typescript": { + "version": "3.7.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.5.tgz", + "integrity": "sha512-/P5lkRXkWHNAbcJIiHPfRoKqyd7bsyCma1hZNUGfn20qm64T6ZBlrzprymeu918H+mB/0rIg2gGK/BXkhhYgBw==", + "dev": true + }, "underscore": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.9.1.tgz", @@ -3513,6 +6317,12 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "dev": true + }, "v8-profiler-node8": 
{ "version": "6.0.1", "resolved": "https://registry.npmjs.org/v8-profiler-node8/-/v8-profiler-node8-6.0.1.tgz", @@ -3549,6 +6359,16 @@ } } }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, "validator": { "version": "10.9.0", "resolved": "https://registry.npmjs.org/validator/-/validator-10.9.0.tgz", @@ -3569,11 +6389,100 @@ "extsprintf": "^1.2.0" } }, + "vue-eslint-parser": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-2.0.3.tgz", + "integrity": "sha512-ZezcU71Owm84xVF6gfurBQUGg8WQ+WZGxgDEQu1IHFBZNx7BFZg3L1yHxrCBNNwbwFtE1GuvfJKMtb6Xuwc/Bw==", + "dev": true, + "requires": { + "debug": "^3.1.0", + "eslint-scope": "^3.7.1", + "eslint-visitor-keys": "^1.0.0", + "espree": "^3.5.2", + "esquery": "^1.0.0", + "lodash": "^4.17.4" + }, + "dependencies": { + "acorn": { + "version": "5.7.3", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz", + "integrity": "sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==", + "dev": true + }, + "acorn-jsx": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", + "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", + "dev": true, + "requires": { + "acorn": "^3.0.4" + }, + "dependencies": { + "acorn": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", + "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", + "dev": true + } + } + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "eslint-scope": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "espree": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", + "integrity": "sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A==", + "dev": true, + "requires": { + "acorn": "^5.5.0", + "acorn-jsx": "^3.0.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, "walkdir": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ==" }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, "wide-align": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", @@ -3590,6 +6499,63 @@ "@types/node": "*" } }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -3600,16 +6566,138 @@ "resolved": "https://registry.npmjs.org/wrench/-/wrench-1.5.9.tgz", "integrity": "sha1-QRaRxjqbJTGxcAJnJ5veyiOyFCo=" }, + "write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "requires": { + "mkdirp": "^0.5.1" + }, + "dependencies": { + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "0.0.8" + } + } + } + }, "xtend": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true + }, "yallist": { "version": "3.0.3", "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", "integrity": "sha1-tLBJ4xS+VF486AIjbWzSLNkcPek=" }, + "yargs": { + "version": "13.3.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.0.tgz", + "integrity": "sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.1" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", + "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, "yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git a/package.json b/package.json index 5539538..c38621a 100644 --- a/package.json +++ b/package.json @@ -41,10 +41,28 @@ "wrench": "~1.5.4" }, 
"devDependencies": { + "babel-eslint": "^10.0.3", "bunyan": "^0.22.1", "chai": "~1.8.1", "coffeescript": "1.6.0", + "eslint": "^6.6.0", + "eslint-config-prettier": "^6.10.0", + "eslint-config-standard": "^14.1.0", + "eslint-config-standard-jsx": "^8.1.0", + "eslint-config-standard-react": "^9.2.0", + "eslint-plugin-chai-expect": "^2.1.0", + "eslint-plugin-chai-friendly": "^0.5.0", + "eslint-plugin-import": "^2.20.1", + "eslint-plugin-jsx-a11y": "^6.2.3", + "eslint-plugin-mocha": "^6.2.2", + "eslint-plugin-node": "^11.0.0", + "eslint-plugin-prettier": "^3.1.2", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-react": "^7.18.3", + "eslint-plugin-standard": "^4.0.1", "mocha": "^4.0.1", + "prettier": "^1.19.1", + "prettier-eslint-cli": "^5.0.0", "sandboxed-module": "~0.3.0", "sinon": "~1.7.3", "timekeeper": "0.0.4" From 37794788ce873c399726fa8cec72a103c295d620 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:14:01 +0100 Subject: [PATCH 05/24] decaffeinate: Rename CommandRunner.coffee and 25 other files from .coffee to .js --- app/coffee/{CommandRunner.coffee => CommandRunner.js} | 0 app/coffee/{CompileController.coffee => CompileController.js} | 0 app/coffee/{CompileManager.coffee => CompileManager.js} | 0 app/coffee/{ContentTypeMapper.coffee => ContentTypeMapper.js} | 0 app/coffee/{DbQueue.coffee => DbQueue.js} | 0 app/coffee/{DockerLockManager.coffee => DockerLockManager.js} | 0 app/coffee/{DockerRunner.coffee => DockerRunner.js} | 0 app/coffee/{DraftModeManager.coffee => DraftModeManager.js} | 0 app/coffee/{Errors.coffee => Errors.js} | 0 app/coffee/{LatexRunner.coffee => LatexRunner.js} | 0 app/coffee/{LocalCommandRunner.coffee => LocalCommandRunner.js} | 0 app/coffee/{LockManager.coffee => LockManager.js} | 0 app/coffee/{Metrics.coffee => Metrics.js} | 0 app/coffee/{OutputCacheManager.coffee => OutputCacheManager.js} | 0 app/coffee/{OutputFileFinder.coffee => OutputFileFinder.js} | 0 app/coffee/{OutputFileOptimiser.coffee => OutputFileOptimiser.js} | 0 ...jectPersistenceManager.coffee => ProjectPersistenceManager.js} | 0 app/coffee/{RequestParser.coffee => RequestParser.js} | 0 .../{ResourceStateManager.coffee => ResourceStateManager.js} | 0 app/coffee/{ResourceWriter.coffee => ResourceWriter.js} | 0 app/coffee/{SafeReader.coffee => SafeReader.js} | 0 ...cServerForbidSymlinks.coffee => StaticServerForbidSymlinks.js} | 0 app/coffee/{TikzManager.coffee => TikzManager.js} | 0 app/coffee/{UrlCache.coffee => UrlCache.js} | 0 app/coffee/{UrlFetcher.coffee => UrlFetcher.js} | 0 app/coffee/{db.coffee => db.js} | 0 26 files changed, 0 insertions(+), 0 deletions(-) rename app/coffee/{CommandRunner.coffee => CommandRunner.js} (100%) rename app/coffee/{CompileController.coffee => CompileController.js} (100%) rename app/coffee/{CompileManager.coffee => CompileManager.js} (100%) rename app/coffee/{ContentTypeMapper.coffee => ContentTypeMapper.js} (100%) rename app/coffee/{DbQueue.coffee => DbQueue.js} (100%) rename app/coffee/{DockerLockManager.coffee => DockerLockManager.js} (100%) rename app/coffee/{DockerRunner.coffee => DockerRunner.js} (100%) rename app/coffee/{DraftModeManager.coffee => DraftModeManager.js} (100%) rename app/coffee/{Errors.coffee => Errors.js} (100%) rename app/coffee/{LatexRunner.coffee => LatexRunner.js} (100%) rename app/coffee/{LocalCommandRunner.coffee => LocalCommandRunner.js} (100%) rename app/coffee/{LockManager.coffee => LockManager.js} (100%) rename app/coffee/{Metrics.coffee => Metrics.js} (100%) rename app/coffee/{OutputCacheManager.coffee => 
OutputCacheManager.js} (100%) rename app/coffee/{OutputFileFinder.coffee => OutputFileFinder.js} (100%) rename app/coffee/{OutputFileOptimiser.coffee => OutputFileOptimiser.js} (100%) rename app/coffee/{ProjectPersistenceManager.coffee => ProjectPersistenceManager.js} (100%) rename app/coffee/{RequestParser.coffee => RequestParser.js} (100%) rename app/coffee/{ResourceStateManager.coffee => ResourceStateManager.js} (100%) rename app/coffee/{ResourceWriter.coffee => ResourceWriter.js} (100%) rename app/coffee/{SafeReader.coffee => SafeReader.js} (100%) rename app/coffee/{StaticServerForbidSymlinks.coffee => StaticServerForbidSymlinks.js} (100%) rename app/coffee/{TikzManager.coffee => TikzManager.js} (100%) rename app/coffee/{UrlCache.coffee => UrlCache.js} (100%) rename app/coffee/{UrlFetcher.coffee => UrlFetcher.js} (100%) rename app/coffee/{db.coffee => db.js} (100%) diff --git a/app/coffee/CommandRunner.coffee b/app/coffee/CommandRunner.js similarity index 100% rename from app/coffee/CommandRunner.coffee rename to app/coffee/CommandRunner.js diff --git a/app/coffee/CompileController.coffee b/app/coffee/CompileController.js similarity index 100% rename from app/coffee/CompileController.coffee rename to app/coffee/CompileController.js diff --git a/app/coffee/CompileManager.coffee b/app/coffee/CompileManager.js similarity index 100% rename from app/coffee/CompileManager.coffee rename to app/coffee/CompileManager.js diff --git a/app/coffee/ContentTypeMapper.coffee b/app/coffee/ContentTypeMapper.js similarity index 100% rename from app/coffee/ContentTypeMapper.coffee rename to app/coffee/ContentTypeMapper.js diff --git a/app/coffee/DbQueue.coffee b/app/coffee/DbQueue.js similarity index 100% rename from app/coffee/DbQueue.coffee rename to app/coffee/DbQueue.js diff --git a/app/coffee/DockerLockManager.coffee b/app/coffee/DockerLockManager.js similarity index 100% rename from app/coffee/DockerLockManager.coffee rename to app/coffee/DockerLockManager.js diff --git a/app/coffee/DockerRunner.coffee b/app/coffee/DockerRunner.js similarity index 100% rename from app/coffee/DockerRunner.coffee rename to app/coffee/DockerRunner.js diff --git a/app/coffee/DraftModeManager.coffee b/app/coffee/DraftModeManager.js similarity index 100% rename from app/coffee/DraftModeManager.coffee rename to app/coffee/DraftModeManager.js diff --git a/app/coffee/Errors.coffee b/app/coffee/Errors.js similarity index 100% rename from app/coffee/Errors.coffee rename to app/coffee/Errors.js diff --git a/app/coffee/LatexRunner.coffee b/app/coffee/LatexRunner.js similarity index 100% rename from app/coffee/LatexRunner.coffee rename to app/coffee/LatexRunner.js diff --git a/app/coffee/LocalCommandRunner.coffee b/app/coffee/LocalCommandRunner.js similarity index 100% rename from app/coffee/LocalCommandRunner.coffee rename to app/coffee/LocalCommandRunner.js diff --git a/app/coffee/LockManager.coffee b/app/coffee/LockManager.js similarity index 100% rename from app/coffee/LockManager.coffee rename to app/coffee/LockManager.js diff --git a/app/coffee/Metrics.coffee b/app/coffee/Metrics.js similarity index 100% rename from app/coffee/Metrics.coffee rename to app/coffee/Metrics.js diff --git a/app/coffee/OutputCacheManager.coffee b/app/coffee/OutputCacheManager.js similarity index 100% rename from app/coffee/OutputCacheManager.coffee rename to app/coffee/OutputCacheManager.js diff --git a/app/coffee/OutputFileFinder.coffee b/app/coffee/OutputFileFinder.js similarity index 100% rename from app/coffee/OutputFileFinder.coffee rename 
to app/coffee/OutputFileFinder.js diff --git a/app/coffee/OutputFileOptimiser.coffee b/app/coffee/OutputFileOptimiser.js similarity index 100% rename from app/coffee/OutputFileOptimiser.coffee rename to app/coffee/OutputFileOptimiser.js diff --git a/app/coffee/ProjectPersistenceManager.coffee b/app/coffee/ProjectPersistenceManager.js similarity index 100% rename from app/coffee/ProjectPersistenceManager.coffee rename to app/coffee/ProjectPersistenceManager.js diff --git a/app/coffee/RequestParser.coffee b/app/coffee/RequestParser.js similarity index 100% rename from app/coffee/RequestParser.coffee rename to app/coffee/RequestParser.js diff --git a/app/coffee/ResourceStateManager.coffee b/app/coffee/ResourceStateManager.js similarity index 100% rename from app/coffee/ResourceStateManager.coffee rename to app/coffee/ResourceStateManager.js diff --git a/app/coffee/ResourceWriter.coffee b/app/coffee/ResourceWriter.js similarity index 100% rename from app/coffee/ResourceWriter.coffee rename to app/coffee/ResourceWriter.js diff --git a/app/coffee/SafeReader.coffee b/app/coffee/SafeReader.js similarity index 100% rename from app/coffee/SafeReader.coffee rename to app/coffee/SafeReader.js diff --git a/app/coffee/StaticServerForbidSymlinks.coffee b/app/coffee/StaticServerForbidSymlinks.js similarity index 100% rename from app/coffee/StaticServerForbidSymlinks.coffee rename to app/coffee/StaticServerForbidSymlinks.js diff --git a/app/coffee/TikzManager.coffee b/app/coffee/TikzManager.js similarity index 100% rename from app/coffee/TikzManager.coffee rename to app/coffee/TikzManager.js diff --git a/app/coffee/UrlCache.coffee b/app/coffee/UrlCache.js similarity index 100% rename from app/coffee/UrlCache.coffee rename to app/coffee/UrlCache.js diff --git a/app/coffee/UrlFetcher.coffee b/app/coffee/UrlFetcher.js similarity index 100% rename from app/coffee/UrlFetcher.coffee rename to app/coffee/UrlFetcher.js diff --git a/app/coffee/db.coffee b/app/coffee/db.js similarity index 100% rename from app/coffee/db.coffee rename to app/coffee/db.js From 4655768fd21b6b3a9fae2e8093859641876504fe Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:14:14 +0100 Subject: [PATCH 06/24] decaffeinate: Convert CommandRunner.coffee and 25 other files to JS --- app/coffee/CommandRunner.js | 25 +- app/coffee/CompileController.js | 252 ++++---- app/coffee/CompileManager.js | 737 ++++++++++++---------- app/coffee/ContentTypeMapper.js | 50 +- app/coffee/DbQueue.js | 21 +- app/coffee/DockerLockManager.js | 126 ++-- app/coffee/DockerRunner.js | 761 +++++++++++++---------- app/coffee/DraftModeManager.js | 51 +- app/coffee/Errors.js | 49 +- app/coffee/LatexRunner.js | 172 ++--- app/coffee/LocalCommandRunner.js | 96 +-- app/coffee/LockManager.js | 79 ++- app/coffee/Metrics.js | 2 +- app/coffee/OutputCacheManager.js | 429 +++++++------ app/coffee/OutputFileFinder.js | 116 ++-- app/coffee/OutputFileOptimiser.js | 122 ++-- app/coffee/ProjectPersistenceManager.js | 171 +++-- app/coffee/RequestParser.js | 296 +++++---- app/coffee/ResourceStateManager.js | 168 +++-- app/coffee/ResourceWriter.js | 322 ++++++---- app/coffee/SafeReader.js | 55 +- app/coffee/StaticServerForbidSymlinks.js | 103 +-- app/coffee/TikzManager.js | 85 ++- app/coffee/UrlCache.js | 281 +++++---- app/coffee/UrlFetcher.js | 136 ++-- app/coffee/db.js | 60 +- 26 files changed, 2801 insertions(+), 1964 deletions(-) diff --git a/app/coffee/CommandRunner.js b/app/coffee/CommandRunner.js index 2d1c3a9..dd7210a 100644 --- a/app/coffee/CommandRunner.js +++ 
b/app/coffee/CommandRunner.js @@ -1,11 +1,18 @@ -Settings = require "settings-sharelatex" -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let commandRunnerPath; +const Settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); -if Settings.clsi?.dockerRunner == true - commandRunnerPath = "./DockerRunner" -else - commandRunnerPath = "./LocalCommandRunner" -logger.info commandRunnerPath:commandRunnerPath, "selecting command runner for clsi" -CommandRunner = require(commandRunnerPath) +if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) { + commandRunnerPath = "./DockerRunner"; +} else { + commandRunnerPath = "./LocalCommandRunner"; +} +logger.info({commandRunnerPath}, "selecting command runner for clsi"); +const CommandRunner = require(commandRunnerPath); -module.exports = CommandRunner +module.exports = CommandRunner; diff --git a/app/coffee/CompileController.js b/app/coffee/CompileController.js index 4952d84..cfdbcfe 100644 --- a/app/coffee/CompileController.js +++ b/app/coffee/CompileController.js @@ -1,119 +1,163 @@ -RequestParser = require "./RequestParser" -CompileManager = require "./CompileManager" -Settings = require "settings-sharelatex" -Metrics = require "./Metrics" -ProjectPersistenceManager = require "./ProjectPersistenceManager" -logger = require "logger-sharelatex" -Errors = require "./Errors" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let CompileController; +const RequestParser = require("./RequestParser"); +const CompileManager = require("./CompileManager"); +const Settings = require("settings-sharelatex"); +const Metrics = require("./Metrics"); +const ProjectPersistenceManager = require("./ProjectPersistenceManager"); +const logger = require("logger-sharelatex"); +const Errors = require("./Errors"); -module.exports = CompileController = - compile: (req, res, next = (error) ->) -> - timer = new Metrics.Timer("compile-request") - RequestParser.parse req.body, (error, request) -> - return next(error) if error? - request.project_id = req.params.project_id - request.user_id = req.params.user_id if req.params.user_id? - ProjectPersistenceManager.markProjectAsJustAccessed request.project_id, (error) -> - return next(error) if error? - CompileManager.doCompileWithLock request, (error, outputFiles = []) -> - if error instanceof Errors.AlreadyCompilingError - code = 423 # Http 423 Locked - status = "compile-in-progress" - else if error instanceof Errors.FilesOutOfSyncError - code = 409 # Http 409 Conflict - status = "retry" - else if error?.terminated - status = "terminated" - else if error?.validate - status = "validation-#{error.validate}" - else if error?.timedout - status = "timedout" - logger.log err: error, project_id: request.project_id, "timeout running compile" - else if error? 
- status = "error" - code = 500 - logger.warn err: error, project_id: request.project_id, "error running compile" - else - status = "failure" - for file in outputFiles - if file.path?.match(/output\.pdf$/) - status = "success" +module.exports = (CompileController = { + compile(req, res, next) { + if (next == null) { next = function(error) {}; } + const timer = new Metrics.Timer("compile-request"); + return RequestParser.parse(req.body, function(error, request) { + if (error != null) { return next(error); } + request.project_id = req.params.project_id; + if (req.params.user_id != null) { request.user_id = req.params.user_id; } + return ProjectPersistenceManager.markProjectAsJustAccessed(request.project_id, function(error) { + if (error != null) { return next(error); } + return CompileManager.doCompileWithLock(request, function(error, outputFiles) { + let code, status; + if (outputFiles == null) { outputFiles = []; } + if (error instanceof Errors.AlreadyCompilingError) { + code = 423; // Http 423 Locked + status = "compile-in-progress"; + } else if (error instanceof Errors.FilesOutOfSyncError) { + code = 409; // Http 409 Conflict + status = "retry"; + } else if (error != null ? error.terminated : undefined) { + status = "terminated"; + } else if (error != null ? error.validate : undefined) { + status = `validation-${error.validate}`; + } else if (error != null ? error.timedout : undefined) { + status = "timedout"; + logger.log({err: error, project_id: request.project_id}, "timeout running compile"); + } else if (error != null) { + status = "error"; + code = 500; + logger.warn({err: error, project_id: request.project_id}, "error running compile"); + } else { + let file; + status = "failure"; + for (file of Array.from(outputFiles)) { + if (file.path != null ? file.path.match(/output\.pdf$/) : undefined) { + status = "success"; + } + } - if status == "failure" - logger.warn project_id: request.project_id, outputFiles:outputFiles, "project failed to compile successfully, no output.pdf generated" + if (status === "failure") { + logger.warn({project_id: request.project_id, outputFiles}, "project failed to compile successfully, no output.pdf generated"); + } - # log an error if any core files are found - for file in outputFiles - if file.path is "core" - logger.error project_id:request.project_id, req:req, outputFiles:outputFiles, "core file found in output" - - if error? - outputFiles = error.outputFiles || [] - - timer.done() - res.status(code or 200).send { - compile: - status: status - error: error?.message or error - outputFiles: outputFiles.map (file) -> - url: - "#{Settings.apis.clsi.url}/project/#{request.project_id}" + - (if request.user_id? then "/user/#{request.user_id}" else "") + - (if file.build? then "/build/#{file.build}" else "") + - "/output/#{file.path}" - path: file.path - type: file.type - build: file.build + // log an error if any core files are found + for (file of Array.from(outputFiles)) { + if (file.path === "core") { + logger.error({project_id:request.project_id, req, outputFiles}, "core file found in output"); + } + } } - stopCompile: (req, res, next) -> - {project_id, user_id} = req.params - CompileManager.stopCompile project_id, user_id, (error) -> - return next(error) if error? - res.sendStatus(204) + if (error != null) { + outputFiles = error.outputFiles || []; + } - clearCache: (req, res, next = (error) ->) -> - ProjectPersistenceManager.clearProject req.params.project_id, req.params.user_id, (error) -> - return next(error) if error? 
- res.sendStatus(204) # No content + timer.done(); + return res.status(code || 200).send({ + compile: { + status, + error: (error != null ? error.message : undefined) || error, + outputFiles: outputFiles.map(file => + ({ + url: + `${Settings.apis.clsi.url}/project/${request.project_id}` + + ((request.user_id != null) ? `/user/${request.user_id}` : "") + + ((file.build != null) ? `/build/${file.build}` : "") + + `/output/${file.path}`, + path: file.path, + type: file.type, + build: file.build + }) + ) + } + }); + }); + }); + }); + }, - syncFromCode: (req, res, next = (error) ->) -> - file = req.query.file - line = parseInt(req.query.line, 10) - column = parseInt(req.query.column, 10) - project_id = req.params.project_id - user_id = req.params.user_id - CompileManager.syncFromCode project_id, user_id, file, line, column, (error, pdfPositions) -> - return next(error) if error? - res.json { + stopCompile(req, res, next) { + const {project_id, user_id} = req.params; + return CompileManager.stopCompile(project_id, user_id, function(error) { + if (error != null) { return next(error); } + return res.sendStatus(204); + }); + }, + + clearCache(req, res, next) { + if (next == null) { next = function(error) {}; } + return ProjectPersistenceManager.clearProject(req.params.project_id, req.params.user_id, function(error) { + if (error != null) { return next(error); } + return res.sendStatus(204); + }); + }, // No content + + syncFromCode(req, res, next) { + if (next == null) { next = function(error) {}; } + const { file } = req.query; + const line = parseInt(req.query.line, 10); + const column = parseInt(req.query.column, 10); + const { project_id } = req.params; + const { user_id } = req.params; + return CompileManager.syncFromCode(project_id, user_id, file, line, column, function(error, pdfPositions) { + if (error != null) { return next(error); } + return res.json({ pdf: pdfPositions - } + }); + }); + }, - syncFromPdf: (req, res, next = (error) ->) -> - page = parseInt(req.query.page, 10) - h = parseFloat(req.query.h) - v = parseFloat(req.query.v) - project_id = req.params.project_id - user_id = req.params.user_id - CompileManager.syncFromPdf project_id, user_id, page, h, v, (error, codePositions) -> - return next(error) if error? - res.json { + syncFromPdf(req, res, next) { + if (next == null) { next = function(error) {}; } + const page = parseInt(req.query.page, 10); + const h = parseFloat(req.query.h); + const v = parseFloat(req.query.v); + const { project_id } = req.params; + const { user_id } = req.params; + return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(error, codePositions) { + if (error != null) { return next(error); } + return res.json({ code: codePositions - } + }); + }); + }, - wordcount: (req, res, next = (error) ->) -> - file = req.query.file || "main.tex" - project_id = req.params.project_id - user_id = req.params.user_id - image = req.query.image - logger.log {image, file, project_id}, "word count request" + wordcount(req, res, next) { + if (next == null) { next = function(error) {}; } + const file = req.query.file || "main.tex"; + const { project_id } = req.params; + const { user_id } = req.params; + const { image } = req.query; + logger.log({image, file, project_id}, "word count request"); - CompileManager.wordcount project_id, user_id, file, image, (error, result) -> - return next(error) if error? 
- res.json { + return CompileManager.wordcount(project_id, user_id, file, image, function(error, result) { + if (error != null) { return next(error); } + return res.json({ texcount: result - } + }); + }); + }, - status: (req, res, next = (error)-> )-> - res.send("OK") + status(req, res, next ){ + if (next == null) { next = function(error){}; } + return res.send("OK"); + } +}); diff --git a/app/coffee/CompileManager.js b/app/coffee/CompileManager.js index 792beb8..82dafd1 100644 --- a/app/coffee/CompileManager.js +++ b/app/coffee/CompileManager.js @@ -1,345 +1,454 @@ -ResourceWriter = require "./ResourceWriter" -LatexRunner = require "./LatexRunner" -OutputFileFinder = require "./OutputFileFinder" -OutputCacheManager = require "./OutputCacheManager" -Settings = require("settings-sharelatex") -Path = require "path" -logger = require "logger-sharelatex" -Metrics = require "./Metrics" -child_process = require "child_process" -DraftModeManager = require "./DraftModeManager" -TikzManager = require "./TikzManager" -LockManager = require "./LockManager" -fs = require("fs") -fse = require "fs-extra" -os = require("os") -async = require "async" -Errors = require './Errors' -CommandRunner = require "./CommandRunner" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let CompileManager; +const ResourceWriter = require("./ResourceWriter"); +const LatexRunner = require("./LatexRunner"); +const OutputFileFinder = require("./OutputFileFinder"); +const OutputCacheManager = require("./OutputCacheManager"); +const Settings = require("settings-sharelatex"); +const Path = require("path"); +const logger = require("logger-sharelatex"); +const Metrics = require("./Metrics"); +const child_process = require("child_process"); +const DraftModeManager = require("./DraftModeManager"); +const TikzManager = require("./TikzManager"); +const LockManager = require("./LockManager"); +const fs = require("fs"); +const fse = require("fs-extra"); +const os = require("os"); +const async = require("async"); +const Errors = require('./Errors'); +const CommandRunner = require("./CommandRunner"); -getCompileName = (project_id, user_id) -> - if user_id? then "#{project_id}-#{user_id}" else project_id +const getCompileName = function(project_id, user_id) { + if (user_id != null) { return `${project_id}-${user_id}`; } else { return project_id; } +}; -getCompileDir = (project_id, user_id) -> - Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id)) +const getCompileDir = (project_id, user_id) => Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id)); -module.exports = CompileManager = +module.exports = (CompileManager = { - doCompileWithLock: (request, callback = (error, outputFiles) ->) -> - compileDir = getCompileDir(request.project_id, request.user_id) - lockFile = Path.join(compileDir, ".project-lock") - # use a .project-lock file in the compile directory to prevent - # simultaneous compiles - fse.ensureDir compileDir, (error) -> - return callback(error) if error? 
- LockManager.runWithLock lockFile, (releaseLock) -> - CompileManager.doCompile(request, releaseLock) - , callback + doCompileWithLock(request, callback) { + if (callback == null) { callback = function(error, outputFiles) {}; } + const compileDir = getCompileDir(request.project_id, request.user_id); + const lockFile = Path.join(compileDir, ".project-lock"); + // use a .project-lock file in the compile directory to prevent + // simultaneous compiles + return fse.ensureDir(compileDir, function(error) { + if (error != null) { return callback(error); } + return LockManager.runWithLock(lockFile, releaseLock => CompileManager.doCompile(request, releaseLock) + , callback); + }); + }, - doCompile: (request, callback = (error, outputFiles) ->) -> - compileDir = getCompileDir(request.project_id, request.user_id) - timer = new Metrics.Timer("write-to-disk") - logger.log project_id: request.project_id, user_id: request.user_id, "syncing resources to disk" - ResourceWriter.syncResourcesToDisk request, compileDir, (error, resourceList) -> - # NOTE: resourceList is insecure, it should only be used to exclude files from the output list - if error? and error instanceof Errors.FilesOutOfSyncError - logger.warn project_id: request.project_id, user_id: request.user_id, "files out of sync, please retry" - return callback(error) - else if error? - logger.err err:error, project_id: request.project_id, user_id: request.user_id, "error writing resources to disk" - return callback(error) - logger.log project_id: request.project_id, user_id: request.user_id, time_taken: Date.now() - timer.start, "written files to disk" - timer.done() + doCompile(request, callback) { + if (callback == null) { callback = function(error, outputFiles) {}; } + const compileDir = getCompileDir(request.project_id, request.user_id); + let timer = new Metrics.Timer("write-to-disk"); + logger.log({project_id: request.project_id, user_id: request.user_id}, "syncing resources to disk"); + return ResourceWriter.syncResourcesToDisk(request, compileDir, function(error, resourceList) { + // NOTE: resourceList is insecure, it should only be used to exclude files from the output list + if ((error != null) && error instanceof Errors.FilesOutOfSyncError) { + logger.warn({project_id: request.project_id, user_id: request.user_id}, "files out of sync, please retry"); + return callback(error); + } else if (error != null) { + logger.err({err:error, project_id: request.project_id, user_id: request.user_id}, "error writing resources to disk"); + return callback(error); + } + logger.log({project_id: request.project_id, user_id: request.user_id, time_taken: Date.now() - timer.start}, "written files to disk"); + timer.done(); - injectDraftModeIfRequired = (callback) -> - if request.draft - DraftModeManager.injectDraftMode Path.join(compileDir, request.rootResourcePath), callback - else - callback() + const injectDraftModeIfRequired = function(callback) { + if (request.draft) { + return DraftModeManager.injectDraftMode(Path.join(compileDir, request.rootResourcePath), callback); + } else { + return callback(); + } + }; - createTikzFileIfRequired = (callback) -> - TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, needsMainFile) -> - return callback(error) if error? 
- if needsMainFile - TikzManager.injectOutputFile compileDir, request.rootResourcePath, callback - else - callback() + const createTikzFileIfRequired = callback => + TikzManager.checkMainFile(compileDir, request.rootResourcePath, resourceList, function(error, needsMainFile) { + if (error != null) { return callback(error); } + if (needsMainFile) { + return TikzManager.injectOutputFile(compileDir, request.rootResourcePath, callback); + } else { + return callback(); + } + }) + ; - # set up environment variables for chktex - env = {} - # only run chktex on LaTeX files (not knitr .Rtex files or any others) - isLaTeXFile = request.rootResourcePath?.match(/\.tex$/i) - if request.check? and isLaTeXFile - env['CHKTEX_OPTIONS'] = '-nall -e9 -e10 -w15 -w16' - env['CHKTEX_ULIMIT_OPTIONS'] = '-t 5 -v 64000' - if request.check is 'error' - env['CHKTEX_EXIT_ON_ERROR'] = 1 - if request.check is 'validate' - env['CHKTEX_VALIDATE'] = 1 + // set up environment variables for chktex + const env = {}; + // only run chktex on LaTeX files (not knitr .Rtex files or any others) + const isLaTeXFile = request.rootResourcePath != null ? request.rootResourcePath.match(/\.tex$/i) : undefined; + if ((request.check != null) && isLaTeXFile) { + env['CHKTEX_OPTIONS'] = '-nall -e9 -e10 -w15 -w16'; + env['CHKTEX_ULIMIT_OPTIONS'] = '-t 5 -v 64000'; + if (request.check === 'error') { + env['CHKTEX_EXIT_ON_ERROR'] = 1; + } + if (request.check === 'validate') { + env['CHKTEX_VALIDATE'] = 1; + } + } - # apply a series of file modifications/creations for draft mode and tikz - async.series [injectDraftModeIfRequired, createTikzFileIfRequired], (error) -> - return callback(error) if error? - timer = new Metrics.Timer("run-compile") - # find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite) - tag = request.imageName?.match(/:(.*)/)?[1]?.replace(/\./g,'-') or "default" - tag = "other" if not request.project_id.match(/^[0-9a-f]{24}$/) # exclude smoke test - Metrics.inc("compiles") - Metrics.inc("compiles-with-image.#{tag}") - compileName = getCompileName(request.project_id, request.user_id) - LatexRunner.runLatex compileName, { - directory: compileDir - mainFile: request.rootResourcePath - compiler: request.compiler - timeout: request.timeout - image: request.imageName - flags: request.flags + // apply a series of file modifications/creations for draft mode and tikz + return async.series([injectDraftModeIfRequired, createTikzFileIfRequired], function(error) { + if (error != null) { return callback(error); } + timer = new Metrics.Timer("run-compile"); + // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite) + let tag = __guard__(__guard__(request.imageName != null ? 
request.imageName.match(/:(.*)/) : undefined, x1 => x1[1]), x => x.replace(/\./g,'-')) || "default"; + if (!request.project_id.match(/^[0-9a-f]{24}$/)) { tag = "other"; } // exclude smoke test + Metrics.inc("compiles"); + Metrics.inc(`compiles-with-image.${tag}`); + const compileName = getCompileName(request.project_id, request.user_id); + return LatexRunner.runLatex(compileName, { + directory: compileDir, + mainFile: request.rootResourcePath, + compiler: request.compiler, + timeout: request.timeout, + image: request.imageName, + flags: request.flags, environment: env - }, (error, output, stats, timings) -> - # request was for validation only - if request.check is "validate" - result = if error?.code then "fail" else "pass" - error = new Error("validation") - error.validate = result - # request was for compile, and failed on validation - if request.check is "error" and error?.message is 'exited' - error = new Error("compilation") - error.validate = "fail" - # compile was killed by user, was a validation, or a compile which failed validation - if error?.terminated or error?.validate or error?.timedout - OutputFileFinder.findOutputFiles resourceList, compileDir, (err, outputFiles) -> - return callback(err) if err? - error.outputFiles = outputFiles # return output files so user can check logs - callback(error) - return - # compile completed normally - return callback(error) if error? - Metrics.inc("compiles-succeeded") - for metric_key, metric_value of stats or {} - Metrics.count(metric_key, metric_value) - for metric_key, metric_value of timings or {} - Metrics.timing(metric_key, metric_value) - loadavg = os.loadavg?() - Metrics.gauge("load-avg", loadavg[0]) if loadavg? - ts = timer.done() - logger.log {project_id: request.project_id, user_id: request.user_id, time_taken: ts, stats:stats, timings:timings, loadavg:loadavg}, "done compile" - if stats?["latex-runs"] > 0 - Metrics.timing("run-compile-per-pass", ts / stats["latex-runs"]) - if stats?["latex-runs"] > 0 and timings?["cpu-time"] > 0 - Metrics.timing("run-compile-cpu-time-per-pass", timings["cpu-time"] / stats["latex-runs"]) + }, function(error, output, stats, timings) { + // request was for validation only + let metric_key, metric_value; + if (request.check === "validate") { + const result = (error != null ? error.code : undefined) ? "fail" : "pass"; + error = new Error("validation"); + error.validate = result; + } + // request was for compile, and failed on validation + if ((request.check === "error") && ((error != null ? error.message : undefined) === 'exited')) { + error = new Error("compilation"); + error.validate = "fail"; + } + // compile was killed by user, was a validation, or a compile which failed validation + if ((error != null ? error.terminated : undefined) || (error != null ? error.validate : undefined) || (error != null ? 
error.timedout : undefined)) { + OutputFileFinder.findOutputFiles(resourceList, compileDir, function(err, outputFiles) { + if (err != null) { return callback(err); } + error.outputFiles = outputFiles; // return output files so user can check logs + return callback(error); + }); + return; + } + // compile completed normally + if (error != null) { return callback(error); } + Metrics.inc("compiles-succeeded"); + const object = stats || {}; + for (metric_key in object) { + metric_value = object[metric_key]; + Metrics.count(metric_key, metric_value); + } + const object1 = timings || {}; + for (metric_key in object1) { + metric_value = object1[metric_key]; + Metrics.timing(metric_key, metric_value); + } + const loadavg = typeof os.loadavg === 'function' ? os.loadavg() : undefined; + if (loadavg != null) { Metrics.gauge("load-avg", loadavg[0]); } + const ts = timer.done(); + logger.log({project_id: request.project_id, user_id: request.user_id, time_taken: ts, stats, timings, loadavg}, "done compile"); + if ((stats != null ? stats["latex-runs"] : undefined) > 0) { + Metrics.timing("run-compile-per-pass", ts / stats["latex-runs"]); + } + if (((stats != null ? stats["latex-runs"] : undefined) > 0) && ((timings != null ? timings["cpu-time"] : undefined) > 0)) { + Metrics.timing("run-compile-cpu-time-per-pass", timings["cpu-time"] / stats["latex-runs"]); + } - OutputFileFinder.findOutputFiles resourceList, compileDir, (error, outputFiles) -> - return callback(error) if error? - OutputCacheManager.saveOutputFiles outputFiles, compileDir, (error, newOutputFiles) -> - callback null, newOutputFiles + return OutputFileFinder.findOutputFiles(resourceList, compileDir, function(error, outputFiles) { + if (error != null) { return callback(error); } + return OutputCacheManager.saveOutputFiles(outputFiles, compileDir, (error, newOutputFiles) => callback(null, newOutputFiles)); + }); + }); + }); + }); + }, - stopCompile: (project_id, user_id, callback = (error) ->) -> - compileName = getCompileName(project_id, user_id) - LatexRunner.killLatex compileName, callback + stopCompile(project_id, user_id, callback) { + if (callback == null) { callback = function(error) {}; } + const compileName = getCompileName(project_id, user_id); + return LatexRunner.killLatex(compileName, callback); + }, - clearProject: (project_id, user_id, _callback = (error) ->) -> - callback = (error) -> - _callback(error) - _callback = () -> + clearProject(project_id, user_id, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const callback = function(error) { + _callback(error); + return _callback = function() {}; + }; - compileDir = getCompileDir(project_id, user_id) + const compileDir = getCompileDir(project_id, user_id); - CompileManager._checkDirectory compileDir, (err, exists) -> - return callback(err) if err? 
- return callback() if not exists # skip removal if no directory present + return CompileManager._checkDirectory(compileDir, function(err, exists) { + if (err != null) { return callback(err); } + if (!exists) { return callback(); } // skip removal if no directory present - proc = child_process.spawn "rm", ["-r", compileDir] + const proc = child_process.spawn("rm", ["-r", compileDir]); - proc.on "error", callback + proc.on("error", callback); - stderr = "" - proc.stderr.on "data", (chunk) -> stderr += chunk.toString() + let stderr = ""; + proc.stderr.on("data", chunk => stderr += chunk.toString()); - proc.on "close", (code) -> - if code == 0 - return callback(null) - else - return callback(new Error("rm -r #{compileDir} failed: #{stderr}")) + return proc.on("close", function(code) { + if (code === 0) { + return callback(null); + } else { + return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`)); + } + }); + }); + }, - _findAllDirs: (callback = (error, allDirs) ->) -> - root = Settings.path.compilesDir - fs.readdir root, (err, files) -> - return callback(err) if err? - allDirs = (Path.join(root, file) for file in files) - callback(null, allDirs) + _findAllDirs(callback) { + if (callback == null) { callback = function(error, allDirs) {}; } + const root = Settings.path.compilesDir; + return fs.readdir(root, function(err, files) { + if (err != null) { return callback(err); } + const allDirs = (Array.from(files).map((file) => Path.join(root, file))); + return callback(null, allDirs); + }); + }, - clearExpiredProjects: (max_cache_age_ms, callback = (error) ->) -> - now = Date.now() - # action for each directory - expireIfNeeded = (checkDir, cb) -> - fs.stat checkDir, (err, stats) -> - return cb() if err? # ignore errors checking directory - age = now - stats.mtime - hasExpired = (age > max_cache_age_ms) - if hasExpired then fse.remove(checkDir, cb) else cb() - # iterate over all project directories - CompileManager._findAllDirs (error, allDirs) -> - return callback() if error? - async.eachSeries allDirs, expireIfNeeded, callback + clearExpiredProjects(max_cache_age_ms, callback) { + if (callback == null) { callback = function(error) {}; } + const now = Date.now(); + // action for each directory + const expireIfNeeded = (checkDir, cb) => + fs.stat(checkDir, function(err, stats) { + if (err != null) { return cb(); } // ignore errors checking directory + const age = now - stats.mtime; + const hasExpired = (age > max_cache_age_ms); + if (hasExpired) { return fse.remove(checkDir, cb); } else { return cb(); } + }) + ; + // iterate over all project directories + return CompileManager._findAllDirs(function(error, allDirs) { + if (error != null) { return callback(); } + return async.eachSeries(allDirs, expireIfNeeded, callback); + }); + }, - _checkDirectory: (compileDir, callback = (error, exists) ->) -> - fs.lstat compileDir, (err, stats) -> - if err?.code is 'ENOENT' - return callback(null, false) # directory does not exist - else if err? - logger.err {dir: compileDir, err:err}, "error on stat of project directory for removal" - return callback(err) - else if not stats?.isDirectory() - logger.err {dir: compileDir, stats:stats}, "bad project directory for removal" - return callback new Error("project directory is not directory") - else - callback(null, true) # directory exists + _checkDirectory(compileDir, callback) { + if (callback == null) { callback = function(error, exists) {}; } + return fs.lstat(compileDir, function(err, stats) { + if ((err != null ? 
err.code : undefined) === 'ENOENT') { + return callback(null, false); // directory does not exist + } else if (err != null) { + logger.err({dir: compileDir, err}, "error on stat of project directory for removal"); + return callback(err); + } else if (!(stats != null ? stats.isDirectory() : undefined)) { + logger.err({dir: compileDir, stats}, "bad project directory for removal"); + return callback(new Error("project directory is not directory")); + } else { + return callback(null, true); + } + }); + }, // directory exists - syncFromCode: (project_id, user_id, file_name, line, column, callback = (error, pdfPositions) ->) -> - # If LaTeX was run in a virtual environment, the file path that synctex expects - # might not match the file path on the host. The .synctex.gz file however, will be accessed - # wherever it is on the host. - compileName = getCompileName(project_id, user_id) - base_dir = Settings.path.synctexBaseDir(compileName) - file_path = base_dir + "/" + file_name - compileDir = getCompileDir(project_id, user_id) - synctex_path = "#{base_dir}/output.pdf" - command = ["code", synctex_path, file_path, line, column] - fse.ensureDir compileDir, (error) -> - if error? - logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code" - return callback(error) - CompileManager._runSynctex project_id, user_id, command, (error, stdout) -> - return callback(error) if error? - logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, command:command, stdout: stdout, "synctex code output" - callback null, CompileManager._parseSynctexFromCodeOutput(stdout) + syncFromCode(project_id, user_id, file_name, line, column, callback) { + // If LaTeX was run in a virtual environment, the file path that synctex expects + // might not match the file path on the host. The .synctex.gz file however, will be accessed + // wherever it is on the host. + if (callback == null) { callback = function(error, pdfPositions) {}; } + const compileName = getCompileName(project_id, user_id); + const base_dir = Settings.path.synctexBaseDir(compileName); + const file_path = base_dir + "/" + file_name; + const compileDir = getCompileDir(project_id, user_id); + const synctex_path = `${base_dir}/output.pdf`; + const command = ["code", synctex_path, file_path, line, column]; + return fse.ensureDir(compileDir, function(error) { + if (error != null) { + logger.err({error, project_id, user_id, file_name}, "error ensuring dir for sync from code"); + return callback(error); + } + return CompileManager._runSynctex(project_id, user_id, command, function(error, stdout) { + if (error != null) { return callback(error); } + logger.log({project_id, user_id, file_name, line, column, command, stdout}, "synctex code output"); + return callback(null, CompileManager._parseSynctexFromCodeOutput(stdout)); + }); + }); + }, - syncFromPdf: (project_id, user_id, page, h, v, callback = (error, filePositions) ->) -> - compileName = getCompileName(project_id, user_id) - compileDir = getCompileDir(project_id, user_id) - base_dir = Settings.path.synctexBaseDir(compileName) - synctex_path = "#{base_dir}/output.pdf" - command = ["pdf", synctex_path, page, h, v] - fse.ensureDir compileDir, (error) -> - if error? - logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync to code" - return callback(error) - CompileManager._runSynctex project_id, user_id, command, (error, stdout) -> - return callback(error) if error? 
- logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output" - callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir) + syncFromPdf(project_id, user_id, page, h, v, callback) { + if (callback == null) { callback = function(error, filePositions) {}; } + const compileName = getCompileName(project_id, user_id); + const compileDir = getCompileDir(project_id, user_id); + const base_dir = Settings.path.synctexBaseDir(compileName); + const synctex_path = `${base_dir}/output.pdf`; + const command = ["pdf", synctex_path, page, h, v]; + return fse.ensureDir(compileDir, function(error) { + if (error != null) { + logger.err({error, project_id, user_id, file_name}, "error ensuring dir for sync to code"); + return callback(error); + } + return CompileManager._runSynctex(project_id, user_id, command, function(error, stdout) { + if (error != null) { return callback(error); } + logger.log({project_id, user_id, page, h, v, stdout}, "synctex pdf output"); + return callback(null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)); + }); + }); + }, - _checkFileExists: (path, callback = (error) ->) -> - synctexDir = Path.dirname(path) - synctexFile = Path.join(synctexDir, "output.synctex.gz") - fs.stat synctexDir, (error, stats) -> - if error?.code is 'ENOENT' - return callback(new Errors.NotFoundError("called synctex with no output directory")) - return callback(error) if error? - fs.stat synctexFile, (error, stats) -> - if error?.code is 'ENOENT' - return callback(new Errors.NotFoundError("called synctex with no output file")) - return callback(error) if error? - return callback(new Error("not a file")) if not stats?.isFile() - callback() + _checkFileExists(path, callback) { + if (callback == null) { callback = function(error) {}; } + const synctexDir = Path.dirname(path); + const synctexFile = Path.join(synctexDir, "output.synctex.gz"); + return fs.stat(synctexDir, function(error, stats) { + if ((error != null ? error.code : undefined) === 'ENOENT') { + return callback(new Errors.NotFoundError("called synctex with no output directory")); + } + if (error != null) { return callback(error); } + return fs.stat(synctexFile, function(error, stats) { + if ((error != null ? error.code : undefined) === 'ENOENT') { + return callback(new Errors.NotFoundError("called synctex with no output file")); + } + if (error != null) { return callback(error); } + if (!(stats != null ? stats.isFile() : undefined)) { return callback(new Error("not a file")); } + return callback(); + }); + }); + }, - _runSynctex: (project_id, user_id, command, callback = (error, stdout) ->) -> - seconds = 1000 + _runSynctex(project_id, user_id, command, callback) { + if (callback == null) { callback = function(error, stdout) {}; } + const seconds = 1000; - command.unshift("/opt/synctex") + command.unshift("/opt/synctex"); - directory = getCompileDir(project_id, user_id) - timeout = 60 * 1000 # increased to allow for large projects - compileName = getCompileName(project_id, user_id) - CommandRunner.run compileName, command, directory, Settings.clsi?.docker.image, timeout, {}, (error, output) -> - if error? 
- logger.err err:error, command:command, project_id:project_id, user_id:user_id, "error running synctex" - return callback(error) - callback(null, output.stdout) + const directory = getCompileDir(project_id, user_id); + const timeout = 60 * 1000; // increased to allow for large projects + const compileName = getCompileName(project_id, user_id); + return CommandRunner.run(compileName, command, directory, Settings.clsi != null ? Settings.clsi.docker.image : undefined, timeout, {}, function(error, output) { + if (error != null) { + logger.err({err:error, command, project_id, user_id}, "error running synctex"); + return callback(error); + } + return callback(null, output.stdout); + }); + }, - _parseSynctexFromCodeOutput: (output) -> - results = [] - for line in output.split("\n") - [node, page, h, v, width, height] = line.split("\t") - if node == "NODE" - results.push { - page: parseInt(page, 10) - h: parseFloat(h) - v: parseFloat(v) - height: parseFloat(height) + _parseSynctexFromCodeOutput(output) { + const results = []; + for (let line of Array.from(output.split("\n"))) { + const [node, page, h, v, width, height] = Array.from(line.split("\t")); + if (node === "NODE") { + results.push({ + page: parseInt(page, 10), + h: parseFloat(h), + v: parseFloat(v), + height: parseFloat(height), width: parseFloat(width) - } - return results - - _parseSynctexFromPdfOutput: (output, base_dir) -> - results = [] - for line in output.split("\n") - [node, file_path, line, column] = line.split("\t") - if node == "NODE" - file = file_path.slice(base_dir.length + 1) - results.push { - file: file - line: parseInt(line, 10) - column: parseInt(column, 10) - } - return results - - - wordcount: (project_id, user_id, file_name, image, callback = (error, pdfPositions) ->) -> - logger.log project_id:project_id, user_id:user_id, file_name:file_name, image:image, "running wordcount" - file_path = "$COMPILE_DIR/" + file_name - command = [ "texcount", '-nocol', '-inc', file_path, "-out=" + file_path + ".wc"] - compileDir = getCompileDir(project_id, user_id) - timeout = 60 * 1000 - compileName = getCompileName(project_id, user_id) - fse.ensureDir compileDir, (error) -> - if error? - logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code" - return callback(error) - CommandRunner.run compileName, command, compileDir, image, timeout, {}, (error) -> - return callback(error) if error? - fs.readFile compileDir + "/" + file_name + ".wc", "utf-8", (err, stdout) -> - if err? 
- #call it node_err so sentry doesn't use random path error as unique id so it can't be ignored - logger.err node_err:err, command:command, compileDir:compileDir, project_id:project_id, user_id:user_id, "error reading word count output" - return callback(err) - results = CompileManager._parseWordcountFromOutput(stdout) - logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results" - callback null, results - - _parseWordcountFromOutput: (output) -> - results = { - encode: "" - textWords: 0 - headWords: 0 - outside: 0 - headers: 0 - elements: 0 - mathInline: 0 - mathDisplay: 0 - errors: 0 - messages: "" + }); + } } - for line in output.split("\n") - [data, info] = line.split(":") - if data.indexOf("Encoding") > -1 - results['encode'] = info.trim() - if data.indexOf("in text") > -1 - results['textWords'] = parseInt(info, 10) - if data.indexOf("in head") > -1 - results['headWords'] = parseInt(info, 10) - if data.indexOf("outside") > -1 - results['outside'] = parseInt(info, 10) - if data.indexOf("of head") > -1 - results['headers'] = parseInt(info, 10) - if data.indexOf("Number of floats/tables/figures") > -1 - results['elements'] = parseInt(info, 10) - if data.indexOf("Number of math inlines") > -1 - results['mathInline'] = parseInt(info, 10) - if data.indexOf("Number of math displayed") > -1 - results['mathDisplay'] = parseInt(info, 10) - if data is "(errors" # errors reported as (errors:123) - results['errors'] = parseInt(info, 10) - if line.indexOf("!!! ") > -1 # errors logged as !!! message !!! - results['messages'] += line + "\n" - return results + return results; + }, + + _parseSynctexFromPdfOutput(output, base_dir) { + const results = []; + for (let line of Array.from(output.split("\n"))) { + let column, file_path, node; + [node, file_path, line, column] = Array.from(line.split("\t")); + if (node === "NODE") { + const file = file_path.slice(base_dir.length + 1); + results.push({ + file, + line: parseInt(line, 10), + column: parseInt(column, 10) + }); + } + } + return results; + }, + + + wordcount(project_id, user_id, file_name, image, callback) { + if (callback == null) { callback = function(error, pdfPositions) {}; } + logger.log({project_id, user_id, file_name, image}, "running wordcount"); + const file_path = `$COMPILE_DIR/${file_name}`; + const command = [ "texcount", '-nocol', '-inc', file_path, `-out=${file_path}.wc`]; + const compileDir = getCompileDir(project_id, user_id); + const timeout = 60 * 1000; + const compileName = getCompileName(project_id, user_id); + return fse.ensureDir(compileDir, function(error) { + if (error != null) { + logger.err({error, project_id, user_id, file_name}, "error ensuring dir for sync from code"); + return callback(error); + } + return CommandRunner.run(compileName, command, compileDir, image, timeout, {}, function(error) { + if (error != null) { return callback(error); } + return fs.readFile(compileDir + "/" + file_name + ".wc", "utf-8", function(err, stdout) { + if (err != null) { + //call it node_err so sentry doesn't use random path error as unique id so it can't be ignored + logger.err({node_err:err, command, compileDir, project_id, user_id}, "error reading word count output"); + return callback(err); + } + const results = CompileManager._parseWordcountFromOutput(stdout); + logger.log({project_id, user_id, wordcount: results}, "word count results"); + return callback(null, results); + }); + }); + }); + }, + + _parseWordcountFromOutput(output) { + const results = { + encode: "", + textWords: 0, + headWords: 0, + 
outside: 0, + headers: 0, + elements: 0, + mathInline: 0, + mathDisplay: 0, + errors: 0, + messages: "" + }; + for (let line of Array.from(output.split("\n"))) { + const [data, info] = Array.from(line.split(":")); + if (data.indexOf("Encoding") > -1) { + results['encode'] = info.trim(); + } + if (data.indexOf("in text") > -1) { + results['textWords'] = parseInt(info, 10); + } + if (data.indexOf("in head") > -1) { + results['headWords'] = parseInt(info, 10); + } + if (data.indexOf("outside") > -1) { + results['outside'] = parseInt(info, 10); + } + if (data.indexOf("of head") > -1) { + results['headers'] = parseInt(info, 10); + } + if (data.indexOf("Number of floats/tables/figures") > -1) { + results['elements'] = parseInt(info, 10); + } + if (data.indexOf("Number of math inlines") > -1) { + results['mathInline'] = parseInt(info, 10); + } + if (data.indexOf("Number of math displayed") > -1) { + results['mathDisplay'] = parseInt(info, 10); + } + if (data === "(errors") { // errors reported as (errors:123) + results['errors'] = parseInt(info, 10); + } + if (line.indexOf("!!! ") > -1) { // errors logged as !!! message !!! + results['messages'] += line + "\n"; + } + } + return results; + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/app/coffee/ContentTypeMapper.js b/app/coffee/ContentTypeMapper.js index 68b2d14..c57057f 100644 --- a/app/coffee/ContentTypeMapper.js +++ b/app/coffee/ContentTypeMapper.js @@ -1,24 +1,28 @@ -Path = require 'path' +let ContentTypeMapper; +const Path = require('path'); -# here we coerce html, css and js to text/plain, -# otherwise choose correct mime type based on file extension, -# falling back to octet-stream -module.exports = ContentTypeMapper = - map: (path) -> - switch Path.extname(path) - when '.txt', '.html', '.js', '.css', '.svg' - return 'text/plain' - when '.csv' - return 'text/csv' - when '.pdf' - return 'application/pdf' - when '.png' - return 'image/png' - when '.jpg', '.jpeg' - return 'image/jpeg' - when '.tiff' - return 'image/tiff' - when '.gif' - return 'image/gif' - else - return 'application/octet-stream' +// here we coerce html, css and js to text/plain, +// otherwise choose correct mime type based on file extension, +// falling back to octet-stream +module.exports = (ContentTypeMapper = { + map(path) { + switch (Path.extname(path)) { + case '.txt': case '.html': case '.js': case '.css': case '.svg': + return 'text/plain'; + case '.csv': + return 'text/csv'; + case '.pdf': + return 'application/pdf'; + case '.png': + return 'image/png'; + case '.jpg': case '.jpeg': + return 'image/jpeg'; + case '.tiff': + return 'image/tiff'; + case '.gif': + return 'image/gif'; + default: + return 'application/octet-stream'; + } + } +}); diff --git a/app/coffee/DbQueue.js b/app/coffee/DbQueue.js index a3593fd..0f1f8cf 100644 --- a/app/coffee/DbQueue.js +++ b/app/coffee/DbQueue.js @@ -1,13 +1,16 @@ -async = require "async" -Settings = require "settings-sharelatex" -logger = require("logger-sharelatex") -queue = async.queue((task, cb)-> - task(cb) - , Settings.parallelSqlQueryLimit) +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const async = require("async"); +const Settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); +const queue = 
async.queue((task, cb)=> task(cb) + , Settings.parallelSqlQueryLimit); -queue.drain = ()-> - logger.debug('all items have been processed') +queue.drain = ()=> logger.debug('all items have been processed'); module.exports = - queue: queue + {queue}; diff --git a/app/coffee/DockerLockManager.js b/app/coffee/DockerLockManager.js index bf90f02..9c7deff 100644 --- a/app/coffee/DockerLockManager.js +++ b/app/coffee/DockerLockManager.js @@ -1,56 +1,84 @@ -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let LockManager; +const logger = require("logger-sharelatex"); -LockState = {} # locks for docker container operations, by container name +const LockState = {}; // locks for docker container operations, by container name -module.exports = LockManager = +module.exports = (LockManager = { - MAX_LOCK_HOLD_TIME: 15000 # how long we can keep a lock - MAX_LOCK_WAIT_TIME: 10000 # how long we wait for a lock - LOCK_TEST_INTERVAL: 1000 # retry time + MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock + MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock + LOCK_TEST_INTERVAL: 1000, // retry time - tryLock: (key, callback = (err, gotLock) ->) -> - existingLock = LockState[key] - if existingLock? # the lock is already taken, check how old it is - lockAge = Date.now() - existingLock.created - if lockAge < LockManager.MAX_LOCK_HOLD_TIME - return callback(null, false) # we didn't get the lock, bail out - else - logger.error {key: key, lock: existingLock, age:lockAge}, "taking old lock by force" - # take the lock - LockState[key] = lockValue = {created: Date.now()} - callback(null, true, lockValue) + tryLock(key, callback) { + let lockValue; + if (callback == null) { callback = function(err, gotLock) {}; } + const existingLock = LockState[key]; + if (existingLock != null) { // the lock is already taken, check how old it is + const lockAge = Date.now() - existingLock.created; + if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) { + return callback(null, false); // we didn't get the lock, bail out + } else { + logger.error({key, lock: existingLock, age:lockAge}, "taking old lock by force"); + } + } + // take the lock + LockState[key] = (lockValue = {created: Date.now()}); + return callback(null, true, lockValue); + }, - getLock: (key, callback = (error, lockValue) ->) -> - startTime = Date.now() - do attempt = () -> - LockManager.tryLock key, (error, gotLock, lockValue) -> - return callback(error) if error? 
- if gotLock - callback(null, lockValue) - else if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME - e = new Error("Lock timeout") - e.key = key - return callback(e) - else - setTimeout attempt, LockManager.LOCK_TEST_INTERVAL + getLock(key, callback) { + let attempt; + if (callback == null) { callback = function(error, lockValue) {}; } + const startTime = Date.now(); + return (attempt = () => + LockManager.tryLock(key, function(error, gotLock, lockValue) { + if (error != null) { return callback(error); } + if (gotLock) { + return callback(null, lockValue); + } else if ((Date.now() - startTime) > LockManager.MAX_LOCK_WAIT_TIME) { + const e = new Error("Lock timeout"); + e.key = key; + return callback(e); + } else { + return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL); + } + }) + )(); + }, - releaseLock: (key, lockValue, callback = (error) ->) -> - existingLock = LockState[key] - if existingLock is lockValue # lockValue is an object, so we can test by reference - delete LockState[key] # our lock, so we can free it - callback() - else if existingLock? # lock exists but doesn't match ours - logger.error {key:key, lock: existingLock}, "tried to release lock taken by force" - callback() - else - logger.error {key:key, lock: existingLock}, "tried to release lock that has gone" - callback() + releaseLock(key, lockValue, callback) { + if (callback == null) { callback = function(error) {}; } + const existingLock = LockState[key]; + if (existingLock === lockValue) { // lockValue is an object, so we can test by reference + delete LockState[key]; // our lock, so we can free it + return callback(); + } else if (existingLock != null) { // lock exists but doesn't match ours + logger.error({key, lock: existingLock}, "tried to release lock taken by force"); + return callback(); + } else { + logger.error({key, lock: existingLock}, "tried to release lock that has gone"); + return callback(); + } + }, - runWithLock: (key, runner, callback = ( (error) -> )) -> - LockManager.getLock key, (error, lockValue) -> - return callback(error) if error? - runner (error1, args...) -> - LockManager.releaseLock key, lockValue, (error2) -> - error = error1 or error2 - return callback(error) if error? - callback(null, args...) 
+ runWithLock(key, runner, callback) { + if (callback == null) { callback = function(error) {}; } + return LockManager.getLock(key, function(error, lockValue) { + if (error != null) { return callback(error); } + return runner((error1, ...args) => + LockManager.releaseLock(key, lockValue, function(error2) { + error = error1 || error2; + if (error != null) { return callback(error); } + return callback(null, ...Array.from(args)); + }) + ); + }); + } +}); diff --git a/app/coffee/DockerRunner.js b/app/coffee/DockerRunner.js index 6ea929f..ab78419 100644 --- a/app/coffee/DockerRunner.js +++ b/app/coffee/DockerRunner.js @@ -1,358 +1,475 @@ -Settings = require "settings-sharelatex" -logger = require "logger-sharelatex" -Docker = require("dockerode") -dockerode = new Docker() -crypto = require "crypto" -async = require "async" -LockManager = require "./DockerLockManager" -fs = require "fs" -Path = require 'path' -_ = require "underscore" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DockerRunner, oneHour; +const Settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); +const Docker = require("dockerode"); +const dockerode = new Docker(); +const crypto = require("crypto"); +const async = require("async"); +const LockManager = require("./DockerLockManager"); +const fs = require("fs"); +const Path = require('path'); +const _ = require("underscore"); -logger.info "using docker runner" +logger.info("using docker runner"); -usingSiblingContainers = () -> - Settings?.path?.sandboxedCompilesHostDir? +const usingSiblingContainers = () => __guard__(Settings != null ? Settings.path : undefined, x => x.sandboxedCompilesHostDir) != null; -module.exports = DockerRunner = - ERR_NOT_DIRECTORY: new Error("not a directory") - ERR_TERMINATED: new Error("terminated") - ERR_EXITED: new Error("exited") - ERR_TIMED_OUT: new Error("container timed out") +module.exports = (DockerRunner = { + ERR_NOT_DIRECTORY: new Error("not a directory"), + ERR_TERMINATED: new Error("terminated"), + ERR_EXITED: new Error("exited"), + ERR_TIMED_OUT: new Error("container timed out"), - run: (project_id, command, directory, image, timeout, environment, callback = (error, output) ->) -> + run(project_id, command, directory, image, timeout, environment, callback) { - if usingSiblingContainers() - _newPath = Settings.path.sandboxedCompilesHostDir - logger.log {path: _newPath}, "altering bind path for sibling containers" - # Server Pro, example: - # '/var/lib/sharelatex/data/compiles/' - # ... becomes ... - # '/opt/sharelatex_data/data/compiles/' - directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory)) + let name; + if (callback == null) { callback = function(error, output) {}; } + if (usingSiblingContainers()) { + const _newPath = Settings.path.sandboxedCompilesHostDir; + logger.log({path: _newPath}, "altering bind path for sibling containers"); + // Server Pro, example: + // '/var/lib/sharelatex/data/compiles/' + // ... becomes ... 
+ // '/opt/sharelatex_data/data/compiles/' + directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory)); + } - volumes = {} - volumes[directory] = "/compile" + const volumes = {}; + volumes[directory] = "/compile"; - command = (arg.toString().replace?('$COMPILE_DIR', "/compile") for arg in command) - if !image? - image = Settings.clsi.docker.image + command = (Array.from(command).map((arg) => __guardMethod__(arg.toString(), 'replace', o => o.replace('$COMPILE_DIR', "/compile")))); + if ((image == null)) { + ({ image } = Settings.clsi.docker); + } - if Settings.texliveImageNameOveride? - img = image.split("/") - image = "#{Settings.texliveImageNameOveride}/#{img[2]}" + if (Settings.texliveImageNameOveride != null) { + const img = image.split("/"); + image = `${Settings.texliveImageNameOveride}/${img[2]}`; + } - options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment) - fingerprint = DockerRunner._fingerprintContainer(options) - options.name = name = "project-#{project_id}-#{fingerprint}" + const options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment); + const fingerprint = DockerRunner._fingerprintContainer(options); + options.name = (name = `project-${project_id}-${fingerprint}`); - # logOptions = _.clone(options) - # logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging" - logger.log project_id: project_id, "running docker container" - DockerRunner._runAndWaitForContainer options, volumes, timeout, (error, output) -> - if error?.message?.match("HTTP code is 500") - logger.log err: error, project_id: project_id, "error running container so destroying and retrying" - DockerRunner.destroyContainer name, null, true, (error) -> - return callback(error) if error? - DockerRunner._runAndWaitForContainer options, volumes, timeout, callback - else - callback(error, output) + // logOptions = _.clone(options) + // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging" + logger.log({project_id}, "running docker container"); + DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(error, output) { + if (__guard__(error != null ? error.message : undefined, x => x.match("HTTP code is 500"))) { + logger.log({err: error, project_id}, "error running container so destroying and retrying"); + return DockerRunner.destroyContainer(name, null, true, function(error) { + if (error != null) { return callback(error); } + return DockerRunner._runAndWaitForContainer(options, volumes, timeout, callback); + }); + } else { + return callback(error, output); + } + }); - return name # pass back the container name to allow it to be killed + return name; + }, // pass back the container name to allow it to be killed - kill: (container_id, callback = (error) ->) -> - logger.log container_id: container_id, "sending kill signal to container" - container = dockerode.getContainer(container_id) - container.kill (error) -> - if error? and error?.message?.match?(/Cannot kill container .* is not running/) - logger.warn err: error, container_id: container_id, "container not running, continuing" - error = null - if error? 
- logger.error err: error, container_id: container_id, "error killing container" - return callback(error) - else - callback() + kill(container_id, callback) { + if (callback == null) { callback = function(error) {}; } + logger.log({container_id}, "sending kill signal to container"); + const container = dockerode.getContainer(container_id); + return container.kill(function(error) { + if ((error != null) && __guardMethod__(error != null ? error.message : undefined, 'match', o => o.match(/Cannot kill container .* is not running/))) { + logger.warn({err: error, container_id}, "container not running, continuing"); + error = null; + } + if (error != null) { + logger.error({err: error, container_id}, "error killing container"); + return callback(error); + } else { + return callback(); + } + }); + }, - _runAndWaitForContainer: (options, volumes, timeout, _callback = (error, output) ->) -> - callback = (args...) -> - _callback(args...) - # Only call the callback once - _callback = () -> + _runAndWaitForContainer(options, volumes, timeout, _callback) { + if (_callback == null) { _callback = function(error, output) {}; } + const callback = function(...args) { + _callback(...Array.from(args || [])); + // Only call the callback once + return _callback = function() {}; + }; - name = options.name + const { name } = options; - streamEnded = false - containerReturned = false - output = {} + let streamEnded = false; + let containerReturned = false; + let output = {}; - callbackIfFinished = () -> - if streamEnded and containerReturned - callback(null, output) + const callbackIfFinished = function() { + if (streamEnded && containerReturned) { + return callback(null, output); + } + }; - attachStreamHandler = (error, _output) -> - return callback(error) if error? - output = _output - streamEnded = true - callbackIfFinished() + const attachStreamHandler = function(error, _output) { + if (error != null) { return callback(error); } + output = _output; + streamEnded = true; + return callbackIfFinished(); + }; - DockerRunner.startContainer options, volumes, attachStreamHandler, (error, containerId) -> - return callback(error) if error? + return DockerRunner.startContainer(options, volumes, attachStreamHandler, function(error, containerId) { + if (error != null) { return callback(error); } - DockerRunner.waitForContainer name, timeout, (error, exitCode) -> - return callback(error) if error? 
- if exitCode is 137 # exit status from kill -9 - err = DockerRunner.ERR_TERMINATED - err.terminated = true - return callback(err) - if exitCode is 1 # exit status from chktex - err = DockerRunner.ERR_EXITED - err.code = exitCode - return callback(err) - containerReturned = true - options?.HostConfig?.SecurityOpt = null #small log line - logger.log err:err, exitCode:exitCode, options:options, "docker container has exited" - callbackIfFinished() - - _getContainerOptions: (command, image, volumes, timeout, environment) -> - timeoutInSeconds = timeout / 1000 - - dockerVolumes = {} - for hostVol, dockerVol of volumes - dockerVolumes[dockerVol] = {} - - if volumes[hostVol].slice(-3).indexOf(":r") == -1 - volumes[hostVol] = "#{dockerVol}:rw" - - # merge settings and environment parameter - env = {} - for src in [Settings.clsi.docker.env, environment or {}] - env[key] = value for key, value of src - # set the path based on the image year - if m = image.match /:([0-9]+)\.[0-9]+/ - year = m[1] - else - year = "2014" - env['PATH'] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/#{year}/bin/x86_64-linux/" - options = - "Cmd" : command, - "Image" : image - "Volumes" : dockerVolumes - "WorkingDir" : "/compile" - "NetworkDisabled" : true - "Memory" : 1024 * 1024 * 1024 * 1024 # 1 Gb - "User" : Settings.clsi.docker.user - "Env" : ("#{key}=#{value}" for key, value of env) # convert the environment hash to an array - "HostConfig" : - "Binds": ("#{hostVol}:#{dockerVol}" for hostVol, dockerVol of volumes) - "LogConfig": {"Type": "none", "Config": {}} - "Ulimits": [{'Name': 'cpu', 'Soft': timeoutInSeconds+5, 'Hard': timeoutInSeconds+10}] - "CapDrop": "ALL" - "SecurityOpt": ["no-new-privileges"] - - - if Settings.path?.synctexBinHostPath? - options["HostConfig"]["Binds"].push("#{Settings.path.synctexBinHostPath}:/opt/synctex:ro") - - if Settings.clsi.docker.seccomp_profile? - options.HostConfig.SecurityOpt.push "seccomp=#{Settings.clsi.docker.seccomp_profile}" - - return options - - _fingerprintContainer: (containerOptions) -> - # Yay, Hashing! - json = JSON.stringify(containerOptions) - return crypto.createHash("md5").update(json).digest("hex") - - startContainer: (options, volumes, attachStreamHandler, callback) -> - LockManager.runWithLock options.name, (releaseLock) -> - # Check that volumes exist before starting the container. - # When a container is started with volume pointing to a - # non-existent directory then docker creates the directory but - # with root ownership. - DockerRunner._checkVolumes options, volumes, (err) -> - return releaseLock(err) if err? - DockerRunner._startContainer options, volumes, attachStreamHandler, releaseLock - , callback - - # Check that volumes exist and are directories - _checkVolumes: (options, volumes, callback = (error, containerName) ->) -> - if usingSiblingContainers() - # Server Pro, with sibling-containers active, skip checks - return callback(null) - - checkVolume = (path, cb) -> - fs.stat path, (err, stats) -> - return cb(err) if err? 
- return cb(DockerRunner.ERR_NOT_DIRECTORY) if not stats?.isDirectory() - cb() - jobs = [] - for vol of volumes - do (vol) -> - jobs.push (cb) -> checkVolume(vol, cb) - async.series jobs, callback - - _startContainer: (options, volumes, attachStreamHandler, callback = ((error, output) ->)) -> - callback = _.once(callback) - name = options.name - - logger.log {container_name: name}, "starting container" - container = dockerode.getContainer(name) - - createAndStartContainer = -> - dockerode.createContainer options, (error, container) -> - return callback(error) if error? - startExistingContainer() - - startExistingContainer = -> - DockerRunner.attachToContainer options.name, attachStreamHandler, (error)-> - return callback(error) if error? - container.start (error) -> - if error? and error?.statusCode != 304 #already running - return callback(error) - else - callback() - - container.inspect (error, stats)-> - if error?.statusCode == 404 - createAndStartContainer() - else if error? - logger.err {container_name: name, error:error}, "unable to inspect container to start" - return callback(error) - else - startExistingContainer() - - - attachToContainer: (containerId, attachStreamHandler, attachStartCallback) -> - container = dockerode.getContainer(containerId) - container.attach {stdout: 1, stderr: 1, stream: 1}, (error, stream) -> - if error? - logger.error err: error, container_id: containerId, "error attaching to container" - return attachStartCallback(error) - else - attachStartCallback() - - - logger.log container_id: containerId, "attached to container" - - MAX_OUTPUT = 1024 * 1024 # limit output to 1MB - createStringOutputStream = (name) -> - return { - data: "" - overflowed: false - write: (data) -> - return if @overflowed - if @data.length < MAX_OUTPUT - @data += data - else - logger.error container_id: containerId, length: @data.length, maxLen: MAX_OUTPUT, "#{name} exceeds max size" - @data += "(...truncated at #{MAX_OUTPUT} chars...)" - @overflowed = true - # kill container if too much output - # docker.containers.kill(containerId, () ->) + return DockerRunner.waitForContainer(name, timeout, function(error, exitCode) { + let err; + if (error != null) { return callback(error); } + if (exitCode === 137) { // exit status from kill -9 + err = DockerRunner.ERR_TERMINATED; + err.terminated = true; + return callback(err); } + if (exitCode === 1) { // exit status from chktex + err = DockerRunner.ERR_EXITED; + err.code = exitCode; + return callback(err); + } + containerReturned = true; + __guard__(options != null ? 
options.HostConfig : undefined, x => x.SecurityOpt = null); //small log line + logger.log({err, exitCode, options}, "docker container has exited"); + return callbackIfFinished(); + }); + }); + }, - stdout = createStringOutputStream "stdout" - stderr = createStringOutputStream "stderr" + _getContainerOptions(command, image, volumes, timeout, environment) { + let m, year; + let key, value, hostVol, dockerVol; + const timeoutInSeconds = timeout / 1000; - container.modem.demuxStream(stream, stdout, stderr) + const dockerVolumes = {}; + for (hostVol in volumes) { + dockerVol = volumes[hostVol]; + dockerVolumes[dockerVol] = {}; - stream.on "error", (err) -> - logger.error err: err, container_id: containerId, "error reading from container stream" + if (volumes[hostVol].slice(-3).indexOf(":r") === -1) { + volumes[hostVol] = `${dockerVol}:rw`; + } + } - stream.on "end", () -> - attachStreamHandler null, {stdout: stdout.data, stderr: stderr.data} + // merge settings and environment parameter + const env = {}; + for (let src of [Settings.clsi.docker.env, environment || {}]) { + for (key in src) { value = src[key]; env[key] = value; } + } + // set the path based on the image year + if ((m = image.match(/:([0-9]+)\.[0-9]+/))) { + year = m[1]; + } else { + year = "2014"; + } + env['PATH'] = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`; + const options = { + "Cmd" : command, + "Image" : image, + "Volumes" : dockerVolumes, + "WorkingDir" : "/compile", + "NetworkDisabled" : true, + "Memory" : 1024 * 1024 * 1024 * 1024, // 1 Gb + "User" : Settings.clsi.docker.user, + "Env" : (((() => { + const result = []; + for (key in env) { + value = env[key]; + result.push(`${key}=${value}`); + } + return result; + })())), // convert the environment hash to an array + "HostConfig" : { + "Binds": (((() => { + const result1 = []; + for (hostVol in volumes) { + dockerVol = volumes[hostVol]; + result1.push(`${hostVol}:${dockerVol}`); + } + return result1; + })())), + "LogConfig": {"Type": "none", "Config": {}}, + "Ulimits": [{'Name': 'cpu', 'Soft': timeoutInSeconds+5, 'Hard': timeoutInSeconds+10}], + "CapDrop": "ALL", + "SecurityOpt": ["no-new-privileges"] + } + }; - waitForContainer: (containerId, timeout, _callback = (error, exitCode) ->) -> - callback = (args...) -> - _callback(args...) - # Only call the callback once - _callback = () -> - container = dockerode.getContainer(containerId) + if ((Settings.path != null ? Settings.path.synctexBinHostPath : undefined) != null) { + options["HostConfig"]["Binds"].push(`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`); + } - timedOut = false - timeoutId = setTimeout () -> - timedOut = true - logger.log container_id: containerId, "timeout reached, killing container" - container.kill(() ->) - , timeout + if (Settings.clsi.docker.seccomp_profile != null) { + options.HostConfig.SecurityOpt.push(`seccomp=${Settings.clsi.docker.seccomp_profile}`); + } - logger.log container_id: containerId, "waiting for docker container" - container.wait (error, res) -> - if error? 
- clearTimeout timeoutId - logger.error err: error, container_id: containerId, "error waiting for container" - return callback(error) - if timedOut - logger.log containerId: containerId, "docker container timed out" - error = DockerRunner.ERR_TIMED_OUT - error.timedout = true - callback error - else - clearTimeout timeoutId - logger.log container_id: containerId, exitCode: res.StatusCode, "docker container returned" - callback null, res.StatusCode + return options; + }, - destroyContainer: (containerName, containerId, shouldForce, callback = (error) ->) -> - # We want the containerName for the lock and, ideally, the - # containerId to delete. There is a bug in the docker.io module - # where if you delete by name and there is an error, it throws an - # async exception, but if you delete by id it just does a normal - # error callback. We fall back to deleting by name if no id is - # supplied. - LockManager.runWithLock containerName, (releaseLock) -> - DockerRunner._destroyContainer containerId or containerName, shouldForce, releaseLock - , callback + _fingerprintContainer(containerOptions) { + // Yay, Hashing! + const json = JSON.stringify(containerOptions); + return crypto.createHash("md5").update(json).digest("hex"); + }, - _destroyContainer: (containerId, shouldForce, callback = (error) ->) -> - logger.log container_id: containerId, "destroying docker container" - container = dockerode.getContainer(containerId) - container.remove {force: shouldForce == true}, (error) -> - if error? and error?.statusCode == 404 - logger.warn err: error, container_id: containerId, "container not found, continuing" - error = null - if error? - logger.error err: error, container_id: containerId, "error destroying container" - else - logger.log container_id: containerId, "destroyed container" - callback(error) + startContainer(options, volumes, attachStreamHandler, callback) { + return LockManager.runWithLock(options.name, releaseLock => + // Check that volumes exist before starting the container. + // When a container is started with volume pointing to a + // non-existent directory then docker creates the directory but + // with root ownership. + DockerRunner._checkVolumes(options, volumes, function(err) { + if (err != null) { return releaseLock(err); } + return DockerRunner._startContainer(options, volumes, attachStreamHandler, releaseLock); + }) + + , callback); + }, - # handle expiry of docker containers + // Check that volumes exist and are directories + _checkVolumes(options, volumes, callback) { + if (callback == null) { callback = function(error, containerName) {}; } + if (usingSiblingContainers()) { + // Server Pro, with sibling-containers active, skip checks + return callback(null); + } - MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge or oneHour = 60 * 60 * 1000 + const checkVolume = (path, cb) => + fs.stat(path, function(err, stats) { + if (err != null) { return cb(err); } + if (!(stats != null ? 
stats.isDirectory() : undefined)) { return cb(DockerRunner.ERR_NOT_DIRECTORY); } + return cb(); + }) + ; + const jobs = []; + for (let vol in volumes) { + (vol => jobs.push(cb => checkVolume(vol, cb)))(vol); + } + return async.series(jobs, callback); + }, - examineOldContainer: (container, callback = (error, name, id, ttl)->) -> - name = container.Name or container.Names?[0] - created = container.Created * 1000 # creation time is returned in seconds - now = Date.now() - age = now - created - maxAge = DockerRunner.MAX_CONTAINER_AGE - ttl = maxAge - age - logger.log {containerName: name, created: created, now: now, age: age, maxAge: maxAge, ttl: ttl}, "checking whether to destroy container" - callback(null, name, container.Id, ttl) + _startContainer(options, volumes, attachStreamHandler, callback) { + if (callback == null) { callback = function(error, output) {}; } + callback = _.once(callback); + const { name } = options; - destroyOldContainers: (callback = (error) ->) -> - dockerode.listContainers all: true, (error, containers) -> - return callback(error) if error? - jobs = [] - for container in containers or [] - do (container) -> - DockerRunner.examineOldContainer container, (err, name, id, ttl) -> - if name.slice(0, 9) == '/project-' && ttl <= 0 - jobs.push (cb) -> - DockerRunner.destroyContainer name, id, false, () -> cb() - # Ignore errors because some containers get stuck but - # will be destroyed next time - async.series jobs, callback + logger.log({container_name: name}, "starting container"); + const container = dockerode.getContainer(name); - startContainerMonitor: () -> - logger.log {maxAge: DockerRunner.MAX_CONTAINER_AGE}, "starting container expiry" - # randomise the start time - randomDelay = Math.floor(Math.random() * 5 * 60 * 1000) - setTimeout () -> - setInterval () -> - DockerRunner.destroyOldContainers() - , oneHour = 60 * 60 * 1000 - , randomDelay + const createAndStartContainer = () => + dockerode.createContainer(options, function(error, container) { + if (error != null) { return callback(error); } + return startExistingContainer(); + }) + ; -DockerRunner.startContainerMonitor() + var startExistingContainer = () => + DockerRunner.attachToContainer(options.name, attachStreamHandler, function(error){ + if (error != null) { return callback(error); } + return container.start(function(error) { + if ((error != null) && ((error != null ? error.statusCode : undefined) !== 304)) { //already running + return callback(error); + } else { + return callback(); + } + }); + }) + ; + + return container.inspect(function(error, stats){ + if ((error != null ? 
error.statusCode : undefined) === 404) { + return createAndStartContainer(); + } else if (error != null) { + logger.err({container_name: name, error}, "unable to inspect container to start"); + return callback(error); + } else { + return startExistingContainer(); + } + }); + }, + + + attachToContainer(containerId, attachStreamHandler, attachStartCallback) { + const container = dockerode.getContainer(containerId); + return container.attach({stdout: 1, stderr: 1, stream: 1}, function(error, stream) { + if (error != null) { + logger.error({err: error, container_id: containerId}, "error attaching to container"); + return attachStartCallback(error); + } else { + attachStartCallback(); + } + + + logger.log({container_id: containerId}, "attached to container"); + + const MAX_OUTPUT = 1024 * 1024; // limit output to 1MB + const createStringOutputStream = function(name) { + return { + data: "", + overflowed: false, + write(data) { + if (this.overflowed) { return; } + if (this.data.length < MAX_OUTPUT) { + return this.data += data; + } else { + logger.error({container_id: containerId, length: this.data.length, maxLen: MAX_OUTPUT}, `${name} exceeds max size`); + this.data += `(...truncated at ${MAX_OUTPUT} chars...)`; + return this.overflowed = true; + } + } + // kill container if too much output + // docker.containers.kill(containerId, () ->) + }; + }; + + const stdout = createStringOutputStream("stdout"); + const stderr = createStringOutputStream("stderr"); + + container.modem.demuxStream(stream, stdout, stderr); + + stream.on("error", err => logger.error({err, container_id: containerId}, "error reading from container stream")); + + return stream.on("end", () => attachStreamHandler(null, {stdout: stdout.data, stderr: stderr.data})); + }); + }, + + waitForContainer(containerId, timeout, _callback) { + if (_callback == null) { _callback = function(error, exitCode) {}; } + const callback = function(...args) { + _callback(...Array.from(args || [])); + // Only call the callback once + return _callback = function() {}; + }; + + const container = dockerode.getContainer(containerId); + + let timedOut = false; + const timeoutId = setTimeout(function() { + timedOut = true; + logger.log({container_id: containerId}, "timeout reached, killing container"); + return container.kill(function() {}); + } + , timeout); + + logger.log({container_id: containerId}, "waiting for docker container"); + return container.wait(function(error, res) { + if (error != null) { + clearTimeout(timeoutId); + logger.error({err: error, container_id: containerId}, "error waiting for container"); + return callback(error); + } + if (timedOut) { + logger.log({containerId}, "docker container timed out"); + error = DockerRunner.ERR_TIMED_OUT; + error.timedout = true; + return callback(error); + } else { + clearTimeout(timeoutId); + logger.log({container_id: containerId, exitCode: res.StatusCode}, "docker container returned"); + return callback(null, res.StatusCode); + } + }); + }, + + destroyContainer(containerName, containerId, shouldForce, callback) { + // We want the containerName for the lock and, ideally, the + // containerId to delete. There is a bug in the docker.io module + // where if you delete by name and there is an error, it throws an + // async exception, but if you delete by id it just does a normal + // error callback. We fall back to deleting by name if no id is + // supplied. 
+ if (callback == null) { callback = function(error) {}; } + return LockManager.runWithLock(containerName, releaseLock => DockerRunner._destroyContainer(containerId || containerName, shouldForce, releaseLock) + , callback); + }, + + _destroyContainer(containerId, shouldForce, callback) { + if (callback == null) { callback = function(error) {}; } + logger.log({container_id: containerId}, "destroying docker container"); + const container = dockerode.getContainer(containerId); + return container.remove({force: shouldForce === true}, function(error) { + if ((error != null) && ((error != null ? error.statusCode : undefined) === 404)) { + logger.warn({err: error, container_id: containerId}, "container not found, continuing"); + error = null; + } + if (error != null) { + logger.error({err: error, container_id: containerId}, "error destroying container"); + } else { + logger.log({container_id: containerId}, "destroyed container"); + } + return callback(error); + }); + }, + + // handle expiry of docker containers + + MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000), + + examineOldContainer(container, callback) { + if (callback == null) { callback = function(error, name, id, ttl){}; } + const name = container.Name || (container.Names != null ? container.Names[0] : undefined); + const created = container.Created * 1000; // creation time is returned in seconds + const now = Date.now(); + const age = now - created; + const maxAge = DockerRunner.MAX_CONTAINER_AGE; + const ttl = maxAge - age; + logger.log({containerName: name, created, now, age, maxAge, ttl}, "checking whether to destroy container"); + return callback(null, name, container.Id, ttl); + }, + + destroyOldContainers(callback) { + if (callback == null) { callback = function(error) {}; } + return dockerode.listContainers({all: true}, function(error, containers) { + if (error != null) { return callback(error); } + const jobs = []; + for (let container of Array.from(containers || [])) { + (container => + DockerRunner.examineOldContainer(container, function(err, name, id, ttl) { + if ((name.slice(0, 9) === '/project-') && (ttl <= 0)) { + return jobs.push(cb => DockerRunner.destroyContainer(name, id, false, () => cb())); + } + }) + )(container); + } + // Ignore errors because some containers get stuck but + // will be destroyed next time + return async.series(jobs, callback); + }); + }, + + startContainerMonitor() { + logger.log({maxAge: DockerRunner.MAX_CONTAINER_AGE}, "starting container expiry"); + // randomise the start time + const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000); + return setTimeout(() => + setInterval(() => DockerRunner.destroyOldContainers() + , (oneHour = 60 * 60 * 1000)) + + , randomDelay); + } +}); + +DockerRunner.startContainerMonitor(); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} +function __guardMethod__(obj, methodName, transform) { + if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { + return transform(obj, methodName); + } else { + return undefined; + } +} \ No newline at end of file diff --git a/app/coffee/DraftModeManager.js b/app/coffee/DraftModeManager.js index 2f9e931..8ddbbd0 100644 --- a/app/coffee/DraftModeManager.js +++ b/app/coffee/DraftModeManager.js @@ -1,24 +1,37 @@ -fs = require "fs" -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DraftModeManager; +const fs = require("fs"); +const logger = require("logger-sharelatex"); -module.exports = DraftModeManager = - injectDraftMode: (filename, callback = (error) ->) -> - fs.readFile filename, "utf8", (error, content) -> - return callback(error) if error? - # avoid adding draft mode more than once - if content?.indexOf("\\documentclass\[draft") >= 0 - return callback() - modified_content = DraftModeManager._injectDraftOption content - logger.log { - content: content.slice(0,1024), # \documentclass is normally v near the top +module.exports = (DraftModeManager = { + injectDraftMode(filename, callback) { + if (callback == null) { callback = function(error) {}; } + return fs.readFile(filename, "utf8", function(error, content) { + if (error != null) { return callback(error); } + // avoid adding draft mode more than once + if ((content != null ? content.indexOf("\\documentclass\[draft") : undefined) >= 0) { + return callback(); + } + const modified_content = DraftModeManager._injectDraftOption(content); + logger.log({ + content: content.slice(0,1024), // \documentclass is normally v near the top modified_content: modified_content.slice(0,1024), filename - }, "injected draft class" - fs.writeFile filename, modified_content, callback + }, "injected draft class"); + return fs.writeFile(filename, modified_content, callback); + }); + }, - _injectDraftOption: (content) -> - content - # With existing options (must be first, otherwise both are applied) + _injectDraftOption(content) { + return content + // With existing options (must be first, otherwise both are applied) .replace(/\\documentclass\[/g, "\\documentclass[draft,") - # Without existing options - .replace(/\\documentclass\{/g, "\\documentclass[draft]{") + // Without existing options + .replace(/\\documentclass\{/g, "\\documentclass[draft]{"); + } +}); diff --git a/app/coffee/Errors.js b/app/coffee/Errors.js index b375513..3a9ef22 100644 --- a/app/coffee/Errors.js +++ b/app/coffee/Errors.js @@ -1,25 +1,30 @@ -NotFoundError = (message) -> - error = new Error(message) - error.name = "NotFoundError" - error.__proto__ = NotFoundError.prototype - return error -NotFoundError.prototype.__proto__ = Error.prototype +let Errors; +var NotFoundError = function(message) { + const error = new Error(message); + error.name = "NotFoundError"; + error.__proto__ = NotFoundError.prototype; + return error; +}; +NotFoundError.prototype.__proto__ = Error.prototype; -FilesOutOfSyncError = (message) -> - error = new Error(message) - error.name = "FilesOutOfSyncError" - error.__proto__ = FilesOutOfSyncError.prototype - return error -FilesOutOfSyncError.prototype.__proto__ = Error.prototype +var FilesOutOfSyncError = function(message) { + const error = new 
Error(message); + error.name = "FilesOutOfSyncError"; + error.__proto__ = FilesOutOfSyncError.prototype; + return error; +}; +FilesOutOfSyncError.prototype.__proto__ = Error.prototype; -AlreadyCompilingError = (message) -> - error = new Error(message) - error.name = "AlreadyCompilingError" - error.__proto__ = AlreadyCompilingError.prototype - return error -AlreadyCompilingError.prototype.__proto__ = Error.prototype +var AlreadyCompilingError = function(message) { + const error = new Error(message); + error.name = "AlreadyCompilingError"; + error.__proto__ = AlreadyCompilingError.prototype; + return error; +}; +AlreadyCompilingError.prototype.__proto__ = Error.prototype; -module.exports = Errors = - NotFoundError: NotFoundError - FilesOutOfSyncError: FilesOutOfSyncError - AlreadyCompilingError: AlreadyCompilingError +module.exports = (Errors = { + NotFoundError, + FilesOutOfSyncError, + AlreadyCompilingError +}); diff --git a/app/coffee/LatexRunner.js b/app/coffee/LatexRunner.js index 29433f8..4c83e08 100644 --- a/app/coffee/LatexRunner.js +++ b/app/coffee/LatexRunner.js @@ -1,95 +1,123 @@ -Path = require "path" -Settings = require "settings-sharelatex" -logger = require "logger-sharelatex" -Metrics = require "./Metrics" -CommandRunner = require "./CommandRunner" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let LatexRunner; +const Path = require("path"); +const Settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); +const Metrics = require("./Metrics"); +const CommandRunner = require("./CommandRunner"); -ProcessTable = {} # table of currently running jobs (pids or docker container names) +const ProcessTable = {}; // table of currently running jobs (pids or docker container names) -module.exports = LatexRunner = - runLatex: (project_id, options, callback = (error) ->) -> - {directory, mainFile, compiler, timeout, image, environment, flags} = options - compiler ||= "pdflatex" - timeout ||= 60000 # milliseconds +module.exports = (LatexRunner = { + runLatex(project_id, options, callback) { + let command; + if (callback == null) { callback = function(error) {}; } + let {directory, mainFile, compiler, timeout, image, environment, flags} = options; + if (!compiler) { compiler = "pdflatex"; } + if (!timeout) { timeout = 60000; } // milliseconds - logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, flags:flags, "starting compile" + logger.log({directory, compiler, timeout, mainFile, environment, flags}, "starting compile"); - # We want to run latexmk on the tex file which we will automatically - # generate from the Rtex/Rmd/md file. - mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex") + // We want to run latexmk on the tex file which we will automatically + // generate from the Rtex/Rmd/md file. 
+ mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex"); - if compiler == "pdflatex" - command = LatexRunner._pdflatexCommand mainFile, flags - else if compiler == "latex" - command = LatexRunner._latexCommand mainFile, flags - else if compiler == "xelatex" - command = LatexRunner._xelatexCommand mainFile, flags - else if compiler == "lualatex" - command = LatexRunner._lualatexCommand mainFile, flags - else - return callback new Error("unknown compiler: #{compiler}") + if (compiler === "pdflatex") { + command = LatexRunner._pdflatexCommand(mainFile, flags); + } else if (compiler === "latex") { + command = LatexRunner._latexCommand(mainFile, flags); + } else if (compiler === "xelatex") { + command = LatexRunner._xelatexCommand(mainFile, flags); + } else if (compiler === "lualatex") { + command = LatexRunner._lualatexCommand(mainFile, flags); + } else { + return callback(new Error(`unknown compiler: ${compiler}`)); + } - if Settings.clsi?.strace - command = ["strace", "-o", "strace", "-ff"].concat(command) + if (Settings.clsi != null ? Settings.clsi.strace : undefined) { + command = ["strace", "-o", "strace", "-ff"].concat(command); + } - id = "#{project_id}" # record running project under this id + const id = `${project_id}`; // record running project under this id - ProcessTable[id] = CommandRunner.run project_id, command, directory, image, timeout, environment, (error, output) -> - delete ProcessTable[id] - return callback(error) if error? - runs = output?.stderr?.match(/^Run number \d+ of .*latex/mg)?.length or 0 - failed = if output?.stdout?.match(/^Latexmk: Errors/m)? then 1 else 0 - # counters from latexmk output - stats = {} - stats["latexmk-errors"] = failed - stats["latex-runs"] = runs - stats["latex-runs-with-errors"] = if failed then runs else 0 - stats["latex-runs-#{runs}"] = 1 - stats["latex-runs-with-errors-#{runs}"] = if failed then 1 else 0 - # timing information from /usr/bin/time - timings = {} - stderr = output?.stderr - timings["cpu-percent"] = stderr?.match(/Percent of CPU this job got: (\d+)/m)?[1] or 0 - timings["cpu-time"] = stderr?.match(/User time.*: (\d+.\d+)/m)?[1] or 0 - timings["sys-time"] = stderr?.match(/System time.*: (\d+.\d+)/m)?[1] or 0 - callback error, output, stats, timings + return ProcessTable[id] = CommandRunner.run(project_id, command, directory, image, timeout, environment, function(error, output) { + delete ProcessTable[id]; + if (error != null) { return callback(error); } + const runs = __guard__(__guard__(output != null ? output.stderr : undefined, x1 => x1.match(/^Run number \d+ of .*latex/mg)), x => x.length) || 0; + const failed = (__guard__(output != null ? output.stdout : undefined, x2 => x2.match(/^Latexmk: Errors/m)) != null) ? 1 : 0; + // counters from latexmk output + const stats = {}; + stats["latexmk-errors"] = failed; + stats["latex-runs"] = runs; + stats["latex-runs-with-errors"] = failed ? runs : 0; + stats[`latex-runs-${runs}`] = 1; + stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0; + // timing information from /usr/bin/time + const timings = {}; + const stderr = output != null ? output.stderr : undefined; + timings["cpu-percent"] = __guard__(stderr != null ? stderr.match(/Percent of CPU this job got: (\d+)/m) : undefined, x3 => x3[1]) || 0; + timings["cpu-time"] = __guard__(stderr != null ? stderr.match(/User time.*: (\d+.\d+)/m) : undefined, x4 => x4[1]) || 0; + timings["sys-time"] = __guard__(stderr != null ? 
stderr.match(/System time.*: (\d+.\d+)/m) : undefined, x5 => x5[1]) || 0; + return callback(error, output, stats, timings); + }); + }, - killLatex: (project_id, callback = (error) ->) -> - id = "#{project_id}" - logger.log {id:id}, "killing running compile" - if not ProcessTable[id]? - logger.warn {id}, "no such project to kill" - return callback(null) - else - CommandRunner.kill ProcessTable[id], callback + killLatex(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + const id = `${project_id}`; + logger.log({id}, "killing running compile"); + if ((ProcessTable[id] == null)) { + logger.warn({id}, "no such project to kill"); + return callback(null); + } else { + return CommandRunner.kill(ProcessTable[id], callback); + } + }, - _latexmkBaseCommand: (flags) -> - args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1","-interaction=batchmode"] - if flags - args = args.concat(flags) - (Settings?.clsi?.latexmkCommandPrefix || []).concat(args) + _latexmkBaseCommand(flags) { + let args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1","-interaction=batchmode"]; + if (flags) { + args = args.concat(flags); + } + return (__guard__(Settings != null ? Settings.clsi : undefined, x => x.latexmkCommandPrefix) || []).concat(args); + }, - _pdflatexCommand: (mainFile, flags) -> - LatexRunner._latexmkBaseCommand(flags).concat [ + _pdflatexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ "-pdf", Path.join("$COMPILE_DIR", mainFile) - ] + ]); + }, - _latexCommand: (mainFile, flags) -> - LatexRunner._latexmkBaseCommand(flags).concat [ + _latexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ "-pdfdvi", Path.join("$COMPILE_DIR", mainFile) - ] + ]); + }, - _xelatexCommand: (mainFile, flags) -> - LatexRunner._latexmkBaseCommand(flags).concat [ + _xelatexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ "-xelatex", Path.join("$COMPILE_DIR", mainFile) - ] + ]); + }, - _lualatexCommand: (mainFile, flags) -> - LatexRunner._latexmkBaseCommand(flags).concat [ + _lualatexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ "-lualatex", Path.join("$COMPILE_DIR", mainFile) - ] + ]); + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} \ No newline at end of file diff --git a/app/coffee/LocalCommandRunner.js b/app/coffee/LocalCommandRunner.js index c5ef3c6..405c51b 100644 --- a/app/coffee/LocalCommandRunner.js +++ b/app/coffee/LocalCommandRunner.js @@ -1,48 +1,66 @@ -spawn = require("child_process").spawn -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let CommandRunner; +const { spawn } = require("child_process"); +const logger = require("logger-sharelatex"); -logger.info "using standard command runner" +logger.info("using standard command runner"); -module.exports = CommandRunner = - run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) -> - command = (arg.toString().replace('$COMPILE_DIR', directory) for arg in command) - logger.log project_id: project_id, command: command, directory: directory, "running command" - logger.warn "timeouts and sandboxing are not enabled with CommandRunner" +module.exports = (CommandRunner = { + run(project_id, command, directory, image, timeout, environment, callback) { + let key, value; + if (callback == null) { callback = function(error) {}; } + command = (Array.from(command).map((arg) => arg.toString().replace('$COMPILE_DIR', directory))); + logger.log({project_id, command, directory}, "running command"); + logger.warn("timeouts and sandboxing are not enabled with CommandRunner"); - # merge environment settings - env = {} - env[key] = value for key, value of process.env - env[key] = value for key, value of environment + // merge environment settings + const env = {}; + for (key in process.env) { value = process.env[key]; env[key] = value; } + for (key in environment) { value = environment[key]; env[key] = value; } - # run command as detached process so it has its own process group (which can be killed if needed) - proc = spawn command[0], command.slice(1), cwd: directory, env: env + // run command as detached process so it has its own process group (which can be killed if needed) + const proc = spawn(command[0], command.slice(1), {cwd: directory, env}); - stdout = "" - proc.stdout.on "data", (data)-> - stdout += data + let stdout = ""; + proc.stdout.on("data", data=> stdout += data); - proc.on "error", (err)-> - logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command" - callback(err) + proc.on("error", function(err){ + logger.err({err, project_id, command, directory}, "error running command"); + return callback(err); + }); - proc.on "close", (code, signal) -> - logger.info code:code, signal:signal, project_id:project_id, "command exited" - if signal is 'SIGTERM' # signal from kill method below - err = new Error("terminated") - err.terminated = true - return callback(err) - else if code is 1 # exit status from chktex - err = new Error("exited") - err.code = code - return callback(err) - else - callback(null, {"stdout": stdout}) + proc.on("close", function(code, signal) { + let err; + logger.info({code, signal, project_id}, "command exited"); + if (signal === 'SIGTERM') { // signal from kill method below + err = new Error("terminated"); + err.terminated = true; + return callback(err); + } else if (code === 1) { // exit status from chktex + err = new Error("exited"); + err.code = 
code; + return callback(err); + } else { + return callback(null, {"stdout": stdout}); + } + }); - return proc.pid # return process id to allow job to be killed if necessary + return proc.pid; + }, // return process id to allow job to be killed if necessary - kill: (pid, callback = (error) ->) -> - try - process.kill -pid # kill all processes in group - catch err - return callback(err) - callback() + kill(pid, callback) { + if (callback == null) { callback = function(error) {}; } + try { + process.kill(-pid); // kill all processes in group + } catch (err) { + return callback(err); + } + return callback(); + } +}); diff --git a/app/coffee/LockManager.js b/app/coffee/LockManager.js index 5d9fe26..2405e8a 100644 --- a/app/coffee/LockManager.js +++ b/app/coffee/LockManager.js @@ -1,31 +1,50 @@ -Settings = require('settings-sharelatex') -logger = require "logger-sharelatex" -Lockfile = require('lockfile') # from https://github.com/npm/lockfile -Errors = require "./Errors" -fs = require("fs") -Path = require("path") -module.exports = LockManager = - LOCK_TEST_INTERVAL: 1000 # 50ms between each test of the lock - MAX_LOCK_WAIT_TIME: 15000 # 10s maximum time to spend trying to get the lock - LOCK_STALE: 5*60*1000 # 5 mins time until lock auto expires +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let LockManager; +const Settings = require('settings-sharelatex'); +const logger = require("logger-sharelatex"); +const Lockfile = require('lockfile'); // from https://github.com/npm/lockfile +const Errors = require("./Errors"); +const fs = require("fs"); +const Path = require("path"); +module.exports = (LockManager = { + LOCK_TEST_INTERVAL: 1000, // 50ms between each test of the lock + MAX_LOCK_WAIT_TIME: 15000, // 10s maximum time to spend trying to get the lock + LOCK_STALE: 5*60*1000, // 5 mins time until lock auto expires - runWithLock: (path, runner, callback = ((error) ->)) -> - lockOpts = - wait: @MAX_LOCK_WAIT_TIME - pollPeriod: @LOCK_TEST_INTERVAL - stale: @LOCK_STALE - Lockfile.lock path, lockOpts, (error) -> - if error?.code is 'EEXIST' - return callback new Errors.AlreadyCompilingError("compile in progress") - else if error? - fs.lstat path, (statLockErr, statLock)-> - fs.lstat Path.dirname(path), (statDirErr, statDir)-> - fs.readdir Path.dirname(path), (readdirErr, readdirDir)-> - logger.err error:error, path:path, statLock:statLock, statLockErr:statLockErr, statDir:statDir, statDirErr: statDirErr, readdirErr:readdirErr, readdirDir:readdirDir, "unable to get lock" - return callback(error) - else - runner (error1, args...) -> - Lockfile.unlock path, (error2) -> - error = error1 or error2 - return callback(error) if error? - callback(null, args...) + runWithLock(path, runner, callback) { + if (callback == null) { callback = function(error) {}; } + const lockOpts = { + wait: this.MAX_LOCK_WAIT_TIME, + pollPeriod: this.LOCK_TEST_INTERVAL, + stale: this.LOCK_STALE + }; + return Lockfile.lock(path, lockOpts, function(error) { + if ((error != null ? 
error.code : undefined) === 'EEXIST') { + return callback(new Errors.AlreadyCompilingError("compile in progress")); + } else if (error != null) { + return fs.lstat(path, (statLockErr, statLock)=> + fs.lstat(Path.dirname(path), (statDirErr, statDir)=> + fs.readdir(Path.dirname(path), function(readdirErr, readdirDir){ + logger.err({error, path, statLock, statLockErr, statDir, statDirErr, readdirErr, readdirDir}, "unable to get lock"); + return callback(error); + }) + ) + ); + } else { + return runner((error1, ...args) => + Lockfile.unlock(path, function(error2) { + error = error1 || error2; + if (error != null) { return callback(error); } + return callback(null, ...Array.from(args)); + }) + ); + } + }); + } +}); diff --git a/app/coffee/Metrics.js b/app/coffee/Metrics.js index 9965b25..8148d66 100644 --- a/app/coffee/Metrics.js +++ b/app/coffee/Metrics.js @@ -1,2 +1,2 @@ -module.exports = require "metrics-sharelatex" +module.exports = require("metrics-sharelatex"); diff --git a/app/coffee/OutputCacheManager.js b/app/coffee/OutputCacheManager.js index 5ef92ec..6d03a10 100644 --- a/app/coffee/OutputCacheManager.js +++ b/app/coffee/OutputCacheManager.js @@ -1,199 +1,270 @@ -async = require "async" -fs = require "fs" -fse = require "fs-extra" -Path = require "path" -logger = require "logger-sharelatex" -_ = require "underscore" -Settings = require "settings-sharelatex" -crypto = require "crypto" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS104: Avoid inline assignments + * DS204: Change includes calls to have a more natural evaluation order + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let OutputCacheManager; +const async = require("async"); +const fs = require("fs"); +const fse = require("fs-extra"); +const Path = require("path"); +const logger = require("logger-sharelatex"); +const _ = require("underscore"); +const Settings = require("settings-sharelatex"); +const crypto = require("crypto"); -OutputFileOptimiser = require "./OutputFileOptimiser" +const OutputFileOptimiser = require("./OutputFileOptimiser"); -module.exports = OutputCacheManager = - CACHE_SUBDIR: '.cache/clsi' - ARCHIVE_SUBDIR: '.archive/clsi' - # build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes - # for backwards compatibility, make the randombytes part optional - BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/ - CACHE_LIMIT: 2 # maximum number of cache directories - CACHE_AGE: 60*60*1000 # up to one hour old +module.exports = (OutputCacheManager = { + CACHE_SUBDIR: '.cache/clsi', + ARCHIVE_SUBDIR: '.archive/clsi', + // build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes + // for backwards compatibility, make the randombytes part optional + BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/, + CACHE_LIMIT: 2, // maximum number of cache directories + CACHE_AGE: 60*60*1000, // up to one hour old - path: (buildId, file) -> - # used by static server, given build id return '.cache/clsi/buildId' - if buildId.match OutputCacheManager.BUILD_REGEX - return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file) - else - # for invalid build id, return top level - return file + path(buildId, file) { + // used by static server, given build id return '.cache/clsi/buildId' + if (buildId.match(OutputCacheManager.BUILD_REGEX)) { + return 
Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file); + } else { + // for invalid build id, return top level + return file; + } + }, - generateBuildId: (callback = (error, buildId) ->) -> - # generate a secure build id from Date.now() and 8 random bytes in hex - crypto.randomBytes 8, (err, buf) -> - return callback(err) if err? - random = buf.toString('hex') - date = Date.now().toString(16) - callback err, "#{date}-#{random}" + generateBuildId(callback) { + // generate a secure build id from Date.now() and 8 random bytes in hex + if (callback == null) { callback = function(error, buildId) {}; } + return crypto.randomBytes(8, function(err, buf) { + if (err != null) { return callback(err); } + const random = buf.toString('hex'); + const date = Date.now().toString(16); + return callback(err, `${date}-${random}`); + }); + }, - saveOutputFiles: (outputFiles, compileDir, callback = (error) ->) -> - OutputCacheManager.generateBuildId (err, buildId) -> - return callback(err) if err? - OutputCacheManager.saveOutputFilesInBuildDir outputFiles, compileDir, buildId, callback + saveOutputFiles(outputFiles, compileDir, callback) { + if (callback == null) { callback = function(error) {}; } + return OutputCacheManager.generateBuildId(function(err, buildId) { + if (err != null) { return callback(err); } + return OutputCacheManager.saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback); + }); + }, - saveOutputFilesInBuildDir: (outputFiles, compileDir, buildId, callback = (error) ->) -> - # make a compileDir/CACHE_SUBDIR/build_id directory and - # copy all the output files into it - cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR) - # Put the files into a new cache subdirectory - cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId) - # Is it a per-user compile? check if compile directory is PROJECTID-USERID - perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/) + saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) { + // make a compileDir/CACHE_SUBDIR/build_id directory and + // copy all the output files into it + if (callback == null) { callback = function(error) {}; } + const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR); + // Put the files into a new cache subdirectory + const cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId); + // Is it a per-user compile? check if compile directory is PROJECTID-USERID + const perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/); - # Archive logs in background - if Settings.clsi?.archive_logs or Settings.clsi?.strace - OutputCacheManager.archiveLogs outputFiles, compileDir, buildId, (err) -> - if err? - logger.warn err:err, "erroring archiving log files" + // Archive logs in background + if ((Settings.clsi != null ? Settings.clsi.archive_logs : undefined) || (Settings.clsi != null ? Settings.clsi.strace : undefined)) { + OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(err) { + if (err != null) { + return logger.warn({err}, "erroring archiving log files"); + } + }); + } - # make the new cache directory - fse.ensureDir cacheDir, (err) -> - if err? 
- logger.error err: err, directory: cacheDir, "error creating cache directory" - callback(err, outputFiles) - else - # copy all the output files into the new cache directory - results = [] - async.mapSeries outputFiles, (file, cb) -> - # don't send dot files as output, express doesn't serve them - if OutputCacheManager._fileIsHidden(file.path) - logger.debug compileDir: compileDir, path: file.path, "ignoring dotfile in output" - return cb() - # copy other files into cache directory if valid - newFile = _.clone(file) - [src, dst] = [Path.join(compileDir, file.path), Path.join(cacheDir, file.path)] - OutputCacheManager._checkFileIsSafe src, (err, isSafe) -> - return cb(err) if err? - if !isSafe - return cb() - OutputCacheManager._checkIfShouldCopy src, (err, shouldCopy) -> - return cb(err) if err? - if !shouldCopy - return cb() - OutputCacheManager._copyFile src, dst, (err) -> - return cb(err) if err? - newFile.build = buildId # attach a build id if we cached the file - results.push newFile - cb() - , (err) -> - if err? - # pass back the original files if we encountered *any* error - callback(err, outputFiles) - # clean up the directory we just created - fse.remove cacheDir, (err) -> - if err? - logger.error err: err, dir: cacheDir, "error removing cache dir after failure" - else - # pass back the list of new files in the cache - callback(err, results) - # let file expiry run in the background, expire all previous files if per-user - OutputCacheManager.expireOutputFiles cacheRoot, {keep: buildId, limit: if perUser then 1 else null} + // make the new cache directory + return fse.ensureDir(cacheDir, function(err) { + if (err != null) { + logger.error({err, directory: cacheDir}, "error creating cache directory"); + return callback(err, outputFiles); + } else { + // copy all the output files into the new cache directory + const results = []; + return async.mapSeries(outputFiles, function(file, cb) { + // don't send dot files as output, express doesn't serve them + if (OutputCacheManager._fileIsHidden(file.path)) { + logger.debug({compileDir, path: file.path}, "ignoring dotfile in output"); + return cb(); + } + // copy other files into cache directory if valid + const newFile = _.clone(file); + const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]); + return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) { + if (err != null) { return cb(err); } + if (!isSafe) { + return cb(); + } + return OutputCacheManager._checkIfShouldCopy(src, function(err, shouldCopy) { + if (err != null) { return cb(err); } + if (!shouldCopy) { + return cb(); + } + return OutputCacheManager._copyFile(src, dst, function(err) { + if (err != null) { return cb(err); } + newFile.build = buildId; // attach a build id if we cached the file + results.push(newFile); + return cb(); + }); + }); + }); + } + , function(err) { + if (err != null) { + // pass back the original files if we encountered *any* error + callback(err, outputFiles); + // clean up the directory we just created + return fse.remove(cacheDir, function(err) { + if (err != null) { + return logger.error({err, dir: cacheDir}, "error removing cache dir after failure"); + } + }); + } else { + // pass back the list of new files in the cache + callback(err, results); + // let file expiry run in the background, expire all previous files if per-user + return OutputCacheManager.expireOutputFiles(cacheRoot, {keep: buildId, limit: perUser ? 
1 : null}); + } + }); + } + }); + }, - archiveLogs: (outputFiles, compileDir, buildId, callback = (error) ->) -> - archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId) - logger.log {dir: archiveDir}, "archiving log files for project" - fse.ensureDir archiveDir, (err) -> - return callback(err) if err? - async.mapSeries outputFiles, (file, cb) -> - [src, dst] = [Path.join(compileDir, file.path), Path.join(archiveDir, file.path)] - OutputCacheManager._checkFileIsSafe src, (err, isSafe) -> - return cb(err) if err? - return cb() if !isSafe - OutputCacheManager._checkIfShouldArchive src, (err, shouldArchive) -> - return cb(err) if err? - return cb() if !shouldArchive - OutputCacheManager._copyFile src, dst, cb - , callback + archiveLogs(outputFiles, compileDir, buildId, callback) { + if (callback == null) { callback = function(error) {}; } + const archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId); + logger.log({dir: archiveDir}, "archiving log files for project"); + return fse.ensureDir(archiveDir, function(err) { + if (err != null) { return callback(err); } + return async.mapSeries(outputFiles, function(file, cb) { + const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]); + return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) { + if (err != null) { return cb(err); } + if (!isSafe) { return cb(); } + return OutputCacheManager._checkIfShouldArchive(src, function(err, shouldArchive) { + if (err != null) { return cb(err); } + if (!shouldArchive) { return cb(); } + return OutputCacheManager._copyFile(src, dst, cb); + }); + }); + } + , callback); + }); + }, - expireOutputFiles: (cacheRoot, options, callback = (error) ->) -> - # look in compileDir for build dirs and delete if > N or age of mod time > T - fs.readdir cacheRoot, (err, results) -> - if err? - return callback(null) if err.code == 'ENOENT' # cache directory is empty - logger.error err: err, project_id: cacheRoot, "error clearing cache" - return callback(err) + expireOutputFiles(cacheRoot, options, callback) { + // look in compileDir for build dirs and delete if > N or age of mod time > T + if (callback == null) { callback = function(error) {}; } + return fs.readdir(cacheRoot, function(err, results) { + if (err != null) { + if (err.code === 'ENOENT') { return callback(null); } // cache directory is empty + logger.error({err, project_id: cacheRoot}, "error clearing cache"); + return callback(err); + } - dirs = results.sort().reverse() - currentTime = Date.now() + const dirs = results.sort().reverse(); + const currentTime = Date.now(); - isExpired = (dir, index) -> - return false if options?.keep == dir - # remove any directories over the requested (non-null) limit - return true if options?.limit? and index > options.limit - # remove any directories over the hard limit - return true if index > OutputCacheManager.CACHE_LIMIT - # we can get the build time from the first part of the directory name DDDD-RRRR - # DDDD is date and RRRR is random bytes - dirTime = parseInt(dir.split('-')?[0], 16) - age = currentTime - dirTime - return age > OutputCacheManager.CACHE_AGE + const isExpired = function(dir, index) { + if ((options != null ? options.keep : undefined) === dir) { return false; } + // remove any directories over the requested (non-null) limit + if (((options != null ? 
options.limit : undefined) != null) && (index > options.limit)) { return true; } + // remove any directories over the hard limit + if (index > OutputCacheManager.CACHE_LIMIT) { return true; } + // we can get the build time from the first part of the directory name DDDD-RRRR + // DDDD is date and RRRR is random bytes + const dirTime = parseInt(__guard__(dir.split('-'), x => x[0]), 16); + const age = currentTime - dirTime; + return age > OutputCacheManager.CACHE_AGE; + }; - toRemove = _.filter(dirs, isExpired) + const toRemove = _.filter(dirs, isExpired); - removeDir = (dir, cb) -> - fse.remove Path.join(cacheRoot, dir), (err, result) -> - logger.log cache: cacheRoot, dir: dir, "removed expired cache dir" - if err? - logger.error err: err, dir: dir, "cache remove error" - cb(err, result) + const removeDir = (dir, cb) => + fse.remove(Path.join(cacheRoot, dir), function(err, result) { + logger.log({cache: cacheRoot, dir}, "removed expired cache dir"); + if (err != null) { + logger.error({err, dir}, "cache remove error"); + } + return cb(err, result); + }) + ; - async.eachSeries toRemove, (dir, cb) -> - removeDir dir, cb - , callback + return async.eachSeries(toRemove, (dir, cb) => removeDir(dir, cb) + , callback); + }); + }, - _fileIsHidden: (path) -> - return path?.match(/^\.|\/\./)? + _fileIsHidden(path) { + return ((path != null ? path.match(/^\.|\/\./) : undefined) != null); + }, - _checkFileIsSafe: (src, callback = (error, isSafe) ->) -> - # check if we have a valid file to copy into the cache - fs.stat src, (err, stats) -> - if err?.code is 'ENOENT' - logger.warn err: err, file: src, "file has disappeared before copying to build cache" - callback(err, false) - else if err? - # some other problem reading the file - logger.error err: err, file: src, "stat error for file in cache" - callback(err, false) - else if not stats.isFile() - # other filetype - reject it - logger.warn src: src, stat: stats, "nonfile output - refusing to copy to cache" - callback(null, false) - else - # it's a plain file, ok to copy - callback(null, true) + _checkFileIsSafe(src, callback) { + // check if we have a valid file to copy into the cache + if (callback == null) { callback = function(error, isSafe) {}; } + return fs.stat(src, function(err, stats) { + if ((err != null ? err.code : undefined) === 'ENOENT') { + logger.warn({err, file: src}, "file has disappeared before copying to build cache"); + return callback(err, false); + } else if (err != null) { + // some other problem reading the file + logger.error({err, file: src}, "stat error for file in cache"); + return callback(err, false); + } else if (!stats.isFile()) { + // other filetype - reject it + logger.warn({src, stat: stats}, "nonfile output - refusing to copy to cache"); + return callback(null, false); + } else { + // it's a plain file, ok to copy + return callback(null, true); + } + }); + }, - _copyFile: (src, dst, callback) -> - # copy output file into the cache - fse.copy src, dst, (err) -> - if err?.code is 'ENOENT' - logger.warn err: err, file: src, "file has disappeared when copying to build cache" - callback(err, false) - else if err? 
- logger.error err: err, src: src, dst: dst, "copy error for file in cache" - callback(err) - else - if Settings.clsi?.optimiseInDocker - # don't run any optimisations on the pdf when they are done - # in the docker container - callback() - else - # call the optimiser for the file too - OutputFileOptimiser.optimiseFile src, dst, callback + _copyFile(src, dst, callback) { + // copy output file into the cache + return fse.copy(src, dst, function(err) { + if ((err != null ? err.code : undefined) === 'ENOENT') { + logger.warn({err, file: src}, "file has disappeared when copying to build cache"); + return callback(err, false); + } else if (err != null) { + logger.error({err, src, dst}, "copy error for file in cache"); + return callback(err); + } else { + if ((Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined)) { + // don't run any optimisations on the pdf when they are done + // in the docker container + return callback(); + } else { + // call the optimiser for the file too + return OutputFileOptimiser.optimiseFile(src, dst, callback); + } + } + }); + }, - _checkIfShouldCopy: (src, callback = (err, shouldCopy) ->) -> - return callback(null, !Path.basename(src).match(/^strace/)) + _checkIfShouldCopy(src, callback) { + if (callback == null) { callback = function(err, shouldCopy) {}; } + return callback(null, !Path.basename(src).match(/^strace/)); + }, - _checkIfShouldArchive: (src, callback = (err, shouldCopy) ->) -> - if Path.basename(src).match(/^strace/) - return callback(null, true) - if Settings.clsi?.archive_logs and Path.basename(src) in ["output.log", "output.blg"] - return callback(null, true) - return callback(null, false) + _checkIfShouldArchive(src, callback) { + let needle; + if (callback == null) { callback = function(err, shouldCopy) {}; } + if (Path.basename(src).match(/^strace/)) { + return callback(null, true); + } + if ((Settings.clsi != null ? Settings.clsi.archive_logs : undefined) && (needle = Path.basename(src), ["output.log", "output.blg"].includes(needle))) { + return callback(null, true); + } + return callback(null, false); + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} \ No newline at end of file diff --git a/app/coffee/OutputFileFinder.js b/app/coffee/OutputFileFinder.js index 662440b..f0f837c 100644 --- a/app/coffee/OutputFileFinder.js +++ b/app/coffee/OutputFileFinder.js @@ -1,50 +1,78 @@ -async = require "async" -fs = require "fs" -Path = require "path" -spawn = require("child_process").spawn -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let OutputFileFinder; +const async = require("async"); +const fs = require("fs"); +const Path = require("path"); +const { spawn } = require("child_process"); +const logger = require("logger-sharelatex"); -module.exports = OutputFileFinder = - findOutputFiles: (resources, directory, callback = (error, outputFiles, allFiles) ->) -> - incomingResources = {} - for resource in resources - incomingResources[resource.path] = true +module.exports = (OutputFileFinder = { + findOutputFiles(resources, directory, callback) { + if (callback == null) { callback = function(error, outputFiles, allFiles) {}; } + const incomingResources = {}; + for (let resource of Array.from(resources)) { + incomingResources[resource.path] = true; + } - OutputFileFinder._getAllFiles directory, (error, allFiles = []) -> - if error? - logger.err err:error, "error finding all output files" - return callback(error) - outputFiles = [] - for file in allFiles - if !incomingResources[file] - outputFiles.push { - path: file - type: file.match(/\.([^\.]+)$/)?[1] - } - callback null, outputFiles, allFiles + return OutputFileFinder._getAllFiles(directory, function(error, allFiles) { + if (allFiles == null) { allFiles = []; } + if (error != null) { + logger.err({err:error}, "error finding all output files"); + return callback(error); + } + const outputFiles = []; + for (let file of Array.from(allFiles)) { + if (!incomingResources[file]) { + outputFiles.push({ + path: file, + type: __guard__(file.match(/\.([^\.]+)$/), x => x[1]) + }); + } + } + return callback(null, outputFiles, allFiles); + }); + }, - _getAllFiles: (directory, _callback = (error, fileList) ->) -> - callback = (error, fileList) -> - _callback(error, fileList) - _callback = () -> + _getAllFiles(directory, _callback) { + if (_callback == null) { _callback = function(error, fileList) {}; } + const callback = function(error, fileList) { + _callback(error, fileList); + return _callback = function() {}; + }; - # don't include clsi-specific files/directories in the output list - EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive","-o", "-name", ".project-*"] - args = [directory, "(", EXCLUDE_DIRS..., ")", "-prune", "-o", "-type", "f", "-print"] - logger.log args: args, "running find command" + // don't include clsi-specific files/directories in the output list + const EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive","-o", "-name", ".project-*"]; + const args = [directory, "(", ...Array.from(EXCLUDE_DIRS), ")", "-prune", "-o", "-type", "f", "-print"]; + logger.log({args}, "running find command"); - proc = spawn("find", args) - stdout = "" - proc.stdout.on "data", (chunk) -> - stdout += chunk.toString() - proc.on "error", callback - proc.on "close", (code) -> - if code != 0 - logger.warn {directory, code}, "find 
returned error, directory likely doesn't exist" - return callback null, [] - fileList = stdout.trim().split("\n") - fileList = fileList.map (file) -> - # Strip leading directory - path = Path.relative(directory, file) - return callback null, fileList + const proc = spawn("find", args); + let stdout = ""; + proc.stdout.on("data", chunk => stdout += chunk.toString()); + proc.on("error", callback); + return proc.on("close", function(code) { + if (code !== 0) { + logger.warn({directory, code}, "find returned error, directory likely doesn't exist"); + return callback(null, []); + } + let fileList = stdout.trim().split("\n"); + fileList = fileList.map(function(file) { + // Strip leading directory + let path; + return path = Path.relative(directory, file); + }); + return callback(null, fileList); + }); + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/app/coffee/OutputFileOptimiser.js b/app/coffee/OutputFileOptimiser.js index b702f36..f8302aa 100644 --- a/app/coffee/OutputFileOptimiser.js +++ b/app/coffee/OutputFileOptimiser.js @@ -1,55 +1,77 @@ -fs = require "fs" -Path = require "path" -spawn = require("child_process").spawn -logger = require "logger-sharelatex" -Metrics = require "./Metrics" -_ = require "underscore" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let OutputFileOptimiser; +const fs = require("fs"); +const Path = require("path"); +const { spawn } = require("child_process"); +const logger = require("logger-sharelatex"); +const Metrics = require("./Metrics"); +const _ = require("underscore"); -module.exports = OutputFileOptimiser = +module.exports = (OutputFileOptimiser = { - optimiseFile: (src, dst, callback = (error) ->) -> - # check output file (src) and see if we can optimise it, storing - # the result in the build directory (dst) - if src.match(/\/output\.pdf$/) - OutputFileOptimiser.checkIfPDFIsOptimised src, (err, isOptimised) -> - return callback(null) if err? or isOptimised - OutputFileOptimiser.optimisePDF src, dst, callback - else - callback (null) + optimiseFile(src, dst, callback) { + // check output file (src) and see if we can optimise it, storing + // the result in the build directory (dst) + if (callback == null) { callback = function(error) {}; } + if (src.match(/\/output\.pdf$/)) { + return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(err, isOptimised) { + if ((err != null) || isOptimised) { return callback(null); } + return OutputFileOptimiser.optimisePDF(src, dst, callback); + }); + } else { + return callback((null)); + } + }, - checkIfPDFIsOptimised: (file, callback) -> - SIZE = 16*1024 # check the header of the pdf - result = new Buffer(SIZE) - result.fill(0) # prevent leakage of uninitialised buffer - fs.open file, "r", (err, fd) -> - return callback(err) if err? - fs.read fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) -> - fs.close fd, (errClose) -> - return callback(errRead) if errRead? - return callback(errClose) if errReadClose? 
- isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0 - callback(null, isOptimised) + checkIfPDFIsOptimised(file, callback) { + const SIZE = 16*1024; // check the header of the pdf + const result = new Buffer(SIZE); + result.fill(0); // prevent leakage of uninitialised buffer + return fs.open(file, "r", function(err, fd) { + if (err != null) { return callback(err); } + return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) => + fs.close(fd, function(errClose) { + if (errRead != null) { return callback(errRead); } + if (typeof errReadClose !== 'undefined' && errReadClose !== null) { return callback(errClose); } + const isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0; + return callback(null, isOptimised); + }) + ); + }); + }, - optimisePDF: (src, dst, callback = (error) ->) -> - tmpOutput = dst + '.opt' - args = ["--linearize", src, tmpOutput] - logger.log args: args, "running qpdf command" + optimisePDF(src, dst, callback) { + if (callback == null) { callback = function(error) {}; } + const tmpOutput = dst + '.opt'; + const args = ["--linearize", src, tmpOutput]; + logger.log({args}, "running qpdf command"); - timer = new Metrics.Timer("qpdf") - proc = spawn("qpdf", args) - stdout = "" - proc.stdout.on "data", (chunk) -> - stdout += chunk.toString() - callback = _.once(callback) # avoid double call back for error and close event - proc.on "error", (err) -> - logger.warn {err, args}, "qpdf failed" - callback(null) # ignore the error - proc.on "close", (code) -> - timer.done() - if code != 0 - logger.warn {code, args}, "qpdf returned error" - return callback(null) # ignore the error - fs.rename tmpOutput, dst, (err) -> - if err? - logger.warn {tmpOutput, dst}, "failed to rename output of qpdf command" - callback(null) # ignore the error + const timer = new Metrics.Timer("qpdf"); + const proc = spawn("qpdf", args); + let stdout = ""; + proc.stdout.on("data", chunk => stdout += chunk.toString()); + callback = _.once(callback); // avoid double call back for error and close event + proc.on("error", function(err) { + logger.warn({err, args}, "qpdf failed"); + return callback(null); + }); // ignore the error + return proc.on("close", function(code) { + timer.done(); + if (code !== 0) { + logger.warn({code, args}, "qpdf returned error"); + return callback(null); // ignore the error + } + return fs.rename(tmpOutput, dst, function(err) { + if (err != null) { + logger.warn({tmpOutput, dst}, "failed to rename output of qpdf command"); + } + return callback(null); + }); + }); + } // ignore the error +}); diff --git a/app/coffee/ProjectPersistenceManager.js b/app/coffee/ProjectPersistenceManager.js index 4ea02bf..7b3d5ee 100644 --- a/app/coffee/ProjectPersistenceManager.js +++ b/app/coffee/ProjectPersistenceManager.js @@ -1,84 +1,117 @@ -UrlCache = require "./UrlCache" -CompileManager = require "./CompileManager" -db = require "./db" -dbQueue = require "./DbQueue" -async = require "async" -logger = require "logger-sharelatex" -oneDay = 24 * 60 * 60 * 1000 -Settings = require "settings-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ProjectPersistenceManager; +const UrlCache = require("./UrlCache"); +const CompileManager = require("./CompileManager"); +const db = require("./db"); +const 
dbQueue = require("./DbQueue"); +const async = require("async"); +const logger = require("logger-sharelatex"); +const oneDay = 24 * 60 * 60 * 1000; +const Settings = require("settings-sharelatex"); -module.exports = ProjectPersistenceManager = +module.exports = (ProjectPersistenceManager = { - EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5 + EXPIRY_TIMEOUT: Settings.project_cache_length_ms || (oneDay * 2.5), - markProjectAsJustAccessed: (project_id, callback = (error) ->) -> - job = (cb)-> - db.Project.findOrCreate(where: {project_id: project_id}) + markProjectAsJustAccessed(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + const job = cb=> + db.Project.findOrCreate({where: {project_id}}) .spread( - (project, created) -> - project.updateAttributes(lastAccessed: new Date()) - .then(() -> cb()) - .error cb + (project, created) => + project.updateAttributes({lastAccessed: new Date()}) + .then(() => cb()) + .error(cb) ) - .error cb - dbQueue.queue.push(job, callback) + .error(cb) + ; + return dbQueue.queue.push(job, callback); + }, - clearExpiredProjects: (callback = (error) ->) -> - ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) -> - return callback(error) if error? - logger.log project_ids: project_ids, "clearing expired projects" - jobs = for project_id in (project_ids or []) - do (project_id) -> - (callback) -> - ProjectPersistenceManager.clearProjectFromCache project_id, (err) -> - if err? - logger.error err: err, project_id: project_id, "error clearing project" - callback() - async.series jobs, (error) -> - return callback(error) if error? - CompileManager.clearExpiredProjects ProjectPersistenceManager.EXPIRY_TIMEOUT, (error) -> - callback() # ignore any errors from deleting directories + clearExpiredProjects(callback) { + if (callback == null) { callback = function(error) {}; } + return ProjectPersistenceManager._findExpiredProjectIds(function(error, project_ids) { + if (error != null) { return callback(error); } + logger.log({project_ids}, "clearing expired projects"); + const jobs = (Array.from(project_ids || [])).map((project_id) => + (project_id => + callback => + ProjectPersistenceManager.clearProjectFromCache(project_id, function(err) { + if (err != null) { + logger.error({err, project_id}, "error clearing project"); + } + return callback(); + }) + + )(project_id)); + return async.series(jobs, function(error) { + if (error != null) { return callback(error); } + return CompileManager.clearExpiredProjects(ProjectPersistenceManager.EXPIRY_TIMEOUT, error => callback()); + }); + }); + }, // ignore any errors from deleting directories - clearProject: (project_id, user_id, callback = (error) ->) -> - logger.log project_id: project_id, user_id:user_id, "clearing project for user" - CompileManager.clearProject project_id, user_id, (error) -> - return callback(error) if error? - ProjectPersistenceManager.clearProjectFromCache project_id, (error) -> - return callback(error) if error? 
- callback() + clearProject(project_id, user_id, callback) { + if (callback == null) { callback = function(error) {}; } + logger.log({project_id, user_id}, "clearing project for user"); + return CompileManager.clearProject(project_id, user_id, function(error) { + if (error != null) { return callback(error); } + return ProjectPersistenceManager.clearProjectFromCache(project_id, function(error) { + if (error != null) { return callback(error); } + return callback(); + }); + }); + }, - clearProjectFromCache: (project_id, callback = (error) ->) -> - logger.log project_id: project_id, "clearing project from cache" - UrlCache.clearProject project_id, (error) -> - if error? - logger.err error:error, project_id: project_id, "error clearing project from cache" - return callback(error) - ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) -> - if error? - logger.err error:error, project_id:project_id, "error clearing project from database" - callback(error) + clearProjectFromCache(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + logger.log({project_id}, "clearing project from cache"); + return UrlCache.clearProject(project_id, function(error) { + if (error != null) { + logger.err({error, project_id}, "error clearing project from cache"); + return callback(error); + } + return ProjectPersistenceManager._clearProjectFromDatabase(project_id, function(error) { + if (error != null) { + logger.err({error, project_id}, "error clearing project from database"); + } + return callback(error); + }); + }); + }, - _clearProjectFromDatabase: (project_id, callback = (error) ->) -> - logger.log project_id:project_id, "clearing project from database" - job = (cb)-> - db.Project.destroy(where: {project_id: project_id}) - .then(() -> cb()) - .error cb - dbQueue.queue.push(job, callback) + _clearProjectFromDatabase(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + logger.log({project_id}, "clearing project from database"); + const job = cb=> + db.Project.destroy({where: {project_id}}) + .then(() => cb()) + .error(cb) + ; + return dbQueue.queue.push(job, callback); + }, - _findExpiredProjectIds: (callback = (error, project_ids) ->) -> - job = (cb)-> - keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT) - q = {} - q[db.op.lt] = keepProjectsFrom - db.Project.findAll(where:{lastAccessed:q}) - .then((projects) -> - cb null, projects.map((project) -> project.project_id) - ).error cb + _findExpiredProjectIds(callback) { + if (callback == null) { callback = function(error, project_ids) {}; } + const job = function(cb){ + const keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT); + const q = {}; + q[db.op.lt] = keepProjectsFrom; + return db.Project.findAll({where:{lastAccessed:q}}) + .then(projects => cb(null, projects.map(project => project.project_id))).error(cb); + }; - dbQueue.queue.push(job, callback) + return dbQueue.queue.push(job, callback); + } +}); -logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout" +logger.log({EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"); diff --git a/app/coffee/RequestParser.js b/app/coffee/RequestParser.js index 9b94712..fdfb8bf 100644 --- a/app/coffee/RequestParser.js +++ b/app/coffee/RequestParser.js @@ -1,128 +1,182 @@ -settings = require("settings-sharelatex") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: 
Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RequestParser; +const settings = require("settings-sharelatex"); -module.exports = RequestParser = - VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"] - MAX_TIMEOUT: 600 +module.exports = (RequestParser = { + VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"], + MAX_TIMEOUT: 600, - parse: (body, callback = (error, data) ->) -> - response = {} + parse(body, callback) { + let resource; + if (callback == null) { callback = function(error, data) {}; } + const response = {}; - if !body.compile? - return callback "top level object should have a compile attribute" - - compile = body.compile - compile.options ||= {} - - try - response.compiler = @_parseAttribute "compiler", - compile.options.compiler, - validValues: @VALID_COMPILERS - default: "pdflatex" - type: "string" - response.timeout = @_parseAttribute "timeout", - compile.options.timeout - default: RequestParser.MAX_TIMEOUT - type: "number" - response.imageName = @_parseAttribute "imageName", - compile.options.imageName, - type: "string" - response.draft = @_parseAttribute "draft", - compile.options.draft, - default: false, - type: "boolean" - response.check = @_parseAttribute "check", - compile.options.check, - type: "string" - response.flags = @_parseAttribute "flags", - compile.options.flags, - default: [], - type: "object" - - # The syncType specifies whether the request contains all - # resources (full) or only those resources to be updated - # in-place (incremental). - response.syncType = @_parseAttribute "syncType", - compile.options.syncType, - validValues: ["full", "incremental"] - type: "string" - - # The syncState is an identifier passed in with the request - # which has the property that it changes when any resource is - # added, deleted, moved or renamed. - # - # on syncType full the syncState identifier is passed in and - # stored - # - # on syncType incremental the syncState identifier must match - # the stored value - response.syncState = @_parseAttribute "syncState", - compile.options.syncState, - type: "string" - - if response.timeout > RequestParser.MAX_TIMEOUT - response.timeout = RequestParser.MAX_TIMEOUT - response.timeout = response.timeout * 1000 # milliseconds - - response.resources = (@_parseResource(resource) for resource in (compile.resources or [])) - - rootResourcePath = @_parseAttribute "rootResourcePath", - compile.rootResourcePath - default: "main.tex" - type: "string" - originalRootResourcePath = rootResourcePath - sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath) - response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath) - - for resource in response.resources - if resource.path == originalRootResourcePath - resource.path = sanitizedRootResourcePath - catch error - return callback error - - callback null, response - - _parseResource: (resource) -> - if !resource.path? or typeof resource.path != "string" - throw "all resources should have a path attribute" - - if resource.modified? - modified = new Date(resource.modified) - if isNaN(modified.getTime()) - throw "resource modified date could not be understood: #{resource.modified}" - - if !resource.url? and !resource.content? - throw "all resources should have either a url or content attribute" - if resource.content? 
and typeof resource.content != "string" - throw "content attribute should be a string" - if resource.url? and typeof resource.url != "string" - throw "url attribute should be a string" - - return { - path: resource.path - modified: modified - url: resource.url - content: resource.content + if ((body.compile == null)) { + return callback("top level object should have a compile attribute"); } - _parseAttribute: (name, attribute, options) -> - if attribute? - if options.validValues? - if options.validValues.indexOf(attribute) == -1 - throw "#{name} attribute should be one of: #{options.validValues.join(", ")}" - if options.type? - if typeof attribute != options.type - throw "#{name} attribute should be a #{options.type}" - else - return options.default if options.default? - return attribute + const { compile } = body; + if (!compile.options) { compile.options = {}; } - _sanitizePath: (path) -> - # See http://php.net/manual/en/function.escapeshellcmd.php - path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, "") + try { + response.compiler = this._parseAttribute("compiler", + compile.options.compiler, { + validValues: this.VALID_COMPILERS, + default: "pdflatex", + type: "string" + } + ); + response.timeout = this._parseAttribute("timeout", + compile.options.timeout, { + default: RequestParser.MAX_TIMEOUT, + type: "number" + } + ); + response.imageName = this._parseAttribute("imageName", + compile.options.imageName, + {type: "string"}); + response.draft = this._parseAttribute("draft", + compile.options.draft, { + default: false, + type: "boolean" + } + ); + response.check = this._parseAttribute("check", + compile.options.check, + {type: "string"}); + response.flags = this._parseAttribute("flags", + compile.options.flags, { + default: [], + type: "object" + } + ); - _checkPath: (path) -> - # check that the request does not use a relative path - for dir in path.split('/') - if dir == '..' - throw "relative path in root resource" - return path + // The syncType specifies whether the request contains all + // resources (full) or only those resources to be updated + // in-place (incremental). + response.syncType = this._parseAttribute("syncType", + compile.options.syncType, { + validValues: ["full", "incremental"], + type: "string" + } + ); + + // The syncState is an identifier passed in with the request + // which has the property that it changes when any resource is + // added, deleted, moved or renamed. 
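+ // (the identifier is treated as an opaque string: ResourceStateManager
+ // stores it on a full compile and only compares it for equality on an
+ // incremental compile, rejecting the request if it differs)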
+ // + // on syncType full the syncState identifier is passed in and + // stored + // + // on syncType incremental the syncState identifier must match + // the stored value + response.syncState = this._parseAttribute("syncState", + compile.options.syncState, + {type: "string"}); + + if (response.timeout > RequestParser.MAX_TIMEOUT) { + response.timeout = RequestParser.MAX_TIMEOUT; + } + response.timeout = response.timeout * 1000; // milliseconds + + response.resources = ((() => { + const result = []; + for (resource of Array.from((compile.resources || []))) { result.push(this._parseResource(resource)); + } + return result; + })()); + + const rootResourcePath = this._parseAttribute("rootResourcePath", + compile.rootResourcePath, { + default: "main.tex", + type: "string" + } + ); + const originalRootResourcePath = rootResourcePath; + const sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath); + response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath); + + for (resource of Array.from(response.resources)) { + if (resource.path === originalRootResourcePath) { + resource.path = sanitizedRootResourcePath; + } + } + } catch (error1) { + const error = error1; + return callback(error); + } + + return callback(null, response); + }, + + _parseResource(resource) { + let modified; + if ((resource.path == null) || (typeof resource.path !== "string")) { + throw "all resources should have a path attribute"; + } + + if (resource.modified != null) { + modified = new Date(resource.modified); + if (isNaN(modified.getTime())) { + throw `resource modified date could not be understood: ${resource.modified}`; + } + } + + if ((resource.url == null) && (resource.content == null)) { + throw "all resources should have either a url or content attribute"; + } + if ((resource.content != null) && (typeof resource.content !== "string")) { + throw "content attribute should be a string"; + } + if ((resource.url != null) && (typeof resource.url !== "string")) { + throw "url attribute should be a string"; + } + + return { + path: resource.path, + modified, + url: resource.url, + content: resource.content + }; + }, + + _parseAttribute(name, attribute, options) { + if (attribute != null) { + if (options.validValues != null) { + if (options.validValues.indexOf(attribute) === -1) { + throw `${name} attribute should be one of: ${options.validValues.join(", ")}`; + } + } + if (options.type != null) { + if (typeof attribute !== options.type) { + throw `${name} attribute should be a ${options.type}`; + } + } + } else { + if (options.default != null) { return options.default; } + } + return attribute; + }, + + _sanitizePath(path) { + // See http://php.net/manual/en/function.escapeshellcmd.php + return path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, ""); + }, + + _checkPath(path) { + // check that the request does not use a relative path + for (let dir of Array.from(path.split('/'))) { + if (dir === '..') { + throw "relative path in root resource"; + } + } + return path; + } +}); diff --git a/app/coffee/ResourceStateManager.js b/app/coffee/ResourceStateManager.js index 19fea47..f430c8f 100644 --- a/app/coffee/ResourceStateManager.js +++ b/app/coffee/ResourceStateManager.js @@ -1,72 +1,108 @@ -Path = require "path" -fs = require "fs" -logger = require "logger-sharelatex" -settings = require("settings-sharelatex") -Errors = require "./Errors" -SafeReader = require "./SafeReader" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: 
Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS201: Simplify complex destructure assignments + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ResourceStateManager; +const Path = require("path"); +const fs = require("fs"); +const logger = require("logger-sharelatex"); +const settings = require("settings-sharelatex"); +const Errors = require("./Errors"); +const SafeReader = require("./SafeReader"); -module.exports = ResourceStateManager = +module.exports = (ResourceStateManager = { - # The sync state is an identifier which must match for an - # incremental update to be allowed. - # - # The initial value is passed in and stored on a full - # compile, along with the list of resources.. - # - # Subsequent incremental compiles must come with the same value - if - # not they will be rejected with a 409 Conflict response. The - # previous list of resources is returned. - # - # An incremental compile can only update existing files with new - # content. The sync state identifier must change if any docs or - # files are moved, added, deleted or renamed. + // The sync state is an identifier which must match for an + // incremental update to be allowed. + // + // The initial value is passed in and stored on a full + // compile, along with the list of resources.. + // + // Subsequent incremental compiles must come with the same value - if + // not they will be rejected with a 409 Conflict response. The + // previous list of resources is returned. + // + // An incremental compile can only update existing files with new + // content. The sync state identifier must change if any docs or + // files are moved, added, deleted or renamed. - SYNC_STATE_FILE: ".project-sync-state" - SYNC_STATE_MAX_SIZE: 128*1024 + SYNC_STATE_FILE: ".project-sync-state", + SYNC_STATE_MAX_SIZE: 128*1024, - saveProjectState: (state, resources, basePath, callback = (error) ->) -> - stateFile = Path.join(basePath, @SYNC_STATE_FILE) - if not state? # remove the file if no state passed in - logger.log state:state, basePath:basePath, "clearing sync state" - fs.unlink stateFile, (err) -> - if err? 
and err.code isnt 'ENOENT' - return callback(err) - else - return callback() - else - logger.log state:state, basePath:basePath, "writing sync state" - resourceList = (resource.path for resource in resources) - fs.writeFile stateFile, [resourceList..., "stateHash:#{state}"].join("\n"), callback + saveProjectState(state, resources, basePath, callback) { + if (callback == null) { callback = function(error) {}; } + const stateFile = Path.join(basePath, this.SYNC_STATE_FILE); + if ((state == null)) { // remove the file if no state passed in + logger.log({state, basePath}, "clearing sync state"); + return fs.unlink(stateFile, function(err) { + if ((err != null) && (err.code !== 'ENOENT')) { + return callback(err); + } else { + return callback(); + } + }); + } else { + logger.log({state, basePath}, "writing sync state"); + const resourceList = (Array.from(resources).map((resource) => resource.path)); + return fs.writeFile(stateFile, [...Array.from(resourceList), `stateHash:${state}`].join("\n"), callback); + } + }, - checkProjectStateMatches: (state, basePath, callback = (error, resources) ->) -> - stateFile = Path.join(basePath, @SYNC_STATE_FILE) - size = @SYNC_STATE_MAX_SIZE - SafeReader.readFile stateFile, size, 'utf8', (err, result, bytesRead) -> - return callback(err) if err? - if bytesRead is size - logger.error file:stateFile, size:size, bytesRead:bytesRead, "project state file truncated" - [resourceList..., oldState] = result?.toString()?.split("\n") or [] - newState = "stateHash:#{state}" - logger.log state:state, oldState: oldState, basePath:basePath, stateMatches: (newState is oldState), "checking sync state" - if newState isnt oldState - return callback new Errors.FilesOutOfSyncError("invalid state for incremental update") - else - resources = ({path: path} for path in resourceList) - callback(null, resources) + checkProjectStateMatches(state, basePath, callback) { + if (callback == null) { callback = function(error, resources) {}; } + const stateFile = Path.join(basePath, this.SYNC_STATE_FILE); + const size = this.SYNC_STATE_MAX_SIZE; + return SafeReader.readFile(stateFile, size, 'utf8', function(err, result, bytesRead) { + if (err != null) { return callback(err); } + if (bytesRead === size) { + logger.error({file:stateFile, size, bytesRead}, "project state file truncated"); + } + const array = __guard__(result != null ? result.toString() : undefined, x => x.split("\n")) || [], + adjustedLength = Math.max(array.length, 1), + resourceList = array.slice(0, adjustedLength - 1), + oldState = array[adjustedLength - 1]; + const newState = `stateHash:${state}`; + logger.log({state, oldState, basePath, stateMatches: (newState === oldState)}, "checking sync state"); + if (newState !== oldState) { + return callback(new Errors.FilesOutOfSyncError("invalid state for incremental update")); + } else { + const resources = (Array.from(resourceList).map((path) => ({path}))); + return callback(null, resources); + } + }); + }, - checkResourceFiles: (resources, allFiles, basePath, callback = (error) ->) -> - # check the paths are all relative to current directory - for file in resources or [] - for dir in file?.path?.split('/') - if dir == '..' 
- return callback new Error("relative path in resource file list") - # check if any of the input files are not present in list of files - seenFile = {} - for file in allFiles - seenFile[file] = true - missingFiles = (resource.path for resource in resources when not seenFile[resource.path]) - if missingFiles?.length > 0 - logger.err missingFiles:missingFiles, basePath:basePath, allFiles:allFiles, resources:resources, "missing input files for project" - return callback new Errors.FilesOutOfSyncError("resource files missing in incremental update") - else - callback() + checkResourceFiles(resources, allFiles, basePath, callback) { + // check the paths are all relative to current directory + let file; + if (callback == null) { callback = function(error) {}; } + for (file of Array.from(resources || [])) { + for (let dir of Array.from(__guard__(file != null ? file.path : undefined, x => x.split('/')))) { + if (dir === '..') { + return callback(new Error("relative path in resource file list")); + } + } + } + // check if any of the input files are not present in list of files + const seenFile = {}; + for (file of Array.from(allFiles)) { + seenFile[file] = true; + } + const missingFiles = (Array.from(resources).filter((resource) => !seenFile[resource.path]).map((resource) => resource.path)); + if ((missingFiles != null ? missingFiles.length : undefined) > 0) { + logger.err({missingFiles, basePath, allFiles, resources}, "missing input files for project"); + return callback(new Errors.FilesOutOfSyncError("resource files missing in incremental update")); + } else { + return callback(); + } + } +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/app/coffee/ResourceWriter.js b/app/coffee/ResourceWriter.js index f3a4bd0..0044ad9 100644 --- a/app/coffee/ResourceWriter.js +++ b/app/coffee/ResourceWriter.js @@ -1,142 +1,206 @@ -UrlCache = require "./UrlCache" -Path = require "path" -fs = require "fs" -async = require "async" -mkdirp = require "mkdirp" -OutputFileFinder = require "./OutputFileFinder" -ResourceStateManager = require "./ResourceStateManager" -Metrics = require "./Metrics" -logger = require "logger-sharelatex" -settings = require("settings-sharelatex") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ResourceWriter; +const UrlCache = require("./UrlCache"); +const Path = require("path"); +const fs = require("fs"); +const async = require("async"); +const mkdirp = require("mkdirp"); +const OutputFileFinder = require("./OutputFileFinder"); +const ResourceStateManager = require("./ResourceStateManager"); +const Metrics = require("./Metrics"); +const logger = require("logger-sharelatex"); +const settings = require("settings-sharelatex"); -parallelFileDownloads = settings.parallelFileDownloads or 1 +const parallelFileDownloads = settings.parallelFileDownloads || 1; -module.exports = ResourceWriter = +module.exports = (ResourceWriter = { - syncResourcesToDisk: (request, basePath, callback = (error, resourceList) ->) -> - if request.syncType is "incremental" - logger.log project_id: request.project_id, user_id: request.user_id, "incremental sync" - ResourceStateManager.checkProjectStateMatches request.syncState, basePath, 
(error, resourceList) -> - return callback(error) if error? - ResourceWriter._removeExtraneousFiles resourceList, basePath, (error, outputFiles, allFiles) -> - return callback(error) if error? - ResourceStateManager.checkResourceFiles resourceList, allFiles, basePath, (error) -> - return callback(error) if error? - ResourceWriter.saveIncrementalResourcesToDisk request.project_id, request.resources, basePath, (error) -> - return callback(error) if error? - callback(null, resourceList) - else - logger.log project_id: request.project_id, user_id: request.user_id, "full sync" - @saveAllResourcesToDisk request.project_id, request.resources, basePath, (error) -> - return callback(error) if error? - ResourceStateManager.saveProjectState request.syncState, request.resources, basePath, (error) -> - return callback(error) if error? - callback(null, request.resources) + syncResourcesToDisk(request, basePath, callback) { + if (callback == null) { callback = function(error, resourceList) {}; } + if (request.syncType === "incremental") { + logger.log({project_id: request.project_id, user_id: request.user_id}, "incremental sync"); + return ResourceStateManager.checkProjectStateMatches(request.syncState, basePath, function(error, resourceList) { + if (error != null) { return callback(error); } + return ResourceWriter._removeExtraneousFiles(resourceList, basePath, function(error, outputFiles, allFiles) { + if (error != null) { return callback(error); } + return ResourceStateManager.checkResourceFiles(resourceList, allFiles, basePath, function(error) { + if (error != null) { return callback(error); } + return ResourceWriter.saveIncrementalResourcesToDisk(request.project_id, request.resources, basePath, function(error) { + if (error != null) { return callback(error); } + return callback(null, resourceList); + }); + }); + }); + }); + } else { + logger.log({project_id: request.project_id, user_id: request.user_id}, "full sync"); + return this.saveAllResourcesToDisk(request.project_id, request.resources, basePath, function(error) { + if (error != null) { return callback(error); } + return ResourceStateManager.saveProjectState(request.syncState, request.resources, basePath, function(error) { + if (error != null) { return callback(error); } + return callback(null, request.resources); + }); + }); + } + }, - saveIncrementalResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) -> - @_createDirectory basePath, (error) => - return callback(error) if error? - jobs = for resource in resources - do (resource) => - (callback) => @_writeResourceToDisk(project_id, resource, basePath, callback) - async.parallelLimit jobs, parallelFileDownloads, callback + saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) { + if (callback == null) { callback = function(error) {}; } + return this._createDirectory(basePath, error => { + if (error != null) { return callback(error); } + const jobs = Array.from(resources).map((resource) => + (resource => { + return callback => this._writeResourceToDisk(project_id, resource, basePath, callback); + })(resource)); + return async.parallelLimit(jobs, parallelFileDownloads, callback); + }); + }, - saveAllResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) -> - @_createDirectory basePath, (error) => - return callback(error) if error? - @_removeExtraneousFiles resources, basePath, (error) => - return callback(error) if error? 
- jobs = for resource in resources - do (resource) => - (callback) => @_writeResourceToDisk(project_id, resource, basePath, callback) - async.parallelLimit jobs, parallelFileDownloads, callback + saveAllResourcesToDisk(project_id, resources, basePath, callback) { + if (callback == null) { callback = function(error) {}; } + return this._createDirectory(basePath, error => { + if (error != null) { return callback(error); } + return this._removeExtraneousFiles(resources, basePath, error => { + if (error != null) { return callback(error); } + const jobs = Array.from(resources).map((resource) => + (resource => { + return callback => this._writeResourceToDisk(project_id, resource, basePath, callback); + })(resource)); + return async.parallelLimit(jobs, parallelFileDownloads, callback); + }); + }); + }, - _createDirectory: (basePath, callback = (error) ->) -> - fs.mkdir basePath, (err) -> - if err? - if err.code is 'EEXIST' - return callback() - else - logger.log {err: err, dir:basePath}, "error creating directory" - return callback(err) - else - return callback() + _createDirectory(basePath, callback) { + if (callback == null) { callback = function(error) {}; } + return fs.mkdir(basePath, function(err) { + if (err != null) { + if (err.code === 'EEXIST') { + return callback(); + } else { + logger.log({err, dir:basePath}, "error creating directory"); + return callback(err); + } + } else { + return callback(); + } + }); + }, - _removeExtraneousFiles: (resources, basePath, _callback = (error, outputFiles, allFiles) ->) -> - timer = new Metrics.Timer("unlink-output-files") - callback = (error, result...) -> - timer.done() - _callback(error, result...) + _removeExtraneousFiles(resources, basePath, _callback) { + if (_callback == null) { _callback = function(error, outputFiles, allFiles) {}; } + const timer = new Metrics.Timer("unlink-output-files"); + const callback = function(error, ...result) { + timer.done(); + return _callback(error, ...Array.from(result)); + }; - OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles, allFiles) -> - return callback(error) if error? 
+ return OutputFileFinder.findOutputFiles(resources, basePath, function(error, outputFiles, allFiles) { + if (error != null) { return callback(error); } - jobs = [] - for file in outputFiles or [] - do (file) -> - path = file.path - should_delete = true - if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache - should_delete = false - if path.match(/^output-.*/) # Tikz cached figures (default case) - should_delete = false - if path.match(/\.(pdf|dpth|md5)$/) # Tikz cached figures (by extension) - should_delete = false - if path.match(/\.(pygtex|pygstyle)$/) or path.match(/(^|\/)_minted-[^\/]+\//) # minted files/directory - should_delete = false - if path.match(/\.md\.tex$/) or path.match(/(^|\/)_markdown_[^\/]+\//) # markdown files/directory - should_delete = false - if path.match(/-eps-converted-to\.pdf$/) # Epstopdf generated files - should_delete = false - if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv" - should_delete = true - if path == "output.tex" # created by TikzManager if present in output files - should_delete = true - if should_delete - jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback + const jobs = []; + for (let file of Array.from(outputFiles || [])) { + (function(file) { + const { path } = file; + let should_delete = true; + if (path.match(/^output\./) || path.match(/\.aux$/) || path.match(/^cache\//)) { // knitr cache + should_delete = false; + } + if (path.match(/^output-.*/)) { // Tikz cached figures (default case) + should_delete = false; + } + if (path.match(/\.(pdf|dpth|md5)$/)) { // Tikz cached figures (by extension) + should_delete = false; + } + if (path.match(/\.(pygtex|pygstyle)$/) || path.match(/(^|\/)_minted-[^\/]+\//)) { // minted files/directory + should_delete = false; + } + if (path.match(/\.md\.tex$/) || path.match(/(^|\/)_markdown_[^\/]+\//)) { // markdown files/directory + should_delete = false; + } + if (path.match(/-eps-converted-to\.pdf$/)) { // Epstopdf generated files + should_delete = false; + } + if ((path === "output.pdf") || (path === "output.dvi") || (path === "output.log") || (path === "output.xdv")) { + should_delete = true; + } + if (path === "output.tex") { // created by TikzManager if present in output files + should_delete = true; + } + if (should_delete) { + return jobs.push(callback => ResourceWriter._deleteFileIfNotDirectory(Path.join(basePath, path), callback)); + } + })(file); + } - async.series jobs, (error) -> - return callback(error) if error? - callback(null, outputFiles, allFiles) + return async.series(jobs, function(error) { + if (error != null) { return callback(error); } + return callback(null, outputFiles, allFiles); + }); + }); + }, - _deleteFileIfNotDirectory: (path, callback = (error) ->) -> - fs.stat path, (error, stat) -> - if error? and error.code is 'ENOENT' - return callback() - else if error? - logger.err {err: error, path: path}, "error stating file in deleteFileIfNotDirectory" - return callback(error) - else if stat.isFile() - fs.unlink path, (error) -> - if error? 
- logger.err {err: error, path: path}, "error removing file in deleteFileIfNotDirectory" - callback(error) - else - callback() - else - callback() + _deleteFileIfNotDirectory(path, callback) { + if (callback == null) { callback = function(error) {}; } + return fs.stat(path, function(error, stat) { + if ((error != null) && (error.code === 'ENOENT')) { + return callback(); + } else if (error != null) { + logger.err({err: error, path}, "error stating file in deleteFileIfNotDirectory"); + return callback(error); + } else if (stat.isFile()) { + return fs.unlink(path, function(error) { + if (error != null) { + logger.err({err: error, path}, "error removing file in deleteFileIfNotDirectory"); + return callback(error); + } else { + return callback(); + } + }); + } else { + return callback(); + } + }); + }, - _writeResourceToDisk: (project_id, resource, basePath, callback = (error) ->) -> - ResourceWriter.checkPath basePath, resource.path, (error, path) -> - return callback(error) if error? - mkdirp Path.dirname(path), (error) -> - return callback(error) if error? - # TODO: Don't overwrite file if it hasn't been modified - if resource.url? - UrlCache.downloadUrlToFile project_id, resource.url, path, resource.modified, (err)-> - if err? - logger.err err:err, project_id:project_id, path:path, resource_url:resource.url, modified:resource.modified, "error downloading file for resources" - callback() #try and continue compiling even if http resource can not be downloaded at this time - else - process = require("process") - fs.writeFile path, resource.content, callback - try - result = fs.lstatSync(path) - catch e + _writeResourceToDisk(project_id, resource, basePath, callback) { + if (callback == null) { callback = function(error) {}; } + return ResourceWriter.checkPath(basePath, resource.path, function(error, path) { + if (error != null) { return callback(error); } + return mkdirp(Path.dirname(path), function(error) { + if (error != null) { return callback(error); } + // TODO: Don't overwrite file if it hasn't been modified + if (resource.url != null) { + return UrlCache.downloadUrlToFile(project_id, resource.url, path, resource.modified, function(err){ + if (err != null) { + logger.err({err, project_id, path, resource_url:resource.url, modified:resource.modified}, "error downloading file for resources"); + } + return callback(); + }); //try and continue compiling even if http resource can not be downloaded at this time + } else { + const process = require("process"); + fs.writeFile(path, resource.content, callback); + try { + let result; + return result = fs.lstatSync(path); + } catch (e) {} + } + }); + }); + }, - checkPath: (basePath, resourcePath, callback) -> - path = Path.normalize(Path.join(basePath, resourcePath)) - if (path.slice(0, basePath.length + 1) != basePath + "/") - return callback new Error("resource path is outside root directory") - else - return callback(null, path) + checkPath(basePath, resourcePath, callback) { + const path = Path.normalize(Path.join(basePath, resourcePath)); + if (path.slice(0, basePath.length + 1) !== (basePath + "/")) { + return callback(new Error("resource path is outside root directory")); + } else { + return callback(null, path); + } + } +}); diff --git a/app/coffee/SafeReader.js b/app/coffee/SafeReader.js index adb96b1..f1a6349 100644 --- a/app/coffee/SafeReader.js +++ b/app/coffee/SafeReader.js @@ -1,25 +1,40 @@ -fs = require "fs" -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * 
DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let SafeReader; +const fs = require("fs"); +const logger = require("logger-sharelatex"); -module.exports = SafeReader = +module.exports = (SafeReader = { - # safely read up to size bytes from a file and return result as a - # string + // safely read up to size bytes from a file and return result as a + // string - readFile: (file, size, encoding, callback = (error, result) ->) -> - fs.open file, 'r', (err, fd) -> - return callback() if err? and err.code is 'ENOENT' - return callback(err) if err? + readFile(file, size, encoding, callback) { + if (callback == null) { callback = function(error, result) {}; } + return fs.open(file, 'r', function(err, fd) { + if ((err != null) && (err.code === 'ENOENT')) { return callback(); } + if (err != null) { return callback(err); } - # safely return always closing the file - callbackWithClose = (err, result...) -> - fs.close fd, (err1) -> - return callback(err) if err? - return callback(err1) if err1? - callback(null, result...) + // safely return always closing the file + const callbackWithClose = (err, ...result) => + fs.close(fd, function(err1) { + if (err != null) { return callback(err); } + if (err1 != null) { return callback(err1); } + return callback(null, ...Array.from(result)); + }) + ; - buff = new Buffer(size, 0) # fill with zeros - fs.read fd, buff, 0, buff.length, 0, (err, bytesRead, buffer) -> - return callbackWithClose(err) if err? - result = buffer.toString(encoding, 0, bytesRead) - callbackWithClose(null, result, bytesRead) + const buff = new Buffer(size, 0); // fill with zeros + return fs.read(fd, buff, 0, buff.length, 0, function(err, bytesRead, buffer) { + if (err != null) { return callbackWithClose(err); } + const result = buffer.toString(encoding, 0, bytesRead); + return callbackWithClose(null, result, bytesRead); + }); + }); + } +}); diff --git a/app/coffee/StaticServerForbidSymlinks.js b/app/coffee/StaticServerForbidSymlinks.js index 1b3cd45..90a8879 100644 --- a/app/coffee/StaticServerForbidSymlinks.js +++ b/app/coffee/StaticServerForbidSymlinks.js @@ -1,41 +1,64 @@ -Path = require("path") -fs = require("fs") -Settings = require("settings-sharelatex") -logger = require("logger-sharelatex") -url = require "url" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ForbidSymlinks; +const Path = require("path"); +const fs = require("fs"); +const Settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); +const url = require("url"); -module.exports = ForbidSymlinks = (staticFn, root, options) -> - expressStatic = staticFn root, options - basePath = Path.resolve(root) - return (req, res, next) -> - path = url.parse(req.url)?.pathname - # check that the path is of the form /project_id_or_name/path/to/file.log - if result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/) - project_id = result[1] - file = result[2] - else - logger.warn path: path, "unrecognized file request" - return res.sendStatus(404) - # check that the file does not use a relative path - for dir in file.split('/') - if dir == '..' 
- logger.warn path: path, "attempt to use a relative path" - return res.sendStatus(404) - # check that the requested path is normalized - requestedFsPath = "#{basePath}/#{project_id}/#{file}" - if requestedFsPath != Path.normalize(requestedFsPath) - logger.error path: requestedFsPath, "requestedFsPath is not normalized" - return res.sendStatus(404) - # check that the requested path is not a symlink - fs.realpath requestedFsPath, (err, realFsPath)-> - if err? - if err.code == 'ENOENT' - return res.sendStatus(404) - else - logger.error err:err, requestedFsPath:requestedFsPath, realFsPath:realFsPath, path: req.params[0], project_id: req.params.project_id, "error checking file access" - return res.sendStatus(500) - else if requestedFsPath != realFsPath - logger.warn requestedFsPath:requestedFsPath, realFsPath:realFsPath, path: req.params[0], project_id: req.params.project_id, "trying to access a different file (symlink), aborting" - return res.sendStatus(404) - else - expressStatic(req, res, next) +module.exports = (ForbidSymlinks = function(staticFn, root, options) { + const expressStatic = staticFn(root, options); + const basePath = Path.resolve(root); + return function(req, res, next) { + let file, project_id, result; + const path = __guard__(url.parse(req.url), x => x.pathname); + // check that the path is of the form /project_id_or_name/path/to/file.log + if (result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/)) { + project_id = result[1]; + file = result[2]; + } else { + logger.warn({path}, "unrecognized file request"); + return res.sendStatus(404); + } + // check that the file does not use a relative path + for (let dir of Array.from(file.split('/'))) { + if (dir === '..') { + logger.warn({path}, "attempt to use a relative path"); + return res.sendStatus(404); + } + } + // check that the requested path is normalized + const requestedFsPath = `${basePath}/${project_id}/${file}`; + if (requestedFsPath !== Path.normalize(requestedFsPath)) { + logger.error({path: requestedFsPath}, "requestedFsPath is not normalized"); + return res.sendStatus(404); + } + // check that the requested path is not a symlink + return fs.realpath(requestedFsPath, function(err, realFsPath){ + if (err != null) { + if (err.code === 'ENOENT') { + return res.sendStatus(404); + } else { + logger.error({err, requestedFsPath, realFsPath, path: req.params[0], project_id: req.params.project_id}, "error checking file access"); + return res.sendStatus(500); + } + } else if (requestedFsPath !== realFsPath) { + logger.warn({requestedFsPath, realFsPath, path: req.params[0], project_id: req.params.project_id}, "trying to access a different file (symlink), aborting"); + return res.sendStatus(404); + } else { + return expressStatic(req, res, next); + } + }); + }; +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} \ No newline at end of file diff --git a/app/coffee/TikzManager.js b/app/coffee/TikzManager.js index 22def27..fb52644 100644 --- a/app/coffee/TikzManager.js +++ b/app/coffee/TikzManager.js @@ -1,37 +1,56 @@ -fs = require "fs" -Path = require "path" -ResourceWriter = require "./ResourceWriter" -SafeReader = require "./SafeReader" -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let TikzManager; +const fs = require("fs"); +const Path = require("path"); +const ResourceWriter = require("./ResourceWriter"); +const SafeReader = require("./SafeReader"); +const logger = require("logger-sharelatex"); -# for \tikzexternalize or pstool to work the main file needs to match the -# jobname. Since we set the -jobname to output, we have to create a -# copy of the main file as 'output.tex'. +// for \tikzexternalize or pstool to work the main file needs to match the +// jobname. Since we set the -jobname to output, we have to create a +// copy of the main file as 'output.tex'. -module.exports = TikzManager = +module.exports = (TikzManager = { - checkMainFile: (compileDir, mainFile, resources, callback = (error, needsMainFile) ->) -> - # if there's already an output.tex file, we don't want to touch it - for resource in resources - if resource.path is "output.tex" - logger.log compileDir: compileDir, mainFile: mainFile, "output.tex already in resources" - return callback(null, false) - # if there's no output.tex, see if we are using tikz/pgf or pstool in the main file - ResourceWriter.checkPath compileDir, mainFile, (error, path) -> - return callback(error) if error? - SafeReader.readFile path, 65536, "utf8", (error, content) -> - return callback(error) if error? - usesTikzExternalize = content?.indexOf("\\tikzexternalize") >= 0 - usesPsTool = content?.indexOf("{pstool}") >= 0 - logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize:usesTikzExternalize, usesPsTool: usesPsTool, "checked for packages needing main file as output.tex" - needsMainFile = (usesTikzExternalize || usesPsTool) - callback null, needsMainFile + checkMainFile(compileDir, mainFile, resources, callback) { + // if there's already an output.tex file, we don't want to touch it + if (callback == null) { callback = function(error, needsMainFile) {}; } + for (let resource of Array.from(resources)) { + if (resource.path === "output.tex") { + logger.log({compileDir, mainFile}, "output.tex already in resources"); + return callback(null, false); + } + } + // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file + return ResourceWriter.checkPath(compileDir, mainFile, function(error, path) { + if (error != null) { return callback(error); } + return SafeReader.readFile(path, 65536, "utf8", function(error, content) { + if (error != null) { return callback(error); } + const usesTikzExternalize = (content != null ? content.indexOf("\\tikzexternalize") : undefined) >= 0; + const usesPsTool = (content != null ? 
content.indexOf("{pstool}") : undefined) >= 0; + logger.log({compileDir, mainFile, usesTikzExternalize, usesPsTool}, "checked for packages needing main file as output.tex"); + const needsMainFile = (usesTikzExternalize || usesPsTool); + return callback(null, needsMainFile); + }); + }); + }, - injectOutputFile: (compileDir, mainFile, callback = (error) ->) -> - ResourceWriter.checkPath compileDir, mainFile, (error, path) -> - return callback(error) if error? - fs.readFile path, "utf8", (error, content) -> - return callback(error) if error? - logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex as project uses packages which require it" - # use wx flag to ensure that output file does not already exist - fs.writeFile Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback + injectOutputFile(compileDir, mainFile, callback) { + if (callback == null) { callback = function(error) {}; } + return ResourceWriter.checkPath(compileDir, mainFile, function(error, path) { + if (error != null) { return callback(error); } + return fs.readFile(path, "utf8", function(error, content) { + if (error != null) { return callback(error); } + logger.log({compileDir, mainFile}, "copied file to output.tex as project uses packages which require it"); + // use wx flag to ensure that output file does not already exist + return fs.writeFile(Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback); + }); + }); + } +}); diff --git a/app/coffee/UrlCache.js b/app/coffee/UrlCache.js index d44479a..9a19968 100644 --- a/app/coffee/UrlCache.js +++ b/app/coffee/UrlCache.js @@ -1,134 +1,189 @@ -db = require("./db") -dbQueue = require "./DbQueue" -UrlFetcher = require("./UrlFetcher") -Settings = require("settings-sharelatex") -crypto = require("crypto") -fs = require("fs") -logger = require "logger-sharelatex" -async = require "async" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let UrlCache; +const db = require("./db"); +const dbQueue = require("./DbQueue"); +const UrlFetcher = require("./UrlFetcher"); +const Settings = require("settings-sharelatex"); +const crypto = require("crypto"); +const fs = require("fs"); +const logger = require("logger-sharelatex"); +const async = require("async"); -module.exports = UrlCache = - downloadUrlToFile: (project_id, url, destPath, lastModified, callback = (error) ->) -> - UrlCache._ensureUrlIsInCache project_id, url, lastModified, (error, pathToCachedUrl) => - return callback(error) if error? - UrlCache._copyFile pathToCachedUrl, destPath, (error) -> - if error? 
- UrlCache._clearUrlDetails project_id, url, () -> - callback(error) - else - callback(error) +module.exports = (UrlCache = { + downloadUrlToFile(project_id, url, destPath, lastModified, callback) { + if (callback == null) { callback = function(error) {}; } + return UrlCache._ensureUrlIsInCache(project_id, url, lastModified, (error, pathToCachedUrl) => { + if (error != null) { return callback(error); } + return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) { + if (error != null) { + return UrlCache._clearUrlDetails(project_id, url, () => callback(error)); + } else { + return callback(error); + } + }); + }); + }, - clearProject: (project_id, callback = (error) ->) -> - UrlCache._findAllUrlsInProject project_id, (error, urls) -> - logger.log project_id: project_id, url_count: urls.length, "clearing project URLs" - return callback(error) if error? - jobs = for url in (urls or []) - do (url) -> - (callback) -> - UrlCache._clearUrlFromCache project_id, url, (error) -> - if error? - logger.error err: error, project_id: project_id, url: url, "error clearing project URL" - callback() - async.series jobs, callback + clearProject(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + return UrlCache._findAllUrlsInProject(project_id, function(error, urls) { + logger.log({project_id, url_count: urls.length}, "clearing project URLs"); + if (error != null) { return callback(error); } + const jobs = (Array.from(urls || [])).map((url) => + (url => + callback => + UrlCache._clearUrlFromCache(project_id, url, function(error) { + if (error != null) { + logger.error({err: error, project_id, url}, "error clearing project URL"); + } + return callback(); + }) + + )(url)); + return async.series(jobs, callback); + }); + }, - _ensureUrlIsInCache: (project_id, url, lastModified, callback = (error, pathOnDisk) ->) -> - if lastModified? - # MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds. - # So round down to seconds - lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000) - UrlCache._doesUrlNeedDownloading project_id, url, lastModified, (error, needsDownloading) => - return callback(error) if error? - if needsDownloading - logger.log url: url, lastModified: lastModified, "downloading URL" - UrlFetcher.pipeUrlToFile url, UrlCache._cacheFilePathForUrl(project_id, url), (error) => - return callback(error) if error? - UrlCache._updateOrCreateUrlDetails project_id, url, lastModified, (error) => - return callback(error) if error? - callback null, UrlCache._cacheFilePathForUrl(project_id, url) - else - logger.log url: url, lastModified: lastModified, "URL is up to date in cache" - callback null, UrlCache._cacheFilePathForUrl(project_id, url) + _ensureUrlIsInCache(project_id, url, lastModified, callback) { + if (callback == null) { callback = function(error, pathOnDisk) {}; } + if (lastModified != null) { + // MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds. 
+ // So round down to seconds + lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000); + } + return UrlCache._doesUrlNeedDownloading(project_id, url, lastModified, (error, needsDownloading) => { + if (error != null) { return callback(error); } + if (needsDownloading) { + logger.log({url, lastModified}, "downloading URL"); + return UrlFetcher.pipeUrlToFile(url, UrlCache._cacheFilePathForUrl(project_id, url), error => { + if (error != null) { return callback(error); } + return UrlCache._updateOrCreateUrlDetails(project_id, url, lastModified, error => { + if (error != null) { return callback(error); } + return callback(null, UrlCache._cacheFilePathForUrl(project_id, url)); + }); + }); + } else { + logger.log({url, lastModified}, "URL is up to date in cache"); + return callback(null, UrlCache._cacheFilePathForUrl(project_id, url)); + } + }); + }, - _doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) -> - if !lastModified? - return callback null, true - UrlCache._findUrlDetails project_id, url, (error, urlDetails) -> - return callback(error) if error? - if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime() - return callback null, true - else - return callback null, false + _doesUrlNeedDownloading(project_id, url, lastModified, callback) { + if (callback == null) { callback = function(error, needsDownloading) {}; } + if ((lastModified == null)) { + return callback(null, true); + } + return UrlCache._findUrlDetails(project_id, url, function(error, urlDetails) { + if (error != null) { return callback(error); } + if ((urlDetails == null) || (urlDetails.lastModified == null) || (urlDetails.lastModified.getTime() < lastModified.getTime())) { + return callback(null, true); + } else { + return callback(null, false); + } + }); + }, - _cacheFileNameForUrl: (project_id, url) -> - project_id + ":" + crypto.createHash("md5").update(url).digest("hex") + _cacheFileNameForUrl(project_id, url) { + return project_id + ":" + crypto.createHash("md5").update(url).digest("hex"); + }, - _cacheFilePathForUrl: (project_id, url) -> - "#{Settings.path.clsiCacheDir}/#{UrlCache._cacheFileNameForUrl(project_id, url)}" + _cacheFilePathForUrl(project_id, url) { + return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(project_id, url)}`; + }, - _copyFile: (from, to, _callback = (error) ->) -> - callbackOnce = (error) -> - if error? 
- logger.error err: error, from:from, to:to, "error copying file from cache" - _callback(error) - _callback = () -> - writeStream = fs.createWriteStream(to) - readStream = fs.createReadStream(from) - writeStream.on "error", callbackOnce - readStream.on "error", callbackOnce - writeStream.on "close", callbackOnce - writeStream.on "open", () -> - readStream.pipe(writeStream) + _copyFile(from, to, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const callbackOnce = function(error) { + if (error != null) { + logger.error({err: error, from, to}, "error copying file from cache"); + } + _callback(error); + return _callback = function() {}; + }; + const writeStream = fs.createWriteStream(to); + const readStream = fs.createReadStream(from); + writeStream.on("error", callbackOnce); + readStream.on("error", callbackOnce); + writeStream.on("close", callbackOnce); + return writeStream.on("open", () => readStream.pipe(writeStream)); + }, - _clearUrlFromCache: (project_id, url, callback = (error) ->) -> - UrlCache._clearUrlDetails project_id, url, (error) -> - return callback(error) if error? - UrlCache._deleteUrlCacheFromDisk project_id, url, (error) -> - return callback(error) if error? - callback null + _clearUrlFromCache(project_id, url, callback) { + if (callback == null) { callback = function(error) {}; } + return UrlCache._clearUrlDetails(project_id, url, function(error) { + if (error != null) { return callback(error); } + return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) { + if (error != null) { return callback(error); } + return callback(null); + }); + }); + }, - _deleteUrlCacheFromDisk: (project_id, url, callback = (error) ->) -> - fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), (error) -> - if error? 
and error.code != 'ENOENT' # no error if the file isn't present - return callback(error) - else - return callback() + _deleteUrlCacheFromDisk(project_id, url, callback) { + if (callback == null) { callback = function(error) {}; } + return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(error) { + if ((error != null) && (error.code !== 'ENOENT')) { // no error if the file isn't present + return callback(error); + } else { + return callback(); + } + }); + }, - _findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) -> - job = (cb)-> - db.UrlCache.find(where: { url: url, project_id: project_id }) - .then((urlDetails) -> cb null, urlDetails) - .error cb - dbQueue.queue.push job, callback + _findUrlDetails(project_id, url, callback) { + if (callback == null) { callback = function(error, urlDetails) {}; } + const job = cb=> + db.UrlCache.find({where: { url, project_id }}) + .then(urlDetails => cb(null, urlDetails)) + .error(cb) + ; + return dbQueue.queue.push(job, callback); + }, - _updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) -> - job = (cb)-> - db.UrlCache.findOrCreate(where: {url: url, project_id: project_id}) + _updateOrCreateUrlDetails(project_id, url, lastModified, callback) { + if (callback == null) { callback = function(error) {}; } + const job = cb=> + db.UrlCache.findOrCreate({where: {url, project_id}}) .spread( - (urlDetails, created) -> - urlDetails.updateAttributes(lastModified: lastModified) - .then(() -> cb()) + (urlDetails, created) => + urlDetails.updateAttributes({lastModified}) + .then(() => cb()) .error(cb) ) - .error cb - dbQueue.queue.push(job, callback) + .error(cb) + ; + return dbQueue.queue.push(job, callback); + }, - _clearUrlDetails: (project_id, url, callback = (error) ->) -> - job = (cb)-> - db.UrlCache.destroy(where: {url: url, project_id: project_id}) - .then(() -> cb null) - .error cb - dbQueue.queue.push(job, callback) + _clearUrlDetails(project_id, url, callback) { + if (callback == null) { callback = function(error) {}; } + const job = cb=> + db.UrlCache.destroy({where: {url, project_id}}) + .then(() => cb(null)) + .error(cb) + ; + return dbQueue.queue.push(job, callback); + }, - _findAllUrlsInProject: (project_id, callback = (error, urls) ->) -> - job = (cb)-> - db.UrlCache.findAll(where: { project_id: project_id }) + _findAllUrlsInProject(project_id, callback) { + if (callback == null) { callback = function(error, urls) {}; } + const job = cb=> + db.UrlCache.findAll({where: { project_id }}) .then( - (urlEntries) -> - cb null, urlEntries.map((entry) -> entry.url) - ) - .error cb - dbQueue.queue.push(job, callback) + urlEntries => cb(null, urlEntries.map(entry => entry.url))) + .error(cb) + ; + return dbQueue.queue.push(job, callback); + } +}); diff --git a/app/coffee/UrlFetcher.js b/app/coffee/UrlFetcher.js index da10859..ea20956 100644 --- a/app/coffee/UrlFetcher.js +++ b/app/coffee/UrlFetcher.js @@ -1,70 +1,88 @@ -request = require("request").defaults(jar: false) -fs = require("fs") -logger = require "logger-sharelatex" -settings = require("settings-sharelatex") -URL = require('url'); +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let UrlFetcher; +const request = require("request").defaults({jar: false}); +const fs = require("fs"); +const logger = require("logger-sharelatex"); +const 
settings = require("settings-sharelatex"); +const URL = require('url'); -oneMinute = 60 * 1000 +const oneMinute = 60 * 1000; -module.exports = UrlFetcher = - pipeUrlToFile: (url, filePath, _callback = (error) ->) -> - callbackOnce = (error) -> - clearTimeout timeoutHandler if timeoutHandler? - _callback(error) - _callback = () -> +module.exports = (UrlFetcher = { + pipeUrlToFile(url, filePath, _callback) { + if (_callback == null) { _callback = function(error) {}; } + const callbackOnce = function(error) { + if (timeoutHandler != null) { clearTimeout(timeoutHandler); } + _callback(error); + return _callback = function() {}; + }; - if settings.filestoreDomainOveride? - p = URL.parse(url).path - url = "#{settings.filestoreDomainOveride}#{p}" - timeoutHandler = setTimeout () -> - timeoutHandler = null - logger.error url:url, filePath: filePath, "Timed out downloading file to cache" - callbackOnce(new Error("Timed out downloading file to cache #{url}")) - # FIXME: maybe need to close fileStream here - , 3 * oneMinute + if (settings.filestoreDomainOveride != null) { + const p = URL.parse(url).path; + url = `${settings.filestoreDomainOveride}${p}`; + } + var timeoutHandler = setTimeout(function() { + timeoutHandler = null; + logger.error({url, filePath}, "Timed out downloading file to cache"); + return callbackOnce(new Error(`Timed out downloading file to cache ${url}`)); + } + // FIXME: maybe need to close fileStream here + , 3 * oneMinute); - logger.log url:url, filePath: filePath, "started downloading url to cache" - urlStream = request.get({url: url, timeout: oneMinute}) - urlStream.pause() # stop data flowing until we are ready + logger.log({url, filePath}, "started downloading url to cache"); + const urlStream = request.get({url, timeout: oneMinute}); + urlStream.pause(); // stop data flowing until we are ready - # attach handlers before setting up pipes - urlStream.on "error", (error) -> - logger.error err: error, url:url, filePath: filePath, "error downloading url" - callbackOnce(error or new Error("Something went wrong downloading the URL #{url}")) + // attach handlers before setting up pipes + urlStream.on("error", function(error) { + logger.error({err: error, url, filePath}, "error downloading url"); + return callbackOnce(error || new Error(`Something went wrong downloading the URL ${url}`)); + }); - urlStream.on "end", () -> - logger.log url:url, filePath: filePath, "finished downloading file into cache" + urlStream.on("end", () => logger.log({url, filePath}, "finished downloading file into cache")); - urlStream.on "response", (res) -> - if res.statusCode >= 200 and res.statusCode < 300 - fileStream = fs.createWriteStream(filePath) + return urlStream.on("response", function(res) { + if ((res.statusCode >= 200) && (res.statusCode < 300)) { + const fileStream = fs.createWriteStream(filePath); - # attach handlers before setting up pipes - fileStream.on 'error', (error) -> - logger.error err: error, url:url, filePath: filePath, "error writing file into cache" - fs.unlink filePath, (err) -> - if err? 
- logger.err err: err, filePath: filePath, "error deleting file from cache" - callbackOnce(error) + // attach handlers before setting up pipes + fileStream.on('error', function(error) { + logger.error({err: error, url, filePath}, "error writing file into cache"); + return fs.unlink(filePath, function(err) { + if (err != null) { + logger.err({err, filePath}, "error deleting file from cache"); + } + return callbackOnce(error); + }); + }); - fileStream.on 'finish', () -> - logger.log url:url, filePath: filePath, "finished writing file into cache" - callbackOnce() + fileStream.on('finish', function() { + logger.log({url, filePath}, "finished writing file into cache"); + return callbackOnce(); + }); - fileStream.on 'pipe', () -> - logger.log url:url, filePath: filePath, "piping into filestream" + fileStream.on('pipe', () => logger.log({url, filePath}, "piping into filestream")); - urlStream.pipe(fileStream) - urlStream.resume() # now we are ready to handle the data - else - logger.error statusCode: res.statusCode, url:url, filePath: filePath, "unexpected status code downloading url to cache" - # https://nodejs.org/api/http.html#http_class_http_clientrequest - # If you add a 'response' event handler, then you must consume - # the data from the response object, either by calling - # response.read() whenever there is a 'readable' event, or by - # adding a 'data' handler, or by calling the .resume() - # method. Until the data is consumed, the 'end' event will not - # fire. Also, until the data is read it will consume memory - # that can eventually lead to a 'process out of memory' error. - urlStream.resume() # discard the data - callbackOnce(new Error("URL returned non-success status code: #{res.statusCode} #{url}")) + urlStream.pipe(fileStream); + return urlStream.resume(); // now we are ready to handle the data + } else { + logger.error({statusCode: res.statusCode, url, filePath}, "unexpected status code downloading url to cache"); + // https://nodejs.org/api/http.html#http_class_http_clientrequest + // If you add a 'response' event handler, then you must consume + // the data from the response object, either by calling + // response.read() whenever there is a 'readable' event, or by + // adding a 'data' handler, or by calling the .resume() + // method. Until the data is consumed, the 'end' event will not + // fire. Also, until the data is read it will consume memory + // that can eventually lead to a 'process out of memory' error. 
+ urlStream.resume(); // discard the data + return callbackOnce(new Error(`URL returned non-success status code: ${res.statusCode} ${url}`)); + } + }); + } +}); diff --git a/app/coffee/db.js b/app/coffee/db.js index de48dfd..385ad8d 100644 --- a/app/coffee/db.js +++ b/app/coffee/db.js @@ -1,55 +1,59 @@ -Sequelize = require("sequelize") -Settings = require("settings-sharelatex") -_ = require("underscore") -logger = require "logger-sharelatex" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Sequelize = require("sequelize"); +const Settings = require("settings-sharelatex"); +const _ = require("underscore"); +const logger = require("logger-sharelatex"); -options = _.extend {logging:false}, Settings.mysql.clsi +const options = _.extend({logging:false}, Settings.mysql.clsi); -logger.log dbPath:Settings.mysql.clsi.storage, "connecting to db" +logger.log({dbPath:Settings.mysql.clsi.storage}, "connecting to db"); -sequelize = new Sequelize( +const sequelize = new Sequelize( Settings.mysql.clsi.database, Settings.mysql.clsi.username, Settings.mysql.clsi.password, options -) +); -if Settings.mysql.clsi.dialect == "sqlite" - logger.log "running PRAGMA journal_mode=WAL;" - sequelize.query("PRAGMA journal_mode=WAL;") - sequelize.query("PRAGMA synchronous=OFF;") - sequelize.query("PRAGMA read_uncommitted = true;") +if (Settings.mysql.clsi.dialect === "sqlite") { + logger.log("running PRAGMA journal_mode=WAL;"); + sequelize.query("PRAGMA journal_mode=WAL;"); + sequelize.query("PRAGMA synchronous=OFF;"); + sequelize.query("PRAGMA read_uncommitted = true;"); +} -module.exports = +module.exports = { UrlCache: sequelize.define("UrlCache", { - url: Sequelize.STRING - project_id: Sequelize.STRING + url: Sequelize.STRING, + project_id: Sequelize.STRING, lastModified: Sequelize.DATE }, { indexes: [ {fields: ['url', 'project_id']}, {fields: ['project_id']} ] - }) + }), Project: sequelize.define("Project", { - project_id: {type: Sequelize.STRING, primaryKey: true} + project_id: {type: Sequelize.STRING, primaryKey: true}, lastAccessed: Sequelize.DATE }, { indexes: [ {fields: ['lastAccessed']} ] - }) + }), - op: Sequelize.Op + op: Sequelize.Op, - sync: () -> - logger.log dbPath:Settings.mysql.clsi.storage, "syncing db schema" - sequelize.sync() - .then(-> - logger.log "db sync complete" - ).catch((err)-> - console.log err, "error syncing" - ) + sync() { + logger.log({dbPath:Settings.mysql.clsi.storage}, "syncing db schema"); + return sequelize.sync() + .then(() => logger.log("db sync complete")).catch(err=> console.log(err, "error syncing")); + } +}; From c056ca6968f0cafeb4e40893f692b3e42178f52f Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:14:28 +0100 Subject: [PATCH 07/24] decaffeinate: Run post-processing cleanups on CommandRunner.coffee and 25 other files --- app/coffee/CommandRunner.js | 2 ++ app/coffee/CompileController.js | 7 ++++ app/coffee/CompileManager.js | 43 ++++++++++++++---------- app/coffee/ContentTypeMapper.js | 5 +++ app/coffee/DbQueue.js | 2 ++ app/coffee/DockerLockManager.js | 5 +++ app/coffee/DockerRunner.js | 22 ++++++++---- app/coffee/DraftModeManager.js | 7 ++++ app/coffee/Errors.js | 6 ++++ app/coffee/LatexRunner.js | 8 +++++ app/coffee/LocalCommandRunner.js | 8 +++++ app/coffee/LockManager.js | 6 ++++ app/coffee/Metrics.js | 2 ++ app/coffee/OutputCacheManager.js | 5 +++ app/coffee/OutputFileFinder.js | 12 
+++++-- app/coffee/OutputFileOptimiser.js | 9 +++++ app/coffee/ProjectPersistenceManager.js | 6 ++++ app/coffee/RequestParser.js | 13 ++++++- app/coffee/ResourceStateManager.js | 16 ++++++--- app/coffee/ResourceWriter.js | 13 +++++-- app/coffee/SafeReader.js | 7 ++++ app/coffee/StaticServerForbidSymlinks.js | 10 +++++- app/coffee/TikzManager.js | 8 ++++- app/coffee/UrlCache.js | 7 ++++ app/coffee/UrlFetcher.js | 8 +++++ app/coffee/db.js | 5 +++ 26 files changed, 206 insertions(+), 36 deletions(-) diff --git a/app/coffee/CommandRunner.js b/app/coffee/CommandRunner.js index dd7210a..1c46ee9 100644 --- a/app/coffee/CommandRunner.js +++ b/app/coffee/CommandRunner.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS207: Consider shorter variations of null checks diff --git a/app/coffee/CompileController.js b/app/coffee/CompileController.js index cfdbcfe..60925fc 100644 --- a/app/coffee/CompileController.js +++ b/app/coffee/CompileController.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/CompileManager.js b/app/coffee/CompileManager.js index 82dafd1..76fb8b0 100644 --- a/app/coffee/CompileManager.js +++ b/app/coffee/CompileManager.js @@ -1,3 +1,12 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -88,13 +97,13 @@ module.exports = (CompileManager = { // only run chktex on LaTeX files (not knitr .Rtex files or any others) const isLaTeXFile = request.rootResourcePath != null ? 
request.rootResourcePath.match(/\.tex$/i) : undefined; if ((request.check != null) && isLaTeXFile) { - env['CHKTEX_OPTIONS'] = '-nall -e9 -e10 -w15 -w16'; - env['CHKTEX_ULIMIT_OPTIONS'] = '-t 5 -v 64000'; + env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'; + env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'; if (request.check === 'error') { - env['CHKTEX_EXIT_ON_ERROR'] = 1; + env.CHKTEX_EXIT_ON_ERROR = 1; } if (request.check === 'validate') { - env['CHKTEX_VALIDATE'] = 1; + env.CHKTEX_VALIDATE = 1; } } @@ -337,7 +346,7 @@ module.exports = (CompileManager = { _parseSynctexFromCodeOutput(output) { const results = []; - for (let line of Array.from(output.split("\n"))) { + for (const line of Array.from(output.split("\n"))) { const [node, page, h, v, width, height] = Array.from(line.split("\t")); if (node === "NODE") { results.push({ @@ -387,7 +396,7 @@ module.exports = (CompileManager = { if (error != null) { return callback(error); } return fs.readFile(compileDir + "/" + file_name + ".wc", "utf-8", function(err, stdout) { if (err != null) { - //call it node_err so sentry doesn't use random path error as unique id so it can't be ignored + // call it node_err so sentry doesn't use random path error as unique id so it can't be ignored logger.err({node_err:err, command, compileDir, project_id, user_id}, "error reading word count output"); return callback(err); } @@ -412,37 +421,37 @@ module.exports = (CompileManager = { errors: 0, messages: "" }; - for (let line of Array.from(output.split("\n"))) { + for (const line of Array.from(output.split("\n"))) { const [data, info] = Array.from(line.split(":")); if (data.indexOf("Encoding") > -1) { - results['encode'] = info.trim(); + results.encode = info.trim(); } if (data.indexOf("in text") > -1) { - results['textWords'] = parseInt(info, 10); + results.textWords = parseInt(info, 10); } if (data.indexOf("in head") > -1) { - results['headWords'] = parseInt(info, 10); + results.headWords = parseInt(info, 10); } if (data.indexOf("outside") > -1) { - results['outside'] = parseInt(info, 10); + results.outside = parseInt(info, 10); } if (data.indexOf("of head") > -1) { - results['headers'] = parseInt(info, 10); + results.headers = parseInt(info, 10); } if (data.indexOf("Number of floats/tables/figures") > -1) { - results['elements'] = parseInt(info, 10); + results.elements = parseInt(info, 10); } if (data.indexOf("Number of math inlines") > -1) { - results['mathInline'] = parseInt(info, 10); + results.mathInline = parseInt(info, 10); } if (data.indexOf("Number of math displayed") > -1) { - results['mathDisplay'] = parseInt(info, 10); + results.mathDisplay = parseInt(info, 10); } if (data === "(errors") { // errors reported as (errors:123) - results['errors'] = parseInt(info, 10); + results.errors = parseInt(info, 10); } if (line.indexOf("!!! ") > -1) { // errors logged as !!! message !!! - results['messages'] += line + "\n"; + results.messages += line + "\n"; } } return results; diff --git a/app/coffee/ContentTypeMapper.js b/app/coffee/ContentTypeMapper.js index c57057f..fdd66d3 100644 --- a/app/coffee/ContentTypeMapper.js +++ b/app/coffee/ContentTypeMapper.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
let ContentTypeMapper; const Path = require('path'); diff --git a/app/coffee/DbQueue.js b/app/coffee/DbQueue.js index 0f1f8cf..89ff323 100644 --- a/app/coffee/DbQueue.js +++ b/app/coffee/DbQueue.js @@ -1,3 +1,5 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/app/coffee/DockerLockManager.js b/app/coffee/DockerLockManager.js index 9c7deff..274ff66 100644 --- a/app/coffee/DockerLockManager.js +++ b/app/coffee/DockerLockManager.js @@ -1,3 +1,8 @@ +/* eslint-disable + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/DockerRunner.js b/app/coffee/DockerRunner.js index ab78419..dc04b5d 100644 --- a/app/coffee/DockerRunner.js +++ b/app/coffee/DockerRunner.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -140,7 +148,7 @@ module.exports = (DockerRunner = { return callback(err); } containerReturned = true; - __guard__(options != null ? options.HostConfig : undefined, x => x.SecurityOpt = null); //small log line + __guard__(options != null ? options.HostConfig : undefined, x => x.SecurityOpt = null); // small log line logger.log({err, exitCode, options}, "docker container has exited"); return callbackIfFinished(); }); @@ -164,7 +172,7 @@ module.exports = (DockerRunner = { // merge settings and environment parameter const env = {}; - for (let src of [Settings.clsi.docker.env, environment || {}]) { + for (const src of [Settings.clsi.docker.env, environment || {}]) { for (key in src) { value = src[key]; env[key] = value; } } // set the path based on the image year @@ -173,7 +181,7 @@ module.exports = (DockerRunner = { } else { year = "2014"; } - env['PATH'] = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`; + env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`; const options = { "Cmd" : command, "Image" : image, @@ -208,7 +216,7 @@ module.exports = (DockerRunner = { if ((Settings.path != null ? Settings.path.synctexBinHostPath : undefined) != null) { - options["HostConfig"]["Binds"].push(`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`); + options.HostConfig.Binds.push(`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`); } if (Settings.clsi.docker.seccomp_profile != null) { @@ -254,7 +262,7 @@ module.exports = (DockerRunner = { }) ; const jobs = []; - for (let vol in volumes) { + for (const vol in volumes) { (vol => jobs.push(cb => checkVolume(vol, cb)))(vol); } return async.series(jobs, callback); @@ -279,7 +287,7 @@ module.exports = (DockerRunner = { DockerRunner.attachToContainer(options.name, attachStreamHandler, function(error){ if (error != null) { return callback(error); } return container.start(function(error) { - if ((error != null) && ((error != null ? error.statusCode : undefined) !== 304)) { //already running + if ((error != null) && ((error != null ? 
error.statusCode : undefined) !== 304)) { // already running return callback(error); } else { return callback(); @@ -434,7 +442,7 @@ module.exports = (DockerRunner = { return dockerode.listContainers({all: true}, function(error, containers) { if (error != null) { return callback(error); } const jobs = []; - for (let container of Array.from(containers || [])) { + for (const container of Array.from(containers || [])) { (container => DockerRunner.examineOldContainer(container, function(err, name, id, ttl) { if ((name.slice(0, 9) === '/project-') && (ttl <= 0)) { diff --git a/app/coffee/DraftModeManager.js b/app/coffee/DraftModeManager.js index 8ddbbd0..79f39ab 100644 --- a/app/coffee/DraftModeManager.js +++ b/app/coffee/DraftModeManager.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-useless-escape, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/app/coffee/Errors.js b/app/coffee/Errors.js index 3a9ef22..e7ace2c 100644 --- a/app/coffee/Errors.js +++ b/app/coffee/Errors.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-proto, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. let Errors; var NotFoundError = function(message) { const error = new Error(message); diff --git a/app/coffee/LatexRunner.js b/app/coffee/LatexRunner.js index 4c83e08..e569df8 100644 --- a/app/coffee/LatexRunner.js +++ b/app/coffee/LatexRunner.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/app/coffee/LocalCommandRunner.js b/app/coffee/LocalCommandRunner.js index 405c51b..24c0d8e 100644 --- a/app/coffee/LocalCommandRunner.js +++ b/app/coffee/LocalCommandRunner.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/LockManager.js b/app/coffee/LockManager.js index 2405e8a..8930fab 100644 --- a/app/coffee/LockManager.js +++ b/app/coffee/LockManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/Metrics.js b/app/coffee/Metrics.js index 8148d66..94623da 100644 --- a/app/coffee/Metrics.js +++ b/app/coffee/Metrics.js @@ -1,2 +1,4 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. module.exports = require("metrics-sharelatex"); diff --git a/app/coffee/OutputCacheManager.js b/app/coffee/OutputCacheManager.js index 6d03a10..b1bda0e 100644 --- a/app/coffee/OutputCacheManager.js +++ b/app/coffee/OutputCacheManager.js @@ -1,3 +1,8 @@ +/* eslint-disable + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/OutputFileFinder.js b/app/coffee/OutputFileFinder.js index f0f837c..21a7587 100644 --- a/app/coffee/OutputFileFinder.js +++ b/app/coffee/OutputFileFinder.js @@ -1,3 +1,11 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, + no-useless-escape, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -17,7 +25,7 @@ module.exports = (OutputFileFinder = { findOutputFiles(resources, directory, callback) { if (callback == null) { callback = function(error, outputFiles, allFiles) {}; } const incomingResources = {}; - for (let resource of Array.from(resources)) { + for (const resource of Array.from(resources)) { incomingResources[resource.path] = true; } @@ -28,7 +36,7 @@ module.exports = (OutputFileFinder = { return callback(error); } const outputFiles = []; - for (let file of Array.from(allFiles)) { + for (const file of Array.from(allFiles)) { if (!incomingResources[file]) { outputFiles.push({ path: file, diff --git a/app/coffee/OutputFileOptimiser.js b/app/coffee/OutputFileOptimiser.js index f8302aa..149d384 100644 --- a/app/coffee/OutputFileOptimiser.js +++ b/app/coffee/OutputFileOptimiser.js @@ -1,3 +1,12 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-undef, + no-unused-vars, + node/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/app/coffee/ProjectPersistenceManager.js b/app/coffee/ProjectPersistenceManager.js index 7b3d5ee..856c156 100644 --- a/app/coffee/ProjectPersistenceManager.js +++ b/app/coffee/ProjectPersistenceManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + camelcase, + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/RequestParser.js b/app/coffee/RequestParser.js index fdfb8bf..6641086 100644 --- a/app/coffee/RequestParser.js +++ b/app/coffee/RequestParser.js @@ -1,3 +1,14 @@ +/* eslint-disable + handle-callback-err, + no-control-regex, + no-throw-literal, + no-unused-vars, + no-useless-escape, + standard/no-callback-literal, + valid-typeof, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -172,7 +183,7 @@ module.exports = (RequestParser = { _checkPath(path) { // check that the request does not use a relative path - for (let dir of Array.from(path.split('/'))) { + for (const dir of Array.from(path.split('/'))) { if (dir === '..') { throw "relative path in root resource"; } diff --git a/app/coffee/ResourceStateManager.js b/app/coffee/ResourceStateManager.js index f430c8f..45cfdc6 100644 --- a/app/coffee/ResourceStateManager.js +++ b/app/coffee/ResourceStateManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -62,10 +68,10 @@ module.exports = (ResourceStateManager = { if (bytesRead === size) { logger.error({file:stateFile, size, bytesRead}, "project state file truncated"); } - const array = __guard__(result != null ? result.toString() : undefined, x => x.split("\n")) || [], - adjustedLength = Math.max(array.length, 1), - resourceList = array.slice(0, adjustedLength - 1), - oldState = array[adjustedLength - 1]; + const array = __guard__(result != null ? result.toString() : undefined, x => x.split("\n")) || []; + const adjustedLength = Math.max(array.length, 1); + const resourceList = array.slice(0, adjustedLength - 1); + const oldState = array[adjustedLength - 1]; const newState = `stateHash:${state}`; logger.log({state, oldState, basePath, stateMatches: (newState === oldState)}, "checking sync state"); if (newState !== oldState) { @@ -82,7 +88,7 @@ module.exports = (ResourceStateManager = { let file; if (callback == null) { callback = function(error) {}; } for (file of Array.from(resources || [])) { - for (let dir of Array.from(__guard__(file != null ? file.path : undefined, x => x.split('/')))) { + for (const dir of Array.from(__guard__(file != null ? file.path : undefined, x => x.split('/')))) { if (dir === '..') { return callback(new Error("relative path in resource file list")); } diff --git a/app/coffee/ResourceWriter.js b/app/coffee/ResourceWriter.js index 0044ad9..028fc53 100644 --- a/app/coffee/ResourceWriter.js +++ b/app/coffee/ResourceWriter.js @@ -1,3 +1,12 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, + no-unused-vars, + no-useless-escape, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -105,7 +114,7 @@ module.exports = (ResourceWriter = { if (error != null) { return callback(error); } const jobs = []; - for (let file of Array.from(outputFiles || [])) { + for (const file of Array.from(outputFiles || [])) { (function(file) { const { path } = file; let should_delete = true; @@ -182,7 +191,7 @@ module.exports = (ResourceWriter = { logger.err({err, project_id, path, resource_url:resource.url, modified:resource.modified}, "error downloading file for resources"); } return callback(); - }); //try and continue compiling even if http resource can not be downloaded at this time + }); // try and continue compiling even if http resource can not be downloaded at this time } else { const process = require("process"); fs.writeFile(path, resource.content, callback); diff --git a/app/coffee/SafeReader.js b/app/coffee/SafeReader.js index f1a6349..2fd599b 100644 --- a/app/coffee/SafeReader.js +++ b/app/coffee/SafeReader.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-unused-vars, + node/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/StaticServerForbidSymlinks.js b/app/coffee/StaticServerForbidSymlinks.js index 90a8879..8ac3e48 100644 --- a/app/coffee/StaticServerForbidSymlinks.js +++ b/app/coffee/StaticServerForbidSymlinks.js @@ -1,3 +1,11 @@ +/* eslint-disable + camelcase, + no-cond-assign, + no-unused-vars, + node/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -28,7 +36,7 @@ module.exports = (ForbidSymlinks = function(staticFn, root, options) { return res.sendStatus(404); } // check that the file does not use a relative path - for (let dir of Array.from(file.split('/'))) { + for (const dir of Array.from(file.split('/'))) { if (dir === '..') { logger.warn({path}, "attempt to use a relative path"); return res.sendStatus(404); diff --git a/app/coffee/TikzManager.js b/app/coffee/TikzManager.js index fb52644..9fa4a93 100644 --- a/app/coffee/TikzManager.js +++ b/app/coffee/TikzManager.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -21,7 +27,7 @@ module.exports = (TikzManager = { checkMainFile(compileDir, mainFile, resources, callback) { // if there's already an output.tex file, we don't want to touch it if (callback == null) { callback = function(error, needsMainFile) {}; } - for (let resource of Array.from(resources)) { + for (const resource of Array.from(resources)) { if (resource.path === "output.tex") { logger.log({compileDir, mainFile}, "output.tex already in resources"); return callback(null, false); diff --git a/app/coffee/UrlCache.js b/app/coffee/UrlCache.js index 9a19968..ade815b 100644 --- a/app/coffee/UrlCache.js +++ b/app/coffee/UrlCache.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/app/coffee/UrlFetcher.js b/app/coffee/UrlFetcher.js index ea20956..fec397c 100644 --- a/app/coffee/UrlFetcher.js +++ b/app/coffee/UrlFetcher.js @@ -1,3 +1,11 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, + node/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/app/coffee/db.js b/app/coffee/db.js index 385ad8d..c5dd980 100644 --- a/app/coffee/db.js +++ b/app/coffee/db.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-console, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns From 4576ef54fbbaf3e2a13da28bc36ce1d05fd64e99 Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:14:34 +0100 Subject: [PATCH 08/24] decaffeinate: rename app/coffee dir to app/js --- app/{coffee => js}/CommandRunner.js | 0 app/{coffee => js}/CompileController.js | 0 app/{coffee => js}/CompileManager.js | 0 app/{coffee => js}/ContentTypeMapper.js | 0 app/{coffee => js}/DbQueue.js | 0 app/{coffee => js}/DockerLockManager.js | 0 app/{coffee => js}/DockerRunner.js | 0 app/{coffee => js}/DraftModeManager.js | 0 app/{coffee => js}/Errors.js | 0 app/{coffee => js}/LatexRunner.js | 0 app/{coffee => js}/LocalCommandRunner.js | 0 app/{coffee => js}/LockManager.js | 0 app/{coffee => js}/Metrics.js | 0 app/{coffee => js}/OutputCacheManager.js | 0 app/{coffee => js}/OutputFileFinder.js | 0 app/{coffee => js}/OutputFileOptimiser.js | 0 app/{coffee => js}/ProjectPersistenceManager.js | 0 app/{coffee => js}/RequestParser.js | 0 app/{coffee => js}/ResourceStateManager.js | 0 app/{coffee => js}/ResourceWriter.js | 0 app/{coffee => js}/SafeReader.js | 0 app/{coffee => js}/StaticServerForbidSymlinks.js | 0 app/{coffee => js}/TikzManager.js | 0 app/{coffee => js}/UrlCache.js | 0 app/{coffee => js}/UrlFetcher.js | 0 app/{coffee => js}/db.js | 0 26 files changed, 0 insertions(+), 0 deletions(-) rename app/{coffee => js}/CommandRunner.js (100%) rename app/{coffee => js}/CompileController.js (100%) rename app/{coffee => js}/CompileManager.js (100%) rename app/{coffee => js}/ContentTypeMapper.js (100%) rename app/{coffee => js}/DbQueue.js (100%) rename app/{coffee => js}/DockerLockManager.js (100%) rename app/{coffee => js}/DockerRunner.js (100%) rename app/{coffee => js}/DraftModeManager.js (100%) rename app/{coffee => js}/Errors.js (100%) rename app/{coffee => js}/LatexRunner.js (100%) rename app/{coffee => js}/LocalCommandRunner.js (100%) rename app/{coffee => js}/LockManager.js (100%) rename app/{coffee => js}/Metrics.js (100%) rename app/{coffee => js}/OutputCacheManager.js (100%) rename app/{coffee => js}/OutputFileFinder.js (100%) rename app/{coffee => js}/OutputFileOptimiser.js (100%) rename app/{coffee => js}/ProjectPersistenceManager.js (100%) rename app/{coffee => js}/RequestParser.js (100%) rename app/{coffee => js}/ResourceStateManager.js (100%) rename app/{coffee => js}/ResourceWriter.js (100%) rename app/{coffee => js}/SafeReader.js (100%) rename app/{coffee => js}/StaticServerForbidSymlinks.js (100%) rename app/{coffee => js}/TikzManager.js (100%) rename app/{coffee => js}/UrlCache.js (100%) rename app/{coffee => js}/UrlFetcher.js (100%) rename app/{coffee => js}/db.js (100%) diff --git a/app/coffee/CommandRunner.js b/app/js/CommandRunner.js similarity index 100% rename from app/coffee/CommandRunner.js rename to app/js/CommandRunner.js diff --git a/app/coffee/CompileController.js b/app/js/CompileController.js similarity index 100% rename from app/coffee/CompileController.js rename to app/js/CompileController.js diff --git a/app/coffee/CompileManager.js b/app/js/CompileManager.js similarity index 100% rename from app/coffee/CompileManager.js rename to app/js/CompileManager.js diff --git a/app/coffee/ContentTypeMapper.js b/app/js/ContentTypeMapper.js similarity index 100% rename from app/coffee/ContentTypeMapper.js rename to app/js/ContentTypeMapper.js diff --git a/app/coffee/DbQueue.js b/app/js/DbQueue.js similarity index 100% rename from app/coffee/DbQueue.js rename to app/js/DbQueue.js 
diff --git a/app/coffee/DockerLockManager.js b/app/js/DockerLockManager.js similarity index 100% rename from app/coffee/DockerLockManager.js rename to app/js/DockerLockManager.js diff --git a/app/coffee/DockerRunner.js b/app/js/DockerRunner.js similarity index 100% rename from app/coffee/DockerRunner.js rename to app/js/DockerRunner.js diff --git a/app/coffee/DraftModeManager.js b/app/js/DraftModeManager.js similarity index 100% rename from app/coffee/DraftModeManager.js rename to app/js/DraftModeManager.js diff --git a/app/coffee/Errors.js b/app/js/Errors.js similarity index 100% rename from app/coffee/Errors.js rename to app/js/Errors.js diff --git a/app/coffee/LatexRunner.js b/app/js/LatexRunner.js similarity index 100% rename from app/coffee/LatexRunner.js rename to app/js/LatexRunner.js diff --git a/app/coffee/LocalCommandRunner.js b/app/js/LocalCommandRunner.js similarity index 100% rename from app/coffee/LocalCommandRunner.js rename to app/js/LocalCommandRunner.js diff --git a/app/coffee/LockManager.js b/app/js/LockManager.js similarity index 100% rename from app/coffee/LockManager.js rename to app/js/LockManager.js diff --git a/app/coffee/Metrics.js b/app/js/Metrics.js similarity index 100% rename from app/coffee/Metrics.js rename to app/js/Metrics.js diff --git a/app/coffee/OutputCacheManager.js b/app/js/OutputCacheManager.js similarity index 100% rename from app/coffee/OutputCacheManager.js rename to app/js/OutputCacheManager.js diff --git a/app/coffee/OutputFileFinder.js b/app/js/OutputFileFinder.js similarity index 100% rename from app/coffee/OutputFileFinder.js rename to app/js/OutputFileFinder.js diff --git a/app/coffee/OutputFileOptimiser.js b/app/js/OutputFileOptimiser.js similarity index 100% rename from app/coffee/OutputFileOptimiser.js rename to app/js/OutputFileOptimiser.js diff --git a/app/coffee/ProjectPersistenceManager.js b/app/js/ProjectPersistenceManager.js similarity index 100% rename from app/coffee/ProjectPersistenceManager.js rename to app/js/ProjectPersistenceManager.js diff --git a/app/coffee/RequestParser.js b/app/js/RequestParser.js similarity index 100% rename from app/coffee/RequestParser.js rename to app/js/RequestParser.js diff --git a/app/coffee/ResourceStateManager.js b/app/js/ResourceStateManager.js similarity index 100% rename from app/coffee/ResourceStateManager.js rename to app/js/ResourceStateManager.js diff --git a/app/coffee/ResourceWriter.js b/app/js/ResourceWriter.js similarity index 100% rename from app/coffee/ResourceWriter.js rename to app/js/ResourceWriter.js diff --git a/app/coffee/SafeReader.js b/app/js/SafeReader.js similarity index 100% rename from app/coffee/SafeReader.js rename to app/js/SafeReader.js diff --git a/app/coffee/StaticServerForbidSymlinks.js b/app/js/StaticServerForbidSymlinks.js similarity index 100% rename from app/coffee/StaticServerForbidSymlinks.js rename to app/js/StaticServerForbidSymlinks.js diff --git a/app/coffee/TikzManager.js b/app/js/TikzManager.js similarity index 100% rename from app/coffee/TikzManager.js rename to app/js/TikzManager.js diff --git a/app/coffee/UrlCache.js b/app/js/UrlCache.js similarity index 100% rename from app/coffee/UrlCache.js rename to app/js/UrlCache.js diff --git a/app/coffee/UrlFetcher.js b/app/js/UrlFetcher.js similarity index 100% rename from app/coffee/UrlFetcher.js rename to app/js/UrlFetcher.js diff --git a/app/coffee/db.js b/app/js/db.js similarity index 100% rename from app/coffee/db.js rename to app/js/db.js From cffbd4e9efbdab23534f9c3e96ea7a808417fa5d Mon Sep 17 
00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:14:37 +0100 Subject: [PATCH 09/24] prettier: convert app/js decaffeinated files to Prettier format --- app/js/CommandRunner.js | 18 +- app/js/CompileController.js | 356 +++++---- app/js/CompileManager.js | 1070 ++++++++++++++++---------- app/js/ContentTypeMapper.js | 53 +- app/js/DbQueue.js | 18 +- app/js/DockerLockManager.js | 166 ++-- app/js/DockerRunner.js | 1011 ++++++++++++++---------- app/js/DraftModeManager.js | 73 +- app/js/Errors.js | 48 +- app/js/LatexRunner.js | 273 ++++--- app/js/LocalCommandRunner.js | 117 +-- app/js/LockManager.js | 100 ++- app/js/Metrics.js | 3 +- app/js/OutputCacheManager.js | 602 +++++++++------ app/js/OutputFileFinder.js | 155 ++-- app/js/OutputFileOptimiser.js | 152 ++-- app/js/ProjectPersistenceManager.js | 242 +++--- app/js/RequestParser.js | 334 ++++---- app/js/ResourceStateManager.js | 224 +++--- app/js/ResourceWriter.js | 511 +++++++----- app/js/SafeReader.js | 73 +- app/js/StaticServerForbidSymlinks.js | 128 +-- app/js/TikzManager.js | 118 +-- app/js/UrlCache.js | 420 ++++++---- app/js/UrlFetcher.js | 166 ++-- app/js/db.js | 89 +-- 26 files changed, 3881 insertions(+), 2639 deletions(-) diff --git a/app/js/CommandRunner.js b/app/js/CommandRunner.js index 1c46ee9..8e07dac 100644 --- a/app/js/CommandRunner.js +++ b/app/js/CommandRunner.js @@ -5,16 +5,16 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let commandRunnerPath; -const Settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); +let commandRunnerPath +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') if ((Settings.clsi != null ? 
Settings.clsi.dockerRunner : undefined) === true) { - commandRunnerPath = "./DockerRunner"; -} else { - commandRunnerPath = "./LocalCommandRunner"; + commandRunnerPath = './DockerRunner' +} else { + commandRunnerPath = './LocalCommandRunner' } -logger.info({commandRunnerPath}, "selecting command runner for clsi"); -const CommandRunner = require(commandRunnerPath); +logger.info({ commandRunnerPath }, 'selecting command runner for clsi') +const CommandRunner = require(commandRunnerPath) -module.exports = CommandRunner; +module.exports = CommandRunner diff --git a/app/js/CompileController.js b/app/js/CompileController.js index 60925fc..e146b62 100644 --- a/app/js/CompileController.js +++ b/app/js/CompileController.js @@ -12,159 +12,227 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let CompileController; -const RequestParser = require("./RequestParser"); -const CompileManager = require("./CompileManager"); -const Settings = require("settings-sharelatex"); -const Metrics = require("./Metrics"); -const ProjectPersistenceManager = require("./ProjectPersistenceManager"); -const logger = require("logger-sharelatex"); -const Errors = require("./Errors"); +let CompileController +const RequestParser = require('./RequestParser') +const CompileManager = require('./CompileManager') +const Settings = require('settings-sharelatex') +const Metrics = require('./Metrics') +const ProjectPersistenceManager = require('./ProjectPersistenceManager') +const logger = require('logger-sharelatex') +const Errors = require('./Errors') -module.exports = (CompileController = { - compile(req, res, next) { - if (next == null) { next = function(error) {}; } - const timer = new Metrics.Timer("compile-request"); - return RequestParser.parse(req.body, function(error, request) { - if (error != null) { return next(error); } - request.project_id = req.params.project_id; - if (req.params.user_id != null) { request.user_id = req.params.user_id; } - return ProjectPersistenceManager.markProjectAsJustAccessed(request.project_id, function(error) { - if (error != null) { return next(error); } - return CompileManager.doCompileWithLock(request, function(error, outputFiles) { - let code, status; - if (outputFiles == null) { outputFiles = []; } - if (error instanceof Errors.AlreadyCompilingError) { - code = 423; // Http 423 Locked - status = "compile-in-progress"; - } else if (error instanceof Errors.FilesOutOfSyncError) { - code = 409; // Http 409 Conflict - status = "retry"; - } else if (error != null ? error.terminated : undefined) { - status = "terminated"; - } else if (error != null ? error.validate : undefined) { - status = `validation-${error.validate}`; - } else if (error != null ? error.timedout : undefined) { - status = "timedout"; - logger.log({err: error, project_id: request.project_id}, "timeout running compile"); - } else if (error != null) { - status = "error"; - code = 500; - logger.warn({err: error, project_id: request.project_id}, "error running compile"); - } else { - let file; - status = "failure"; - for (file of Array.from(outputFiles)) { - if (file.path != null ? 
file.path.match(/output\.pdf$/) : undefined) { - status = "success"; - } - } +module.exports = CompileController = { + compile(req, res, next) { + if (next == null) { + next = function(error) {} + } + const timer = new Metrics.Timer('compile-request') + return RequestParser.parse(req.body, function(error, request) { + if (error != null) { + return next(error) + } + request.project_id = req.params.project_id + if (req.params.user_id != null) { + request.user_id = req.params.user_id + } + return ProjectPersistenceManager.markProjectAsJustAccessed( + request.project_id, + function(error) { + if (error != null) { + return next(error) + } + return CompileManager.doCompileWithLock(request, function( + error, + outputFiles + ) { + let code, status + if (outputFiles == null) { + outputFiles = [] + } + if (error instanceof Errors.AlreadyCompilingError) { + code = 423 // Http 423 Locked + status = 'compile-in-progress' + } else if (error instanceof Errors.FilesOutOfSyncError) { + code = 409 // Http 409 Conflict + status = 'retry' + } else if (error != null ? error.terminated : undefined) { + status = 'terminated' + } else if (error != null ? error.validate : undefined) { + status = `validation-${error.validate}` + } else if (error != null ? error.timedout : undefined) { + status = 'timedout' + logger.log( + { err: error, project_id: request.project_id }, + 'timeout running compile' + ) + } else if (error != null) { + status = 'error' + code = 500 + logger.warn( + { err: error, project_id: request.project_id }, + 'error running compile' + ) + } else { + let file + status = 'failure' + for (file of Array.from(outputFiles)) { + if ( + file.path != null + ? file.path.match(/output\.pdf$/) + : undefined + ) { + status = 'success' + } + } - if (status === "failure") { - logger.warn({project_id: request.project_id, outputFiles}, "project failed to compile successfully, no output.pdf generated"); - } + if (status === 'failure') { + logger.warn( + { project_id: request.project_id, outputFiles }, + 'project failed to compile successfully, no output.pdf generated' + ) + } - // log an error if any core files are found - for (file of Array.from(outputFiles)) { - if (file.path === "core") { - logger.error({project_id:request.project_id, req, outputFiles}, "core file found in output"); - } - } - } + // log an error if any core files are found + for (file of Array.from(outputFiles)) { + if (file.path === 'core') { + logger.error( + { project_id: request.project_id, req, outputFiles }, + 'core file found in output' + ) + } + } + } - if (error != null) { - outputFiles = error.outputFiles || []; - } + if (error != null) { + outputFiles = error.outputFiles || [] + } - timer.done(); - return res.status(code || 200).send({ - compile: { - status, - error: (error != null ? error.message : undefined) || error, - outputFiles: outputFiles.map(file => - ({ - url: - `${Settings.apis.clsi.url}/project/${request.project_id}` + - ((request.user_id != null) ? `/user/${request.user_id}` : "") + - ((file.build != null) ? `/build/${file.build}` : "") + - `/output/${file.path}`, - path: file.path, - type: file.type, - build: file.build - }) - ) - } - }); - }); - }); - }); - }, + timer.done() + return res.status(code || 200).send({ + compile: { + status, + error: (error != null ? error.message : undefined) || error, + outputFiles: outputFiles.map(file => ({ + url: + `${Settings.apis.clsi.url}/project/${request.project_id}` + + (request.user_id != null + ? `/user/${request.user_id}` + : '') + + (file.build != null ? 
`/build/${file.build}` : '') + + `/output/${file.path}`, + path: file.path, + type: file.type, + build: file.build + })) + } + }) + }) + } + ) + }) + }, - stopCompile(req, res, next) { - const {project_id, user_id} = req.params; - return CompileManager.stopCompile(project_id, user_id, function(error) { - if (error != null) { return next(error); } - return res.sendStatus(204); - }); - }, + stopCompile(req, res, next) { + const { project_id, user_id } = req.params + return CompileManager.stopCompile(project_id, user_id, function(error) { + if (error != null) { + return next(error) + } + return res.sendStatus(204) + }) + }, - clearCache(req, res, next) { - if (next == null) { next = function(error) {}; } - return ProjectPersistenceManager.clearProject(req.params.project_id, req.params.user_id, function(error) { - if (error != null) { return next(error); } - return res.sendStatus(204); - }); - }, // No content + clearCache(req, res, next) { + if (next == null) { + next = function(error) {} + } + return ProjectPersistenceManager.clearProject( + req.params.project_id, + req.params.user_id, + function(error) { + if (error != null) { + return next(error) + } + return res.sendStatus(204) + } + ) + }, // No content - syncFromCode(req, res, next) { - if (next == null) { next = function(error) {}; } - const { file } = req.query; - const line = parseInt(req.query.line, 10); - const column = parseInt(req.query.column, 10); - const { project_id } = req.params; - const { user_id } = req.params; - return CompileManager.syncFromCode(project_id, user_id, file, line, column, function(error, pdfPositions) { - if (error != null) { return next(error); } - return res.json({ - pdf: pdfPositions - }); - }); - }, + syncFromCode(req, res, next) { + if (next == null) { + next = function(error) {} + } + const { file } = req.query + const line = parseInt(req.query.line, 10) + const column = parseInt(req.query.column, 10) + const { project_id } = req.params + const { user_id } = req.params + return CompileManager.syncFromCode( + project_id, + user_id, + file, + line, + column, + function(error, pdfPositions) { + if (error != null) { + return next(error) + } + return res.json({ + pdf: pdfPositions + }) + } + ) + }, - syncFromPdf(req, res, next) { - if (next == null) { next = function(error) {}; } - const page = parseInt(req.query.page, 10); - const h = parseFloat(req.query.h); - const v = parseFloat(req.query.v); - const { project_id } = req.params; - const { user_id } = req.params; - return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(error, codePositions) { - if (error != null) { return next(error); } - return res.json({ - code: codePositions - }); - }); - }, + syncFromPdf(req, res, next) { + if (next == null) { + next = function(error) {} + } + const page = parseInt(req.query.page, 10) + const h = parseFloat(req.query.h) + const v = parseFloat(req.query.v) + const { project_id } = req.params + const { user_id } = req.params + return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function( + error, + codePositions + ) { + if (error != null) { + return next(error) + } + return res.json({ + code: codePositions + }) + }) + }, - wordcount(req, res, next) { - if (next == null) { next = function(error) {}; } - const file = req.query.file || "main.tex"; - const { project_id } = req.params; - const { user_id } = req.params; - const { image } = req.query; - logger.log({image, file, project_id}, "word count request"); + wordcount(req, res, next) { + if (next == null) { + next = function(error) 
{} + } + const file = req.query.file || 'main.tex' + const { project_id } = req.params + const { user_id } = req.params + const { image } = req.query + logger.log({ image, file, project_id }, 'word count request') - return CompileManager.wordcount(project_id, user_id, file, image, function(error, result) { - if (error != null) { return next(error); } - return res.json({ - texcount: result - }); - }); - }, - - status(req, res, next ){ - if (next == null) { next = function(error){}; } - return res.send("OK"); - } -}); + return CompileManager.wordcount(project_id, user_id, file, image, function( + error, + result + ) { + if (error != null) { + return next(error) + } + return res.json({ + texcount: result + }) + }) + }, + status(req, res, next) { + if (next == null) { + next = function(error) {} + } + return res.send('OK') + } +} diff --git a/app/js/CompileManager.js b/app/js/CompileManager.js index 76fb8b0..3bf54bc 100644 --- a/app/js/CompileManager.js +++ b/app/js/CompileManager.js @@ -15,449 +15,691 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let CompileManager; -const ResourceWriter = require("./ResourceWriter"); -const LatexRunner = require("./LatexRunner"); -const OutputFileFinder = require("./OutputFileFinder"); -const OutputCacheManager = require("./OutputCacheManager"); -const Settings = require("settings-sharelatex"); -const Path = require("path"); -const logger = require("logger-sharelatex"); -const Metrics = require("./Metrics"); -const child_process = require("child_process"); -const DraftModeManager = require("./DraftModeManager"); -const TikzManager = require("./TikzManager"); -const LockManager = require("./LockManager"); -const fs = require("fs"); -const fse = require("fs-extra"); -const os = require("os"); -const async = require("async"); -const Errors = require('./Errors'); -const CommandRunner = require("./CommandRunner"); +let CompileManager +const ResourceWriter = require('./ResourceWriter') +const LatexRunner = require('./LatexRunner') +const OutputFileFinder = require('./OutputFileFinder') +const OutputCacheManager = require('./OutputCacheManager') +const Settings = require('settings-sharelatex') +const Path = require('path') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') +const child_process = require('child_process') +const DraftModeManager = require('./DraftModeManager') +const TikzManager = require('./TikzManager') +const LockManager = require('./LockManager') +const fs = require('fs') +const fse = require('fs-extra') +const os = require('os') +const async = require('async') +const Errors = require('./Errors') +const CommandRunner = require('./CommandRunner') const getCompileName = function(project_id, user_id) { - if (user_id != null) { return `${project_id}-${user_id}`; } else { return project_id; } -}; + if (user_id != null) { + return `${project_id}-${user_id}` + } else { + return project_id + } +} -const getCompileDir = (project_id, user_id) => Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id)); +const getCompileDir = (project_id, user_id) => + Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id)) -module.exports = (CompileManager = { +module.exports = CompileManager = { + doCompileWithLock(request, callback) { + if (callback == null) { + callback = function(error, outputFiles) {} + } + const compileDir = getCompileDir(request.project_id, request.user_id) + const lockFile = 
Path.join(compileDir, '.project-lock') + // use a .project-lock file in the compile directory to prevent + // simultaneous compiles + return fse.ensureDir(compileDir, function(error) { + if (error != null) { + return callback(error) + } + return LockManager.runWithLock( + lockFile, + releaseLock => CompileManager.doCompile(request, releaseLock), + callback + ) + }) + }, - doCompileWithLock(request, callback) { - if (callback == null) { callback = function(error, outputFiles) {}; } - const compileDir = getCompileDir(request.project_id, request.user_id); - const lockFile = Path.join(compileDir, ".project-lock"); - // use a .project-lock file in the compile directory to prevent - // simultaneous compiles - return fse.ensureDir(compileDir, function(error) { - if (error != null) { return callback(error); } - return LockManager.runWithLock(lockFile, releaseLock => CompileManager.doCompile(request, releaseLock) - , callback); - }); - }, + doCompile(request, callback) { + if (callback == null) { + callback = function(error, outputFiles) {} + } + const compileDir = getCompileDir(request.project_id, request.user_id) + let timer = new Metrics.Timer('write-to-disk') + logger.log( + { project_id: request.project_id, user_id: request.user_id }, + 'syncing resources to disk' + ) + return ResourceWriter.syncResourcesToDisk(request, compileDir, function( + error, + resourceList + ) { + // NOTE: resourceList is insecure, it should only be used to exclude files from the output list + if (error != null && error instanceof Errors.FilesOutOfSyncError) { + logger.warn( + { project_id: request.project_id, user_id: request.user_id }, + 'files out of sync, please retry' + ) + return callback(error) + } else if (error != null) { + logger.err( + { + err: error, + project_id: request.project_id, + user_id: request.user_id + }, + 'error writing resources to disk' + ) + return callback(error) + } + logger.log( + { + project_id: request.project_id, + user_id: request.user_id, + time_taken: Date.now() - timer.start + }, + 'written files to disk' + ) + timer.done() - doCompile(request, callback) { - if (callback == null) { callback = function(error, outputFiles) {}; } - const compileDir = getCompileDir(request.project_id, request.user_id); - let timer = new Metrics.Timer("write-to-disk"); - logger.log({project_id: request.project_id, user_id: request.user_id}, "syncing resources to disk"); - return ResourceWriter.syncResourcesToDisk(request, compileDir, function(error, resourceList) { - // NOTE: resourceList is insecure, it should only be used to exclude files from the output list - if ((error != null) && error instanceof Errors.FilesOutOfSyncError) { - logger.warn({project_id: request.project_id, user_id: request.user_id}, "files out of sync, please retry"); - return callback(error); - } else if (error != null) { - logger.err({err:error, project_id: request.project_id, user_id: request.user_id}, "error writing resources to disk"); - return callback(error); - } - logger.log({project_id: request.project_id, user_id: request.user_id, time_taken: Date.now() - timer.start}, "written files to disk"); - timer.done(); + const injectDraftModeIfRequired = function(callback) { + if (request.draft) { + return DraftModeManager.injectDraftMode( + Path.join(compileDir, request.rootResourcePath), + callback + ) + } else { + return callback() + } + } - const injectDraftModeIfRequired = function(callback) { - if (request.draft) { - return DraftModeManager.injectDraftMode(Path.join(compileDir, request.rootResourcePath), callback); - } else 
{ - return callback(); - } - }; + const createTikzFileIfRequired = callback => + TikzManager.checkMainFile( + compileDir, + request.rootResourcePath, + resourceList, + function(error, needsMainFile) { + if (error != null) { + return callback(error) + } + if (needsMainFile) { + return TikzManager.injectOutputFile( + compileDir, + request.rootResourcePath, + callback + ) + } else { + return callback() + } + } + ) + // set up environment variables for chktex + const env = {} + // only run chktex on LaTeX files (not knitr .Rtex files or any others) + const isLaTeXFile = + request.rootResourcePath != null + ? request.rootResourcePath.match(/\.tex$/i) + : undefined + if (request.check != null && isLaTeXFile) { + env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16' + env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000' + if (request.check === 'error') { + env.CHKTEX_EXIT_ON_ERROR = 1 + } + if (request.check === 'validate') { + env.CHKTEX_VALIDATE = 1 + } + } - const createTikzFileIfRequired = callback => - TikzManager.checkMainFile(compileDir, request.rootResourcePath, resourceList, function(error, needsMainFile) { - if (error != null) { return callback(error); } - if (needsMainFile) { - return TikzManager.injectOutputFile(compileDir, request.rootResourcePath, callback); - } else { - return callback(); - } - }) - ; + // apply a series of file modifications/creations for draft mode and tikz + return async.series( + [injectDraftModeIfRequired, createTikzFileIfRequired], + function(error) { + if (error != null) { + return callback(error) + } + timer = new Metrics.Timer('run-compile') + // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite) + let tag = + __guard__( + __guard__( + request.imageName != null + ? request.imageName.match(/:(.*)/) + : undefined, + x1 => x1[1] + ), + x => x.replace(/\./g, '-') + ) || 'default' + if (!request.project_id.match(/^[0-9a-f]{24}$/)) { + tag = 'other' + } // exclude smoke test + Metrics.inc('compiles') + Metrics.inc(`compiles-with-image.${tag}`) + const compileName = getCompileName( + request.project_id, + request.user_id + ) + return LatexRunner.runLatex( + compileName, + { + directory: compileDir, + mainFile: request.rootResourcePath, + compiler: request.compiler, + timeout: request.timeout, + image: request.imageName, + flags: request.flags, + environment: env + }, + function(error, output, stats, timings) { + // request was for validation only + let metric_key, metric_value + if (request.check === 'validate') { + const result = (error != null + ? error.code + : undefined) + ? 'fail' + : 'pass' + error = new Error('validation') + error.validate = result + } + // request was for compile, and failed on validation + if ( + request.check === 'error' && + (error != null ? error.message : undefined) === 'exited' + ) { + error = new Error('compilation') + error.validate = 'fail' + } + // compile was killed by user, was a validation, or a compile which failed validation + if ( + (error != null ? error.terminated : undefined) || + (error != null ? error.validate : undefined) || + (error != null ? 
error.timedout : undefined) + ) { + OutputFileFinder.findOutputFiles( + resourceList, + compileDir, + function(err, outputFiles) { + if (err != null) { + return callback(err) + } + error.outputFiles = outputFiles // return output files so user can check logs + return callback(error) + } + ) + return + } + // compile completed normally + if (error != null) { + return callback(error) + } + Metrics.inc('compiles-succeeded') + const object = stats || {} + for (metric_key in object) { + metric_value = object[metric_key] + Metrics.count(metric_key, metric_value) + } + const object1 = timings || {} + for (metric_key in object1) { + metric_value = object1[metric_key] + Metrics.timing(metric_key, metric_value) + } + const loadavg = + typeof os.loadavg === 'function' ? os.loadavg() : undefined + if (loadavg != null) { + Metrics.gauge('load-avg', loadavg[0]) + } + const ts = timer.done() + logger.log( + { + project_id: request.project_id, + user_id: request.user_id, + time_taken: ts, + stats, + timings, + loadavg + }, + 'done compile' + ) + if ((stats != null ? stats['latex-runs'] : undefined) > 0) { + Metrics.timing('run-compile-per-pass', ts / stats['latex-runs']) + } + if ( + (stats != null ? stats['latex-runs'] : undefined) > 0 && + (timings != null ? timings['cpu-time'] : undefined) > 0 + ) { + Metrics.timing( + 'run-compile-cpu-time-per-pass', + timings['cpu-time'] / stats['latex-runs'] + ) + } - // set up environment variables for chktex - const env = {}; - // only run chktex on LaTeX files (not knitr .Rtex files or any others) - const isLaTeXFile = request.rootResourcePath != null ? request.rootResourcePath.match(/\.tex$/i) : undefined; - if ((request.check != null) && isLaTeXFile) { - env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'; - env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'; - if (request.check === 'error') { - env.CHKTEX_EXIT_ON_ERROR = 1; - } - if (request.check === 'validate') { - env.CHKTEX_VALIDATE = 1; - } - } + return OutputFileFinder.findOutputFiles( + resourceList, + compileDir, + function(error, outputFiles) { + if (error != null) { + return callback(error) + } + return OutputCacheManager.saveOutputFiles( + outputFiles, + compileDir, + (error, newOutputFiles) => callback(null, newOutputFiles) + ) + } + ) + } + ) + } + ) + }) + }, - // apply a series of file modifications/creations for draft mode and tikz - return async.series([injectDraftModeIfRequired, createTikzFileIfRequired], function(error) { - if (error != null) { return callback(error); } - timer = new Metrics.Timer("run-compile"); - // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite) - let tag = __guard__(__guard__(request.imageName != null ? request.imageName.match(/:(.*)/) : undefined, x1 => x1[1]), x => x.replace(/\./g,'-')) || "default"; - if (!request.project_id.match(/^[0-9a-f]{24}$/)) { tag = "other"; } // exclude smoke test - Metrics.inc("compiles"); - Metrics.inc(`compiles-with-image.${tag}`); - const compileName = getCompileName(request.project_id, request.user_id); - return LatexRunner.runLatex(compileName, { - directory: compileDir, - mainFile: request.rootResourcePath, - compiler: request.compiler, - timeout: request.timeout, - image: request.imageName, - flags: request.flags, - environment: env - }, function(error, output, stats, timings) { - // request was for validation only - let metric_key, metric_value; - if (request.check === "validate") { - const result = (error != null ? error.code : undefined) ? 
"fail" : "pass"; - error = new Error("validation"); - error.validate = result; - } - // request was for compile, and failed on validation - if ((request.check === "error") && ((error != null ? error.message : undefined) === 'exited')) { - error = new Error("compilation"); - error.validate = "fail"; - } - // compile was killed by user, was a validation, or a compile which failed validation - if ((error != null ? error.terminated : undefined) || (error != null ? error.validate : undefined) || (error != null ? error.timedout : undefined)) { - OutputFileFinder.findOutputFiles(resourceList, compileDir, function(err, outputFiles) { - if (err != null) { return callback(err); } - error.outputFiles = outputFiles; // return output files so user can check logs - return callback(error); - }); - return; - } - // compile completed normally - if (error != null) { return callback(error); } - Metrics.inc("compiles-succeeded"); - const object = stats || {}; - for (metric_key in object) { - metric_value = object[metric_key]; - Metrics.count(metric_key, metric_value); - } - const object1 = timings || {}; - for (metric_key in object1) { - metric_value = object1[metric_key]; - Metrics.timing(metric_key, metric_value); - } - const loadavg = typeof os.loadavg === 'function' ? os.loadavg() : undefined; - if (loadavg != null) { Metrics.gauge("load-avg", loadavg[0]); } - const ts = timer.done(); - logger.log({project_id: request.project_id, user_id: request.user_id, time_taken: ts, stats, timings, loadavg}, "done compile"); - if ((stats != null ? stats["latex-runs"] : undefined) > 0) { - Metrics.timing("run-compile-per-pass", ts / stats["latex-runs"]); - } - if (((stats != null ? stats["latex-runs"] : undefined) > 0) && ((timings != null ? timings["cpu-time"] : undefined) > 0)) { - Metrics.timing("run-compile-cpu-time-per-pass", timings["cpu-time"] / stats["latex-runs"]); - } + stopCompile(project_id, user_id, callback) { + if (callback == null) { + callback = function(error) {} + } + const compileName = getCompileName(project_id, user_id) + return LatexRunner.killLatex(compileName, callback) + }, - return OutputFileFinder.findOutputFiles(resourceList, compileDir, function(error, outputFiles) { - if (error != null) { return callback(error); } - return OutputCacheManager.saveOutputFiles(outputFiles, compileDir, (error, newOutputFiles) => callback(null, newOutputFiles)); - }); - }); - }); - }); - }, + clearProject(project_id, user_id, _callback) { + if (_callback == null) { + _callback = function(error) {} + } + const callback = function(error) { + _callback(error) + return (_callback = function() {}) + } - stopCompile(project_id, user_id, callback) { - if (callback == null) { callback = function(error) {}; } - const compileName = getCompileName(project_id, user_id); - return LatexRunner.killLatex(compileName, callback); - }, + const compileDir = getCompileDir(project_id, user_id) - clearProject(project_id, user_id, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const callback = function(error) { - _callback(error); - return _callback = function() {}; - }; + return CompileManager._checkDirectory(compileDir, function(err, exists) { + if (err != null) { + return callback(err) + } + if (!exists) { + return callback() + } // skip removal if no directory present - const compileDir = getCompileDir(project_id, user_id); + const proc = child_process.spawn('rm', ['-r', compileDir]) - return CompileManager._checkDirectory(compileDir, function(err, exists) { - if (err != null) { return callback(err); 
} - if (!exists) { return callback(); } // skip removal if no directory present + proc.on('error', callback) - const proc = child_process.spawn("rm", ["-r", compileDir]); + let stderr = '' + proc.stderr.on('data', chunk => (stderr += chunk.toString())) - proc.on("error", callback); + return proc.on('close', function(code) { + if (code === 0) { + return callback(null) + } else { + return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`)) + } + }) + }) + }, - let stderr = ""; - proc.stderr.on("data", chunk => stderr += chunk.toString()); + _findAllDirs(callback) { + if (callback == null) { + callback = function(error, allDirs) {} + } + const root = Settings.path.compilesDir + return fs.readdir(root, function(err, files) { + if (err != null) { + return callback(err) + } + const allDirs = Array.from(files).map(file => Path.join(root, file)) + return callback(null, allDirs) + }) + }, - return proc.on("close", function(code) { - if (code === 0) { - return callback(null); - } else { - return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`)); - } - }); - }); - }, + clearExpiredProjects(max_cache_age_ms, callback) { + if (callback == null) { + callback = function(error) {} + } + const now = Date.now() + // action for each directory + const expireIfNeeded = (checkDir, cb) => + fs.stat(checkDir, function(err, stats) { + if (err != null) { + return cb() + } // ignore errors checking directory + const age = now - stats.mtime + const hasExpired = age > max_cache_age_ms + if (hasExpired) { + return fse.remove(checkDir, cb) + } else { + return cb() + } + }) + // iterate over all project directories + return CompileManager._findAllDirs(function(error, allDirs) { + if (error != null) { + return callback() + } + return async.eachSeries(allDirs, expireIfNeeded, callback) + }) + }, - _findAllDirs(callback) { - if (callback == null) { callback = function(error, allDirs) {}; } - const root = Settings.path.compilesDir; - return fs.readdir(root, function(err, files) { - if (err != null) { return callback(err); } - const allDirs = (Array.from(files).map((file) => Path.join(root, file))); - return callback(null, allDirs); - }); - }, + _checkDirectory(compileDir, callback) { + if (callback == null) { + callback = function(error, exists) {} + } + return fs.lstat(compileDir, function(err, stats) { + if ((err != null ? err.code : undefined) === 'ENOENT') { + return callback(null, false) // directory does not exist + } else if (err != null) { + logger.err( + { dir: compileDir, err }, + 'error on stat of project directory for removal' + ) + return callback(err) + } else if (!(stats != null ? 
stats.isDirectory() : undefined)) { + logger.err( + { dir: compileDir, stats }, + 'bad project directory for removal' + ) + return callback(new Error('project directory is not directory')) + } else { + return callback(null, true) + } + }) + }, // directory exists - clearExpiredProjects(max_cache_age_ms, callback) { - if (callback == null) { callback = function(error) {}; } - const now = Date.now(); - // action for each directory - const expireIfNeeded = (checkDir, cb) => - fs.stat(checkDir, function(err, stats) { - if (err != null) { return cb(); } // ignore errors checking directory - const age = now - stats.mtime; - const hasExpired = (age > max_cache_age_ms); - if (hasExpired) { return fse.remove(checkDir, cb); } else { return cb(); } - }) - ; - // iterate over all project directories - return CompileManager._findAllDirs(function(error, allDirs) { - if (error != null) { return callback(); } - return async.eachSeries(allDirs, expireIfNeeded, callback); - }); - }, + syncFromCode(project_id, user_id, file_name, line, column, callback) { + // If LaTeX was run in a virtual environment, the file path that synctex expects + // might not match the file path on the host. The .synctex.gz file however, will be accessed + // wherever it is on the host. + if (callback == null) { + callback = function(error, pdfPositions) {} + } + const compileName = getCompileName(project_id, user_id) + const base_dir = Settings.path.synctexBaseDir(compileName) + const file_path = base_dir + '/' + file_name + const compileDir = getCompileDir(project_id, user_id) + const synctex_path = `${base_dir}/output.pdf` + const command = ['code', synctex_path, file_path, line, column] + return fse.ensureDir(compileDir, function(error) { + if (error != null) { + logger.err( + { error, project_id, user_id, file_name }, + 'error ensuring dir for sync from code' + ) + return callback(error) + } + return CompileManager._runSynctex(project_id, user_id, command, function( + error, + stdout + ) { + if (error != null) { + return callback(error) + } + logger.log( + { project_id, user_id, file_name, line, column, command, stdout }, + 'synctex code output' + ) + return callback( + null, + CompileManager._parseSynctexFromCodeOutput(stdout) + ) + }) + }) + }, - _checkDirectory(compileDir, callback) { - if (callback == null) { callback = function(error, exists) {}; } - return fs.lstat(compileDir, function(err, stats) { - if ((err != null ? err.code : undefined) === 'ENOENT') { - return callback(null, false); // directory does not exist - } else if (err != null) { - logger.err({dir: compileDir, err}, "error on stat of project directory for removal"); - return callback(err); - } else if (!(stats != null ? 
stats.isDirectory() : undefined)) { - logger.err({dir: compileDir, stats}, "bad project directory for removal"); - return callback(new Error("project directory is not directory")); - } else { - return callback(null, true); - } - }); - }, // directory exists + syncFromPdf(project_id, user_id, page, h, v, callback) { + if (callback == null) { + callback = function(error, filePositions) {} + } + const compileName = getCompileName(project_id, user_id) + const compileDir = getCompileDir(project_id, user_id) + const base_dir = Settings.path.synctexBaseDir(compileName) + const synctex_path = `${base_dir}/output.pdf` + const command = ['pdf', synctex_path, page, h, v] + return fse.ensureDir(compileDir, function(error) { + if (error != null) { + logger.err( + { error, project_id, user_id, file_name }, + 'error ensuring dir for sync to code' + ) + return callback(error) + } + return CompileManager._runSynctex(project_id, user_id, command, function( + error, + stdout + ) { + if (error != null) { + return callback(error) + } + logger.log( + { project_id, user_id, page, h, v, stdout }, + 'synctex pdf output' + ) + return callback( + null, + CompileManager._parseSynctexFromPdfOutput(stdout, base_dir) + ) + }) + }) + }, - syncFromCode(project_id, user_id, file_name, line, column, callback) { - // If LaTeX was run in a virtual environment, the file path that synctex expects - // might not match the file path on the host. The .synctex.gz file however, will be accessed - // wherever it is on the host. - if (callback == null) { callback = function(error, pdfPositions) {}; } - const compileName = getCompileName(project_id, user_id); - const base_dir = Settings.path.synctexBaseDir(compileName); - const file_path = base_dir + "/" + file_name; - const compileDir = getCompileDir(project_id, user_id); - const synctex_path = `${base_dir}/output.pdf`; - const command = ["code", synctex_path, file_path, line, column]; - return fse.ensureDir(compileDir, function(error) { - if (error != null) { - logger.err({error, project_id, user_id, file_name}, "error ensuring dir for sync from code"); - return callback(error); - } - return CompileManager._runSynctex(project_id, user_id, command, function(error, stdout) { - if (error != null) { return callback(error); } - logger.log({project_id, user_id, file_name, line, column, command, stdout}, "synctex code output"); - return callback(null, CompileManager._parseSynctexFromCodeOutput(stdout)); - }); - }); - }, + _checkFileExists(path, callback) { + if (callback == null) { + callback = function(error) {} + } + const synctexDir = Path.dirname(path) + const synctexFile = Path.join(synctexDir, 'output.synctex.gz') + return fs.stat(synctexDir, function(error, stats) { + if ((error != null ? error.code : undefined) === 'ENOENT') { + return callback( + new Errors.NotFoundError('called synctex with no output directory') + ) + } + if (error != null) { + return callback(error) + } + return fs.stat(synctexFile, function(error, stats) { + if ((error != null ? error.code : undefined) === 'ENOENT') { + return callback( + new Errors.NotFoundError('called synctex with no output file') + ) + } + if (error != null) { + return callback(error) + } + if (!(stats != null ? 
stats.isFile() : undefined)) { + return callback(new Error('not a file')) + } + return callback() + }) + }) + }, - syncFromPdf(project_id, user_id, page, h, v, callback) { - if (callback == null) { callback = function(error, filePositions) {}; } - const compileName = getCompileName(project_id, user_id); - const compileDir = getCompileDir(project_id, user_id); - const base_dir = Settings.path.synctexBaseDir(compileName); - const synctex_path = `${base_dir}/output.pdf`; - const command = ["pdf", synctex_path, page, h, v]; - return fse.ensureDir(compileDir, function(error) { - if (error != null) { - logger.err({error, project_id, user_id, file_name}, "error ensuring dir for sync to code"); - return callback(error); - } - return CompileManager._runSynctex(project_id, user_id, command, function(error, stdout) { - if (error != null) { return callback(error); } - logger.log({project_id, user_id, page, h, v, stdout}, "synctex pdf output"); - return callback(null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)); - }); - }); - }, + _runSynctex(project_id, user_id, command, callback) { + if (callback == null) { + callback = function(error, stdout) {} + } + const seconds = 1000 - _checkFileExists(path, callback) { - if (callback == null) { callback = function(error) {}; } - const synctexDir = Path.dirname(path); - const synctexFile = Path.join(synctexDir, "output.synctex.gz"); - return fs.stat(synctexDir, function(error, stats) { - if ((error != null ? error.code : undefined) === 'ENOENT') { - return callback(new Errors.NotFoundError("called synctex with no output directory")); - } - if (error != null) { return callback(error); } - return fs.stat(synctexFile, function(error, stats) { - if ((error != null ? error.code : undefined) === 'ENOENT') { - return callback(new Errors.NotFoundError("called synctex with no output file")); - } - if (error != null) { return callback(error); } - if (!(stats != null ? stats.isFile() : undefined)) { return callback(new Error("not a file")); } - return callback(); - }); - }); - }, + command.unshift('/opt/synctex') - _runSynctex(project_id, user_id, command, callback) { - if (callback == null) { callback = function(error, stdout) {}; } - const seconds = 1000; + const directory = getCompileDir(project_id, user_id) + const timeout = 60 * 1000 // increased to allow for large projects + const compileName = getCompileName(project_id, user_id) + return CommandRunner.run( + compileName, + command, + directory, + Settings.clsi != null ? Settings.clsi.docker.image : undefined, + timeout, + {}, + function(error, output) { + if (error != null) { + logger.err( + { err: error, command, project_id, user_id }, + 'error running synctex' + ) + return callback(error) + } + return callback(null, output.stdout) + } + ) + }, - command.unshift("/opt/synctex"); + _parseSynctexFromCodeOutput(output) { + const results = [] + for (const line of Array.from(output.split('\n'))) { + const [node, page, h, v, width, height] = Array.from(line.split('\t')) + if (node === 'NODE') { + results.push({ + page: parseInt(page, 10), + h: parseFloat(h), + v: parseFloat(v), + height: parseFloat(height), + width: parseFloat(width) + }) + } + } + return results + }, - const directory = getCompileDir(project_id, user_id); - const timeout = 60 * 1000; // increased to allow for large projects - const compileName = getCompileName(project_id, user_id); - return CommandRunner.run(compileName, command, directory, Settings.clsi != null ? 
Settings.clsi.docker.image : undefined, timeout, {}, function(error, output) { - if (error != null) { - logger.err({err:error, command, project_id, user_id}, "error running synctex"); - return callback(error); - } - return callback(null, output.stdout); - }); - }, + _parseSynctexFromPdfOutput(output, base_dir) { + const results = [] + for (let line of Array.from(output.split('\n'))) { + let column, file_path, node + ;[node, file_path, line, column] = Array.from(line.split('\t')) + if (node === 'NODE') { + const file = file_path.slice(base_dir.length + 1) + results.push({ + file, + line: parseInt(line, 10), + column: parseInt(column, 10) + }) + } + } + return results + }, - _parseSynctexFromCodeOutput(output) { - const results = []; - for (const line of Array.from(output.split("\n"))) { - const [node, page, h, v, width, height] = Array.from(line.split("\t")); - if (node === "NODE") { - results.push({ - page: parseInt(page, 10), - h: parseFloat(h), - v: parseFloat(v), - height: parseFloat(height), - width: parseFloat(width) - }); - } - } - return results; - }, + wordcount(project_id, user_id, file_name, image, callback) { + if (callback == null) { + callback = function(error, pdfPositions) {} + } + logger.log({ project_id, user_id, file_name, image }, 'running wordcount') + const file_path = `$COMPILE_DIR/${file_name}` + const command = [ + 'texcount', + '-nocol', + '-inc', + file_path, + `-out=${file_path}.wc` + ] + const compileDir = getCompileDir(project_id, user_id) + const timeout = 60 * 1000 + const compileName = getCompileName(project_id, user_id) + return fse.ensureDir(compileDir, function(error) { + if (error != null) { + logger.err( + { error, project_id, user_id, file_name }, + 'error ensuring dir for sync from code' + ) + return callback(error) + } + return CommandRunner.run( + compileName, + command, + compileDir, + image, + timeout, + {}, + function(error) { + if (error != null) { + return callback(error) + } + return fs.readFile( + compileDir + '/' + file_name + '.wc', + 'utf-8', + function(err, stdout) { + if (err != null) { + // call it node_err so sentry doesn't use random path error as unique id so it can't be ignored + logger.err( + { node_err: err, command, compileDir, project_id, user_id }, + 'error reading word count output' + ) + return callback(err) + } + const results = CompileManager._parseWordcountFromOutput(stdout) + logger.log( + { project_id, user_id, wordcount: results }, + 'word count results' + ) + return callback(null, results) + } + ) + } + ) + }) + }, - _parseSynctexFromPdfOutput(output, base_dir) { - const results = []; - for (let line of Array.from(output.split("\n"))) { - let column, file_path, node; - [node, file_path, line, column] = Array.from(line.split("\t")); - if (node === "NODE") { - const file = file_path.slice(base_dir.length + 1); - results.push({ - file, - line: parseInt(line, 10), - column: parseInt(column, 10) - }); - } - } - return results; - }, - - - wordcount(project_id, user_id, file_name, image, callback) { - if (callback == null) { callback = function(error, pdfPositions) {}; } - logger.log({project_id, user_id, file_name, image}, "running wordcount"); - const file_path = `$COMPILE_DIR/${file_name}`; - const command = [ "texcount", '-nocol', '-inc', file_path, `-out=${file_path}.wc`]; - const compileDir = getCompileDir(project_id, user_id); - const timeout = 60 * 1000; - const compileName = getCompileName(project_id, user_id); - return fse.ensureDir(compileDir, function(error) { - if (error != null) { - logger.err({error, 
project_id, user_id, file_name}, "error ensuring dir for sync from code"); - return callback(error); - } - return CommandRunner.run(compileName, command, compileDir, image, timeout, {}, function(error) { - if (error != null) { return callback(error); } - return fs.readFile(compileDir + "/" + file_name + ".wc", "utf-8", function(err, stdout) { - if (err != null) { - // call it node_err so sentry doesn't use random path error as unique id so it can't be ignored - logger.err({node_err:err, command, compileDir, project_id, user_id}, "error reading word count output"); - return callback(err); - } - const results = CompileManager._parseWordcountFromOutput(stdout); - logger.log({project_id, user_id, wordcount: results}, "word count results"); - return callback(null, results); - }); - }); - }); - }, - - _parseWordcountFromOutput(output) { - const results = { - encode: "", - textWords: 0, - headWords: 0, - outside: 0, - headers: 0, - elements: 0, - mathInline: 0, - mathDisplay: 0, - errors: 0, - messages: "" - }; - for (const line of Array.from(output.split("\n"))) { - const [data, info] = Array.from(line.split(":")); - if (data.indexOf("Encoding") > -1) { - results.encode = info.trim(); - } - if (data.indexOf("in text") > -1) { - results.textWords = parseInt(info, 10); - } - if (data.indexOf("in head") > -1) { - results.headWords = parseInt(info, 10); - } - if (data.indexOf("outside") > -1) { - results.outside = parseInt(info, 10); - } - if (data.indexOf("of head") > -1) { - results.headers = parseInt(info, 10); - } - if (data.indexOf("Number of floats/tables/figures") > -1) { - results.elements = parseInt(info, 10); - } - if (data.indexOf("Number of math inlines") > -1) { - results.mathInline = parseInt(info, 10); - } - if (data.indexOf("Number of math displayed") > -1) { - results.mathDisplay = parseInt(info, 10); - } - if (data === "(errors") { // errors reported as (errors:123) - results.errors = parseInt(info, 10); - } - if (line.indexOf("!!! ") > -1) { // errors logged as !!! message !!! - results.messages += line + "\n"; - } - } - return results; - } -}); + _parseWordcountFromOutput(output) { + const results = { + encode: '', + textWords: 0, + headWords: 0, + outside: 0, + headers: 0, + elements: 0, + mathInline: 0, + mathDisplay: 0, + errors: 0, + messages: '' + } + for (const line of Array.from(output.split('\n'))) { + const [data, info] = Array.from(line.split(':')) + if (data.indexOf('Encoding') > -1) { + results.encode = info.trim() + } + if (data.indexOf('in text') > -1) { + results.textWords = parseInt(info, 10) + } + if (data.indexOf('in head') > -1) { + results.headWords = parseInt(info, 10) + } + if (data.indexOf('outside') > -1) { + results.outside = parseInt(info, 10) + } + if (data.indexOf('of head') > -1) { + results.headers = parseInt(info, 10) + } + if (data.indexOf('Number of floats/tables/figures') > -1) { + results.elements = parseInt(info, 10) + } + if (data.indexOf('Number of math inlines') > -1) { + results.mathInline = parseInt(info, 10) + } + if (data.indexOf('Number of math displayed') > -1) { + results.mathDisplay = parseInt(info, 10) + } + if (data === '(errors') { + // errors reported as (errors:123) + results.errors = parseInt(info, 10) + } + if (line.indexOf('!!! ') > -1) { + // errors logged as !!! message !!! + results.messages += line + '\n' + } + } + return results + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/app/js/ContentTypeMapper.js b/app/js/ContentTypeMapper.js index fdd66d3..f690bf9 100644 --- a/app/js/ContentTypeMapper.js +++ b/app/js/ContentTypeMapper.js @@ -3,31 +3,36 @@ */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. -let ContentTypeMapper; -const Path = require('path'); +let ContentTypeMapper +const Path = require('path') // here we coerce html, css and js to text/plain, // otherwise choose correct mime type based on file extension, // falling back to octet-stream -module.exports = (ContentTypeMapper = { - map(path) { - switch (Path.extname(path)) { - case '.txt': case '.html': case '.js': case '.css': case '.svg': - return 'text/plain'; - case '.csv': - return 'text/csv'; - case '.pdf': - return 'application/pdf'; - case '.png': - return 'image/png'; - case '.jpg': case '.jpeg': - return 'image/jpeg'; - case '.tiff': - return 'image/tiff'; - case '.gif': - return 'image/gif'; - default: - return 'application/octet-stream'; - } - } -}); +module.exports = ContentTypeMapper = { + map(path) { + switch (Path.extname(path)) { + case '.txt': + case '.html': + case '.js': + case '.css': + case '.svg': + return 'text/plain' + case '.csv': + return 'text/csv' + case '.pdf': + return 'application/pdf' + case '.png': + return 'image/png' + case '.jpg': + case '.jpeg': + return 'image/jpeg' + case '.tiff': + return 'image/tiff' + case '.gif': + return 'image/gif' + default: + return 'application/octet-stream' + } + } +} diff --git a/app/js/DbQueue.js b/app/js/DbQueue.js index 89ff323..7589370 100644 --- a/app/js/DbQueue.js +++ b/app/js/DbQueue.js @@ -5,14 +5,14 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const async = require("async"); -const Settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); -const queue = async.queue((task, cb)=> task(cb) - , Settings.parallelSqlQueryLimit); +const async = require('async') +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const queue = async.queue( + (task, cb) => task(cb), + Settings.parallelSqlQueryLimit +) -queue.drain = ()=> logger.debug('all items have been processed'); - -module.exports = - {queue}; +queue.drain = () => logger.debug('all items have been processed') +module.exports = { queue } diff --git a/app/js/DockerLockManager.js b/app/js/DockerLockManager.js index 274ff66..2685b42 100644 --- a/app/js/DockerLockManager.js +++ b/app/js/DockerLockManager.js @@ -10,80 +10,104 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let LockManager; -const logger = require("logger-sharelatex"); +let LockManager +const logger = require('logger-sharelatex') -const LockState = {}; // locks for docker container operations, by container name +const LockState = {} // locks for docker container operations, by container name -module.exports = (LockManager = { +module.exports = LockManager = { + MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock + MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock + LOCK_TEST_INTERVAL: 1000, // retry time - MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock - MAX_LOCK_WAIT_TIME: 10000, // how long we 
wait for a lock - LOCK_TEST_INTERVAL: 1000, // retry time + tryLock(key, callback) { + let lockValue + if (callback == null) { + callback = function(err, gotLock) {} + } + const existingLock = LockState[key] + if (existingLock != null) { + // the lock is already taken, check how old it is + const lockAge = Date.now() - existingLock.created + if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) { + return callback(null, false) // we didn't get the lock, bail out + } else { + logger.error( + { key, lock: existingLock, age: lockAge }, + 'taking old lock by force' + ) + } + } + // take the lock + LockState[key] = lockValue = { created: Date.now() } + return callback(null, true, lockValue) + }, - tryLock(key, callback) { - let lockValue; - if (callback == null) { callback = function(err, gotLock) {}; } - const existingLock = LockState[key]; - if (existingLock != null) { // the lock is already taken, check how old it is - const lockAge = Date.now() - existingLock.created; - if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) { - return callback(null, false); // we didn't get the lock, bail out - } else { - logger.error({key, lock: existingLock, age:lockAge}, "taking old lock by force"); - } - } - // take the lock - LockState[key] = (lockValue = {created: Date.now()}); - return callback(null, true, lockValue); - }, + getLock(key, callback) { + let attempt + if (callback == null) { + callback = function(error, lockValue) {} + } + const startTime = Date.now() + return (attempt = () => + LockManager.tryLock(key, function(error, gotLock, lockValue) { + if (error != null) { + return callback(error) + } + if (gotLock) { + return callback(null, lockValue) + } else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) { + const e = new Error('Lock timeout') + e.key = key + return callback(e) + } else { + return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL) + } + }))() + }, - getLock(key, callback) { - let attempt; - if (callback == null) { callback = function(error, lockValue) {}; } - const startTime = Date.now(); - return (attempt = () => - LockManager.tryLock(key, function(error, gotLock, lockValue) { - if (error != null) { return callback(error); } - if (gotLock) { - return callback(null, lockValue); - } else if ((Date.now() - startTime) > LockManager.MAX_LOCK_WAIT_TIME) { - const e = new Error("Lock timeout"); - e.key = key; - return callback(e); - } else { - return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL); - } - }) - )(); - }, + releaseLock(key, lockValue, callback) { + if (callback == null) { + callback = function(error) {} + } + const existingLock = LockState[key] + if (existingLock === lockValue) { + // lockValue is an object, so we can test by reference + delete LockState[key] // our lock, so we can free it + return callback() + } else if (existingLock != null) { + // lock exists but doesn't match ours + logger.error( + { key, lock: existingLock }, + 'tried to release lock taken by force' + ) + return callback() + } else { + logger.error( + { key, lock: existingLock }, + 'tried to release lock that has gone' + ) + return callback() + } + }, - releaseLock(key, lockValue, callback) { - if (callback == null) { callback = function(error) {}; } - const existingLock = LockState[key]; - if (existingLock === lockValue) { // lockValue is an object, so we can test by reference - delete LockState[key]; // our lock, so we can free it - return callback(); - } else if (existingLock != null) { // lock exists but doesn't match ours - logger.error({key, lock: existingLock}, "tried to release lock 
taken by force"); - return callback(); - } else { - logger.error({key, lock: existingLock}, "tried to release lock that has gone"); - return callback(); - } - }, - - runWithLock(key, runner, callback) { - if (callback == null) { callback = function(error) {}; } - return LockManager.getLock(key, function(error, lockValue) { - if (error != null) { return callback(error); } - return runner((error1, ...args) => - LockManager.releaseLock(key, lockValue, function(error2) { - error = error1 || error2; - if (error != null) { return callback(error); } - return callback(null, ...Array.from(args)); - }) - ); - }); - } -}); + runWithLock(key, runner, callback) { + if (callback == null) { + callback = function(error) {} + } + return LockManager.getLock(key, function(error, lockValue) { + if (error != null) { + return callback(error) + } + return runner((error1, ...args) => + LockManager.releaseLock(key, lockValue, function(error2) { + error = error1 || error2 + if (error != null) { + return callback(error) + } + return callback(null, ...Array.from(args)) + }) + ) + }) + } +} diff --git a/app/js/DockerRunner.js b/app/js/DockerRunner.js index dc04b5d..5ac234b 100644 --- a/app/js/DockerRunner.js +++ b/app/js/DockerRunner.js @@ -15,469 +15,666 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let DockerRunner, oneHour; -const Settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); -const Docker = require("dockerode"); -const dockerode = new Docker(); -const crypto = require("crypto"); -const async = require("async"); -const LockManager = require("./DockerLockManager"); -const fs = require("fs"); -const Path = require('path'); -const _ = require("underscore"); +let DockerRunner, oneHour +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const Docker = require('dockerode') +const dockerode = new Docker() +const crypto = require('crypto') +const async = require('async') +const LockManager = require('./DockerLockManager') +const fs = require('fs') +const Path = require('path') +const _ = require('underscore') -logger.info("using docker runner"); +logger.info('using docker runner') -const usingSiblingContainers = () => __guard__(Settings != null ? Settings.path : undefined, x => x.sandboxedCompilesHostDir) != null; +const usingSiblingContainers = () => + __guard__( + Settings != null ? Settings.path : undefined, + x => x.sandboxedCompilesHostDir + ) != null -module.exports = (DockerRunner = { - ERR_NOT_DIRECTORY: new Error("not a directory"), - ERR_TERMINATED: new Error("terminated"), - ERR_EXITED: new Error("exited"), - ERR_TIMED_OUT: new Error("container timed out"), +module.exports = DockerRunner = { + ERR_NOT_DIRECTORY: new Error('not a directory'), + ERR_TERMINATED: new Error('terminated'), + ERR_EXITED: new Error('exited'), + ERR_TIMED_OUT: new Error('container timed out'), - run(project_id, command, directory, image, timeout, environment, callback) { + run(project_id, command, directory, image, timeout, environment, callback) { + let name + if (callback == null) { + callback = function(error, output) {} + } + if (usingSiblingContainers()) { + const _newPath = Settings.path.sandboxedCompilesHostDir + logger.log( + { path: _newPath }, + 'altering bind path for sibling containers' + ) + // Server Pro, example: + // '/var/lib/sharelatex/data/compiles/' + // ... becomes ... 
+ // '/opt/sharelatex_data/data/compiles/' + directory = Path.join( + Settings.path.sandboxedCompilesHostDir, + Path.basename(directory) + ) + } - let name; - if (callback == null) { callback = function(error, output) {}; } - if (usingSiblingContainers()) { - const _newPath = Settings.path.sandboxedCompilesHostDir; - logger.log({path: _newPath}, "altering bind path for sibling containers"); - // Server Pro, example: - // '/var/lib/sharelatex/data/compiles/' - // ... becomes ... - // '/opt/sharelatex_data/data/compiles/' - directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory)); - } + const volumes = {} + volumes[directory] = '/compile' - const volumes = {}; - volumes[directory] = "/compile"; + command = Array.from(command).map(arg => + __guardMethod__(arg.toString(), 'replace', o => + o.replace('$COMPILE_DIR', '/compile') + ) + ) + if (image == null) { + ;({ image } = Settings.clsi.docker) + } - command = (Array.from(command).map((arg) => __guardMethod__(arg.toString(), 'replace', o => o.replace('$COMPILE_DIR', "/compile")))); - if ((image == null)) { - ({ image } = Settings.clsi.docker); - } + if (Settings.texliveImageNameOveride != null) { + const img = image.split('/') + image = `${Settings.texliveImageNameOveride}/${img[2]}` + } - if (Settings.texliveImageNameOveride != null) { - const img = image.split("/"); - image = `${Settings.texliveImageNameOveride}/${img[2]}`; - } + const options = DockerRunner._getContainerOptions( + command, + image, + volumes, + timeout, + environment + ) + const fingerprint = DockerRunner._fingerprintContainer(options) + options.name = name = `project-${project_id}-${fingerprint}` - const options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment); - const fingerprint = DockerRunner._fingerprintContainer(options); - options.name = (name = `project-${project_id}-${fingerprint}`); + // logOptions = _.clone(options) + // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging" + logger.log({ project_id }, 'running docker container') + DockerRunner._runAndWaitForContainer(options, volumes, timeout, function( + error, + output + ) { + if ( + __guard__(error != null ? error.message : undefined, x => + x.match('HTTP code is 500') + ) + ) { + logger.log( + { err: error, project_id }, + 'error running container so destroying and retrying' + ) + return DockerRunner.destroyContainer(name, null, true, function(error) { + if (error != null) { + return callback(error) + } + return DockerRunner._runAndWaitForContainer( + options, + volumes, + timeout, + callback + ) + }) + } else { + return callback(error, output) + } + }) - // logOptions = _.clone(options) - // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging" - logger.log({project_id}, "running docker container"); - DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(error, output) { - if (__guard__(error != null ? 
error.message : undefined, x => x.match("HTTP code is 500"))) { - logger.log({err: error, project_id}, "error running container so destroying and retrying"); - return DockerRunner.destroyContainer(name, null, true, function(error) { - if (error != null) { return callback(error); } - return DockerRunner._runAndWaitForContainer(options, volumes, timeout, callback); - }); - } else { - return callback(error, output); - } - }); + return name + }, // pass back the container name to allow it to be killed - return name; - }, // pass back the container name to allow it to be killed + kill(container_id, callback) { + if (callback == null) { + callback = function(error) {} + } + logger.log({ container_id }, 'sending kill signal to container') + const container = dockerode.getContainer(container_id) + return container.kill(function(error) { + if ( + error != null && + __guardMethod__(error != null ? error.message : undefined, 'match', o => + o.match(/Cannot kill container .* is not running/) + ) + ) { + logger.warn( + { err: error, container_id }, + 'container not running, continuing' + ) + error = null + } + if (error != null) { + logger.error({ err: error, container_id }, 'error killing container') + return callback(error) + } else { + return callback() + } + }) + }, - kill(container_id, callback) { - if (callback == null) { callback = function(error) {}; } - logger.log({container_id}, "sending kill signal to container"); - const container = dockerode.getContainer(container_id); - return container.kill(function(error) { - if ((error != null) && __guardMethod__(error != null ? error.message : undefined, 'match', o => o.match(/Cannot kill container .* is not running/))) { - logger.warn({err: error, container_id}, "container not running, continuing"); - error = null; - } - if (error != null) { - logger.error({err: error, container_id}, "error killing container"); - return callback(error); - } else { - return callback(); - } - }); - }, + _runAndWaitForContainer(options, volumes, timeout, _callback) { + if (_callback == null) { + _callback = function(error, output) {} + } + const callback = function(...args) { + _callback(...Array.from(args || [])) + // Only call the callback once + return (_callback = function() {}) + } - _runAndWaitForContainer(options, volumes, timeout, _callback) { - if (_callback == null) { _callback = function(error, output) {}; } - const callback = function(...args) { - _callback(...Array.from(args || [])); - // Only call the callback once - return _callback = function() {}; - }; + const { name } = options - const { name } = options; + let streamEnded = false + let containerReturned = false + let output = {} - let streamEnded = false; - let containerReturned = false; - let output = {}; + const callbackIfFinished = function() { + if (streamEnded && containerReturned) { + return callback(null, output) + } + } - const callbackIfFinished = function() { - if (streamEnded && containerReturned) { - return callback(null, output); - } - }; + const attachStreamHandler = function(error, _output) { + if (error != null) { + return callback(error) + } + output = _output + streamEnded = true + return callbackIfFinished() + } - const attachStreamHandler = function(error, _output) { - if (error != null) { return callback(error); } - output = _output; - streamEnded = true; - return callbackIfFinished(); - }; + return DockerRunner.startContainer( + options, + volumes, + attachStreamHandler, + function(error, containerId) { + if (error != null) { + return callback(error) + } - return 
DockerRunner.startContainer(options, volumes, attachStreamHandler, function(error, containerId) { - if (error != null) { return callback(error); } - - return DockerRunner.waitForContainer(name, timeout, function(error, exitCode) { - let err; - if (error != null) { return callback(error); } - if (exitCode === 137) { // exit status from kill -9 - err = DockerRunner.ERR_TERMINATED; - err.terminated = true; - return callback(err); - } - if (exitCode === 1) { // exit status from chktex - err = DockerRunner.ERR_EXITED; - err.code = exitCode; - return callback(err); - } - containerReturned = true; - __guard__(options != null ? options.HostConfig : undefined, x => x.SecurityOpt = null); // small log line - logger.log({err, exitCode, options}, "docker container has exited"); - return callbackIfFinished(); - }); - }); - }, + return DockerRunner.waitForContainer(name, timeout, function( + error, + exitCode + ) { + let err + if (error != null) { + return callback(error) + } + if (exitCode === 137) { + // exit status from kill -9 + err = DockerRunner.ERR_TERMINATED + err.terminated = true + return callback(err) + } + if (exitCode === 1) { + // exit status from chktex + err = DockerRunner.ERR_EXITED + err.code = exitCode + return callback(err) + } + containerReturned = true + __guard__( + options != null ? options.HostConfig : undefined, + x => (x.SecurityOpt = null) + ) // small log line + logger.log({ err, exitCode, options }, 'docker container has exited') + return callbackIfFinished() + }) + } + ) + }, - _getContainerOptions(command, image, volumes, timeout, environment) { - let m, year; - let key, value, hostVol, dockerVol; - const timeoutInSeconds = timeout / 1000; + _getContainerOptions(command, image, volumes, timeout, environment) { + let m, year + let key, value, hostVol, dockerVol + const timeoutInSeconds = timeout / 1000 - const dockerVolumes = {}; - for (hostVol in volumes) { - dockerVol = volumes[hostVol]; - dockerVolumes[dockerVol] = {}; + const dockerVolumes = {} + for (hostVol in volumes) { + dockerVol = volumes[hostVol] + dockerVolumes[dockerVol] = {} - if (volumes[hostVol].slice(-3).indexOf(":r") === -1) { - volumes[hostVol] = `${dockerVol}:rw`; - } - } + if (volumes[hostVol].slice(-3).indexOf(':r') === -1) { + volumes[hostVol] = `${dockerVol}:rw` + } + } - // merge settings and environment parameter - const env = {}; - for (const src of [Settings.clsi.docker.env, environment || {}]) { - for (key in src) { value = src[key]; env[key] = value; } - } - // set the path based on the image year - if ((m = image.match(/:([0-9]+)\.[0-9]+/))) { - year = m[1]; - } else { - year = "2014"; - } - env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`; - const options = { - "Cmd" : command, - "Image" : image, - "Volumes" : dockerVolumes, - "WorkingDir" : "/compile", - "NetworkDisabled" : true, - "Memory" : 1024 * 1024 * 1024 * 1024, // 1 Gb - "User" : Settings.clsi.docker.user, - "Env" : (((() => { - const result = []; - for (key in env) { - value = env[key]; - result.push(`${key}=${value}`); - } - return result; - })())), // convert the environment hash to an array - "HostConfig" : { - "Binds": (((() => { - const result1 = []; - for (hostVol in volumes) { - dockerVol = volumes[hostVol]; - result1.push(`${hostVol}:${dockerVol}`); - } - return result1; - })())), - "LogConfig": {"Type": "none", "Config": {}}, - "Ulimits": [{'Name': 'cpu', 'Soft': timeoutInSeconds+5, 'Hard': timeoutInSeconds+10}], - "CapDrop": "ALL", - "SecurityOpt": 
["no-new-privileges"] - } - }; + // merge settings and environment parameter + const env = {} + for (const src of [Settings.clsi.docker.env, environment || {}]) { + for (key in src) { + value = src[key] + env[key] = value + } + } + // set the path based on the image year + if ((m = image.match(/:([0-9]+)\.[0-9]+/))) { + year = m[1] + } else { + year = '2014' + } + env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/` + const options = { + Cmd: command, + Image: image, + Volumes: dockerVolumes, + WorkingDir: '/compile', + NetworkDisabled: true, + Memory: 1024 * 1024 * 1024 * 1024, // 1 Gb + User: Settings.clsi.docker.user, + Env: (() => { + const result = [] + for (key in env) { + value = env[key] + result.push(`${key}=${value}`) + } + return result + })(), // convert the environment hash to an array + HostConfig: { + Binds: (() => { + const result1 = [] + for (hostVol in volumes) { + dockerVol = volumes[hostVol] + result1.push(`${hostVol}:${dockerVol}`) + } + return result1 + })(), + LogConfig: { Type: 'none', Config: {} }, + Ulimits: [ + { + Name: 'cpu', + Soft: timeoutInSeconds + 5, + Hard: timeoutInSeconds + 10 + } + ], + CapDrop: 'ALL', + SecurityOpt: ['no-new-privileges'] + } + } + if ( + (Settings.path != null ? Settings.path.synctexBinHostPath : undefined) != + null + ) { + options.HostConfig.Binds.push( + `${Settings.path.synctexBinHostPath}:/opt/synctex:ro` + ) + } - if ((Settings.path != null ? Settings.path.synctexBinHostPath : undefined) != null) { - options.HostConfig.Binds.push(`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`); - } + if (Settings.clsi.docker.seccomp_profile != null) { + options.HostConfig.SecurityOpt.push( + `seccomp=${Settings.clsi.docker.seccomp_profile}` + ) + } - if (Settings.clsi.docker.seccomp_profile != null) { - options.HostConfig.SecurityOpt.push(`seccomp=${Settings.clsi.docker.seccomp_profile}`); - } + return options + }, - return options; - }, + _fingerprintContainer(containerOptions) { + // Yay, Hashing! + const json = JSON.stringify(containerOptions) + return crypto + .createHash('md5') + .update(json) + .digest('hex') + }, - _fingerprintContainer(containerOptions) { - // Yay, Hashing! - const json = JSON.stringify(containerOptions); - return crypto.createHash("md5").update(json).digest("hex"); - }, + startContainer(options, volumes, attachStreamHandler, callback) { + return LockManager.runWithLock( + options.name, + releaseLock => + // Check that volumes exist before starting the container. + // When a container is started with volume pointing to a + // non-existent directory then docker creates the directory but + // with root ownership. + DockerRunner._checkVolumes(options, volumes, function(err) { + if (err != null) { + return releaseLock(err) + } + return DockerRunner._startContainer( + options, + volumes, + attachStreamHandler, + releaseLock + ) + }), - startContainer(options, volumes, attachStreamHandler, callback) { - return LockManager.runWithLock(options.name, releaseLock => - // Check that volumes exist before starting the container. - // When a container is started with volume pointing to a - // non-existent directory then docker creates the directory but - // with root ownership. 
- DockerRunner._checkVolumes(options, volumes, function(err) { - if (err != null) { return releaseLock(err); } - return DockerRunner._startContainer(options, volumes, attachStreamHandler, releaseLock); - }) - - , callback); - }, + callback + ) + }, - // Check that volumes exist and are directories - _checkVolumes(options, volumes, callback) { - if (callback == null) { callback = function(error, containerName) {}; } - if (usingSiblingContainers()) { - // Server Pro, with sibling-containers active, skip checks - return callback(null); - } + // Check that volumes exist and are directories + _checkVolumes(options, volumes, callback) { + if (callback == null) { + callback = function(error, containerName) {} + } + if (usingSiblingContainers()) { + // Server Pro, with sibling-containers active, skip checks + return callback(null) + } - const checkVolume = (path, cb) => - fs.stat(path, function(err, stats) { - if (err != null) { return cb(err); } - if (!(stats != null ? stats.isDirectory() : undefined)) { return cb(DockerRunner.ERR_NOT_DIRECTORY); } - return cb(); - }) - ; - const jobs = []; - for (const vol in volumes) { - (vol => jobs.push(cb => checkVolume(vol, cb)))(vol); - } - return async.series(jobs, callback); - }, + const checkVolume = (path, cb) => + fs.stat(path, function(err, stats) { + if (err != null) { + return cb(err) + } + if (!(stats != null ? stats.isDirectory() : undefined)) { + return cb(DockerRunner.ERR_NOT_DIRECTORY) + } + return cb() + }) + const jobs = [] + for (const vol in volumes) { + ;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol) + } + return async.series(jobs, callback) + }, - _startContainer(options, volumes, attachStreamHandler, callback) { - if (callback == null) { callback = function(error, output) {}; } - callback = _.once(callback); - const { name } = options; + _startContainer(options, volumes, attachStreamHandler, callback) { + if (callback == null) { + callback = function(error, output) {} + } + callback = _.once(callback) + const { name } = options - logger.log({container_name: name}, "starting container"); - const container = dockerode.getContainer(name); + logger.log({ container_name: name }, 'starting container') + const container = dockerode.getContainer(name) - const createAndStartContainer = () => - dockerode.createContainer(options, function(error, container) { - if (error != null) { return callback(error); } - return startExistingContainer(); - }) - ; + const createAndStartContainer = () => + dockerode.createContainer(options, function(error, container) { + if (error != null) { + return callback(error) + } + return startExistingContainer() + }) + var startExistingContainer = () => + DockerRunner.attachToContainer( + options.name, + attachStreamHandler, + function(error) { + if (error != null) { + return callback(error) + } + return container.start(function(error) { + if ( + error != null && + (error != null ? error.statusCode : undefined) !== 304 + ) { + // already running + return callback(error) + } else { + return callback() + } + }) + } + ) + return container.inspect(function(error, stats) { + if ((error != null ? 
error.statusCode : undefined) === 404) { + return createAndStartContainer() + } else if (error != null) { + logger.err( + { container_name: name, error }, + 'unable to inspect container to start' + ) + return callback(error) + } else { + return startExistingContainer() + } + }) + }, - var startExistingContainer = () => - DockerRunner.attachToContainer(options.name, attachStreamHandler, function(error){ - if (error != null) { return callback(error); } - return container.start(function(error) { - if ((error != null) && ((error != null ? error.statusCode : undefined) !== 304)) { // already running - return callback(error); - } else { - return callback(); - } - }); - }) - ; + attachToContainer(containerId, attachStreamHandler, attachStartCallback) { + const container = dockerode.getContainer(containerId) + return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function( + error, + stream + ) { + if (error != null) { + logger.error( + { err: error, container_id: containerId }, + 'error attaching to container' + ) + return attachStartCallback(error) + } else { + attachStartCallback() + } - return container.inspect(function(error, stats){ - if ((error != null ? error.statusCode : undefined) === 404) { - return createAndStartContainer(); - } else if (error != null) { - logger.err({container_name: name, error}, "unable to inspect container to start"); - return callback(error); - } else { - return startExistingContainer(); - } - }); - }, + logger.log({ container_id: containerId }, 'attached to container') + const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB + const createStringOutputStream = function(name) { + return { + data: '', + overflowed: false, + write(data) { + if (this.overflowed) { + return + } + if (this.data.length < MAX_OUTPUT) { + return (this.data += data) + } else { + logger.error( + { + container_id: containerId, + length: this.data.length, + maxLen: MAX_OUTPUT + }, + `${name} exceeds max size` + ) + this.data += `(...truncated at ${MAX_OUTPUT} chars...)` + return (this.overflowed = true) + } + } + // kill container if too much output + // docker.containers.kill(containerId, () ->) + } + } - attachToContainer(containerId, attachStreamHandler, attachStartCallback) { - const container = dockerode.getContainer(containerId); - return container.attach({stdout: 1, stderr: 1, stream: 1}, function(error, stream) { - if (error != null) { - logger.error({err: error, container_id: containerId}, "error attaching to container"); - return attachStartCallback(error); - } else { - attachStartCallback(); - } + const stdout = createStringOutputStream('stdout') + const stderr = createStringOutputStream('stderr') + container.modem.demuxStream(stream, stdout, stderr) - logger.log({container_id: containerId}, "attached to container"); + stream.on('error', err => + logger.error( + { err, container_id: containerId }, + 'error reading from container stream' + ) + ) - const MAX_OUTPUT = 1024 * 1024; // limit output to 1MB - const createStringOutputStream = function(name) { - return { - data: "", - overflowed: false, - write(data) { - if (this.overflowed) { return; } - if (this.data.length < MAX_OUTPUT) { - return this.data += data; - } else { - logger.error({container_id: containerId, length: this.data.length, maxLen: MAX_OUTPUT}, `${name} exceeds max size`); - this.data += `(...truncated at ${MAX_OUTPUT} chars...)`; - return this.overflowed = true; - } - } - // kill container if too much output - // docker.containers.kill(containerId, () ->) - }; - }; + return stream.on('end', () => + 
attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data }) + ) + }) + }, - const stdout = createStringOutputStream("stdout"); - const stderr = createStringOutputStream("stderr"); + waitForContainer(containerId, timeout, _callback) { + if (_callback == null) { + _callback = function(error, exitCode) {} + } + const callback = function(...args) { + _callback(...Array.from(args || [])) + // Only call the callback once + return (_callback = function() {}) + } - container.modem.demuxStream(stream, stdout, stderr); + const container = dockerode.getContainer(containerId) - stream.on("error", err => logger.error({err, container_id: containerId}, "error reading from container stream")); + let timedOut = false + const timeoutId = setTimeout(function() { + timedOut = true + logger.log( + { container_id: containerId }, + 'timeout reached, killing container' + ) + return container.kill(function() {}) + }, timeout) - return stream.on("end", () => attachStreamHandler(null, {stdout: stdout.data, stderr: stderr.data})); - }); - }, + logger.log({ container_id: containerId }, 'waiting for docker container') + return container.wait(function(error, res) { + if (error != null) { + clearTimeout(timeoutId) + logger.error( + { err: error, container_id: containerId }, + 'error waiting for container' + ) + return callback(error) + } + if (timedOut) { + logger.log({ containerId }, 'docker container timed out') + error = DockerRunner.ERR_TIMED_OUT + error.timedout = true + return callback(error) + } else { + clearTimeout(timeoutId) + logger.log( + { container_id: containerId, exitCode: res.StatusCode }, + 'docker container returned' + ) + return callback(null, res.StatusCode) + } + }) + }, - waitForContainer(containerId, timeout, _callback) { - if (_callback == null) { _callback = function(error, exitCode) {}; } - const callback = function(...args) { - _callback(...Array.from(args || [])); - // Only call the callback once - return _callback = function() {}; - }; + destroyContainer(containerName, containerId, shouldForce, callback) { + // We want the containerName for the lock and, ideally, the + // containerId to delete. There is a bug in the docker.io module + // where if you delete by name and there is an error, it throws an + // async exception, but if you delete by id it just does a normal + // error callback. We fall back to deleting by name if no id is + // supplied. + if (callback == null) { + callback = function(error) {} + } + return LockManager.runWithLock( + containerName, + releaseLock => + DockerRunner._destroyContainer( + containerId || containerName, + shouldForce, + releaseLock + ), + callback + ) + }, - const container = dockerode.getContainer(containerId); + _destroyContainer(containerId, shouldForce, callback) { + if (callback == null) { + callback = function(error) {} + } + logger.log({ container_id: containerId }, 'destroying docker container') + const container = dockerode.getContainer(containerId) + return container.remove({ force: shouldForce === true }, function(error) { + if ( + error != null && + (error != null ? 
error.statusCode : undefined) === 404 + ) { + logger.warn( + { err: error, container_id: containerId }, + 'container not found, continuing' + ) + error = null + } + if (error != null) { + logger.error( + { err: error, container_id: containerId }, + 'error destroying container' + ) + } else { + logger.log({ container_id: containerId }, 'destroyed container') + } + return callback(error) + }) + }, - let timedOut = false; - const timeoutId = setTimeout(function() { - timedOut = true; - logger.log({container_id: containerId}, "timeout reached, killing container"); - return container.kill(function() {}); - } - , timeout); + // handle expiry of docker containers - logger.log({container_id: containerId}, "waiting for docker container"); - return container.wait(function(error, res) { - if (error != null) { - clearTimeout(timeoutId); - logger.error({err: error, container_id: containerId}, "error waiting for container"); - return callback(error); - } - if (timedOut) { - logger.log({containerId}, "docker container timed out"); - error = DockerRunner.ERR_TIMED_OUT; - error.timedout = true; - return callback(error); - } else { - clearTimeout(timeoutId); - logger.log({container_id: containerId, exitCode: res.StatusCode}, "docker container returned"); - return callback(null, res.StatusCode); - } - }); - }, + MAX_CONTAINER_AGE: + Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000), - destroyContainer(containerName, containerId, shouldForce, callback) { - // We want the containerName for the lock and, ideally, the - // containerId to delete. There is a bug in the docker.io module - // where if you delete by name and there is an error, it throws an - // async exception, but if you delete by id it just does a normal - // error callback. We fall back to deleting by name if no id is - // supplied. - if (callback == null) { callback = function(error) {}; } - return LockManager.runWithLock(containerName, releaseLock => DockerRunner._destroyContainer(containerId || containerName, shouldForce, releaseLock) - , callback); - }, + examineOldContainer(container, callback) { + if (callback == null) { + callback = function(error, name, id, ttl) {} + } + const name = + container.Name || + (container.Names != null ? container.Names[0] : undefined) + const created = container.Created * 1000 // creation time is returned in seconds + const now = Date.now() + const age = now - created + const maxAge = DockerRunner.MAX_CONTAINER_AGE + const ttl = maxAge - age + logger.log( + { containerName: name, created, now, age, maxAge, ttl }, + 'checking whether to destroy container' + ) + return callback(null, name, container.Id, ttl) + }, - _destroyContainer(containerId, shouldForce, callback) { - if (callback == null) { callback = function(error) {}; } - logger.log({container_id: containerId}, "destroying docker container"); - const container = dockerode.getContainer(containerId); - return container.remove({force: shouldForce === true}, function(error) { - if ((error != null) && ((error != null ? 
error.statusCode : undefined) === 404)) { - logger.warn({err: error, container_id: containerId}, "container not found, continuing"); - error = null; - } - if (error != null) { - logger.error({err: error, container_id: containerId}, "error destroying container"); - } else { - logger.log({container_id: containerId}, "destroyed container"); - } - return callback(error); - }); - }, + destroyOldContainers(callback) { + if (callback == null) { + callback = function(error) {} + } + return dockerode.listContainers({ all: true }, function(error, containers) { + if (error != null) { + return callback(error) + } + const jobs = [] + for (const container of Array.from(containers || [])) { + ;(container => + DockerRunner.examineOldContainer(container, function( + err, + name, + id, + ttl + ) { + if (name.slice(0, 9) === '/project-' && ttl <= 0) { + return jobs.push(cb => + DockerRunner.destroyContainer(name, id, false, () => cb()) + ) + } + }))(container) + } + // Ignore errors because some containers get stuck but + // will be destroyed next time + return async.series(jobs, callback) + }) + }, - // handle expiry of docker containers + startContainerMonitor() { + logger.log( + { maxAge: DockerRunner.MAX_CONTAINER_AGE }, + 'starting container expiry' + ) + // randomise the start time + const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000) + return setTimeout( + () => + setInterval( + () => DockerRunner.destroyOldContainers(), + (oneHour = 60 * 60 * 1000) + ), - MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000), + randomDelay + ) + } +} - examineOldContainer(container, callback) { - if (callback == null) { callback = function(error, name, id, ttl){}; } - const name = container.Name || (container.Names != null ? container.Names[0] : undefined); - const created = container.Created * 1000; // creation time is returned in seconds - const now = Date.now(); - const age = now - created; - const maxAge = DockerRunner.MAX_CONTAINER_AGE; - const ttl = maxAge - age; - logger.log({containerName: name, created, now, age, maxAge, ttl}, "checking whether to destroy container"); - return callback(null, name, container.Id, ttl); - }, - - destroyOldContainers(callback) { - if (callback == null) { callback = function(error) {}; } - return dockerode.listContainers({all: true}, function(error, containers) { - if (error != null) { return callback(error); } - const jobs = []; - for (const container of Array.from(containers || [])) { - (container => - DockerRunner.examineOldContainer(container, function(err, name, id, ttl) { - if ((name.slice(0, 9) === '/project-') && (ttl <= 0)) { - return jobs.push(cb => DockerRunner.destroyContainer(name, id, false, () => cb())); - } - }) - )(container); - } - // Ignore errors because some containers get stuck but - // will be destroyed next time - return async.series(jobs, callback); - }); - }, - - startContainerMonitor() { - logger.log({maxAge: DockerRunner.MAX_CONTAINER_AGE}, "starting container expiry"); - // randomise the start time - const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000); - return setTimeout(() => - setInterval(() => DockerRunner.destroyOldContainers() - , (oneHour = 60 * 60 * 1000)) - - , randomDelay); - } -}); - -DockerRunner.startContainerMonitor(); +DockerRunner.startContainerMonitor() function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined } function __guardMethod__(obj, methodName, transform) { - if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') { - return transform(obj, methodName); + if ( + typeof obj !== 'undefined' && + obj !== null && + typeof obj[methodName] === 'function' + ) { + return transform(obj, methodName) } else { - return undefined; + return undefined } -} \ No newline at end of file +} diff --git a/app/js/DraftModeManager.js b/app/js/DraftModeManager.js index 79f39ab..c8f59aa 100644 --- a/app/js/DraftModeManager.js +++ b/app/js/DraftModeManager.js @@ -11,34 +11,47 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let DraftModeManager; -const fs = require("fs"); -const logger = require("logger-sharelatex"); +let DraftModeManager +const fs = require('fs') +const logger = require('logger-sharelatex') -module.exports = (DraftModeManager = { - injectDraftMode(filename, callback) { - if (callback == null) { callback = function(error) {}; } - return fs.readFile(filename, "utf8", function(error, content) { - if (error != null) { return callback(error); } - // avoid adding draft mode more than once - if ((content != null ? content.indexOf("\\documentclass\[draft") : undefined) >= 0) { - return callback(); - } - const modified_content = DraftModeManager._injectDraftOption(content); - logger.log({ - content: content.slice(0,1024), // \documentclass is normally v near the top - modified_content: modified_content.slice(0,1024), - filename - }, "injected draft class"); - return fs.writeFile(filename, modified_content, callback); - }); - }, - - _injectDraftOption(content) { - return content - // With existing options (must be first, otherwise both are applied) - .replace(/\\documentclass\[/g, "\\documentclass[draft,") - // Without existing options - .replace(/\\documentclass\{/g, "\\documentclass[draft]{"); - } -}); +module.exports = DraftModeManager = { + injectDraftMode(filename, callback) { + if (callback == null) { + callback = function(error) {} + } + return fs.readFile(filename, 'utf8', function(error, content) { + if (error != null) { + return callback(error) + } + // avoid adding draft mode more than once + if ( + (content != null + ? content.indexOf('\\documentclass[draft') + : undefined) >= 0 + ) { + return callback() + } + const modified_content = DraftModeManager._injectDraftOption(content) + logger.log( + { + content: content.slice(0, 1024), // \documentclass is normally v near the top + modified_content: modified_content.slice(0, 1024), + filename + }, + 'injected draft class' + ) + return fs.writeFile(filename, modified_content, callback) + }) + }, + + _injectDraftOption(content) { + return ( + content + // With existing options (must be first, otherwise both are applied) + .replace(/\\documentclass\[/g, '\\documentclass[draft,') + // Without existing options + .replace(/\\documentclass\{/g, '\\documentclass[draft]{') + ) + } +} diff --git a/app/js/Errors.js b/app/js/Errors.js index e7ace2c..d3a5f5a 100644 --- a/app/js/Errors.js +++ b/app/js/Errors.js @@ -4,33 +4,33 @@ */ // TODO: This file was created by bulk-decaffeinate. // Fix any style issues and re-enable lint. 
-let Errors; +let Errors var NotFoundError = function(message) { - const error = new Error(message); - error.name = "NotFoundError"; - error.__proto__ = NotFoundError.prototype; - return error; -}; -NotFoundError.prototype.__proto__ = Error.prototype; + const error = new Error(message) + error.name = 'NotFoundError' + error.__proto__ = NotFoundError.prototype + return error +} +NotFoundError.prototype.__proto__ = Error.prototype var FilesOutOfSyncError = function(message) { - const error = new Error(message); - error.name = "FilesOutOfSyncError"; - error.__proto__ = FilesOutOfSyncError.prototype; - return error; -}; -FilesOutOfSyncError.prototype.__proto__ = Error.prototype; + const error = new Error(message) + error.name = 'FilesOutOfSyncError' + error.__proto__ = FilesOutOfSyncError.prototype + return error +} +FilesOutOfSyncError.prototype.__proto__ = Error.prototype var AlreadyCompilingError = function(message) { - const error = new Error(message); - error.name = "AlreadyCompilingError"; - error.__proto__ = AlreadyCompilingError.prototype; - return error; -}; -AlreadyCompilingError.prototype.__proto__ = Error.prototype; + const error = new Error(message) + error.name = 'AlreadyCompilingError' + error.__proto__ = AlreadyCompilingError.prototype + return error +} +AlreadyCompilingError.prototype.__proto__ = Error.prototype -module.exports = (Errors = { - NotFoundError, - FilesOutOfSyncError, - AlreadyCompilingError -}); +module.exports = Errors = { + NotFoundError, + FilesOutOfSyncError, + AlreadyCompilingError +} diff --git a/app/js/LatexRunner.js b/app/js/LatexRunner.js index e569df8..972f1fe 100644 --- a/app/js/LatexRunner.js +++ b/app/js/LatexRunner.js @@ -13,119 +13,192 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let LatexRunner; -const Path = require("path"); -const Settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); -const Metrics = require("./Metrics"); -const CommandRunner = require("./CommandRunner"); +let LatexRunner +const Path = require('path') +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') +const CommandRunner = require('./CommandRunner') -const ProcessTable = {}; // table of currently running jobs (pids or docker container names) +const ProcessTable = {} // table of currently running jobs (pids or docker container names) -module.exports = (LatexRunner = { - runLatex(project_id, options, callback) { - let command; - if (callback == null) { callback = function(error) {}; } - let {directory, mainFile, compiler, timeout, image, environment, flags} = options; - if (!compiler) { compiler = "pdflatex"; } - if (!timeout) { timeout = 60000; } // milliseconds +module.exports = LatexRunner = { + runLatex(project_id, options, callback) { + let command + if (callback == null) { + callback = function(error) {} + } + let { + directory, + mainFile, + compiler, + timeout, + image, + environment, + flags + } = options + if (!compiler) { + compiler = 'pdflatex' + } + if (!timeout) { + timeout = 60000 + } // milliseconds - logger.log({directory, compiler, timeout, mainFile, environment, flags}, "starting compile"); + logger.log( + { directory, compiler, timeout, mainFile, environment, flags }, + 'starting compile' + ) - // We want to run latexmk on the tex file which we will automatically - // generate from the Rtex/Rmd/md file. 
- mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex"); + // We want to run latexmk on the tex file which we will automatically + // generate from the Rtex/Rmd/md file. + mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex') - if (compiler === "pdflatex") { - command = LatexRunner._pdflatexCommand(mainFile, flags); - } else if (compiler === "latex") { - command = LatexRunner._latexCommand(mainFile, flags); - } else if (compiler === "xelatex") { - command = LatexRunner._xelatexCommand(mainFile, flags); - } else if (compiler === "lualatex") { - command = LatexRunner._lualatexCommand(mainFile, flags); - } else { - return callback(new Error(`unknown compiler: ${compiler}`)); - } + if (compiler === 'pdflatex') { + command = LatexRunner._pdflatexCommand(mainFile, flags) + } else if (compiler === 'latex') { + command = LatexRunner._latexCommand(mainFile, flags) + } else if (compiler === 'xelatex') { + command = LatexRunner._xelatexCommand(mainFile, flags) + } else if (compiler === 'lualatex') { + command = LatexRunner._lualatexCommand(mainFile, flags) + } else { + return callback(new Error(`unknown compiler: ${compiler}`)) + } - if (Settings.clsi != null ? Settings.clsi.strace : undefined) { - command = ["strace", "-o", "strace", "-ff"].concat(command); - } + if (Settings.clsi != null ? Settings.clsi.strace : undefined) { + command = ['strace', '-o', 'strace', '-ff'].concat(command) + } - const id = `${project_id}`; // record running project under this id + const id = `${project_id}` // record running project under this id - return ProcessTable[id] = CommandRunner.run(project_id, command, directory, image, timeout, environment, function(error, output) { - delete ProcessTable[id]; - if (error != null) { return callback(error); } - const runs = __guard__(__guard__(output != null ? output.stderr : undefined, x1 => x1.match(/^Run number \d+ of .*latex/mg)), x => x.length) || 0; - const failed = (__guard__(output != null ? output.stdout : undefined, x2 => x2.match(/^Latexmk: Errors/m)) != null) ? 1 : 0; - // counters from latexmk output - const stats = {}; - stats["latexmk-errors"] = failed; - stats["latex-runs"] = runs; - stats["latex-runs-with-errors"] = failed ? runs : 0; - stats[`latex-runs-${runs}`] = 1; - stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0; - // timing information from /usr/bin/time - const timings = {}; - const stderr = output != null ? output.stderr : undefined; - timings["cpu-percent"] = __guard__(stderr != null ? stderr.match(/Percent of CPU this job got: (\d+)/m) : undefined, x3 => x3[1]) || 0; - timings["cpu-time"] = __guard__(stderr != null ? stderr.match(/User time.*: (\d+.\d+)/m) : undefined, x4 => x4[1]) || 0; - timings["sys-time"] = __guard__(stderr != null ? stderr.match(/System time.*: (\d+.\d+)/m) : undefined, x5 => x5[1]) || 0; - return callback(error, output, stats, timings); - }); - }, + return (ProcessTable[id] = CommandRunner.run( + project_id, + command, + directory, + image, + timeout, + environment, + function(error, output) { + delete ProcessTable[id] + if (error != null) { + return callback(error) + } + const runs = + __guard__( + __guard__(output != null ? output.stderr : undefined, x1 => + x1.match(/^Run number \d+ of .*latex/gm) + ), + x => x.length + ) || 0 + const failed = + __guard__(output != null ? output.stdout : undefined, x2 => + x2.match(/^Latexmk: Errors/m) + ) != null + ? 
1 + : 0 + // counters from latexmk output + const stats = {} + stats['latexmk-errors'] = failed + stats['latex-runs'] = runs + stats['latex-runs-with-errors'] = failed ? runs : 0 + stats[`latex-runs-${runs}`] = 1 + stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0 + // timing information from /usr/bin/time + const timings = {} + const stderr = output != null ? output.stderr : undefined + timings['cpu-percent'] = + __guard__( + stderr != null + ? stderr.match(/Percent of CPU this job got: (\d+)/m) + : undefined, + x3 => x3[1] + ) || 0 + timings['cpu-time'] = + __guard__( + stderr != null + ? stderr.match(/User time.*: (\d+.\d+)/m) + : undefined, + x4 => x4[1] + ) || 0 + timings['sys-time'] = + __guard__( + stderr != null + ? stderr.match(/System time.*: (\d+.\d+)/m) + : undefined, + x5 => x5[1] + ) || 0 + return callback(error, output, stats, timings) + } + )) + }, - killLatex(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - const id = `${project_id}`; - logger.log({id}, "killing running compile"); - if ((ProcessTable[id] == null)) { - logger.warn({id}, "no such project to kill"); - return callback(null); - } else { - return CommandRunner.kill(ProcessTable[id], callback); - } - }, + killLatex(project_id, callback) { + if (callback == null) { + callback = function(error) {} + } + const id = `${project_id}` + logger.log({ id }, 'killing running compile') + if (ProcessTable[id] == null) { + logger.warn({ id }, 'no such project to kill') + return callback(null) + } else { + return CommandRunner.kill(ProcessTable[id], callback) + } + }, - _latexmkBaseCommand(flags) { - let args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1","-interaction=batchmode"]; - if (flags) { - args = args.concat(flags); - } - return (__guard__(Settings != null ? Settings.clsi : undefined, x => x.latexmkCommandPrefix) || []).concat(args); - }, + _latexmkBaseCommand(flags) { + let args = [ + 'latexmk', + '-cd', + '-f', + '-jobname=output', + '-auxdir=$COMPILE_DIR', + '-outdir=$COMPILE_DIR', + '-synctex=1', + '-interaction=batchmode' + ] + if (flags) { + args = args.concat(flags) + } + return ( + __guard__( + Settings != null ? 
Settings.clsi : undefined, + x => x.latexmkCommandPrefix + ) || [] + ).concat(args) + }, - _pdflatexCommand(mainFile, flags) { - return LatexRunner._latexmkBaseCommand(flags).concat([ - "-pdf", - Path.join("$COMPILE_DIR", mainFile) - ]); - }, + _pdflatexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ + '-pdf', + Path.join('$COMPILE_DIR', mainFile) + ]) + }, - _latexCommand(mainFile, flags) { - return LatexRunner._latexmkBaseCommand(flags).concat([ - "-pdfdvi", - Path.join("$COMPILE_DIR", mainFile) - ]); - }, + _latexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ + '-pdfdvi', + Path.join('$COMPILE_DIR', mainFile) + ]) + }, - _xelatexCommand(mainFile, flags) { - return LatexRunner._latexmkBaseCommand(flags).concat([ - "-xelatex", - Path.join("$COMPILE_DIR", mainFile) - ]); - }, - - _lualatexCommand(mainFile, flags) { - return LatexRunner._latexmkBaseCommand(flags).concat([ - "-lualatex", - Path.join("$COMPILE_DIR", mainFile) - ]); - } -}); + _xelatexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ + '-xelatex', + Path.join('$COMPILE_DIR', mainFile) + ]) + }, + _lualatexCommand(mainFile, flags) { + return LatexRunner._latexmkBaseCommand(flags).concat([ + '-lualatex', + Path.join('$COMPILE_DIR', mainFile) + ]) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/app/js/LocalCommandRunner.js b/app/js/LocalCommandRunner.js index 24c0d8e..61ecd88 100644 --- a/app/js/LocalCommandRunner.js +++ b/app/js/LocalCommandRunner.js @@ -13,62 +13,79 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let CommandRunner; -const { spawn } = require("child_process"); -const logger = require("logger-sharelatex"); +let CommandRunner +const { spawn } = require('child_process') +const logger = require('logger-sharelatex') -logger.info("using standard command runner"); +logger.info('using standard command runner') -module.exports = (CommandRunner = { - run(project_id, command, directory, image, timeout, environment, callback) { - let key, value; - if (callback == null) { callback = function(error) {}; } - command = (Array.from(command).map((arg) => arg.toString().replace('$COMPILE_DIR', directory))); - logger.log({project_id, command, directory}, "running command"); - logger.warn("timeouts and sandboxing are not enabled with CommandRunner"); +module.exports = CommandRunner = { + run(project_id, command, directory, image, timeout, environment, callback) { + let key, value + if (callback == null) { + callback = function(error) {} + } + command = Array.from(command).map(arg => + arg.toString().replace('$COMPILE_DIR', directory) + ) + logger.log({ project_id, command, directory }, 'running command') + logger.warn('timeouts and sandboxing are not enabled with CommandRunner') - // merge environment settings - const env = {}; - for (key in process.env) { value = process.env[key]; env[key] = value; } - for (key in environment) { value = environment[key]; env[key] = value; } + // merge environment settings + const env = {} + for (key in process.env) { + value = process.env[key] + env[key] = value + } + for (key in environment) { + value = environment[key] + env[key] = value + } - // run command as detached 
process so it has its own process group (which can be killed if needed) - const proc = spawn(command[0], command.slice(1), {cwd: directory, env}); + // run command as detached process so it has its own process group (which can be killed if needed) + const proc = spawn(command[0], command.slice(1), { cwd: directory, env }) - let stdout = ""; - proc.stdout.on("data", data=> stdout += data); + let stdout = '' + proc.stdout.on('data', data => (stdout += data)) - proc.on("error", function(err){ - logger.err({err, project_id, command, directory}, "error running command"); - return callback(err); - }); + proc.on('error', function(err) { + logger.err( + { err, project_id, command, directory }, + 'error running command' + ) + return callback(err) + }) - proc.on("close", function(code, signal) { - let err; - logger.info({code, signal, project_id}, "command exited"); - if (signal === 'SIGTERM') { // signal from kill method below - err = new Error("terminated"); - err.terminated = true; - return callback(err); - } else if (code === 1) { // exit status from chktex - err = new Error("exited"); - err.code = code; - return callback(err); - } else { - return callback(null, {"stdout": stdout}); - } - }); + proc.on('close', function(code, signal) { + let err + logger.info({ code, signal, project_id }, 'command exited') + if (signal === 'SIGTERM') { + // signal from kill method below + err = new Error('terminated') + err.terminated = true + return callback(err) + } else if (code === 1) { + // exit status from chktex + err = new Error('exited') + err.code = code + return callback(err) + } else { + return callback(null, { stdout: stdout }) + } + }) - return proc.pid; - }, // return process id to allow job to be killed if necessary + return proc.pid + }, // return process id to allow job to be killed if necessary - kill(pid, callback) { - if (callback == null) { callback = function(error) {}; } - try { - process.kill(-pid); // kill all processes in group - } catch (err) { - return callback(err); - } - return callback(); - } -}); + kill(pid, callback) { + if (callback == null) { + callback = function(error) {} + } + try { + process.kill(-pid) // kill all processes in group + } catch (err) { + return callback(err) + } + return callback() + } +} diff --git a/app/js/LockManager.js b/app/js/LockManager.js index 8930fab..2da7da1 100644 --- a/app/js/LockManager.js +++ b/app/js/LockManager.js @@ -11,46 +11,62 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let LockManager; -const Settings = require('settings-sharelatex'); -const logger = require("logger-sharelatex"); -const Lockfile = require('lockfile'); // from https://github.com/npm/lockfile -const Errors = require("./Errors"); -const fs = require("fs"); -const Path = require("path"); -module.exports = (LockManager = { - LOCK_TEST_INTERVAL: 1000, // 50ms between each test of the lock - MAX_LOCK_WAIT_TIME: 15000, // 10s maximum time to spend trying to get the lock - LOCK_STALE: 5*60*1000, // 5 mins time until lock auto expires +let LockManager +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const Lockfile = require('lockfile') // from https://github.com/npm/lockfile +const Errors = require('./Errors') +const fs = require('fs') +const Path = require('path') +module.exports = LockManager = { + LOCK_TEST_INTERVAL: 1000, // 50ms between each test of the lock + MAX_LOCK_WAIT_TIME: 15000, // 10s maximum time to spend trying to 
get the lock + LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires - runWithLock(path, runner, callback) { - if (callback == null) { callback = function(error) {}; } - const lockOpts = { - wait: this.MAX_LOCK_WAIT_TIME, - pollPeriod: this.LOCK_TEST_INTERVAL, - stale: this.LOCK_STALE - }; - return Lockfile.lock(path, lockOpts, function(error) { - if ((error != null ? error.code : undefined) === 'EEXIST') { - return callback(new Errors.AlreadyCompilingError("compile in progress")); - } else if (error != null) { - return fs.lstat(path, (statLockErr, statLock)=> - fs.lstat(Path.dirname(path), (statDirErr, statDir)=> - fs.readdir(Path.dirname(path), function(readdirErr, readdirDir){ - logger.err({error, path, statLock, statLockErr, statDir, statDirErr, readdirErr, readdirDir}, "unable to get lock"); - return callback(error); - }) - ) - ); - } else { - return runner((error1, ...args) => - Lockfile.unlock(path, function(error2) { - error = error1 || error2; - if (error != null) { return callback(error); } - return callback(null, ...Array.from(args)); - }) - ); - } - }); - } -}); + runWithLock(path, runner, callback) { + if (callback == null) { + callback = function(error) {} + } + const lockOpts = { + wait: this.MAX_LOCK_WAIT_TIME, + pollPeriod: this.LOCK_TEST_INTERVAL, + stale: this.LOCK_STALE + } + return Lockfile.lock(path, lockOpts, function(error) { + if ((error != null ? error.code : undefined) === 'EEXIST') { + return callback(new Errors.AlreadyCompilingError('compile in progress')) + } else if (error != null) { + return fs.lstat(path, (statLockErr, statLock) => + fs.lstat(Path.dirname(path), (statDirErr, statDir) => + fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) { + logger.err( + { + error, + path, + statLock, + statLockErr, + statDir, + statDirErr, + readdirErr, + readdirDir + }, + 'unable to get lock' + ) + return callback(error) + }) + ) + ) + } else { + return runner((error1, ...args) => + Lockfile.unlock(path, function(error2) { + error = error1 || error2 + if (error != null) { + return callback(error) + } + return callback(null, ...Array.from(args)) + }) + ) + } + }) + } +} diff --git a/app/js/Metrics.js b/app/js/Metrics.js index 94623da..e967641 100644 --- a/app/js/Metrics.js +++ b/app/js/Metrics.js @@ -1,4 +1,3 @@ // TODO: This file was created by bulk-decaffeinate. // Sanity-check the conversion and remove this comment. 
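An editor-added usage sketch (not part of this patch) of the converted LockManager.runWithLock above; the lockfile path and the work done inside the runner are made up, only the runWithLock signature and the EEXIST-to-AlreadyCompilingError mapping come from the code.

// Hypothetical caller: serialise work on a compile directory via a lockfile.
const LockManager = require('./LockManager')
const Errors = require('./Errors')

LockManager.runWithLock(
  '/compiles/project-123/.project-lock', // illustrative lockfile path
  releaseLock => {
    // protected section: do the work, then release with (error, ...results)
    releaseLock(null, 'compile-finished')
  },
  (error, result) => {
    if (error instanceof Errors.AlreadyCompilingError) {
      // lockfile already existed (EEXIST): another compile holds the lock
    } else if (error != null) {
      // failed to acquire the lock, or the runner/unlock reported an error
    } else {
      // lock acquired, runner finished, lock released; result === 'compile-finished'
    }
  }
)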
-module.exports = require("metrics-sharelatex"); - +module.exports = require('metrics-sharelatex') diff --git a/app/js/OutputCacheManager.js b/app/js/OutputCacheManager.js index b1bda0e..c2c962f 100644 --- a/app/js/OutputCacheManager.js +++ b/app/js/OutputCacheManager.js @@ -13,263 +13,387 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let OutputCacheManager; -const async = require("async"); -const fs = require("fs"); -const fse = require("fs-extra"); -const Path = require("path"); -const logger = require("logger-sharelatex"); -const _ = require("underscore"); -const Settings = require("settings-sharelatex"); -const crypto = require("crypto"); +let OutputCacheManager +const async = require('async') +const fs = require('fs') +const fse = require('fs-extra') +const Path = require('path') +const logger = require('logger-sharelatex') +const _ = require('underscore') +const Settings = require('settings-sharelatex') +const crypto = require('crypto') -const OutputFileOptimiser = require("./OutputFileOptimiser"); +const OutputFileOptimiser = require('./OutputFileOptimiser') -module.exports = (OutputCacheManager = { - CACHE_SUBDIR: '.cache/clsi', - ARCHIVE_SUBDIR: '.archive/clsi', - // build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes - // for backwards compatibility, make the randombytes part optional - BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/, - CACHE_LIMIT: 2, // maximum number of cache directories - CACHE_AGE: 60*60*1000, // up to one hour old +module.exports = OutputCacheManager = { + CACHE_SUBDIR: '.cache/clsi', + ARCHIVE_SUBDIR: '.archive/clsi', + // build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes + // for backwards compatibility, make the randombytes part optional + BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/, + CACHE_LIMIT: 2, // maximum number of cache directories + CACHE_AGE: 60 * 60 * 1000, // up to one hour old - path(buildId, file) { - // used by static server, given build id return '.cache/clsi/buildId' - if (buildId.match(OutputCacheManager.BUILD_REGEX)) { - return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file); - } else { - // for invalid build id, return top level - return file; - } - }, + path(buildId, file) { + // used by static server, given build id return '.cache/clsi/buildId' + if (buildId.match(OutputCacheManager.BUILD_REGEX)) { + return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file) + } else { + // for invalid build id, return top level + return file + } + }, - generateBuildId(callback) { - // generate a secure build id from Date.now() and 8 random bytes in hex - if (callback == null) { callback = function(error, buildId) {}; } - return crypto.randomBytes(8, function(err, buf) { - if (err != null) { return callback(err); } - const random = buf.toString('hex'); - const date = Date.now().toString(16); - return callback(err, `${date}-${random}`); - }); - }, + generateBuildId(callback) { + // generate a secure build id from Date.now() and 8 random bytes in hex + if (callback == null) { + callback = function(error, buildId) {} + } + return crypto.randomBytes(8, function(err, buf) { + if (err != null) { + return callback(err) + } + const random = buf.toString('hex') + const date = Date.now().toString(16) + return callback(err, `${date}-${random}`) + }) + }, - saveOutputFiles(outputFiles, compileDir, callback) { - if (callback == null) { callback = function(error) {}; } - return OutputCacheManager.generateBuildId(function(err, buildId) { - 
if (err != null) { return callback(err); } - return OutputCacheManager.saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback); - }); - }, + saveOutputFiles(outputFiles, compileDir, callback) { + if (callback == null) { + callback = function(error) {} + } + return OutputCacheManager.generateBuildId(function(err, buildId) { + if (err != null) { + return callback(err) + } + return OutputCacheManager.saveOutputFilesInBuildDir( + outputFiles, + compileDir, + buildId, + callback + ) + }) + }, - saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) { - // make a compileDir/CACHE_SUBDIR/build_id directory and - // copy all the output files into it - if (callback == null) { callback = function(error) {}; } - const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR); - // Put the files into a new cache subdirectory - const cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId); - // Is it a per-user compile? check if compile directory is PROJECTID-USERID - const perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/); + saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) { + // make a compileDir/CACHE_SUBDIR/build_id directory and + // copy all the output files into it + if (callback == null) { + callback = function(error) {} + } + const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR) + // Put the files into a new cache subdirectory + const cacheDir = Path.join( + compileDir, + OutputCacheManager.CACHE_SUBDIR, + buildId + ) + // Is it a per-user compile? check if compile directory is PROJECTID-USERID + const perUser = Path.basename(compileDir).match( + /^[0-9a-f]{24}-[0-9a-f]{24}$/ + ) - // Archive logs in background - if ((Settings.clsi != null ? Settings.clsi.archive_logs : undefined) || (Settings.clsi != null ? Settings.clsi.strace : undefined)) { - OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(err) { - if (err != null) { - return logger.warn({err}, "erroring archiving log files"); - } - }); - } + // Archive logs in background + if ( + (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) || + (Settings.clsi != null ? 
Settings.clsi.strace : undefined) + ) { + OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function( + err + ) { + if (err != null) { + return logger.warn({ err }, 'erroring archiving log files') + } + }) + } - // make the new cache directory - return fse.ensureDir(cacheDir, function(err) { - if (err != null) { - logger.error({err, directory: cacheDir}, "error creating cache directory"); - return callback(err, outputFiles); - } else { - // copy all the output files into the new cache directory - const results = []; - return async.mapSeries(outputFiles, function(file, cb) { - // don't send dot files as output, express doesn't serve them - if (OutputCacheManager._fileIsHidden(file.path)) { - logger.debug({compileDir, path: file.path}, "ignoring dotfile in output"); - return cb(); - } - // copy other files into cache directory if valid - const newFile = _.clone(file); - const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]); - return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) { - if (err != null) { return cb(err); } - if (!isSafe) { - return cb(); - } - return OutputCacheManager._checkIfShouldCopy(src, function(err, shouldCopy) { - if (err != null) { return cb(err); } - if (!shouldCopy) { - return cb(); - } - return OutputCacheManager._copyFile(src, dst, function(err) { - if (err != null) { return cb(err); } - newFile.build = buildId; // attach a build id if we cached the file - results.push(newFile); - return cb(); - }); - }); - }); - } - , function(err) { - if (err != null) { - // pass back the original files if we encountered *any* error - callback(err, outputFiles); - // clean up the directory we just created - return fse.remove(cacheDir, function(err) { - if (err != null) { - return logger.error({err, dir: cacheDir}, "error removing cache dir after failure"); - } - }); - } else { - // pass back the list of new files in the cache - callback(err, results); - // let file expiry run in the background, expire all previous files if per-user - return OutputCacheManager.expireOutputFiles(cacheRoot, {keep: buildId, limit: perUser ? 
1 : null}); - } - }); - } - }); - }, + // make the new cache directory + return fse.ensureDir(cacheDir, function(err) { + if (err != null) { + logger.error( + { err, directory: cacheDir }, + 'error creating cache directory' + ) + return callback(err, outputFiles) + } else { + // copy all the output files into the new cache directory + const results = [] + return async.mapSeries( + outputFiles, + function(file, cb) { + // don't send dot files as output, express doesn't serve them + if (OutputCacheManager._fileIsHidden(file.path)) { + logger.debug( + { compileDir, path: file.path }, + 'ignoring dotfile in output' + ) + return cb() + } + // copy other files into cache directory if valid + const newFile = _.clone(file) + const [src, dst] = Array.from([ + Path.join(compileDir, file.path), + Path.join(cacheDir, file.path) + ]) + return OutputCacheManager._checkFileIsSafe(src, function( + err, + isSafe + ) { + if (err != null) { + return cb(err) + } + if (!isSafe) { + return cb() + } + return OutputCacheManager._checkIfShouldCopy(src, function( + err, + shouldCopy + ) { + if (err != null) { + return cb(err) + } + if (!shouldCopy) { + return cb() + } + return OutputCacheManager._copyFile(src, dst, function(err) { + if (err != null) { + return cb(err) + } + newFile.build = buildId // attach a build id if we cached the file + results.push(newFile) + return cb() + }) + }) + }) + }, + function(err) { + if (err != null) { + // pass back the original files if we encountered *any* error + callback(err, outputFiles) + // clean up the directory we just created + return fse.remove(cacheDir, function(err) { + if (err != null) { + return logger.error( + { err, dir: cacheDir }, + 'error removing cache dir after failure' + ) + } + }) + } else { + // pass back the list of new files in the cache + callback(err, results) + // let file expiry run in the background, expire all previous files if per-user + return OutputCacheManager.expireOutputFiles(cacheRoot, { + keep: buildId, + limit: perUser ? 
1 : null + }) + } + } + ) + } + }) + }, - archiveLogs(outputFiles, compileDir, buildId, callback) { - if (callback == null) { callback = function(error) {}; } - const archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId); - logger.log({dir: archiveDir}, "archiving log files for project"); - return fse.ensureDir(archiveDir, function(err) { - if (err != null) { return callback(err); } - return async.mapSeries(outputFiles, function(file, cb) { - const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]); - return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) { - if (err != null) { return cb(err); } - if (!isSafe) { return cb(); } - return OutputCacheManager._checkIfShouldArchive(src, function(err, shouldArchive) { - if (err != null) { return cb(err); } - if (!shouldArchive) { return cb(); } - return OutputCacheManager._copyFile(src, dst, cb); - }); - }); - } - , callback); - }); - }, + archiveLogs(outputFiles, compileDir, buildId, callback) { + if (callback == null) { + callback = function(error) {} + } + const archiveDir = Path.join( + compileDir, + OutputCacheManager.ARCHIVE_SUBDIR, + buildId + ) + logger.log({ dir: archiveDir }, 'archiving log files for project') + return fse.ensureDir(archiveDir, function(err) { + if (err != null) { + return callback(err) + } + return async.mapSeries( + outputFiles, + function(file, cb) { + const [src, dst] = Array.from([ + Path.join(compileDir, file.path), + Path.join(archiveDir, file.path) + ]) + return OutputCacheManager._checkFileIsSafe(src, function( + err, + isSafe + ) { + if (err != null) { + return cb(err) + } + if (!isSafe) { + return cb() + } + return OutputCacheManager._checkIfShouldArchive(src, function( + err, + shouldArchive + ) { + if (err != null) { + return cb(err) + } + if (!shouldArchive) { + return cb() + } + return OutputCacheManager._copyFile(src, dst, cb) + }) + }) + }, + callback + ) + }) + }, - expireOutputFiles(cacheRoot, options, callback) { - // look in compileDir for build dirs and delete if > N or age of mod time > T - if (callback == null) { callback = function(error) {}; } - return fs.readdir(cacheRoot, function(err, results) { - if (err != null) { - if (err.code === 'ENOENT') { return callback(null); } // cache directory is empty - logger.error({err, project_id: cacheRoot}, "error clearing cache"); - return callback(err); - } + expireOutputFiles(cacheRoot, options, callback) { + // look in compileDir for build dirs and delete if > N or age of mod time > T + if (callback == null) { + callback = function(error) {} + } + return fs.readdir(cacheRoot, function(err, results) { + if (err != null) { + if (err.code === 'ENOENT') { + return callback(null) + } // cache directory is empty + logger.error({ err, project_id: cacheRoot }, 'error clearing cache') + return callback(err) + } - const dirs = results.sort().reverse(); - const currentTime = Date.now(); + const dirs = results.sort().reverse() + const currentTime = Date.now() - const isExpired = function(dir, index) { - if ((options != null ? options.keep : undefined) === dir) { return false; } - // remove any directories over the requested (non-null) limit - if (((options != null ? 
options.limit : undefined) != null) && (index > options.limit)) { return true; } - // remove any directories over the hard limit - if (index > OutputCacheManager.CACHE_LIMIT) { return true; } - // we can get the build time from the first part of the directory name DDDD-RRRR - // DDDD is date and RRRR is random bytes - const dirTime = parseInt(__guard__(dir.split('-'), x => x[0]), 16); - const age = currentTime - dirTime; - return age > OutputCacheManager.CACHE_AGE; - }; + const isExpired = function(dir, index) { + if ((options != null ? options.keep : undefined) === dir) { + return false + } + // remove any directories over the requested (non-null) limit + if ( + (options != null ? options.limit : undefined) != null && + index > options.limit + ) { + return true + } + // remove any directories over the hard limit + if (index > OutputCacheManager.CACHE_LIMIT) { + return true + } + // we can get the build time from the first part of the directory name DDDD-RRRR + // DDDD is date and RRRR is random bytes + const dirTime = parseInt( + __guard__(dir.split('-'), x => x[0]), + 16 + ) + const age = currentTime - dirTime + return age > OutputCacheManager.CACHE_AGE + } - const toRemove = _.filter(dirs, isExpired); + const toRemove = _.filter(dirs, isExpired) - const removeDir = (dir, cb) => - fse.remove(Path.join(cacheRoot, dir), function(err, result) { - logger.log({cache: cacheRoot, dir}, "removed expired cache dir"); - if (err != null) { - logger.error({err, dir}, "cache remove error"); - } - return cb(err, result); - }) - ; + const removeDir = (dir, cb) => + fse.remove(Path.join(cacheRoot, dir), function(err, result) { + logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir') + if (err != null) { + logger.error({ err, dir }, 'cache remove error') + } + return cb(err, result) + }) + return async.eachSeries( + toRemove, + (dir, cb) => removeDir(dir, cb), + callback + ) + }) + }, - return async.eachSeries(toRemove, (dir, cb) => removeDir(dir, cb) - , callback); - }); - }, + _fileIsHidden(path) { + return (path != null ? path.match(/^\.|\/\./) : undefined) != null + }, - _fileIsHidden(path) { - return ((path != null ? path.match(/^\.|\/\./) : undefined) != null); - }, + _checkFileIsSafe(src, callback) { + // check if we have a valid file to copy into the cache + if (callback == null) { + callback = function(error, isSafe) {} + } + return fs.stat(src, function(err, stats) { + if ((err != null ? err.code : undefined) === 'ENOENT') { + logger.warn( + { err, file: src }, + 'file has disappeared before copying to build cache' + ) + return callback(err, false) + } else if (err != null) { + // some other problem reading the file + logger.error({ err, file: src }, 'stat error for file in cache') + return callback(err, false) + } else if (!stats.isFile()) { + // other filetype - reject it + logger.warn( + { src, stat: stats }, + 'nonfile output - refusing to copy to cache' + ) + return callback(null, false) + } else { + // it's a plain file, ok to copy + return callback(null, true) + } + }) + }, - _checkFileIsSafe(src, callback) { - // check if we have a valid file to copy into the cache - if (callback == null) { callback = function(error, isSafe) {}; } - return fs.stat(src, function(err, stats) { - if ((err != null ? 
err.code : undefined) === 'ENOENT') { - logger.warn({err, file: src}, "file has disappeared before copying to build cache"); - return callback(err, false); - } else if (err != null) { - // some other problem reading the file - logger.error({err, file: src}, "stat error for file in cache"); - return callback(err, false); - } else if (!stats.isFile()) { - // other filetype - reject it - logger.warn({src, stat: stats}, "nonfile output - refusing to copy to cache"); - return callback(null, false); - } else { - // it's a plain file, ok to copy - return callback(null, true); - } - }); - }, + _copyFile(src, dst, callback) { + // copy output file into the cache + return fse.copy(src, dst, function(err) { + if ((err != null ? err.code : undefined) === 'ENOENT') { + logger.warn( + { err, file: src }, + 'file has disappeared when copying to build cache' + ) + return callback(err, false) + } else if (err != null) { + logger.error({ err, src, dst }, 'copy error for file in cache') + return callback(err) + } else { + if ( + Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined + ) { + // don't run any optimisations on the pdf when they are done + // in the docker container + return callback() + } else { + // call the optimiser for the file too + return OutputFileOptimiser.optimiseFile(src, dst, callback) + } + } + }) + }, - _copyFile(src, dst, callback) { - // copy output file into the cache - return fse.copy(src, dst, function(err) { - if ((err != null ? err.code : undefined) === 'ENOENT') { - logger.warn({err, file: src}, "file has disappeared when copying to build cache"); - return callback(err, false); - } else if (err != null) { - logger.error({err, src, dst}, "copy error for file in cache"); - return callback(err); - } else { - if ((Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined)) { - // don't run any optimisations on the pdf when they are done - // in the docker container - return callback(); - } else { - // call the optimiser for the file too - return OutputFileOptimiser.optimiseFile(src, dst, callback); - } - } - }); - }, + _checkIfShouldCopy(src, callback) { + if (callback == null) { + callback = function(err, shouldCopy) {} + } + return callback(null, !Path.basename(src).match(/^strace/)) + }, - _checkIfShouldCopy(src, callback) { - if (callback == null) { callback = function(err, shouldCopy) {}; } - return callback(null, !Path.basename(src).match(/^strace/)); - }, - - _checkIfShouldArchive(src, callback) { - let needle; - if (callback == null) { callback = function(err, shouldCopy) {}; } - if (Path.basename(src).match(/^strace/)) { - return callback(null, true); - } - if ((Settings.clsi != null ? Settings.clsi.archive_logs : undefined) && (needle = Path.basename(src), ["output.log", "output.blg"].includes(needle))) { - return callback(null, true); - } - return callback(null, false); - } -}); + _checkIfShouldArchive(src, callback) { + let needle + if (callback == null) { + callback = function(err, shouldCopy) {} + } + if (Path.basename(src).match(/^strace/)) { + return callback(null, true) + } + if ( + (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) && + ((needle = Path.basename(src)), + ['output.log', 'output.blg'].includes(needle)) + ) { + return callback(null, true) + } + return callback(null, false) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/app/js/OutputFileFinder.js b/app/js/OutputFileFinder.js index 21a7587..50012b5 100644 --- a/app/js/OutputFileFinder.js +++ b/app/js/OutputFileFinder.js @@ -14,73 +14,102 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let OutputFileFinder; -const async = require("async"); -const fs = require("fs"); -const Path = require("path"); -const { spawn } = require("child_process"); -const logger = require("logger-sharelatex"); +let OutputFileFinder +const async = require('async') +const fs = require('fs') +const Path = require('path') +const { spawn } = require('child_process') +const logger = require('logger-sharelatex') -module.exports = (OutputFileFinder = { - findOutputFiles(resources, directory, callback) { - if (callback == null) { callback = function(error, outputFiles, allFiles) {}; } - const incomingResources = {}; - for (const resource of Array.from(resources)) { - incomingResources[resource.path] = true; - } - - return OutputFileFinder._getAllFiles(directory, function(error, allFiles) { - if (allFiles == null) { allFiles = []; } - if (error != null) { - logger.err({err:error}, "error finding all output files"); - return callback(error); - } - const outputFiles = []; - for (const file of Array.from(allFiles)) { - if (!incomingResources[file]) { - outputFiles.push({ - path: file, - type: __guard__(file.match(/\.([^\.]+)$/), x => x[1]) - }); - } - } - return callback(null, outputFiles, allFiles); - }); - }, +module.exports = OutputFileFinder = { + findOutputFiles(resources, directory, callback) { + if (callback == null) { + callback = function(error, outputFiles, allFiles) {} + } + const incomingResources = {} + for (const resource of Array.from(resources)) { + incomingResources[resource.path] = true + } - _getAllFiles(directory, _callback) { - if (_callback == null) { _callback = function(error, fileList) {}; } - const callback = function(error, fileList) { - _callback(error, fileList); - return _callback = function() {}; - }; + return OutputFileFinder._getAllFiles(directory, function(error, allFiles) { + if (allFiles == null) { + allFiles = [] + } + if (error != null) { + logger.err({ err: error }, 'error finding all output files') + return callback(error) + } + const outputFiles = [] + for (const file of Array.from(allFiles)) { + if (!incomingResources[file]) { + outputFiles.push({ + path: file, + type: __guard__(file.match(/\.([^\.]+)$/), x => x[1]) + }) + } + } + return callback(null, outputFiles, allFiles) + }) + }, - // don't include clsi-specific files/directories in the output list - const EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive","-o", "-name", ".project-*"]; - const args = [directory, "(", ...Array.from(EXCLUDE_DIRS), ")", "-prune", "-o", "-type", "f", "-print"]; - logger.log({args}, "running find command"); + _getAllFiles(directory, _callback) { + if (_callback == null) { + _callback = function(error, fileList) {} + } + const callback = function(error, fileList) { + _callback(error, fileList) + return (_callback = function() {}) + } - const proc = spawn("find", args); - let stdout = ""; - proc.stdout.on("data", chunk => stdout += chunk.toString()); - proc.on("error", callback); - return proc.on("close", function(code) { - if (code !== 0) { - logger.warn({directory, code}, "find returned error, directory likely doesn't exist"); - return callback(null, []); - } - let fileList = stdout.trim().split("\n"); - fileList = 
fileList.map(function(file) { - // Strip leading directory - let path; - return path = Path.relative(directory, file); - }); - return callback(null, fileList); - }); - } -}); + // don't include clsi-specific files/directories in the output list + const EXCLUDE_DIRS = [ + '-name', + '.cache', + '-o', + '-name', + '.archive', + '-o', + '-name', + '.project-*' + ] + const args = [ + directory, + '(', + ...Array.from(EXCLUDE_DIRS), + ')', + '-prune', + '-o', + '-type', + 'f', + '-print' + ] + logger.log({ args }, 'running find command') + const proc = spawn('find', args) + let stdout = '' + proc.stdout.on('data', chunk => (stdout += chunk.toString())) + proc.on('error', callback) + return proc.on('close', function(code) { + if (code !== 0) { + logger.warn( + { directory, code }, + "find returned error, directory likely doesn't exist" + ) + return callback(null, []) + } + let fileList = stdout.trim().split('\n') + fileList = fileList.map(function(file) { + // Strip leading directory + let path + return (path = Path.relative(directory, file)) + }) + return callback(null, fileList) + }) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/app/js/OutputFileOptimiser.js b/app/js/OutputFileOptimiser.js index 149d384..c0b8cc1 100644 --- a/app/js/OutputFileOptimiser.js +++ b/app/js/OutputFileOptimiser.js @@ -13,74 +13,92 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let OutputFileOptimiser; -const fs = require("fs"); -const Path = require("path"); -const { spawn } = require("child_process"); -const logger = require("logger-sharelatex"); -const Metrics = require("./Metrics"); -const _ = require("underscore"); +let OutputFileOptimiser +const fs = require('fs') +const Path = require('path') +const { spawn } = require('child_process') +const logger = require('logger-sharelatex') +const Metrics = require('./Metrics') +const _ = require('underscore') -module.exports = (OutputFileOptimiser = { +module.exports = OutputFileOptimiser = { + optimiseFile(src, dst, callback) { + // check output file (src) and see if we can optimise it, storing + // the result in the build directory (dst) + if (callback == null) { + callback = function(error) {} + } + if (src.match(/\/output\.pdf$/)) { + return OutputFileOptimiser.checkIfPDFIsOptimised(src, function( + err, + isOptimised + ) { + if (err != null || isOptimised) { + return callback(null) + } + return OutputFileOptimiser.optimisePDF(src, dst, callback) + }) + } else { + return callback(null) + } + }, - optimiseFile(src, dst, callback) { - // check output file (src) and see if we can optimise it, storing - // the result in the build directory (dst) - if (callback == null) { callback = function(error) {}; } - if (src.match(/\/output\.pdf$/)) { - return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(err, isOptimised) { - if ((err != null) || isOptimised) { return callback(null); } - return OutputFileOptimiser.optimisePDF(src, dst, callback); - }); - } else { - return callback((null)); - } - }, + checkIfPDFIsOptimised(file, callback) { + const SIZE = 16 * 1024 // check the header of the pdf + const result = new Buffer(SIZE) + result.fill(0) // prevent leakage of uninitialised buffer + return fs.open(file, 'r', function(err, fd) { + if (err != 
null) { + return callback(err) + } + return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) => + fs.close(fd, function(errClose) { + if (errRead != null) { + return callback(errRead) + } + if (typeof errReadClose !== 'undefined' && errReadClose !== null) { + return callback(errClose) + } + const isOptimised = + buffer.toString('ascii').indexOf('/Linearized 1') >= 0 + return callback(null, isOptimised) + }) + ) + }) + }, - checkIfPDFIsOptimised(file, callback) { - const SIZE = 16*1024; // check the header of the pdf - const result = new Buffer(SIZE); - result.fill(0); // prevent leakage of uninitialised buffer - return fs.open(file, "r", function(err, fd) { - if (err != null) { return callback(err); } - return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) => - fs.close(fd, function(errClose) { - if (errRead != null) { return callback(errRead); } - if (typeof errReadClose !== 'undefined' && errReadClose !== null) { return callback(errClose); } - const isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0; - return callback(null, isOptimised); - }) - ); - }); - }, + optimisePDF(src, dst, callback) { + if (callback == null) { + callback = function(error) {} + } + const tmpOutput = dst + '.opt' + const args = ['--linearize', src, tmpOutput] + logger.log({ args }, 'running qpdf command') - optimisePDF(src, dst, callback) { - if (callback == null) { callback = function(error) {}; } - const tmpOutput = dst + '.opt'; - const args = ["--linearize", src, tmpOutput]; - logger.log({args}, "running qpdf command"); - - const timer = new Metrics.Timer("qpdf"); - const proc = spawn("qpdf", args); - let stdout = ""; - proc.stdout.on("data", chunk => stdout += chunk.toString()); - callback = _.once(callback); // avoid double call back for error and close event - proc.on("error", function(err) { - logger.warn({err, args}, "qpdf failed"); - return callback(null); - }); // ignore the error - return proc.on("close", function(code) { - timer.done(); - if (code !== 0) { - logger.warn({code, args}, "qpdf returned error"); - return callback(null); // ignore the error - } - return fs.rename(tmpOutput, dst, function(err) { - if (err != null) { - logger.warn({tmpOutput, dst}, "failed to rename output of qpdf command"); - } - return callback(null); - }); - }); - } // ignore the error -}); + const timer = new Metrics.Timer('qpdf') + const proc = spawn('qpdf', args) + let stdout = '' + proc.stdout.on('data', chunk => (stdout += chunk.toString())) + callback = _.once(callback) // avoid double call back for error and close event + proc.on('error', function(err) { + logger.warn({ err, args }, 'qpdf failed') + return callback(null) + }) // ignore the error + return proc.on('close', function(code) { + timer.done() + if (code !== 0) { + logger.warn({ code, args }, 'qpdf returned error') + return callback(null) // ignore the error + } + return fs.rename(tmpOutput, dst, function(err) { + if (err != null) { + logger.warn( + { tmpOutput, dst }, + 'failed to rename output of qpdf command' + ) + } + return callback(null) + }) + }) + } // ignore the error +} diff --git a/app/js/ProjectPersistenceManager.js b/app/js/ProjectPersistenceManager.js index 856c156..8015baa 100644 --- a/app/js/ProjectPersistenceManager.js +++ b/app/js/ProjectPersistenceManager.js @@ -11,113 +11,153 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let ProjectPersistenceManager; -const UrlCache = require("./UrlCache"); 
-const CompileManager = require("./CompileManager"); -const db = require("./db"); -const dbQueue = require("./DbQueue"); -const async = require("async"); -const logger = require("logger-sharelatex"); -const oneDay = 24 * 60 * 60 * 1000; -const Settings = require("settings-sharelatex"); +let ProjectPersistenceManager +const UrlCache = require('./UrlCache') +const CompileManager = require('./CompileManager') +const db = require('./db') +const dbQueue = require('./DbQueue') +const async = require('async') +const logger = require('logger-sharelatex') +const oneDay = 24 * 60 * 60 * 1000 +const Settings = require('settings-sharelatex') -module.exports = (ProjectPersistenceManager = { +module.exports = ProjectPersistenceManager = { + EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5, - EXPIRY_TIMEOUT: Settings.project_cache_length_ms || (oneDay * 2.5), + markProjectAsJustAccessed(project_id, callback) { + if (callback == null) { + callback = function(error) {} + } + const job = cb => + db.Project.findOrCreate({ where: { project_id } }) + .spread((project, created) => + project + .updateAttributes({ lastAccessed: new Date() }) + .then(() => cb()) + .error(cb) + ) + .error(cb) + return dbQueue.queue.push(job, callback) + }, - markProjectAsJustAccessed(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - const job = cb=> - db.Project.findOrCreate({where: {project_id}}) - .spread( - (project, created) => - project.updateAttributes({lastAccessed: new Date()}) - .then(() => cb()) - .error(cb) - ) - .error(cb) - ; - return dbQueue.queue.push(job, callback); - }, + clearExpiredProjects(callback) { + if (callback == null) { + callback = function(error) {} + } + return ProjectPersistenceManager._findExpiredProjectIds(function( + error, + project_ids + ) { + if (error != null) { + return callback(error) + } + logger.log({ project_ids }, 'clearing expired projects') + const jobs = Array.from(project_ids || []).map(project_id => + (project_id => callback => + ProjectPersistenceManager.clearProjectFromCache(project_id, function( + err + ) { + if (err != null) { + logger.error({ err, project_id }, 'error clearing project') + } + return callback() + }))(project_id) + ) + return async.series(jobs, function(error) { + if (error != null) { + return callback(error) + } + return CompileManager.clearExpiredProjects( + ProjectPersistenceManager.EXPIRY_TIMEOUT, + error => callback() + ) + }) + }) + }, // ignore any errors from deleting directories + clearProject(project_id, user_id, callback) { + if (callback == null) { + callback = function(error) {} + } + logger.log({ project_id, user_id }, 'clearing project for user') + return CompileManager.clearProject(project_id, user_id, function(error) { + if (error != null) { + return callback(error) + } + return ProjectPersistenceManager.clearProjectFromCache( + project_id, + function(error) { + if (error != null) { + return callback(error) + } + return callback() + } + ) + }) + }, - clearExpiredProjects(callback) { - if (callback == null) { callback = function(error) {}; } - return ProjectPersistenceManager._findExpiredProjectIds(function(error, project_ids) { - if (error != null) { return callback(error); } - logger.log({project_ids}, "clearing expired projects"); - const jobs = (Array.from(project_ids || [])).map((project_id) => - (project_id => - callback => - ProjectPersistenceManager.clearProjectFromCache(project_id, function(err) { - if (err != null) { - logger.error({err, project_id}, "error clearing project"); - } - 
return callback(); - }) - - )(project_id)); - return async.series(jobs, function(error) { - if (error != null) { return callback(error); } - return CompileManager.clearExpiredProjects(ProjectPersistenceManager.EXPIRY_TIMEOUT, error => callback()); - }); - }); - }, // ignore any errors from deleting directories + clearProjectFromCache(project_id, callback) { + if (callback == null) { + callback = function(error) {} + } + logger.log({ project_id }, 'clearing project from cache') + return UrlCache.clearProject(project_id, function(error) { + if (error != null) { + logger.err({ error, project_id }, 'error clearing project from cache') + return callback(error) + } + return ProjectPersistenceManager._clearProjectFromDatabase( + project_id, + function(error) { + if (error != null) { + logger.err( + { error, project_id }, + 'error clearing project from database' + ) + } + return callback(error) + } + ) + }) + }, - clearProject(project_id, user_id, callback) { - if (callback == null) { callback = function(error) {}; } - logger.log({project_id, user_id}, "clearing project for user"); - return CompileManager.clearProject(project_id, user_id, function(error) { - if (error != null) { return callback(error); } - return ProjectPersistenceManager.clearProjectFromCache(project_id, function(error) { - if (error != null) { return callback(error); } - return callback(); - }); - }); - }, + _clearProjectFromDatabase(project_id, callback) { + if (callback == null) { + callback = function(error) {} + } + logger.log({ project_id }, 'clearing project from database') + const job = cb => + db.Project.destroy({ where: { project_id } }) + .then(() => cb()) + .error(cb) + return dbQueue.queue.push(job, callback) + }, - clearProjectFromCache(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - logger.log({project_id}, "clearing project from cache"); - return UrlCache.clearProject(project_id, function(error) { - if (error != null) { - logger.err({error, project_id}, "error clearing project from cache"); - return callback(error); - } - return ProjectPersistenceManager._clearProjectFromDatabase(project_id, function(error) { - if (error != null) { - logger.err({error, project_id}, "error clearing project from database"); - } - return callback(error); - }); - }); - }, + _findExpiredProjectIds(callback) { + if (callback == null) { + callback = function(error, project_ids) {} + } + const job = function(cb) { + const keepProjectsFrom = new Date( + Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT + ) + const q = {} + q[db.op.lt] = keepProjectsFrom + return db.Project.findAll({ where: { lastAccessed: q } }) + .then(projects => + cb( + null, + projects.map(project => project.project_id) + ) + ) + .error(cb) + } - _clearProjectFromDatabase(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - logger.log({project_id}, "clearing project from database"); - const job = cb=> - db.Project.destroy({where: {project_id}}) - .then(() => cb()) - .error(cb) - ; - return dbQueue.queue.push(job, callback); - }, + return dbQueue.queue.push(job, callback) + } +} - - _findExpiredProjectIds(callback) { - if (callback == null) { callback = function(error, project_ids) {}; } - const job = function(cb){ - const keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT); - const q = {}; - q[db.op.lt] = keepProjectsFrom; - return db.Project.findAll({where:{lastAccessed:q}}) - .then(projects => cb(null, projects.map(project => project.project_id))).error(cb); - }; - 
- return dbQueue.queue.push(job, callback); - } -}); - - -logger.log({EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"); +logger.log( + { EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT }, + 'project assets kept timeout' +) diff --git a/app/js/RequestParser.js b/app/js/RequestParser.js index 6641086..acfdc66 100644 --- a/app/js/RequestParser.js +++ b/app/js/RequestParser.js @@ -17,177 +17,201 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let RequestParser; -const settings = require("settings-sharelatex"); +let RequestParser +const settings = require('settings-sharelatex') -module.exports = (RequestParser = { - VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"], - MAX_TIMEOUT: 600, +module.exports = RequestParser = { + VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'], + MAX_TIMEOUT: 600, - parse(body, callback) { - let resource; - if (callback == null) { callback = function(error, data) {}; } - const response = {}; + parse(body, callback) { + let resource + if (callback == null) { + callback = function(error, data) {} + } + const response = {} - if ((body.compile == null)) { - return callback("top level object should have a compile attribute"); - } + if (body.compile == null) { + return callback('top level object should have a compile attribute') + } - const { compile } = body; - if (!compile.options) { compile.options = {}; } + const { compile } = body + if (!compile.options) { + compile.options = {} + } - try { - response.compiler = this._parseAttribute("compiler", - compile.options.compiler, { - validValues: this.VALID_COMPILERS, - default: "pdflatex", - type: "string" - } - ); - response.timeout = this._parseAttribute("timeout", - compile.options.timeout, { - default: RequestParser.MAX_TIMEOUT, - type: "number" - } - ); - response.imageName = this._parseAttribute("imageName", - compile.options.imageName, - {type: "string"}); - response.draft = this._parseAttribute("draft", - compile.options.draft, { - default: false, - type: "boolean" - } - ); - response.check = this._parseAttribute("check", - compile.options.check, - {type: "string"}); - response.flags = this._parseAttribute("flags", - compile.options.flags, { - default: [], - type: "object" - } - ); + try { + response.compiler = this._parseAttribute( + 'compiler', + compile.options.compiler, + { + validValues: this.VALID_COMPILERS, + default: 'pdflatex', + type: 'string' + } + ) + response.timeout = this._parseAttribute( + 'timeout', + compile.options.timeout, + { + default: RequestParser.MAX_TIMEOUT, + type: 'number' + } + ) + response.imageName = this._parseAttribute( + 'imageName', + compile.options.imageName, + { type: 'string' } + ) + response.draft = this._parseAttribute('draft', compile.options.draft, { + default: false, + type: 'boolean' + }) + response.check = this._parseAttribute('check', compile.options.check, { + type: 'string' + }) + response.flags = this._parseAttribute('flags', compile.options.flags, { + default: [], + type: 'object' + }) - // The syncType specifies whether the request contains all - // resources (full) or only those resources to be updated - // in-place (incremental). 
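An editor-added sketch (not part of this patch) of a compile request body as RequestParser.parse expects it, illustrating the syncType/syncState options described in the comments just above; only the option names, defaults and limits come from the code, while the concrete values and any resource fields other than `path` are assumed.

// Illustrative request body for RequestParser.parse (values are made up).
const exampleCompileRequest = {
  compile: {
    options: {
      compiler: 'pdflatex',       // one of VALID_COMPILERS; default 'pdflatex'
      timeout: 60,                // seconds; capped at MAX_TIMEOUT (600), then converted to ms
      draft: false,
      syncType: 'incremental',    // 'full' resends everything, 'incremental' only changed resources
      syncState: 'abc123'         // must match the identifier stored by the last 'full' sync
    },
    rootResourcePath: 'main.tex', // default when omitted
    resources: [
      { path: 'main.tex', content: '\\documentclass{article}...' } // `content` field assumed here
    ]
  }
}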
- response.syncType = this._parseAttribute("syncType", - compile.options.syncType, { - validValues: ["full", "incremental"], - type: "string" - } - ); + // The syncType specifies whether the request contains all + // resources (full) or only those resources to be updated + // in-place (incremental). + response.syncType = this._parseAttribute( + 'syncType', + compile.options.syncType, + { + validValues: ['full', 'incremental'], + type: 'string' + } + ) - // The syncState is an identifier passed in with the request - // which has the property that it changes when any resource is - // added, deleted, moved or renamed. - // - // on syncType full the syncState identifier is passed in and - // stored - // - // on syncType incremental the syncState identifier must match - // the stored value - response.syncState = this._parseAttribute("syncState", - compile.options.syncState, - {type: "string"}); + // The syncState is an identifier passed in with the request + // which has the property that it changes when any resource is + // added, deleted, moved or renamed. + // + // on syncType full the syncState identifier is passed in and + // stored + // + // on syncType incremental the syncState identifier must match + // the stored value + response.syncState = this._parseAttribute( + 'syncState', + compile.options.syncState, + { type: 'string' } + ) - if (response.timeout > RequestParser.MAX_TIMEOUT) { - response.timeout = RequestParser.MAX_TIMEOUT; - } - response.timeout = response.timeout * 1000; // milliseconds + if (response.timeout > RequestParser.MAX_TIMEOUT) { + response.timeout = RequestParser.MAX_TIMEOUT + } + response.timeout = response.timeout * 1000 // milliseconds - response.resources = ((() => { - const result = []; - for (resource of Array.from((compile.resources || []))) { result.push(this._parseResource(resource)); - } - return result; - })()); + response.resources = (() => { + const result = [] + for (resource of Array.from(compile.resources || [])) { + result.push(this._parseResource(resource)) + } + return result + })() - const rootResourcePath = this._parseAttribute("rootResourcePath", - compile.rootResourcePath, { - default: "main.tex", - type: "string" - } - ); - const originalRootResourcePath = rootResourcePath; - const sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath); - response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath); + const rootResourcePath = this._parseAttribute( + 'rootResourcePath', + compile.rootResourcePath, + { + default: 'main.tex', + type: 'string' + } + ) + const originalRootResourcePath = rootResourcePath + const sanitizedRootResourcePath = RequestParser._sanitizePath( + rootResourcePath + ) + response.rootResourcePath = RequestParser._checkPath( + sanitizedRootResourcePath + ) - for (resource of Array.from(response.resources)) { - if (resource.path === originalRootResourcePath) { - resource.path = sanitizedRootResourcePath; - } - } - } catch (error1) { - const error = error1; - return callback(error); - } + for (resource of Array.from(response.resources)) { + if (resource.path === originalRootResourcePath) { + resource.path = sanitizedRootResourcePath + } + } + } catch (error1) { + const error = error1 + return callback(error) + } - return callback(null, response); - }, + return callback(null, response) + }, - _parseResource(resource) { - let modified; - if ((resource.path == null) || (typeof resource.path !== "string")) { - throw "all resources should have a path attribute"; - } + _parseResource(resource) { + 
let modified + if (resource.path == null || typeof resource.path !== 'string') { + throw 'all resources should have a path attribute' + } - if (resource.modified != null) { - modified = new Date(resource.modified); - if (isNaN(modified.getTime())) { - throw `resource modified date could not be understood: ${resource.modified}`; - } - } + if (resource.modified != null) { + modified = new Date(resource.modified) + if (isNaN(modified.getTime())) { + throw `resource modified date could not be understood: ${resource.modified}` + } + } - if ((resource.url == null) && (resource.content == null)) { - throw "all resources should have either a url or content attribute"; - } - if ((resource.content != null) && (typeof resource.content !== "string")) { - throw "content attribute should be a string"; - } - if ((resource.url != null) && (typeof resource.url !== "string")) { - throw "url attribute should be a string"; - } + if (resource.url == null && resource.content == null) { + throw 'all resources should have either a url or content attribute' + } + if (resource.content != null && typeof resource.content !== 'string') { + throw 'content attribute should be a string' + } + if (resource.url != null && typeof resource.url !== 'string') { + throw 'url attribute should be a string' + } - return { - path: resource.path, - modified, - url: resource.url, - content: resource.content - }; - }, + return { + path: resource.path, + modified, + url: resource.url, + content: resource.content + } + }, - _parseAttribute(name, attribute, options) { - if (attribute != null) { - if (options.validValues != null) { - if (options.validValues.indexOf(attribute) === -1) { - throw `${name} attribute should be one of: ${options.validValues.join(", ")}`; - } - } - if (options.type != null) { - if (typeof attribute !== options.type) { - throw `${name} attribute should be a ${options.type}`; - } - } - } else { - if (options.default != null) { return options.default; } - } - return attribute; - }, + _parseAttribute(name, attribute, options) { + if (attribute != null) { + if (options.validValues != null) { + if (options.validValues.indexOf(attribute) === -1) { + throw `${name} attribute should be one of: ${options.validValues.join( + ', ' + )}` + } + } + if (options.type != null) { + if (typeof attribute !== options.type) { + throw `${name} attribute should be a ${options.type}` + } + } + } else { + if (options.default != null) { + return options.default + } + } + return attribute + }, - _sanitizePath(path) { - // See http://php.net/manual/en/function.escapeshellcmd.php - return path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, ""); - }, + _sanitizePath(path) { + // See http://php.net/manual/en/function.escapeshellcmd.php + return path.replace( + /[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, + '' + ) + }, - _checkPath(path) { - // check that the request does not use a relative path - for (const dir of Array.from(path.split('/'))) { - if (dir === '..') { - throw "relative path in root resource"; - } - } - return path; - } -}); + _checkPath(path) { + // check that the request does not use a relative path + for (const dir of Array.from(path.split('/'))) { + if (dir === '..') { + throw 'relative path in root resource' + } + } + return path + } +} diff --git a/app/js/ResourceStateManager.js b/app/js/ResourceStateManager.js index 45cfdc6..5a5d811 100644 --- a/app/js/ResourceStateManager.js +++ b/app/js/ResourceStateManager.js @@ -13,102 +13,142 @@ * DS207: Consider shorter variations of null checks * Full 
docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let ResourceStateManager; -const Path = require("path"); -const fs = require("fs"); -const logger = require("logger-sharelatex"); -const settings = require("settings-sharelatex"); -const Errors = require("./Errors"); -const SafeReader = require("./SafeReader"); +let ResourceStateManager +const Path = require('path') +const fs = require('fs') +const logger = require('logger-sharelatex') +const settings = require('settings-sharelatex') +const Errors = require('./Errors') +const SafeReader = require('./SafeReader') -module.exports = (ResourceStateManager = { +module.exports = ResourceStateManager = { + // The sync state is an identifier which must match for an + // incremental update to be allowed. + // + // The initial value is passed in and stored on a full + // compile, along with the list of resources.. + // + // Subsequent incremental compiles must come with the same value - if + // not they will be rejected with a 409 Conflict response. The + // previous list of resources is returned. + // + // An incremental compile can only update existing files with new + // content. The sync state identifier must change if any docs or + // files are moved, added, deleted or renamed. - // The sync state is an identifier which must match for an - // incremental update to be allowed. - // - // The initial value is passed in and stored on a full - // compile, along with the list of resources.. - // - // Subsequent incremental compiles must come with the same value - if - // not they will be rejected with a 409 Conflict response. The - // previous list of resources is returned. - // - // An incremental compile can only update existing files with new - // content. The sync state identifier must change if any docs or - // files are moved, added, deleted or renamed. 
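Concretely, saveProjectState below writes one resource path per line followed by a stateHash line, and checkProjectStateMatches reads the file back and compares its last line. A sketch of the resulting file contents for a hypothetical two-file project:

// What .project-sync-state would contain after a full sync (invented values)
const resourceList = ['main.tex', 'images/logo.png']
const state = 'abc123'
const fileContents = [...resourceList, `stateHash:${state}`].join('\n')
// fileContents === 'main.tex\nimages/logo.png\nstateHash:abc123'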
+ SYNC_STATE_FILE: '.project-sync-state', + SYNC_STATE_MAX_SIZE: 128 * 1024, - SYNC_STATE_FILE: ".project-sync-state", - SYNC_STATE_MAX_SIZE: 128*1024, + saveProjectState(state, resources, basePath, callback) { + if (callback == null) { + callback = function(error) {} + } + const stateFile = Path.join(basePath, this.SYNC_STATE_FILE) + if (state == null) { + // remove the file if no state passed in + logger.log({ state, basePath }, 'clearing sync state') + return fs.unlink(stateFile, function(err) { + if (err != null && err.code !== 'ENOENT') { + return callback(err) + } else { + return callback() + } + }) + } else { + logger.log({ state, basePath }, 'writing sync state') + const resourceList = Array.from(resources).map(resource => resource.path) + return fs.writeFile( + stateFile, + [...Array.from(resourceList), `stateHash:${state}`].join('\n'), + callback + ) + } + }, - saveProjectState(state, resources, basePath, callback) { - if (callback == null) { callback = function(error) {}; } - const stateFile = Path.join(basePath, this.SYNC_STATE_FILE); - if ((state == null)) { // remove the file if no state passed in - logger.log({state, basePath}, "clearing sync state"); - return fs.unlink(stateFile, function(err) { - if ((err != null) && (err.code !== 'ENOENT')) { - return callback(err); - } else { - return callback(); - } - }); - } else { - logger.log({state, basePath}, "writing sync state"); - const resourceList = (Array.from(resources).map((resource) => resource.path)); - return fs.writeFile(stateFile, [...Array.from(resourceList), `stateHash:${state}`].join("\n"), callback); - } - }, + checkProjectStateMatches(state, basePath, callback) { + if (callback == null) { + callback = function(error, resources) {} + } + const stateFile = Path.join(basePath, this.SYNC_STATE_FILE) + const size = this.SYNC_STATE_MAX_SIZE + return SafeReader.readFile(stateFile, size, 'utf8', function( + err, + result, + bytesRead + ) { + if (err != null) { + return callback(err) + } + if (bytesRead === size) { + logger.error( + { file: stateFile, size, bytesRead }, + 'project state file truncated' + ) + } + const array = + __guard__(result != null ? result.toString() : undefined, x => + x.split('\n') + ) || [] + const adjustedLength = Math.max(array.length, 1) + const resourceList = array.slice(0, adjustedLength - 1) + const oldState = array[adjustedLength - 1] + const newState = `stateHash:${state}` + logger.log( + { state, oldState, basePath, stateMatches: newState === oldState }, + 'checking sync state' + ) + if (newState !== oldState) { + return callback( + new Errors.FilesOutOfSyncError('invalid state for incremental update') + ) + } else { + const resources = Array.from(resourceList).map(path => ({ path })) + return callback(null, resources) + } + }) + }, - checkProjectStateMatches(state, basePath, callback) { - if (callback == null) { callback = function(error, resources) {}; } - const stateFile = Path.join(basePath, this.SYNC_STATE_FILE); - const size = this.SYNC_STATE_MAX_SIZE; - return SafeReader.readFile(stateFile, size, 'utf8', function(err, result, bytesRead) { - if (err != null) { return callback(err); } - if (bytesRead === size) { - logger.error({file:stateFile, size, bytesRead}, "project state file truncated"); - } - const array = __guard__(result != null ? 
result.toString() : undefined, x => x.split("\n")) || []; - const adjustedLength = Math.max(array.length, 1); - const resourceList = array.slice(0, adjustedLength - 1); - const oldState = array[adjustedLength - 1]; - const newState = `stateHash:${state}`; - logger.log({state, oldState, basePath, stateMatches: (newState === oldState)}, "checking sync state"); - if (newState !== oldState) { - return callback(new Errors.FilesOutOfSyncError("invalid state for incremental update")); - } else { - const resources = (Array.from(resourceList).map((path) => ({path}))); - return callback(null, resources); - } - }); - }, - - checkResourceFiles(resources, allFiles, basePath, callback) { - // check the paths are all relative to current directory - let file; - if (callback == null) { callback = function(error) {}; } - for (file of Array.from(resources || [])) { - for (const dir of Array.from(__guard__(file != null ? file.path : undefined, x => x.split('/')))) { - if (dir === '..') { - return callback(new Error("relative path in resource file list")); - } - } - } - // check if any of the input files are not present in list of files - const seenFile = {}; - for (file of Array.from(allFiles)) { - seenFile[file] = true; - } - const missingFiles = (Array.from(resources).filter((resource) => !seenFile[resource.path]).map((resource) => resource.path)); - if ((missingFiles != null ? missingFiles.length : undefined) > 0) { - logger.err({missingFiles, basePath, allFiles, resources}, "missing input files for project"); - return callback(new Errors.FilesOutOfSyncError("resource files missing in incremental update")); - } else { - return callback(); - } - } -}); + checkResourceFiles(resources, allFiles, basePath, callback) { + // check the paths are all relative to current directory + let file + if (callback == null) { + callback = function(error) {} + } + for (file of Array.from(resources || [])) { + for (const dir of Array.from( + __guard__(file != null ? file.path : undefined, x => x.split('/')) + )) { + if (dir === '..') { + return callback(new Error('relative path in resource file list')) + } + } + } + // check if any of the input files are not present in list of files + const seenFile = {} + for (file of Array.from(allFiles)) { + seenFile[file] = true + } + const missingFiles = Array.from(resources) + .filter(resource => !seenFile[resource.path]) + .map(resource => resource.path) + if ((missingFiles != null ? missingFiles.length : undefined) > 0) { + logger.err( + { missingFiles, basePath, allFiles, resources }, + 'missing input files for project' + ) + return callback( + new Errors.FilesOutOfSyncError( + 'resource files missing in incremental update' + ) + ) + } else { + return callback() + } + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/app/js/ResourceWriter.js b/app/js/ResourceWriter.js index 028fc53..ba9706b 100644 --- a/app/js/ResourceWriter.js +++ b/app/js/ResourceWriter.js @@ -14,202 +14,339 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let ResourceWriter; -const UrlCache = require("./UrlCache"); -const Path = require("path"); -const fs = require("fs"); -const async = require("async"); -const mkdirp = require("mkdirp"); -const OutputFileFinder = require("./OutputFileFinder"); -const ResourceStateManager = require("./ResourceStateManager"); -const Metrics = require("./Metrics"); -const logger = require("logger-sharelatex"); -const settings = require("settings-sharelatex"); +let ResourceWriter +const UrlCache = require('./UrlCache') +const Path = require('path') +const fs = require('fs') +const async = require('async') +const mkdirp = require('mkdirp') +const OutputFileFinder = require('./OutputFileFinder') +const ResourceStateManager = require('./ResourceStateManager') +const Metrics = require('./Metrics') +const logger = require('logger-sharelatex') +const settings = require('settings-sharelatex') -const parallelFileDownloads = settings.parallelFileDownloads || 1; +const parallelFileDownloads = settings.parallelFileDownloads || 1 -module.exports = (ResourceWriter = { +module.exports = ResourceWriter = { + syncResourcesToDisk(request, basePath, callback) { + if (callback == null) { + callback = function(error, resourceList) {} + } + if (request.syncType === 'incremental') { + logger.log( + { project_id: request.project_id, user_id: request.user_id }, + 'incremental sync' + ) + return ResourceStateManager.checkProjectStateMatches( + request.syncState, + basePath, + function(error, resourceList) { + if (error != null) { + return callback(error) + } + return ResourceWriter._removeExtraneousFiles( + resourceList, + basePath, + function(error, outputFiles, allFiles) { + if (error != null) { + return callback(error) + } + return ResourceStateManager.checkResourceFiles( + resourceList, + allFiles, + basePath, + function(error) { + if (error != null) { + return callback(error) + } + return ResourceWriter.saveIncrementalResourcesToDisk( + request.project_id, + request.resources, + basePath, + function(error) { + if (error != null) { + return callback(error) + } + return callback(null, resourceList) + } + ) + } + ) + } + ) + } + ) + } else { + logger.log( + { project_id: request.project_id, user_id: request.user_id }, + 'full sync' + ) + return this.saveAllResourcesToDisk( + request.project_id, + request.resources, + basePath, + function(error) { + if (error != null) { + return callback(error) + } + return ResourceStateManager.saveProjectState( + request.syncState, + request.resources, + basePath, + function(error) { + if (error != null) { + return callback(error) + } + return callback(null, request.resources) + } + ) + } + ) + } + }, - syncResourcesToDisk(request, basePath, callback) { - if (callback == null) { callback = function(error, resourceList) {}; } - if (request.syncType === "incremental") { - logger.log({project_id: request.project_id, user_id: request.user_id}, "incremental sync"); - return ResourceStateManager.checkProjectStateMatches(request.syncState, basePath, function(error, resourceList) { - if (error != null) { return callback(error); } - return ResourceWriter._removeExtraneousFiles(resourceList, basePath, function(error, outputFiles, allFiles) { - if (error != 
null) { return callback(error); } - return ResourceStateManager.checkResourceFiles(resourceList, allFiles, basePath, function(error) { - if (error != null) { return callback(error); } - return ResourceWriter.saveIncrementalResourcesToDisk(request.project_id, request.resources, basePath, function(error) { - if (error != null) { return callback(error); } - return callback(null, resourceList); - }); - }); - }); - }); - } else { - logger.log({project_id: request.project_id, user_id: request.user_id}, "full sync"); - return this.saveAllResourcesToDisk(request.project_id, request.resources, basePath, function(error) { - if (error != null) { return callback(error); } - return ResourceStateManager.saveProjectState(request.syncState, request.resources, basePath, function(error) { - if (error != null) { return callback(error); } - return callback(null, request.resources); - }); - }); - } - }, + saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) { + if (callback == null) { + callback = function(error) {} + } + return this._createDirectory(basePath, error => { + if (error != null) { + return callback(error) + } + const jobs = Array.from(resources).map(resource => + (resource => { + return callback => + this._writeResourceToDisk(project_id, resource, basePath, callback) + })(resource) + ) + return async.parallelLimit(jobs, parallelFileDownloads, callback) + }) + }, - saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) { - if (callback == null) { callback = function(error) {}; } - return this._createDirectory(basePath, error => { - if (error != null) { return callback(error); } - const jobs = Array.from(resources).map((resource) => - (resource => { - return callback => this._writeResourceToDisk(project_id, resource, basePath, callback); - })(resource)); - return async.parallelLimit(jobs, parallelFileDownloads, callback); - }); - }, + saveAllResourcesToDisk(project_id, resources, basePath, callback) { + if (callback == null) { + callback = function(error) {} + } + return this._createDirectory(basePath, error => { + if (error != null) { + return callback(error) + } + return this._removeExtraneousFiles(resources, basePath, error => { + if (error != null) { + return callback(error) + } + const jobs = Array.from(resources).map(resource => + (resource => { + return callback => + this._writeResourceToDisk( + project_id, + resource, + basePath, + callback + ) + })(resource) + ) + return async.parallelLimit(jobs, parallelFileDownloads, callback) + }) + }) + }, - saveAllResourcesToDisk(project_id, resources, basePath, callback) { - if (callback == null) { callback = function(error) {}; } - return this._createDirectory(basePath, error => { - if (error != null) { return callback(error); } - return this._removeExtraneousFiles(resources, basePath, error => { - if (error != null) { return callback(error); } - const jobs = Array.from(resources).map((resource) => - (resource => { - return callback => this._writeResourceToDisk(project_id, resource, basePath, callback); - })(resource)); - return async.parallelLimit(jobs, parallelFileDownloads, callback); - }); - }); - }, + _createDirectory(basePath, callback) { + if (callback == null) { + callback = function(error) {} + } + return fs.mkdir(basePath, function(err) { + if (err != null) { + if (err.code === 'EEXIST') { + return callback() + } else { + logger.log({ err, dir: basePath }, 'error creating directory') + return callback(err) + } + } else { + return callback() + } + }) + }, - _createDirectory(basePath, callback) { - if 
(callback == null) { callback = function(error) {}; } - return fs.mkdir(basePath, function(err) { - if (err != null) { - if (err.code === 'EEXIST') { - return callback(); - } else { - logger.log({err, dir:basePath}, "error creating directory"); - return callback(err); - } - } else { - return callback(); - } - }); - }, + _removeExtraneousFiles(resources, basePath, _callback) { + if (_callback == null) { + _callback = function(error, outputFiles, allFiles) {} + } + const timer = new Metrics.Timer('unlink-output-files') + const callback = function(error, ...result) { + timer.done() + return _callback(error, ...Array.from(result)) + } - _removeExtraneousFiles(resources, basePath, _callback) { - if (_callback == null) { _callback = function(error, outputFiles, allFiles) {}; } - const timer = new Metrics.Timer("unlink-output-files"); - const callback = function(error, ...result) { - timer.done(); - return _callback(error, ...Array.from(result)); - }; + return OutputFileFinder.findOutputFiles(resources, basePath, function( + error, + outputFiles, + allFiles + ) { + if (error != null) { + return callback(error) + } - return OutputFileFinder.findOutputFiles(resources, basePath, function(error, outputFiles, allFiles) { - if (error != null) { return callback(error); } + const jobs = [] + for (const file of Array.from(outputFiles || [])) { + ;(function(file) { + const { path } = file + let should_delete = true + if ( + path.match(/^output\./) || + path.match(/\.aux$/) || + path.match(/^cache\//) + ) { + // knitr cache + should_delete = false + } + if (path.match(/^output-.*/)) { + // Tikz cached figures (default case) + should_delete = false + } + if (path.match(/\.(pdf|dpth|md5)$/)) { + // Tikz cached figures (by extension) + should_delete = false + } + if ( + path.match(/\.(pygtex|pygstyle)$/) || + path.match(/(^|\/)_minted-[^\/]+\//) + ) { + // minted files/directory + should_delete = false + } + if ( + path.match(/\.md\.tex$/) || + path.match(/(^|\/)_markdown_[^\/]+\//) + ) { + // markdown files/directory + should_delete = false + } + if (path.match(/-eps-converted-to\.pdf$/)) { + // Epstopdf generated files + should_delete = false + } + if ( + path === 'output.pdf' || + path === 'output.dvi' || + path === 'output.log' || + path === 'output.xdv' + ) { + should_delete = true + } + if (path === 'output.tex') { + // created by TikzManager if present in output files + should_delete = true + } + if (should_delete) { + return jobs.push(callback => + ResourceWriter._deleteFileIfNotDirectory( + Path.join(basePath, path), + callback + ) + ) + } + })(file) + } - const jobs = []; - for (const file of Array.from(outputFiles || [])) { - (function(file) { - const { path } = file; - let should_delete = true; - if (path.match(/^output\./) || path.match(/\.aux$/) || path.match(/^cache\//)) { // knitr cache - should_delete = false; - } - if (path.match(/^output-.*/)) { // Tikz cached figures (default case) - should_delete = false; - } - if (path.match(/\.(pdf|dpth|md5)$/)) { // Tikz cached figures (by extension) - should_delete = false; - } - if (path.match(/\.(pygtex|pygstyle)$/) || path.match(/(^|\/)_minted-[^\/]+\//)) { // minted files/directory - should_delete = false; - } - if (path.match(/\.md\.tex$/) || path.match(/(^|\/)_markdown_[^\/]+\//)) { // markdown files/directory - should_delete = false; - } - if (path.match(/-eps-converted-to\.pdf$/)) { // Epstopdf generated files - should_delete = false; - } - if ((path === "output.pdf") || (path === "output.dvi") || (path === "output.log") || (path === 
"output.xdv")) { - should_delete = true; - } - if (path === "output.tex") { // created by TikzManager if present in output files - should_delete = true; - } - if (should_delete) { - return jobs.push(callback => ResourceWriter._deleteFileIfNotDirectory(Path.join(basePath, path), callback)); - } - })(file); - } + return async.series(jobs, function(error) { + if (error != null) { + return callback(error) + } + return callback(null, outputFiles, allFiles) + }) + }) + }, - return async.series(jobs, function(error) { - if (error != null) { return callback(error); } - return callback(null, outputFiles, allFiles); - }); - }); - }, + _deleteFileIfNotDirectory(path, callback) { + if (callback == null) { + callback = function(error) {} + } + return fs.stat(path, function(error, stat) { + if (error != null && error.code === 'ENOENT') { + return callback() + } else if (error != null) { + logger.err( + { err: error, path }, + 'error stating file in deleteFileIfNotDirectory' + ) + return callback(error) + } else if (stat.isFile()) { + return fs.unlink(path, function(error) { + if (error != null) { + logger.err( + { err: error, path }, + 'error removing file in deleteFileIfNotDirectory' + ) + return callback(error) + } else { + return callback() + } + }) + } else { + return callback() + } + }) + }, - _deleteFileIfNotDirectory(path, callback) { - if (callback == null) { callback = function(error) {}; } - return fs.stat(path, function(error, stat) { - if ((error != null) && (error.code === 'ENOENT')) { - return callback(); - } else if (error != null) { - logger.err({err: error, path}, "error stating file in deleteFileIfNotDirectory"); - return callback(error); - } else if (stat.isFile()) { - return fs.unlink(path, function(error) { - if (error != null) { - logger.err({err: error, path}, "error removing file in deleteFileIfNotDirectory"); - return callback(error); - } else { - return callback(); - } - }); - } else { - return callback(); - } - }); - }, + _writeResourceToDisk(project_id, resource, basePath, callback) { + if (callback == null) { + callback = function(error) {} + } + return ResourceWriter.checkPath(basePath, resource.path, function( + error, + path + ) { + if (error != null) { + return callback(error) + } + return mkdirp(Path.dirname(path), function(error) { + if (error != null) { + return callback(error) + } + // TODO: Don't overwrite file if it hasn't been modified + if (resource.url != null) { + return UrlCache.downloadUrlToFile( + project_id, + resource.url, + path, + resource.modified, + function(err) { + if (err != null) { + logger.err( + { + err, + project_id, + path, + resource_url: resource.url, + modified: resource.modified + }, + 'error downloading file for resources' + ) + } + return callback() + } + ) // try and continue compiling even if http resource can not be downloaded at this time + } else { + const process = require('process') + fs.writeFile(path, resource.content, callback) + try { + let result + return (result = fs.lstatSync(path)) + } catch (e) {} + } + }) + }) + }, - _writeResourceToDisk(project_id, resource, basePath, callback) { - if (callback == null) { callback = function(error) {}; } - return ResourceWriter.checkPath(basePath, resource.path, function(error, path) { - if (error != null) { return callback(error); } - return mkdirp(Path.dirname(path), function(error) { - if (error != null) { return callback(error); } - // TODO: Don't overwrite file if it hasn't been modified - if (resource.url != null) { - return UrlCache.downloadUrlToFile(project_id, resource.url, 
path, resource.modified, function(err){ - if (err != null) { - logger.err({err, project_id, path, resource_url:resource.url, modified:resource.modified}, "error downloading file for resources"); - } - return callback(); - }); // try and continue compiling even if http resource can not be downloaded at this time - } else { - const process = require("process"); - fs.writeFile(path, resource.content, callback); - try { - let result; - return result = fs.lstatSync(path); - } catch (e) {} - } - }); - }); - }, - - checkPath(basePath, resourcePath, callback) { - const path = Path.normalize(Path.join(basePath, resourcePath)); - if (path.slice(0, basePath.length + 1) !== (basePath + "/")) { - return callback(new Error("resource path is outside root directory")); - } else { - return callback(null, path); - } - } -}); + checkPath(basePath, resourcePath, callback) { + const path = Path.normalize(Path.join(basePath, resourcePath)) + if (path.slice(0, basePath.length + 1) !== basePath + '/') { + return callback(new Error('resource path is outside root directory')) + } else { + return callback(null, path) + } + } +} diff --git a/app/js/SafeReader.js b/app/js/SafeReader.js index 2fd599b..d909e37 100644 --- a/app/js/SafeReader.js +++ b/app/js/SafeReader.js @@ -12,36 +12,49 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let SafeReader; -const fs = require("fs"); -const logger = require("logger-sharelatex"); +let SafeReader +const fs = require('fs') +const logger = require('logger-sharelatex') -module.exports = (SafeReader = { +module.exports = SafeReader = { + // safely read up to size bytes from a file and return result as a + // string - // safely read up to size bytes from a file and return result as a - // string + readFile(file, size, encoding, callback) { + if (callback == null) { + callback = function(error, result) {} + } + return fs.open(file, 'r', function(err, fd) { + if (err != null && err.code === 'ENOENT') { + return callback() + } + if (err != null) { + return callback(err) + } - readFile(file, size, encoding, callback) { - if (callback == null) { callback = function(error, result) {}; } - return fs.open(file, 'r', function(err, fd) { - if ((err != null) && (err.code === 'ENOENT')) { return callback(); } - if (err != null) { return callback(err); } - - // safely return always closing the file - const callbackWithClose = (err, ...result) => - fs.close(fd, function(err1) { - if (err != null) { return callback(err); } - if (err1 != null) { return callback(err1); } - return callback(null, ...Array.from(result)); - }) - ; - - const buff = new Buffer(size, 0); // fill with zeros - return fs.read(fd, buff, 0, buff.length, 0, function(err, bytesRead, buffer) { - if (err != null) { return callbackWithClose(err); } - const result = buffer.toString(encoding, 0, bytesRead); - return callbackWithClose(null, result, bytesRead); - }); - }); - } -}); + // safely return always closing the file + const callbackWithClose = (err, ...result) => + fs.close(fd, function(err1) { + if (err != null) { + return callback(err) + } + if (err1 != null) { + return callback(err1) + } + return callback(null, ...Array.from(result)) + }) + const buff = new Buffer(size, 0) // fill with zeros + return fs.read(fd, buff, 0, buff.length, 0, function( + err, + bytesRead, + buffer + ) { + if (err != null) { + return callbackWithClose(err) + } + const result = buffer.toString(encoding, 0, bytesRead) + return callbackWithClose(null, 
result, bytesRead) + }) + }) + } +} diff --git a/app/js/StaticServerForbidSymlinks.js b/app/js/StaticServerForbidSymlinks.js index 8ac3e48..999ae20 100644 --- a/app/js/StaticServerForbidSymlinks.js +++ b/app/js/StaticServerForbidSymlinks.js @@ -14,59 +14,81 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let ForbidSymlinks; -const Path = require("path"); -const fs = require("fs"); -const Settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); -const url = require("url"); +let ForbidSymlinks +const Path = require('path') +const fs = require('fs') +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +const url = require('url') -module.exports = (ForbidSymlinks = function(staticFn, root, options) { - const expressStatic = staticFn(root, options); - const basePath = Path.resolve(root); - return function(req, res, next) { - let file, project_id, result; - const path = __guard__(url.parse(req.url), x => x.pathname); - // check that the path is of the form /project_id_or_name/path/to/file.log - if (result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/)) { - project_id = result[1]; - file = result[2]; - } else { - logger.warn({path}, "unrecognized file request"); - return res.sendStatus(404); - } - // check that the file does not use a relative path - for (const dir of Array.from(file.split('/'))) { - if (dir === '..') { - logger.warn({path}, "attempt to use a relative path"); - return res.sendStatus(404); - } - } - // check that the requested path is normalized - const requestedFsPath = `${basePath}/${project_id}/${file}`; - if (requestedFsPath !== Path.normalize(requestedFsPath)) { - logger.error({path: requestedFsPath}, "requestedFsPath is not normalized"); - return res.sendStatus(404); - } - // check that the requested path is not a symlink - return fs.realpath(requestedFsPath, function(err, realFsPath){ - if (err != null) { - if (err.code === 'ENOENT') { - return res.sendStatus(404); - } else { - logger.error({err, requestedFsPath, realFsPath, path: req.params[0], project_id: req.params.project_id}, "error checking file access"); - return res.sendStatus(500); - } - } else if (requestedFsPath !== realFsPath) { - logger.warn({requestedFsPath, realFsPath, path: req.params[0], project_id: req.params.project_id}, "trying to access a different file (symlink), aborting"); - return res.sendStatus(404); - } else { - return expressStatic(req, res, next); - } - }); - }; -}); +module.exports = ForbidSymlinks = function(staticFn, root, options) { + const expressStatic = staticFn(root, options) + const basePath = Path.resolve(root) + return function(req, res, next) { + let file, project_id, result + const path = __guard__(url.parse(req.url), x => x.pathname) + // check that the path is of the form /project_id_or_name/path/to/file.log + if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) { + project_id = result[1] + file = result[2] + } else { + logger.warn({ path }, 'unrecognized file request') + return res.sendStatus(404) + } + // check that the file does not use a relative path + for (const dir of Array.from(file.split('/'))) { + if (dir === '..') { + logger.warn({ path }, 'attempt to use a relative path') + return res.sendStatus(404) + } + } + // check that the requested path is normalized + const requestedFsPath = `${basePath}/${project_id}/${file}` + if (requestedFsPath !== Path.normalize(requestedFsPath)) { + logger.error( + { 
path: requestedFsPath }, + 'requestedFsPath is not normalized' + ) + return res.sendStatus(404) + } + // check that the requested path is not a symlink + return fs.realpath(requestedFsPath, function(err, realFsPath) { + if (err != null) { + if (err.code === 'ENOENT') { + return res.sendStatus(404) + } else { + logger.error( + { + err, + requestedFsPath, + realFsPath, + path: req.params[0], + project_id: req.params.project_id + }, + 'error checking file access' + ) + return res.sendStatus(500) + } + } else if (requestedFsPath !== realFsPath) { + logger.warn( + { + requestedFsPath, + realFsPath, + path: req.params[0], + project_id: req.params.project_id + }, + 'trying to access a different file (symlink), aborting' + ) + return res.sendStatus(404) + } else { + return expressStatic(req, res, next) + } + }) + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/app/js/TikzManager.js b/app/js/TikzManager.js index 9fa4a93..3c57873 100644 --- a/app/js/TikzManager.js +++ b/app/js/TikzManager.js @@ -11,52 +11,84 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let TikzManager; -const fs = require("fs"); -const Path = require("path"); -const ResourceWriter = require("./ResourceWriter"); -const SafeReader = require("./SafeReader"); -const logger = require("logger-sharelatex"); +let TikzManager +const fs = require('fs') +const Path = require('path') +const ResourceWriter = require('./ResourceWriter') +const SafeReader = require('./SafeReader') +const logger = require('logger-sharelatex') // for \tikzexternalize or pstool to work the main file needs to match the // jobname. Since we set the -jobname to output, we have to create a // copy of the main file as 'output.tex'. -module.exports = (TikzManager = { +module.exports = TikzManager = { + checkMainFile(compileDir, mainFile, resources, callback) { + // if there's already an output.tex file, we don't want to touch it + if (callback == null) { + callback = function(error, needsMainFile) {} + } + for (const resource of Array.from(resources)) { + if (resource.path === 'output.tex') { + logger.log({ compileDir, mainFile }, 'output.tex already in resources') + return callback(null, false) + } + } + // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file + return ResourceWriter.checkPath(compileDir, mainFile, function( + error, + path + ) { + if (error != null) { + return callback(error) + } + return SafeReader.readFile(path, 65536, 'utf8', function(error, content) { + if (error != null) { + return callback(error) + } + const usesTikzExternalize = + (content != null + ? content.indexOf('\\tikzexternalize') + : undefined) >= 0 + const usesPsTool = + (content != null ? 
content.indexOf('{pstool}') : undefined) >= 0 + logger.log( + { compileDir, mainFile, usesTikzExternalize, usesPsTool }, + 'checked for packages needing main file as output.tex' + ) + const needsMainFile = usesTikzExternalize || usesPsTool + return callback(null, needsMainFile) + }) + }) + }, - checkMainFile(compileDir, mainFile, resources, callback) { - // if there's already an output.tex file, we don't want to touch it - if (callback == null) { callback = function(error, needsMainFile) {}; } - for (const resource of Array.from(resources)) { - if (resource.path === "output.tex") { - logger.log({compileDir, mainFile}, "output.tex already in resources"); - return callback(null, false); - } - } - // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file - return ResourceWriter.checkPath(compileDir, mainFile, function(error, path) { - if (error != null) { return callback(error); } - return SafeReader.readFile(path, 65536, "utf8", function(error, content) { - if (error != null) { return callback(error); } - const usesTikzExternalize = (content != null ? content.indexOf("\\tikzexternalize") : undefined) >= 0; - const usesPsTool = (content != null ? content.indexOf("{pstool}") : undefined) >= 0; - logger.log({compileDir, mainFile, usesTikzExternalize, usesPsTool}, "checked for packages needing main file as output.tex"); - const needsMainFile = (usesTikzExternalize || usesPsTool); - return callback(null, needsMainFile); - }); - }); - }, - - injectOutputFile(compileDir, mainFile, callback) { - if (callback == null) { callback = function(error) {}; } - return ResourceWriter.checkPath(compileDir, mainFile, function(error, path) { - if (error != null) { return callback(error); } - return fs.readFile(path, "utf8", function(error, content) { - if (error != null) { return callback(error); } - logger.log({compileDir, mainFile}, "copied file to output.tex as project uses packages which require it"); - // use wx flag to ensure that output file does not already exist - return fs.writeFile(Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback); - }); - }); - } -}); + injectOutputFile(compileDir, mainFile, callback) { + if (callback == null) { + callback = function(error) {} + } + return ResourceWriter.checkPath(compileDir, mainFile, function( + error, + path + ) { + if (error != null) { + return callback(error) + } + return fs.readFile(path, 'utf8', function(error, content) { + if (error != null) { + return callback(error) + } + logger.log( + { compileDir, mainFile }, + 'copied file to output.tex as project uses packages which require it' + ) + // use wx flag to ensure that output file does not already exist + return fs.writeFile( + Path.join(compileDir, 'output.tex'), + content, + { flag: 'wx' }, + callback + ) + }) + }) + } +} diff --git a/app/js/UrlCache.js b/app/js/UrlCache.js index ade815b..babdf9c 100644 --- a/app/js/UrlCache.js +++ b/app/js/UrlCache.js @@ -12,185 +12,267 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let UrlCache; -const db = require("./db"); -const dbQueue = require("./DbQueue"); -const UrlFetcher = require("./UrlFetcher"); -const Settings = require("settings-sharelatex"); -const crypto = require("crypto"); -const fs = require("fs"); -const logger = require("logger-sharelatex"); -const async = require("async"); +let UrlCache +const db = require('./db') +const dbQueue = require('./DbQueue') +const UrlFetcher = require('./UrlFetcher') +const 
Settings = require('settings-sharelatex') +const crypto = require('crypto') +const fs = require('fs') +const logger = require('logger-sharelatex') +const async = require('async') -module.exports = (UrlCache = { - downloadUrlToFile(project_id, url, destPath, lastModified, callback) { - if (callback == null) { callback = function(error) {}; } - return UrlCache._ensureUrlIsInCache(project_id, url, lastModified, (error, pathToCachedUrl) => { - if (error != null) { return callback(error); } - return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) { - if (error != null) { - return UrlCache._clearUrlDetails(project_id, url, () => callback(error)); - } else { - return callback(error); - } - }); - }); - }, +module.exports = UrlCache = { + downloadUrlToFile(project_id, url, destPath, lastModified, callback) { + if (callback == null) { + callback = function(error) {} + } + return UrlCache._ensureUrlIsInCache( + project_id, + url, + lastModified, + (error, pathToCachedUrl) => { + if (error != null) { + return callback(error) + } + return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) { + if (error != null) { + return UrlCache._clearUrlDetails(project_id, url, () => + callback(error) + ) + } else { + return callback(error) + } + }) + } + ) + }, - clearProject(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - return UrlCache._findAllUrlsInProject(project_id, function(error, urls) { - logger.log({project_id, url_count: urls.length}, "clearing project URLs"); - if (error != null) { return callback(error); } - const jobs = (Array.from(urls || [])).map((url) => - (url => - callback => - UrlCache._clearUrlFromCache(project_id, url, function(error) { - if (error != null) { - logger.error({err: error, project_id, url}, "error clearing project URL"); - } - return callback(); - }) - - )(url)); - return async.series(jobs, callback); - }); - }, + clearProject(project_id, callback) { + if (callback == null) { + callback = function(error) {} + } + return UrlCache._findAllUrlsInProject(project_id, function(error, urls) { + logger.log( + { project_id, url_count: urls.length }, + 'clearing project URLs' + ) + if (error != null) { + return callback(error) + } + const jobs = Array.from(urls || []).map(url => + (url => callback => + UrlCache._clearUrlFromCache(project_id, url, function(error) { + if (error != null) { + logger.error( + { err: error, project_id, url }, + 'error clearing project URL' + ) + } + return callback() + }))(url) + ) + return async.series(jobs, callback) + }) + }, - _ensureUrlIsInCache(project_id, url, lastModified, callback) { - if (callback == null) { callback = function(error, pathOnDisk) {}; } - if (lastModified != null) { - // MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds. 
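Rounding the incoming date down to whole seconds keeps the later comparison against the stored value stable. For example (timestamp invented):

const lastModified = new Date('2020-02-19T12:06:09.123Z')
const rounded = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
// rounded.toISOString() === '2020-02-19T12:06:09.000Z'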
- // So round down to seconds - lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000); - } - return UrlCache._doesUrlNeedDownloading(project_id, url, lastModified, (error, needsDownloading) => { - if (error != null) { return callback(error); } - if (needsDownloading) { - logger.log({url, lastModified}, "downloading URL"); - return UrlFetcher.pipeUrlToFile(url, UrlCache._cacheFilePathForUrl(project_id, url), error => { - if (error != null) { return callback(error); } - return UrlCache._updateOrCreateUrlDetails(project_id, url, lastModified, error => { - if (error != null) { return callback(error); } - return callback(null, UrlCache._cacheFilePathForUrl(project_id, url)); - }); - }); - } else { - logger.log({url, lastModified}, "URL is up to date in cache"); - return callback(null, UrlCache._cacheFilePathForUrl(project_id, url)); - } - }); - }, - - _doesUrlNeedDownloading(project_id, url, lastModified, callback) { - if (callback == null) { callback = function(error, needsDownloading) {}; } - if ((lastModified == null)) { - return callback(null, true); - } - return UrlCache._findUrlDetails(project_id, url, function(error, urlDetails) { - if (error != null) { return callback(error); } - if ((urlDetails == null) || (urlDetails.lastModified == null) || (urlDetails.lastModified.getTime() < lastModified.getTime())) { - return callback(null, true); - } else { - return callback(null, false); - } - }); - }, + _ensureUrlIsInCache(project_id, url, lastModified, callback) { + if (callback == null) { + callback = function(error, pathOnDisk) {} + } + if (lastModified != null) { + // MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds. + // So round down to seconds + lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000) + } + return UrlCache._doesUrlNeedDownloading( + project_id, + url, + lastModified, + (error, needsDownloading) => { + if (error != null) { + return callback(error) + } + if (needsDownloading) { + logger.log({ url, lastModified }, 'downloading URL') + return UrlFetcher.pipeUrlToFile( + url, + UrlCache._cacheFilePathForUrl(project_id, url), + error => { + if (error != null) { + return callback(error) + } + return UrlCache._updateOrCreateUrlDetails( + project_id, + url, + lastModified, + error => { + if (error != null) { + return callback(error) + } + return callback( + null, + UrlCache._cacheFilePathForUrl(project_id, url) + ) + } + ) + } + ) + } else { + logger.log({ url, lastModified }, 'URL is up to date in cache') + return callback(null, UrlCache._cacheFilePathForUrl(project_id, url)) + } + } + ) + }, - _cacheFileNameForUrl(project_id, url) { - return project_id + ":" + crypto.createHash("md5").update(url).digest("hex"); - }, + _doesUrlNeedDownloading(project_id, url, lastModified, callback) { + if (callback == null) { + callback = function(error, needsDownloading) {} + } + if (lastModified == null) { + return callback(null, true) + } + return UrlCache._findUrlDetails(project_id, url, function( + error, + urlDetails + ) { + if (error != null) { + return callback(error) + } + if ( + urlDetails == null || + urlDetails.lastModified == null || + urlDetails.lastModified.getTime() < lastModified.getTime() + ) { + return callback(null, true) + } else { + return callback(null, false) + } + }) + }, - _cacheFilePathForUrl(project_id, url) { - return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(project_id, url)}`; - }, + _cacheFileNameForUrl(project_id, url) { + return ( + project_id + + 
':' + + crypto + .createHash('md5') + .update(url) + .digest('hex') + ) + }, - _copyFile(from, to, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const callbackOnce = function(error) { - if (error != null) { - logger.error({err: error, from, to}, "error copying file from cache"); - } - _callback(error); - return _callback = function() {}; - }; - const writeStream = fs.createWriteStream(to); - const readStream = fs.createReadStream(from); - writeStream.on("error", callbackOnce); - readStream.on("error", callbackOnce); - writeStream.on("close", callbackOnce); - return writeStream.on("open", () => readStream.pipe(writeStream)); - }, + _cacheFilePathForUrl(project_id, url) { + return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl( + project_id, + url + )}` + }, - _clearUrlFromCache(project_id, url, callback) { - if (callback == null) { callback = function(error) {}; } - return UrlCache._clearUrlDetails(project_id, url, function(error) { - if (error != null) { return callback(error); } - return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) { - if (error != null) { return callback(error); } - return callback(null); - }); - }); - }, + _copyFile(from, to, _callback) { + if (_callback == null) { + _callback = function(error) {} + } + const callbackOnce = function(error) { + if (error != null) { + logger.error({ err: error, from, to }, 'error copying file from cache') + } + _callback(error) + return (_callback = function() {}) + } + const writeStream = fs.createWriteStream(to) + const readStream = fs.createReadStream(from) + writeStream.on('error', callbackOnce) + readStream.on('error', callbackOnce) + writeStream.on('close', callbackOnce) + return writeStream.on('open', () => readStream.pipe(writeStream)) + }, - _deleteUrlCacheFromDisk(project_id, url, callback) { - if (callback == null) { callback = function(error) {}; } - return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(error) { - if ((error != null) && (error.code !== 'ENOENT')) { // no error if the file isn't present - return callback(error); - } else { - return callback(); - } - }); - }, + _clearUrlFromCache(project_id, url, callback) { + if (callback == null) { + callback = function(error) {} + } + return UrlCache._clearUrlDetails(project_id, url, function(error) { + if (error != null) { + return callback(error) + } + return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) { + if (error != null) { + return callback(error) + } + return callback(null) + }) + }) + }, - _findUrlDetails(project_id, url, callback) { - if (callback == null) { callback = function(error, urlDetails) {}; } - const job = cb=> - db.UrlCache.find({where: { url, project_id }}) - .then(urlDetails => cb(null, urlDetails)) - .error(cb) - ; - return dbQueue.queue.push(job, callback); - }, + _deleteUrlCacheFromDisk(project_id, url, callback) { + if (callback == null) { + callback = function(error) {} + } + return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function( + error + ) { + if (error != null && error.code !== 'ENOENT') { + // no error if the file isn't present + return callback(error) + } else { + return callback() + } + }) + }, - _updateOrCreateUrlDetails(project_id, url, lastModified, callback) { - if (callback == null) { callback = function(error) {}; } - const job = cb=> - db.UrlCache.findOrCreate({where: {url, project_id}}) - .spread( - (urlDetails, created) => - urlDetails.updateAttributes({lastModified}) - .then(() => cb()) - .error(cb) - ) - 
.error(cb) - ; - return dbQueue.queue.push(job, callback); - }, + _findUrlDetails(project_id, url, callback) { + if (callback == null) { + callback = function(error, urlDetails) {} + } + const job = cb => + db.UrlCache.find({ where: { url, project_id } }) + .then(urlDetails => cb(null, urlDetails)) + .error(cb) + return dbQueue.queue.push(job, callback) + }, - _clearUrlDetails(project_id, url, callback) { - if (callback == null) { callback = function(error) {}; } - const job = cb=> - db.UrlCache.destroy({where: {url, project_id}}) - .then(() => cb(null)) - .error(cb) - ; - return dbQueue.queue.push(job, callback); - }, + _updateOrCreateUrlDetails(project_id, url, lastModified, callback) { + if (callback == null) { + callback = function(error) {} + } + const job = cb => + db.UrlCache.findOrCreate({ where: { url, project_id } }) + .spread((urlDetails, created) => + urlDetails + .updateAttributes({ lastModified }) + .then(() => cb()) + .error(cb) + ) + .error(cb) + return dbQueue.queue.push(job, callback) + }, + _clearUrlDetails(project_id, url, callback) { + if (callback == null) { + callback = function(error) {} + } + const job = cb => + db.UrlCache.destroy({ where: { url, project_id } }) + .then(() => cb(null)) + .error(cb) + return dbQueue.queue.push(job, callback) + }, - _findAllUrlsInProject(project_id, callback) { - if (callback == null) { callback = function(error, urls) {}; } - const job = cb=> - db.UrlCache.findAll({where: { project_id }}) - .then( - urlEntries => cb(null, urlEntries.map(entry => entry.url))) - .error(cb) - ; - return dbQueue.queue.push(job, callback); - } -}); - - - + _findAllUrlsInProject(project_id, callback) { + if (callback == null) { + callback = function(error, urls) {} + } + const job = cb => + db.UrlCache.findAll({ where: { project_id } }) + .then(urlEntries => + cb( + null, + urlEntries.map(entry => entry.url) + ) + ) + .error(cb) + return dbQueue.queue.push(job, callback) + } +} diff --git a/app/js/UrlFetcher.js b/app/js/UrlFetcher.js index fec397c..19c681c 100644 --- a/app/js/UrlFetcher.js +++ b/app/js/UrlFetcher.js @@ -12,85 +12,109 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let UrlFetcher; -const request = require("request").defaults({jar: false}); -const fs = require("fs"); -const logger = require("logger-sharelatex"); -const settings = require("settings-sharelatex"); -const URL = require('url'); +let UrlFetcher +const request = require('request').defaults({ jar: false }) +const fs = require('fs') +const logger = require('logger-sharelatex') +const settings = require('settings-sharelatex') +const URL = require('url') -const oneMinute = 60 * 1000; +const oneMinute = 60 * 1000 -module.exports = (UrlFetcher = { - pipeUrlToFile(url, filePath, _callback) { - if (_callback == null) { _callback = function(error) {}; } - const callbackOnce = function(error) { - if (timeoutHandler != null) { clearTimeout(timeoutHandler); } - _callback(error); - return _callback = function() {}; - }; +module.exports = UrlFetcher = { + pipeUrlToFile(url, filePath, _callback) { + if (_callback == null) { + _callback = function(error) {} + } + const callbackOnce = function(error) { + if (timeoutHandler != null) { + clearTimeout(timeoutHandler) + } + _callback(error) + return (_callback = function() {}) + } - if (settings.filestoreDomainOveride != null) { - const p = URL.parse(url).path; - url = `${settings.filestoreDomainOveride}${p}`; - } - var timeoutHandler = 
setTimeout(function() { - timeoutHandler = null; - logger.error({url, filePath}, "Timed out downloading file to cache"); - return callbackOnce(new Error(`Timed out downloading file to cache ${url}`)); - } - // FIXME: maybe need to close fileStream here - , 3 * oneMinute); + if (settings.filestoreDomainOveride != null) { + const p = URL.parse(url).path + url = `${settings.filestoreDomainOveride}${p}` + } + var timeoutHandler = setTimeout( + function() { + timeoutHandler = null + logger.error({ url, filePath }, 'Timed out downloading file to cache') + return callbackOnce( + new Error(`Timed out downloading file to cache ${url}`) + ) + }, + // FIXME: maybe need to close fileStream here + 3 * oneMinute + ) - logger.log({url, filePath}, "started downloading url to cache"); - const urlStream = request.get({url, timeout: oneMinute}); - urlStream.pause(); // stop data flowing until we are ready + logger.log({ url, filePath }, 'started downloading url to cache') + const urlStream = request.get({ url, timeout: oneMinute }) + urlStream.pause() // stop data flowing until we are ready - // attach handlers before setting up pipes - urlStream.on("error", function(error) { - logger.error({err: error, url, filePath}, "error downloading url"); - return callbackOnce(error || new Error(`Something went wrong downloading the URL ${url}`)); - }); + // attach handlers before setting up pipes + urlStream.on('error', function(error) { + logger.error({ err: error, url, filePath }, 'error downloading url') + return callbackOnce( + error || new Error(`Something went wrong downloading the URL ${url}`) + ) + }) - urlStream.on("end", () => logger.log({url, filePath}, "finished downloading file into cache")); + urlStream.on('end', () => + logger.log({ url, filePath }, 'finished downloading file into cache') + ) - return urlStream.on("response", function(res) { - if ((res.statusCode >= 200) && (res.statusCode < 300)) { - const fileStream = fs.createWriteStream(filePath); + return urlStream.on('response', function(res) { + if (res.statusCode >= 200 && res.statusCode < 300) { + const fileStream = fs.createWriteStream(filePath) - // attach handlers before setting up pipes - fileStream.on('error', function(error) { - logger.error({err: error, url, filePath}, "error writing file into cache"); - return fs.unlink(filePath, function(err) { - if (err != null) { - logger.err({err, filePath}, "error deleting file from cache"); - } - return callbackOnce(error); - }); - }); + // attach handlers before setting up pipes + fileStream.on('error', function(error) { + logger.error( + { err: error, url, filePath }, + 'error writing file into cache' + ) + return fs.unlink(filePath, function(err) { + if (err != null) { + logger.err({ err, filePath }, 'error deleting file from cache') + } + return callbackOnce(error) + }) + }) - fileStream.on('finish', function() { - logger.log({url, filePath}, "finished writing file into cache"); - return callbackOnce(); - }); + fileStream.on('finish', function() { + logger.log({ url, filePath }, 'finished writing file into cache') + return callbackOnce() + }) - fileStream.on('pipe', () => logger.log({url, filePath}, "piping into filestream")); + fileStream.on('pipe', () => + logger.log({ url, filePath }, 'piping into filestream') + ) - urlStream.pipe(fileStream); - return urlStream.resume(); // now we are ready to handle the data - } else { - logger.error({statusCode: res.statusCode, url, filePath}, "unexpected status code downloading url to cache"); - // 
https://nodejs.org/api/http.html#http_class_http_clientrequest - // If you add a 'response' event handler, then you must consume - // the data from the response object, either by calling - // response.read() whenever there is a 'readable' event, or by - // adding a 'data' handler, or by calling the .resume() - // method. Until the data is consumed, the 'end' event will not - // fire. Also, until the data is read it will consume memory - // that can eventually lead to a 'process out of memory' error. - urlStream.resume(); // discard the data - return callbackOnce(new Error(`URL returned non-success status code: ${res.statusCode} ${url}`)); - } - }); - } -}); + urlStream.pipe(fileStream) + return urlStream.resume() // now we are ready to handle the data + } else { + logger.error( + { statusCode: res.statusCode, url, filePath }, + 'unexpected status code downloading url to cache' + ) + // https://nodejs.org/api/http.html#http_class_http_clientrequest + // If you add a 'response' event handler, then you must consume + // the data from the response object, either by calling + // response.read() whenever there is a 'readable' event, or by + // adding a 'data' handler, or by calling the .resume() + // method. Until the data is consumed, the 'end' event will not + // fire. Also, until the data is read it will consume memory + // that can eventually lead to a 'process out of memory' error. + urlStream.resume() // discard the data + return callbackOnce( + new Error( + `URL returned non-success status code: ${res.statusCode} ${url}` + ) + ) + } + }) + } +} diff --git a/app/js/db.js b/app/js/db.js index c5dd980..c749af2 100644 --- a/app/js/db.js +++ b/app/js/db.js @@ -8,57 +8,60 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Sequelize = require("sequelize"); -const Settings = require("settings-sharelatex"); -const _ = require("underscore"); -const logger = require("logger-sharelatex"); +const Sequelize = require('sequelize') +const Settings = require('settings-sharelatex') +const _ = require('underscore') +const logger = require('logger-sharelatex') -const options = _.extend({logging:false}, Settings.mysql.clsi); +const options = _.extend({ logging: false }, Settings.mysql.clsi) -logger.log({dbPath:Settings.mysql.clsi.storage}, "connecting to db"); +logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db') const sequelize = new Sequelize( - Settings.mysql.clsi.database, - Settings.mysql.clsi.username, - Settings.mysql.clsi.password, - options -); + Settings.mysql.clsi.database, + Settings.mysql.clsi.username, + Settings.mysql.clsi.password, + options +) -if (Settings.mysql.clsi.dialect === "sqlite") { - logger.log("running PRAGMA journal_mode=WAL;"); - sequelize.query("PRAGMA journal_mode=WAL;"); - sequelize.query("PRAGMA synchronous=OFF;"); - sequelize.query("PRAGMA read_uncommitted = true;"); +if (Settings.mysql.clsi.dialect === 'sqlite') { + logger.log('running PRAGMA journal_mode=WAL;') + sequelize.query('PRAGMA journal_mode=WAL;') + sequelize.query('PRAGMA synchronous=OFF;') + sequelize.query('PRAGMA read_uncommitted = true;') } module.exports = { - UrlCache: sequelize.define("UrlCache", { - url: Sequelize.STRING, - project_id: Sequelize.STRING, - lastModified: Sequelize.DATE - }, { - indexes: [ - {fields: ['url', 'project_id']}, - {fields: ['project_id']} - ] - }), + UrlCache: sequelize.define( + 'UrlCache', + { + url: Sequelize.STRING, + project_id: 
Sequelize.STRING, + lastModified: Sequelize.DATE + }, + { + indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }] + } + ), - Project: sequelize.define("Project", { - project_id: {type: Sequelize.STRING, primaryKey: true}, - lastAccessed: Sequelize.DATE - }, { - indexes: [ - {fields: ['lastAccessed']} - ] - }), + Project: sequelize.define( + 'Project', + { + project_id: { type: Sequelize.STRING, primaryKey: true }, + lastAccessed: Sequelize.DATE + }, + { + indexes: [{ fields: ['lastAccessed'] }] + } + ), - op: Sequelize.Op, - - sync() { - logger.log({dbPath:Settings.mysql.clsi.storage}, "syncing db schema"); - return sequelize.sync() - .then(() => logger.log("db sync complete")).catch(err=> console.log(err, "error syncing")); - } -}; + op: Sequelize.Op, - + sync() { + logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema') + return sequelize + .sync() + .then(() => logger.log('db sync complete')) + .catch(err => console.log(err, 'error syncing')) + } +} From 18e6b4715d6235f0408efbfdd9072a7f51c1ed69 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:14:56 +0100 Subject: [PATCH 10/24] decaffeinate: Rename CompileControllerTests.coffee and 17 other files from .coffee to .js --- .../{CompileControllerTests.coffee => CompileControllerTests.js} | 0 .../coffee/{CompileManagerTests.coffee => CompileManagerTests.js} | 0 .../{ContentTypeMapperTests.coffee => ContentTypeMapperTests.js} | 0 .../{DockerLockManagerTests.coffee => DockerLockManagerTests.js} | 0 .../coffee/{DockerRunnerTests.coffee => DockerRunnerTests.js} | 0 .../{DraftModeManagerTests.coffee => DraftModeManagerTests.js} | 0 test/unit/coffee/{LatexRunnerTests.coffee => LatexRunnerTests.js} | 0 test/unit/coffee/{LockManagerTests.coffee => LockManagerTests.js} | 0 .../{OutputFileFinderTests.coffee => OutputFileFinderTests.js} | 0 ...utputFileOptimiserTests.coffee => OutputFileOptimiserTests.js} | 0 ...tenceManagerTests.coffee => ProjectPersistenceManagerTests.js} | 0 .../coffee/{RequestParserTests.coffee => RequestParserTests.js} | 0 ...ourceStateManagerTests.coffee => ResourceStateManagerTests.js} | 0 .../coffee/{ResourceWriterTests.coffee => ResourceWriterTests.js} | 0 ...bidSymlinksTests.coffee => StaticServerForbidSymlinksTests.js} | 0 test/unit/coffee/{TikzManager.coffee => TikzManager.js} | 0 test/unit/coffee/{UrlCacheTests.coffee => UrlCacheTests.js} | 0 test/unit/coffee/{UrlFetcherTests.coffee => UrlFetcherTests.js} | 0 18 files changed, 0 insertions(+), 0 deletions(-) rename test/unit/coffee/{CompileControllerTests.coffee => CompileControllerTests.js} (100%) rename test/unit/coffee/{CompileManagerTests.coffee => CompileManagerTests.js} (100%) rename test/unit/coffee/{ContentTypeMapperTests.coffee => ContentTypeMapperTests.js} (100%) rename test/unit/coffee/{DockerLockManagerTests.coffee => DockerLockManagerTests.js} (100%) rename test/unit/coffee/{DockerRunnerTests.coffee => DockerRunnerTests.js} (100%) rename test/unit/coffee/{DraftModeManagerTests.coffee => DraftModeManagerTests.js} (100%) rename test/unit/coffee/{LatexRunnerTests.coffee => LatexRunnerTests.js} (100%) rename test/unit/coffee/{LockManagerTests.coffee => LockManagerTests.js} (100%) rename test/unit/coffee/{OutputFileFinderTests.coffee => OutputFileFinderTests.js} (100%) rename test/unit/coffee/{OutputFileOptimiserTests.coffee => OutputFileOptimiserTests.js} (100%) rename test/unit/coffee/{ProjectPersistenceManagerTests.coffee => ProjectPersistenceManagerTests.js} (100%) rename 
test/unit/coffee/{RequestParserTests.coffee => RequestParserTests.js} (100%) rename test/unit/coffee/{ResourceStateManagerTests.coffee => ResourceStateManagerTests.js} (100%) rename test/unit/coffee/{ResourceWriterTests.coffee => ResourceWriterTests.js} (100%) rename test/unit/coffee/{StaticServerForbidSymlinksTests.coffee => StaticServerForbidSymlinksTests.js} (100%) rename test/unit/coffee/{TikzManager.coffee => TikzManager.js} (100%) rename test/unit/coffee/{UrlCacheTests.coffee => UrlCacheTests.js} (100%) rename test/unit/coffee/{UrlFetcherTests.coffee => UrlFetcherTests.js} (100%) diff --git a/test/unit/coffee/CompileControllerTests.coffee b/test/unit/coffee/CompileControllerTests.js similarity index 100% rename from test/unit/coffee/CompileControllerTests.coffee rename to test/unit/coffee/CompileControllerTests.js diff --git a/test/unit/coffee/CompileManagerTests.coffee b/test/unit/coffee/CompileManagerTests.js similarity index 100% rename from test/unit/coffee/CompileManagerTests.coffee rename to test/unit/coffee/CompileManagerTests.js diff --git a/test/unit/coffee/ContentTypeMapperTests.coffee b/test/unit/coffee/ContentTypeMapperTests.js similarity index 100% rename from test/unit/coffee/ContentTypeMapperTests.coffee rename to test/unit/coffee/ContentTypeMapperTests.js diff --git a/test/unit/coffee/DockerLockManagerTests.coffee b/test/unit/coffee/DockerLockManagerTests.js similarity index 100% rename from test/unit/coffee/DockerLockManagerTests.coffee rename to test/unit/coffee/DockerLockManagerTests.js diff --git a/test/unit/coffee/DockerRunnerTests.coffee b/test/unit/coffee/DockerRunnerTests.js similarity index 100% rename from test/unit/coffee/DockerRunnerTests.coffee rename to test/unit/coffee/DockerRunnerTests.js diff --git a/test/unit/coffee/DraftModeManagerTests.coffee b/test/unit/coffee/DraftModeManagerTests.js similarity index 100% rename from test/unit/coffee/DraftModeManagerTests.coffee rename to test/unit/coffee/DraftModeManagerTests.js diff --git a/test/unit/coffee/LatexRunnerTests.coffee b/test/unit/coffee/LatexRunnerTests.js similarity index 100% rename from test/unit/coffee/LatexRunnerTests.coffee rename to test/unit/coffee/LatexRunnerTests.js diff --git a/test/unit/coffee/LockManagerTests.coffee b/test/unit/coffee/LockManagerTests.js similarity index 100% rename from test/unit/coffee/LockManagerTests.coffee rename to test/unit/coffee/LockManagerTests.js diff --git a/test/unit/coffee/OutputFileFinderTests.coffee b/test/unit/coffee/OutputFileFinderTests.js similarity index 100% rename from test/unit/coffee/OutputFileFinderTests.coffee rename to test/unit/coffee/OutputFileFinderTests.js diff --git a/test/unit/coffee/OutputFileOptimiserTests.coffee b/test/unit/coffee/OutputFileOptimiserTests.js similarity index 100% rename from test/unit/coffee/OutputFileOptimiserTests.coffee rename to test/unit/coffee/OutputFileOptimiserTests.js diff --git a/test/unit/coffee/ProjectPersistenceManagerTests.coffee b/test/unit/coffee/ProjectPersistenceManagerTests.js similarity index 100% rename from test/unit/coffee/ProjectPersistenceManagerTests.coffee rename to test/unit/coffee/ProjectPersistenceManagerTests.js diff --git a/test/unit/coffee/RequestParserTests.coffee b/test/unit/coffee/RequestParserTests.js similarity index 100% rename from test/unit/coffee/RequestParserTests.coffee rename to test/unit/coffee/RequestParserTests.js diff --git a/test/unit/coffee/ResourceStateManagerTests.coffee b/test/unit/coffee/ResourceStateManagerTests.js similarity index 100% rename from 
test/unit/coffee/ResourceStateManagerTests.coffee rename to test/unit/coffee/ResourceStateManagerTests.js diff --git a/test/unit/coffee/ResourceWriterTests.coffee b/test/unit/coffee/ResourceWriterTests.js similarity index 100% rename from test/unit/coffee/ResourceWriterTests.coffee rename to test/unit/coffee/ResourceWriterTests.js diff --git a/test/unit/coffee/StaticServerForbidSymlinksTests.coffee b/test/unit/coffee/StaticServerForbidSymlinksTests.js similarity index 100% rename from test/unit/coffee/StaticServerForbidSymlinksTests.coffee rename to test/unit/coffee/StaticServerForbidSymlinksTests.js diff --git a/test/unit/coffee/TikzManager.coffee b/test/unit/coffee/TikzManager.js similarity index 100% rename from test/unit/coffee/TikzManager.coffee rename to test/unit/coffee/TikzManager.js diff --git a/test/unit/coffee/UrlCacheTests.coffee b/test/unit/coffee/UrlCacheTests.js similarity index 100% rename from test/unit/coffee/UrlCacheTests.coffee rename to test/unit/coffee/UrlCacheTests.js diff --git a/test/unit/coffee/UrlFetcherTests.coffee b/test/unit/coffee/UrlFetcherTests.js similarity index 100% rename from test/unit/coffee/UrlFetcherTests.coffee rename to test/unit/coffee/UrlFetcherTests.js From 79a0891feeacbaaa8f63fb07d56a99f34569183c Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:15:08 +0100 Subject: [PATCH 11/24] decaffeinate: Convert CompileControllerTests.coffee and 17 other files to JS --- test/unit/coffee/CompileControllerTests.js | 418 ++++---- test/unit/coffee/CompileManagerTests.js | 646 ++++++------ test/unit/coffee/ContentTypeMapperTests.js | 100 +- test/unit/coffee/DockerLockManagerTests.js | 303 +++--- test/unit/coffee/DockerRunnerTests.js | 947 ++++++++++-------- test/unit/coffee/DraftModeManagerTests.js | 124 ++- test/unit/coffee/LatexRunnerTests.js | 162 +-- test/unit/coffee/LockManagerTests.js | 108 +- test/unit/coffee/OutputFileFinderTests.js | 122 ++- test/unit/coffee/OutputFileOptimiserTests.js | 198 ++-- .../coffee/ProjectPersistenceManagerTests.js | 120 ++- test/unit/coffee/RequestParserTests.js | 535 ++++++---- test/unit/coffee/ResourceStateManagerTests.js | 210 ++-- test/unit/coffee/ResourceWriterTests.js | 567 ++++++----- .../coffee/StaticServerForbidSymlinksTests.js | 295 +++--- test/unit/coffee/TikzManager.js | 229 +++-- test/unit/coffee/UrlCacheTests.js | 374 ++++--- test/unit/coffee/UrlFetcherTests.js | 234 +++-- 18 files changed, 3291 insertions(+), 2401 deletions(-) diff --git a/test/unit/coffee/CompileControllerTests.js b/test/unit/coffee/CompileControllerTests.js index 034adfc..1defed7 100644 --- a/test/unit/coffee/CompileControllerTests.js +++ b/test/unit/coffee/CompileControllerTests.js @@ -1,217 +1,269 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/CompileController' -tk = require("timekeeper") +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/CompileController'); +const tk = require("timekeeper"); -describe "CompileController", -> - beforeEach -> - @CompileController = SandboxedModule.require modulePath, requires: - "./CompileManager": @CompileManager = {} - "./RequestParser": 
@RequestParser = {} - "settings-sharelatex": @Settings = - apis: - clsi: +describe("CompileController", function() { + beforeEach(function() { + this.CompileController = SandboxedModule.require(modulePath, { requires: { + "./CompileManager": (this.CompileManager = {}), + "./RequestParser": (this.RequestParser = {}), + "settings-sharelatex": (this.Settings = { + apis: { + clsi: { url: "http://clsi.example.com" - "./ProjectPersistenceManager": @ProjectPersistenceManager = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err:sinon.stub(), warn: sinon.stub()} - @Settings.externalUrl = "http://www.example.com" - @req = {} - @res = {} - @next = sinon.stub() + } + } + }), + "./ProjectPersistenceManager": (this.ProjectPersistenceManager = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), err:sinon.stub(), warn: sinon.stub()}) + } + }); + this.Settings.externalUrl = "http://www.example.com"; + this.req = {}; + this.res = {}; + return this.next = sinon.stub(); + }); - describe "compile", -> - beforeEach -> - @req.body = { + describe("compile", function() { + beforeEach(function() { + this.req.body = { compile: "mock-body" - } - @req.params = - project_id: @project_id = "project-id-123" - @request = { + }; + this.req.params = + {project_id: (this.project_id = "project-id-123")}; + this.request = { compile: "mock-parsed-request" - } - @request_with_project_id = - compile: @request.compile - project_id: @project_id - @output_files = [{ - path: "output.pdf" - type: "pdf" + }; + this.request_with_project_id = { + compile: this.request.compile, + project_id: this.project_id + }; + this.output_files = [{ + path: "output.pdf", + type: "pdf", build: 1234 }, { - path: "output.log" - type: "log" + path: "output.log", + type: "log", build: 1234 - }] - @RequestParser.parse = sinon.stub().callsArgWith(1, null, @request) - @ProjectPersistenceManager.markProjectAsJustAccessed = sinon.stub().callsArg(1) - @res.status = sinon.stub().returnsThis() - @res.send = sinon.stub() + }]; + this.RequestParser.parse = sinon.stub().callsArgWith(1, null, this.request); + this.ProjectPersistenceManager.markProjectAsJustAccessed = sinon.stub().callsArg(1); + this.res.status = sinon.stub().returnsThis(); + return this.res.send = sinon.stub(); + }); - describe "successfully", -> - beforeEach -> - @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, @output_files) - @CompileController.compile @req, @res + describe("successfully", function() { + beforeEach(function() { + this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, this.output_files); + return this.CompileController.compile(this.req, this.res); + }); - it "should parse the request", -> - @RequestParser.parse - .calledWith(@req.body) - .should.equal true + it("should parse the request", function() { + return this.RequestParser.parse + .calledWith(this.req.body) + .should.equal(true); + }); - it "should run the compile for the specified project", -> - @CompileManager.doCompileWithLock - .calledWith(@request_with_project_id) - .should.equal true + it("should run the compile for the specified project", function() { + return this.CompileManager.doCompileWithLock + .calledWith(this.request_with_project_id) + .should.equal(true); + }); - it "should mark the project as accessed", -> - @ProjectPersistenceManager.markProjectAsJustAccessed - .calledWith(@project_id) - .should.equal true + it("should mark the project as accessed", function() { + return 
this.ProjectPersistenceManager.markProjectAsJustAccessed + .calledWith(this.project_id) + .should.equal(true); + }); - it "should return the JSON response", -> - @res.status.calledWith(200).should.equal true - @res.send - .calledWith( - compile: - status: "success" - error: null - outputFiles: @output_files.map (file) => - url: "#{@Settings.apis.clsi.url}/project/#{@project_id}/build/#{file.build}/output/#{file.path}" - path: file.path - type: file.type - build: file.build - ) - .should.equal true + return it("should return the JSON response", function() { + this.res.status.calledWith(200).should.equal(true); + return this.res.send + .calledWith({ + compile: { + status: "success", + error: null, + outputFiles: this.output_files.map(file => { + return { + url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`, + path: file.path, + type: file.type, + build: file.build + }; + }) + } + }) + .should.equal(true); + }); + }); - describe "with an error", -> - beforeEach -> - @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, new Error(@message = "error message"), null) - @CompileController.compile @req, @res + describe("with an error", function() { + beforeEach(function() { + this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, new Error(this.message = "error message"), null); + return this.CompileController.compile(this.req, this.res); + }); - it "should return the JSON response with the error", -> - @res.status.calledWith(500).should.equal true - @res.send - .calledWith( - compile: - status: "error" - error: @message + return it("should return the JSON response with the error", function() { + this.res.status.calledWith(500).should.equal(true); + return this.res.send + .calledWith({ + compile: { + status: "error", + error: this.message, outputFiles: [] - ) - .should.equal true + } + }) + .should.equal(true); + }); + }); - describe "when the request times out", -> - beforeEach -> - @error = new Error(@message = "container timed out") - @error.timedout = true - @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, @error, null) - @CompileController.compile @req, @res + describe("when the request times out", function() { + beforeEach(function() { + this.error = new Error(this.message = "container timed out"); + this.error.timedout = true; + this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, this.error, null); + return this.CompileController.compile(this.req, this.res); + }); - it "should return the JSON response with the timeout status", -> - @res.status.calledWith(200).should.equal true - @res.send - .calledWith( - compile: - status: "timedout" - error: @message + return it("should return the JSON response with the timeout status", function() { + this.res.status.calledWith(200).should.equal(true); + return this.res.send + .calledWith({ + compile: { + status: "timedout", + error: this.message, outputFiles: [] - ) - .should.equal true + } + }) + .should.equal(true); + }); + }); - describe "when the request returns no output files", -> - beforeEach -> - @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, []) - @CompileController.compile @req, @res + return describe("when the request returns no output files", function() { + beforeEach(function() { + this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, []); + return this.CompileController.compile(this.req, this.res); + }); - it "should return the JSON response with the failure status", -> - 
@res.status.calledWith(200).should.equal true - @res.send - .calledWith( - compile: - error: null - status: "failure" + return it("should return the JSON response with the failure status", function() { + this.res.status.calledWith(200).should.equal(true); + return this.res.send + .calledWith({ + compile: { + error: null, + status: "failure", outputFiles: [] - ) - .should.equal true + } + }) + .should.equal(true); + }); + }); + }); - describe "syncFromCode", -> - beforeEach -> - @file = "main.tex" - @line = 42 - @column = 5 - @project_id = "mock-project-id" - @req.params = - project_id: @project_id - @req.query = - file: @file - line: @line.toString() - column: @column.toString() - @res.json = sinon.stub() + describe("syncFromCode", function() { + beforeEach(function() { + this.file = "main.tex"; + this.line = 42; + this.column = 5; + this.project_id = "mock-project-id"; + this.req.params = + {project_id: this.project_id}; + this.req.query = { + file: this.file, + line: this.line.toString(), + column: this.column.toString() + }; + this.res.json = sinon.stub(); - @CompileManager.syncFromCode = sinon.stub().callsArgWith(5, null, @pdfPositions = ["mock-positions"]) - @CompileController.syncFromCode @req, @res, @next + this.CompileManager.syncFromCode = sinon.stub().callsArgWith(5, null, (this.pdfPositions = ["mock-positions"])); + return this.CompileController.syncFromCode(this.req, this.res, this.next); + }); - it "should find the corresponding location in the PDF", -> - @CompileManager.syncFromCode - .calledWith(@project_id, undefined, @file, @line, @column) - .should.equal true + it("should find the corresponding location in the PDF", function() { + return this.CompileManager.syncFromCode + .calledWith(this.project_id, undefined, this.file, this.line, this.column) + .should.equal(true); + }); - it "should return the positions", -> - @res.json - .calledWith( - pdf: @pdfPositions - ) - .should.equal true + return it("should return the positions", function() { + return this.res.json + .calledWith({ + pdf: this.pdfPositions + }) + .should.equal(true); + }); + }); - describe "syncFromPdf", -> - beforeEach -> - @page = 5 - @h = 100.23 - @v = 45.67 - @project_id = "mock-project-id" - @req.params = - project_id: @project_id - @req.query = - page: @page.toString() - h: @h.toString() - v: @v.toString() - @res.json = sinon.stub() + describe("syncFromPdf", function() { + beforeEach(function() { + this.page = 5; + this.h = 100.23; + this.v = 45.67; + this.project_id = "mock-project-id"; + this.req.params = + {project_id: this.project_id}; + this.req.query = { + page: this.page.toString(), + h: this.h.toString(), + v: this.v.toString() + }; + this.res.json = sinon.stub(); - @CompileManager.syncFromPdf = sinon.stub().callsArgWith(5, null, @codePositions = ["mock-positions"]) - @CompileController.syncFromPdf @req, @res, @next + this.CompileManager.syncFromPdf = sinon.stub().callsArgWith(5, null, (this.codePositions = ["mock-positions"])); + return this.CompileController.syncFromPdf(this.req, this.res, this.next); + }); - it "should find the corresponding location in the code", -> - @CompileManager.syncFromPdf - .calledWith(@project_id, undefined, @page, @h, @v) - .should.equal true + it("should find the corresponding location in the code", function() { + return this.CompileManager.syncFromPdf + .calledWith(this.project_id, undefined, this.page, this.h, this.v) + .should.equal(true); + }); - it "should return the positions", -> - @res.json - .calledWith( - code: @codePositions - ) - .should.equal true + 
return it("should return the positions", function() { + return this.res.json + .calledWith({ + code: this.codePositions + }) + .should.equal(true); + }); + }); - describe "wordcount", -> - beforeEach -> - @file = "main.tex" - @project_id = "mock-project-id" - @req.params = - project_id: @project_id - @req.query = - file: @file - image: @image = "example.com/image" - @res.json = sinon.stub() + return describe("wordcount", function() { + beforeEach(function() { + this.file = "main.tex"; + this.project_id = "mock-project-id"; + this.req.params = + {project_id: this.project_id}; + this.req.query = { + file: this.file, + image: (this.image = "example.com/image") + }; + this.res.json = sinon.stub(); - @CompileManager.wordcount = sinon.stub().callsArgWith(4, null, @texcount = ["mock-texcount"]) - @CompileController.wordcount @req, @res, @next + this.CompileManager.wordcount = sinon.stub().callsArgWith(4, null, (this.texcount = ["mock-texcount"])); + return this.CompileController.wordcount(this.req, this.res, this.next); + }); - it "should return the word count of a file", -> - @CompileManager.wordcount - .calledWith(@project_id, undefined, @file, @image) - .should.equal true + it("should return the word count of a file", function() { + return this.CompileManager.wordcount + .calledWith(this.project_id, undefined, this.file, this.image) + .should.equal(true); + }); - it "should return the texcount info", -> - @res.json - .calledWith( - texcount: @texcount - ) - .should.equal true + return it("should return the texcount info", function() { + return this.res.json + .calledWith({ + texcount: this.texcount + }) + .should.equal(true); + }); + }); +}); diff --git a/test/unit/coffee/CompileManagerTests.js b/test/unit/coffee/CompileManagerTests.js index c4b0f85..5675ac1 100644 --- a/test/unit/coffee/CompileManagerTests.js +++ b/test/unit/coffee/CompileManagerTests.js @@ -1,356 +1,426 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/CompileManager' -tk = require("timekeeper") -EventEmitter = require("events").EventEmitter -Path = require "path" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/CompileManager'); +const tk = require("timekeeper"); +const { EventEmitter } = require("events"); +const Path = require("path"); -describe "CompileManager", -> - beforeEach -> - @CompileManager = SandboxedModule.require modulePath, requires: - "./LatexRunner": @LatexRunner = {} - "./ResourceWriter": @ResourceWriter = {} - "./OutputFileFinder": @OutputFileFinder = {} - "./OutputCacheManager": @OutputCacheManager = {} - "settings-sharelatex": @Settings = - path: +describe("CompileManager", function() { + beforeEach(function() { + this.CompileManager = SandboxedModule.require(modulePath, { requires: { + "./LatexRunner": (this.LatexRunner = {}), + "./ResourceWriter": (this.ResourceWriter = {}), + "./OutputFileFinder": (this.OutputFileFinder = {}), + "./OutputCacheManager": (this.OutputCacheManager = {}), + "settings-sharelatex": (this.Settings = { + path: { compilesDir: "/compiles/dir" - synctexBaseDir: -> "/compile" - clsi: - 
docker: + }, + synctexBaseDir() { return "/compile"; }, + clsi: { + docker: { image: "SOMEIMAGE" + } + } + }), - "logger-sharelatex": @logger = { log: sinon.stub() , info:->} - "child_process": @child_process = {} - "./CommandRunner": @CommandRunner = {} - "./DraftModeManager": @DraftModeManager = {} - "./TikzManager": @TikzManager = {} - "./LockManager": @LockManager = {} - "fs": @fs = {} - "fs-extra": @fse = { ensureDir: sinon.stub().callsArg(1) } - @callback = sinon.stub() - @project_id = "project-id-123" - @user_id = "1234" - describe "doCompileWithLock", -> - beforeEach -> - @request = - resources: @resources = "mock-resources" - project_id: @project_id - user_id: @user_id - @output_files = ["foo", "bar"] - @Settings.compileDir = "compiles" - @compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}" - @CompileManager.doCompile = sinon.stub().callsArgWith(1, null, @output_files) - @LockManager.runWithLock = (lockFile, runner, callback) -> - runner (err, result...) -> - callback(err, result...) + "logger-sharelatex": (this.logger = { log: sinon.stub() , info() {}}), + "child_process": (this.child_process = {}), + "./CommandRunner": (this.CommandRunner = {}), + "./DraftModeManager": (this.DraftModeManager = {}), + "./TikzManager": (this.TikzManager = {}), + "./LockManager": (this.LockManager = {}), + "fs": (this.fs = {}), + "fs-extra": (this.fse = { ensureDir: sinon.stub().callsArg(1) }) + } + }); + this.callback = sinon.stub(); + this.project_id = "project-id-123"; + return this.user_id = "1234"; + }); + describe("doCompileWithLock", function() { + beforeEach(function() { + this.request = { + resources: (this.resources = "mock-resources"), + project_id: this.project_id, + user_id: this.user_id + }; + this.output_files = ["foo", "bar"]; + this.Settings.compileDir = "compiles"; + this.compileDir = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; + this.CompileManager.doCompile = sinon.stub().callsArgWith(1, null, this.output_files); + return this.LockManager.runWithLock = (lockFile, runner, callback) => + runner((err, ...result) => callback(err, ...Array.from(result))) + ; + }); - describe "when the project is not locked", -> - beforeEach -> - @CompileManager.doCompileWithLock @request, @callback + describe("when the project is not locked", function() { + beforeEach(function() { + return this.CompileManager.doCompileWithLock(this.request, this.callback); + }); - it "should ensure that the compile directory exists", -> - @fse.ensureDir.calledWith(@compileDir) - .should.equal true + it("should ensure that the compile directory exists", function() { + return this.fse.ensureDir.calledWith(this.compileDir) + .should.equal(true); + }); - it "should call doCompile with the request", -> - @CompileManager.doCompile - .calledWith(@request) - .should.equal true + it("should call doCompile with the request", function() { + return this.CompileManager.doCompile + .calledWith(this.request) + .should.equal(true); + }); - it "should call the callback with the output files", -> - @callback.calledWithExactly(null, @output_files) - .should.equal true + return it("should call the callback with the output files", function() { + return this.callback.calledWithExactly(null, this.output_files) + .should.equal(true); + }); + }); - describe "when the project is locked", -> - beforeEach -> - @error = new Error("locked") - @LockManager.runWithLock = (lockFile, runner, callback) => - callback(@error) - @CompileManager.doCompileWithLock @request, @callback + return describe("when 
the project is locked", function() { + beforeEach(function() { + this.error = new Error("locked"); + this.LockManager.runWithLock = (lockFile, runner, callback) => { + return callback(this.error); + }; + return this.CompileManager.doCompileWithLock(this.request, this.callback); + }); - it "should ensure that the compile directory exists", -> - @fse.ensureDir.calledWith(@compileDir) - .should.equal true + it("should ensure that the compile directory exists", function() { + return this.fse.ensureDir.calledWith(this.compileDir) + .should.equal(true); + }); - it "should not call doCompile with the request", -> - @CompileManager.doCompile - .called.should.equal false + it("should not call doCompile with the request", function() { + return this.CompileManager.doCompile + .called.should.equal(false); + }); - it "should call the callback with the error", -> - @callback.calledWithExactly(@error) - .should.equal true + return it("should call the callback with the error", function() { + return this.callback.calledWithExactly(this.error) + .should.equal(true); + }); + }); + }); - describe "doCompile", -> - beforeEach -> - @output_files = [{ - path: "output.log" + describe("doCompile", function() { + beforeEach(function() { + this.output_files = [{ + path: "output.log", type: "log" }, { - path: "output.pdf" + path: "output.pdf", type: "pdf" - }] - @build_files = [{ - path: "output.log" - type: "log" + }]; + this.build_files = [{ + path: "output.log", + type: "log", build: 1234 }, { - path: "output.pdf" - type: "pdf" + path: "output.pdf", + type: "pdf", build: 1234 - }] - @request = - resources: @resources = "mock-resources" - rootResourcePath: @rootResourcePath = "main.tex" - project_id: @project_id - user_id: @user_id - compiler: @compiler = "pdflatex" - timeout: @timeout = 42000 - imageName: @image = "example.com/image" - flags: @flags = ["-file-line-error"] - @env = {} - @Settings.compileDir = "compiles" - @compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}" - @ResourceWriter.syncResourcesToDisk = sinon.stub().callsArgWith(2, null, @resources) - @LatexRunner.runLatex = sinon.stub().callsArg(2) - @OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files) - @OutputCacheManager.saveOutputFiles = sinon.stub().callsArgWith(2, null, @build_files) - @DraftModeManager.injectDraftMode = sinon.stub().callsArg(1) - @TikzManager.checkMainFile = sinon.stub().callsArg(3, false) + }]; + this.request = { + resources: (this.resources = "mock-resources"), + rootResourcePath: (this.rootResourcePath = "main.tex"), + project_id: this.project_id, + user_id: this.user_id, + compiler: (this.compiler = "pdflatex"), + timeout: (this.timeout = 42000), + imageName: (this.image = "example.com/image"), + flags: (this.flags = ["-file-line-error"]) + }; + this.env = {}; + this.Settings.compileDir = "compiles"; + this.compileDir = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; + this.ResourceWriter.syncResourcesToDisk = sinon.stub().callsArgWith(2, null, this.resources); + this.LatexRunner.runLatex = sinon.stub().callsArg(2); + this.OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, this.output_files); + this.OutputCacheManager.saveOutputFiles = sinon.stub().callsArgWith(2, null, this.build_files); + this.DraftModeManager.injectDraftMode = sinon.stub().callsArg(1); + return this.TikzManager.checkMainFile = sinon.stub().callsArg(3, false); + }); - describe "normally", -> - beforeEach -> - @CompileManager.doCompile @request, @callback + 
describe("normally", function() { + beforeEach(function() { + return this.CompileManager.doCompile(this.request, this.callback); + }); - it "should write the resources to disk", -> - @ResourceWriter.syncResourcesToDisk - .calledWith(@request, @compileDir) - .should.equal true + it("should write the resources to disk", function() { + return this.ResourceWriter.syncResourcesToDisk + .calledWith(this.request, this.compileDir) + .should.equal(true); + }); - it "should run LaTeX", -> - @LatexRunner.runLatex - .calledWith("#{@project_id}-#{@user_id}", { - directory: @compileDir - mainFile: @rootResourcePath - compiler: @compiler - timeout: @timeout - image: @image - flags: @flags - environment: @env + it("should run LaTeX", function() { + return this.LatexRunner.runLatex + .calledWith(`${this.project_id}-${this.user_id}`, { + directory: this.compileDir, + mainFile: this.rootResourcePath, + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: this.env }) - .should.equal true + .should.equal(true); + }); - it "should find the output files", -> - @OutputFileFinder.findOutputFiles - .calledWith(@resources, @compileDir) - .should.equal true + it("should find the output files", function() { + return this.OutputFileFinder.findOutputFiles + .calledWith(this.resources, this.compileDir) + .should.equal(true); + }); - it "should return the output files", -> - @callback.calledWith(null, @build_files).should.equal true + it("should return the output files", function() { + return this.callback.calledWith(null, this.build_files).should.equal(true); + }); - it "should not inject draft mode by default", -> - @DraftModeManager.injectDraftMode.called.should.equal false + return it("should not inject draft mode by default", function() { + return this.DraftModeManager.injectDraftMode.called.should.equal(false); + }); + }); - describe "with draft mode", -> - beforeEach -> - @request.draft = true - @CompileManager.doCompile @request, @callback + describe("with draft mode", function() { + beforeEach(function() { + this.request.draft = true; + return this.CompileManager.doCompile(this.request, this.callback); + }); - it "should inject the draft mode header", -> - @DraftModeManager.injectDraftMode - .calledWith(@compileDir + "/" + @rootResourcePath) - .should.equal true + return it("should inject the draft mode header", function() { + return this.DraftModeManager.injectDraftMode + .calledWith(this.compileDir + "/" + this.rootResourcePath) + .should.equal(true); + }); + }); - describe "with a check option", -> - beforeEach -> - @request.check = "error" - @CompileManager.doCompile @request, @callback + describe("with a check option", function() { + beforeEach(function() { + this.request.check = "error"; + return this.CompileManager.doCompile(this.request, this.callback); + }); - it "should run chktex", -> - @LatexRunner.runLatex - .calledWith("#{@project_id}-#{@user_id}", { - directory: @compileDir - mainFile: @rootResourcePath - compiler: @compiler - timeout: @timeout - image: @image - flags: @flags + return it("should run chktex", function() { + return this.LatexRunner.runLatex + .calledWith(`${this.project_id}-${this.user_id}`, { + directory: this.compileDir, + mainFile: this.rootResourcePath, + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, environment: {'CHKTEX_OPTIONS': '-nall -e9 -e10 -w15 -w16', 'CHKTEX_EXIT_ON_ERROR':1, 'CHKTEX_ULIMIT_OPTIONS': '-t 5 -v 64000'} }) - .should.equal true + .should.equal(true); + }); + 
}); - describe "with a knitr file and check options", -> - beforeEach -> - @request.rootResourcePath = "main.Rtex" - @request.check = "error" - @CompileManager.doCompile @request, @callback + return describe("with a knitr file and check options", function() { + beforeEach(function() { + this.request.rootResourcePath = "main.Rtex"; + this.request.check = "error"; + return this.CompileManager.doCompile(this.request, this.callback); + }); - it "should not run chktex", -> - @LatexRunner.runLatex - .calledWith("#{@project_id}-#{@user_id}", { - directory: @compileDir - mainFile: "main.Rtex" - compiler: @compiler - timeout: @timeout - image: @image - flags: @flags - environment: @env + return it("should not run chktex", function() { + return this.LatexRunner.runLatex + .calledWith(`${this.project_id}-${this.user_id}`, { + directory: this.compileDir, + mainFile: "main.Rtex", + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: this.env }) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "clearProject", -> - describe "succesfully", -> - beforeEach -> - @Settings.compileDir = "compiles" - @fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory: ()->true}) - @proc = new EventEmitter() - @proc.stdout = new EventEmitter() - @proc.stderr = new EventEmitter() - @child_process.spawn = sinon.stub().returns(@proc) - @CompileManager.clearProject @project_id, @user_id, @callback - @proc.emit "close", 0 + describe("clearProject", function() { + describe("succesfully", function() { + beforeEach(function() { + this.Settings.compileDir = "compiles"; + this.fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory(){ return true; }}); + this.proc = new EventEmitter(); + this.proc.stdout = new EventEmitter(); + this.proc.stderr = new EventEmitter(); + this.child_process.spawn = sinon.stub().returns(this.proc); + this.CompileManager.clearProject(this.project_id, this.user_id, this.callback); + return this.proc.emit("close", 0); + }); - it "should remove the project directory", -> - @child_process.spawn - .calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"]) - .should.equal true + it("should remove the project directory", function() { + return this.child_process.spawn + .calledWith("rm", ["-r", `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`]) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "with a non-success status code", -> - beforeEach -> - @Settings.compileDir = "compiles" - @fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory: ()->true}) - @proc = new EventEmitter() - @proc.stdout = new EventEmitter() - @proc.stderr = new EventEmitter() - @child_process.spawn = sinon.stub().returns(@proc) - @CompileManager.clearProject @project_id, @user_id, @callback - @proc.stderr.emit "data", @error = "oops" - @proc.emit "close", 1 + return describe("with a non-success status code", function() { + beforeEach(function() { + this.Settings.compileDir = "compiles"; + this.fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory(){ return true; }}); + this.proc = new EventEmitter(); + this.proc.stdout = new EventEmitter(); + this.proc.stderr = new EventEmitter(); + this.child_process.spawn = sinon.stub().returns(this.proc); + this.CompileManager.clearProject(this.project_id, this.user_id, this.callback); + 
this.proc.stderr.emit("data", (this.error = "oops")); + return this.proc.emit("close", 1); + }); - it "should remove the project directory", -> - @child_process.spawn - .calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"]) - .should.equal true + it("should remove the project directory", function() { + return this.child_process.spawn + .calledWith("rm", ["-r", `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`]) + .should.equal(true); + }); - it "should call the callback with an error from the stderr", -> - @callback + return it("should call the callback with an error from the stderr", function() { + this.callback .calledWith(new Error()) - .should.equal true + .should.equal(true); - @callback.args[0][0].message.should.equal "rm -r #{@Settings.path.compilesDir}/#{@project_id}-#{@user_id} failed: #{@error}" + return this.callback.args[0][0].message.should.equal(`rm -r ${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id} failed: ${this.error}`); + }); + }); + }); - describe "syncing", -> - beforeEach -> - @page = 1 - @h = 42.23 - @v = 87.56 - @width = 100.01 - @height = 234.56 - @line = 5 - @column = 3 - @file_name = "main.tex" - @child_process.execFile = sinon.stub() - @Settings.path.synctexBaseDir = (project_id) => "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}" + describe("syncing", function() { + beforeEach(function() { + this.page = 1; + this.h = 42.23; + this.v = 87.56; + this.width = 100.01; + this.height = 234.56; + this.line = 5; + this.column = 3; + this.file_name = "main.tex"; + this.child_process.execFile = sinon.stub(); + return this.Settings.path.synctexBaseDir = project_id => `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; + }); - describe "syncFromCode", -> - beforeEach -> - @fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true}) - @stdout = "NODE\t#{@page}\t#{@h}\t#{@v}\t#{@width}\t#{@height}\n" - @CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:@stdout}) - @CompileManager.syncFromCode @project_id, @user_id, @file_name, @line, @column, @callback + describe("syncFromCode", function() { + beforeEach(function() { + this.fs.stat = sinon.stub().callsArgWith(1, null,{isFile(){ return true; }}); + this.stdout = `NODE\t${this.page}\t${this.h}\t${this.v}\t${this.width}\t${this.height}\n`; + this.CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:this.stdout}); + return this.CompileManager.syncFromCode(this.project_id, this.user_id, this.file_name, this.line, this.column, this.callback); + }); - it "should execute the synctex binary", -> - bin_path = Path.resolve(__dirname + "/../../../bin/synctex") - synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf" - file_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}" - @CommandRunner.run + it("should execute the synctex binary", function() { + const bin_path = Path.resolve(__dirname + "/../../../bin/synctex"); + const synctex_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/output.pdf`; + const file_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/${this.file_name}`; + return this.CommandRunner.run .calledWith( - "#{@project_id}-#{@user_id}", - ['/opt/synctex', 'code', synctex_path, file_path, @line, @column], - "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}", - @Settings.clsi.docker.image, + `${this.project_id}-${this.user_id}`, + ['/opt/synctex', 'code', synctex_path, file_path, 
this.line, this.column], + `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`, + this.Settings.clsi.docker.image, 60000, {} - ).should.equal true + ).should.equal(true); + }); - it "should call the callback with the parsed output", -> - @callback + return it("should call the callback with the parsed output", function() { + return this.callback .calledWith(null, [{ - page: @page - h: @h - v: @v - height: @height - width: @width + page: this.page, + h: this.h, + v: this.v, + height: this.height, + width: this.width }]) - .should.equal true + .should.equal(true); + }); + }); - describe "syncFromPdf", -> - beforeEach -> - @fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true}) - @stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}\t#{@line}\t#{@column}\n" - @CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:@stdout}) - @CompileManager.syncFromPdf @project_id, @user_id, @page, @h, @v, @callback + return describe("syncFromPdf", function() { + beforeEach(function() { + this.fs.stat = sinon.stub().callsArgWith(1, null,{isFile(){ return true; }}); + this.stdout = `NODE\t${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/${this.file_name}\t${this.line}\t${this.column}\n`; + this.CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:this.stdout}); + return this.CompileManager.syncFromPdf(this.project_id, this.user_id, this.page, this.h, this.v, this.callback); + }); - it "should execute the synctex binary", -> - bin_path = Path.resolve(__dirname + "/../../../bin/synctex") - synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf" - @CommandRunner.run + it("should execute the synctex binary", function() { + const bin_path = Path.resolve(__dirname + "/../../../bin/synctex"); + const synctex_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/output.pdf`; + return this.CommandRunner.run .calledWith( - "#{@project_id}-#{@user_id}", - ['/opt/synctex', "pdf", synctex_path, @page, @h, @v], - "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}", - @Settings.clsi.docker.image, + `${this.project_id}-${this.user_id}`, + ['/opt/synctex', "pdf", synctex_path, this.page, this.h, this.v], + `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`, + this.Settings.clsi.docker.image, 60000, - {}).should.equal true + {}).should.equal(true); + }); - it "should call the callback with the parsed output", -> - @callback + return it("should call the callback with the parsed output", function() { + return this.callback .calledWith(null, [{ - file: @file_name - line: @line - column: @column + file: this.file_name, + line: this.line, + column: this.column }]) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "wordcount", -> - beforeEach -> - @CommandRunner.run = sinon.stub().callsArg(6) - @fs.readFile = sinon.stub().callsArgWith(2, null, @stdout = "Encoding: ascii\nWords in text: 2") - @callback = sinon.stub() + return describe("wordcount", function() { + beforeEach(function() { + this.CommandRunner.run = sinon.stub().callsArg(6); + this.fs.readFile = sinon.stub().callsArgWith(2, null, (this.stdout = "Encoding: ascii\nWords in text: 2")); + this.callback = sinon.stub(); - @project_id - @timeout = 60 * 1000 - @file_name = "main.tex" - @Settings.path.compilesDir = "/local/compile/directory" - @image = "example.com/image" + this.project_id; + this.timeout = 60 * 1000; + this.file_name = "main.tex"; + this.Settings.path.compilesDir = 
"/local/compile/directory"; + this.image = "example.com/image"; - @CompileManager.wordcount @project_id, @user_id, @file_name, @image, @callback + return this.CompileManager.wordcount(this.project_id, this.user_id, this.file_name, this.image, this.callback); + }); - it "should run the texcount command", -> - @directory = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}" - @file_path = "$COMPILE_DIR/#{@file_name}" - @command =[ "texcount", "-nocol", "-inc", @file_path, "-out=" + @file_path + ".wc"] + it("should run the texcount command", function() { + this.directory = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; + this.file_path = `$COMPILE_DIR/${this.file_name}`; + this.command =[ "texcount", "-nocol", "-inc", this.file_path, `-out=${this.file_path}.wc`]; - @CommandRunner.run - .calledWith("#{@project_id}-#{@user_id}", @command, @directory, @image, @timeout, {}) - .should.equal true + return this.CommandRunner.run + .calledWith(`${this.project_id}-${this.user_id}`, this.command, this.directory, this.image, this.timeout, {}) + .should.equal(true); + }); - it "should call the callback with the parsed output", -> - @callback + return it("should call the callback with the parsed output", function() { + return this.callback .calledWith(null, { - encode: "ascii" - textWords: 2 - headWords: 0 - outside: 0 - headers: 0 - elements: 0 - mathInline: 0 - mathDisplay: 0 - errors: 0 + encode: "ascii", + textWords: 2, + headWords: 0, + outside: 0, + headers: 0, + elements: 0, + mathInline: 0, + mathDisplay: 0, + errors: 0, messages: "" }) - .should.equal true + .should.equal(true); + }); + }); +}); diff --git a/test/unit/coffee/ContentTypeMapperTests.js b/test/unit/coffee/ContentTypeMapperTests.js index 2439120..64a6091 100644 --- a/test/unit/coffee/ContentTypeMapperTests.js +++ b/test/unit/coffee/ContentTypeMapperTests.js @@ -1,55 +1,75 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/ContentTypeMapper' +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/ContentTypeMapper'); -describe 'ContentTypeMapper', -> +describe('ContentTypeMapper', function() { - beforeEach -> - @ContentTypeMapper = SandboxedModule.require modulePath + beforeEach(function() { + return this.ContentTypeMapper = SandboxedModule.require(modulePath); + }); - describe 'map', -> + return describe('map', function() { - it 'should map .txt to text/plain', -> - content_type = @ContentTypeMapper.map('example.txt') - content_type.should.equal 'text/plain' + it('should map .txt to text/plain', function() { + const content_type = this.ContentTypeMapper.map('example.txt'); + return content_type.should.equal('text/plain'); + }); - it 'should map .csv to text/csv', -> - content_type = @ContentTypeMapper.map('example.csv') - content_type.should.equal 'text/csv' + it('should map .csv to text/csv', function() { + const content_type = this.ContentTypeMapper.map('example.csv'); + return content_type.should.equal('text/csv'); + }); - it 'should map .pdf to application/pdf', -> - content_type = @ContentTypeMapper.map('example.pdf') - content_type.should.equal 'application/pdf' 
+ it('should map .pdf to application/pdf', function() { + const content_type = this.ContentTypeMapper.map('example.pdf'); + return content_type.should.equal('application/pdf'); + }); - it 'should fall back to octet-stream', -> - content_type = @ContentTypeMapper.map('example.unknown') - content_type.should.equal 'application/octet-stream' + it('should fall back to octet-stream', function() { + const content_type = this.ContentTypeMapper.map('example.unknown'); + return content_type.should.equal('application/octet-stream'); + }); - describe 'coercing web files to plain text', -> + describe('coercing web files to plain text', function() { - it 'should map .js to plain text', -> - content_type = @ContentTypeMapper.map('example.js') - content_type.should.equal 'text/plain' + it('should map .js to plain text', function() { + const content_type = this.ContentTypeMapper.map('example.js'); + return content_type.should.equal('text/plain'); + }); - it 'should map .html to plain text', -> - content_type = @ContentTypeMapper.map('example.html') - content_type.should.equal 'text/plain' + it('should map .html to plain text', function() { + const content_type = this.ContentTypeMapper.map('example.html'); + return content_type.should.equal('text/plain'); + }); - it 'should map .css to plain text', -> - content_type = @ContentTypeMapper.map('example.css') - content_type.should.equal 'text/plain' + return it('should map .css to plain text', function() { + const content_type = this.ContentTypeMapper.map('example.css'); + return content_type.should.equal('text/plain'); + }); + }); - describe 'image files', -> + return describe('image files', function() { - it 'should map .png to image/png', -> - content_type = @ContentTypeMapper.map('example.png') - content_type.should.equal 'image/png' + it('should map .png to image/png', function() { + const content_type = this.ContentTypeMapper.map('example.png'); + return content_type.should.equal('image/png'); + }); - it 'should map .jpeg to image/jpeg', -> - content_type = @ContentTypeMapper.map('example.jpeg') - content_type.should.equal 'image/jpeg' + it('should map .jpeg to image/jpeg', function() { + const content_type = this.ContentTypeMapper.map('example.jpeg'); + return content_type.should.equal('image/jpeg'); + }); - it 'should map .svg to text/plain to protect against XSS (SVG can execute JS)', -> - content_type = @ContentTypeMapper.map('example.svg') - content_type.should.equal 'text/plain' + return it('should map .svg to text/plain to protect against XSS (SVG can execute JS)', function() { + const content_type = this.ContentTypeMapper.map('example.svg'); + return content_type.should.equal('text/plain'); + }); + }); + }); +}); diff --git a/test/unit/coffee/DockerLockManagerTests.js b/test/unit/coffee/DockerLockManagerTests.js index 6161bec..5ef3ca2 100644 --- a/test/unit/coffee/DockerLockManagerTests.js +++ b/test/unit/coffee/DockerLockManagerTests.js @@ -1,145 +1,188 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -require "coffee-script" -modulePath = require('path').join __dirname, '../../../app/coffee/DockerLockManager' +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +require("coffee-script"); 
+const modulePath = require('path').join(__dirname, '../../../app/coffee/DockerLockManager'); -describe "LockManager", -> - beforeEach -> - @LockManager = SandboxedModule.require modulePath, requires: - "settings-sharelatex": @Settings = - clsi: docker: {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } +describe("LockManager", function() { + beforeEach(function() { + return this.LockManager = SandboxedModule.require(modulePath, { requires: { + "settings-sharelatex": (this.Settings = + {clsi: {docker: {}}}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }) + } + });}); - describe "runWithLock", -> - describe "with a single lock", -> - beforeEach (done) -> - @callback = sinon.stub() - @LockManager.runWithLock "lock-one", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world") - , 100 - , (err, args...) => - @callback(err,args...) - done() + return describe("runWithLock", function() { + describe("with a single lock", function() { + beforeEach(function(done) { + this.callback = sinon.stub(); + return this.LockManager.runWithLock("lock-one", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world") + , 100) + + , (err, ...args) => { + this.callback(err,...Array.from(args)); + return done(); + }); + }); - it "should call the callback", -> - @callback.calledWith(null,"hello","world").should.equal true + return it("should call the callback", function() { + return this.callback.calledWith(null,"hello","world").should.equal(true); + }); + }); - describe "with two locks", -> - beforeEach (done) -> - @callback1 = sinon.stub() - @callback2 = sinon.stub() - @LockManager.runWithLock "lock-one", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","one") - , 100 - , (err, args...) => - @callback1(err,args...) - @LockManager.runWithLock "lock-two", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","two") - , 200 - , (err, args...) => - @callback2(err,args...) - done() + describe("with two locks", function() { + beforeEach(function(done) { + this.callback1 = sinon.stub(); + this.callback2 = sinon.stub(); + this.LockManager.runWithLock("lock-one", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","one") + , 100) + + , (err, ...args) => { + return this.callback1(err,...Array.from(args)); + }); + return this.LockManager.runWithLock("lock-two", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","two") + , 200) + + , (err, ...args) => { + this.callback2(err,...Array.from(args)); + return done(); + }); + }); - it "should call the first callback", -> - @callback1.calledWith(null,"hello","world","one").should.equal true + it("should call the first callback", function() { + return this.callback1.calledWith(null,"hello","world","one").should.equal(true); + }); - it "should call the second callback", -> - @callback2.calledWith(null,"hello","world","two").should.equal true + return it("should call the second callback", function() { + return this.callback2.calledWith(null,"hello","world","two").should.equal(true); + }); + }); - describe "with lock contention", -> - describe "where the first lock is released quickly", -> - beforeEach (done) -> - @LockManager.MAX_LOCK_WAIT_TIME = 1000 - @LockManager.LOCK_TEST_INTERVAL = 100 - @callback1 = sinon.stub() - @callback2 = sinon.stub() - @LockManager.runWithLock "lock", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","one") - , 100 - , (err, args...) => - @callback1(err,args...) 
- @LockManager.runWithLock "lock", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","two") - , 200 - , (err, args...) => - @callback2(err,args...) - done() + return describe("with lock contention", function() { + describe("where the first lock is released quickly", function() { + beforeEach(function(done) { + this.LockManager.MAX_LOCK_WAIT_TIME = 1000; + this.LockManager.LOCK_TEST_INTERVAL = 100; + this.callback1 = sinon.stub(); + this.callback2 = sinon.stub(); + this.LockManager.runWithLock("lock", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","one") + , 100) + + , (err, ...args) => { + return this.callback1(err,...Array.from(args)); + }); + return this.LockManager.runWithLock("lock", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","two") + , 200) + + , (err, ...args) => { + this.callback2(err,...Array.from(args)); + return done(); + }); + }); - it "should call the first callback", -> - @callback1.calledWith(null,"hello","world","one").should.equal true + it("should call the first callback", function() { + return this.callback1.calledWith(null,"hello","world","one").should.equal(true); + }); - it "should call the second callback", -> - @callback2.calledWith(null,"hello","world","two").should.equal true + return it("should call the second callback", function() { + return this.callback2.calledWith(null,"hello","world","two").should.equal(true); + }); + }); - describe "where the first lock is held longer than the waiting time", -> - beforeEach (done) -> - @LockManager.MAX_LOCK_HOLD_TIME = 10000 - @LockManager.MAX_LOCK_WAIT_TIME = 1000 - @LockManager.LOCK_TEST_INTERVAL = 100 - @callback1 = sinon.stub() - @callback2 = sinon.stub() - doneOne = doneTwo = false - finish = (key) -> - doneOne = true if key is 1 - doneTwo = true if key is 2 - done() if doneOne and doneTwo - @LockManager.runWithLock "lock", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","one") - , 1100 - , (err, args...) => - @callback1(err,args...) - finish(1) - @LockManager.runWithLock "lock", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","two") - , 100 - , (err, args...) => - @callback2(err,args...) 
- finish(2) + describe("where the first lock is held longer than the waiting time", function() { + beforeEach(function(done) { + let doneTwo; + this.LockManager.MAX_LOCK_HOLD_TIME = 10000; + this.LockManager.MAX_LOCK_WAIT_TIME = 1000; + this.LockManager.LOCK_TEST_INTERVAL = 100; + this.callback1 = sinon.stub(); + this.callback2 = sinon.stub(); + let doneOne = (doneTwo = false); + const finish = function(key) { + if (key === 1) { doneOne = true; } + if (key === 2) { doneTwo = true; } + if (doneOne && doneTwo) { return done(); } + }; + this.LockManager.runWithLock("lock", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","one") + , 1100) + + , (err, ...args) => { + this.callback1(err,...Array.from(args)); + return finish(1); + }); + return this.LockManager.runWithLock("lock", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","two") + , 100) + + , (err, ...args) => { + this.callback2(err,...Array.from(args)); + return finish(2); + }); + }); - it "should call the first callback", -> - @callback1.calledWith(null,"hello","world","one").should.equal true + it("should call the first callback", function() { + return this.callback1.calledWith(null,"hello","world","one").should.equal(true); + }); - it "should call the second callback with an error", -> - error = sinon.match.instanceOf Error - @callback2.calledWith(error).should.equal true + return it("should call the second callback with an error", function() { + const error = sinon.match.instanceOf(Error); + return this.callback2.calledWith(error).should.equal(true); + }); + }); - describe "where the first lock is held longer than the max holding time", -> - beforeEach (done) -> - @LockManager.MAX_LOCK_HOLD_TIME = 1000 - @LockManager.MAX_LOCK_WAIT_TIME = 2000 - @LockManager.LOCK_TEST_INTERVAL = 100 - @callback1 = sinon.stub() - @callback2 = sinon.stub() - doneOne = doneTwo = false - finish = (key) -> - doneOne = true if key is 1 - doneTwo = true if key is 2 - done() if doneOne and doneTwo - @LockManager.runWithLock "lock", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","one") - , 1500 - , (err, args...) => - @callback1(err,args...) - finish(1) - @LockManager.runWithLock "lock", (releaseLock) -> - setTimeout () -> - releaseLock(null, "hello", "world","two") - , 100 - , (err, args...) => - @callback2(err,args...) 
- finish(2) + return describe("where the first lock is held longer than the max holding time", function() { + beforeEach(function(done) { + let doneTwo; + this.LockManager.MAX_LOCK_HOLD_TIME = 1000; + this.LockManager.MAX_LOCK_WAIT_TIME = 2000; + this.LockManager.LOCK_TEST_INTERVAL = 100; + this.callback1 = sinon.stub(); + this.callback2 = sinon.stub(); + let doneOne = (doneTwo = false); + const finish = function(key) { + if (key === 1) { doneOne = true; } + if (key === 2) { doneTwo = true; } + if (doneOne && doneTwo) { return done(); } + }; + this.LockManager.runWithLock("lock", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","one") + , 1500) + + , (err, ...args) => { + this.callback1(err,...Array.from(args)); + return finish(1); + }); + return this.LockManager.runWithLock("lock", releaseLock => + setTimeout(() => releaseLock(null, "hello", "world","two") + , 100) + + , (err, ...args) => { + this.callback2(err,...Array.from(args)); + return finish(2); + }); + }); - it "should call the first callback", -> - @callback1.calledWith(null,"hello","world","one").should.equal true + it("should call the first callback", function() { + return this.callback1.calledWith(null,"hello","world","one").should.equal(true); + }); - it "should call the second callback", -> - @callback2.calledWith(null,"hello","world","two").should.equal true + return it("should call the second callback", function() { + return this.callback2.calledWith(null,"hello","world","two").should.equal(true); + }); + }); + }); + }); +}); diff --git a/test/unit/coffee/DockerRunnerTests.js b/test/unit/coffee/DockerRunnerTests.js index 307ffde..79ac5df 100644 --- a/test/unit/coffee/DockerRunnerTests.js +++ b/test/unit/coffee/DockerRunnerTests.js @@ -1,509 +1,656 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -expect = require('chai').expect -require "coffee-script" -modulePath = require('path').join __dirname, '../../../app/coffee/DockerRunner' -Path = require "path" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const { expect } = require('chai'); +require("coffee-script"); +const modulePath = require('path').join(__dirname, '../../../app/coffee/DockerRunner'); +const Path = require("path"); -describe "DockerRunner", -> - beforeEach -> - @container = container = {} - @DockerRunner = SandboxedModule.require modulePath, requires: - "settings-sharelatex": @Settings = - clsi: docker: {} +describe("DockerRunner", function() { + beforeEach(function() { + let container, Docker, Timer; + this.container = (container = {}); + this.DockerRunner = SandboxedModule.require(modulePath, { requires: { + "settings-sharelatex": (this.Settings = { + clsi: { docker: {} + }, path: {} - "logger-sharelatex": @logger = { + }), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), info: sinon.stub(), warn: sinon.stub() + }), + "dockerode": (Docker = (function() { + Docker = class Docker { + static initClass() { + this.prototype.getContainer = sinon.stub().returns(container); + this.prototype.createContainer = sinon.stub().yields(null, 
container); + this.prototype.listContainers = sinon.stub(); + } + }; + Docker.initClass(); + return Docker; + })()), + "fs": (this.fs = { stat: sinon.stub().yields(null,{isDirectory(){ return true; }}) }), + "./Metrics": { + Timer: (Timer = class Timer { + done() {} + }) + }, + "./LockManager": { + runWithLock(key, runner, callback) { return runner(callback); } } - "dockerode": class Docker - getContainer: sinon.stub().returns(container) - createContainer: sinon.stub().yields(null, container) - listContainers: sinon.stub() - "fs": @fs = { stat: sinon.stub().yields(null,{isDirectory:()->true}) } - "./Metrics": - Timer: class Timer - done: () -> - "./LockManager": - runWithLock: (key, runner, callback) -> runner(callback) - @Docker = Docker - @getContainer = Docker::getContainer - @createContainer = Docker::createContainer - @listContainers = Docker::listContainers + } + } + ); + this.Docker = Docker; + this.getContainer = Docker.prototype.getContainer; + this.createContainer = Docker.prototype.createContainer; + this.listContainers = Docker.prototype.listContainers; - @directory = "/local/compile/directory" - @mainFile = "main-file.tex" - @compiler = "pdflatex" - @image = "example.com/sharelatex/image:2016.2" - @env = {} - @callback = sinon.stub() - @project_id = "project-id-123" - @volumes = - "/local/compile/directory": "/compile" - @Settings.clsi.docker.image = @defaultImage = "default-image" - @Settings.clsi.docker.env = PATH: "mock-path" + this.directory = "/local/compile/directory"; + this.mainFile = "main-file.tex"; + this.compiler = "pdflatex"; + this.image = "example.com/sharelatex/image:2016.2"; + this.env = {}; + this.callback = sinon.stub(); + this.project_id = "project-id-123"; + this.volumes = + {"/local/compile/directory": "/compile"}; + this.Settings.clsi.docker.image = (this.defaultImage = "default-image"); + return this.Settings.clsi.docker.env = {PATH: "mock-path"}; + }); - describe "run", -> - beforeEach (done)-> - @DockerRunner._getContainerOptions = sinon.stub().returns(@options = {mockoptions: "foo"}) - @DockerRunner._fingerprintContainer = sinon.stub().returns(@fingerprint = "fingerprint") + describe("run", function() { + beforeEach(function(done){ + this.DockerRunner._getContainerOptions = sinon.stub().returns(this.options = {mockoptions: "foo"}); + this.DockerRunner._fingerprintContainer = sinon.stub().returns(this.fingerprint = "fingerprint"); - @name = "project-#{@project_id}-#{@fingerprint}" + this.name = `project-${this.project_id}-${this.fingerprint}`; - @command = ["mock", "command", "--outdir=$COMPILE_DIR"] - @command_with_dir = ["mock", "command", "--outdir=/compile"] - @timeout = 42000 - done() + this.command = ["mock", "command", "--outdir=$COMPILE_DIR"]; + this.command_with_dir = ["mock", "command", "--outdir=/compile"]; + this.timeout = 42000; + return done(); + }); - describe "successfully", -> - beforeEach (done)-> - @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output") - @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, (err, output)=> - @callback(err, output) - done() + describe("successfully", function() { + beforeEach(function(done){ + this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); + return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, (err, output)=> { + this.callback(err, output); + return done(); + }); + }); - it "should generate the options for the 
container", -> - @DockerRunner._getContainerOptions - .calledWith(@command_with_dir, @image, @volumes, @timeout) - .should.equal true + it("should generate the options for the container", function() { + return this.DockerRunner._getContainerOptions + .calledWith(this.command_with_dir, this.image, this.volumes, this.timeout) + .should.equal(true); + }); - it "should generate the fingerprint from the returned options", -> - @DockerRunner._fingerprintContainer - .calledWith(@options) - .should.equal true + it("should generate the fingerprint from the returned options", function() { + return this.DockerRunner._fingerprintContainer + .calledWith(this.options) + .should.equal(true); + }); - it "should do the run", -> - @DockerRunner._runAndWaitForContainer - .calledWith(@options, @volumes, @timeout) - .should.equal true + it("should do the run", function() { + return this.DockerRunner._runAndWaitForContainer + .calledWith(this.options, this.volumes, this.timeout) + .should.equal(true); + }); - it "should call the callback", -> - @callback.calledWith(null, @output).should.equal true + return it("should call the callback", function() { + return this.callback.calledWith(null, this.output).should.equal(true); + }); + }); - describe 'when path.sandboxedCompilesHostDir is set', -> + describe('when path.sandboxedCompilesHostDir is set', function() { - beforeEach -> - @Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles' - @directory = '/var/lib/sharelatex/data/compiles/xyz' - @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output") - @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback + beforeEach(function() { + this.Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles'; + this.directory = '/var/lib/sharelatex/data/compiles/xyz'; + this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); + return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, this.callback); + }); - it 'should re-write the bind directory', -> - volumes = @DockerRunner._runAndWaitForContainer.lastCall.args[1] - expect(volumes).to.deep.equal { + it('should re-write the bind directory', function() { + const volumes = this.DockerRunner._runAndWaitForContainer.lastCall.args[1]; + return expect(volumes).to.deep.equal({ '/some/host/dir/compiles/xyz': '/compile' - } + }); + }); - it "should call the callback", -> - @callback.calledWith(null, @output).should.equal true + return it("should call the callback", function() { + return this.callback.calledWith(null, this.output).should.equal(true); + }); + }); - describe "when the run throws an error", -> - beforeEach -> - firstTime = true - @output = "mock-output" - @DockerRunner._runAndWaitForContainer = (options, volumes, timeout, callback = (error, output)->) => - if firstTime - firstTime = false - callback new Error("HTTP code is 500 which indicates error: server error") - else - callback(null, @output) - sinon.spy @DockerRunner, "_runAndWaitForContainer" - @DockerRunner.destroyContainer = sinon.stub().callsArg(3) - @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback + describe("when the run throws an error", function() { + beforeEach(function() { + let firstTime = true; + this.output = "mock-output"; + this.DockerRunner._runAndWaitForContainer = (options, volumes, timeout, callback) => { + if (callback == null) { callback = function(error, 
output){}; } + if (firstTime) { + firstTime = false; + return callback(new Error("HTTP code is 500 which indicates error: server error")); + } else { + return callback(null, this.output); + } + }; + sinon.spy(this.DockerRunner, "_runAndWaitForContainer"); + this.DockerRunner.destroyContainer = sinon.stub().callsArg(3); + return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, this.callback); + }); - it "should do the run twice", -> - @DockerRunner._runAndWaitForContainer - .calledTwice.should.equal true + it("should do the run twice", function() { + return this.DockerRunner._runAndWaitForContainer + .calledTwice.should.equal(true); + }); - it "should destroy the container in between", -> - @DockerRunner.destroyContainer - .calledWith(@name, null) - .should.equal true + it("should destroy the container in between", function() { + return this.DockerRunner.destroyContainer + .calledWith(this.name, null) + .should.equal(true); + }); - it "should call the callback", -> - @callback.calledWith(null, @output).should.equal true + return it("should call the callback", function() { + return this.callback.calledWith(null, this.output).should.equal(true); + }); + }); - describe "with no image", -> - beforeEach -> - @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output") - @DockerRunner.run @project_id, @command, @directory, null, @timeout, @env, @callback + describe("with no image", function() { + beforeEach(function() { + this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); + return this.DockerRunner.run(this.project_id, this.command, this.directory, null, this.timeout, this.env, this.callback); + }); - it "should use the default image", -> - @DockerRunner._getContainerOptions - .calledWith(@command_with_dir, @defaultImage, @volumes, @timeout) - .should.equal true + return it("should use the default image", function() { + return this.DockerRunner._getContainerOptions + .calledWith(this.command_with_dir, this.defaultImage, this.volumes, this.timeout) + .should.equal(true); + }); + }); - describe "with image override", -> - beforeEach -> - @Settings.texliveImageNameOveride = "overrideimage.com/something" - @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output") - @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback + return describe("with image override", function() { + beforeEach(function() { + this.Settings.texliveImageNameOveride = "overrideimage.com/something"; + this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); + return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, this.callback); + }); - it "should use the override and keep the tag", -> - image = @DockerRunner._getContainerOptions.args[0][1] - image.should.equal "overrideimage.com/something/image:2016.2" + return it("should use the override and keep the tag", function() { + const image = this.DockerRunner._getContainerOptions.args[0][1]; + return image.should.equal("overrideimage.com/something/image:2016.2"); + }); + }); + }); - describe "_runAndWaitForContainer", -> - beforeEach -> - @options = {mockoptions: "foo", name: @name = "mock-name"} - @DockerRunner.startContainer = (options, volumes, attachStreamHandler, callback) => - attachStreamHandler(null, @output = "mock-output") - callback(null, 
@containerId = "container-id") - sinon.spy @DockerRunner, "startContainer" - @DockerRunner.waitForContainer = sinon.stub().callsArgWith(2, null, @exitCode = 42) - @DockerRunner._runAndWaitForContainer @options, @volumes, @timeout, @callback + describe("_runAndWaitForContainer", function() { + beforeEach(function() { + this.options = {mockoptions: "foo", name: (this.name = "mock-name")}; + this.DockerRunner.startContainer = (options, volumes, attachStreamHandler, callback) => { + attachStreamHandler(null, (this.output = "mock-output")); + return callback(null, (this.containerId = "container-id")); + }; + sinon.spy(this.DockerRunner, "startContainer"); + this.DockerRunner.waitForContainer = sinon.stub().callsArgWith(2, null, (this.exitCode = 42)); + return this.DockerRunner._runAndWaitForContainer(this.options, this.volumes, this.timeout, this.callback); + }); - it "should create/start the container", -> - @DockerRunner.startContainer - .calledWith(@options, @volumes) - .should.equal true + it("should create/start the container", function() { + return this.DockerRunner.startContainer + .calledWith(this.options, this.volumes) + .should.equal(true); + }); - it "should wait for the container to finish", -> - @DockerRunner.waitForContainer - .calledWith(@name, @timeout) - .should.equal true + it("should wait for the container to finish", function() { + return this.DockerRunner.waitForContainer + .calledWith(this.name, this.timeout) + .should.equal(true); + }); - it "should call the callback with the output", -> - @callback.calledWith(null, @output).should.equal true + return it("should call the callback with the output", function() { + return this.callback.calledWith(null, this.output).should.equal(true); + }); + }); - describe "startContainer", -> - beforeEach -> - @attachStreamHandler = sinon.stub() - @attachStreamHandler.cock = true - @options = {mockoptions: "foo", name: "mock-name"} - @container.inspect = sinon.stub().callsArgWith(0) - @DockerRunner.attachToContainer = (containerId, attachStreamHandler, cb)=> - attachStreamHandler() - cb() - sinon.spy @DockerRunner, "attachToContainer" + describe("startContainer", function() { + beforeEach(function() { + this.attachStreamHandler = sinon.stub(); + this.attachStreamHandler.cock = true; + this.options = {mockoptions: "foo", name: "mock-name"}; + this.container.inspect = sinon.stub().callsArgWith(0); + this.DockerRunner.attachToContainer = (containerId, attachStreamHandler, cb)=> { + attachStreamHandler(); + return cb(); + }; + return sinon.spy(this.DockerRunner, "attachToContainer"); + }); - describe "when the container exists", -> - beforeEach -> - @container.inspect = sinon.stub().callsArgWith(0) - @container.start = sinon.stub().yields() + describe("when the container exists", function() { + beforeEach(function() { + this.container.inspect = sinon.stub().callsArgWith(0); + this.container.start = sinon.stub().yields(); - @DockerRunner.startContainer @options, @volumes, @callback, -> + return this.DockerRunner.startContainer(this.options, this.volumes, this.callback, function() {}); + }); - it "should start the container with the given name", -> - @getContainer - .calledWith(@options.name) - .should.equal true - @container.start + it("should start the container with the given name", function() { + this.getContainer + .calledWith(this.options.name) + .should.equal(true); + return this.container.start .called - .should.equal true + .should.equal(true); + }); - it "should not try to create the container", -> - 
@createContainer.called.should.equal false + it("should not try to create the container", function() { + return this.createContainer.called.should.equal(false); + }); - it "should attach to the container", -> - @DockerRunner.attachToContainer.called.should.equal true + it("should attach to the container", function() { + return this.DockerRunner.attachToContainer.called.should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should attach before the container starts", -> - sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start) + return it("should attach before the container starts", function() { + return sinon.assert.callOrder(this.DockerRunner.attachToContainer, this.container.start); + }); + }); - describe "when the container does not exist", -> - beforeEach ()-> - exists = false - @container.start = sinon.stub().yields() - @container.inspect = sinon.stub().callsArgWith(0, {statusCode:404}) - @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback + describe("when the container does not exist", function() { + beforeEach(function(){ + const exists = false; + this.container.start = sinon.stub().yields(); + this.container.inspect = sinon.stub().callsArgWith(0, {statusCode:404}); + return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); + }); - it "should create the container", -> - @createContainer - .calledWith(@options) - .should.equal true + it("should create the container", function() { + return this.createContainer + .calledWith(this.options) + .should.equal(true); + }); - it "should call the callback and stream handler", -> - @attachStreamHandler.called.should.equal true - @callback.called.should.equal true + it("should call the callback and stream handler", function() { + this.attachStreamHandler.called.should.equal(true); + return this.callback.called.should.equal(true); + }); - it "should attach to the container", -> - @DockerRunner.attachToContainer.called.should.equal true + it("should attach to the container", function() { + return this.DockerRunner.attachToContainer.called.should.equal(true); + }); - it "should attach before the container starts", -> - sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start) + return it("should attach before the container starts", function() { + return sinon.assert.callOrder(this.DockerRunner.attachToContainer, this.container.start); + }); + }); - describe "when the container is already running", -> - beforeEach -> - error = new Error("HTTP code is 304 which indicates error: server error - start: Cannot start container #{@name}: The container MOCKID is already running.") - error.statusCode = 304 - @container.start = sinon.stub().yields(error) - @container.inspect = sinon.stub().callsArgWith(0) - @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback + describe("when the container is already running", function() { + beforeEach(function() { + const error = new Error(`HTTP code is 304 which indicates error: server error - start: Cannot start container ${this.name}: The container MOCKID is already running.`); + error.statusCode = 304; + this.container.start = sinon.stub().yields(error); + this.container.inspect = sinon.stub().callsArgWith(0); + return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); + }); - it 
"should not try to create the container", -> - @createContainer.called.should.equal false + it("should not try to create the container", function() { + return this.createContainer.called.should.equal(false); + }); - it "should call the callback and stream handler without an error", -> - @attachStreamHandler.called.should.equal true - @callback.called.should.equal true + return it("should call the callback and stream handler without an error", function() { + this.attachStreamHandler.called.should.equal(true); + return this.callback.called.should.equal(true); + }); + }); - describe "when a volume does not exist", -> - beforeEach ()-> - @fs.stat = sinon.stub().yields(new Error("no such path")) - @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback + describe("when a volume does not exist", function() { + beforeEach(function(){ + this.fs.stat = sinon.stub().yields(new Error("no such path")); + return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); + }); - it "should not try to create the container", -> - @createContainer.called.should.equal false + it("should not try to create the container", function() { + return this.createContainer.called.should.equal(false); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Error()).should.equal true + return it("should call the callback with an error", function() { + return this.callback.calledWith(new Error()).should.equal(true); + }); + }); - describe "when a volume exists but is not a directory", -> - beforeEach -> - @fs.stat = sinon.stub().yields(null, {isDirectory: () -> return false}) - @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback + describe("when a volume exists but is not a directory", function() { + beforeEach(function() { + this.fs.stat = sinon.stub().yields(null, {isDirectory() { return false; }}); + return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); + }); - it "should not try to create the container", -> - @createContainer.called.should.equal false + it("should not try to create the container", function() { + return this.createContainer.called.should.equal(false); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Error()).should.equal true + return it("should call the callback with an error", function() { + return this.callback.calledWith(new Error()).should.equal(true); + }); + }); - describe "when a volume does not exist, but sibling-containers are used", -> - beforeEach -> - @fs.stat = sinon.stub().yields(new Error("no such path")) - @Settings.path.sandboxedCompilesHostDir = '/some/path' - @container.start = sinon.stub().yields() - @DockerRunner.startContainer @options, @volumes, @callback + describe("when a volume does not exist, but sibling-containers are used", function() { + beforeEach(function() { + this.fs.stat = sinon.stub().yields(new Error("no such path")); + this.Settings.path.sandboxedCompilesHostDir = '/some/path'; + this.container.start = sinon.stub().yields(); + return this.DockerRunner.startContainer(this.options, this.volumes, this.callback); + }); - afterEach -> - delete @Settings.path.sandboxedCompilesHostDir + afterEach(function() { + return delete this.Settings.path.sandboxedCompilesHostDir; + }); - it "should start the container with the given name", -> - @getContainer - .calledWith(@options.name) - .should.equal true - @container.start + it("should start the container with the 
given name", function() { + this.getContainer + .calledWith(this.options.name) + .should.equal(true); + return this.container.start .called - .should.equal true + .should.equal(true); + }); - it "should not try to create the container", -> - @createContainer.called.should.equal false + it("should not try to create the container", function() { + return this.createContainer.called.should.equal(false); + }); - it "should call the callback", -> - @callback.called.should.equal true - @callback.calledWith(new Error()).should.equal false + return it("should call the callback", function() { + this.callback.called.should.equal(true); + return this.callback.calledWith(new Error()).should.equal(false); + }); + }); - describe "when the container tries to be created, but already has been (race condition)", -> + return describe("when the container tries to be created, but already has been (race condition)", function() {}); + }); - describe "waitForContainer", -> - beforeEach -> - @containerId = "container-id" - @timeout = 5000 - @container.wait = sinon.stub().yields(null, StatusCode: @statusCode = 42) - @container.kill = sinon.stub().yields() + describe("waitForContainer", function() { + beforeEach(function() { + this.containerId = "container-id"; + this.timeout = 5000; + this.container.wait = sinon.stub().yields(null, {StatusCode: (this.statusCode = 42)}); + return this.container.kill = sinon.stub().yields(); + }); - describe "when the container returns in time", -> - beforeEach -> - @DockerRunner.waitForContainer @containerId, @timeout, @callback + describe("when the container returns in time", function() { + beforeEach(function() { + return this.DockerRunner.waitForContainer(this.containerId, this.timeout, this.callback); + }); - it "should wait for the container", -> - @getContainer - .calledWith(@containerId) - .should.equal true - @container.wait + it("should wait for the container", function() { + this.getContainer + .calledWith(this.containerId) + .should.equal(true); + return this.container.wait .called - .should.equal true + .should.equal(true); + }); - it "should call the callback with the exit", -> - @callback - .calledWith(null, @statusCode) - .should.equal true + return it("should call the callback with the exit", function() { + return this.callback + .calledWith(null, this.statusCode) + .should.equal(true); + }); + }); - describe "when the container does not return before the timeout", -> - beforeEach (done) -> - @container.wait = (callback = (error, exitCode) ->) -> - setTimeout () -> - callback(null, StatusCode: 42) - , 100 - @timeout = 5 - @DockerRunner.waitForContainer @containerId, @timeout, (args...) => - @callback(args...) 
- done() + return describe("when the container does not return before the timeout", function() { + beforeEach(function(done) { + this.container.wait = function(callback) { + if (callback == null) { callback = function(error, exitCode) {}; } + return setTimeout(() => callback(null, {StatusCode: 42}) + , 100); + }; + this.timeout = 5; + return this.DockerRunner.waitForContainer(this.containerId, this.timeout, (...args) => { + this.callback(...Array.from(args || [])); + return done(); + }); + }); - it "should call kill on the container", -> - @getContainer - .calledWith(@containerId) - .should.equal true - @container.kill + it("should call kill on the container", function() { + this.getContainer + .calledWith(this.containerId) + .should.equal(true); + return this.container.kill .called - .should.equal true + .should.equal(true); + }); - it "should call the callback with an error", -> - error = new Error("container timed out") - error.timedout = true - @callback + return it("should call the callback with an error", function() { + const error = new Error("container timed out"); + error.timedout = true; + return this.callback .calledWith(error) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "destroyOldContainers", -> - beforeEach (done) -> - oneHourInSeconds = 60 * 60 - oneHourInMilliseconds = oneHourInSeconds * 1000 - nowInSeconds = Date.now()/1000 - @containers = [{ - Name: "/project-old-container-name" - Id: "old-container-id" + describe("destroyOldContainers", function() { + beforeEach(function(done) { + const oneHourInSeconds = 60 * 60; + const oneHourInMilliseconds = oneHourInSeconds * 1000; + const nowInSeconds = Date.now()/1000; + this.containers = [{ + Name: "/project-old-container-name", + Id: "old-container-id", Created: nowInSeconds - oneHourInSeconds - 100 }, { - Name: "/project-new-container-name" - Id: "new-container-id" - Created: nowInSeconds - oneHourInSeconds + 100 + Name: "/project-new-container-name", + Id: "new-container-id", + Created: (nowInSeconds - oneHourInSeconds) + 100 }, { - Name: "/totally-not-a-project-container" - Id: "some-random-id" + Name: "/totally-not-a-project-container", + Id: "some-random-id", Created: nowInSeconds - (2 * oneHourInSeconds ) - }] - @DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds - @listContainers.callsArgWith(1, null, @containers) - @DockerRunner.destroyContainer = sinon.stub().callsArg(3) - @DockerRunner.destroyOldContainers (error) => - @callback(error) - done() + }]; + this.DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds; + this.listContainers.callsArgWith(1, null, this.containers); + this.DockerRunner.destroyContainer = sinon.stub().callsArg(3); + return this.DockerRunner.destroyOldContainers(error => { + this.callback(error); + return done(); + }); + }); - it "should list all containers", -> - @listContainers - .calledWith(all: true) - .should.equal true + it("should list all containers", function() { + return this.listContainers + .calledWith({all: true}) + .should.equal(true); + }); - it "should destroy old containers", -> - @DockerRunner.destroyContainer + it("should destroy old containers", function() { + this.DockerRunner.destroyContainer .callCount - .should.equal 1 - @DockerRunner.destroyContainer + .should.equal(1); + return this.DockerRunner.destroyContainer .calledWith("/project-old-container-name", "old-container-id") - .should.equal true + .should.equal(true); + }); - it "should not destroy new containers", -> - @DockerRunner.destroyContainer + it("should not destroy new 
containers", function() { + return this.DockerRunner.destroyContainer .calledWith("/project-new-container-name", "new-container-id") - .should.equal false + .should.equal(false); + }); - it "should not destroy non-project containers", -> - @DockerRunner.destroyContainer + it("should not destroy non-project containers", function() { + return this.DockerRunner.destroyContainer .calledWith("/totally-not-a-project-container", "some-random-id") - .should.equal false + .should.equal(false); + }); - it "should callback the callback", -> - @callback.called.should.equal true + return it("should callback the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe '_destroyContainer', -> - beforeEach -> - @containerId = 'some_id' - @fakeContainer = - remove: sinon.stub().callsArgWith(1, null) - @Docker::getContainer = sinon.stub().returns(@fakeContainer) + describe('_destroyContainer', function() { + beforeEach(function() { + this.containerId = 'some_id'; + this.fakeContainer = + {remove: sinon.stub().callsArgWith(1, null)}; + return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); + }); - it 'should get the container', (done) -> - @DockerRunner._destroyContainer @containerId, false, (err) => - @Docker::getContainer.callCount.should.equal 1 - @Docker::getContainer.calledWith(@containerId).should.equal true - done() + it('should get the container', function(done) { + return this.DockerRunner._destroyContainer(this.containerId, false, err => { + this.Docker.prototype.getContainer.callCount.should.equal(1); + this.Docker.prototype.getContainer.calledWith(this.containerId).should.equal(true); + return done(); + }); + }); - it 'should try to force-destroy the container when shouldForce=true', (done) -> - @DockerRunner._destroyContainer @containerId, true, (err) => - @fakeContainer.remove.callCount.should.equal 1 - @fakeContainer.remove.calledWith({force: true}).should.equal true - done() + it('should try to force-destroy the container when shouldForce=true', function(done) { + return this.DockerRunner._destroyContainer(this.containerId, true, err => { + this.fakeContainer.remove.callCount.should.equal(1); + this.fakeContainer.remove.calledWith({force: true}).should.equal(true); + return done(); + }); + }); - it 'should not try to force-destroy the container when shouldForce=false', (done) -> - @DockerRunner._destroyContainer @containerId, false, (err) => - @fakeContainer.remove.callCount.should.equal 1 - @fakeContainer.remove.calledWith({force: false}).should.equal true - done() + it('should not try to force-destroy the container when shouldForce=false', function(done) { + return this.DockerRunner._destroyContainer(this.containerId, false, err => { + this.fakeContainer.remove.callCount.should.equal(1); + this.fakeContainer.remove.calledWith({force: false}).should.equal(true); + return done(); + }); + }); - it 'should not produce an error', (done) -> - @DockerRunner._destroyContainer @containerId, false, (err) => - expect(err).to.equal null - done() + it('should not produce an error', function(done) { + return this.DockerRunner._destroyContainer(this.containerId, false, err => { + expect(err).to.equal(null); + return done(); + }); + }); - describe 'when the container is already gone', -> - beforeEach -> - @fakeError = new Error('woops') - @fakeError.statusCode = 404 - @fakeContainer = - remove: sinon.stub().callsArgWith(1, @fakeError) - @Docker::getContainer = sinon.stub().returns(@fakeContainer) + describe('when the container is 
already gone', function() { + beforeEach(function() { + this.fakeError = new Error('woops'); + this.fakeError.statusCode = 404; + this.fakeContainer = + {remove: sinon.stub().callsArgWith(1, this.fakeError)}; + return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); + }); - it 'should not produce an error', (done) -> - @DockerRunner._destroyContainer @containerId, false, (err) => - expect(err).to.equal null - done() + return it('should not produce an error', function(done) { + return this.DockerRunner._destroyContainer(this.containerId, false, err => { + expect(err).to.equal(null); + return done(); + }); + }); + }); - describe 'when container.destroy produces an error', (done) -> - beforeEach -> - @fakeError = new Error('woops') - @fakeError.statusCode = 500 - @fakeContainer = - remove: sinon.stub().callsArgWith(1, @fakeError) - @Docker::getContainer = sinon.stub().returns(@fakeContainer) + return describe('when container.destroy produces an error', function(done) { + beforeEach(function() { + this.fakeError = new Error('woops'); + this.fakeError.statusCode = 500; + this.fakeContainer = + {remove: sinon.stub().callsArgWith(1, this.fakeError)}; + return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); + }); - it 'should produce an error', (done) -> - @DockerRunner._destroyContainer @containerId, false, (err) => - expect(err).to.not.equal null - expect(err).to.equal @fakeError - done() + return it('should produce an error', function(done) { + return this.DockerRunner._destroyContainer(this.containerId, false, err => { + expect(err).to.not.equal(null); + expect(err).to.equal(this.fakeError); + return done(); + }); + }); + }); + }); - describe 'kill', -> - beforeEach -> - @containerId = 'some_id' - @fakeContainer = - kill: sinon.stub().callsArgWith(0, null) - @Docker::getContainer = sinon.stub().returns(@fakeContainer) + return describe('kill', function() { + beforeEach(function() { + this.containerId = 'some_id'; + this.fakeContainer = + {kill: sinon.stub().callsArgWith(0, null)}; + return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); + }); - it 'should get the container', (done) -> - @DockerRunner.kill @containerId, (err) => - @Docker::getContainer.callCount.should.equal 1 - @Docker::getContainer.calledWith(@containerId).should.equal true - done() + it('should get the container', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + this.Docker.prototype.getContainer.callCount.should.equal(1); + this.Docker.prototype.getContainer.calledWith(this.containerId).should.equal(true); + return done(); + }); + }); - it 'should try to force-destroy the container', (done) -> - @DockerRunner.kill @containerId, (err) => - @fakeContainer.kill.callCount.should.equal 1 - done() + it('should try to force-destroy the container', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + this.fakeContainer.kill.callCount.should.equal(1); + return done(); + }); + }); - it 'should not produce an error', (done) -> - @DockerRunner.kill @containerId, (err) => - expect(err).to.equal undefined - done() + it('should not produce an error', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.equal(undefined); + return done(); + }); + }); - describe 'when the container is not actually running', -> - beforeEach -> - @fakeError = new Error('woops') - @fakeError.statusCode = 500 - @fakeError.message = 'Cannot kill container is not running' - 
@fakeContainer = - kill: sinon.stub().callsArgWith(0, @fakeError) - @Docker::getContainer = sinon.stub().returns(@fakeContainer) + describe('when the container is not actually running', function() { + beforeEach(function() { + this.fakeError = new Error('woops'); + this.fakeError.statusCode = 500; + this.fakeError.message = 'Cannot kill container is not running'; + this.fakeContainer = + {kill: sinon.stub().callsArgWith(0, this.fakeError)}; + return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); + }); - it 'should not produce an error', (done) -> - @DockerRunner.kill @containerId, (err) => - expect(err).to.equal undefined - done() + return it('should not produce an error', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.equal(undefined); + return done(); + }); + }); + }); - describe 'when container.kill produces a legitimate error', (done) -> - beforeEach -> - @fakeError = new Error('woops') - @fakeError.statusCode = 500 - @fakeError.message = 'Totally legitimate reason to throw an error' - @fakeContainer = - kill: sinon.stub().callsArgWith(0, @fakeError) - @Docker::getContainer = sinon.stub().returns(@fakeContainer) + return describe('when container.kill produces a legitimate error', function(done) { + beforeEach(function() { + this.fakeError = new Error('woops'); + this.fakeError.statusCode = 500; + this.fakeError.message = 'Totally legitimate reason to throw an error'; + this.fakeContainer = + {kill: sinon.stub().callsArgWith(0, this.fakeError)}; + return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); + }); - it 'should produce an error', (done) -> - @DockerRunner.kill @containerId, (err) => - expect(err).to.not.equal undefined - expect(err).to.equal @fakeError - done() + return it('should produce an error', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.not.equal(undefined); + expect(err).to.equal(this.fakeError); + return done(); + }); + }); + }); + }); +}); diff --git a/test/unit/coffee/DraftModeManagerTests.js b/test/unit/coffee/DraftModeManagerTests.js index 549be29..ffea050 100644 --- a/test/unit/coffee/DraftModeManagerTests.js +++ b/test/unit/coffee/DraftModeManagerTests.js @@ -1,61 +1,77 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/DraftModeManager' +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/DraftModeManager'); -describe 'DraftModeManager', -> - beforeEach -> - @DraftModeManager = SandboxedModule.require modulePath, requires: - "fs": @fs = {} - "logger-sharelatex": @logger = {log: () ->} +describe('DraftModeManager', function() { + beforeEach(function() { + return this.DraftModeManager = SandboxedModule.require(modulePath, { requires: { + "fs": (this.fs = {}), + "logger-sharelatex": (this.logger = {log() {}}) + } + });}); - describe "_injectDraftOption", -> - it "should add draft option into documentclass with existing options", -> - @DraftModeManager - ._injectDraftOption(''' - \\documentclass[a4paper,foo=bar]{article} - ''') - .should.equal(''' - 
\\documentclass[draft,a4paper,foo=bar]{article} - ''') + describe("_injectDraftOption", function() { + it("should add draft option into documentclass with existing options", function() { + return this.DraftModeManager + ._injectDraftOption(`\ +\\documentclass[a4paper,foo=bar]{article}\ +`) + .should.equal(`\ +\\documentclass[draft,a4paper,foo=bar]{article}\ +`); + }); - it "should add draft option into documentclass with no options", -> - @DraftModeManager - ._injectDraftOption(''' - \\documentclass{article} - ''') - .should.equal(''' - \\documentclass[draft]{article} - ''') + return it("should add draft option into documentclass with no options", function() { + return this.DraftModeManager + ._injectDraftOption(`\ +\\documentclass{article}\ +`) + .should.equal(`\ +\\documentclass[draft]{article}\ +`); + }); + }); - describe "injectDraftMode", -> - beforeEach -> - @filename = "/mock/filename.tex" - @callback = sinon.stub() - content = ''' - \\documentclass{article} - \\begin{document} - Hello world - \\end{document} - ''' - @fs.readFile = sinon.stub().callsArgWith(2, null, content) - @fs.writeFile = sinon.stub().callsArg(2) - @DraftModeManager.injectDraftMode @filename, @callback + return describe("injectDraftMode", function() { + beforeEach(function() { + this.filename = "/mock/filename.tex"; + this.callback = sinon.stub(); + const content = `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +`; + this.fs.readFile = sinon.stub().callsArgWith(2, null, content); + this.fs.writeFile = sinon.stub().callsArg(2); + return this.DraftModeManager.injectDraftMode(this.filename, this.callback); + }); - it "should read the file", -> - @fs.readFile - .calledWith(@filename, "utf8") - .should.equal true + it("should read the file", function() { + return this.fs.readFile + .calledWith(this.filename, "utf8") + .should.equal(true); + }); - it "should write the modified file", -> - @fs.writeFile - .calledWith(@filename, """ - \\documentclass[draft]{article} - \\begin{document} - Hello world - \\end{document} - """) - .should.equal true + it("should write the modified file", function() { + return this.fs.writeFile + .calledWith(this.filename, `\ +\\documentclass[draft]{article} +\\begin{document} +Hello world +\\end{document}\ +`) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); +}); diff --git a/test/unit/coffee/LatexRunnerTests.js b/test/unit/coffee/LatexRunnerTests.js index 77c6edb..5cb4d06 100644 --- a/test/unit/coffee/LatexRunnerTests.js +++ b/test/unit/coffee/LatexRunnerTests.js @@ -1,79 +1,105 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/LatexRunner' -Path = require "path" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/LatexRunner'); +const Path = require("path"); -describe "LatexRunner", -> - beforeEach -> - @LatexRunner = SandboxedModule.require modulePath, requires: - "settings-sharelatex": @Settings = - docker: +describe("LatexRunner", function() { + 
beforeEach(function() { + let Timer; + this.LatexRunner = SandboxedModule.require(modulePath, { requires: { + "settings-sharelatex": (this.Settings = { + docker: { socketPath: "/var/run/docker.sock" - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "./Metrics": - Timer: class Timer - done: () -> - "./CommandRunner": @CommandRunner = {} + } + }), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), + "./Metrics": { + Timer: (Timer = class Timer { + done() {} + }) + }, + "./CommandRunner": (this.CommandRunner = {}) + } + }); - @directory = "/local/compile/directory" - @mainFile = "main-file.tex" - @compiler = "pdflatex" - @image = "example.com/image" - @callback = sinon.stub() - @project_id = "project-id-123" - @env = {'foo': '123'} + this.directory = "/local/compile/directory"; + this.mainFile = "main-file.tex"; + this.compiler = "pdflatex"; + this.image = "example.com/image"; + this.callback = sinon.stub(); + this.project_id = "project-id-123"; + return this.env = {'foo': '123'};}); - describe "runLatex", -> - beforeEach -> - @CommandRunner.run = sinon.stub().callsArg(6) + return describe("runLatex", function() { + beforeEach(function() { + return this.CommandRunner.run = sinon.stub().callsArg(6); + }); - describe "normally", -> - beforeEach -> - @LatexRunner.runLatex @project_id, - directory: @directory - mainFile: @mainFile - compiler: @compiler - timeout: @timeout = 42000 - image: @image - environment: @env - @callback + describe("normally", function() { + beforeEach(function() { + return this.LatexRunner.runLatex(this.project_id, { + directory: this.directory, + mainFile: this.mainFile, + compiler: this.compiler, + timeout: (this.timeout = 42000), + image: this.image, + environment: this.env + }, + this.callback); + }); - it "should run the latex command", -> - @CommandRunner.run - .calledWith(@project_id, sinon.match.any, @directory, @image, @timeout, @env) - .should.equal true + return it("should run the latex command", function() { + return this.CommandRunner.run + .calledWith(this.project_id, sinon.match.any, this.directory, this.image, this.timeout, this.env) + .should.equal(true); + }); + }); - describe "with an .Rtex main file", -> - beforeEach -> - @LatexRunner.runLatex @project_id, - directory: @directory - mainFile: "main-file.Rtex" - compiler: @compiler - image: @image - timeout: @timeout = 42000 - @callback + describe("with an .Rtex main file", function() { + beforeEach(function() { + return this.LatexRunner.runLatex(this.project_id, { + directory: this.directory, + mainFile: "main-file.Rtex", + compiler: this.compiler, + image: this.image, + timeout: (this.timeout = 42000) + }, + this.callback); + }); - it "should run the latex command on the equivalent .tex file", -> - command = @CommandRunner.run.args[0][1] - mainFile = command.slice(-1)[0] - mainFile.should.equal "$COMPILE_DIR/main-file.tex" + return it("should run the latex command on the equivalent .tex file", function() { + const command = this.CommandRunner.run.args[0][1]; + const mainFile = command.slice(-1)[0]; + return mainFile.should.equal("$COMPILE_DIR/main-file.tex"); + }); + }); - describe "with a flags option", -> - beforeEach -> - @LatexRunner.runLatex @project_id, - directory: @directory - mainFile: @mainFile - compiler: @compiler - image: @image - timeout: @timeout = 42000 + return describe("with a flags option", function() { + beforeEach(function() { + return this.LatexRunner.runLatex(this.project_id, { + directory: this.directory, + mainFile: 
this.mainFile, + compiler: this.compiler, + image: this.image, + timeout: (this.timeout = 42000), flags: ["-file-line-error", "-halt-on-error"] - @callback + }, + this.callback); + }); - it "should include the flags in the command", -> - command = @CommandRunner.run.args[0][1] - flags = command.filter (arg) -> - (arg == "-file-line-error") || (arg == "-halt-on-error") - flags.length.should.equal 2 - flags[0].should.equal "-file-line-error" - flags[1].should.equal "-halt-on-error" + return it("should include the flags in the command", function() { + const command = this.CommandRunner.run.args[0][1]; + const flags = command.filter(arg => (arg === "-file-line-error") || (arg === "-halt-on-error")); + flags.length.should.equal(2); + flags[0].should.equal("-file-line-error"); + return flags[1].should.equal("-halt-on-error"); + }); + }); + }); +}); diff --git a/test/unit/coffee/LockManagerTests.js b/test/unit/coffee/LockManagerTests.js index 9dd1d46..d716a44 100644 --- a/test/unit/coffee/LockManagerTests.js +++ b/test/unit/coffee/LockManagerTests.js @@ -1,57 +1,77 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/LockManager' -Path = require "path" -Errors = require "../../../app/js/Errors" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/LockManager'); +const Path = require("path"); +const Errors = require("../../../app/js/Errors"); -describe "DockerLockManager", -> - beforeEach -> - @LockManager = SandboxedModule.require modulePath, requires: - "settings-sharelatex": {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err:-> } - "fs": - lstat:sinon.stub().callsArgWith(1) +describe("DockerLockManager", function() { + beforeEach(function() { + this.LockManager = SandboxedModule.require(modulePath, { requires: { + "settings-sharelatex": {}, + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), err() {} }), + "fs": { + lstat:sinon.stub().callsArgWith(1), readdir: sinon.stub().callsArgWith(1) - "lockfile": @Lockfile = {} - @lockFile = "/local/compile/directory/.project-lock" + }, + "lockfile": (this.Lockfile = {}) + } + }); + return this.lockFile = "/local/compile/directory/.project-lock"; + }); - describe "runWithLock", -> - beforeEach -> - @runner = sinon.stub().callsArgWith(0, null, "foo", "bar") - @callback = sinon.stub() + return describe("runWithLock", function() { + beforeEach(function() { + this.runner = sinon.stub().callsArgWith(0, null, "foo", "bar"); + return this.callback = sinon.stub(); + }); - describe "normally", -> - beforeEach -> - @Lockfile.lock = sinon.stub().callsArgWith(2, null) - @Lockfile.unlock = sinon.stub().callsArgWith(1, null) - @LockManager.runWithLock @lockFile, @runner, @callback + describe("normally", function() { + beforeEach(function() { + this.Lockfile.lock = sinon.stub().callsArgWith(2, null); + this.Lockfile.unlock = sinon.stub().callsArgWith(1, null); + return this.LockManager.runWithLock(this.lockFile, this.runner, this.callback); + }); - it "should run the compile", -> - @runner + it("should run the compile", function() { + return this.runner .calledWith() - 
.should.equal true + .should.equal(true); + }); - it "should call the callback with the response from the compile", -> - @callback + return it("should call the callback with the response from the compile", function() { + return this.callback .calledWithExactly(null, "foo", "bar") - .should.equal true + .should.equal(true); + }); + }); - describe "when the project is locked", -> - beforeEach -> - @error = new Error() - @error.code = "EEXIST" - @Lockfile.lock = sinon.stub().callsArgWith(2,@error) - @Lockfile.unlock = sinon.stub().callsArgWith(1, null) - @LockManager.runWithLock @lockFile, @runner, @callback + return describe("when the project is locked", function() { + beforeEach(function() { + this.error = new Error(); + this.error.code = "EEXIST"; + this.Lockfile.lock = sinon.stub().callsArgWith(2,this.error); + this.Lockfile.unlock = sinon.stub().callsArgWith(1, null); + return this.LockManager.runWithLock(this.lockFile, this.runner, this.callback); + }); - it "should not run the compile", -> - @runner + it("should not run the compile", function() { + return this.runner .called - .should.equal false + .should.equal(false); + }); - it "should return an error", -> - error = new Errors.AlreadyCompilingError() - @callback + return it("should return an error", function() { + const error = new Errors.AlreadyCompilingError(); + return this.callback .calledWithExactly(error) - .should.equal true + .should.equal(true); + }); + }); + }); +}); diff --git a/test/unit/coffee/OutputFileFinderTests.js b/test/unit/coffee/OutputFileFinderTests.js index 46d8c1f..3292d0a 100644 --- a/test/unit/coffee/OutputFileFinderTests.js +++ b/test/unit/coffee/OutputFileFinderTests.js @@ -1,68 +1,92 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/OutputFileFinder' -path = require "path" -expect = require("chai").expect -EventEmitter = require("events").EventEmitter +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/OutputFileFinder'); +const path = require("path"); +const { expect } = require("chai"); +const { EventEmitter } = require("events"); -describe "OutputFileFinder", -> - beforeEach -> - @OutputFileFinder = SandboxedModule.require modulePath, requires: - "fs": @fs = {} - "child_process": spawn: @spawn = sinon.stub() +describe("OutputFileFinder", function() { + beforeEach(function() { + this.OutputFileFinder = SandboxedModule.require(modulePath, { requires: { + "fs": (this.fs = {}), + "child_process": { spawn: (this.spawn = sinon.stub()) + }, "logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() } - @directory = "/test/dir" - @callback = sinon.stub() + } + }); + this.directory = "/test/dir"; + return this.callback = sinon.stub(); + }); - describe "findOutputFiles", -> - beforeEach -> - @resource_path = "resource/path.tex" - @output_paths = ["output.pdf", "extra/file.tex"] - @all_paths = @output_paths.concat [@resource_path] - @resources = [ - path: @resource_path = "resource/path.tex" - ] - @OutputFileFinder._getAllFiles = sinon.stub().callsArgWith(1, null, @all_paths) - @OutputFileFinder.findOutputFiles @resources, @directory, (error, @outputFiles) => + 
describe("findOutputFiles", function() { + beforeEach(function() { + this.resource_path = "resource/path.tex"; + this.output_paths = ["output.pdf", "extra/file.tex"]; + this.all_paths = this.output_paths.concat([this.resource_path]); + this.resources = [ + {path: (this.resource_path = "resource/path.tex")} + ]; + this.OutputFileFinder._getAllFiles = sinon.stub().callsArgWith(1, null, this.all_paths); + return this.OutputFileFinder.findOutputFiles(this.resources, this.directory, (error, outputFiles) => { + this.outputFiles = outputFiles; + + }); + }); - it "should only return the output files, not directories or resource paths", -> - expect(@outputFiles).to.deep.equal [{ - path: "output.pdf" + return it("should only return the output files, not directories or resource paths", function() { + return expect(this.outputFiles).to.deep.equal([{ + path: "output.pdf", type: "pdf" }, { path: "extra/file.tex", type: "tex" - }] + }]); + }); +}); - describe "_getAllFiles", -> - beforeEach -> - @proc = new EventEmitter() - @proc.stdout = new EventEmitter() - @spawn.returns @proc - @directory = "/base/dir" - @OutputFileFinder._getAllFiles @directory, @callback + return describe("_getAllFiles", function() { + beforeEach(function() { + this.proc = new EventEmitter(); + this.proc.stdout = new EventEmitter(); + this.spawn.returns(this.proc); + this.directory = "/base/dir"; + return this.OutputFileFinder._getAllFiles(this.directory, this.callback); + }); - describe "successfully", -> - beforeEach -> - @proc.stdout.emit( + describe("successfully", function() { + beforeEach(function() { + this.proc.stdout.emit( "data", ["/base/dir/main.tex", "/base/dir/chapters/chapter1.tex"].join("\n") + "\n" - ) - @proc.emit "close", 0 + ); + return this.proc.emit("close", 0); + }); - it "should call the callback with the relative file paths", -> - @callback.calledWith( + return it("should call the callback with the relative file paths", function() { + return this.callback.calledWith( null, ["main.tex", "chapters/chapter1.tex"] - ).should.equal true + ).should.equal(true); + }); + }); - describe "when the directory doesn't exist", -> - beforeEach -> - @proc.emit "close", 1 + return describe("when the directory doesn't exist", function() { + beforeEach(function() { + return this.proc.emit("close", 1); + }); - it "should call the callback with a blank array", -> - @callback.calledWith( + return it("should call the callback with a blank array", function() { + return this.callback.calledWith( null, [] - ).should.equal true + ).should.equal(true); + }); + }); + }); +}); diff --git a/test/unit/coffee/OutputFileOptimiserTests.js b/test/unit/coffee/OutputFileOptimiserTests.js index 2988715..8934c71 100644 --- a/test/unit/coffee/OutputFileOptimiserTests.js +++ b/test/unit/coffee/OutputFileOptimiserTests.js @@ -1,103 +1,141 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/OutputFileOptimiser' -path = require "path" -expect = require("chai").expect -EventEmitter = require("events").EventEmitter +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/OutputFileOptimiser'); +const path = 
require("path"); +const { expect } = require("chai"); +const { EventEmitter } = require("events"); -describe "OutputFileOptimiser", -> - beforeEach -> - @OutputFileOptimiser = SandboxedModule.require modulePath, requires: - "fs": @fs = {} - "path": @Path = {} - "child_process": spawn: @spawn = sinon.stub() - "logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() } +describe("OutputFileOptimiser", function() { + beforeEach(function() { + this.OutputFileOptimiser = SandboxedModule.require(modulePath, { requires: { + "fs": (this.fs = {}), + "path": (this.Path = {}), + "child_process": { spawn: (this.spawn = sinon.stub()) + }, + "logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() }, "./Metrics" : {} - @directory = "/test/dir" - @callback = sinon.stub() + } + }); + this.directory = "/test/dir"; + return this.callback = sinon.stub(); + }); - describe "optimiseFile", -> - beforeEach -> - @src = "./output.pdf" - @dst = "./output.pdf" + describe("optimiseFile", function() { + beforeEach(function() { + this.src = "./output.pdf"; + return this.dst = "./output.pdf"; + }); - describe "when the file is not a pdf file", -> - beforeEach (done)-> - @src = "./output.log" - @OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false) - @OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null) - @OutputFileOptimiser.optimiseFile @src, @dst, done + describe("when the file is not a pdf file", function() { + beforeEach(function(done){ + this.src = "./output.log"; + this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false); + this.OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null); + return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done); + }); - it "should not check if the file is optimised", -> - @OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal false + it("should not check if the file is optimised", function() { + return this.OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(this.src).should.equal(false); + }); - it "should not optimise the file", -> - @OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal false + return it("should not optimise the file", function() { + return this.OutputFileOptimiser.optimisePDF.calledWith(this.src, this.dst).should.equal(false); + }); + }); - describe "when the pdf file is not optimised", -> - beforeEach (done) -> - @OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false) - @OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null) - @OutputFileOptimiser.optimiseFile @src, @dst, done + describe("when the pdf file is not optimised", function() { + beforeEach(function(done) { + this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false); + this.OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null); + return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done); + }); - it "should check if the pdf is optimised", -> - @OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal true + it("should check if the pdf is optimised", function() { + return this.OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(this.src).should.equal(true); + }); - it "should optimise the pdf", -> - @OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal true + return it("should optimise the pdf", function() { + return this.OutputFileOptimiser.optimisePDF.calledWith(this.src, this.dst).should.equal(true); + }); + }); - 
describe "when the pdf file is optimised", -> - beforeEach (done) -> - @OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, true) - @OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null) - @OutputFileOptimiser.optimiseFile @src, @dst, done + return describe("when the pdf file is optimised", function() { + beforeEach(function(done) { + this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, true); + this.OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null); + return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done); + }); - it "should check if the pdf is optimised", -> - @OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal true + it("should check if the pdf is optimised", function() { + return this.OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(this.src).should.equal(true); + }); - it "should not optimise the pdf", -> - @OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal false + return it("should not optimise the pdf", function() { + return this.OutputFileOptimiser.optimisePDF.calledWith(this.src, this.dst).should.equal(false); + }); + }); + }); - describe "checkIfPDFISOptimised", -> - beforeEach () -> - @callback = sinon.stub() - @fd = 1234 - @fs.open = sinon.stub().yields(null, @fd) - @fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello /Linearized 1")) - @fs.close = sinon.stub().withArgs(@fd).yields(null) - @OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback + return describe("checkIfPDFISOptimised", function() { + beforeEach(function() { + this.callback = sinon.stub(); + this.fd = 1234; + this.fs.open = sinon.stub().yields(null, this.fd); + this.fs.read = sinon.stub().withArgs(this.fd).yields(null, 100, new Buffer("hello /Linearized 1")); + this.fs.close = sinon.stub().withArgs(this.fd).yields(null); + return this.OutputFileOptimiser.checkIfPDFIsOptimised(this.src, this.callback); + }); - describe "for a linearised file", -> - beforeEach () -> - @fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello /Linearized 1")) - @OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback + describe("for a linearised file", function() { + beforeEach(function() { + this.fs.read = sinon.stub().withArgs(this.fd).yields(null, 100, new Buffer("hello /Linearized 1")); + return this.OutputFileOptimiser.checkIfPDFIsOptimised(this.src, this.callback); + }); - it "should open the file", -> - @fs.open.calledWith(@src, "r").should.equal true + it("should open the file", function() { + return this.fs.open.calledWith(this.src, "r").should.equal(true); + }); - it "should read the header", -> - @fs.read.calledWith(@fd).should.equal true + it("should read the header", function() { + return this.fs.read.calledWith(this.fd).should.equal(true); + }); - it "should close the file", -> - @fs.close.calledWith(@fd).should.equal true + it("should close the file", function() { + return this.fs.close.calledWith(this.fd).should.equal(true); + }); - it "should call the callback with a true result", -> - @callback.calledWith(null, true).should.equal true + return it("should call the callback with a true result", function() { + return this.callback.calledWith(null, true).should.equal(true); + }); + }); - describe "for an unlinearised file", -> - beforeEach () -> - @fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello not linearized 1")) - @OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback + return describe("for an 
unlinearised file", function() { + beforeEach(function() { + this.fs.read = sinon.stub().withArgs(this.fd).yields(null, 100, new Buffer("hello not linearized 1")); + return this.OutputFileOptimiser.checkIfPDFIsOptimised(this.src, this.callback); + }); - it "should open the file", -> - @fs.open.calledWith(@src, "r").should.equal true + it("should open the file", function() { + return this.fs.open.calledWith(this.src, "r").should.equal(true); + }); - it "should read the header", -> - @fs.read.calledWith(@fd).should.equal true + it("should read the header", function() { + return this.fs.read.calledWith(this.fd).should.equal(true); + }); - it "should close the file", -> - @fs.close.calledWith(@fd).should.equal true + it("should close the file", function() { + return this.fs.close.calledWith(this.fd).should.equal(true); + }); - it "should call the callback with a false result", -> - @callback.calledWith(null, false).should.equal true + return it("should call the callback with a false result", function() { + return this.callback.calledWith(null, false).should.equal(true); + }); + }); + }); +}); diff --git a/test/unit/coffee/ProjectPersistenceManagerTests.js b/test/unit/coffee/ProjectPersistenceManagerTests.js index 69bfd4f..c15cd80 100644 --- a/test/unit/coffee/ProjectPersistenceManagerTests.js +++ b/test/unit/coffee/ProjectPersistenceManagerTests.js @@ -1,62 +1,82 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/ProjectPersistenceManager' -tk = require("timekeeper") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/ProjectPersistenceManager'); +const tk = require("timekeeper"); -describe "ProjectPersistenceManager", -> - beforeEach -> - @ProjectPersistenceManager = SandboxedModule.require modulePath, requires: - "./UrlCache": @UrlCache = {} - "./CompileManager": @CompileManager = {} - "logger-sharelatex": @logger = { log: sinon.stub() } - "./db": @db = {} - @callback = sinon.stub() - @project_id = "project-id-123" - @user_id = "1234" +describe("ProjectPersistenceManager", function() { + beforeEach(function() { + this.ProjectPersistenceManager = SandboxedModule.require(modulePath, { requires: { + "./UrlCache": (this.UrlCache = {}), + "./CompileManager": (this.CompileManager = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub() }), + "./db": (this.db = {}) + } + }); + this.callback = sinon.stub(); + this.project_id = "project-id-123"; + return this.user_id = "1234"; + }); - describe "clearExpiredProjects", -> - beforeEach -> - @project_ids = [ - "project-id-1" + describe("clearExpiredProjects", function() { + beforeEach(function() { + this.project_ids = [ + "project-id-1", "project-id-2" - ] - @ProjectPersistenceManager._findExpiredProjectIds = sinon.stub().callsArgWith(0, null, @project_ids) - @ProjectPersistenceManager.clearProjectFromCache = sinon.stub().callsArg(1) - @CompileManager.clearExpiredProjects = sinon.stub().callsArg(1) - @ProjectPersistenceManager.clearExpiredProjects @callback + ]; + this.ProjectPersistenceManager._findExpiredProjectIds = sinon.stub().callsArgWith(0, null, 
this.project_ids); + this.ProjectPersistenceManager.clearProjectFromCache = sinon.stub().callsArg(1); + this.CompileManager.clearExpiredProjects = sinon.stub().callsArg(1); + return this.ProjectPersistenceManager.clearExpiredProjects(this.callback); + }); - it "should clear each expired project", -> - for project_id in @project_ids - @ProjectPersistenceManager.clearProjectFromCache + it("should clear each expired project", function() { + return Array.from(this.project_ids).map((project_id) => + this.ProjectPersistenceManager.clearProjectFromCache .calledWith(project_id) - .should.equal true + .should.equal(true)); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "clearProject", -> - beforeEach -> - @ProjectPersistenceManager._clearProjectFromDatabase = sinon.stub().callsArg(1) - @UrlCache.clearProject = sinon.stub().callsArg(1) - @CompileManager.clearProject = sinon.stub().callsArg(2) - @ProjectPersistenceManager.clearProject @project_id, @user_id, @callback + return describe("clearProject", function() { + beforeEach(function() { + this.ProjectPersistenceManager._clearProjectFromDatabase = sinon.stub().callsArg(1); + this.UrlCache.clearProject = sinon.stub().callsArg(1); + this.CompileManager.clearProject = sinon.stub().callsArg(2); + return this.ProjectPersistenceManager.clearProject(this.project_id, this.user_id, this.callback); + }); - it "should clear the project from the database", -> - @ProjectPersistenceManager._clearProjectFromDatabase - .calledWith(@project_id) - .should.equal true + it("should clear the project from the database", function() { + return this.ProjectPersistenceManager._clearProjectFromDatabase + .calledWith(this.project_id) + .should.equal(true); + }); - it "should clear all the cached Urls for the project", -> - @UrlCache.clearProject - .calledWith(@project_id) - .should.equal true + it("should clear all the cached Urls for the project", function() { + return this.UrlCache.clearProject + .calledWith(this.project_id) + .should.equal(true); + }); - it "should clear the project compile folder", -> - @CompileManager.clearProject - .calledWith(@project_id, @user_id) - .should.equal true + it("should clear the project compile folder", function() { + return this.CompileManager.clearProject + .calledWith(this.project_id, this.user_id) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); +}); diff --git a/test/unit/coffee/RequestParserTests.js b/test/unit/coffee/RequestParserTests.js index e263e49..5ca0941 100644 --- a/test/unit/coffee/RequestParserTests.js +++ b/test/unit/coffee/RequestParserTests.js @@ -1,279 +1,380 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -expect = require('chai').expect -modulePath = require('path').join __dirname, '../../../app/js/RequestParser' -tk = require("timekeeper") +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const { expect } = require('chai'); +const modulePath = require('path').join(__dirname, 
'../../../app/js/RequestParser'); +const tk = require("timekeeper"); -describe "RequestParser", -> - beforeEach -> - tk.freeze() - @callback = sinon.stub() - @validResource = - path: "main.tex" - date: "12:00 01/02/03" +describe("RequestParser", function() { + beforeEach(function() { + tk.freeze(); + this.callback = sinon.stub(); + this.validResource = { + path: "main.tex", + date: "12:00 01/02/03", content: "Hello world" - @validRequest = - compile: - token: "token-123" - options: - imageName: "basicImageName/here:2017-1" - compiler: "pdflatex" + }; + this.validRequest = { + compile: { + token: "token-123", + options: { + imageName: "basicImageName/here:2017-1", + compiler: "pdflatex", timeout: 42 + }, resources: [] - @RequestParser = SandboxedModule.require modulePath, requires: - "settings-sharelatex": @settings = {} + } + }; + return this.RequestParser = SandboxedModule.require(modulePath, { requires: { + "settings-sharelatex": (this.settings = {}) + } + });}); - afterEach -> - tk.reset() + afterEach(() => tk.reset()); - describe "without a top level object", -> - beforeEach -> - @RequestParser.parse [], @callback + describe("without a top level object", function() { + beforeEach(function() { + return this.RequestParser.parse([], this.callback); + }); - it "should return an error", -> - @callback.calledWith("top level object should have a compile attribute") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("top level object should have a compile attribute") + .should.equal(true); + }); + }); - describe "without a compile attribute", -> - beforeEach -> - @RequestParser.parse {}, @callback + describe("without a compile attribute", function() { + beforeEach(function() { + return this.RequestParser.parse({}, this.callback); + }); - it "should return an error", -> - @callback.calledWith("top level object should have a compile attribute") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("top level object should have a compile attribute") + .should.equal(true); + }); + }); - describe "without a valid compiler", -> - beforeEach -> - @validRequest.compile.options.compiler = "not-a-compiler" - @RequestParser.parse @validRequest, @callback + describe("without a valid compiler", function() { + beforeEach(function() { + this.validRequest.compile.options.compiler = "not-a-compiler"; + return this.RequestParser.parse(this.validRequest, this.callback); + }); - it "should return an error", -> - @callback.calledWith("compiler attribute should be one of: pdflatex, latex, xelatex, lualatex") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("compiler attribute should be one of: pdflatex, latex, xelatex, lualatex") + .should.equal(true); + }); + }); - describe "without a compiler specified", -> - beforeEach -> - delete @validRequest.compile.options.compiler - @RequestParser.parse @validRequest, (error, @data) => + describe("without a compiler specified", function() { + beforeEach(function() { + delete this.validRequest.compile.options.compiler; + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data; + + }); + }); - it "should set the compiler to pdflatex by default", -> - @data.compiler.should.equal "pdflatex" + return it("should set the compiler to pdflatex by default", function() { + return this.data.compiler.should.equal("pdflatex"); + }); + }); - describe "with imageName set", -> - beforeEach -> - 
@RequestParser.parse @validRequest, (error, @data) => + describe("with imageName set", function() { + beforeEach(function() { + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data; + + }); + }); - it "should set the imageName", -> - @data.imageName.should.equal "basicImageName/here:2017-1" + return it("should set the imageName", function() { + return this.data.imageName.should.equal("basicImageName/here:2017-1"); + }); + }); - describe "with flags set", -> - beforeEach -> - @validRequest.compile.options.flags = ["-file-line-error"] - @RequestParser.parse @validRequest, (error, @data) => + describe("with flags set", function() { + beforeEach(function() { + this.validRequest.compile.options.flags = ["-file-line-error"]; + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data; + + }); + }); - it "should set the flags attribute", -> - expect(@data.flags).to.deep.equal ["-file-line-error"] + return it("should set the flags attribute", function() { + return expect(this.data.flags).to.deep.equal(["-file-line-error"]); + }); +}); - describe "with flags not specified", -> - beforeEach -> - @RequestParser.parse @validRequest, (error, @data) => + describe("with flags not specified", function() { + beforeEach(function() { + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data; + + }); + }); - it "it should have an empty flags list", -> - expect(@data.flags).to.deep.equal [] + return it("it should have an empty flags list", function() { + return expect(this.data.flags).to.deep.equal([]); + }); +}); - describe "without a timeout specified", -> - beforeEach -> - delete @validRequest.compile.options.timeout - @RequestParser.parse @validRequest, (error, @data) => + describe("without a timeout specified", function() { + beforeEach(function() { + delete this.validRequest.compile.options.timeout; + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data; + + }); + }); - it "should set the timeout to MAX_TIMEOUT", -> - @data.timeout.should.equal @RequestParser.MAX_TIMEOUT * 1000 + return it("should set the timeout to MAX_TIMEOUT", function() { + return this.data.timeout.should.equal(this.RequestParser.MAX_TIMEOUT * 1000); + }); + }); - describe "with a timeout larger than the maximum", -> - beforeEach -> - @validRequest.compile.options.timeout = @RequestParser.MAX_TIMEOUT + 1 - @RequestParser.parse @validRequest, (error, @data) => + describe("with a timeout larger than the maximum", function() { + beforeEach(function() { + this.validRequest.compile.options.timeout = this.RequestParser.MAX_TIMEOUT + 1; + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data; + + }); + }); - it "should set the timeout to MAX_TIMEOUT", -> - @data.timeout.should.equal @RequestParser.MAX_TIMEOUT * 1000 + return it("should set the timeout to MAX_TIMEOUT", function() { + return this.data.timeout.should.equal(this.RequestParser.MAX_TIMEOUT * 1000); + }); + }); - describe "with a timeout", -> - beforeEach -> - @RequestParser.parse @validRequest, (error, @data) => + describe("with a timeout", function() { + beforeEach(function() { + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data; + + }); + }); - it "should set the timeout (in milliseconds)", -> - @data.timeout.should.equal @validRequest.compile.options.timeout * 1000 + return it("should set the timeout (in milliseconds)", function() { + return 
this.data.timeout.should.equal(this.validRequest.compile.options.timeout * 1000); + }); + }); - describe "with a resource without a path", -> - beforeEach -> - delete @validResource.path - @validRequest.compile.resources.push @validResource - @RequestParser.parse @validRequest, @callback + describe("with a resource without a path", function() { + beforeEach(function() { + delete this.validResource.path; + this.validRequest.compile.resources.push(this.validResource); + return this.RequestParser.parse(this.validRequest, this.callback); + }); - it "should return an error", -> - @callback.calledWith("all resources should have a path attribute") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("all resources should have a path attribute") + .should.equal(true); + }); + }); - describe "with a resource with a path", -> - beforeEach -> - @validResource.path = @path = "test.tex" - @validRequest.compile.resources.push @validResource - @RequestParser.parse @validRequest, @callback - @data = @callback.args[0][1] + describe("with a resource with a path", function() { + beforeEach(function() { + this.validResource.path = (this.path = "test.tex"); + this.validRequest.compile.resources.push(this.validResource); + this.RequestParser.parse(this.validRequest, this.callback); + return this.data = this.callback.args[0][1];}); - it "should return the path in the parsed response", -> - @data.resources[0].path.should.equal @path + return it("should return the path in the parsed response", function() { + return this.data.resources[0].path.should.equal(this.path); + }); + }); - describe "with a resource with a malformed modified date", -> - beforeEach -> - @validResource.modified = "not-a-date" - @validRequest.compile.resources.push @validResource - @RequestParser.parse @validRequest, @callback + describe("with a resource with a malformed modified date", function() { + beforeEach(function() { + this.validResource.modified = "not-a-date"; + this.validRequest.compile.resources.push(this.validResource); + return this.RequestParser.parse(this.validRequest, this.callback); + }); - it "should return an error", -> - @callback + return it("should return an error", function() { + return this.callback .calledWith( "resource modified date could not be understood: "+ - @validResource.modified + this.validResource.modified ) - .should.equal true + .should.equal(true); + }); + }); - describe "with a resource with a valid date", -> - beforeEach -> - @date = "12:00 01/02/03" - @validResource.modified = @date - @validRequest.compile.resources.push @validResource - @RequestParser.parse @validRequest, @callback - @data = @callback.args[0][1] + describe("with a resource with a valid date", function() { + beforeEach(function() { + this.date = "12:00 01/02/03"; + this.validResource.modified = this.date; + this.validRequest.compile.resources.push(this.validResource); + this.RequestParser.parse(this.validRequest, this.callback); + return this.data = this.callback.args[0][1];}); - it "should return the date as a Javascript Date object", -> - (@data.resources[0].modified instanceof Date).should.equal true - @data.resources[0].modified.getTime().should.equal Date.parse(@date) + return it("should return the date as a Javascript Date object", function() { + (this.data.resources[0].modified instanceof Date).should.equal(true); + return this.data.resources[0].modified.getTime().should.equal(Date.parse(this.date)); + }); + }); - describe "with a resource without either a content or URL 
attribute", -> - beforeEach -> - delete @validResource.url - delete @validResource.content - @validRequest.compile.resources.push @validResource - @RequestParser.parse @validRequest, @callback + describe("with a resource without either a content or URL attribute", function() { + beforeEach(function() { + delete this.validResource.url; + delete this.validResource.content; + this.validRequest.compile.resources.push(this.validResource); + return this.RequestParser.parse(this.validRequest, this.callback); + }); - it "should return an error", -> - @callback.calledWith("all resources should have either a url or content attribute") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("all resources should have either a url or content attribute") + .should.equal(true); + }); + }); - describe "with a resource where the content is not a string", -> - beforeEach -> - @validResource.content = [] - @validRequest.compile.resources.push @validResource - @RequestParser.parse (@validRequest), @callback + describe("with a resource where the content is not a string", function() { + beforeEach(function() { + this.validResource.content = []; + this.validRequest.compile.resources.push(this.validResource); + return this.RequestParser.parse((this.validRequest), this.callback); + }); - it "should return an error", -> - @callback.calledWith("content attribute should be a string") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("content attribute should be a string") + .should.equal(true); + }); + }); - describe "with a resource where the url is not a string", -> - beforeEach -> - @validResource.url = [] - @validRequest.compile.resources.push @validResource - @RequestParser.parse (@validRequest), @callback + describe("with a resource where the url is not a string", function() { + beforeEach(function() { + this.validResource.url = []; + this.validRequest.compile.resources.push(this.validResource); + return this.RequestParser.parse((this.validRequest), this.callback); + }); - it "should return an error", -> - @callback.calledWith("url attribute should be a string") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("url attribute should be a string") + .should.equal(true); + }); + }); - describe "with a resource with a url", -> - beforeEach -> - @validResource.url = @url = "www.example.com" - @validRequest.compile.resources.push @validResource - @RequestParser.parse (@validRequest), @callback - @data = @callback.args[0][1] + describe("with a resource with a url", function() { + beforeEach(function() { + this.validResource.url = (this.url = "www.example.com"); + this.validRequest.compile.resources.push(this.validResource); + this.RequestParser.parse((this.validRequest), this.callback); + return this.data = this.callback.args[0][1];}); - it "should return the url in the parsed response", -> - @data.resources[0].url.should.equal @url + return it("should return the url in the parsed response", function() { + return this.data.resources[0].url.should.equal(this.url); + }); + }); - describe "with a resource with a content attribute", -> - beforeEach -> - @validResource.content = @content = "Hello world" - @validRequest.compile.resources.push @validResource - @RequestParser.parse (@validRequest), @callback - @data = @callback.args[0][1] + describe("with a resource with a content attribute", function() { + beforeEach(function() { + this.validResource.content = 
(this.content = "Hello world"); + this.validRequest.compile.resources.push(this.validResource); + this.RequestParser.parse((this.validRequest), this.callback); + return this.data = this.callback.args[0][1];}); - it "should return the content in the parsed response", -> - @data.resources[0].content.should.equal @content + return it("should return the content in the parsed response", function() { + return this.data.resources[0].content.should.equal(this.content); + }); + }); - describe "without a root resource path", -> - beforeEach -> - delete @validRequest.compile.rootResourcePath - @RequestParser.parse (@validRequest), @callback - @data = @callback.args[0][1] + describe("without a root resource path", function() { + beforeEach(function() { + delete this.validRequest.compile.rootResourcePath; + this.RequestParser.parse((this.validRequest), this.callback); + return this.data = this.callback.args[0][1];}); - it "should set the root resource path to 'main.tex' by default", -> - @data.rootResourcePath.should.equal "main.tex" + return it("should set the root resource path to 'main.tex' by default", function() { + return this.data.rootResourcePath.should.equal("main.tex"); + }); + }); - describe "with a root resource path", -> - beforeEach -> - @validRequest.compile.rootResourcePath = @path = "test.tex" - @RequestParser.parse (@validRequest), @callback - @data = @callback.args[0][1] + describe("with a root resource path", function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = (this.path = "test.tex"); + this.RequestParser.parse((this.validRequest), this.callback); + return this.data = this.callback.args[0][1];}); - it "should return the root resource path in the parsed response", -> - @data.rootResourcePath.should.equal @path + return it("should return the root resource path in the parsed response", function() { + return this.data.rootResourcePath.should.equal(this.path); + }); + }); - describe "with a root resource path that is not a string", -> - beforeEach -> - @validRequest.compile.rootResourcePath = [] - @RequestParser.parse (@validRequest), @callback + describe("with a root resource path that is not a string", function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = []; + return this.RequestParser.parse((this.validRequest), this.callback); + }); - it "should return an error", -> - @callback.calledWith("rootResourcePath attribute should be a string") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("rootResourcePath attribute should be a string") + .should.equal(true); + }); + }); - describe "with a root resource path that needs escaping", -> - beforeEach -> - @badPath = "`rm -rf foo`.tex" - @goodPath = "rm -rf foo.tex" - @validRequest.compile.rootResourcePath = @badPath - @validRequest.compile.resources.push { - path: @badPath - date: "12:00 01/02/03" + describe("with a root resource path that needs escaping", function() { + beforeEach(function() { + this.badPath = "`rm -rf foo`.tex"; + this.goodPath = "rm -rf foo.tex"; + this.validRequest.compile.rootResourcePath = this.badPath; + this.validRequest.compile.resources.push({ + path: this.badPath, + date: "12:00 01/02/03", content: "Hello world" - } - @RequestParser.parse @validRequest, @callback - @data = @callback.args[0][1] + }); + this.RequestParser.parse(this.validRequest, this.callback); + return this.data = this.callback.args[0][1];}); - it "should return the escaped resource", -> - @data.rootResourcePath.should.equal 
@goodPath + it("should return the escaped resource", function() { + return this.data.rootResourcePath.should.equal(this.goodPath); + }); - it "should also escape the resource path", -> - @data.resources[0].path.should.equal @goodPath + return it("should also escape the resource path", function() { + return this.data.resources[0].path.should.equal(this.goodPath); + }); + }); - describe "with a root resource path that has a relative path", -> - beforeEach -> - @validRequest.compile.rootResourcePath = "foo/../../bar.tex" - @RequestParser.parse @validRequest, @callback - @data = @callback.args[0][1] + describe("with a root resource path that has a relative path", function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = "foo/../../bar.tex"; + this.RequestParser.parse(this.validRequest, this.callback); + return this.data = this.callback.args[0][1];}); - it "should return an error", -> - @callback.calledWith("relative path in root resource") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("relative path in root resource") + .should.equal(true); + }); + }); - describe "with a root resource path that has unescaped + relative path", -> - beforeEach -> - @validRequest.compile.rootResourcePath = "foo/#../bar.tex" - @RequestParser.parse @validRequest, @callback - @data = @callback.args[0][1] + describe("with a root resource path that has unescaped + relative path", function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = "foo/#../bar.tex"; + this.RequestParser.parse(this.validRequest, this.callback); + return this.data = this.callback.args[0][1];}); - it "should return an error", -> - @callback.calledWith("relative path in root resource") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("relative path in root resource") + .should.equal(true); + }); + }); - describe "with an unknown syncType", -> - beforeEach -> - @validRequest.compile.options.syncType = "unexpected" - @RequestParser.parse @validRequest, @callback - @data = @callback.args[0][1] + return describe("with an unknown syncType", function() { + beforeEach(function() { + this.validRequest.compile.options.syncType = "unexpected"; + this.RequestParser.parse(this.validRequest, this.callback); + return this.data = this.callback.args[0][1];}); - it "should return an error", -> - @callback.calledWith("syncType attribute should be one of: full, incremental") - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith("syncType attribute should be one of: full, incremental") + .should.equal(true); + }); + }); +}); diff --git a/test/unit/coffee/ResourceStateManagerTests.js b/test/unit/coffee/ResourceStateManagerTests.js index e5e1c13..4b09135 100644 --- a/test/unit/coffee/ResourceStateManagerTests.js +++ b/test/unit/coffee/ResourceStateManagerTests.js @@ -1,109 +1,147 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -should = require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/ResourceStateManager' -Path = require "path" -Errors = require "../../../app/js/Errors" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +const should = 
require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/ResourceStateManager'); +const Path = require("path"); +const Errors = require("../../../app/js/Errors"); -describe "ResourceStateManager", -> - beforeEach -> - @ResourceStateManager = SandboxedModule.require modulePath, requires: - "fs": @fs = {} - "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()} - "./SafeReader": @SafeReader = {} - @basePath = "/path/to/write/files/to" - @resources = [ - {path: "resource-1-mock"} - {path: "resource-2-mock"} +describe("ResourceStateManager", function() { + beforeEach(function() { + this.ResourceStateManager = SandboxedModule.require(modulePath, { requires: { + "fs": (this.fs = {}), + "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()}, + "./SafeReader": (this.SafeReader = {}) + } + }); + this.basePath = "/path/to/write/files/to"; + this.resources = [ + {path: "resource-1-mock"}, + {path: "resource-2-mock"}, {path: "resource-3-mock"} - ] - @state = "1234567890" - @resourceFileName = "#{@basePath}/.project-sync-state" - @resourceFileContents = "#{@resources[0].path}\n#{@resources[1].path}\n#{@resources[2].path}\nstateHash:#{@state}" - @callback = sinon.stub() + ]; + this.state = "1234567890"; + this.resourceFileName = `${this.basePath}/.project-sync-state`; + this.resourceFileContents = `${this.resources[0].path}\n${this.resources[1].path}\n${this.resources[2].path}\nstateHash:${this.state}`; + return this.callback = sinon.stub(); + }); - describe "saveProjectState", -> - beforeEach -> - @fs.writeFile = sinon.stub().callsArg(2) + describe("saveProjectState", function() { + beforeEach(function() { + return this.fs.writeFile = sinon.stub().callsArg(2); + }); - describe "when the state is specified", -> - beforeEach -> - @ResourceStateManager.saveProjectState(@state, @resources, @basePath, @callback) + describe("when the state is specified", function() { + beforeEach(function() { + return this.ResourceStateManager.saveProjectState(this.state, this.resources, this.basePath, this.callback); + }); - it "should write the resource list to disk", -> - @fs.writeFile - .calledWith(@resourceFileName, @resourceFileContents) - .should.equal true + it("should write the resource list to disk", function() { + return this.fs.writeFile + .calledWith(this.resourceFileName, this.resourceFileContents) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "when the state is undefined", -> - beforeEach -> - @state = undefined - @fs.unlink = sinon.stub().callsArg(1) - @ResourceStateManager.saveProjectState(@state, @resources, @basePath, @callback) + return describe("when the state is undefined", function() { + beforeEach(function() { + this.state = undefined; + this.fs.unlink = sinon.stub().callsArg(1); + return this.ResourceStateManager.saveProjectState(this.state, this.resources, this.basePath, this.callback); + }); - it "should unlink the resource file", -> - @fs.unlink - .calledWith(@resourceFileName) - .should.equal true + it("should unlink the resource file", function() { + return this.fs.unlink + .calledWith(this.resourceFileName) + .should.equal(true); + }); - it "should not write the resource list to disk", -> - @fs.writeFile.called.should.equal false + it("should not write the resource list to disk", function() { + return this.fs.writeFile.called.should.equal(false); + }); - it "should 
call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); + }); - describe "checkProjectStateMatches", -> + describe("checkProjectStateMatches", function() { - describe "when the state matches", -> - beforeEach -> - @SafeReader.readFile = sinon.stub().callsArgWith(3, null, @resourceFileContents) - @ResourceStateManager.checkProjectStateMatches(@state, @basePath, @callback) + describe("when the state matches", function() { + beforeEach(function() { + this.SafeReader.readFile = sinon.stub().callsArgWith(3, null, this.resourceFileContents); + return this.ResourceStateManager.checkProjectStateMatches(this.state, this.basePath, this.callback); + }); - it "should read the resource file", -> - @SafeReader.readFile - .calledWith(@resourceFileName) - .should.equal true + it("should read the resource file", function() { + return this.SafeReader.readFile + .calledWith(this.resourceFileName) + .should.equal(true); + }); - it "should call the callback with the results", -> - @callback.calledWithMatch(null, @resources).should.equal true + return it("should call the callback with the results", function() { + return this.callback.calledWithMatch(null, this.resources).should.equal(true); + }); + }); - describe "when the state does not match", -> - beforeEach -> - @SafeReader.readFile = sinon.stub().callsArgWith(3, null, @resourceFileContents) - @ResourceStateManager.checkProjectStateMatches("not-the-original-state", @basePath, @callback) + return describe("when the state does not match", function() { + beforeEach(function() { + this.SafeReader.readFile = sinon.stub().callsArgWith(3, null, this.resourceFileContents); + return this.ResourceStateManager.checkProjectStateMatches("not-the-original-state", this.basePath, this.callback); + }); - it "should call the callback with an error", -> - error = new Errors.FilesOutOfSyncError("invalid state for incremental update") - @callback.calledWith(error).should.equal true + return it("should call the callback with an error", function() { + const error = new Errors.FilesOutOfSyncError("invalid state for incremental update"); + return this.callback.calledWith(error).should.equal(true); + }); + }); + }); - describe "checkResourceFiles", -> - describe "when all the files are present", -> - beforeEach -> - @allFiles = [ @resources[0].path, @resources[1].path, @resources[2].path] - @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback) + return describe("checkResourceFiles", function() { + describe("when all the files are present", function() { + beforeEach(function() { + this.allFiles = [ this.resources[0].path, this.resources[1].path, this.resources[2].path]; + return this.ResourceStateManager.checkResourceFiles(this.resources, this.allFiles, this.basePath, this.callback); + }); - it "should call the callback", -> - @callback.calledWithExactly().should.equal true + return it("should call the callback", function() { + return this.callback.calledWithExactly().should.equal(true); + }); + }); - describe "when there is a missing file", -> - beforeEach -> - @allFiles = [ @resources[0].path, @resources[1].path] - @fs.stat = sinon.stub().callsArgWith(1, new Error()) - @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback) + describe("when there is a missing file", function() { + beforeEach(function() { + this.allFiles = [ this.resources[0].path, this.resources[1].path]; + this.fs.stat = 
sinon.stub().callsArgWith(1, new Error()); + return this.ResourceStateManager.checkResourceFiles(this.resources, this.allFiles, this.basePath, this.callback); + }); - it "should call the callback with an error", -> - error = new Errors.FilesOutOfSyncError("resource files missing in incremental update") - @callback.calledWith(error).should.equal true + return it("should call the callback with an error", function() { + const error = new Errors.FilesOutOfSyncError("resource files missing in incremental update"); + return this.callback.calledWith(error).should.equal(true); + }); + }); - describe "when a resource contains a relative path", -> - beforeEach -> - @resources[0].path = "../foo/bar.tex" - @allFiles = [ @resources[0].path, @resources[1].path, @resources[2].path] - @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback) + return describe("when a resource contains a relative path", function() { + beforeEach(function() { + this.resources[0].path = "../foo/bar.tex"; + this.allFiles = [ this.resources[0].path, this.resources[1].path, this.resources[2].path]; + return this.ResourceStateManager.checkResourceFiles(this.resources, this.allFiles, this.basePath, this.callback); + }); - it "should call the callback with an error", -> - @callback.calledWith(new Error("relative path in resource file list")).should.equal true + return it("should call the callback with an error", function() { + return this.callback.calledWith(new Error("relative path in resource file list")).should.equal(true); + }); + }); + }); +}); diff --git a/test/unit/coffee/ResourceWriterTests.js b/test/unit/coffee/ResourceWriterTests.js index 4a88226..89433c8 100644 --- a/test/unit/coffee/ResourceWriterTests.js +++ b/test/unit/coffee/ResourceWriterTests.js @@ -1,324 +1,409 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -should = require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/ResourceWriter' -path = require "path" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +const should = require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/ResourceWriter'); +const path = require("path"); -describe "ResourceWriter", -> - beforeEach -> - @ResourceWriter = SandboxedModule.require modulePath, requires: - "fs": @fs = - mkdir: sinon.stub().callsArg(1) +describe("ResourceWriter", function() { + beforeEach(function() { + let Timer; + this.ResourceWriter = SandboxedModule.require(modulePath, { requires: { + "fs": (this.fs = { + mkdir: sinon.stub().callsArg(1), unlink: sinon.stub().callsArg(1) - "./ResourceStateManager": @ResourceStateManager = {} - "wrench": @wrench = {} - "./UrlCache" : @UrlCache = {} - "mkdirp" : @mkdirp = sinon.stub().callsArg(1) - "./OutputFileFinder": @OutputFileFinder = {} - "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()} - "./Metrics": @Metrics = - Timer: class Timer - done: sinon.stub() - @project_id = "project-id-123" - @basePath = "/path/to/write/files/to" - @callback = sinon.stub() + }), + "./ResourceStateManager": (this.ResourceStateManager = {}), + "wrench": (this.wrench = {}), + "./UrlCache" : (this.UrlCache = {}), + "mkdirp" 
: (this.mkdirp = sinon.stub().callsArg(1)), + "./OutputFileFinder": (this.OutputFileFinder = {}), + "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()}, + "./Metrics": (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub(); + } + }; + Timer.initClass(); + return Timer; + })()) + }) + } + } + ); + this.project_id = "project-id-123"; + this.basePath = "/path/to/write/files/to"; + return this.callback = sinon.stub(); + }); - describe "syncResourcesToDisk on a full request", -> - beforeEach -> - @resources = [ - "resource-1-mock" - "resource-2-mock" + describe("syncResourcesToDisk on a full request", function() { + beforeEach(function() { + this.resources = [ + "resource-1-mock", + "resource-2-mock", "resource-3-mock" - ] - @ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) - @ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2) - @ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) - @ResourceWriter.syncResourcesToDisk({ - project_id: @project_id - syncState: @syncState = "0123456789abcdef" - resources: @resources - }, @basePath, @callback) + ]; + this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3); + this.ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2); + this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3); + return this.ResourceWriter.syncResourcesToDisk({ + project_id: this.project_id, + syncState: (this.syncState = "0123456789abcdef"), + resources: this.resources + }, this.basePath, this.callback); + }); - it "should remove old files", -> - @ResourceWriter._removeExtraneousFiles - .calledWith(@resources, @basePath) - .should.equal true + it("should remove old files", function() { + return this.ResourceWriter._removeExtraneousFiles + .calledWith(this.resources, this.basePath) + .should.equal(true); + }); - it "should write each resource to disk", -> - for resource in @resources - @ResourceWriter._writeResourceToDisk - .calledWith(@project_id, resource, @basePath) - .should.equal true + it("should write each resource to disk", function() { + return Array.from(this.resources).map((resource) => + this.ResourceWriter._writeResourceToDisk + .calledWith(this.project_id, resource, this.basePath) + .should.equal(true)); + }); - it "should store the sync state and resource list", -> - @ResourceStateManager.saveProjectState - .calledWith(@syncState, @resources, @basePath) - .should.equal true + it("should store the sync state and resource list", function() { + return this.ResourceStateManager.saveProjectState + .calledWith(this.syncState, this.resources, this.basePath) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "syncResourcesToDisk on an incremental update", -> - beforeEach -> - @resources = [ + describe("syncResourcesToDisk on an incremental update", function() { + beforeEach(function() { + this.resources = [ "resource-1-mock" - ] - @ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) - @ResourceWriter._removeExtraneousFiles = sinon.stub().callsArgWith(2, null, @outputFiles = [], @allFiles = []) - @ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, null, @resources) - @ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) - @ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3) - 
@ResourceWriter.syncResourcesToDisk({ - project_id: @project_id, + ]; + this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3); + this.ResourceWriter._removeExtraneousFiles = sinon.stub().callsArgWith(2, null, (this.outputFiles = []), (this.allFiles = [])); + this.ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, null, this.resources); + this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3); + this.ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3); + return this.ResourceWriter.syncResourcesToDisk({ + project_id: this.project_id, syncType: "incremental", - syncState: @syncState = "1234567890abcdef", - resources: @resources - }, @basePath, @callback) + syncState: (this.syncState = "1234567890abcdef"), + resources: this.resources + }, this.basePath, this.callback); + }); - it "should check the sync state matches", -> - @ResourceStateManager.checkProjectStateMatches - .calledWith(@syncState, @basePath) - .should.equal true + it("should check the sync state matches", function() { + return this.ResourceStateManager.checkProjectStateMatches + .calledWith(this.syncState, this.basePath) + .should.equal(true); + }); - it "should remove old files", -> - @ResourceWriter._removeExtraneousFiles - .calledWith(@resources, @basePath) - .should.equal true + it("should remove old files", function() { + return this.ResourceWriter._removeExtraneousFiles + .calledWith(this.resources, this.basePath) + .should.equal(true); + }); - it "should check each resource exists", -> - @ResourceStateManager.checkResourceFiles - .calledWith(@resources, @allFiles, @basePath) - .should.equal true + it("should check each resource exists", function() { + return this.ResourceStateManager.checkResourceFiles + .calledWith(this.resources, this.allFiles, this.basePath) + .should.equal(true); + }); - it "should write each resource to disk", -> - for resource in @resources - @ResourceWriter._writeResourceToDisk - .calledWith(@project_id, resource, @basePath) - .should.equal true + it("should write each resource to disk", function() { + return Array.from(this.resources).map((resource) => + this.ResourceWriter._writeResourceToDisk + .calledWith(this.project_id, resource, this.basePath) + .should.equal(true)); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "syncResourcesToDisk on an incremental update when the state does not match", -> - beforeEach -> - @resources = [ + describe("syncResourcesToDisk on an incremental update when the state does not match", function() { + beforeEach(function() { + this.resources = [ "resource-1-mock" - ] - @ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, @error = new Error()) - @ResourceWriter.syncResourcesToDisk({ - project_id: @project_id, + ]; + this.ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, (this.error = new Error())); + return this.ResourceWriter.syncResourcesToDisk({ + project_id: this.project_id, syncType: "incremental", - syncState: @syncState = "1234567890abcdef", - resources: @resources - }, @basePath, @callback) + syncState: (this.syncState = "1234567890abcdef"), + resources: this.resources + }, this.basePath, this.callback); + }); - it "should check whether the sync state matches", -> - @ResourceStateManager.checkProjectStateMatches - .calledWith(@syncState, @basePath) - .should.equal true + it("should 
check whether the sync state matches", function() { + return this.ResourceStateManager.checkProjectStateMatches + .calledWith(this.syncState, this.basePath) + .should.equal(true); + }); - it "should call the callback with an error", -> - @callback.calledWith(@error).should.equal true + return it("should call the callback with an error", function() { + return this.callback.calledWith(this.error).should.equal(true); + }); + }); - describe "_removeExtraneousFiles", -> - beforeEach -> - @output_files = [{ - path: "output.pdf" + describe("_removeExtraneousFiles", function() { + beforeEach(function() { + this.output_files = [{ + path: "output.pdf", type: "pdf" }, { - path: "extra/file.tex" + path: "extra/file.tex", type: "tex" }, { - path: "extra.aux" + path: "extra.aux", type: "aux" }, { path: "cache/_chunk1" },{ - path: "figures/image-eps-converted-to.pdf" + path: "figures/image-eps-converted-to.pdf", type: "pdf" },{ - path: "foo/main-figure0.md5" + path: "foo/main-figure0.md5", type: "md5" }, { - path: "foo/main-figure0.dpth" + path: "foo/main-figure0.dpth", type: "dpth" }, { - path: "foo/main-figure0.pdf" + path: "foo/main-figure0.pdf", type: "pdf" }, { - path: "_minted-main/default-pyg-prefix.pygstyle" + path: "_minted-main/default-pyg-prefix.pygstyle", type: "pygstyle" }, { - path: "_minted-main/default.pygstyle" + path: "_minted-main/default.pygstyle", type: "pygstyle" }, { - path: "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex" + path: "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex", type: "pygtex" }, { - path: "_markdown_main/30893013dec5d869a415610079774c2f.md.tex" + path: "_markdown_main/30893013dec5d869a415610079774c2f.md.tex", type: "tex" - }] - @resources = "mock-resources" - @OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files) - @ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1) - @ResourceWriter._removeExtraneousFiles(@resources, @basePath, @callback) + }]; + this.resources = "mock-resources"; + this.OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, this.output_files); + this.ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1); + return this.ResourceWriter._removeExtraneousFiles(this.resources, this.basePath, this.callback); + }); - it "should find the existing output files", -> - @OutputFileFinder.findOutputFiles - .calledWith(@resources, @basePath) - .should.equal true + it("should find the existing output files", function() { + return this.OutputFileFinder.findOutputFiles + .calledWith(this.resources, this.basePath) + .should.equal(true); + }); - it "should delete the output files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "output.pdf")) - .should.equal true + it("should delete the output files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "output.pdf")) + .should.equal(true); + }); - it "should delete the extra files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "extra/file.tex")) - .should.equal true + it("should delete the extra files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "extra/file.tex")) + .should.equal(true); + }); - it "should not delete the extra aux files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "extra.aux")) - .should.equal false + it("should not delete 
the extra aux files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "extra.aux")) + .should.equal(false); + }); - it "should not delete the knitr cache file", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "cache/_chunk1")) - .should.equal false + it("should not delete the knitr cache file", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "cache/_chunk1")) + .should.equal(false); + }); - it "should not delete the epstopdf converted files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "figures/image-eps-converted-to.pdf")) - .should.equal false + it("should not delete the epstopdf converted files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "figures/image-eps-converted-to.pdf")) + .should.equal(false); + }); - it "should not delete the tikz md5 files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "foo/main-figure0.md5")) - .should.equal false + it("should not delete the tikz md5 files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "foo/main-figure0.md5")) + .should.equal(false); + }); - it "should not delete the tikz dpth files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "foo/main-figure0.dpth")) - .should.equal false + it("should not delete the tikz dpth files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "foo/main-figure0.dpth")) + .should.equal(false); + }); - it "should not delete the tikz pdf files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "foo/main-figure0.pdf")) - .should.equal false + it("should not delete the tikz pdf files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "foo/main-figure0.pdf")) + .should.equal(false); + }); - it "should not delete the minted pygstyle files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "_minted-main/default-pyg-prefix.pygstyle")) - .should.equal false + it("should not delete the minted pygstyle files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "_minted-main/default-pyg-prefix.pygstyle")) + .should.equal(false); + }); - it "should not delete the minted default pygstyle files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "_minted-main/default.pygstyle")) - .should.equal false + it("should not delete the minted default pygstyle files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "_minted-main/default.pygstyle")) + .should.equal(false); + }); - it "should not delete the minted default pygtex files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex")) - .should.equal false + it("should not delete the minted default pygtex files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex")) + .should.equal(false); + }); - it "should not delete the 
markdown md.tex files", -> - @ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(@basePath, "_markdown_main/30893013dec5d869a415610079774c2f.md.tex")) - .should.equal false + it("should not delete the markdown md.tex files", function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, "_markdown_main/30893013dec5d869a415610079774c2f.md.tex")) + .should.equal(false); + }); - it "should call the callback", -> - @callback.called.should.equal true + it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should time the request", -> - @Metrics.Timer::done.called.should.equal true + return it("should time the request", function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true); + }); + }); - describe "_writeResourceToDisk", -> - describe "with a url based resource", -> - beforeEach -> - @resource = - path: "main.tex" - url: "http://www.example.com/main.tex" + describe("_writeResourceToDisk", function() { + describe("with a url based resource", function() { + beforeEach(function() { + this.resource = { + path: "main.tex", + url: "http://www.example.com/main.tex", modified: Date.now() - @UrlCache.downloadUrlToFile = sinon.stub().callsArgWith(4, "fake error downloading file") - @ResourceWriter._writeResourceToDisk(@project_id, @resource, @basePath, @callback) + }; + this.UrlCache.downloadUrlToFile = sinon.stub().callsArgWith(4, "fake error downloading file"); + return this.ResourceWriter._writeResourceToDisk(this.project_id, this.resource, this.basePath, this.callback); + }); - it "should ensure the directory exists", -> - @mkdirp - .calledWith(path.dirname(path.join(@basePath, @resource.path))) - .should.equal true + it("should ensure the directory exists", function() { + return this.mkdirp + .calledWith(path.dirname(path.join(this.basePath, this.resource.path))) + .should.equal(true); + }); - it "should write the URL from the cache", -> - @UrlCache.downloadUrlToFile - .calledWith(@project_id, @resource.url, path.join(@basePath, @resource.path), @resource.modified) - .should.equal true + it("should write the URL from the cache", function() { + return this.UrlCache.downloadUrlToFile + .calledWith(this.project_id, this.resource.url, path.join(this.basePath, this.resource.path), this.resource.modified) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); - it "should not return an error if the resource writer errored", -> - should.not.exist @callback.args[0][0] + return it("should not return an error if the resource writer errored", function() { + return should.not.exist(this.callback.args[0][0]); + }); + }); - describe "with a content based resource", -> - beforeEach -> - @resource = - path: "main.tex" + describe("with a content based resource", function() { + beforeEach(function() { + this.resource = { + path: "main.tex", content: "Hello world" - @fs.writeFile = sinon.stub().callsArg(2) - @ResourceWriter._writeResourceToDisk(@project_id, @resource, @basePath, @callback) + }; + this.fs.writeFile = sinon.stub().callsArg(2); + return this.ResourceWriter._writeResourceToDisk(this.project_id, this.resource, this.basePath, this.callback); + }); - it "should ensure the directory exists", -> - @mkdirp - .calledWith(path.dirname(path.join(@basePath, @resource.path))) - .should.equal true + it("should ensure the directory 
exists", function() { + return this.mkdirp + .calledWith(path.dirname(path.join(this.basePath, this.resource.path))) + .should.equal(true); + }); - it "should write the contents to disk", -> - @fs.writeFile - .calledWith(path.join(@basePath, @resource.path), @resource.content) - .should.equal true + it("should write the contents to disk", function() { + return this.fs.writeFile + .calledWith(path.join(this.basePath, this.resource.path), this.resource.content) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "with a file path that breaks out of the root folder", -> - beforeEach -> - @resource = - path: "../../main.tex" + return describe("with a file path that breaks out of the root folder", function() { + beforeEach(function() { + this.resource = { + path: "../../main.tex", content: "Hello world" - @fs.writeFile = sinon.stub().callsArg(2) - @ResourceWriter._writeResourceToDisk(@project_id, @resource, @basePath, @callback) + }; + this.fs.writeFile = sinon.stub().callsArg(2); + return this.ResourceWriter._writeResourceToDisk(this.project_id, this.resource, this.basePath, this.callback); + }); - it "should not write to disk", -> - @fs.writeFile.called.should.equal false + it("should not write to disk", function() { + return this.fs.writeFile.called.should.equal(false); + }); - it "should return an error", -> - @callback + return it("should return an error", function() { + return this.callback .calledWith(new Error("resource path is outside root directory")) - .should.equal true + .should.equal(true); + }); + }); + }); - describe "checkPath", -> - describe "with a valid path", -> - beforeEach -> - @ResourceWriter.checkPath("foo", "bar", @callback) + return describe("checkPath", function() { + describe("with a valid path", function() { + beforeEach(function() { + return this.ResourceWriter.checkPath("foo", "bar", this.callback); + }); - it "should return the joined path", -> - @callback.calledWith(null, "foo/bar") - .should.equal true + return it("should return the joined path", function() { + return this.callback.calledWith(null, "foo/bar") + .should.equal(true); + }); + }); - describe "with an invalid path", -> - beforeEach -> - @ResourceWriter.checkPath("foo", "baz/../../bar", @callback) + describe("with an invalid path", function() { + beforeEach(function() { + return this.ResourceWriter.checkPath("foo", "baz/../../bar", this.callback); + }); - it "should return an error", -> - @callback.calledWith(new Error("resource path is outside root directory")) - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith(new Error("resource path is outside root directory")) + .should.equal(true); + }); + }); - describe "with another invalid path matching on a prefix", -> - beforeEach -> - @ResourceWriter.checkPath("foo", "../foobar/baz", @callback) + return describe("with another invalid path matching on a prefix", function() { + beforeEach(function() { + return this.ResourceWriter.checkPath("foo", "../foobar/baz", this.callback); + }); - it "should return an error", -> - @callback.calledWith(new Error("resource path is outside root directory")) - .should.equal true + return it("should return an error", function() { + return this.callback.calledWith(new Error("resource path is outside root directory")) + .should.equal(true); + }); + }); + }); +}); diff --git 
a/test/unit/coffee/StaticServerForbidSymlinksTests.js b/test/unit/coffee/StaticServerForbidSymlinksTests.js index 4a87d64..9063c1f 100644 --- a/test/unit/coffee/StaticServerForbidSymlinksTests.js +++ b/test/unit/coffee/StaticServerForbidSymlinksTests.js @@ -1,158 +1,219 @@ -should = require('chai').should() -SandboxedModule = require('sandboxed-module') -assert = require('assert') -path = require('path') -sinon = require('sinon') -modulePath = path.join __dirname, "../../../app/js/StaticServerForbidSymlinks" -expect = require("chai").expect +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const should = require('chai').should(); +const SandboxedModule = require('sandboxed-module'); +const assert = require('assert'); +const path = require('path'); +const sinon = require('sinon'); +const modulePath = path.join(__dirname, "../../../app/js/StaticServerForbidSymlinks"); +const { expect } = require("chai"); -describe "StaticServerForbidSymlinks", -> +describe("StaticServerForbidSymlinks", function() { - beforeEach -> + beforeEach(function() { - @settings = - path: + this.settings = { + path: { compilesDir: "/compiles/here" + } + }; - @fs = {} - @ForbidSymlinks = SandboxedModule.require modulePath, requires: - "settings-sharelatex":@settings - "logger-sharelatex": - log:-> - warn:-> - error:-> - "fs":@fs + this.fs = {}; + this.ForbidSymlinks = SandboxedModule.require(modulePath, { requires: { + "settings-sharelatex":this.settings, + "logger-sharelatex": { + log() {}, + warn() {}, + error() {} + }, + "fs":this.fs + } + } + ); - @dummyStatic = (rootDir, options) -> - return (req, res, next) -> - # console.log "dummyStatic serving file", rootDir, "called with", req.url - # serve it + this.dummyStatic = (rootDir, options) => + (req, res, next) => + // console.log "dummyStatic serving file", rootDir, "called with", req.url + // serve it next() + + ; - @StaticServerForbidSymlinks = @ForbidSymlinks @dummyStatic, @settings.path.compilesDir - @req = - params: + this.StaticServerForbidSymlinks = this.ForbidSymlinks(this.dummyStatic, this.settings.path.compilesDir); + this.req = { + params: { project_id:"12345" + } + }; - @res = {} - @req.url = "/12345/output.pdf" + this.res = {}; + return this.req.url = "/12345/output.pdf"; + }); - describe "sending a normal file through", -> - beforeEach -> - @fs.realpath = sinon.stub().callsArgWith(1, null, "#{@settings.path.compilesDir}/#{@req.params.project_id}/output.pdf") + describe("sending a normal file through", function() { + beforeEach(function() { + return this.fs.realpath = sinon.stub().callsArgWith(1, null, `${this.settings.path.compilesDir}/${this.req.params.project_id}/output.pdf`); + }); - it "should call next", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 200 - done() - @StaticServerForbidSymlinks @req, @res, done + return it("should call next", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(200); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res, done); + }); + }); - describe "with a missing file", -> - beforeEach -> - @fs.realpath = sinon.stub().callsArgWith(1, {code: 'ENOENT'}, "#{@settings.path.compilesDir}/#{@req.params.project_id}/unknown.pdf") + describe("with a missing file", function() { + beforeEach(function() { + return this.fs.realpath = sinon.stub().callsArgWith(1, {code: 'ENOENT'}, 
`${this.settings.path.compilesDir}/${this.req.params.project_id}/unknown.pdf`); + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - describe "with a symlink file", -> - beforeEach -> - @fs.realpath = sinon.stub().callsArgWith(1, null, "/etc/#{@req.params.project_id}/output.pdf") + describe("with a symlink file", function() { + beforeEach(function() { + return this.fs.realpath = sinon.stub().callsArgWith(1, null, `/etc/${this.req.params.project_id}/output.pdf`); + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - describe "with a relative file", -> - beforeEach -> - @req.url = "/12345/../67890/output.pdf" + describe("with a relative file", function() { + beforeEach(function() { + return this.req.url = "/12345/../67890/output.pdf"; + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - describe "with a unnormalized file containing .", -> - beforeEach -> - @req.url = "/12345/foo/./output.pdf" + describe("with a unnormalized file containing .", function() { + beforeEach(function() { + return this.req.url = "/12345/foo/./output.pdf"; + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - describe "with a file containing an empty path", -> - beforeEach -> - @req.url = "/12345/foo//output.pdf" + describe("with a file containing an empty path", function() { + beforeEach(function() { + return this.req.url = "/12345/foo//output.pdf"; + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - describe "with a non-project file", -> - beforeEach -> - @req.url = "/.foo/output.pdf" + describe("with a non-project file", function() { + beforeEach(function() { + return this.req.url = "/.foo/output.pdf"; + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - 
describe "with a file outside the compiledir", -> - beforeEach -> - @req.url = "/../bar/output.pdf" + describe("with a file outside the compiledir", function() { + beforeEach(function() { + return this.req.url = "/../bar/output.pdf"; + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - describe "with a file with no leading /", -> - beforeEach -> - @req.url = "./../bar/output.pdf" + describe("with a file with no leading /", function() { + beforeEach(function() { + return this.req.url = "./../bar/output.pdf"; + }); - it "should send a 404", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 404 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 404", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(404); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); - describe "with a github style path", -> - beforeEach -> - @req.url = "/henryoswald-latex_example/output/output.log" - @fs.realpath = sinon.stub().callsArgWith(1, null, "#{@settings.path.compilesDir}/henryoswald-latex_example/output/output.log") + describe("with a github style path", function() { + beforeEach(function() { + this.req.url = "/henryoswald-latex_example/output/output.log"; + return this.fs.realpath = sinon.stub().callsArgWith(1, null, `${this.settings.path.compilesDir}/henryoswald-latex_example/output/output.log`); + }); - it "should call next", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 200 - done() - @StaticServerForbidSymlinks @req, @res, done + return it("should call next", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(200); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res, done); + }); + }); - describe "with an error from fs.realpath", -> + return describe("with an error from fs.realpath", function() { - beforeEach -> - @fs.realpath = sinon.stub().callsArgWith(1, "error") + beforeEach(function() { + return this.fs.realpath = sinon.stub().callsArgWith(1, "error"); + }); - it "should send a 500", (done)-> - @res.sendStatus = (resCode)-> - resCode.should.equal 500 - done() - @StaticServerForbidSymlinks @req, @res + return it("should send a 500", function(done){ + this.res.sendStatus = function(resCode){ + resCode.should.equal(500); + return done(); + }; + return this.StaticServerForbidSymlinks(this.req, this.res); + }); + }); +}); diff --git a/test/unit/coffee/TikzManager.js b/test/unit/coffee/TikzManager.js index 69968aa..c792fab 100644 --- a/test/unit/coffee/TikzManager.js +++ b/test/unit/coffee/TikzManager.js @@ -1,117 +1,150 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/TikzManager' +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/TikzManager'); -describe 
'TikzManager', -> - beforeEach -> - @TikzManager = SandboxedModule.require modulePath, requires: - "./ResourceWriter": @ResourceWriter = {} - "./SafeReader": @SafeReader = {} - "fs": @fs = {} - "logger-sharelatex": @logger = {log: () ->} +describe('TikzManager', function() { + beforeEach(function() { + return this.TikzManager = SandboxedModule.require(modulePath, { requires: { + "./ResourceWriter": (this.ResourceWriter = {}), + "./SafeReader": (this.SafeReader = {}), + "fs": (this.fs = {}), + "logger-sharelatex": (this.logger = {log() {}}) + } + });}); - describe "checkMainFile", -> - beforeEach -> - @compileDir = "compile-dir" - @mainFile = "main.tex" - @callback = sinon.stub() + describe("checkMainFile", function() { + beforeEach(function() { + this.compileDir = "compile-dir"; + this.mainFile = "main.tex"; + return this.callback = sinon.stub(); + }); - describe "if there is already an output.tex file in the resources", -> - beforeEach -> - @resources = [{path:"main.tex"},{path:"output.tex"}] - @TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback + describe("if there is already an output.tex file in the resources", function() { + beforeEach(function() { + this.resources = [{path:"main.tex"},{path:"output.tex"}]; + return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); + }); - it "should call the callback with false ", -> - @callback.calledWithExactly(null, false) - .should.equal true + return it("should call the callback with false ", function() { + return this.callback.calledWithExactly(null, false) + .should.equal(true); + }); + }); - describe "if there is no output.tex file in the resources", -> - beforeEach -> - @resources = [{path:"main.tex"}] - @ResourceWriter.checkPath = sinon.stub() - .withArgs(@compileDir, @mainFile) - .callsArgWith(2, null, "#{@compileDir}/#{@mainFile}") + return describe("if there is no output.tex file in the resources", function() { + beforeEach(function() { + this.resources = [{path:"main.tex"}]; + return this.ResourceWriter.checkPath = sinon.stub() + .withArgs(this.compileDir, this.mainFile) + .callsArgWith(2, null, `${this.compileDir}/${this.mainFile}`); + }); - describe "and the main file contains tikzexternalize", -> - beforeEach -> - @SafeReader.readFile = sinon.stub() - .withArgs("#{@compileDir}/#{@mainFile}") - .callsArgWith(3, null, "hello \\tikzexternalize") - @TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback + describe("and the main file contains tikzexternalize", function() { + beforeEach(function() { + this.SafeReader.readFile = sinon.stub() + .withArgs(`${this.compileDir}/${this.mainFile}`) + .callsArgWith(3, null, "hello \\tikzexternalize"); + return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); + }); - it "should look at the file on disk", -> - @SafeReader.readFile - .calledWith("#{@compileDir}/#{@mainFile}") - .should.equal true + it("should look at the file on disk", function() { + return this.SafeReader.readFile + .calledWith(`${this.compileDir}/${this.mainFile}`) + .should.equal(true); + }); - it "should call the callback with true ", -> - @callback.calledWithExactly(null, true) - .should.equal true + return it("should call the callback with true ", function() { + return this.callback.calledWithExactly(null, true) + .should.equal(true); + }); + }); - describe "and the main file does not contain tikzexternalize", -> - beforeEach -> - @SafeReader.readFile = sinon.stub() - 
.withArgs("#{@compileDir}/#{@mainFile}") - .callsArgWith(3, null, "hello") - @TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback + describe("and the main file does not contain tikzexternalize", function() { + beforeEach(function() { + this.SafeReader.readFile = sinon.stub() + .withArgs(`${this.compileDir}/${this.mainFile}`) + .callsArgWith(3, null, "hello"); + return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); + }); - it "should look at the file on disk", -> - @SafeReader.readFile - .calledWith("#{@compileDir}/#{@mainFile}") - .should.equal true + it("should look at the file on disk", function() { + return this.SafeReader.readFile + .calledWith(`${this.compileDir}/${this.mainFile}`) + .should.equal(true); + }); - it "should call the callback with false", -> - @callback.calledWithExactly(null, false) - .should.equal true + return it("should call the callback with false", function() { + return this.callback.calledWithExactly(null, false) + .should.equal(true); + }); + }); - describe "and the main file contains \\usepackage{pstool}", -> - beforeEach -> - @SafeReader.readFile = sinon.stub() - .withArgs("#{@compileDir}/#{@mainFile}") - .callsArgWith(3, null, "hello \\usepackage[random-options]{pstool}") - @TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback + return describe("and the main file contains \\usepackage{pstool}", function() { + beforeEach(function() { + this.SafeReader.readFile = sinon.stub() + .withArgs(`${this.compileDir}/${this.mainFile}`) + .callsArgWith(3, null, "hello \\usepackage[random-options]{pstool}"); + return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); + }); - it "should look at the file on disk", -> - @SafeReader.readFile - .calledWith("#{@compileDir}/#{@mainFile}") - .should.equal true + it("should look at the file on disk", function() { + return this.SafeReader.readFile + .calledWith(`${this.compileDir}/${this.mainFile}`) + .should.equal(true); + }); - it "should call the callback with true ", -> - @callback.calledWithExactly(null, true) - .should.equal true + return it("should call the callback with true ", function() { + return this.callback.calledWithExactly(null, true) + .should.equal(true); + }); + }); + }); + }); - describe "injectOutputFile", -> - beforeEach -> - @rootDir = "/mock" - @filename = "filename.tex" - @callback = sinon.stub() - @content = ''' - \\documentclass{article} - \\usepackage{tikz} - \\tikzexternalize - \\begin{document} - Hello world - \\end{document} - ''' - @fs.readFile = sinon.stub().callsArgWith(2, null, @content) - @fs.writeFile = sinon.stub().callsArg(3) - @ResourceWriter.checkPath = sinon.stub().callsArgWith(2, null, "#{@rootDir}/#{@filename}") - @TikzManager.injectOutputFile @rootDir, @filename, @callback + return describe("injectOutputFile", function() { + beforeEach(function() { + this.rootDir = "/mock"; + this.filename = "filename.tex"; + this.callback = sinon.stub(); + this.content = `\ +\\documentclass{article} +\\usepackage{tikz} +\\tikzexternalize +\\begin{document} +Hello world +\\end{document}\ +`; + this.fs.readFile = sinon.stub().callsArgWith(2, null, this.content); + this.fs.writeFile = sinon.stub().callsArg(3); + this.ResourceWriter.checkPath = sinon.stub().callsArgWith(2, null, `${this.rootDir}/${this.filename}`); + return this.TikzManager.injectOutputFile(this.rootDir, this.filename, this.callback); + }); - it "sould check the path", -> - 
@ResourceWriter.checkPath.calledWith(@rootDir, @filename) - .should.equal true + it("sould check the path", function() { + return this.ResourceWriter.checkPath.calledWith(this.rootDir, this.filename) + .should.equal(true); + }); - it "should read the file", -> - @fs.readFile - .calledWith("#{@rootDir}/#{@filename}", "utf8") - .should.equal true + it("should read the file", function() { + return this.fs.readFile + .calledWith(`${this.rootDir}/${this.filename}`, "utf8") + .should.equal(true); + }); - it "should write out the same file as output.tex", -> - @fs.writeFile - .calledWith("#{@rootDir}/output.tex", @content, {flag: 'wx'}) - .should.equal true + it("should write out the same file as output.tex", function() { + return this.fs.writeFile + .calledWith(`${this.rootDir}/output.tex`, this.content, {flag: 'wx'}) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); +}); diff --git a/test/unit/coffee/UrlCacheTests.js b/test/unit/coffee/UrlCacheTests.js index 36a11cb..a3af008 100644 --- a/test/unit/coffee/UrlCacheTests.js +++ b/test/unit/coffee/UrlCacheTests.js @@ -1,200 +1,262 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/UrlCache' -EventEmitter = require("events").EventEmitter +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); +require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/UrlCache'); +const { EventEmitter } = require("events"); -describe "UrlCache", -> - beforeEach -> - @callback = sinon.stub() - @url = "www.example.com/file" - @project_id = "project-id-123" - @UrlCache = SandboxedModule.require modulePath, requires: - "./db" : {} - "./UrlFetcher" : @UrlFetcher = {} - "logger-sharelatex": @logger = {log: sinon.stub()} - "settings-sharelatex": @Settings = { path: clsiCacheDir: "/cache/dir" } - "fs": @fs = {} +describe("UrlCache", function() { + beforeEach(function() { + this.callback = sinon.stub(); + this.url = "www.example.com/file"; + this.project_id = "project-id-123"; + return this.UrlCache = SandboxedModule.require(modulePath, { requires: { + "./db" : {}, + "./UrlFetcher" : (this.UrlFetcher = {}), + "logger-sharelatex": (this.logger = {log: sinon.stub()}), + "settings-sharelatex": (this.Settings = { path: {clsiCacheDir: "/cache/dir"} }), + "fs": (this.fs = {}) + } + });}); - describe "_doesUrlNeedDownloading", -> - beforeEach -> - @lastModified = new Date() - @lastModifiedRoundedToSeconds = new Date(Math.floor(@lastModified.getTime() / 1000) * 1000) + describe("_doesUrlNeedDownloading", function() { + beforeEach(function() { + this.lastModified = new Date(); + return this.lastModifiedRoundedToSeconds = new Date(Math.floor(this.lastModified.getTime() / 1000) * 1000); + }); - describe "when URL does not exist in cache", -> - beforeEach -> - @UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, null) - @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback) + describe("when URL does not exist in cache", function() { + beforeEach(function() { + 
this.UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, null); + return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should return the callback with true", -> - @callback.calledWith(null, true).should.equal true + return it("should return the callback with true", function() { + return this.callback.calledWith(null, true).should.equal(true); + }); + }); - describe "when URL does exist in cache", -> - beforeEach -> - @urlDetails = {} - @UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, @urlDetails) + return describe("when URL does exist in cache", function() { + beforeEach(function() { + this.urlDetails = {}; + return this.UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, this.urlDetails); + }); - describe "when the modified date is more recent than the cached modified date", -> - beforeEach -> - @urlDetails.lastModified = new Date(@lastModified.getTime() - 1000) - @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback) + describe("when the modified date is more recent than the cached modified date", function() { + beforeEach(function() { + this.urlDetails.lastModified = new Date(this.lastModified.getTime() - 1000); + return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should get the url details", -> - @UrlCache._findUrlDetails - .calledWith(@project_id, @url) - .should.equal true + it("should get the url details", function() { + return this.UrlCache._findUrlDetails + .calledWith(this.project_id, this.url) + .should.equal(true); + }); - it "should return the callback with true", -> - @callback.calledWith(null, true).should.equal true + return it("should return the callback with true", function() { + return this.callback.calledWith(null, true).should.equal(true); + }); + }); - describe "when the cached modified date is more recent than the modified date", -> - beforeEach -> - @urlDetails.lastModified = new Date(@lastModified.getTime() + 1000) - @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback) + describe("when the cached modified date is more recent than the modified date", function() { + beforeEach(function() { + this.urlDetails.lastModified = new Date(this.lastModified.getTime() + 1000); + return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should return the callback with false", -> - @callback.calledWith(null, false).should.equal true + return it("should return the callback with false", function() { + return this.callback.calledWith(null, false).should.equal(true); + }); + }); - describe "when the cached modified date is equal to the modified date", -> - beforeEach -> - @urlDetails.lastModified = @lastModified - @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback) + describe("when the cached modified date is equal to the modified date", function() { + beforeEach(function() { + this.urlDetails.lastModified = this.lastModified; + return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should return the callback with false", -> - @callback.calledWith(null, false).should.equal true + return it("should return the callback with false", function() { + return this.callback.calledWith(null, false).should.equal(true); + }); + }); - describe "when the provided modified date does not exist", -> - beforeEach -> - @lastModified = 
null - @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback) + describe("when the provided modified date does not exist", function() { + beforeEach(function() { + this.lastModified = null; + return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should return the callback with true", -> - @callback.calledWith(null, true).should.equal true + return it("should return the callback with true", function() { + return this.callback.calledWith(null, true).should.equal(true); + }); + }); - describe "when the URL does not have a modified date", -> - beforeEach -> - @urlDetails.lastModified = null - @UrlCache._doesUrlNeedDownloading(@project_id, @url, @lastModified, @callback) + return describe("when the URL does not have a modified date", function() { + beforeEach(function() { + this.urlDetails.lastModified = null; + return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should return the callback with true", -> - @callback.calledWith(null, true).should.equal true + return it("should return the callback with true", function() { + return this.callback.calledWith(null, true).should.equal(true); + }); + }); + }); + }); - describe "_ensureUrlIsInCache", -> - beforeEach -> - @UrlFetcher.pipeUrlToFile = sinon.stub().callsArg(2) - @UrlCache._updateOrCreateUrlDetails = sinon.stub().callsArg(3) + describe("_ensureUrlIsInCache", function() { + beforeEach(function() { + this.UrlFetcher.pipeUrlToFile = sinon.stub().callsArg(2); + return this.UrlCache._updateOrCreateUrlDetails = sinon.stub().callsArg(3); + }); - describe "when the URL needs updating", -> - beforeEach -> - @UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, true) - @UrlCache._ensureUrlIsInCache(@project_id, @url, @lastModified, @callback) + describe("when the URL needs updating", function() { + beforeEach(function() { + this.UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, true); + return this.UrlCache._ensureUrlIsInCache(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should check that the url needs downloading", -> - @UrlCache._doesUrlNeedDownloading - .calledWith(@project_id, @url, @lastModifiedRoundedToSeconds) - .should.equal true + it("should check that the url needs downloading", function() { + return this.UrlCache._doesUrlNeedDownloading + .calledWith(this.project_id, this.url, this.lastModifiedRoundedToSeconds) + .should.equal(true); + }); - it "should download the URL to the cache file", -> - @UrlFetcher.pipeUrlToFile - .calledWith(@url, @UrlCache._cacheFilePathForUrl(@project_id, @url)) - .should.equal true + it("should download the URL to the cache file", function() { + return this.UrlFetcher.pipeUrlToFile + .calledWith(this.url, this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) + .should.equal(true); + }); - it "should update the database entry", -> - @UrlCache._updateOrCreateUrlDetails - .calledWith(@project_id, @url, @lastModifiedRoundedToSeconds) - .should.equal true + it("should update the database entry", function() { + return this.UrlCache._updateOrCreateUrlDetails + .calledWith(this.project_id, this.url, this.lastModifiedRoundedToSeconds) + .should.equal(true); + }); - it "should return the callback with the cache file path", -> - @callback - .calledWith(null, @UrlCache._cacheFilePathForUrl(@project_id, @url)) - .should.equal true + return it("should return the callback with the cache file path", 
function() { + return this.callback + .calledWith(null, this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) + .should.equal(true); + }); + }); - describe "when the URL does not need updating", -> - beforeEach -> - @UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, false) - @UrlCache._ensureUrlIsInCache(@project_id, @url, @lastModified, @callback) + return describe("when the URL does not need updating", function() { + beforeEach(function() { + this.UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, false); + return this.UrlCache._ensureUrlIsInCache(this.project_id, this.url, this.lastModified, this.callback); + }); - it "should not download the URL to the cache file", -> - @UrlFetcher.pipeUrlToFile - .called.should.equal false + it("should not download the URL to the cache file", function() { + return this.UrlFetcher.pipeUrlToFile + .called.should.equal(false); + }); - it "should return the callback with the cache file path", -> - @callback - .calledWith(null, @UrlCache._cacheFilePathForUrl(@project_id, @url)) - .should.equal true + return it("should return the callback with the cache file path", function() { + return this.callback + .calledWith(null, this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) + .should.equal(true); + }); + }); + }); - describe "downloadUrlToFile", -> - beforeEach -> - @cachePath = "path/to/cached/url" - @destPath = "path/to/destination" - @UrlCache._copyFile = sinon.stub().callsArg(2) - @UrlCache._ensureUrlIsInCache = sinon.stub().callsArgWith(3, null, @cachePath) - @UrlCache.downloadUrlToFile(@project_id, @url, @destPath, @lastModified, @callback) + describe("downloadUrlToFile", function() { + beforeEach(function() { + this.cachePath = "path/to/cached/url"; + this.destPath = "path/to/destination"; + this.UrlCache._copyFile = sinon.stub().callsArg(2); + this.UrlCache._ensureUrlIsInCache = sinon.stub().callsArgWith(3, null, this.cachePath); + return this.UrlCache.downloadUrlToFile(this.project_id, this.url, this.destPath, this.lastModified, this.callback); + }); - it "should ensure the URL is downloaded and updated in the cache", -> - @UrlCache._ensureUrlIsInCache - .calledWith(@project_id, @url, @lastModified) - .should.equal true + it("should ensure the URL is downloaded and updated in the cache", function() { + return this.UrlCache._ensureUrlIsInCache + .calledWith(this.project_id, this.url, this.lastModified) + .should.equal(true); + }); - it "should copy the file to the new location", -> - @UrlCache._copyFile - .calledWith(@cachePath, @destPath) - .should.equal true + it("should copy the file to the new location", function() { + return this.UrlCache._copyFile + .calledWith(this.cachePath, this.destPath) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "_deleteUrlCacheFromDisk", -> - beforeEach -> - @fs.unlink = sinon.stub().callsArg(1) - @UrlCache._deleteUrlCacheFromDisk(@project_id, @url, @callback) + describe("_deleteUrlCacheFromDisk", function() { + beforeEach(function() { + this.fs.unlink = sinon.stub().callsArg(1); + return this.UrlCache._deleteUrlCacheFromDisk(this.project_id, this.url, this.callback); + }); - it "should delete the cache file", -> - @fs.unlink - .calledWith(@UrlCache._cacheFilePathForUrl(@project_id, @url)) - .should.equal true + it("should delete the cache file", function() { + return this.fs.unlink + 
.calledWith(this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "_clearUrlFromCache", -> - beforeEach -> - @UrlCache._deleteUrlCacheFromDisk = sinon.stub().callsArg(2) - @UrlCache._clearUrlDetails = sinon.stub().callsArg(2) - @UrlCache._clearUrlFromCache @project_id, @url, @callback + describe("_clearUrlFromCache", function() { + beforeEach(function() { + this.UrlCache._deleteUrlCacheFromDisk = sinon.stub().callsArg(2); + this.UrlCache._clearUrlDetails = sinon.stub().callsArg(2); + return this.UrlCache._clearUrlFromCache(this.project_id, this.url, this.callback); + }); - it "should delete the file on disk", -> - @UrlCache._deleteUrlCacheFromDisk - .calledWith(@project_id, @url) - .should.equal true + it("should delete the file on disk", function() { + return this.UrlCache._deleteUrlCacheFromDisk + .calledWith(this.project_id, this.url) + .should.equal(true); + }); - it "should clear the entry in the database", -> - @UrlCache._clearUrlDetails - .calledWith(@project_id, @url) - .should.equal true + it("should clear the entry in the database", function() { + return this.UrlCache._clearUrlDetails + .calledWith(this.project_id, this.url) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "clearProject", -> - beforeEach -> - @urls = [ - "www.example.com/file1" + return describe("clearProject", function() { + beforeEach(function() { + this.urls = [ + "www.example.com/file1", "www.example.com/file2" - ] - @UrlCache._findAllUrlsInProject = sinon.stub().callsArgWith(1, null, @urls) - @UrlCache._clearUrlFromCache = sinon.stub().callsArg(2) - @UrlCache.clearProject @project_id, @callback + ]; + this.UrlCache._findAllUrlsInProject = sinon.stub().callsArgWith(1, null, this.urls); + this.UrlCache._clearUrlFromCache = sinon.stub().callsArg(2); + return this.UrlCache.clearProject(this.project_id, this.callback); + }); - it "should clear the cache for each url in the project", -> - for url in @urls - @UrlCache._clearUrlFromCache - .calledWith(@project_id, url) - .should.equal true + it("should clear the cache for each url in the project", function() { + return Array.from(this.urls).map((url) => + this.UrlCache._clearUrlFromCache + .calledWith(this.project_id, url) + .should.equal(true)); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); +}); diff --git a/test/unit/coffee/UrlFetcherTests.js b/test/unit/coffee/UrlFetcherTests.js index e91720e..21258ab 100644 --- a/test/unit/coffee/UrlFetcherTests.js +++ b/test/unit/coffee/UrlFetcherTests.js @@ -1,120 +1,154 @@ -SandboxedModule = require('sandboxed-module') -sinon = require('sinon') -require('chai').should() -modulePath = require('path').join __dirname, '../../../app/js/UrlFetcher' -EventEmitter = require("events").EventEmitter +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module'); +const sinon = require('sinon'); 
+require('chai').should(); +const modulePath = require('path').join(__dirname, '../../../app/js/UrlFetcher'); +const { EventEmitter } = require("events"); -describe "UrlFetcher", -> - beforeEach -> - @callback = sinon.stub() - @url = "https://www.example.com/file/here?query=string" - @UrlFetcher = SandboxedModule.require modulePath, requires: - request: defaults: @defaults = sinon.stub().returns(@request = {}) - fs: @fs = {} - "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } - "settings-sharelatex": @settings = {} +describe("UrlFetcher", function() { + beforeEach(function() { + this.callback = sinon.stub(); + this.url = "https://www.example.com/file/here?query=string"; + return this.UrlFetcher = SandboxedModule.require(modulePath, { requires: { + request: { defaults: (this.defaults = sinon.stub().returns(this.request = {})) + }, + fs: (this.fs = {}), + "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), + "settings-sharelatex": (this.settings = {}) + } + });}); - it "should turn off the cookie jar in request", -> - @defaults.calledWith(jar: false) - .should.equal true + it("should turn off the cookie jar in request", function() { + return this.defaults.calledWith({jar: false}) + .should.equal(true); + }); - describe "rewrite url domain if filestoreDomainOveride is set", -> - beforeEach -> - @path = "/path/to/file/on/disk" - @request.get = sinon.stub().returns(@urlStream = new EventEmitter) - @urlStream.pipe = sinon.stub() - @urlStream.pause = sinon.stub() - @urlStream.resume = sinon.stub() - @fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter) - @fs.unlink = (file, callback) -> callback() + describe("rewrite url domain if filestoreDomainOveride is set", function() { + beforeEach(function() { + this.path = "/path/to/file/on/disk"; + this.request.get = sinon.stub().returns(this.urlStream = new EventEmitter); + this.urlStream.pipe = sinon.stub(); + this.urlStream.pause = sinon.stub(); + this.urlStream.resume = sinon.stub(); + this.fs.createWriteStream = sinon.stub().returns(this.fileStream = new EventEmitter); + return this.fs.unlink = (file, callback) => callback(); + }); - it "should use the normal domain when override not set", (done)-> - @UrlFetcher.pipeUrlToFile @url, @path, => - @request.get.args[0][0].url.should.equal @url - done() - @res = statusCode: 200 - @urlStream.emit "response", @res - @urlStream.emit "end" - @fileStream.emit "finish" + it("should use the normal domain when override not set", function(done){ + this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { + this.request.get.args[0][0].url.should.equal(this.url); + return done(); + }); + this.res = {statusCode: 200}; + this.urlStream.emit("response", this.res); + this.urlStream.emit("end"); + return this.fileStream.emit("finish"); + }); - it "should use override domain when filestoreDomainOveride is set", (done)-> - @settings.filestoreDomainOveride = "192.11.11.11" - @UrlFetcher.pipeUrlToFile @url, @path, => - @request.get.args[0][0].url.should.equal "192.11.11.11/file/here?query=string" - done() - @res = statusCode: 200 - @urlStream.emit "response", @res - @urlStream.emit "end" - @fileStream.emit "finish" + return it("should use override domain when filestoreDomainOveride is set", function(done){ + this.settings.filestoreDomainOveride = "192.11.11.11"; + this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { + this.request.get.args[0][0].url.should.equal("192.11.11.11/file/here?query=string"); + return done(); + }); + this.res = 
{statusCode: 200}; + this.urlStream.emit("response", this.res); + this.urlStream.emit("end"); + return this.fileStream.emit("finish"); + }); + }); - describe "pipeUrlToFile", -> - beforeEach (done)-> - @path = "/path/to/file/on/disk" - @request.get = sinon.stub().returns(@urlStream = new EventEmitter) - @urlStream.pipe = sinon.stub() - @urlStream.pause = sinon.stub() - @urlStream.resume = sinon.stub() - @fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter) - @fs.unlink = (file, callback) -> callback() - done() + return describe("pipeUrlToFile", function() { + beforeEach(function(done){ + this.path = "/path/to/file/on/disk"; + this.request.get = sinon.stub().returns(this.urlStream = new EventEmitter); + this.urlStream.pipe = sinon.stub(); + this.urlStream.pause = sinon.stub(); + this.urlStream.resume = sinon.stub(); + this.fs.createWriteStream = sinon.stub().returns(this.fileStream = new EventEmitter); + this.fs.unlink = (file, callback) => callback(); + return done(); + }); - describe "successfully", -> - beforeEach (done)-> - @UrlFetcher.pipeUrlToFile @url, @path, => - @callback() - done() - @res = statusCode: 200 - @urlStream.emit "response", @res - @urlStream.emit "end" - @fileStream.emit "finish" + describe("successfully", function() { + beforeEach(function(done){ + this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { + this.callback(); + return done(); + }); + this.res = {statusCode: 200}; + this.urlStream.emit("response", this.res); + this.urlStream.emit("end"); + return this.fileStream.emit("finish"); + }); - it "should request the URL", -> - @request.get - .calledWith(sinon.match {"url": @url}) - .should.equal true + it("should request the URL", function() { + return this.request.get + .calledWith(sinon.match({"url": this.url})) + .should.equal(true); + }); - it "should open the file for writing", -> - @fs.createWriteStream - .calledWith(@path) - .should.equal true + it("should open the file for writing", function() { + return this.fs.createWriteStream + .calledWith(this.path) + .should.equal(true); + }); - it "should pipe the URL to the file", -> - @urlStream.pipe - .calledWith(@fileStream) - .should.equal true + it("should pipe the URL to the file", function() { + return this.urlStream.pipe + .calledWith(this.fileStream) + .should.equal(true); + }); - it "should call the callback", -> - @callback.called.should.equal true + return it("should call the callback", function() { + return this.callback.called.should.equal(true); + }); + }); - describe "with non success status code", -> - beforeEach (done)-> - @UrlFetcher.pipeUrlToFile @url, @path, (err)=> - @callback(err) - done() - @res = statusCode: 404 - @urlStream.emit "response", @res - @urlStream.emit "end" + describe("with non success status code", function() { + beforeEach(function(done){ + this.UrlFetcher.pipeUrlToFile(this.url, this.path, err=> { + this.callback(err); + return done(); + }); + this.res = {statusCode: 404}; + this.urlStream.emit("response", this.res); + return this.urlStream.emit("end"); + }); - it "should call the callback with an error", -> - @callback + return it("should call the callback with an error", function() { + return this.callback .calledWith(new Error("URL returned non-success status code: 404")) - .should.equal true + .should.equal(true); + }); + }); - describe "with error", -> - beforeEach (done)-> - @UrlFetcher.pipeUrlToFile @url, @path, (err)=> - @callback(err) - done() - @urlStream.emit "error", @error = new Error("something went wrong") + return describe("with 
error", function() { + beforeEach(function(done){ + this.UrlFetcher.pipeUrlToFile(this.url, this.path, err=> { + this.callback(err); + return done(); + }); + return this.urlStream.emit("error", (this.error = new Error("something went wrong"))); + }); - it "should call the callback with the error", -> - @callback - .calledWith(@error) - .should.equal true + it("should call the callback with the error", function() { + return this.callback + .calledWith(this.error) + .should.equal(true); + }); - it "should only call the callback once, even if end is called", -> - @urlStream.emit "end" - @callback.calledOnce.should.equal true + return it("should only call the callback once, even if end is called", function() { + this.urlStream.emit("end"); + return this.callback.calledOnce.should.equal(true); + }); + }); + }); +}); From 0cb5426548ceac3e95d74c7c5afedcfa7600dd30 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:15:25 +0100 Subject: [PATCH 12/24] decaffeinate: Run post-processing cleanups on CompileControllerTests.coffee and 17 other files --- test/unit/coffee/CompileControllerTests.js | 6 ++++++ test/unit/coffee/CompileManagerTests.js | 9 +++++++++ test/unit/coffee/ContentTypeMapperTests.js | 7 +++++++ test/unit/coffee/DockerLockManagerTests.js | 5 +++++ test/unit/coffee/DockerRunnerTests.js | 9 ++++++++- test/unit/coffee/DraftModeManagerTests.js | 5 +++++ test/unit/coffee/LatexRunnerTests.js | 6 ++++++ test/unit/coffee/LockManagerTests.js | 6 ++++++ test/unit/coffee/OutputFileFinderTests.js | 7 +++++++ test/unit/coffee/OutputFileOptimiserTests.js | 7 +++++++ test/unit/coffee/ProjectPersistenceManagerTests.js | 7 +++++++ test/unit/coffee/RequestParserTests.js | 8 +++++++- test/unit/coffee/ResourceStateManagerTests.js | 6 ++++++ test/unit/coffee/ResourceWriterTests.js | 5 +++++ test/unit/coffee/StaticServerForbidSymlinksTests.js | 6 ++++++ test/unit/coffee/TikzManager.js | 5 +++++ test/unit/coffee/UrlCacheTests.js | 6 ++++++ test/unit/coffee/UrlFetcherTests.js | 5 +++++ 18 files changed, 113 insertions(+), 2 deletions(-) diff --git a/test/unit/coffee/CompileControllerTests.js b/test/unit/coffee/CompileControllerTests.js index 1defed7..2a06fbc 100644 --- a/test/unit/coffee/CompileControllerTests.js +++ b/test/unit/coffee/CompileControllerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/CompileManagerTests.js b/test/unit/coffee/CompileManagerTests.js index 5675ac1..e798aec 100644 --- a/test/unit/coffee/CompileManagerTests.js +++ b/test/unit/coffee/CompileManagerTests.js @@ -1,3 +1,12 @@ +/* eslint-disable + camelcase, + chai-friendly/no-unused-expressions, + no-path-concat, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/test/unit/coffee/ContentTypeMapperTests.js b/test/unit/coffee/ContentTypeMapperTests.js index 64a6091..bbde292 100644 --- a/test/unit/coffee/ContentTypeMapperTests.js +++ b/test/unit/coffee/ContentTypeMapperTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/DockerLockManagerTests.js b/test/unit/coffee/DockerLockManagerTests.js index 5ef3ca2..155a246 100644 --- a/test/unit/coffee/DockerLockManagerTests.js +++ b/test/unit/coffee/DockerLockManagerTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/test/unit/coffee/DockerRunnerTests.js b/test/unit/coffee/DockerRunnerTests.js index 79ac5df..152b8b9 100644 --- a/test/unit/coffee/DockerRunnerTests.js +++ b/test/unit/coffee/DockerRunnerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -247,7 +254,7 @@ describe("DockerRunner", function() { this.container.inspect = sinon.stub().callsArgWith(0); this.container.start = sinon.stub().yields(); - return this.DockerRunner.startContainer(this.options, this.volumes, this.callback, function() {}); + return this.DockerRunner.startContainer(this.options, this.volumes, this.callback, () => {}); }); it("should start the container with the given name", function() { diff --git a/test/unit/coffee/DraftModeManagerTests.js b/test/unit/coffee/DraftModeManagerTests.js index ffea050..f270873 100644 --- a/test/unit/coffee/DraftModeManagerTests.js +++ b/test/unit/coffee/DraftModeManagerTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/LatexRunnerTests.js b/test/unit/coffee/LatexRunnerTests.js index 5cb4d06..7fe8bc8 100644 --- a/test/unit/coffee/LatexRunnerTests.js +++ b/test/unit/coffee/LatexRunnerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/LockManagerTests.js b/test/unit/coffee/LockManagerTests.js index d716a44..6d1b156 100644 --- a/test/unit/coffee/LockManagerTests.js +++ b/test/unit/coffee/LockManagerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/OutputFileFinderTests.js b/test/unit/coffee/OutputFileFinderTests.js index 3292d0a..5c956ad 100644 --- a/test/unit/coffee/OutputFileFinderTests.js +++ b/test/unit/coffee/OutputFileFinderTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/OutputFileOptimiserTests.js b/test/unit/coffee/OutputFileOptimiserTests.js index 8934c71..13b8d60 100644 --- a/test/unit/coffee/OutputFileOptimiserTests.js +++ b/test/unit/coffee/OutputFileOptimiserTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, + node/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/ProjectPersistenceManagerTests.js b/test/unit/coffee/ProjectPersistenceManagerTests.js index c15cd80..5f77a80 100644 --- a/test/unit/coffee/ProjectPersistenceManagerTests.js +++ b/test/unit/coffee/ProjectPersistenceManagerTests.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/test/unit/coffee/RequestParserTests.js b/test/unit/coffee/RequestParserTests.js index 5ca0941..725988f 100644 --- a/test/unit/coffee/RequestParserTests.js +++ b/test/unit/coffee/RequestParserTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + handle-callback-err, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -35,7 +41,7 @@ describe("RequestParser", function() { } });}); - afterEach(() => tk.reset()); + afterEach(function() { return tk.reset(); }); describe("without a top level object", function() { beforeEach(function() { diff --git a/test/unit/coffee/ResourceStateManagerTests.js b/test/unit/coffee/ResourceStateManagerTests.js index 4b09135..fe52cc5 100644 --- a/test/unit/coffee/ResourceStateManagerTests.js +++ b/test/unit/coffee/ResourceStateManagerTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/ResourceWriterTests.js b/test/unit/coffee/ResourceWriterTests.js index 89433c8..8309547 100644 --- a/test/unit/coffee/ResourceWriterTests.js +++ b/test/unit/coffee/ResourceWriterTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/test/unit/coffee/StaticServerForbidSymlinksTests.js b/test/unit/coffee/StaticServerForbidSymlinksTests.js index 9063c1f..e754ea7 100644 --- a/test/unit/coffee/StaticServerForbidSymlinksTests.js +++ b/test/unit/coffee/StaticServerForbidSymlinksTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/TikzManager.js b/test/unit/coffee/TikzManager.js index c792fab..f35d261 100644 --- a/test/unit/coffee/TikzManager.js +++ b/test/unit/coffee/TikzManager.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/unit/coffee/UrlCacheTests.js b/test/unit/coffee/UrlCacheTests.js index a3af008..7f02450 100644 --- a/test/unit/coffee/UrlCacheTests.js +++ b/test/unit/coffee/UrlCacheTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from diff --git a/test/unit/coffee/UrlFetcherTests.js b/test/unit/coffee/UrlFetcherTests.js index 21258ab..453a386 100644 --- a/test/unit/coffee/UrlFetcherTests.js +++ b/test/unit/coffee/UrlFetcherTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns From b515397b5af58153c944c405d376413e95da1d0d Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:15:30 +0100 Subject: [PATCH 13/24] decaffeinate: rename test/unit/coffee to test/unit/js --- test/unit/{coffee => js}/CompileControllerTests.js | 0 test/unit/{coffee => js}/CompileManagerTests.js | 0 test/unit/{coffee => js}/ContentTypeMapperTests.js | 0 test/unit/{coffee => js}/DockerLockManagerTests.js | 0 test/unit/{coffee => js}/DockerRunnerTests.js | 0 test/unit/{coffee => js}/DraftModeManagerTests.js | 0 test/unit/{coffee => js}/LatexRunnerTests.js | 0 test/unit/{coffee => js}/LockManagerTests.js | 0 test/unit/{coffee => js}/OutputFileFinderTests.js | 0 test/unit/{coffee => js}/OutputFileOptimiserTests.js | 0 test/unit/{coffee => js}/ProjectPersistenceManagerTests.js | 0 test/unit/{coffee => js}/RequestParserTests.js | 0 test/unit/{coffee => js}/ResourceStateManagerTests.js | 0 test/unit/{coffee => js}/ResourceWriterTests.js | 0 test/unit/{coffee => js}/StaticServerForbidSymlinksTests.js | 0 test/unit/{coffee => js}/TikzManager.js | 0 test/unit/{coffee => js}/UrlCacheTests.js | 0 test/unit/{coffee => js}/UrlFetcherTests.js | 0 18 files changed, 0 insertions(+), 0 deletions(-) rename test/unit/{coffee => js}/CompileControllerTests.js (100%) rename test/unit/{coffee => js}/CompileManagerTests.js (100%) rename test/unit/{coffee => js}/ContentTypeMapperTests.js (100%) rename test/unit/{coffee => js}/DockerLockManagerTests.js (100%) rename test/unit/{coffee => js}/DockerRunnerTests.js (100%) rename test/unit/{coffee => js}/DraftModeManagerTests.js (100%) rename test/unit/{coffee => js}/LatexRunnerTests.js (100%) rename test/unit/{coffee => js}/LockManagerTests.js (100%) rename test/unit/{coffee => js}/OutputFileFinderTests.js (100%) rename test/unit/{coffee => js}/OutputFileOptimiserTests.js (100%) rename test/unit/{coffee => js}/ProjectPersistenceManagerTests.js (100%) rename test/unit/{coffee => js}/RequestParserTests.js (100%) rename test/unit/{coffee => js}/ResourceStateManagerTests.js (100%) rename test/unit/{coffee => js}/ResourceWriterTests.js (100%) 
rename test/unit/{coffee => js}/StaticServerForbidSymlinksTests.js (100%) rename test/unit/{coffee => js}/TikzManager.js (100%) rename test/unit/{coffee => js}/UrlCacheTests.js (100%) rename test/unit/{coffee => js}/UrlFetcherTests.js (100%) diff --git a/test/unit/coffee/CompileControllerTests.js b/test/unit/js/CompileControllerTests.js similarity index 100% rename from test/unit/coffee/CompileControllerTests.js rename to test/unit/js/CompileControllerTests.js diff --git a/test/unit/coffee/CompileManagerTests.js b/test/unit/js/CompileManagerTests.js similarity index 100% rename from test/unit/coffee/CompileManagerTests.js rename to test/unit/js/CompileManagerTests.js diff --git a/test/unit/coffee/ContentTypeMapperTests.js b/test/unit/js/ContentTypeMapperTests.js similarity index 100% rename from test/unit/coffee/ContentTypeMapperTests.js rename to test/unit/js/ContentTypeMapperTests.js diff --git a/test/unit/coffee/DockerLockManagerTests.js b/test/unit/js/DockerLockManagerTests.js similarity index 100% rename from test/unit/coffee/DockerLockManagerTests.js rename to test/unit/js/DockerLockManagerTests.js diff --git a/test/unit/coffee/DockerRunnerTests.js b/test/unit/js/DockerRunnerTests.js similarity index 100% rename from test/unit/coffee/DockerRunnerTests.js rename to test/unit/js/DockerRunnerTests.js diff --git a/test/unit/coffee/DraftModeManagerTests.js b/test/unit/js/DraftModeManagerTests.js similarity index 100% rename from test/unit/coffee/DraftModeManagerTests.js rename to test/unit/js/DraftModeManagerTests.js diff --git a/test/unit/coffee/LatexRunnerTests.js b/test/unit/js/LatexRunnerTests.js similarity index 100% rename from test/unit/coffee/LatexRunnerTests.js rename to test/unit/js/LatexRunnerTests.js diff --git a/test/unit/coffee/LockManagerTests.js b/test/unit/js/LockManagerTests.js similarity index 100% rename from test/unit/coffee/LockManagerTests.js rename to test/unit/js/LockManagerTests.js diff --git a/test/unit/coffee/OutputFileFinderTests.js b/test/unit/js/OutputFileFinderTests.js similarity index 100% rename from test/unit/coffee/OutputFileFinderTests.js rename to test/unit/js/OutputFileFinderTests.js diff --git a/test/unit/coffee/OutputFileOptimiserTests.js b/test/unit/js/OutputFileOptimiserTests.js similarity index 100% rename from test/unit/coffee/OutputFileOptimiserTests.js rename to test/unit/js/OutputFileOptimiserTests.js diff --git a/test/unit/coffee/ProjectPersistenceManagerTests.js b/test/unit/js/ProjectPersistenceManagerTests.js similarity index 100% rename from test/unit/coffee/ProjectPersistenceManagerTests.js rename to test/unit/js/ProjectPersistenceManagerTests.js diff --git a/test/unit/coffee/RequestParserTests.js b/test/unit/js/RequestParserTests.js similarity index 100% rename from test/unit/coffee/RequestParserTests.js rename to test/unit/js/RequestParserTests.js diff --git a/test/unit/coffee/ResourceStateManagerTests.js b/test/unit/js/ResourceStateManagerTests.js similarity index 100% rename from test/unit/coffee/ResourceStateManagerTests.js rename to test/unit/js/ResourceStateManagerTests.js diff --git a/test/unit/coffee/ResourceWriterTests.js b/test/unit/js/ResourceWriterTests.js similarity index 100% rename from test/unit/coffee/ResourceWriterTests.js rename to test/unit/js/ResourceWriterTests.js diff --git a/test/unit/coffee/StaticServerForbidSymlinksTests.js b/test/unit/js/StaticServerForbidSymlinksTests.js similarity index 100% rename from test/unit/coffee/StaticServerForbidSymlinksTests.js rename to 
test/unit/js/StaticServerForbidSymlinksTests.js diff --git a/test/unit/coffee/TikzManager.js b/test/unit/js/TikzManager.js similarity index 100% rename from test/unit/coffee/TikzManager.js rename to test/unit/js/TikzManager.js diff --git a/test/unit/coffee/UrlCacheTests.js b/test/unit/js/UrlCacheTests.js similarity index 100% rename from test/unit/coffee/UrlCacheTests.js rename to test/unit/js/UrlCacheTests.js diff --git a/test/unit/coffee/UrlFetcherTests.js b/test/unit/js/UrlFetcherTests.js similarity index 100% rename from test/unit/coffee/UrlFetcherTests.js rename to test/unit/js/UrlFetcherTests.js From 7e2542319fe91ca0a92e2e1b860fe1503c8e643c Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:15:37 +0100 Subject: [PATCH 14/24] prettier: convert test/unit decaffeinated files to Prettier format --- test/unit/js/CompileControllerTests.js | 518 +++--- test/unit/js/CompileManagerTests.js | 870 +++++----- test/unit/js/ContentTypeMapperTests.js | 115 +- test/unit/js/DockerLockManagerTests.js | 393 +++-- test/unit/js/DockerRunnerTests.js | 1471 +++++++++-------- test/unit/js/DraftModeManagerTests.js | 122 +- test/unit/js/LatexRunnerTests.js | 208 ++- test/unit/js/LockManagerTests.js | 138 +- test/unit/js/OutputFileFinderTests.js | 172 +- test/unit/js/OutputFileOptimiserTests.js | 276 ++-- .../unit/js/ProjectPersistenceManagerTests.js | 145 +- test/unit/js/RequestParserTests.js | 674 ++++---- test/unit/js/ResourceStateManagerTests.js | 299 ++-- test/unit/js/ResourceWriterTests.js | 798 +++++---- .../js/StaticServerForbidSymlinksTests.js | 386 ++--- test/unit/js/TikzManager.js | 270 +-- test/unit/js/UrlCacheTests.js | 526 +++--- test/unit/js/UrlFetcherTests.js | 279 ++-- 18 files changed, 4233 insertions(+), 3427 deletions(-) diff --git a/test/unit/js/CompileControllerTests.js b/test/unit/js/CompileControllerTests.js index 2a06fbc..4480c88 100644 --- a/test/unit/js/CompileControllerTests.js +++ b/test/unit/js/CompileControllerTests.js @@ -9,267 +9,299 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/CompileController'); -const tk = require("timekeeper"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/CompileController' +) +const tk = require('timekeeper') -describe("CompileController", function() { - beforeEach(function() { - this.CompileController = SandboxedModule.require(modulePath, { requires: { - "./CompileManager": (this.CompileManager = {}), - "./RequestParser": (this.RequestParser = {}), - "settings-sharelatex": (this.Settings = { - apis: { - clsi: { - url: "http://clsi.example.com" - } - } - }), - "./ProjectPersistenceManager": (this.ProjectPersistenceManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), err:sinon.stub(), warn: sinon.stub()}) - } - }); - this.Settings.externalUrl = "http://www.example.com"; - this.req = {}; - this.res = {}; - return this.next = sinon.stub(); - }); +describe('CompileController', function() { + beforeEach(function() { + this.CompileController = SandboxedModule.require(modulePath, { + requires: { + './CompileManager': (this.CompileManager = {}), + './RequestParser': 
(this.RequestParser = {}), + 'settings-sharelatex': (this.Settings = { + apis: { + clsi: { + url: 'http://clsi.example.com' + } + } + }), + './ProjectPersistenceManager': (this.ProjectPersistenceManager = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub(), + err: sinon.stub(), + warn: sinon.stub() + }) + } + }) + this.Settings.externalUrl = 'http://www.example.com' + this.req = {} + this.res = {} + return (this.next = sinon.stub()) + }) - describe("compile", function() { - beforeEach(function() { - this.req.body = { - compile: "mock-body" - }; - this.req.params = - {project_id: (this.project_id = "project-id-123")}; - this.request = { - compile: "mock-parsed-request" - }; - this.request_with_project_id = { - compile: this.request.compile, - project_id: this.project_id - }; - this.output_files = [{ - path: "output.pdf", - type: "pdf", - build: 1234 - }, { - path: "output.log", - type: "log", - build: 1234 - }]; - this.RequestParser.parse = sinon.stub().callsArgWith(1, null, this.request); - this.ProjectPersistenceManager.markProjectAsJustAccessed = sinon.stub().callsArg(1); - this.res.status = sinon.stub().returnsThis(); - return this.res.send = sinon.stub(); - }); + describe('compile', function() { + beforeEach(function() { + this.req.body = { + compile: 'mock-body' + } + this.req.params = { project_id: (this.project_id = 'project-id-123') } + this.request = { + compile: 'mock-parsed-request' + } + this.request_with_project_id = { + compile: this.request.compile, + project_id: this.project_id + } + this.output_files = [ + { + path: 'output.pdf', + type: 'pdf', + build: 1234 + }, + { + path: 'output.log', + type: 'log', + build: 1234 + } + ] + this.RequestParser.parse = sinon + .stub() + .callsArgWith(1, null, this.request) + this.ProjectPersistenceManager.markProjectAsJustAccessed = sinon + .stub() + .callsArg(1) + this.res.status = sinon.stub().returnsThis() + return (this.res.send = sinon.stub()) + }) - describe("successfully", function() { - beforeEach(function() { - this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, this.output_files); - return this.CompileController.compile(this.req, this.res); - }); + describe('successfully', function() { + beforeEach(function() { + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsArgWith(1, null, this.output_files) + return this.CompileController.compile(this.req, this.res) + }) - it("should parse the request", function() { - return this.RequestParser.parse - .calledWith(this.req.body) - .should.equal(true); - }); + it('should parse the request', function() { + return this.RequestParser.parse + .calledWith(this.req.body) + .should.equal(true) + }) - it("should run the compile for the specified project", function() { - return this.CompileManager.doCompileWithLock - .calledWith(this.request_with_project_id) - .should.equal(true); - }); + it('should run the compile for the specified project', function() { + return this.CompileManager.doCompileWithLock + .calledWith(this.request_with_project_id) + .should.equal(true) + }) - it("should mark the project as accessed", function() { - return this.ProjectPersistenceManager.markProjectAsJustAccessed - .calledWith(this.project_id) - .should.equal(true); - }); + it('should mark the project as accessed', function() { + return this.ProjectPersistenceManager.markProjectAsJustAccessed + .calledWith(this.project_id) + .should.equal(true) + }) - return it("should return the JSON response", function() { - 
this.res.status.calledWith(200).should.equal(true); - return this.res.send - .calledWith({ - compile: { - status: "success", - error: null, - outputFiles: this.output_files.map(file => { - return { - url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`, - path: file.path, - type: file.type, - build: file.build - }; - }) - } - }) - .should.equal(true); - }); - }); - - describe("with an error", function() { - beforeEach(function() { - this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, new Error(this.message = "error message"), null); - return this.CompileController.compile(this.req, this.res); - }); - - return it("should return the JSON response with the error", function() { - this.res.status.calledWith(500).should.equal(true); - return this.res.send - .calledWith({ - compile: { - status: "error", - error: this.message, - outputFiles: [] - } - }) - .should.equal(true); - }); - }); + return it('should return the JSON response', function() { + this.res.status.calledWith(200).should.equal(true) + return this.res.send + .calledWith({ + compile: { + status: 'success', + error: null, + outputFiles: this.output_files.map(file => { + return { + url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`, + path: file.path, + type: file.type, + build: file.build + } + }) + } + }) + .should.equal(true) + }) + }) - describe("when the request times out", function() { - beforeEach(function() { - this.error = new Error(this.message = "container timed out"); - this.error.timedout = true; - this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, this.error, null); - return this.CompileController.compile(this.req, this.res); - }); - - return it("should return the JSON response with the timeout status", function() { - this.res.status.calledWith(200).should.equal(true); - return this.res.send - .calledWith({ - compile: { - status: "timedout", - error: this.message, - outputFiles: [] - } - }) - .should.equal(true); - }); - }); + describe('with an error', function() { + beforeEach(function() { + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsArgWith(1, new Error((this.message = 'error message')), null) + return this.CompileController.compile(this.req, this.res) + }) - return describe("when the request returns no output files", function() { - beforeEach(function() { - this.CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, []); - return this.CompileController.compile(this.req, this.res); - }); - - return it("should return the JSON response with the failure status", function() { - this.res.status.calledWith(200).should.equal(true); - return this.res.send - .calledWith({ - compile: { - error: null, - status: "failure", - outputFiles: [] - } - }) - .should.equal(true); - }); - }); - }); + return it('should return the JSON response with the error', function() { + this.res.status.calledWith(500).should.equal(true) + return this.res.send + .calledWith({ + compile: { + status: 'error', + error: this.message, + outputFiles: [] + } + }) + .should.equal(true) + }) + }) - describe("syncFromCode", function() { - beforeEach(function() { - this.file = "main.tex"; - this.line = 42; - this.column = 5; - this.project_id = "mock-project-id"; - this.req.params = - {project_id: this.project_id}; - this.req.query = { - file: this.file, - line: this.line.toString(), - column: this.column.toString() - }; - this.res.json = sinon.stub(); + describe('when the request times 
out', function() { + beforeEach(function() { + this.error = new Error((this.message = 'container timed out')) + this.error.timedout = true + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsArgWith(1, this.error, null) + return this.CompileController.compile(this.req, this.res) + }) - this.CompileManager.syncFromCode = sinon.stub().callsArgWith(5, null, (this.pdfPositions = ["mock-positions"])); - return this.CompileController.syncFromCode(this.req, this.res, this.next); - }); + return it('should return the JSON response with the timeout status', function() { + this.res.status.calledWith(200).should.equal(true) + return this.res.send + .calledWith({ + compile: { + status: 'timedout', + error: this.message, + outputFiles: [] + } + }) + .should.equal(true) + }) + }) - it("should find the corresponding location in the PDF", function() { - return this.CompileManager.syncFromCode - .calledWith(this.project_id, undefined, this.file, this.line, this.column) - .should.equal(true); - }); + return describe('when the request returns no output files', function() { + beforeEach(function() { + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsArgWith(1, null, []) + return this.CompileController.compile(this.req, this.res) + }) - return it("should return the positions", function() { - return this.res.json - .calledWith({ - pdf: this.pdfPositions - }) - .should.equal(true); - }); - }); + return it('should return the JSON response with the failure status', function() { + this.res.status.calledWith(200).should.equal(true) + return this.res.send + .calledWith({ + compile: { + error: null, + status: 'failure', + outputFiles: [] + } + }) + .should.equal(true) + }) + }) + }) - describe("syncFromPdf", function() { - beforeEach(function() { - this.page = 5; - this.h = 100.23; - this.v = 45.67; - this.project_id = "mock-project-id"; - this.req.params = - {project_id: this.project_id}; - this.req.query = { - page: this.page.toString(), - h: this.h.toString(), - v: this.v.toString() - }; - this.res.json = sinon.stub(); + describe('syncFromCode', function() { + beforeEach(function() { + this.file = 'main.tex' + this.line = 42 + this.column = 5 + this.project_id = 'mock-project-id' + this.req.params = { project_id: this.project_id } + this.req.query = { + file: this.file, + line: this.line.toString(), + column: this.column.toString() + } + this.res.json = sinon.stub() - this.CompileManager.syncFromPdf = sinon.stub().callsArgWith(5, null, (this.codePositions = ["mock-positions"])); - return this.CompileController.syncFromPdf(this.req, this.res, this.next); - }); + this.CompileManager.syncFromCode = sinon + .stub() + .callsArgWith(5, null, (this.pdfPositions = ['mock-positions'])) + return this.CompileController.syncFromCode(this.req, this.res, this.next) + }) - it("should find the corresponding location in the code", function() { - return this.CompileManager.syncFromPdf - .calledWith(this.project_id, undefined, this.page, this.h, this.v) - .should.equal(true); - }); + it('should find the corresponding location in the PDF', function() { + return this.CompileManager.syncFromCode + .calledWith( + this.project_id, + undefined, + this.file, + this.line, + this.column + ) + .should.equal(true) + }) - return it("should return the positions", function() { - return this.res.json - .calledWith({ - code: this.codePositions - }) - .should.equal(true); - }); - }); + return it('should return the positions', function() { + return this.res.json + .calledWith({ + pdf: this.pdfPositions + }) + 
.should.equal(true) + }) + }) - return describe("wordcount", function() { - beforeEach(function() { - this.file = "main.tex"; - this.project_id = "mock-project-id"; - this.req.params = - {project_id: this.project_id}; - this.req.query = { - file: this.file, - image: (this.image = "example.com/image") - }; - this.res.json = sinon.stub(); + describe('syncFromPdf', function() { + beforeEach(function() { + this.page = 5 + this.h = 100.23 + this.v = 45.67 + this.project_id = 'mock-project-id' + this.req.params = { project_id: this.project_id } + this.req.query = { + page: this.page.toString(), + h: this.h.toString(), + v: this.v.toString() + } + this.res.json = sinon.stub() - this.CompileManager.wordcount = sinon.stub().callsArgWith(4, null, (this.texcount = ["mock-texcount"])); - return this.CompileController.wordcount(this.req, this.res, this.next); - }); + this.CompileManager.syncFromPdf = sinon + .stub() + .callsArgWith(5, null, (this.codePositions = ['mock-positions'])) + return this.CompileController.syncFromPdf(this.req, this.res, this.next) + }) - it("should return the word count of a file", function() { - return this.CompileManager.wordcount - .calledWith(this.project_id, undefined, this.file, this.image) - .should.equal(true); - }); + it('should find the corresponding location in the code', function() { + return this.CompileManager.syncFromPdf + .calledWith(this.project_id, undefined, this.page, this.h, this.v) + .should.equal(true) + }) - return it("should return the texcount info", function() { - return this.res.json - .calledWith({ - texcount: this.texcount - }) - .should.equal(true); - }); - }); -}); + return it('should return the positions', function() { + return this.res.json + .calledWith({ + code: this.codePositions + }) + .should.equal(true) + }) + }) + + return describe('wordcount', function() { + beforeEach(function() { + this.file = 'main.tex' + this.project_id = 'mock-project-id' + this.req.params = { project_id: this.project_id } + this.req.query = { + file: this.file, + image: (this.image = 'example.com/image') + } + this.res.json = sinon.stub() + + this.CompileManager.wordcount = sinon + .stub() + .callsArgWith(4, null, (this.texcount = ['mock-texcount'])) + return this.CompileController.wordcount(this.req, this.res, this.next) + }) + + it('should return the word count of a file', function() { + return this.CompileManager.wordcount + .calledWith(this.project_id, undefined, this.file, this.image) + .should.equal(true) + }) + + return it('should return the texcount info', function() { + return this.res.json + .calledWith({ + texcount: this.texcount + }) + .should.equal(true) + }) + }) +}) diff --git a/test/unit/js/CompileManagerTests.js b/test/unit/js/CompileManagerTests.js index e798aec..ae50bcc 100644 --- a/test/unit/js/CompileManagerTests.js +++ b/test/unit/js/CompileManagerTests.js @@ -13,423 +13,539 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/CompileManager'); -const tk = require("timekeeper"); -const { EventEmitter } = require("events"); -const Path = require("path"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/CompileManager' 
+) +const tk = require('timekeeper') +const { EventEmitter } = require('events') +const Path = require('path') -describe("CompileManager", function() { - beforeEach(function() { - this.CompileManager = SandboxedModule.require(modulePath, { requires: { - "./LatexRunner": (this.LatexRunner = {}), - "./ResourceWriter": (this.ResourceWriter = {}), - "./OutputFileFinder": (this.OutputFileFinder = {}), - "./OutputCacheManager": (this.OutputCacheManager = {}), - "settings-sharelatex": (this.Settings = { - path: { - compilesDir: "/compiles/dir" - }, - synctexBaseDir() { return "/compile"; }, - clsi: { - docker: { - image: "SOMEIMAGE" - } - } - }), +describe('CompileManager', function() { + beforeEach(function() { + this.CompileManager = SandboxedModule.require(modulePath, { + requires: { + './LatexRunner': (this.LatexRunner = {}), + './ResourceWriter': (this.ResourceWriter = {}), + './OutputFileFinder': (this.OutputFileFinder = {}), + './OutputCacheManager': (this.OutputCacheManager = {}), + 'settings-sharelatex': (this.Settings = { + path: { + compilesDir: '/compiles/dir' + }, + synctexBaseDir() { + return '/compile' + }, + clsi: { + docker: { + image: 'SOMEIMAGE' + } + } + }), - "logger-sharelatex": (this.logger = { log: sinon.stub() , info() {}}), - "child_process": (this.child_process = {}), - "./CommandRunner": (this.CommandRunner = {}), - "./DraftModeManager": (this.DraftModeManager = {}), - "./TikzManager": (this.TikzManager = {}), - "./LockManager": (this.LockManager = {}), - "fs": (this.fs = {}), - "fs-extra": (this.fse = { ensureDir: sinon.stub().callsArg(1) }) - } - }); - this.callback = sinon.stub(); - this.project_id = "project-id-123"; - return this.user_id = "1234"; - }); - describe("doCompileWithLock", function() { - beforeEach(function() { - this.request = { - resources: (this.resources = "mock-resources"), - project_id: this.project_id, - user_id: this.user_id - }; - this.output_files = ["foo", "bar"]; - this.Settings.compileDir = "compiles"; - this.compileDir = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; - this.CompileManager.doCompile = sinon.stub().callsArgWith(1, null, this.output_files); - return this.LockManager.runWithLock = (lockFile, runner, callback) => - runner((err, ...result) => callback(err, ...Array.from(result))) - ; - }); + 'logger-sharelatex': (this.logger = { log: sinon.stub(), info() {} }), + child_process: (this.child_process = {}), + './CommandRunner': (this.CommandRunner = {}), + './DraftModeManager': (this.DraftModeManager = {}), + './TikzManager': (this.TikzManager = {}), + './LockManager': (this.LockManager = {}), + fs: (this.fs = {}), + 'fs-extra': (this.fse = { ensureDir: sinon.stub().callsArg(1) }) + } + }) + this.callback = sinon.stub() + this.project_id = 'project-id-123' + return (this.user_id = '1234') + }) + describe('doCompileWithLock', function() { + beforeEach(function() { + this.request = { + resources: (this.resources = 'mock-resources'), + project_id: this.project_id, + user_id: this.user_id + } + this.output_files = ['foo', 'bar'] + this.Settings.compileDir = 'compiles' + this.compileDir = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}` + this.CompileManager.doCompile = sinon + .stub() + .callsArgWith(1, null, this.output_files) + return (this.LockManager.runWithLock = (lockFile, runner, callback) => + runner((err, ...result) => callback(err, ...Array.from(result)))) + }) - describe("when the project is not locked", function() { - beforeEach(function() { - return 
this.CompileManager.doCompileWithLock(this.request, this.callback); - }); + describe('when the project is not locked', function() { + beforeEach(function() { + return this.CompileManager.doCompileWithLock( + this.request, + this.callback + ) + }) - it("should ensure that the compile directory exists", function() { - return this.fse.ensureDir.calledWith(this.compileDir) - .should.equal(true); - }); + it('should ensure that the compile directory exists', function() { + return this.fse.ensureDir.calledWith(this.compileDir).should.equal(true) + }) - it("should call doCompile with the request", function() { - return this.CompileManager.doCompile - .calledWith(this.request) - .should.equal(true); - }); + it('should call doCompile with the request', function() { + return this.CompileManager.doCompile + .calledWith(this.request) + .should.equal(true) + }) - return it("should call the callback with the output files", function() { - return this.callback.calledWithExactly(null, this.output_files) - .should.equal(true); - }); - }); + return it('should call the callback with the output files', function() { + return this.callback + .calledWithExactly(null, this.output_files) + .should.equal(true) + }) + }) - return describe("when the project is locked", function() { - beforeEach(function() { - this.error = new Error("locked"); - this.LockManager.runWithLock = (lockFile, runner, callback) => { - return callback(this.error); - }; - return this.CompileManager.doCompileWithLock(this.request, this.callback); - }); + return describe('when the project is locked', function() { + beforeEach(function() { + this.error = new Error('locked') + this.LockManager.runWithLock = (lockFile, runner, callback) => { + return callback(this.error) + } + return this.CompileManager.doCompileWithLock( + this.request, + this.callback + ) + }) - it("should ensure that the compile directory exists", function() { - return this.fse.ensureDir.calledWith(this.compileDir) - .should.equal(true); - }); + it('should ensure that the compile directory exists', function() { + return this.fse.ensureDir.calledWith(this.compileDir).should.equal(true) + }) - it("should not call doCompile with the request", function() { - return this.CompileManager.doCompile - .called.should.equal(false); - }); + it('should not call doCompile with the request', function() { + return this.CompileManager.doCompile.called.should.equal(false) + }) - return it("should call the callback with the error", function() { - return this.callback.calledWithExactly(this.error) - .should.equal(true); - }); - }); - }); + return it('should call the callback with the error', function() { + return this.callback.calledWithExactly(this.error).should.equal(true) + }) + }) + }) - describe("doCompile", function() { - beforeEach(function() { - this.output_files = [{ - path: "output.log", - type: "log" - }, { - path: "output.pdf", - type: "pdf" - }]; - this.build_files = [{ - path: "output.log", - type: "log", - build: 1234 - }, { - path: "output.pdf", - type: "pdf", - build: 1234 - }]; - this.request = { - resources: (this.resources = "mock-resources"), - rootResourcePath: (this.rootResourcePath = "main.tex"), - project_id: this.project_id, - user_id: this.user_id, - compiler: (this.compiler = "pdflatex"), - timeout: (this.timeout = 42000), - imageName: (this.image = "example.com/image"), - flags: (this.flags = ["-file-line-error"]) - }; - this.env = {}; - this.Settings.compileDir = "compiles"; - this.compileDir = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; - 
this.ResourceWriter.syncResourcesToDisk = sinon.stub().callsArgWith(2, null, this.resources); - this.LatexRunner.runLatex = sinon.stub().callsArg(2); - this.OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, this.output_files); - this.OutputCacheManager.saveOutputFiles = sinon.stub().callsArgWith(2, null, this.build_files); - this.DraftModeManager.injectDraftMode = sinon.stub().callsArg(1); - return this.TikzManager.checkMainFile = sinon.stub().callsArg(3, false); - }); + describe('doCompile', function() { + beforeEach(function() { + this.output_files = [ + { + path: 'output.log', + type: 'log' + }, + { + path: 'output.pdf', + type: 'pdf' + } + ] + this.build_files = [ + { + path: 'output.log', + type: 'log', + build: 1234 + }, + { + path: 'output.pdf', + type: 'pdf', + build: 1234 + } + ] + this.request = { + resources: (this.resources = 'mock-resources'), + rootResourcePath: (this.rootResourcePath = 'main.tex'), + project_id: this.project_id, + user_id: this.user_id, + compiler: (this.compiler = 'pdflatex'), + timeout: (this.timeout = 42000), + imageName: (this.image = 'example.com/image'), + flags: (this.flags = ['-file-line-error']) + } + this.env = {} + this.Settings.compileDir = 'compiles' + this.compileDir = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}` + this.ResourceWriter.syncResourcesToDisk = sinon + .stub() + .callsArgWith(2, null, this.resources) + this.LatexRunner.runLatex = sinon.stub().callsArg(2) + this.OutputFileFinder.findOutputFiles = sinon + .stub() + .callsArgWith(2, null, this.output_files) + this.OutputCacheManager.saveOutputFiles = sinon + .stub() + .callsArgWith(2, null, this.build_files) + this.DraftModeManager.injectDraftMode = sinon.stub().callsArg(1) + return (this.TikzManager.checkMainFile = sinon.stub().callsArg(3, false)) + }) - describe("normally", function() { - beforeEach(function() { - return this.CompileManager.doCompile(this.request, this.callback); - }); + describe('normally', function() { + beforeEach(function() { + return this.CompileManager.doCompile(this.request, this.callback) + }) - it("should write the resources to disk", function() { - return this.ResourceWriter.syncResourcesToDisk - .calledWith(this.request, this.compileDir) - .should.equal(true); - }); + it('should write the resources to disk', function() { + return this.ResourceWriter.syncResourcesToDisk + .calledWith(this.request, this.compileDir) + .should.equal(true) + }) - it("should run LaTeX", function() { - return this.LatexRunner.runLatex - .calledWith(`${this.project_id}-${this.user_id}`, { - directory: this.compileDir, - mainFile: this.rootResourcePath, - compiler: this.compiler, - timeout: this.timeout, - image: this.image, - flags: this.flags, - environment: this.env - }) - .should.equal(true); - }); + it('should run LaTeX', function() { + return this.LatexRunner.runLatex + .calledWith(`${this.project_id}-${this.user_id}`, { + directory: this.compileDir, + mainFile: this.rootResourcePath, + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: this.env + }) + .should.equal(true) + }) - it("should find the output files", function() { - return this.OutputFileFinder.findOutputFiles - .calledWith(this.resources, this.compileDir) - .should.equal(true); - }); + it('should find the output files', function() { + return this.OutputFileFinder.findOutputFiles + .calledWith(this.resources, this.compileDir) + .should.equal(true) + }) - it("should return the output files", function() { - return 
this.callback.calledWith(null, this.build_files).should.equal(true); - }); + it('should return the output files', function() { + return this.callback + .calledWith(null, this.build_files) + .should.equal(true) + }) - return it("should not inject draft mode by default", function() { - return this.DraftModeManager.injectDraftMode.called.should.equal(false); - }); - }); + return it('should not inject draft mode by default', function() { + return this.DraftModeManager.injectDraftMode.called.should.equal(false) + }) + }) - describe("with draft mode", function() { - beforeEach(function() { - this.request.draft = true; - return this.CompileManager.doCompile(this.request, this.callback); - }); + describe('with draft mode', function() { + beforeEach(function() { + this.request.draft = true + return this.CompileManager.doCompile(this.request, this.callback) + }) - return it("should inject the draft mode header", function() { - return this.DraftModeManager.injectDraftMode - .calledWith(this.compileDir + "/" + this.rootResourcePath) - .should.equal(true); - }); - }); + return it('should inject the draft mode header', function() { + return this.DraftModeManager.injectDraftMode + .calledWith(this.compileDir + '/' + this.rootResourcePath) + .should.equal(true) + }) + }) - describe("with a check option", function() { - beforeEach(function() { - this.request.check = "error"; - return this.CompileManager.doCompile(this.request, this.callback); - }); + describe('with a check option', function() { + beforeEach(function() { + this.request.check = 'error' + return this.CompileManager.doCompile(this.request, this.callback) + }) - return it("should run chktex", function() { - return this.LatexRunner.runLatex - .calledWith(`${this.project_id}-${this.user_id}`, { - directory: this.compileDir, - mainFile: this.rootResourcePath, - compiler: this.compiler, - timeout: this.timeout, - image: this.image, - flags: this.flags, - environment: {'CHKTEX_OPTIONS': '-nall -e9 -e10 -w15 -w16', 'CHKTEX_EXIT_ON_ERROR':1, 'CHKTEX_ULIMIT_OPTIONS': '-t 5 -v 64000'} - }) - .should.equal(true); - }); - }); + return it('should run chktex', function() { + return this.LatexRunner.runLatex + .calledWith(`${this.project_id}-${this.user_id}`, { + directory: this.compileDir, + mainFile: this.rootResourcePath, + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: { + CHKTEX_OPTIONS: '-nall -e9 -e10 -w15 -w16', + CHKTEX_EXIT_ON_ERROR: 1, + CHKTEX_ULIMIT_OPTIONS: '-t 5 -v 64000' + } + }) + .should.equal(true) + }) + }) - return describe("with a knitr file and check options", function() { - beforeEach(function() { - this.request.rootResourcePath = "main.Rtex"; - this.request.check = "error"; - return this.CompileManager.doCompile(this.request, this.callback); - }); + return describe('with a knitr file and check options', function() { + beforeEach(function() { + this.request.rootResourcePath = 'main.Rtex' + this.request.check = 'error' + return this.CompileManager.doCompile(this.request, this.callback) + }) - return it("should not run chktex", function() { - return this.LatexRunner.runLatex - .calledWith(`${this.project_id}-${this.user_id}`, { - directory: this.compileDir, - mainFile: "main.Rtex", - compiler: this.compiler, - timeout: this.timeout, - image: this.image, - flags: this.flags, - environment: this.env - }) - .should.equal(true); - }); - }); - }); + return it('should not run chktex', function() { + return this.LatexRunner.runLatex + .calledWith(`${this.project_id}-${this.user_id}`, 
{ + directory: this.compileDir, + mainFile: 'main.Rtex', + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: this.env + }) + .should.equal(true) + }) + }) + }) - describe("clearProject", function() { - describe("succesfully", function() { - beforeEach(function() { - this.Settings.compileDir = "compiles"; - this.fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory(){ return true; }}); - this.proc = new EventEmitter(); - this.proc.stdout = new EventEmitter(); - this.proc.stderr = new EventEmitter(); - this.child_process.spawn = sinon.stub().returns(this.proc); - this.CompileManager.clearProject(this.project_id, this.user_id, this.callback); - return this.proc.emit("close", 0); - }); + describe('clearProject', function() { + describe('succesfully', function() { + beforeEach(function() { + this.Settings.compileDir = 'compiles' + this.fs.lstat = sinon.stub().callsArgWith(1, null, { + isDirectory() { + return true + } + }) + this.proc = new EventEmitter() + this.proc.stdout = new EventEmitter() + this.proc.stderr = new EventEmitter() + this.child_process.spawn = sinon.stub().returns(this.proc) + this.CompileManager.clearProject( + this.project_id, + this.user_id, + this.callback + ) + return this.proc.emit('close', 0) + }) - it("should remove the project directory", function() { - return this.child_process.spawn - .calledWith("rm", ["-r", `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`]) - .should.equal(true); - }); + it('should remove the project directory', function() { + return this.child_process.spawn + .calledWith('rm', [ + '-r', + `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}` + ]) + .should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - return describe("with a non-success status code", function() { - beforeEach(function() { - this.Settings.compileDir = "compiles"; - this.fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory(){ return true; }}); - this.proc = new EventEmitter(); - this.proc.stdout = new EventEmitter(); - this.proc.stderr = new EventEmitter(); - this.child_process.spawn = sinon.stub().returns(this.proc); - this.CompileManager.clearProject(this.project_id, this.user_id, this.callback); - this.proc.stderr.emit("data", (this.error = "oops")); - return this.proc.emit("close", 1); - }); + return describe('with a non-success status code', function() { + beforeEach(function() { + this.Settings.compileDir = 'compiles' + this.fs.lstat = sinon.stub().callsArgWith(1, null, { + isDirectory() { + return true + } + }) + this.proc = new EventEmitter() + this.proc.stdout = new EventEmitter() + this.proc.stderr = new EventEmitter() + this.child_process.spawn = sinon.stub().returns(this.proc) + this.CompileManager.clearProject( + this.project_id, + this.user_id, + this.callback + ) + this.proc.stderr.emit('data', (this.error = 'oops')) + return this.proc.emit('close', 1) + }) - it("should remove the project directory", function() { - return this.child_process.spawn - .calledWith("rm", ["-r", `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`]) - .should.equal(true); - }); + it('should remove the project directory', function() { + return this.child_process.spawn + .calledWith('rm', [ + '-r', + `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}` + 
]) + .should.equal(true) + }) - return it("should call the callback with an error from the stderr", function() { - this.callback - .calledWith(new Error()) - .should.equal(true); + return it('should call the callback with an error from the stderr', function() { + this.callback.calledWith(new Error()).should.equal(true) - return this.callback.args[0][0].message.should.equal(`rm -r ${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id} failed: ${this.error}`); - }); - }); - }); + return this.callback.args[0][0].message.should.equal( + `rm -r ${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id} failed: ${this.error}` + ) + }) + }) + }) - describe("syncing", function() { - beforeEach(function() { - this.page = 1; - this.h = 42.23; - this.v = 87.56; - this.width = 100.01; - this.height = 234.56; - this.line = 5; - this.column = 3; - this.file_name = "main.tex"; - this.child_process.execFile = sinon.stub(); - return this.Settings.path.synctexBaseDir = project_id => `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; - }); + describe('syncing', function() { + beforeEach(function() { + this.page = 1 + this.h = 42.23 + this.v = 87.56 + this.width = 100.01 + this.height = 234.56 + this.line = 5 + this.column = 3 + this.file_name = 'main.tex' + this.child_process.execFile = sinon.stub() + return (this.Settings.path.synctexBaseDir = project_id => + `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`) + }) - describe("syncFromCode", function() { - beforeEach(function() { - this.fs.stat = sinon.stub().callsArgWith(1, null,{isFile(){ return true; }}); - this.stdout = `NODE\t${this.page}\t${this.h}\t${this.v}\t${this.width}\t${this.height}\n`; - this.CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:this.stdout}); - return this.CompileManager.syncFromCode(this.project_id, this.user_id, this.file_name, this.line, this.column, this.callback); - }); + describe('syncFromCode', function() { + beforeEach(function() { + this.fs.stat = sinon.stub().callsArgWith(1, null, { + isFile() { + return true + } + }) + this.stdout = `NODE\t${this.page}\t${this.h}\t${this.v}\t${this.width}\t${this.height}\n` + this.CommandRunner.run = sinon + .stub() + .callsArgWith(6, null, { stdout: this.stdout }) + return this.CompileManager.syncFromCode( + this.project_id, + this.user_id, + this.file_name, + this.line, + this.column, + this.callback + ) + }) - it("should execute the synctex binary", function() { - const bin_path = Path.resolve(__dirname + "/../../../bin/synctex"); - const synctex_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/output.pdf`; - const file_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/${this.file_name}`; - return this.CommandRunner.run - .calledWith( - `${this.project_id}-${this.user_id}`, - ['/opt/synctex', 'code', synctex_path, file_path, this.line, this.column], - `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`, - this.Settings.clsi.docker.image, - 60000, - {} - ).should.equal(true); - }); + it('should execute the synctex binary', function() { + const bin_path = Path.resolve(__dirname + '/../../../bin/synctex') + const synctex_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/output.pdf` + const file_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/${this.file_name}` + return this.CommandRunner.run + .calledWith( + `${this.project_id}-${this.user_id}`, + [ + '/opt/synctex', + 'code', + 
synctex_path, + file_path, + this.line, + this.column + ], + `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`, + this.Settings.clsi.docker.image, + 60000, + {} + ) + .should.equal(true) + }) - return it("should call the callback with the parsed output", function() { - return this.callback - .calledWith(null, [{ - page: this.page, - h: this.h, - v: this.v, - height: this.height, - width: this.width - }]) - .should.equal(true); - }); - }); + return it('should call the callback with the parsed output', function() { + return this.callback + .calledWith(null, [ + { + page: this.page, + h: this.h, + v: this.v, + height: this.height, + width: this.width + } + ]) + .should.equal(true) + }) + }) - return describe("syncFromPdf", function() { - beforeEach(function() { - this.fs.stat = sinon.stub().callsArgWith(1, null,{isFile(){ return true; }}); - this.stdout = `NODE\t${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/${this.file_name}\t${this.line}\t${this.column}\n`; - this.CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:this.stdout}); - return this.CompileManager.syncFromPdf(this.project_id, this.user_id, this.page, this.h, this.v, this.callback); - }); + return describe('syncFromPdf', function() { + beforeEach(function() { + this.fs.stat = sinon.stub().callsArgWith(1, null, { + isFile() { + return true + } + }) + this.stdout = `NODE\t${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/${this.file_name}\t${this.line}\t${this.column}\n` + this.CommandRunner.run = sinon + .stub() + .callsArgWith(6, null, { stdout: this.stdout }) + return this.CompileManager.syncFromPdf( + this.project_id, + this.user_id, + this.page, + this.h, + this.v, + this.callback + ) + }) - it("should execute the synctex binary", function() { - const bin_path = Path.resolve(__dirname + "/../../../bin/synctex"); - const synctex_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/output.pdf`; - return this.CommandRunner.run - .calledWith( - `${this.project_id}-${this.user_id}`, - ['/opt/synctex', "pdf", synctex_path, this.page, this.h, this.v], - `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`, - this.Settings.clsi.docker.image, - 60000, - {}).should.equal(true); - }); + it('should execute the synctex binary', function() { + const bin_path = Path.resolve(__dirname + '/../../../bin/synctex') + const synctex_path = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/output.pdf` + return this.CommandRunner.run + .calledWith( + `${this.project_id}-${this.user_id}`, + ['/opt/synctex', 'pdf', synctex_path, this.page, this.h, this.v], + `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`, + this.Settings.clsi.docker.image, + 60000, + {} + ) + .should.equal(true) + }) - return it("should call the callback with the parsed output", function() { - return this.callback - .calledWith(null, [{ - file: this.file_name, - line: this.line, - column: this.column - }]) - .should.equal(true); - }); - }); - }); + return it('should call the callback with the parsed output', function() { + return this.callback + .calledWith(null, [ + { + file: this.file_name, + line: this.line, + column: this.column + } + ]) + .should.equal(true) + }) + }) + }) - return describe("wordcount", function() { - beforeEach(function() { - this.CommandRunner.run = sinon.stub().callsArg(6); - this.fs.readFile = sinon.stub().callsArgWith(2, null, (this.stdout = "Encoding: ascii\nWords in text: 2")); - this.callback = 
sinon.stub(); + return describe('wordcount', function() { + beforeEach(function() { + this.CommandRunner.run = sinon.stub().callsArg(6) + this.fs.readFile = sinon + .stub() + .callsArgWith( + 2, + null, + (this.stdout = 'Encoding: ascii\nWords in text: 2') + ) + this.callback = sinon.stub() - this.project_id; - this.timeout = 60 * 1000; - this.file_name = "main.tex"; - this.Settings.path.compilesDir = "/local/compile/directory"; - this.image = "example.com/image"; + this.project_id + this.timeout = 60 * 1000 + this.file_name = 'main.tex' + this.Settings.path.compilesDir = '/local/compile/directory' + this.image = 'example.com/image' - return this.CompileManager.wordcount(this.project_id, this.user_id, this.file_name, this.image, this.callback); - }); + return this.CompileManager.wordcount( + this.project_id, + this.user_id, + this.file_name, + this.image, + this.callback + ) + }) - it("should run the texcount command", function() { - this.directory = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`; - this.file_path = `$COMPILE_DIR/${this.file_name}`; - this.command =[ "texcount", "-nocol", "-inc", this.file_path, `-out=${this.file_path}.wc`]; + it('should run the texcount command', function() { + this.directory = `${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}` + this.file_path = `$COMPILE_DIR/${this.file_name}` + this.command = [ + 'texcount', + '-nocol', + '-inc', + this.file_path, + `-out=${this.file_path}.wc` + ] - return this.CommandRunner.run - .calledWith(`${this.project_id}-${this.user_id}`, this.command, this.directory, this.image, this.timeout, {}) - .should.equal(true); - }); + return this.CommandRunner.run + .calledWith( + `${this.project_id}-${this.user_id}`, + this.command, + this.directory, + this.image, + this.timeout, + {} + ) + .should.equal(true) + }) - return it("should call the callback with the parsed output", function() { - return this.callback - .calledWith(null, { - encode: "ascii", - textWords: 2, - headWords: 0, - outside: 0, - headers: 0, - elements: 0, - mathInline: 0, - mathDisplay: 0, - errors: 0, - messages: "" - }) - .should.equal(true); - }); - }); -}); + return it('should call the callback with the parsed output', function() { + return this.callback + .calledWith(null, { + encode: 'ascii', + textWords: 2, + headWords: 0, + outside: 0, + headers: 0, + elements: 0, + mathInline: 0, + mathDisplay: 0, + errors: 0, + messages: '' + }) + .should.equal(true) + }) + }) +}) diff --git a/test/unit/js/ContentTypeMapperTests.js b/test/unit/js/ContentTypeMapperTests.js index bbde292..41fc37e 100644 --- a/test/unit/js/ContentTypeMapperTests.js +++ b/test/unit/js/ContentTypeMapperTests.js @@ -10,73 +10,72 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/ContentTypeMapper'); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/ContentTypeMapper' +) describe('ContentTypeMapper', function() { + beforeEach(function() { + return (this.ContentTypeMapper = SandboxedModule.require(modulePath)) + }) - beforeEach(function() { - return this.ContentTypeMapper = SandboxedModule.require(modulePath); - }); + return 
describe('map', function() { + it('should map .txt to text/plain', function() { + const content_type = this.ContentTypeMapper.map('example.txt') + return content_type.should.equal('text/plain') + }) - return describe('map', function() { + it('should map .csv to text/csv', function() { + const content_type = this.ContentTypeMapper.map('example.csv') + return content_type.should.equal('text/csv') + }) - it('should map .txt to text/plain', function() { - const content_type = this.ContentTypeMapper.map('example.txt'); - return content_type.should.equal('text/plain'); - }); + it('should map .pdf to application/pdf', function() { + const content_type = this.ContentTypeMapper.map('example.pdf') + return content_type.should.equal('application/pdf') + }) - it('should map .csv to text/csv', function() { - const content_type = this.ContentTypeMapper.map('example.csv'); - return content_type.should.equal('text/csv'); - }); + it('should fall back to octet-stream', function() { + const content_type = this.ContentTypeMapper.map('example.unknown') + return content_type.should.equal('application/octet-stream') + }) - it('should map .pdf to application/pdf', function() { - const content_type = this.ContentTypeMapper.map('example.pdf'); - return content_type.should.equal('application/pdf'); - }); + describe('coercing web files to plain text', function() { + it('should map .js to plain text', function() { + const content_type = this.ContentTypeMapper.map('example.js') + return content_type.should.equal('text/plain') + }) - it('should fall back to octet-stream', function() { - const content_type = this.ContentTypeMapper.map('example.unknown'); - return content_type.should.equal('application/octet-stream'); - }); + it('should map .html to plain text', function() { + const content_type = this.ContentTypeMapper.map('example.html') + return content_type.should.equal('text/plain') + }) - describe('coercing web files to plain text', function() { + return it('should map .css to plain text', function() { + const content_type = this.ContentTypeMapper.map('example.css') + return content_type.should.equal('text/plain') + }) + }) - it('should map .js to plain text', function() { - const content_type = this.ContentTypeMapper.map('example.js'); - return content_type.should.equal('text/plain'); - }); + return describe('image files', function() { + it('should map .png to image/png', function() { + const content_type = this.ContentTypeMapper.map('example.png') + return content_type.should.equal('image/png') + }) - it('should map .html to plain text', function() { - const content_type = this.ContentTypeMapper.map('example.html'); - return content_type.should.equal('text/plain'); - }); + it('should map .jpeg to image/jpeg', function() { + const content_type = this.ContentTypeMapper.map('example.jpeg') + return content_type.should.equal('image/jpeg') + }) - return it('should map .css to plain text', function() { - const content_type = this.ContentTypeMapper.map('example.css'); - return content_type.should.equal('text/plain'); - }); - }); - - return describe('image files', function() { - - it('should map .png to image/png', function() { - const content_type = this.ContentTypeMapper.map('example.png'); - return content_type.should.equal('image/png'); - }); - - it('should map .jpeg to image/jpeg', function() { - const content_type = this.ContentTypeMapper.map('example.jpeg'); - return content_type.should.equal('image/jpeg'); - }); - - return it('should map .svg to text/plain to protect against XSS (SVG can execute JS)', function() { 
- const content_type = this.ContentTypeMapper.map('example.svg'); - return content_type.should.equal('text/plain'); - }); - }); - }); -}); + return it('should map .svg to text/plain to protect against XSS (SVG can execute JS)', function() { + const content_type = this.ContentTypeMapper.map('example.svg') + return content_type.should.equal('text/plain') + }) + }) + }) +}) diff --git a/test/unit/js/DockerLockManagerTests.js b/test/unit/js/DockerLockManagerTests.js index 155a246..9dcf9dc 100644 --- a/test/unit/js/DockerLockManagerTests.js +++ b/test/unit/js/DockerLockManagerTests.js @@ -9,185 +9,244 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -require("coffee-script"); -const modulePath = require('path').join(__dirname, '../../../app/coffee/DockerLockManager'); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +require('coffee-script') +const modulePath = require('path').join( + __dirname, + '../../../app/coffee/DockerLockManager' +) -describe("LockManager", function() { - beforeEach(function() { - return this.LockManager = SandboxedModule.require(modulePath, { requires: { - "settings-sharelatex": (this.Settings = - {clsi: {docker: {}}}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }) - } - });}); +describe('LockManager', function() { + beforeEach(function() { + return (this.LockManager = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': (this.Settings = { clsi: { docker: {} } }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }) + } + })) + }) - return describe("runWithLock", function() { - describe("with a single lock", function() { - beforeEach(function(done) { - this.callback = sinon.stub(); - return this.LockManager.runWithLock("lock-one", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world") - , 100) - - , (err, ...args) => { - this.callback(err,...Array.from(args)); - return done(); - }); - }); + return describe('runWithLock', function() { + describe('with a single lock', function() { + beforeEach(function(done) { + this.callback = sinon.stub() + return this.LockManager.runWithLock( + 'lock-one', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world'), 100), - return it("should call the callback", function() { - return this.callback.calledWith(null,"hello","world").should.equal(true); - }); - }); + (err, ...args) => { + this.callback(err, ...Array.from(args)) + return done() + } + ) + }) - describe("with two locks", function() { - beforeEach(function(done) { - this.callback1 = sinon.stub(); - this.callback2 = sinon.stub(); - this.LockManager.runWithLock("lock-one", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","one") - , 100) - - , (err, ...args) => { - return this.callback1(err,...Array.from(args)); - }); - return this.LockManager.runWithLock("lock-two", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","two") - , 200) - - , (err, ...args) => { - this.callback2(err,...Array.from(args)); - return done(); - }); - }); + return it('should call the callback', function() { + return this.callback + .calledWith(null, 'hello', 'world') + .should.equal(true) + }) + }) - it("should call the first callback", function() { - return 
this.callback1.calledWith(null,"hello","world","one").should.equal(true); - }); + describe('with two locks', function() { + beforeEach(function(done) { + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + this.LockManager.runWithLock( + 'lock-one', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100), - return it("should call the second callback", function() { - return this.callback2.calledWith(null,"hello","world","two").should.equal(true); - }); - }); + (err, ...args) => { + return this.callback1(err, ...Array.from(args)) + } + ) + return this.LockManager.runWithLock( + 'lock-two', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200), - return describe("with lock contention", function() { - describe("where the first lock is released quickly", function() { - beforeEach(function(done) { - this.LockManager.MAX_LOCK_WAIT_TIME = 1000; - this.LockManager.LOCK_TEST_INTERVAL = 100; - this.callback1 = sinon.stub(); - this.callback2 = sinon.stub(); - this.LockManager.runWithLock("lock", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","one") - , 100) - - , (err, ...args) => { - return this.callback1(err,...Array.from(args)); - }); - return this.LockManager.runWithLock("lock", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","two") - , 200) - - , (err, ...args) => { - this.callback2(err,...Array.from(args)); - return done(); - }); - }); + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return done() + } + ) + }) - it("should call the first callback", function() { - return this.callback1.calledWith(null,"hello","world","one").should.equal(true); - }); + it('should call the first callback', function() { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) - return it("should call the second callback", function() { - return this.callback2.calledWith(null,"hello","world","two").should.equal(true); - }); - }); + return it('should call the second callback', function() { + return this.callback2 + .calledWith(null, 'hello', 'world', 'two') + .should.equal(true) + }) + }) - describe("where the first lock is held longer than the waiting time", function() { - beforeEach(function(done) { - let doneTwo; - this.LockManager.MAX_LOCK_HOLD_TIME = 10000; - this.LockManager.MAX_LOCK_WAIT_TIME = 1000; - this.LockManager.LOCK_TEST_INTERVAL = 100; - this.callback1 = sinon.stub(); - this.callback2 = sinon.stub(); - let doneOne = (doneTwo = false); - const finish = function(key) { - if (key === 1) { doneOne = true; } - if (key === 2) { doneTwo = true; } - if (doneOne && doneTwo) { return done(); } - }; - this.LockManager.runWithLock("lock", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","one") - , 1100) - - , (err, ...args) => { - this.callback1(err,...Array.from(args)); - return finish(1); - }); - return this.LockManager.runWithLock("lock", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","two") - , 100) - - , (err, ...args) => { - this.callback2(err,...Array.from(args)); - return finish(2); - }); - }); + return describe('with lock contention', function() { + describe('where the first lock is released quickly', function() { + beforeEach(function(done) { + this.LockManager.MAX_LOCK_WAIT_TIME = 1000 + this.LockManager.LOCK_TEST_INTERVAL = 100 + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 
'world', 'one'), 100), - it("should call the first callback", function() { - return this.callback1.calledWith(null,"hello","world","one").should.equal(true); - }); + (err, ...args) => { + return this.callback1(err, ...Array.from(args)) + } + ) + return this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200), - return it("should call the second callback with an error", function() { - const error = sinon.match.instanceOf(Error); - return this.callback2.calledWith(error).should.equal(true); - }); - }); + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return done() + } + ) + }) - return describe("where the first lock is held longer than the max holding time", function() { - beforeEach(function(done) { - let doneTwo; - this.LockManager.MAX_LOCK_HOLD_TIME = 1000; - this.LockManager.MAX_LOCK_WAIT_TIME = 2000; - this.LockManager.LOCK_TEST_INTERVAL = 100; - this.callback1 = sinon.stub(); - this.callback2 = sinon.stub(); - let doneOne = (doneTwo = false); - const finish = function(key) { - if (key === 1) { doneOne = true; } - if (key === 2) { doneTwo = true; } - if (doneOne && doneTwo) { return done(); } - }; - this.LockManager.runWithLock("lock", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","one") - , 1500) - - , (err, ...args) => { - this.callback1(err,...Array.from(args)); - return finish(1); - }); - return this.LockManager.runWithLock("lock", releaseLock => - setTimeout(() => releaseLock(null, "hello", "world","two") - , 100) - - , (err, ...args) => { - this.callback2(err,...Array.from(args)); - return finish(2); - }); - }); + it('should call the first callback', function() { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) - it("should call the first callback", function() { - return this.callback1.calledWith(null,"hello","world","one").should.equal(true); - }); + return it('should call the second callback', function() { + return this.callback2 + .calledWith(null, 'hello', 'world', 'two') + .should.equal(true) + }) + }) - return it("should call the second callback", function() { - return this.callback2.calledWith(null,"hello","world","two").should.equal(true); - }); - }); - }); - }); -}); + describe('where the first lock is held longer than the waiting time', function() { + beforeEach(function(done) { + let doneTwo + this.LockManager.MAX_LOCK_HOLD_TIME = 10000 + this.LockManager.MAX_LOCK_WAIT_TIME = 1000 + this.LockManager.LOCK_TEST_INTERVAL = 100 + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + let doneOne = (doneTwo = false) + const finish = function(key) { + if (key === 1) { + doneOne = true + } + if (key === 2) { + doneTwo = true + } + if (doneOne && doneTwo) { + return done() + } + } + this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout( + () => releaseLock(null, 'hello', 'world', 'one'), + 1100 + ), + + (err, ...args) => { + this.callback1(err, ...Array.from(args)) + return finish(1) + } + ) + return this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100), + + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return finish(2) + } + ) + }) + + it('should call the first callback', function() { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) + + return it('should call the second callback with an error', function() { + const error = sinon.match.instanceOf(Error) + return 
this.callback2.calledWith(error).should.equal(true) + }) + }) + + return describe('where the first lock is held longer than the max holding time', function() { + beforeEach(function(done) { + let doneTwo + this.LockManager.MAX_LOCK_HOLD_TIME = 1000 + this.LockManager.MAX_LOCK_WAIT_TIME = 2000 + this.LockManager.LOCK_TEST_INTERVAL = 100 + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + let doneOne = (doneTwo = false) + const finish = function(key) { + if (key === 1) { + doneOne = true + } + if (key === 2) { + doneTwo = true + } + if (doneOne && doneTwo) { + return done() + } + } + this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout( + () => releaseLock(null, 'hello', 'world', 'one'), + 1500 + ), + + (err, ...args) => { + this.callback1(err, ...Array.from(args)) + return finish(1) + } + ) + return this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100), + + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return finish(2) + } + ) + }) + + it('should call the first callback', function() { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) + + return it('should call the second callback', function() { + return this.callback2 + .calledWith(null, 'hello', 'world', 'two') + .should.equal(true) + }) + }) + }) + }) +}) diff --git a/test/unit/js/DockerRunnerTests.js b/test/unit/js/DockerRunnerTests.js index 152b8b9..e43a044 100644 --- a/test/unit/js/DockerRunnerTests.js +++ b/test/unit/js/DockerRunnerTests.js @@ -13,651 +13,826 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const { expect } = require('chai'); -require("coffee-script"); -const modulePath = require('path').join(__dirname, '../../../app/coffee/DockerRunner'); -const Path = require("path"); - -describe("DockerRunner", function() { - beforeEach(function() { - let container, Docker, Timer; - this.container = (container = {}); - this.DockerRunner = SandboxedModule.require(modulePath, { requires: { - "settings-sharelatex": (this.Settings = { - clsi: { docker: {} - }, - path: {} - }), - "logger-sharelatex": (this.logger = { - log: sinon.stub(), - error: sinon.stub(), - info: sinon.stub(), - warn: sinon.stub() - }), - "dockerode": (Docker = (function() { - Docker = class Docker { - static initClass() { - this.prototype.getContainer = sinon.stub().returns(container); - this.prototype.createContainer = sinon.stub().yields(null, container); - this.prototype.listContainers = sinon.stub(); - } - }; - Docker.initClass(); - return Docker; - })()), - "fs": (this.fs = { stat: sinon.stub().yields(null,{isDirectory(){ return true; }}) }), - "./Metrics": { - Timer: (Timer = class Timer { - done() {} - }) - }, - "./LockManager": { - runWithLock(key, runner, callback) { return runner(callback); } - } - } - } - ); - this.Docker = Docker; - this.getContainer = Docker.prototype.getContainer; - this.createContainer = Docker.prototype.createContainer; - this.listContainers = Docker.prototype.listContainers; - - this.directory = "/local/compile/directory"; - this.mainFile = "main-file.tex"; - this.compiler = "pdflatex"; - this.image = "example.com/sharelatex/image:2016.2"; - this.env = {}; - this.callback = sinon.stub(); - this.project_id = "project-id-123"; - this.volumes = - 
{"/local/compile/directory": "/compile"}; - this.Settings.clsi.docker.image = (this.defaultImage = "default-image"); - return this.Settings.clsi.docker.env = {PATH: "mock-path"}; - }); - - describe("run", function() { - beforeEach(function(done){ - this.DockerRunner._getContainerOptions = sinon.stub().returns(this.options = {mockoptions: "foo"}); - this.DockerRunner._fingerprintContainer = sinon.stub().returns(this.fingerprint = "fingerprint"); - - this.name = `project-${this.project_id}-${this.fingerprint}`; - - this.command = ["mock", "command", "--outdir=$COMPILE_DIR"]; - this.command_with_dir = ["mock", "command", "--outdir=/compile"]; - this.timeout = 42000; - return done(); - }); - - describe("successfully", function() { - beforeEach(function(done){ - this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); - return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, (err, output)=> { - this.callback(err, output); - return done(); - }); - }); - - it("should generate the options for the container", function() { - return this.DockerRunner._getContainerOptions - .calledWith(this.command_with_dir, this.image, this.volumes, this.timeout) - .should.equal(true); - }); - - it("should generate the fingerprint from the returned options", function() { - return this.DockerRunner._fingerprintContainer - .calledWith(this.options) - .should.equal(true); - }); - - it("should do the run", function() { - return this.DockerRunner._runAndWaitForContainer - .calledWith(this.options, this.volumes, this.timeout) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.calledWith(null, this.output).should.equal(true); - }); - }); - - describe('when path.sandboxedCompilesHostDir is set', function() { - - beforeEach(function() { - this.Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles'; - this.directory = '/var/lib/sharelatex/data/compiles/xyz'; - this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); - return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, this.callback); - }); - - it('should re-write the bind directory', function() { - const volumes = this.DockerRunner._runAndWaitForContainer.lastCall.args[1]; - return expect(volumes).to.deep.equal({ - '/some/host/dir/compiles/xyz': '/compile' - }); - }); - - return it("should call the callback", function() { - return this.callback.calledWith(null, this.output).should.equal(true); - }); - }); - - describe("when the run throws an error", function() { - beforeEach(function() { - let firstTime = true; - this.output = "mock-output"; - this.DockerRunner._runAndWaitForContainer = (options, volumes, timeout, callback) => { - if (callback == null) { callback = function(error, output){}; } - if (firstTime) { - firstTime = false; - return callback(new Error("HTTP code is 500 which indicates error: server error")); - } else { - return callback(null, this.output); - } - }; - sinon.spy(this.DockerRunner, "_runAndWaitForContainer"); - this.DockerRunner.destroyContainer = sinon.stub().callsArg(3); - return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, this.callback); - }); - - it("should do the run twice", function() { - return this.DockerRunner._runAndWaitForContainer - .calledTwice.should.equal(true); - }); - - it("should destroy 
the container in between", function() { - return this.DockerRunner.destroyContainer - .calledWith(this.name, null) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.calledWith(null, this.output).should.equal(true); - }); - }); - - describe("with no image", function() { - beforeEach(function() { - this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); - return this.DockerRunner.run(this.project_id, this.command, this.directory, null, this.timeout, this.env, this.callback); - }); - - return it("should use the default image", function() { - return this.DockerRunner._getContainerOptions - .calledWith(this.command_with_dir, this.defaultImage, this.volumes, this.timeout) - .should.equal(true); - }); - }); - - return describe("with image override", function() { - beforeEach(function() { - this.Settings.texliveImageNameOveride = "overrideimage.com/something"; - this.DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, (this.output = "mock-output")); - return this.DockerRunner.run(this.project_id, this.command, this.directory, this.image, this.timeout, this.env, this.callback); - }); - - return it("should use the override and keep the tag", function() { - const image = this.DockerRunner._getContainerOptions.args[0][1]; - return image.should.equal("overrideimage.com/something/image:2016.2"); - }); - }); - }); - - describe("_runAndWaitForContainer", function() { - beforeEach(function() { - this.options = {mockoptions: "foo", name: (this.name = "mock-name")}; - this.DockerRunner.startContainer = (options, volumes, attachStreamHandler, callback) => { - attachStreamHandler(null, (this.output = "mock-output")); - return callback(null, (this.containerId = "container-id")); - }; - sinon.spy(this.DockerRunner, "startContainer"); - this.DockerRunner.waitForContainer = sinon.stub().callsArgWith(2, null, (this.exitCode = 42)); - return this.DockerRunner._runAndWaitForContainer(this.options, this.volumes, this.timeout, this.callback); - }); - - it("should create/start the container", function() { - return this.DockerRunner.startContainer - .calledWith(this.options, this.volumes) - .should.equal(true); - }); - - it("should wait for the container to finish", function() { - return this.DockerRunner.waitForContainer - .calledWith(this.name, this.timeout) - .should.equal(true); - }); - - return it("should call the callback with the output", function() { - return this.callback.calledWith(null, this.output).should.equal(true); - }); - }); - - describe("startContainer", function() { - beforeEach(function() { - this.attachStreamHandler = sinon.stub(); - this.attachStreamHandler.cock = true; - this.options = {mockoptions: "foo", name: "mock-name"}; - this.container.inspect = sinon.stub().callsArgWith(0); - this.DockerRunner.attachToContainer = (containerId, attachStreamHandler, cb)=> { - attachStreamHandler(); - return cb(); - }; - return sinon.spy(this.DockerRunner, "attachToContainer"); - }); - - - - describe("when the container exists", function() { - beforeEach(function() { - this.container.inspect = sinon.stub().callsArgWith(0); - this.container.start = sinon.stub().yields(); - - return this.DockerRunner.startContainer(this.options, this.volumes, this.callback, () => {}); - }); - - it("should start the container with the given name", function() { - this.getContainer - .calledWith(this.options.name) - .should.equal(true); - return this.container.start - .called - .should.equal(true); - 
}); - - it("should not try to create the container", function() { - return this.createContainer.called.should.equal(false); - }); - - it("should attach to the container", function() { - return this.DockerRunner.attachToContainer.called.should.equal(true); - }); - - it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - - return it("should attach before the container starts", function() { - return sinon.assert.callOrder(this.DockerRunner.attachToContainer, this.container.start); - }); - }); - - describe("when the container does not exist", function() { - beforeEach(function(){ - const exists = false; - this.container.start = sinon.stub().yields(); - this.container.inspect = sinon.stub().callsArgWith(0, {statusCode:404}); - return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); - }); - - it("should create the container", function() { - return this.createContainer - .calledWith(this.options) - .should.equal(true); - }); - - it("should call the callback and stream handler", function() { - this.attachStreamHandler.called.should.equal(true); - return this.callback.called.should.equal(true); - }); - - it("should attach to the container", function() { - return this.DockerRunner.attachToContainer.called.should.equal(true); - }); - - return it("should attach before the container starts", function() { - return sinon.assert.callOrder(this.DockerRunner.attachToContainer, this.container.start); - }); - }); - - - describe("when the container is already running", function() { - beforeEach(function() { - const error = new Error(`HTTP code is 304 which indicates error: server error - start: Cannot start container ${this.name}: The container MOCKID is already running.`); - error.statusCode = 304; - this.container.start = sinon.stub().yields(error); - this.container.inspect = sinon.stub().callsArgWith(0); - return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); - }); - - it("should not try to create the container", function() { - return this.createContainer.called.should.equal(false); - }); - - return it("should call the callback and stream handler without an error", function() { - this.attachStreamHandler.called.should.equal(true); - return this.callback.called.should.equal(true); - }); - }); - - describe("when a volume does not exist", function() { - beforeEach(function(){ - this.fs.stat = sinon.stub().yields(new Error("no such path")); - return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); - }); - - it("should not try to create the container", function() { - return this.createContainer.called.should.equal(false); - }); - - return it("should call the callback with an error", function() { - return this.callback.calledWith(new Error()).should.equal(true); - }); - }); - - describe("when a volume exists but is not a directory", function() { - beforeEach(function() { - this.fs.stat = sinon.stub().yields(null, {isDirectory() { return false; }}); - return this.DockerRunner.startContainer(this.options, this.volumes, this.attachStreamHandler, this.callback); - }); - - it("should not try to create the container", function() { - return this.createContainer.called.should.equal(false); - }); - - return it("should call the callback with an error", function() { - return this.callback.calledWith(new Error()).should.equal(true); - }); - }); - - describe("when a volume does not exist, but sibling-containers are 
used", function() { - beforeEach(function() { - this.fs.stat = sinon.stub().yields(new Error("no such path")); - this.Settings.path.sandboxedCompilesHostDir = '/some/path'; - this.container.start = sinon.stub().yields(); - return this.DockerRunner.startContainer(this.options, this.volumes, this.callback); - }); - - afterEach(function() { - return delete this.Settings.path.sandboxedCompilesHostDir; - }); - - it("should start the container with the given name", function() { - this.getContainer - .calledWith(this.options.name) - .should.equal(true); - return this.container.start - .called - .should.equal(true); - }); - - it("should not try to create the container", function() { - return this.createContainer.called.should.equal(false); - }); - - return it("should call the callback", function() { - this.callback.called.should.equal(true); - return this.callback.calledWith(new Error()).should.equal(false); - }); - }); - - return describe("when the container tries to be created, but already has been (race condition)", function() {}); - }); - - describe("waitForContainer", function() { - beforeEach(function() { - this.containerId = "container-id"; - this.timeout = 5000; - this.container.wait = sinon.stub().yields(null, {StatusCode: (this.statusCode = 42)}); - return this.container.kill = sinon.stub().yields(); - }); - - describe("when the container returns in time", function() { - beforeEach(function() { - return this.DockerRunner.waitForContainer(this.containerId, this.timeout, this.callback); - }); - - it("should wait for the container", function() { - this.getContainer - .calledWith(this.containerId) - .should.equal(true); - return this.container.wait - .called - .should.equal(true); - }); - - return it("should call the callback with the exit", function() { - return this.callback - .calledWith(null, this.statusCode) - .should.equal(true); - }); - }); - - return describe("when the container does not return before the timeout", function() { - beforeEach(function(done) { - this.container.wait = function(callback) { - if (callback == null) { callback = function(error, exitCode) {}; } - return setTimeout(() => callback(null, {StatusCode: 42}) - , 100); - }; - this.timeout = 5; - return this.DockerRunner.waitForContainer(this.containerId, this.timeout, (...args) => { - this.callback(...Array.from(args || [])); - return done(); - }); - }); - - it("should call kill on the container", function() { - this.getContainer - .calledWith(this.containerId) - .should.equal(true); - return this.container.kill - .called - .should.equal(true); - }); - - return it("should call the callback with an error", function() { - const error = new Error("container timed out"); - error.timedout = true; - return this.callback - .calledWith(error) - .should.equal(true); - }); - }); - }); - - describe("destroyOldContainers", function() { - beforeEach(function(done) { - const oneHourInSeconds = 60 * 60; - const oneHourInMilliseconds = oneHourInSeconds * 1000; - const nowInSeconds = Date.now()/1000; - this.containers = [{ - Name: "/project-old-container-name", - Id: "old-container-id", - Created: nowInSeconds - oneHourInSeconds - 100 - }, { - Name: "/project-new-container-name", - Id: "new-container-id", - Created: (nowInSeconds - oneHourInSeconds) + 100 - }, { - Name: "/totally-not-a-project-container", - Id: "some-random-id", - Created: nowInSeconds - (2 * oneHourInSeconds ) - }]; - this.DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds; - this.listContainers.callsArgWith(1, null, this.containers); - 
this.DockerRunner.destroyContainer = sinon.stub().callsArg(3); - return this.DockerRunner.destroyOldContainers(error => { - this.callback(error); - return done(); - }); - }); - - it("should list all containers", function() { - return this.listContainers - .calledWith({all: true}) - .should.equal(true); - }); - - it("should destroy old containers", function() { - this.DockerRunner.destroyContainer - .callCount - .should.equal(1); - return this.DockerRunner.destroyContainer - .calledWith("/project-old-container-name", "old-container-id") - .should.equal(true); - }); - - it("should not destroy new containers", function() { - return this.DockerRunner.destroyContainer - .calledWith("/project-new-container-name", "new-container-id") - .should.equal(false); - }); - - it("should not destroy non-project containers", function() { - return this.DockerRunner.destroyContainer - .calledWith("/totally-not-a-project-container", "some-random-id") - .should.equal(false); - }); - - return it("should callback the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - - - describe('_destroyContainer', function() { - beforeEach(function() { - this.containerId = 'some_id'; - this.fakeContainer = - {remove: sinon.stub().callsArgWith(1, null)}; - return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); - }); - - it('should get the container', function(done) { - return this.DockerRunner._destroyContainer(this.containerId, false, err => { - this.Docker.prototype.getContainer.callCount.should.equal(1); - this.Docker.prototype.getContainer.calledWith(this.containerId).should.equal(true); - return done(); - }); - }); - - it('should try to force-destroy the container when shouldForce=true', function(done) { - return this.DockerRunner._destroyContainer(this.containerId, true, err => { - this.fakeContainer.remove.callCount.should.equal(1); - this.fakeContainer.remove.calledWith({force: true}).should.equal(true); - return done(); - }); - }); - - it('should not try to force-destroy the container when shouldForce=false', function(done) { - return this.DockerRunner._destroyContainer(this.containerId, false, err => { - this.fakeContainer.remove.callCount.should.equal(1); - this.fakeContainer.remove.calledWith({force: false}).should.equal(true); - return done(); - }); - }); - - it('should not produce an error', function(done) { - return this.DockerRunner._destroyContainer(this.containerId, false, err => { - expect(err).to.equal(null); - return done(); - }); - }); - - describe('when the container is already gone', function() { - beforeEach(function() { - this.fakeError = new Error('woops'); - this.fakeError.statusCode = 404; - this.fakeContainer = - {remove: sinon.stub().callsArgWith(1, this.fakeError)}; - return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); - }); - - return it('should not produce an error', function(done) { - return this.DockerRunner._destroyContainer(this.containerId, false, err => { - expect(err).to.equal(null); - return done(); - }); - }); - }); - - return describe('when container.destroy produces an error', function(done) { - beforeEach(function() { - this.fakeError = new Error('woops'); - this.fakeError.statusCode = 500; - this.fakeContainer = - {remove: sinon.stub().callsArgWith(1, this.fakeError)}; - return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); - }); - - return it('should produce an error', function(done) { - return this.DockerRunner._destroyContainer(this.containerId, 
false, err => { - expect(err).to.not.equal(null); - expect(err).to.equal(this.fakeError); - return done(); - }); - }); - }); - }); - - - return describe('kill', function() { - beforeEach(function() { - this.containerId = 'some_id'; - this.fakeContainer = - {kill: sinon.stub().callsArgWith(0, null)}; - return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); - }); - - it('should get the container', function(done) { - return this.DockerRunner.kill(this.containerId, err => { - this.Docker.prototype.getContainer.callCount.should.equal(1); - this.Docker.prototype.getContainer.calledWith(this.containerId).should.equal(true); - return done(); - }); - }); - - it('should try to force-destroy the container', function(done) { - return this.DockerRunner.kill(this.containerId, err => { - this.fakeContainer.kill.callCount.should.equal(1); - return done(); - }); - }); - - it('should not produce an error', function(done) { - return this.DockerRunner.kill(this.containerId, err => { - expect(err).to.equal(undefined); - return done(); - }); - }); - - describe('when the container is not actually running', function() { - beforeEach(function() { - this.fakeError = new Error('woops'); - this.fakeError.statusCode = 500; - this.fakeError.message = 'Cannot kill container is not running'; - this.fakeContainer = - {kill: sinon.stub().callsArgWith(0, this.fakeError)}; - return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); - }); - - return it('should not produce an error', function(done) { - return this.DockerRunner.kill(this.containerId, err => { - expect(err).to.equal(undefined); - return done(); - }); - }); - }); - - return describe('when container.kill produces a legitimate error', function(done) { - beforeEach(function() { - this.fakeError = new Error('woops'); - this.fakeError.statusCode = 500; - this.fakeError.message = 'Totally legitimate reason to throw an error'; - this.fakeContainer = - {kill: sinon.stub().callsArgWith(0, this.fakeError)}; - return this.Docker.prototype.getContainer = sinon.stub().returns(this.fakeContainer); - }); - - return it('should produce an error', function(done) { - return this.DockerRunner.kill(this.containerId, err => { - expect(err).to.not.equal(undefined); - expect(err).to.equal(this.fakeError); - return done(); - }); - }); - }); - }); -}); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const { expect } = require('chai') +require('coffee-script') +const modulePath = require('path').join( + __dirname, + '../../../app/coffee/DockerRunner' +) +const Path = require('path') + +describe('DockerRunner', function() { + beforeEach(function() { + let container, Docker, Timer + this.container = container = {} + this.DockerRunner = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': (this.Settings = { + clsi: { docker: {} }, + path: {} + }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub(), + info: sinon.stub(), + warn: sinon.stub() + }), + dockerode: (Docker = (function() { + Docker = class Docker { + static initClass() { + this.prototype.getContainer = sinon.stub().returns(container) + this.prototype.createContainer = sinon + .stub() + .yields(null, container) + this.prototype.listContainers = sinon.stub() + } + } + Docker.initClass() + return Docker + })()), + fs: (this.fs = { + stat: sinon.stub().yields(null, { + isDirectory() { + return true + } + }) + }), + './Metrics': { + Timer: (Timer = class Timer { 
+ done() {} + }) + }, + './LockManager': { + runWithLock(key, runner, callback) { + return runner(callback) + } + } + } + }) + this.Docker = Docker + this.getContainer = Docker.prototype.getContainer + this.createContainer = Docker.prototype.createContainer + this.listContainers = Docker.prototype.listContainers + + this.directory = '/local/compile/directory' + this.mainFile = 'main-file.tex' + this.compiler = 'pdflatex' + this.image = 'example.com/sharelatex/image:2016.2' + this.env = {} + this.callback = sinon.stub() + this.project_id = 'project-id-123' + this.volumes = { '/local/compile/directory': '/compile' } + this.Settings.clsi.docker.image = this.defaultImage = 'default-image' + return (this.Settings.clsi.docker.env = { PATH: 'mock-path' }) + }) + + describe('run', function() { + beforeEach(function(done) { + this.DockerRunner._getContainerOptions = sinon + .stub() + .returns((this.options = { mockoptions: 'foo' })) + this.DockerRunner._fingerprintContainer = sinon + .stub() + .returns((this.fingerprint = 'fingerprint')) + + this.name = `project-${this.project_id}-${this.fingerprint}` + + this.command = ['mock', 'command', '--outdir=$COMPILE_DIR'] + this.command_with_dir = ['mock', 'command', '--outdir=/compile'] + this.timeout = 42000 + return done() + }) + + describe('successfully', function() { + beforeEach(function(done) { + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + (err, output) => { + this.callback(err, output) + return done() + } + ) + }) + + it('should generate the options for the container', function() { + return this.DockerRunner._getContainerOptions + .calledWith( + this.command_with_dir, + this.image, + this.volumes, + this.timeout + ) + .should.equal(true) + }) + + it('should generate the fingerprint from the returned options', function() { + return this.DockerRunner._fingerprintContainer + .calledWith(this.options) + .should.equal(true) + }) + + it('should do the run', function() { + return this.DockerRunner._runAndWaitForContainer + .calledWith(this.options, this.volumes, this.timeout) + .should.equal(true) + }) + + return it('should call the callback', function() { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('when path.sandboxedCompilesHostDir is set', function() { + beforeEach(function() { + this.Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles' + this.directory = '/var/lib/sharelatex/data/compiles/xyz' + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.callback + ) + }) + + it('should re-write the bind directory', function() { + const volumes = this.DockerRunner._runAndWaitForContainer.lastCall + .args[1] + return expect(volumes).to.deep.equal({ + '/some/host/dir/compiles/xyz': '/compile' + }) + }) + + return it('should call the callback', function() { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('when the run throws an error', function() { + beforeEach(function() { + let firstTime = true + this.output = 'mock-output' + this.DockerRunner._runAndWaitForContainer = ( + options, + volumes, + timeout, + callback + ) => { + if (callback == null) { + 
callback = function(error, output) {} + } + if (firstTime) { + firstTime = false + return callback( + new Error('HTTP code is 500 which indicates error: server error') + ) + } else { + return callback(null, this.output) + } + } + sinon.spy(this.DockerRunner, '_runAndWaitForContainer') + this.DockerRunner.destroyContainer = sinon.stub().callsArg(3) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.callback + ) + }) + + it('should do the run twice', function() { + return this.DockerRunner._runAndWaitForContainer.calledTwice.should.equal( + true + ) + }) + + it('should destroy the container in between', function() { + return this.DockerRunner.destroyContainer + .calledWith(this.name, null) + .should.equal(true) + }) + + return it('should call the callback', function() { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('with no image', function() { + beforeEach(function() { + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + null, + this.timeout, + this.env, + this.callback + ) + }) + + return it('should use the default image', function() { + return this.DockerRunner._getContainerOptions + .calledWith( + this.command_with_dir, + this.defaultImage, + this.volumes, + this.timeout + ) + .should.equal(true) + }) + }) + + return describe('with image override', function() { + beforeEach(function() { + this.Settings.texliveImageNameOveride = 'overrideimage.com/something' + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.callback + ) + }) + + return it('should use the override and keep the tag', function() { + const image = this.DockerRunner._getContainerOptions.args[0][1] + return image.should.equal('overrideimage.com/something/image:2016.2') + }) + }) + }) + + describe('_runAndWaitForContainer', function() { + beforeEach(function() { + this.options = { mockoptions: 'foo', name: (this.name = 'mock-name') } + this.DockerRunner.startContainer = ( + options, + volumes, + attachStreamHandler, + callback + ) => { + attachStreamHandler(null, (this.output = 'mock-output')) + return callback(null, (this.containerId = 'container-id')) + } + sinon.spy(this.DockerRunner, 'startContainer') + this.DockerRunner.waitForContainer = sinon + .stub() + .callsArgWith(2, null, (this.exitCode = 42)) + return this.DockerRunner._runAndWaitForContainer( + this.options, + this.volumes, + this.timeout, + this.callback + ) + }) + + it('should create/start the container', function() { + return this.DockerRunner.startContainer + .calledWith(this.options, this.volumes) + .should.equal(true) + }) + + it('should wait for the container to finish', function() { + return this.DockerRunner.waitForContainer + .calledWith(this.name, this.timeout) + .should.equal(true) + }) + + return it('should call the callback with the output', function() { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('startContainer', function() { + beforeEach(function() { + this.attachStreamHandler = sinon.stub() + this.attachStreamHandler.cock = true + this.options = { mockoptions: 'foo', name: 'mock-name' } + this.container.inspect = 
sinon.stub().callsArgWith(0) + this.DockerRunner.attachToContainer = ( + containerId, + attachStreamHandler, + cb + ) => { + attachStreamHandler() + return cb() + } + return sinon.spy(this.DockerRunner, 'attachToContainer') + }) + + describe('when the container exists', function() { + beforeEach(function() { + this.container.inspect = sinon.stub().callsArgWith(0) + this.container.start = sinon.stub().yields() + + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.callback, + () => {} + ) + }) + + it('should start the container with the given name', function() { + this.getContainer.calledWith(this.options.name).should.equal(true) + return this.container.start.called.should.equal(true) + }) + + it('should not try to create the container', function() { + return this.createContainer.called.should.equal(false) + }) + + it('should attach to the container', function() { + return this.DockerRunner.attachToContainer.called.should.equal(true) + }) + + it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + + return it('should attach before the container starts', function() { + return sinon.assert.callOrder( + this.DockerRunner.attachToContainer, + this.container.start + ) + }) + }) + + describe('when the container does not exist', function() { + beforeEach(function() { + const exists = false + this.container.start = sinon.stub().yields() + this.container.inspect = sinon + .stub() + .callsArgWith(0, { statusCode: 404 }) + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.attachStreamHandler, + this.callback + ) + }) + + it('should create the container', function() { + return this.createContainer.calledWith(this.options).should.equal(true) + }) + + it('should call the callback and stream handler', function() { + this.attachStreamHandler.called.should.equal(true) + return this.callback.called.should.equal(true) + }) + + it('should attach to the container', function() { + return this.DockerRunner.attachToContainer.called.should.equal(true) + }) + + return it('should attach before the container starts', function() { + return sinon.assert.callOrder( + this.DockerRunner.attachToContainer, + this.container.start + ) + }) + }) + + describe('when the container is already running', function() { + beforeEach(function() { + const error = new Error( + `HTTP code is 304 which indicates error: server error - start: Cannot start container ${this.name}: The container MOCKID is already running.` + ) + error.statusCode = 304 + this.container.start = sinon.stub().yields(error) + this.container.inspect = sinon.stub().callsArgWith(0) + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.attachStreamHandler, + this.callback + ) + }) + + it('should not try to create the container', function() { + return this.createContainer.called.should.equal(false) + }) + + return it('should call the callback and stream handler without an error', function() { + this.attachStreamHandler.called.should.equal(true) + return this.callback.called.should.equal(true) + }) + }) + + describe('when a volume does not exist', function() { + beforeEach(function() { + this.fs.stat = sinon.stub().yields(new Error('no such path')) + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.attachStreamHandler, + this.callback + ) + }) + + it('should not try to create the container', function() { + return this.createContainer.called.should.equal(false) + }) + + return it('should call the callback with an 
error', function() { + return this.callback.calledWith(new Error()).should.equal(true) + }) + }) + + describe('when a volume exists but is not a directory', function() { + beforeEach(function() { + this.fs.stat = sinon.stub().yields(null, { + isDirectory() { + return false + } + }) + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.attachStreamHandler, + this.callback + ) + }) + + it('should not try to create the container', function() { + return this.createContainer.called.should.equal(false) + }) + + return it('should call the callback with an error', function() { + return this.callback.calledWith(new Error()).should.equal(true) + }) + }) + + describe('when a volume does not exist, but sibling-containers are used', function() { + beforeEach(function() { + this.fs.stat = sinon.stub().yields(new Error('no such path')) + this.Settings.path.sandboxedCompilesHostDir = '/some/path' + this.container.start = sinon.stub().yields() + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.callback + ) + }) + + afterEach(function() { + return delete this.Settings.path.sandboxedCompilesHostDir + }) + + it('should start the container with the given name', function() { + this.getContainer.calledWith(this.options.name).should.equal(true) + return this.container.start.called.should.equal(true) + }) + + it('should not try to create the container', function() { + return this.createContainer.called.should.equal(false) + }) + + return it('should call the callback', function() { + this.callback.called.should.equal(true) + return this.callback.calledWith(new Error()).should.equal(false) + }) + }) + + return describe('when the container tries to be created, but already has been (race condition)', function() {}) + }) + + describe('waitForContainer', function() { + beforeEach(function() { + this.containerId = 'container-id' + this.timeout = 5000 + this.container.wait = sinon + .stub() + .yields(null, { StatusCode: (this.statusCode = 42) }) + return (this.container.kill = sinon.stub().yields()) + }) + + describe('when the container returns in time', function() { + beforeEach(function() { + return this.DockerRunner.waitForContainer( + this.containerId, + this.timeout, + this.callback + ) + }) + + it('should wait for the container', function() { + this.getContainer.calledWith(this.containerId).should.equal(true) + return this.container.wait.called.should.equal(true) + }) + + return it('should call the callback with the exit', function() { + return this.callback + .calledWith(null, this.statusCode) + .should.equal(true) + }) + }) + + return describe('when the container does not return before the timeout', function() { + beforeEach(function(done) { + this.container.wait = function(callback) { + if (callback == null) { + callback = function(error, exitCode) {} + } + return setTimeout(() => callback(null, { StatusCode: 42 }), 100) + } + this.timeout = 5 + return this.DockerRunner.waitForContainer( + this.containerId, + this.timeout, + (...args) => { + this.callback(...Array.from(args || [])) + return done() + } + ) + }) + + it('should call kill on the container', function() { + this.getContainer.calledWith(this.containerId).should.equal(true) + return this.container.kill.called.should.equal(true) + }) + + return it('should call the callback with an error', function() { + const error = new Error('container timed out') + error.timedout = true + return this.callback.calledWith(error).should.equal(true) + }) + }) + }) + + describe('destroyOldContainers', function() { + 
beforeEach(function(done) { + const oneHourInSeconds = 60 * 60 + const oneHourInMilliseconds = oneHourInSeconds * 1000 + const nowInSeconds = Date.now() / 1000 + this.containers = [ + { + Name: '/project-old-container-name', + Id: 'old-container-id', + Created: nowInSeconds - oneHourInSeconds - 100 + }, + { + Name: '/project-new-container-name', + Id: 'new-container-id', + Created: nowInSeconds - oneHourInSeconds + 100 + }, + { + Name: '/totally-not-a-project-container', + Id: 'some-random-id', + Created: nowInSeconds - 2 * oneHourInSeconds + } + ] + this.DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds + this.listContainers.callsArgWith(1, null, this.containers) + this.DockerRunner.destroyContainer = sinon.stub().callsArg(3) + return this.DockerRunner.destroyOldContainers(error => { + this.callback(error) + return done() + }) + }) + + it('should list all containers', function() { + return this.listContainers.calledWith({ all: true }).should.equal(true) + }) + + it('should destroy old containers', function() { + this.DockerRunner.destroyContainer.callCount.should.equal(1) + return this.DockerRunner.destroyContainer + .calledWith('/project-old-container-name', 'old-container-id') + .should.equal(true) + }) + + it('should not destroy new containers', function() { + return this.DockerRunner.destroyContainer + .calledWith('/project-new-container-name', 'new-container-id') + .should.equal(false) + }) + + it('should not destroy non-project containers', function() { + return this.DockerRunner.destroyContainer + .calledWith('/totally-not-a-project-container', 'some-random-id') + .should.equal(false) + }) + + return it('should callback the callback', function() { + return this.callback.called.should.equal(true) + }) + }) + + describe('_destroyContainer', function() { + beforeEach(function() { + this.containerId = 'some_id' + this.fakeContainer = { remove: sinon.stub().callsArgWith(1, null) } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + it('should get the container', function(done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + this.Docker.prototype.getContainer.callCount.should.equal(1) + this.Docker.prototype.getContainer + .calledWith(this.containerId) + .should.equal(true) + return done() + } + ) + }) + + it('should try to force-destroy the container when shouldForce=true', function(done) { + return this.DockerRunner._destroyContainer( + this.containerId, + true, + err => { + this.fakeContainer.remove.callCount.should.equal(1) + this.fakeContainer.remove + .calledWith({ force: true }) + .should.equal(true) + return done() + } + ) + }) + + it('should not try to force-destroy the container when shouldForce=false', function(done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + this.fakeContainer.remove.callCount.should.equal(1) + this.fakeContainer.remove + .calledWith({ force: false }) + .should.equal(true) + return done() + } + ) + }) + + it('should not produce an error', function(done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + expect(err).to.equal(null) + return done() + } + ) + }) + + describe('when the container is already gone', function() { + beforeEach(function() { + this.fakeError = new Error('woops') + this.fakeError.statusCode = 404 + this.fakeContainer = { + remove: sinon.stub().callsArgWith(1, this.fakeError) + } + return (this.Docker.prototype.getContainer = sinon + .stub() + 
.returns(this.fakeContainer)) + }) + + return it('should not produce an error', function(done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + expect(err).to.equal(null) + return done() + } + ) + }) + }) + + return describe('when container.destroy produces an error', function(done) { + beforeEach(function() { + this.fakeError = new Error('woops') + this.fakeError.statusCode = 500 + this.fakeContainer = { + remove: sinon.stub().callsArgWith(1, this.fakeError) + } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + return it('should produce an error', function(done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + expect(err).to.not.equal(null) + expect(err).to.equal(this.fakeError) + return done() + } + ) + }) + }) + }) + + return describe('kill', function() { + beforeEach(function() { + this.containerId = 'some_id' + this.fakeContainer = { kill: sinon.stub().callsArgWith(0, null) } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + it('should get the container', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + this.Docker.prototype.getContainer.callCount.should.equal(1) + this.Docker.prototype.getContainer + .calledWith(this.containerId) + .should.equal(true) + return done() + }) + }) + + it('should try to force-destroy the container', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + this.fakeContainer.kill.callCount.should.equal(1) + return done() + }) + }) + + it('should not produce an error', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.equal(undefined) + return done() + }) + }) + + describe('when the container is not actually running', function() { + beforeEach(function() { + this.fakeError = new Error('woops') + this.fakeError.statusCode = 500 + this.fakeError.message = + 'Cannot kill container is not running' + this.fakeContainer = { + kill: sinon.stub().callsArgWith(0, this.fakeError) + } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + return it('should not produce an error', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.equal(undefined) + return done() + }) + }) + }) + + return describe('when container.kill produces a legitimate error', function(done) { + beforeEach(function() { + this.fakeError = new Error('woops') + this.fakeError.statusCode = 500 + this.fakeError.message = 'Totally legitimate reason to throw an error' + this.fakeContainer = { + kill: sinon.stub().callsArgWith(0, this.fakeError) + } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + return it('should produce an error', function(done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.not.equal(undefined) + expect(err).to.equal(this.fakeError) + return done() + }) + }) + }) + }) +}) diff --git a/test/unit/js/DraftModeManagerTests.js b/test/unit/js/DraftModeManagerTests.js index f270873..2c30b40 100644 --- a/test/unit/js/DraftModeManagerTests.js +++ b/test/unit/js/DraftModeManagerTests.js @@ -8,75 +8,79 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = 
require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/DraftModeManager'); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/DraftModeManager' +) describe('DraftModeManager', function() { - beforeEach(function() { - return this.DraftModeManager = SandboxedModule.require(modulePath, { requires: { - "fs": (this.fs = {}), - "logger-sharelatex": (this.logger = {log() {}}) - } - });}); - - describe("_injectDraftOption", function() { - it("should add draft option into documentclass with existing options", function() { - return this.DraftModeManager - ._injectDraftOption(`\ -\\documentclass[a4paper,foo=bar]{article}\ -`) - .should.equal(`\ -\\documentclass[draft,a4paper,foo=bar]{article}\ -`); - }); + beforeEach(function() { + return (this.DraftModeManager = SandboxedModule.require(modulePath, { + requires: { + fs: (this.fs = {}), + 'logger-sharelatex': (this.logger = { log() {} }) + } + })) + }) - return it("should add draft option into documentclass with no options", function() { - return this.DraftModeManager - ._injectDraftOption(`\ -\\documentclass{article}\ + describe('_injectDraftOption', function() { + it('should add draft option into documentclass with existing options', function() { + return this.DraftModeManager._injectDraftOption(`\ +\\documentclass[a4paper,foo=bar]{article}\ +`).should.equal(`\ +\\documentclass[draft,a4paper,foo=bar]{article}\ `) - .should.equal(`\ + }) + + return it('should add draft option into documentclass with no options', function() { + return this.DraftModeManager._injectDraftOption(`\ +\\documentclass{article}\ +`).should.equal(`\ \\documentclass[draft]{article}\ -`); - }); - }); - - return describe("injectDraftMode", function() { - beforeEach(function() { - this.filename = "/mock/filename.tex"; - this.callback = sinon.stub(); - const content = `\ +`) + }) + }) + + return describe('injectDraftMode', function() { + beforeEach(function() { + this.filename = '/mock/filename.tex' + this.callback = sinon.stub() + const content = `\ \\documentclass{article} \\begin{document} Hello world \\end{document}\ -`; - this.fs.readFile = sinon.stub().callsArgWith(2, null, content); - this.fs.writeFile = sinon.stub().callsArg(2); - return this.DraftModeManager.injectDraftMode(this.filename, this.callback); - }); - - it("should read the file", function() { - return this.fs.readFile - .calledWith(this.filename, "utf8") - .should.equal(true); - }); - - it("should write the modified file", function() { - return this.fs.writeFile - .calledWith(this.filename, `\ +` + this.fs.readFile = sinon.stub().callsArgWith(2, null, content) + this.fs.writeFile = sinon.stub().callsArg(2) + return this.DraftModeManager.injectDraftMode(this.filename, this.callback) + }) + + it('should read the file', function() { + return this.fs.readFile + .calledWith(this.filename, 'utf8') + .should.equal(true) + }) + + it('should write the modified file', function() { + return this.fs.writeFile + .calledWith( + this.filename, + `\ \\documentclass[draft]{article} \\begin{document} Hello world \\end{document}\ -`) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); -}); +` + ) + .should.equal(true) + }) + + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) +}) diff --git 
a/test/unit/js/LatexRunnerTests.js b/test/unit/js/LatexRunnerTests.js index 7fe8bc8..b468b83 100644 --- a/test/unit/js/LatexRunnerTests.js +++ b/test/unit/js/LatexRunnerTests.js @@ -9,103 +9,129 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/LatexRunner'); -const Path = require("path"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/LatexRunner' +) +const Path = require('path') -describe("LatexRunner", function() { - beforeEach(function() { - let Timer; - this.LatexRunner = SandboxedModule.require(modulePath, { requires: { - "settings-sharelatex": (this.Settings = { - docker: { - socketPath: "/var/run/docker.sock" - } - }), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), - "./Metrics": { - Timer: (Timer = class Timer { - done() {} - }) - }, - "./CommandRunner": (this.CommandRunner = {}) - } - }); +describe('LatexRunner', function() { + beforeEach(function() { + let Timer + this.LatexRunner = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': (this.Settings = { + docker: { + socketPath: '/var/run/docker.sock' + } + }), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), + './Metrics': { + Timer: (Timer = class Timer { + done() {} + }) + }, + './CommandRunner': (this.CommandRunner = {}) + } + }) - this.directory = "/local/compile/directory"; - this.mainFile = "main-file.tex"; - this.compiler = "pdflatex"; - this.image = "example.com/image"; - this.callback = sinon.stub(); - this.project_id = "project-id-123"; - return this.env = {'foo': '123'};}); + this.directory = '/local/compile/directory' + this.mainFile = 'main-file.tex' + this.compiler = 'pdflatex' + this.image = 'example.com/image' + this.callback = sinon.stub() + this.project_id = 'project-id-123' + return (this.env = { foo: '123' }) + }) - return describe("runLatex", function() { - beforeEach(function() { - return this.CommandRunner.run = sinon.stub().callsArg(6); - }); + return describe('runLatex', function() { + beforeEach(function() { + return (this.CommandRunner.run = sinon.stub().callsArg(6)) + }) - describe("normally", function() { - beforeEach(function() { - return this.LatexRunner.runLatex(this.project_id, { - directory: this.directory, - mainFile: this.mainFile, - compiler: this.compiler, - timeout: (this.timeout = 42000), - image: this.image, - environment: this.env - }, - this.callback); - }); + describe('normally', function() { + beforeEach(function() { + return this.LatexRunner.runLatex( + this.project_id, + { + directory: this.directory, + mainFile: this.mainFile, + compiler: this.compiler, + timeout: (this.timeout = 42000), + image: this.image, + environment: this.env + }, + this.callback + ) + }) - return it("should run the latex command", function() { - return this.CommandRunner.run - .calledWith(this.project_id, sinon.match.any, this.directory, this.image, this.timeout, this.env) - .should.equal(true); - }); - }); + return it('should run the latex command', function() { + return this.CommandRunner.run + .calledWith( + this.project_id, + sinon.match.any, + this.directory, + this.image, + 
this.timeout, + this.env + ) + .should.equal(true) + }) + }) - describe("with an .Rtex main file", function() { - beforeEach(function() { - return this.LatexRunner.runLatex(this.project_id, { - directory: this.directory, - mainFile: "main-file.Rtex", - compiler: this.compiler, - image: this.image, - timeout: (this.timeout = 42000) - }, - this.callback); - }); + describe('with an .Rtex main file', function() { + beforeEach(function() { + return this.LatexRunner.runLatex( + this.project_id, + { + directory: this.directory, + mainFile: 'main-file.Rtex', + compiler: this.compiler, + image: this.image, + timeout: (this.timeout = 42000) + }, + this.callback + ) + }) - return it("should run the latex command on the equivalent .tex file", function() { - const command = this.CommandRunner.run.args[0][1]; - const mainFile = command.slice(-1)[0]; - return mainFile.should.equal("$COMPILE_DIR/main-file.tex"); - }); - }); + return it('should run the latex command on the equivalent .tex file', function() { + const command = this.CommandRunner.run.args[0][1] + const mainFile = command.slice(-1)[0] + return mainFile.should.equal('$COMPILE_DIR/main-file.tex') + }) + }) - return describe("with a flags option", function() { - beforeEach(function() { - return this.LatexRunner.runLatex(this.project_id, { - directory: this.directory, - mainFile: this.mainFile, - compiler: this.compiler, - image: this.image, - timeout: (this.timeout = 42000), - flags: ["-file-line-error", "-halt-on-error"] - }, - this.callback); - }); + return describe('with a flags option', function() { + beforeEach(function() { + return this.LatexRunner.runLatex( + this.project_id, + { + directory: this.directory, + mainFile: this.mainFile, + compiler: this.compiler, + image: this.image, + timeout: (this.timeout = 42000), + flags: ['-file-line-error', '-halt-on-error'] + }, + this.callback + ) + }) - return it("should include the flags in the command", function() { - const command = this.CommandRunner.run.args[0][1]; - const flags = command.filter(arg => (arg === "-file-line-error") || (arg === "-halt-on-error")); - flags.length.should.equal(2); - flags[0].should.equal("-file-line-error"); - return flags[1].should.equal("-halt-on-error"); - }); - }); - }); -}); + return it('should include the flags in the command', function() { + const command = this.CommandRunner.run.args[0][1] + const flags = command.filter( + arg => arg === '-file-line-error' || arg === '-halt-on-error' + ) + flags.length.should.equal(2) + flags[0].should.equal('-file-line-error') + return flags[1].should.equal('-halt-on-error') + }) + }) + }) +}) diff --git a/test/unit/js/LockManagerTests.js b/test/unit/js/LockManagerTests.js index 6d1b156..ea6c341 100644 --- a/test/unit/js/LockManagerTests.js +++ b/test/unit/js/LockManagerTests.js @@ -9,75 +9,85 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/LockManager'); -const Path = require("path"); -const Errors = require("../../../app/js/Errors"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/LockManager' +) +const Path = require('path') +const Errors = require('../../../app/js/Errors') 
-describe("DockerLockManager", function() { - beforeEach(function() { - this.LockManager = SandboxedModule.require(modulePath, { requires: { - "settings-sharelatex": {}, - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub(), err() {} }), - "fs": { - lstat:sinon.stub().callsArgWith(1), - readdir: sinon.stub().callsArgWith(1) - }, - "lockfile": (this.Lockfile = {}) - } - }); - return this.lockFile = "/local/compile/directory/.project-lock"; - }); +describe('DockerLockManager', function() { + beforeEach(function() { + this.LockManager = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': {}, + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub(), + err() {} + }), + fs: { + lstat: sinon.stub().callsArgWith(1), + readdir: sinon.stub().callsArgWith(1) + }, + lockfile: (this.Lockfile = {}) + } + }) + return (this.lockFile = '/local/compile/directory/.project-lock') + }) - return describe("runWithLock", function() { - beforeEach(function() { - this.runner = sinon.stub().callsArgWith(0, null, "foo", "bar"); - return this.callback = sinon.stub(); - }); + return describe('runWithLock', function() { + beforeEach(function() { + this.runner = sinon.stub().callsArgWith(0, null, 'foo', 'bar') + return (this.callback = sinon.stub()) + }) - describe("normally", function() { - beforeEach(function() { - this.Lockfile.lock = sinon.stub().callsArgWith(2, null); - this.Lockfile.unlock = sinon.stub().callsArgWith(1, null); - return this.LockManager.runWithLock(this.lockFile, this.runner, this.callback); - }); + describe('normally', function() { + beforeEach(function() { + this.Lockfile.lock = sinon.stub().callsArgWith(2, null) + this.Lockfile.unlock = sinon.stub().callsArgWith(1, null) + return this.LockManager.runWithLock( + this.lockFile, + this.runner, + this.callback + ) + }) - it("should run the compile", function() { - return this.runner - .calledWith() - .should.equal(true); - }); + it('should run the compile', function() { + return this.runner.calledWith().should.equal(true) + }) - return it("should call the callback with the response from the compile", function() { - return this.callback - .calledWithExactly(null, "foo", "bar") - .should.equal(true); - }); - }); + return it('should call the callback with the response from the compile', function() { + return this.callback + .calledWithExactly(null, 'foo', 'bar') + .should.equal(true) + }) + }) - return describe("when the project is locked", function() { - beforeEach(function() { - this.error = new Error(); - this.error.code = "EEXIST"; - this.Lockfile.lock = sinon.stub().callsArgWith(2,this.error); - this.Lockfile.unlock = sinon.stub().callsArgWith(1, null); - return this.LockManager.runWithLock(this.lockFile, this.runner, this.callback); - }); + return describe('when the project is locked', function() { + beforeEach(function() { + this.error = new Error() + this.error.code = 'EEXIST' + this.Lockfile.lock = sinon.stub().callsArgWith(2, this.error) + this.Lockfile.unlock = sinon.stub().callsArgWith(1, null) + return this.LockManager.runWithLock( + this.lockFile, + this.runner, + this.callback + ) + }) - it("should not run the compile", function() { - return this.runner - .called - .should.equal(false); - }); + it('should not run the compile', function() { + return this.runner.called.should.equal(false) + }) - return it("should return an error", function() { - const error = new Errors.AlreadyCompilingError(); - return this.callback - .calledWithExactly(error) - 
.should.equal(true); - }); - }); - }); -}); + return it('should return an error', function() { + const error = new Errors.AlreadyCompilingError() + return this.callback.calledWithExactly(error).should.equal(true) + }) + }) + }) +}) diff --git a/test/unit/js/OutputFileFinderTests.js b/test/unit/js/OutputFileFinderTests.js index 5c956ad..e5f9904 100644 --- a/test/unit/js/OutputFileFinderTests.js +++ b/test/unit/js/OutputFileFinderTests.js @@ -10,90 +10,96 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/OutputFileFinder'); -const path = require("path"); -const { expect } = require("chai"); -const { EventEmitter } = require("events"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/OutputFileFinder' +) +const path = require('path') +const { expect } = require('chai') +const { EventEmitter } = require('events') -describe("OutputFileFinder", function() { - beforeEach(function() { - this.OutputFileFinder = SandboxedModule.require(modulePath, { requires: { - "fs": (this.fs = {}), - "child_process": { spawn: (this.spawn = sinon.stub()) - }, - "logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() } - } - }); - this.directory = "/test/dir"; - return this.callback = sinon.stub(); - }); +describe('OutputFileFinder', function() { + beforeEach(function() { + this.OutputFileFinder = SandboxedModule.require(modulePath, { + requires: { + fs: (this.fs = {}), + child_process: { spawn: (this.spawn = sinon.stub()) }, + 'logger-sharelatex': { log: sinon.stub(), warn: sinon.stub() } + } + }) + this.directory = '/test/dir' + return (this.callback = sinon.stub()) + }) - describe("findOutputFiles", function() { - beforeEach(function() { - this.resource_path = "resource/path.tex"; - this.output_paths = ["output.pdf", "extra/file.tex"]; - this.all_paths = this.output_paths.concat([this.resource_path]); - this.resources = [ - {path: (this.resource_path = "resource/path.tex")} - ]; - this.OutputFileFinder._getAllFiles = sinon.stub().callsArgWith(1, null, this.all_paths); - return this.OutputFileFinder.findOutputFiles(this.resources, this.directory, (error, outputFiles) => { - this.outputFiles = outputFiles; - - }); - }); + describe('findOutputFiles', function() { + beforeEach(function() { + this.resource_path = 'resource/path.tex' + this.output_paths = ['output.pdf', 'extra/file.tex'] + this.all_paths = this.output_paths.concat([this.resource_path]) + this.resources = [{ path: (this.resource_path = 'resource/path.tex') }] + this.OutputFileFinder._getAllFiles = sinon + .stub() + .callsArgWith(1, null, this.all_paths) + return this.OutputFileFinder.findOutputFiles( + this.resources, + this.directory, + (error, outputFiles) => { + this.outputFiles = outputFiles + } + ) + }) - return it("should only return the output files, not directories or resource paths", function() { - return expect(this.outputFiles).to.deep.equal([{ - path: "output.pdf", - type: "pdf" - }, { - path: "extra/file.tex", - type: "tex" - }]); - }); -}); - - return describe("_getAllFiles", function() { - beforeEach(function() { - this.proc = new EventEmitter(); - this.proc.stdout = new EventEmitter(); - 
this.spawn.returns(this.proc); - this.directory = "/base/dir"; - return this.OutputFileFinder._getAllFiles(this.directory, this.callback); - }); - - describe("successfully", function() { - beforeEach(function() { - this.proc.stdout.emit( - "data", - ["/base/dir/main.tex", "/base/dir/chapters/chapter1.tex"].join("\n") + "\n" - ); - return this.proc.emit("close", 0); - }); - - return it("should call the callback with the relative file paths", function() { - return this.callback.calledWith( - null, - ["main.tex", "chapters/chapter1.tex"] - ).should.equal(true); - }); - }); + return it('should only return the output files, not directories or resource paths', function() { + return expect(this.outputFiles).to.deep.equal([ + { + path: 'output.pdf', + type: 'pdf' + }, + { + path: 'extra/file.tex', + type: 'tex' + } + ]) + }) + }) - return describe("when the directory doesn't exist", function() { - beforeEach(function() { - return this.proc.emit("close", 1); - }); - - return it("should call the callback with a blank array", function() { - return this.callback.calledWith( - null, - [] - ).should.equal(true); - }); - }); - }); -}); + return describe('_getAllFiles', function() { + beforeEach(function() { + this.proc = new EventEmitter() + this.proc.stdout = new EventEmitter() + this.spawn.returns(this.proc) + this.directory = '/base/dir' + return this.OutputFileFinder._getAllFiles(this.directory, this.callback) + }) + + describe('successfully', function() { + beforeEach(function() { + this.proc.stdout.emit( + 'data', + ['/base/dir/main.tex', '/base/dir/chapters/chapter1.tex'].join('\n') + + '\n' + ) + return this.proc.emit('close', 0) + }) + + return it('should call the callback with the relative file paths', function() { + return this.callback + .calledWith(null, ['main.tex', 'chapters/chapter1.tex']) + .should.equal(true) + }) + }) + + return describe("when the directory doesn't exist", function() { + beforeEach(function() { + return this.proc.emit('close', 1) + }) + + return it('should call the callback with a blank array', function() { + return this.callback.calledWith(null, []).should.equal(true) + }) + }) + }) +}) diff --git a/test/unit/js/OutputFileOptimiserTests.js b/test/unit/js/OutputFileOptimiserTests.js index 13b8d60..4546f08 100644 --- a/test/unit/js/OutputFileOptimiserTests.js +++ b/test/unit/js/OutputFileOptimiserTests.js @@ -10,139 +10,187 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/OutputFileOptimiser'); -const path = require("path"); -const { expect } = require("chai"); -const { EventEmitter } = require("events"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/OutputFileOptimiser' +) +const path = require('path') +const { expect } = require('chai') +const { EventEmitter } = require('events') -describe("OutputFileOptimiser", function() { - beforeEach(function() { - this.OutputFileOptimiser = SandboxedModule.require(modulePath, { requires: { - "fs": (this.fs = {}), - "path": (this.Path = {}), - "child_process": { spawn: (this.spawn = sinon.stub()) - }, - "logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() }, - "./Metrics" : {} - } - }); - 
this.directory = "/test/dir"; - return this.callback = sinon.stub(); - }); +describe('OutputFileOptimiser', function() { + beforeEach(function() { + this.OutputFileOptimiser = SandboxedModule.require(modulePath, { + requires: { + fs: (this.fs = {}), + path: (this.Path = {}), + child_process: { spawn: (this.spawn = sinon.stub()) }, + 'logger-sharelatex': { log: sinon.stub(), warn: sinon.stub() }, + './Metrics': {} + } + }) + this.directory = '/test/dir' + return (this.callback = sinon.stub()) + }) - describe("optimiseFile", function() { - beforeEach(function() { - this.src = "./output.pdf"; - return this.dst = "./output.pdf"; - }); + describe('optimiseFile', function() { + beforeEach(function() { + this.src = './output.pdf' + return (this.dst = './output.pdf') + }) - describe("when the file is not a pdf file", function() { - beforeEach(function(done){ - this.src = "./output.log"; - this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false); - this.OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null); - return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done); - }); + describe('when the file is not a pdf file', function() { + beforeEach(function(done) { + this.src = './output.log' + this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon + .stub() + .callsArgWith(1, null, false) + this.OutputFileOptimiser.optimisePDF = sinon + .stub() + .callsArgWith(2, null) + return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done) + }) - it("should not check if the file is optimised", function() { - return this.OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(this.src).should.equal(false); - }); + it('should not check if the file is optimised', function() { + return this.OutputFileOptimiser.checkIfPDFIsOptimised + .calledWith(this.src) + .should.equal(false) + }) - return it("should not optimise the file", function() { - return this.OutputFileOptimiser.optimisePDF.calledWith(this.src, this.dst).should.equal(false); - }); - }); + return it('should not optimise the file', function() { + return this.OutputFileOptimiser.optimisePDF + .calledWith(this.src, this.dst) + .should.equal(false) + }) + }) - describe("when the pdf file is not optimised", function() { - beforeEach(function(done) { - this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false); - this.OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null); - return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done); - }); + describe('when the pdf file is not optimised', function() { + beforeEach(function(done) { + this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon + .stub() + .callsArgWith(1, null, false) + this.OutputFileOptimiser.optimisePDF = sinon + .stub() + .callsArgWith(2, null) + return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done) + }) - it("should check if the pdf is optimised", function() { - return this.OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(this.src).should.equal(true); - }); + it('should check if the pdf is optimised', function() { + return this.OutputFileOptimiser.checkIfPDFIsOptimised + .calledWith(this.src) + .should.equal(true) + }) - return it("should optimise the pdf", function() { - return this.OutputFileOptimiser.optimisePDF.calledWith(this.src, this.dst).should.equal(true); - }); - }); + return it('should optimise the pdf', function() { + return this.OutputFileOptimiser.optimisePDF + .calledWith(this.src, this.dst) + .should.equal(true) + }) + }) - return 
describe("when the pdf file is optimised", function() { - beforeEach(function(done) { - this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, true); - this.OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null); - return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done); - }); + return describe('when the pdf file is optimised', function() { + beforeEach(function(done) { + this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon + .stub() + .callsArgWith(1, null, true) + this.OutputFileOptimiser.optimisePDF = sinon + .stub() + .callsArgWith(2, null) + return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done) + }) - it("should check if the pdf is optimised", function() { - return this.OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(this.src).should.equal(true); - }); + it('should check if the pdf is optimised', function() { + return this.OutputFileOptimiser.checkIfPDFIsOptimised + .calledWith(this.src) + .should.equal(true) + }) - return it("should not optimise the pdf", function() { - return this.OutputFileOptimiser.optimisePDF.calledWith(this.src, this.dst).should.equal(false); - }); - }); - }); + return it('should not optimise the pdf', function() { + return this.OutputFileOptimiser.optimisePDF + .calledWith(this.src, this.dst) + .should.equal(false) + }) + }) + }) - return describe("checkIfPDFISOptimised", function() { - beforeEach(function() { - this.callback = sinon.stub(); - this.fd = 1234; - this.fs.open = sinon.stub().yields(null, this.fd); - this.fs.read = sinon.stub().withArgs(this.fd).yields(null, 100, new Buffer("hello /Linearized 1")); - this.fs.close = sinon.stub().withArgs(this.fd).yields(null); - return this.OutputFileOptimiser.checkIfPDFIsOptimised(this.src, this.callback); - }); + return describe('checkIfPDFISOptimised', function() { + beforeEach(function() { + this.callback = sinon.stub() + this.fd = 1234 + this.fs.open = sinon.stub().yields(null, this.fd) + this.fs.read = sinon + .stub() + .withArgs(this.fd) + .yields(null, 100, new Buffer('hello /Linearized 1')) + this.fs.close = sinon + .stub() + .withArgs(this.fd) + .yields(null) + return this.OutputFileOptimiser.checkIfPDFIsOptimised( + this.src, + this.callback + ) + }) - describe("for a linearised file", function() { - beforeEach(function() { - this.fs.read = sinon.stub().withArgs(this.fd).yields(null, 100, new Buffer("hello /Linearized 1")); - return this.OutputFileOptimiser.checkIfPDFIsOptimised(this.src, this.callback); - }); + describe('for a linearised file', function() { + beforeEach(function() { + this.fs.read = sinon + .stub() + .withArgs(this.fd) + .yields(null, 100, new Buffer('hello /Linearized 1')) + return this.OutputFileOptimiser.checkIfPDFIsOptimised( + this.src, + this.callback + ) + }) - it("should open the file", function() { - return this.fs.open.calledWith(this.src, "r").should.equal(true); - }); + it('should open the file', function() { + return this.fs.open.calledWith(this.src, 'r').should.equal(true) + }) - it("should read the header", function() { - return this.fs.read.calledWith(this.fd).should.equal(true); - }); + it('should read the header', function() { + return this.fs.read.calledWith(this.fd).should.equal(true) + }) - it("should close the file", function() { - return this.fs.close.calledWith(this.fd).should.equal(true); - }); + it('should close the file', function() { + return this.fs.close.calledWith(this.fd).should.equal(true) + }) - return it("should call the callback with a true result", function() { - return 
this.callback.calledWith(null, true).should.equal(true); - }); - }); + return it('should call the callback with a true result', function() { + return this.callback.calledWith(null, true).should.equal(true) + }) + }) - return describe("for an unlinearised file", function() { - beforeEach(function() { - this.fs.read = sinon.stub().withArgs(this.fd).yields(null, 100, new Buffer("hello not linearized 1")); - return this.OutputFileOptimiser.checkIfPDFIsOptimised(this.src, this.callback); - }); + return describe('for an unlinearised file', function() { + beforeEach(function() { + this.fs.read = sinon + .stub() + .withArgs(this.fd) + .yields(null, 100, new Buffer('hello not linearized 1')) + return this.OutputFileOptimiser.checkIfPDFIsOptimised( + this.src, + this.callback + ) + }) - it("should open the file", function() { - return this.fs.open.calledWith(this.src, "r").should.equal(true); - }); + it('should open the file', function() { + return this.fs.open.calledWith(this.src, 'r').should.equal(true) + }) - it("should read the header", function() { - return this.fs.read.calledWith(this.fd).should.equal(true); - }); + it('should read the header', function() { + return this.fs.read.calledWith(this.fd).should.equal(true) + }) - it("should close the file", function() { - return this.fs.close.calledWith(this.fd).should.equal(true); - }); + it('should close the file', function() { + return this.fs.close.calledWith(this.fd).should.equal(true) + }) - return it("should call the callback with a false result", function() { - return this.callback.calledWith(null, false).should.equal(true); - }); - }); - }); -}); + return it('should call the callback with a false result', function() { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) + }) +}) diff --git a/test/unit/js/ProjectPersistenceManagerTests.js b/test/unit/js/ProjectPersistenceManagerTests.js index 5f77a80..0d84fc2 100644 --- a/test/unit/js/ProjectPersistenceManagerTests.js +++ b/test/unit/js/ProjectPersistenceManagerTests.js @@ -11,79 +11,90 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/ProjectPersistenceManager'); -const tk = require("timekeeper"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/ProjectPersistenceManager' +) +const tk = require('timekeeper') -describe("ProjectPersistenceManager", function() { - beforeEach(function() { - this.ProjectPersistenceManager = SandboxedModule.require(modulePath, { requires: { - "./UrlCache": (this.UrlCache = {}), - "./CompileManager": (this.CompileManager = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub() }), - "./db": (this.db = {}) - } - }); - this.callback = sinon.stub(); - this.project_id = "project-id-123"; - return this.user_id = "1234"; - }); +describe('ProjectPersistenceManager', function() { + beforeEach(function() { + this.ProjectPersistenceManager = SandboxedModule.require(modulePath, { + requires: { + './UrlCache': (this.UrlCache = {}), + './CompileManager': (this.CompileManager = {}), + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), + './db': (this.db = {}) + } + }) + this.callback = sinon.stub() + 
this.project_id = 'project-id-123' + return (this.user_id = '1234') + }) - describe("clearExpiredProjects", function() { - beforeEach(function() { - this.project_ids = [ - "project-id-1", - "project-id-2" - ]; - this.ProjectPersistenceManager._findExpiredProjectIds = sinon.stub().callsArgWith(0, null, this.project_ids); - this.ProjectPersistenceManager.clearProjectFromCache = sinon.stub().callsArg(1); - this.CompileManager.clearExpiredProjects = sinon.stub().callsArg(1); - return this.ProjectPersistenceManager.clearExpiredProjects(this.callback); - }); + describe('clearExpiredProjects', function() { + beforeEach(function() { + this.project_ids = ['project-id-1', 'project-id-2'] + this.ProjectPersistenceManager._findExpiredProjectIds = sinon + .stub() + .callsArgWith(0, null, this.project_ids) + this.ProjectPersistenceManager.clearProjectFromCache = sinon + .stub() + .callsArg(1) + this.CompileManager.clearExpiredProjects = sinon.stub().callsArg(1) + return this.ProjectPersistenceManager.clearExpiredProjects(this.callback) + }) - it("should clear each expired project", function() { - return Array.from(this.project_ids).map((project_id) => - this.ProjectPersistenceManager.clearProjectFromCache - .calledWith(project_id) - .should.equal(true)); - }); + it('should clear each expired project', function() { + return Array.from(this.project_ids).map(project_id => + this.ProjectPersistenceManager.clearProjectFromCache + .calledWith(project_id) + .should.equal(true) + ) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - return describe("clearProject", function() { - beforeEach(function() { - this.ProjectPersistenceManager._clearProjectFromDatabase = sinon.stub().callsArg(1); - this.UrlCache.clearProject = sinon.stub().callsArg(1); - this.CompileManager.clearProject = sinon.stub().callsArg(2); - return this.ProjectPersistenceManager.clearProject(this.project_id, this.user_id, this.callback); - }); + return describe('clearProject', function() { + beforeEach(function() { + this.ProjectPersistenceManager._clearProjectFromDatabase = sinon + .stub() + .callsArg(1) + this.UrlCache.clearProject = sinon.stub().callsArg(1) + this.CompileManager.clearProject = sinon.stub().callsArg(2) + return this.ProjectPersistenceManager.clearProject( + this.project_id, + this.user_id, + this.callback + ) + }) - it("should clear the project from the database", function() { - return this.ProjectPersistenceManager._clearProjectFromDatabase - .calledWith(this.project_id) - .should.equal(true); - }); + it('should clear the project from the database', function() { + return this.ProjectPersistenceManager._clearProjectFromDatabase + .calledWith(this.project_id) + .should.equal(true) + }) - it("should clear all the cached Urls for the project", function() { - return this.UrlCache.clearProject - .calledWith(this.project_id) - .should.equal(true); - }); + it('should clear all the cached Urls for the project', function() { + return this.UrlCache.clearProject + .calledWith(this.project_id) + .should.equal(true) + }) - it("should clear the project compile folder", function() { - return this.CompileManager.clearProject - .calledWith(this.project_id, this.user_id) - .should.equal(true); - }); + it('should clear the project compile folder', function() { + return this.CompileManager.clearProject + .calledWith(this.project_id, this.user_id) + .should.equal(true) 
+ }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); -}); - + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) +}) diff --git a/test/unit/js/RequestParserTests.js b/test/unit/js/RequestParserTests.js index 725988f..e2d8b02 100644 --- a/test/unit/js/RequestParserTests.js +++ b/test/unit/js/RequestParserTests.js @@ -9,378 +9,412 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const { expect } = require('chai'); -const modulePath = require('path').join(__dirname, '../../../app/js/RequestParser'); -const tk = require("timekeeper"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const { expect } = require('chai') +const modulePath = require('path').join( + __dirname, + '../../../app/js/RequestParser' +) +const tk = require('timekeeper') -describe("RequestParser", function() { - beforeEach(function() { - tk.freeze(); - this.callback = sinon.stub(); - this.validResource = { - path: "main.tex", - date: "12:00 01/02/03", - content: "Hello world" - }; - this.validRequest = { - compile: { - token: "token-123", - options: { - imageName: "basicImageName/here:2017-1", - compiler: "pdflatex", - timeout: 42 - }, - resources: [] - } - }; - return this.RequestParser = SandboxedModule.require(modulePath, { requires: { - "settings-sharelatex": (this.settings = {}) - } - });}); +describe('RequestParser', function() { + beforeEach(function() { + tk.freeze() + this.callback = sinon.stub() + this.validResource = { + path: 'main.tex', + date: '12:00 01/02/03', + content: 'Hello world' + } + this.validRequest = { + compile: { + token: 'token-123', + options: { + imageName: 'basicImageName/here:2017-1', + compiler: 'pdflatex', + timeout: 42 + }, + resources: [] + } + } + return (this.RequestParser = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': (this.settings = {}) + } + })) + }) - afterEach(function() { return tk.reset(); }); + afterEach(function() { + return tk.reset() + }) - describe("without a top level object", function() { - beforeEach(function() { - return this.RequestParser.parse([], this.callback); - }); + describe('without a top level object', function() { + beforeEach(function() { + return this.RequestParser.parse([], this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("top level object should have a compile attribute") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('top level object should have a compile attribute') + .should.equal(true) + }) + }) - describe("without a compile attribute", function() { - beforeEach(function() { - return this.RequestParser.parse({}, this.callback); - }); + describe('without a compile attribute', function() { + beforeEach(function() { + return this.RequestParser.parse({}, this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("top level object should have a compile attribute") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('top level object should have a compile attribute') + 
.should.equal(true) + }) + }) - describe("without a valid compiler", function() { - beforeEach(function() { - this.validRequest.compile.options.compiler = "not-a-compiler"; - return this.RequestParser.parse(this.validRequest, this.callback); - }); + describe('without a valid compiler', function() { + beforeEach(function() { + this.validRequest.compile.options.compiler = 'not-a-compiler' + return this.RequestParser.parse(this.validRequest, this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("compiler attribute should be one of: pdflatex, latex, xelatex, lualatex") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith( + 'compiler attribute should be one of: pdflatex, latex, xelatex, lualatex' + ) + .should.equal(true) + }) + }) - describe("without a compiler specified", function() { - beforeEach(function() { - delete this.validRequest.compile.options.compiler; - return this.RequestParser.parse(this.validRequest, (error, data) => { - this.data = data; - - }); - }); + describe('without a compiler specified', function() { + beforeEach(function() { + delete this.validRequest.compile.options.compiler + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data + }) + }) - return it("should set the compiler to pdflatex by default", function() { - return this.data.compiler.should.equal("pdflatex"); - }); - }); + return it('should set the compiler to pdflatex by default', function() { + return this.data.compiler.should.equal('pdflatex') + }) + }) - describe("with imageName set", function() { - beforeEach(function() { - return this.RequestParser.parse(this.validRequest, (error, data) => { - this.data = data; - - }); - }); + describe('with imageName set', function() { + beforeEach(function() { + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data + }) + }) - return it("should set the imageName", function() { - return this.data.imageName.should.equal("basicImageName/here:2017-1"); - }); - }); + return it('should set the imageName', function() { + return this.data.imageName.should.equal('basicImageName/here:2017-1') + }) + }) - describe("with flags set", function() { - beforeEach(function() { - this.validRequest.compile.options.flags = ["-file-line-error"]; - return this.RequestParser.parse(this.validRequest, (error, data) => { - this.data = data; - - }); - }); + describe('with flags set', function() { + beforeEach(function() { + this.validRequest.compile.options.flags = ['-file-line-error'] + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data + }) + }) - return it("should set the flags attribute", function() { - return expect(this.data.flags).to.deep.equal(["-file-line-error"]); - }); -}); + return it('should set the flags attribute', function() { + return expect(this.data.flags).to.deep.equal(['-file-line-error']) + }) + }) - describe("with flags not specified", function() { - beforeEach(function() { - return this.RequestParser.parse(this.validRequest, (error, data) => { - this.data = data; - - }); - }); + describe('with flags not specified', function() { + beforeEach(function() { + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data + }) + }) - return it("it should have an empty flags list", function() { - return expect(this.data.flags).to.deep.equal([]); - }); -}); + return it('it should have an empty flags list', function() { + return 
expect(this.data.flags).to.deep.equal([]) + }) + }) - describe("without a timeout specified", function() { - beforeEach(function() { - delete this.validRequest.compile.options.timeout; - return this.RequestParser.parse(this.validRequest, (error, data) => { - this.data = data; - - }); - }); + describe('without a timeout specified', function() { + beforeEach(function() { + delete this.validRequest.compile.options.timeout + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data + }) + }) - return it("should set the timeout to MAX_TIMEOUT", function() { - return this.data.timeout.should.equal(this.RequestParser.MAX_TIMEOUT * 1000); - }); - }); + return it('should set the timeout to MAX_TIMEOUT', function() { + return this.data.timeout.should.equal( + this.RequestParser.MAX_TIMEOUT * 1000 + ) + }) + }) - describe("with a timeout larger than the maximum", function() { - beforeEach(function() { - this.validRequest.compile.options.timeout = this.RequestParser.MAX_TIMEOUT + 1; - return this.RequestParser.parse(this.validRequest, (error, data) => { - this.data = data; - - }); - }); + describe('with a timeout larger than the maximum', function() { + beforeEach(function() { + this.validRequest.compile.options.timeout = + this.RequestParser.MAX_TIMEOUT + 1 + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data + }) + }) - return it("should set the timeout to MAX_TIMEOUT", function() { - return this.data.timeout.should.equal(this.RequestParser.MAX_TIMEOUT * 1000); - }); - }); + return it('should set the timeout to MAX_TIMEOUT', function() { + return this.data.timeout.should.equal( + this.RequestParser.MAX_TIMEOUT * 1000 + ) + }) + }) - describe("with a timeout", function() { - beforeEach(function() { - return this.RequestParser.parse(this.validRequest, (error, data) => { - this.data = data; - - }); - }); + describe('with a timeout', function() { + beforeEach(function() { + return this.RequestParser.parse(this.validRequest, (error, data) => { + this.data = data + }) + }) - return it("should set the timeout (in milliseconds)", function() { - return this.data.timeout.should.equal(this.validRequest.compile.options.timeout * 1000); - }); - }); + return it('should set the timeout (in milliseconds)', function() { + return this.data.timeout.should.equal( + this.validRequest.compile.options.timeout * 1000 + ) + }) + }) - describe("with a resource without a path", function() { - beforeEach(function() { - delete this.validResource.path; - this.validRequest.compile.resources.push(this.validResource); - return this.RequestParser.parse(this.validRequest, this.callback); - }); + describe('with a resource without a path', function() { + beforeEach(function() { + delete this.validResource.path + this.validRequest.compile.resources.push(this.validResource) + return this.RequestParser.parse(this.validRequest, this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("all resources should have a path attribute") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('all resources should have a path attribute') + .should.equal(true) + }) + }) - describe("with a resource with a path", function() { - beforeEach(function() { - this.validResource.path = (this.path = "test.tex"); - this.validRequest.compile.resources.push(this.validResource); - this.RequestParser.parse(this.validRequest, this.callback); - return this.data = 
this.callback.args[0][1];}); + describe('with a resource with a path', function() { + beforeEach(function() { + this.validResource.path = this.path = 'test.tex' + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should return the path in the parsed response", function() { - return this.data.resources[0].path.should.equal(this.path); - }); - }); + return it('should return the path in the parsed response', function() { + return this.data.resources[0].path.should.equal(this.path) + }) + }) - describe("with a resource with a malformed modified date", function() { - beforeEach(function() { - this.validResource.modified = "not-a-date"; - this.validRequest.compile.resources.push(this.validResource); - return this.RequestParser.parse(this.validRequest, this.callback); - }); + describe('with a resource with a malformed modified date', function() { + beforeEach(function() { + this.validResource.modified = 'not-a-date' + this.validRequest.compile.resources.push(this.validResource) + return this.RequestParser.parse(this.validRequest, this.callback) + }) - return it("should return an error", function() { - return this.callback - .calledWith( - "resource modified date could not be understood: "+ - this.validResource.modified - ) - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith( + 'resource modified date could not be understood: ' + + this.validResource.modified + ) + .should.equal(true) + }) + }) - describe("with a resource with a valid date", function() { - beforeEach(function() { - this.date = "12:00 01/02/03"; - this.validResource.modified = this.date; - this.validRequest.compile.resources.push(this.validResource); - this.RequestParser.parse(this.validRequest, this.callback); - return this.data = this.callback.args[0][1];}); + describe('with a resource with a valid date', function() { + beforeEach(function() { + this.date = '12:00 01/02/03' + this.validResource.modified = this.date + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should return the date as a Javascript Date object", function() { - (this.data.resources[0].modified instanceof Date).should.equal(true); - return this.data.resources[0].modified.getTime().should.equal(Date.parse(this.date)); - }); - }); + return it('should return the date as a Javascript Date object', function() { + ;(this.data.resources[0].modified instanceof Date).should.equal(true) + return this.data.resources[0].modified + .getTime() + .should.equal(Date.parse(this.date)) + }) + }) - describe("with a resource without either a content or URL attribute", function() { - beforeEach(function() { - delete this.validResource.url; - delete this.validResource.content; - this.validRequest.compile.resources.push(this.validResource); - return this.RequestParser.parse(this.validRequest, this.callback); - }); + describe('with a resource without either a content or URL attribute', function() { + beforeEach(function() { + delete this.validResource.url + delete this.validResource.content + this.validRequest.compile.resources.push(this.validResource) + return this.RequestParser.parse(this.validRequest, this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("all resources should have either a 
url or content attribute") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith( + 'all resources should have either a url or content attribute' + ) + .should.equal(true) + }) + }) - describe("with a resource where the content is not a string", function() { - beforeEach(function() { - this.validResource.content = []; - this.validRequest.compile.resources.push(this.validResource); - return this.RequestParser.parse((this.validRequest), this.callback); - }); + describe('with a resource where the content is not a string', function() { + beforeEach(function() { + this.validResource.content = [] + this.validRequest.compile.resources.push(this.validResource) + return this.RequestParser.parse(this.validRequest, this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("content attribute should be a string") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('content attribute should be a string') + .should.equal(true) + }) + }) - describe("with a resource where the url is not a string", function() { - beforeEach(function() { - this.validResource.url = []; - this.validRequest.compile.resources.push(this.validResource); - return this.RequestParser.parse((this.validRequest), this.callback); - }); + describe('with a resource where the url is not a string', function() { + beforeEach(function() { + this.validResource.url = [] + this.validRequest.compile.resources.push(this.validResource) + return this.RequestParser.parse(this.validRequest, this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("url attribute should be a string") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('url attribute should be a string') + .should.equal(true) + }) + }) - describe("with a resource with a url", function() { - beforeEach(function() { - this.validResource.url = (this.url = "www.example.com"); - this.validRequest.compile.resources.push(this.validResource); - this.RequestParser.parse((this.validRequest), this.callback); - return this.data = this.callback.args[0][1];}); + describe('with a resource with a url', function() { + beforeEach(function() { + this.validResource.url = this.url = 'www.example.com' + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should return the url in the parsed response", function() { - return this.data.resources[0].url.should.equal(this.url); - }); - }); + return it('should return the url in the parsed response', function() { + return this.data.resources[0].url.should.equal(this.url) + }) + }) - describe("with a resource with a content attribute", function() { - beforeEach(function() { - this.validResource.content = (this.content = "Hello world"); - this.validRequest.compile.resources.push(this.validResource); - this.RequestParser.parse((this.validRequest), this.callback); - return this.data = this.callback.args[0][1];}); + describe('with a resource with a content attribute', function() { + beforeEach(function() { + this.validResource.content = this.content = 'Hello world' + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) 
- return it("should return the content in the parsed response", function() { - return this.data.resources[0].content.should.equal(this.content); - }); - }); + return it('should return the content in the parsed response', function() { + return this.data.resources[0].content.should.equal(this.content) + }) + }) - describe("without a root resource path", function() { - beforeEach(function() { - delete this.validRequest.compile.rootResourcePath; - this.RequestParser.parse((this.validRequest), this.callback); - return this.data = this.callback.args[0][1];}); + describe('without a root resource path', function() { + beforeEach(function() { + delete this.validRequest.compile.rootResourcePath + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should set the root resource path to 'main.tex' by default", function() { - return this.data.rootResourcePath.should.equal("main.tex"); - }); - }); + return it("should set the root resource path to 'main.tex' by default", function() { + return this.data.rootResourcePath.should.equal('main.tex') + }) + }) - describe("with a root resource path", function() { - beforeEach(function() { - this.validRequest.compile.rootResourcePath = (this.path = "test.tex"); - this.RequestParser.parse((this.validRequest), this.callback); - return this.data = this.callback.args[0][1];}); + describe('with a root resource path', function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = this.path = 'test.tex' + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should return the root resource path in the parsed response", function() { - return this.data.rootResourcePath.should.equal(this.path); - }); - }); + return it('should return the root resource path in the parsed response', function() { + return this.data.rootResourcePath.should.equal(this.path) + }) + }) - describe("with a root resource path that is not a string", function() { - beforeEach(function() { - this.validRequest.compile.rootResourcePath = []; - return this.RequestParser.parse((this.validRequest), this.callback); - }); + describe('with a root resource path that is not a string', function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = [] + return this.RequestParser.parse(this.validRequest, this.callback) + }) - return it("should return an error", function() { - return this.callback.calledWith("rootResourcePath attribute should be a string") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('rootResourcePath attribute should be a string') + .should.equal(true) + }) + }) - describe("with a root resource path that needs escaping", function() { - beforeEach(function() { - this.badPath = "`rm -rf foo`.tex"; - this.goodPath = "rm -rf foo.tex"; - this.validRequest.compile.rootResourcePath = this.badPath; - this.validRequest.compile.resources.push({ - path: this.badPath, - date: "12:00 01/02/03", - content: "Hello world" - }); - this.RequestParser.parse(this.validRequest, this.callback); - return this.data = this.callback.args[0][1];}); + describe('with a root resource path that needs escaping', function() { + beforeEach(function() { + this.badPath = '`rm -rf foo`.tex' + this.goodPath = 'rm -rf foo.tex' + this.validRequest.compile.rootResourcePath = this.badPath + this.validRequest.compile.resources.push({ + path: this.badPath, + date: '12:00 
01/02/03', + content: 'Hello world' + }) + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - it("should return the escaped resource", function() { - return this.data.rootResourcePath.should.equal(this.goodPath); - }); + it('should return the escaped resource', function() { + return this.data.rootResourcePath.should.equal(this.goodPath) + }) - return it("should also escape the resource path", function() { - return this.data.resources[0].path.should.equal(this.goodPath); - }); - }); + return it('should also escape the resource path', function() { + return this.data.resources[0].path.should.equal(this.goodPath) + }) + }) - describe("with a root resource path that has a relative path", function() { - beforeEach(function() { - this.validRequest.compile.rootResourcePath = "foo/../../bar.tex"; - this.RequestParser.parse(this.validRequest, this.callback); - return this.data = this.callback.args[0][1];}); + describe('with a root resource path that has a relative path', function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = 'foo/../../bar.tex' + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should return an error", function() { - return this.callback.calledWith("relative path in root resource") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('relative path in root resource') + .should.equal(true) + }) + }) - describe("with a root resource path that has unescaped + relative path", function() { - beforeEach(function() { - this.validRequest.compile.rootResourcePath = "foo/#../bar.tex"; - this.RequestParser.parse(this.validRequest, this.callback); - return this.data = this.callback.args[0][1];}); + describe('with a root resource path that has unescaped + relative path', function() { + beforeEach(function() { + this.validRequest.compile.rootResourcePath = 'foo/#../bar.tex' + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should return an error", function() { - return this.callback.calledWith("relative path in root resource") - .should.equal(true); - }); - }); + return it('should return an error', function() { + return this.callback + .calledWith('relative path in root resource') + .should.equal(true) + }) + }) - return describe("with an unknown syncType", function() { - beforeEach(function() { - this.validRequest.compile.options.syncType = "unexpected"; - this.RequestParser.parse(this.validRequest, this.callback); - return this.data = this.callback.args[0][1];}); + return describe('with an unknown syncType', function() { + beforeEach(function() { + this.validRequest.compile.options.syncType = 'unexpected' + this.RequestParser.parse(this.validRequest, this.callback) + return (this.data = this.callback.args[0][1]) + }) - return it("should return an error", function() { - return this.callback.calledWith("syncType attribute should be one of: full, incremental") - .should.equal(true); - }); - }); -}); + return it('should return an error', function() { + return this.callback + .calledWith('syncType attribute should be one of: full, incremental') + .should.equal(true) + }) + }) +}) diff --git a/test/unit/js/ResourceStateManagerTests.js b/test/unit/js/ResourceStateManagerTests.js index fe52cc5..c0e89ef 100644 --- a/test/unit/js/ResourceStateManagerTests.js +++ 
b/test/unit/js/ResourceStateManagerTests.js @@ -9,145 +9,200 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -const should = require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/ResourceStateManager'); -const Path = require("path"); -const Errors = require("../../../app/js/Errors"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const should = require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/ResourceStateManager' +) +const Path = require('path') +const Errors = require('../../../app/js/Errors') -describe("ResourceStateManager", function() { - beforeEach(function() { - this.ResourceStateManager = SandboxedModule.require(modulePath, { requires: { - "fs": (this.fs = {}), - "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()}, - "./SafeReader": (this.SafeReader = {}) - } - }); - this.basePath = "/path/to/write/files/to"; - this.resources = [ - {path: "resource-1-mock"}, - {path: "resource-2-mock"}, - {path: "resource-3-mock"} - ]; - this.state = "1234567890"; - this.resourceFileName = `${this.basePath}/.project-sync-state`; - this.resourceFileContents = `${this.resources[0].path}\n${this.resources[1].path}\n${this.resources[2].path}\nstateHash:${this.state}`; - return this.callback = sinon.stub(); - }); +describe('ResourceStateManager', function() { + beforeEach(function() { + this.ResourceStateManager = SandboxedModule.require(modulePath, { + requires: { + fs: (this.fs = {}), + 'logger-sharelatex': { log: sinon.stub(), err: sinon.stub() }, + './SafeReader': (this.SafeReader = {}) + } + }) + this.basePath = '/path/to/write/files/to' + this.resources = [ + { path: 'resource-1-mock' }, + { path: 'resource-2-mock' }, + { path: 'resource-3-mock' } + ] + this.state = '1234567890' + this.resourceFileName = `${this.basePath}/.project-sync-state` + this.resourceFileContents = `${this.resources[0].path}\n${this.resources[1].path}\n${this.resources[2].path}\nstateHash:${this.state}` + return (this.callback = sinon.stub()) + }) - describe("saveProjectState", function() { - beforeEach(function() { - return this.fs.writeFile = sinon.stub().callsArg(2); - }); + describe('saveProjectState', function() { + beforeEach(function() { + return (this.fs.writeFile = sinon.stub().callsArg(2)) + }) - describe("when the state is specified", function() { - beforeEach(function() { - return this.ResourceStateManager.saveProjectState(this.state, this.resources, this.basePath, this.callback); - }); + describe('when the state is specified', function() { + beforeEach(function() { + return this.ResourceStateManager.saveProjectState( + this.state, + this.resources, + this.basePath, + this.callback + ) + }) - it("should write the resource list to disk", function() { - return this.fs.writeFile - .calledWith(this.resourceFileName, this.resourceFileContents) - .should.equal(true); - }); + it('should write the resource list to disk', function() { + return this.fs.writeFile + .calledWith(this.resourceFileName, this.resourceFileContents) + .should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - return 
describe("when the state is undefined", function() { - beforeEach(function() { - this.state = undefined; - this.fs.unlink = sinon.stub().callsArg(1); - return this.ResourceStateManager.saveProjectState(this.state, this.resources, this.basePath, this.callback); - }); + return describe('when the state is undefined', function() { + beforeEach(function() { + this.state = undefined + this.fs.unlink = sinon.stub().callsArg(1) + return this.ResourceStateManager.saveProjectState( + this.state, + this.resources, + this.basePath, + this.callback + ) + }) - it("should unlink the resource file", function() { - return this.fs.unlink - .calledWith(this.resourceFileName) - .should.equal(true); - }); + it('should unlink the resource file', function() { + return this.fs.unlink + .calledWith(this.resourceFileName) + .should.equal(true) + }) - it("should not write the resource list to disk", function() { - return this.fs.writeFile.called.should.equal(false); - }); + it('should not write the resource list to disk', function() { + return this.fs.writeFile.called.should.equal(false) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) + }) - describe("checkProjectStateMatches", function() { + describe('checkProjectStateMatches', function() { + describe('when the state matches', function() { + beforeEach(function() { + this.SafeReader.readFile = sinon + .stub() + .callsArgWith(3, null, this.resourceFileContents) + return this.ResourceStateManager.checkProjectStateMatches( + this.state, + this.basePath, + this.callback + ) + }) - describe("when the state matches", function() { - beforeEach(function() { - this.SafeReader.readFile = sinon.stub().callsArgWith(3, null, this.resourceFileContents); - return this.ResourceStateManager.checkProjectStateMatches(this.state, this.basePath, this.callback); - }); + it('should read the resource file', function() { + return this.SafeReader.readFile + .calledWith(this.resourceFileName) + .should.equal(true) + }) - it("should read the resource file", function() { - return this.SafeReader.readFile - .calledWith(this.resourceFileName) - .should.equal(true); - }); + return it('should call the callback with the results', function() { + return this.callback + .calledWithMatch(null, this.resources) + .should.equal(true) + }) + }) - return it("should call the callback with the results", function() { - return this.callback.calledWithMatch(null, this.resources).should.equal(true); - }); - }); + return describe('when the state does not match', function() { + beforeEach(function() { + this.SafeReader.readFile = sinon + .stub() + .callsArgWith(3, null, this.resourceFileContents) + return this.ResourceStateManager.checkProjectStateMatches( + 'not-the-original-state', + this.basePath, + this.callback + ) + }) - return describe("when the state does not match", function() { - beforeEach(function() { - this.SafeReader.readFile = sinon.stub().callsArgWith(3, null, this.resourceFileContents); - return this.ResourceStateManager.checkProjectStateMatches("not-the-original-state", this.basePath, this.callback); - }); + return it('should call the callback with an error', function() { + const error = new Errors.FilesOutOfSyncError( + 'invalid state for incremental update' + ) + return this.callback.calledWith(error).should.equal(true) + }) + }) + }) - return it("should call the callback with an error", function() { - const 
error = new Errors.FilesOutOfSyncError("invalid state for incremental update"); - return this.callback.calledWith(error).should.equal(true); - }); - }); - }); + return describe('checkResourceFiles', function() { + describe('when all the files are present', function() { + beforeEach(function() { + this.allFiles = [ + this.resources[0].path, + this.resources[1].path, + this.resources[2].path + ] + return this.ResourceStateManager.checkResourceFiles( + this.resources, + this.allFiles, + this.basePath, + this.callback + ) + }) - return describe("checkResourceFiles", function() { - describe("when all the files are present", function() { - beforeEach(function() { - this.allFiles = [ this.resources[0].path, this.resources[1].path, this.resources[2].path]; - return this.ResourceStateManager.checkResourceFiles(this.resources, this.allFiles, this.basePath, this.callback); - }); + return it('should call the callback', function() { + return this.callback.calledWithExactly().should.equal(true) + }) + }) - return it("should call the callback", function() { - return this.callback.calledWithExactly().should.equal(true); - }); - }); + describe('when there is a missing file', function() { + beforeEach(function() { + this.allFiles = [this.resources[0].path, this.resources[1].path] + this.fs.stat = sinon.stub().callsArgWith(1, new Error()) + return this.ResourceStateManager.checkResourceFiles( + this.resources, + this.allFiles, + this.basePath, + this.callback + ) + }) - describe("when there is a missing file", function() { - beforeEach(function() { - this.allFiles = [ this.resources[0].path, this.resources[1].path]; - this.fs.stat = sinon.stub().callsArgWith(1, new Error()); - return this.ResourceStateManager.checkResourceFiles(this.resources, this.allFiles, this.basePath, this.callback); - }); + return it('should call the callback with an error', function() { + const error = new Errors.FilesOutOfSyncError( + 'resource files missing in incremental update' + ) + return this.callback.calledWith(error).should.equal(true) + }) + }) - return it("should call the callback with an error", function() { - const error = new Errors.FilesOutOfSyncError("resource files missing in incremental update"); - return this.callback.calledWith(error).should.equal(true); - }); - }); - - return describe("when a resource contains a relative path", function() { - beforeEach(function() { - this.resources[0].path = "../foo/bar.tex"; - this.allFiles = [ this.resources[0].path, this.resources[1].path, this.resources[2].path]; - return this.ResourceStateManager.checkResourceFiles(this.resources, this.allFiles, this.basePath, this.callback); - }); - - return it("should call the callback with an error", function() { - return this.callback.calledWith(new Error("relative path in resource file list")).should.equal(true); - }); - }); - }); -}); + return describe('when a resource contains a relative path', function() { + beforeEach(function() { + this.resources[0].path = '../foo/bar.tex' + this.allFiles = [ + this.resources[0].path, + this.resources[1].path, + this.resources[2].path + ] + return this.ResourceStateManager.checkResourceFiles( + this.resources, + this.allFiles, + this.basePath, + this.callback + ) + }) + return it('should call the callback with an error', function() { + return this.callback + .calledWith(new Error('relative path in resource file list')) + .should.equal(true) + }) + }) + }) +}) diff --git a/test/unit/js/ResourceWriterTests.js b/test/unit/js/ResourceWriterTests.js index 8309547..189908d 100644 --- 
a/test/unit/js/ResourceWriterTests.js +++ b/test/unit/js/ResourceWriterTests.js @@ -10,405 +10,491 @@ * DS206: Consider reworking classes to avoid initClass * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -const should = require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/ResourceWriter'); -const path = require("path"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const should = require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/ResourceWriter' +) +const path = require('path') -describe("ResourceWriter", function() { - beforeEach(function() { - let Timer; - this.ResourceWriter = SandboxedModule.require(modulePath, { requires: { - "fs": (this.fs = { - mkdir: sinon.stub().callsArg(1), - unlink: sinon.stub().callsArg(1) - }), - "./ResourceStateManager": (this.ResourceStateManager = {}), - "wrench": (this.wrench = {}), - "./UrlCache" : (this.UrlCache = {}), - "mkdirp" : (this.mkdirp = sinon.stub().callsArg(1)), - "./OutputFileFinder": (this.OutputFileFinder = {}), - "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()}, - "./Metrics": (this.Metrics = { - Timer: (Timer = (function() { - Timer = class Timer { - static initClass() { - this.prototype.done = sinon.stub(); - } - }; - Timer.initClass(); - return Timer; - })()) - }) - } - } - ); - this.project_id = "project-id-123"; - this.basePath = "/path/to/write/files/to"; - return this.callback = sinon.stub(); - }); +describe('ResourceWriter', function() { + beforeEach(function() { + let Timer + this.ResourceWriter = SandboxedModule.require(modulePath, { + requires: { + fs: (this.fs = { + mkdir: sinon.stub().callsArg(1), + unlink: sinon.stub().callsArg(1) + }), + './ResourceStateManager': (this.ResourceStateManager = {}), + wrench: (this.wrench = {}), + './UrlCache': (this.UrlCache = {}), + mkdirp: (this.mkdirp = sinon.stub().callsArg(1)), + './OutputFileFinder': (this.OutputFileFinder = {}), + 'logger-sharelatex': { log: sinon.stub(), err: sinon.stub() }, + './Metrics': (this.Metrics = { + Timer: (Timer = (function() { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()) + }) + } + }) + this.project_id = 'project-id-123' + this.basePath = '/path/to/write/files/to' + return (this.callback = sinon.stub()) + }) - describe("syncResourcesToDisk on a full request", function() { - beforeEach(function() { - this.resources = [ - "resource-1-mock", - "resource-2-mock", - "resource-3-mock" - ]; - this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3); - this.ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2); - this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3); - return this.ResourceWriter.syncResourcesToDisk({ - project_id: this.project_id, - syncState: (this.syncState = "0123456789abcdef"), - resources: this.resources - }, this.basePath, this.callback); - }); + describe('syncResourcesToDisk on a full request', function() { + beforeEach(function() { + this.resources = ['resource-1-mock', 'resource-2-mock', 'resource-3-mock'] + this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) + this.ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2) + this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) + return 
this.ResourceWriter.syncResourcesToDisk( + { + project_id: this.project_id, + syncState: (this.syncState = '0123456789abcdef'), + resources: this.resources + }, + this.basePath, + this.callback + ) + }) - it("should remove old files", function() { - return this.ResourceWriter._removeExtraneousFiles - .calledWith(this.resources, this.basePath) - .should.equal(true); - }); + it('should remove old files', function() { + return this.ResourceWriter._removeExtraneousFiles + .calledWith(this.resources, this.basePath) + .should.equal(true) + }) - it("should write each resource to disk", function() { - return Array.from(this.resources).map((resource) => - this.ResourceWriter._writeResourceToDisk - .calledWith(this.project_id, resource, this.basePath) - .should.equal(true)); - }); + it('should write each resource to disk', function() { + return Array.from(this.resources).map(resource => + this.ResourceWriter._writeResourceToDisk + .calledWith(this.project_id, resource, this.basePath) + .should.equal(true) + ) + }) - it("should store the sync state and resource list", function() { - return this.ResourceStateManager.saveProjectState - .calledWith(this.syncState, this.resources, this.basePath) - .should.equal(true); - }); + it('should store the sync state and resource list', function() { + return this.ResourceStateManager.saveProjectState + .calledWith(this.syncState, this.resources, this.basePath) + .should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - describe("syncResourcesToDisk on an incremental update", function() { - beforeEach(function() { - this.resources = [ - "resource-1-mock" - ]; - this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3); - this.ResourceWriter._removeExtraneousFiles = sinon.stub().callsArgWith(2, null, (this.outputFiles = []), (this.allFiles = [])); - this.ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, null, this.resources); - this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3); - this.ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3); - return this.ResourceWriter.syncResourcesToDisk({ - project_id: this.project_id, - syncType: "incremental", - syncState: (this.syncState = "1234567890abcdef"), - resources: this.resources - }, this.basePath, this.callback); - }); + describe('syncResourcesToDisk on an incremental update', function() { + beforeEach(function() { + this.resources = ['resource-1-mock'] + this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) + this.ResourceWriter._removeExtraneousFiles = sinon + .stub() + .callsArgWith(2, null, (this.outputFiles = []), (this.allFiles = [])) + this.ResourceStateManager.checkProjectStateMatches = sinon + .stub() + .callsArgWith(2, null, this.resources) + this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) + this.ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3) + return this.ResourceWriter.syncResourcesToDisk( + { + project_id: this.project_id, + syncType: 'incremental', + syncState: (this.syncState = '1234567890abcdef'), + resources: this.resources + }, + this.basePath, + this.callback + ) + }) - it("should check the sync state matches", function() { - return this.ResourceStateManager.checkProjectStateMatches - .calledWith(this.syncState, this.basePath) - .should.equal(true); - }); + it('should check the sync 
state matches', function() { + return this.ResourceStateManager.checkProjectStateMatches + .calledWith(this.syncState, this.basePath) + .should.equal(true) + }) - it("should remove old files", function() { - return this.ResourceWriter._removeExtraneousFiles - .calledWith(this.resources, this.basePath) - .should.equal(true); - }); + it('should remove old files', function() { + return this.ResourceWriter._removeExtraneousFiles + .calledWith(this.resources, this.basePath) + .should.equal(true) + }) - it("should check each resource exists", function() { - return this.ResourceStateManager.checkResourceFiles - .calledWith(this.resources, this.allFiles, this.basePath) - .should.equal(true); - }); + it('should check each resource exists', function() { + return this.ResourceStateManager.checkResourceFiles + .calledWith(this.resources, this.allFiles, this.basePath) + .should.equal(true) + }) - it("should write each resource to disk", function() { - return Array.from(this.resources).map((resource) => - this.ResourceWriter._writeResourceToDisk - .calledWith(this.project_id, resource, this.basePath) - .should.equal(true)); - }); + it('should write each resource to disk', function() { + return Array.from(this.resources).map(resource => + this.ResourceWriter._writeResourceToDisk + .calledWith(this.project_id, resource, this.basePath) + .should.equal(true) + ) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - describe("syncResourcesToDisk on an incremental update when the state does not match", function() { - beforeEach(function() { - this.resources = [ - "resource-1-mock" - ]; - this.ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, (this.error = new Error())); - return this.ResourceWriter.syncResourcesToDisk({ - project_id: this.project_id, - syncType: "incremental", - syncState: (this.syncState = "1234567890abcdef"), - resources: this.resources - }, this.basePath, this.callback); - }); + describe('syncResourcesToDisk on an incremental update when the state does not match', function() { + beforeEach(function() { + this.resources = ['resource-1-mock'] + this.ResourceStateManager.checkProjectStateMatches = sinon + .stub() + .callsArgWith(2, (this.error = new Error())) + return this.ResourceWriter.syncResourcesToDisk( + { + project_id: this.project_id, + syncType: 'incremental', + syncState: (this.syncState = '1234567890abcdef'), + resources: this.resources + }, + this.basePath, + this.callback + ) + }) - it("should check whether the sync state matches", function() { - return this.ResourceStateManager.checkProjectStateMatches - .calledWith(this.syncState, this.basePath) - .should.equal(true); - }); + it('should check whether the sync state matches', function() { + return this.ResourceStateManager.checkProjectStateMatches + .calledWith(this.syncState, this.basePath) + .should.equal(true) + }) - return it("should call the callback with an error", function() { - return this.callback.calledWith(this.error).should.equal(true); - }); - }); + return it('should call the callback with an error', function() { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + describe('_removeExtraneousFiles', function() { + beforeEach(function() { + this.output_files = [ + { + path: 'output.pdf', + type: 'pdf' + }, + { + path: 'extra/file.tex', + type: 'tex' + }, + { + path: 'extra.aux', + type: 
'aux' + }, + { + path: 'cache/_chunk1' + }, + { + path: 'figures/image-eps-converted-to.pdf', + type: 'pdf' + }, + { + path: 'foo/main-figure0.md5', + type: 'md5' + }, + { + path: 'foo/main-figure0.dpth', + type: 'dpth' + }, + { + path: 'foo/main-figure0.pdf', + type: 'pdf' + }, + { + path: '_minted-main/default-pyg-prefix.pygstyle', + type: 'pygstyle' + }, + { + path: '_minted-main/default.pygstyle', + type: 'pygstyle' + }, + { + path: + '_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex', + type: 'pygtex' + }, + { + path: '_markdown_main/30893013dec5d869a415610079774c2f.md.tex', + type: 'tex' + } + ] + this.resources = 'mock-resources' + this.OutputFileFinder.findOutputFiles = sinon + .stub() + .callsArgWith(2, null, this.output_files) + this.ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1) + return this.ResourceWriter._removeExtraneousFiles( + this.resources, + this.basePath, + this.callback + ) + }) - describe("_removeExtraneousFiles", function() { - beforeEach(function() { - this.output_files = [{ - path: "output.pdf", - type: "pdf" - }, { - path: "extra/file.tex", - type: "tex" - }, { - path: "extra.aux", - type: "aux" - }, { - path: "cache/_chunk1" - },{ - path: "figures/image-eps-converted-to.pdf", - type: "pdf" - },{ - path: "foo/main-figure0.md5", - type: "md5" - }, { - path: "foo/main-figure0.dpth", - type: "dpth" - }, { - path: "foo/main-figure0.pdf", - type: "pdf" - }, { - path: "_minted-main/default-pyg-prefix.pygstyle", - type: "pygstyle" - }, { - path: "_minted-main/default.pygstyle", - type: "pygstyle" - }, { - path: "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex", - type: "pygtex" - }, { - path: "_markdown_main/30893013dec5d869a415610079774c2f.md.tex", - type: "tex" - }]; - this.resources = "mock-resources"; - this.OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, this.output_files); - this.ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1); - return this.ResourceWriter._removeExtraneousFiles(this.resources, this.basePath, this.callback); - }); + it('should find the existing output files', function() { + return this.OutputFileFinder.findOutputFiles + .calledWith(this.resources, this.basePath) + .should.equal(true) + }) - it("should find the existing output files", function() { - return this.OutputFileFinder.findOutputFiles - .calledWith(this.resources, this.basePath) - .should.equal(true); - }); + it('should delete the output files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'output.pdf')) + .should.equal(true) + }) - it("should delete the output files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "output.pdf")) - .should.equal(true); - }); + it('should delete the extra files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'extra/file.tex')) + .should.equal(true) + }) - it("should delete the extra files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "extra/file.tex")) - .should.equal(true); - }); + it('should not delete the extra aux files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'extra.aux')) + .should.equal(false) + }) - it("should not delete the extra aux files", function() { - return 
this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "extra.aux")) - .should.equal(false); - }); - - it("should not delete the knitr cache file", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "cache/_chunk1")) - .should.equal(false); - }); + it('should not delete the knitr cache file', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'cache/_chunk1')) + .should.equal(false) + }) - it("should not delete the epstopdf converted files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "figures/image-eps-converted-to.pdf")) - .should.equal(false); - }); + it('should not delete the epstopdf converted files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join(this.basePath, 'figures/image-eps-converted-to.pdf') + ) + .should.equal(false) + }) - it("should not delete the tikz md5 files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "foo/main-figure0.md5")) - .should.equal(false); - }); + it('should not delete the tikz md5 files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'foo/main-figure0.md5')) + .should.equal(false) + }) - it("should not delete the tikz dpth files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "foo/main-figure0.dpth")) - .should.equal(false); - }); + it('should not delete the tikz dpth files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'foo/main-figure0.dpth')) + .should.equal(false) + }) - it("should not delete the tikz pdf files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "foo/main-figure0.pdf")) - .should.equal(false); - }); + it('should not delete the tikz pdf files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'foo/main-figure0.pdf')) + .should.equal(false) + }) - it("should not delete the minted pygstyle files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "_minted-main/default-pyg-prefix.pygstyle")) - .should.equal(false); - }); + it('should not delete the minted pygstyle files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join(this.basePath, '_minted-main/default-pyg-prefix.pygstyle') + ) + .should.equal(false) + }) - it("should not delete the minted default pygstyle files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "_minted-main/default.pygstyle")) - .should.equal(false); - }); + it('should not delete the minted default pygstyle files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, '_minted-main/default.pygstyle')) + .should.equal(false) + }) - it("should not delete the minted default pygtex files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex")) - .should.equal(false); - }); + it('should not delete the minted default pygtex files', function() { + return 
this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join( + this.basePath, + '_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex' + ) + ) + .should.equal(false) + }) - it("should not delete the markdown md.tex files", function() { - return this.ResourceWriter._deleteFileIfNotDirectory - .calledWith(path.join(this.basePath, "_markdown_main/30893013dec5d869a415610079774c2f.md.tex")) - .should.equal(false); - }); + it('should not delete the markdown md.tex files', function() { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join( + this.basePath, + '_markdown_main/30893013dec5d869a415610079774c2f.md.tex' + ) + ) + .should.equal(false) + }) - it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); + it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) - return it("should time the request", function() { - return this.Metrics.Timer.prototype.done.called.should.equal(true); - }); - }); + return it('should time the request', function() { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) - describe("_writeResourceToDisk", function() { - describe("with a url based resource", function() { - beforeEach(function() { - this.resource = { - path: "main.tex", - url: "http://www.example.com/main.tex", - modified: Date.now() - }; - this.UrlCache.downloadUrlToFile = sinon.stub().callsArgWith(4, "fake error downloading file"); - return this.ResourceWriter._writeResourceToDisk(this.project_id, this.resource, this.basePath, this.callback); - }); + describe('_writeResourceToDisk', function() { + describe('with a url based resource', function() { + beforeEach(function() { + this.resource = { + path: 'main.tex', + url: 'http://www.example.com/main.tex', + modified: Date.now() + } + this.UrlCache.downloadUrlToFile = sinon + .stub() + .callsArgWith(4, 'fake error downloading file') + return this.ResourceWriter._writeResourceToDisk( + this.project_id, + this.resource, + this.basePath, + this.callback + ) + }) - it("should ensure the directory exists", function() { - return this.mkdirp - .calledWith(path.dirname(path.join(this.basePath, this.resource.path))) - .should.equal(true); - }); + it('should ensure the directory exists', function() { + return this.mkdirp + .calledWith( + path.dirname(path.join(this.basePath, this.resource.path)) + ) + .should.equal(true) + }) - it("should write the URL from the cache", function() { - return this.UrlCache.downloadUrlToFile - .calledWith(this.project_id, this.resource.url, path.join(this.basePath, this.resource.path), this.resource.modified) - .should.equal(true); - }); - - it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); + it('should write the URL from the cache', function() { + return this.UrlCache.downloadUrlToFile + .calledWith( + this.project_id, + this.resource.url, + path.join(this.basePath, this.resource.path), + this.resource.modified + ) + .should.equal(true) + }) - return it("should not return an error if the resource writer errored", function() { - return should.not.exist(this.callback.args[0][0]); - }); - }); + it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) - describe("with a content based resource", function() { - beforeEach(function() { - this.resource = { - path: "main.tex", - content: "Hello world" - }; - this.fs.writeFile = sinon.stub().callsArg(2); - return 
this.ResourceWriter._writeResourceToDisk(this.project_id, this.resource, this.basePath, this.callback); - }); + return it('should not return an error if the resource writer errored', function() { + return should.not.exist(this.callback.args[0][0]) + }) + }) - it("should ensure the directory exists", function() { - return this.mkdirp - .calledWith(path.dirname(path.join(this.basePath, this.resource.path))) - .should.equal(true); - }); + describe('with a content based resource', function() { + beforeEach(function() { + this.resource = { + path: 'main.tex', + content: 'Hello world' + } + this.fs.writeFile = sinon.stub().callsArg(2) + return this.ResourceWriter._writeResourceToDisk( + this.project_id, + this.resource, + this.basePath, + this.callback + ) + }) - it("should write the contents to disk", function() { - return this.fs.writeFile - .calledWith(path.join(this.basePath, this.resource.path), this.resource.content) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + it('should ensure the directory exists', function() { + return this.mkdirp + .calledWith( + path.dirname(path.join(this.basePath, this.resource.path)) + ) + .should.equal(true) + }) - return describe("with a file path that breaks out of the root folder", function() { - beforeEach(function() { - this.resource = { - path: "../../main.tex", - content: "Hello world" - }; - this.fs.writeFile = sinon.stub().callsArg(2); - return this.ResourceWriter._writeResourceToDisk(this.project_id, this.resource, this.basePath, this.callback); - }); + it('should write the contents to disk', function() { + return this.fs.writeFile + .calledWith( + path.join(this.basePath, this.resource.path), + this.resource.content + ) + .should.equal(true) + }) - it("should not write to disk", function() { - return this.fs.writeFile.called.should.equal(false); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - return it("should return an error", function() { - return this.callback - .calledWith(new Error("resource path is outside root directory")) - .should.equal(true); - }); - }); - }); - - return describe("checkPath", function() { - describe("with a valid path", function() { - beforeEach(function() { - return this.ResourceWriter.checkPath("foo", "bar", this.callback); - }); + return describe('with a file path that breaks out of the root folder', function() { + beforeEach(function() { + this.resource = { + path: '../../main.tex', + content: 'Hello world' + } + this.fs.writeFile = sinon.stub().callsArg(2) + return this.ResourceWriter._writeResourceToDisk( + this.project_id, + this.resource, + this.basePath, + this.callback + ) + }) - return it("should return the joined path", function() { - return this.callback.calledWith(null, "foo/bar") - .should.equal(true); - }); - }); + it('should not write to disk', function() { + return this.fs.writeFile.called.should.equal(false) + }) - describe("with an invalid path", function() { - beforeEach(function() { - return this.ResourceWriter.checkPath("foo", "baz/../../bar", this.callback); - }); + return it('should return an error', function() { + return this.callback + .calledWith(new Error('resource path is outside root directory')) + .should.equal(true) + }) + }) + }) - return it("should return an error", function() { - return this.callback.calledWith(new Error("resource path is outside root directory")) - .should.equal(true); - }); - }); + return 
describe('checkPath', function() { + describe('with a valid path', function() { + beforeEach(function() { + return this.ResourceWriter.checkPath('foo', 'bar', this.callback) + }) - return describe("with another invalid path matching on a prefix", function() { - beforeEach(function() { - return this.ResourceWriter.checkPath("foo", "../foobar/baz", this.callback); - }); + return it('should return the joined path', function() { + return this.callback.calledWith(null, 'foo/bar').should.equal(true) + }) + }) - return it("should return an error", function() { - return this.callback.calledWith(new Error("resource path is outside root directory")) - .should.equal(true); - }); - }); - }); -}); + describe('with an invalid path', function() { + beforeEach(function() { + return this.ResourceWriter.checkPath( + 'foo', + 'baz/../../bar', + this.callback + ) + }) + + return it('should return an error', function() { + return this.callback + .calledWith(new Error('resource path is outside root directory')) + .should.equal(true) + }) + }) + + return describe('with another invalid path matching on a prefix', function() { + beforeEach(function() { + return this.ResourceWriter.checkPath( + 'foo', + '../foobar/baz', + this.callback + ) + }) + + return it('should return an error', function() { + return this.callback + .calledWith(new Error('resource path is outside root directory')) + .should.equal(true) + }) + }) + }) +}) diff --git a/test/unit/js/StaticServerForbidSymlinksTests.js b/test/unit/js/StaticServerForbidSymlinksTests.js index e754ea7..b9545a4 100644 --- a/test/unit/js/StaticServerForbidSymlinksTests.js +++ b/test/unit/js/StaticServerForbidSymlinksTests.js @@ -9,217 +9,229 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const should = require('chai').should(); -const SandboxedModule = require('sandboxed-module'); -const assert = require('assert'); -const path = require('path'); -const sinon = require('sinon'); -const modulePath = path.join(__dirname, "../../../app/js/StaticServerForbidSymlinks"); -const { expect } = require("chai"); +const should = require('chai').should() +const SandboxedModule = require('sandboxed-module') +const assert = require('assert') +const path = require('path') +const sinon = require('sinon') +const modulePath = path.join( + __dirname, + '../../../app/js/StaticServerForbidSymlinks' +) +const { expect } = require('chai') -describe("StaticServerForbidSymlinks", function() { +describe('StaticServerForbidSymlinks', function() { + beforeEach(function() { + this.settings = { + path: { + compilesDir: '/compiles/here' + } + } - beforeEach(function() { + this.fs = {} + this.ForbidSymlinks = SandboxedModule.require(modulePath, { + requires: { + 'settings-sharelatex': this.settings, + 'logger-sharelatex': { + log() {}, + warn() {}, + error() {} + }, + fs: this.fs + } + }) - this.settings = { - path: { - compilesDir: "/compiles/here" - } - }; + this.dummyStatic = (rootDir, options) => (req, res, next) => + // console.log "dummyStatic serving file", rootDir, "called with", req.url + // serve it + next() - this.fs = {}; - this.ForbidSymlinks = SandboxedModule.require(modulePath, { requires: { - "settings-sharelatex":this.settings, - "logger-sharelatex": { - log() {}, - warn() {}, - error() {} - }, - "fs":this.fs - } - } - ); + this.StaticServerForbidSymlinks = this.ForbidSymlinks( + this.dummyStatic, + this.settings.path.compilesDir + ) + this.req = { + params: { + 
project_id: '12345' + } + } - this.dummyStatic = (rootDir, options) => - (req, res, next) => - // console.log "dummyStatic serving file", rootDir, "called with", req.url - // serve it - next() - - ; + this.res = {} + return (this.req.url = '/12345/output.pdf') + }) - this.StaticServerForbidSymlinks = this.ForbidSymlinks(this.dummyStatic, this.settings.path.compilesDir); - this.req = { - params: { - project_id:"12345" - } - }; + describe('sending a normal file through', function() { + beforeEach(function() { + return (this.fs.realpath = sinon + .stub() + .callsArgWith( + 1, + null, + `${this.settings.path.compilesDir}/${this.req.params.project_id}/output.pdf` + )) + }) - this.res = {}; - return this.req.url = "/12345/output.pdf"; - }); + return it('should call next', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(200) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res, done) + }) + }) + describe('with a missing file', function() { + beforeEach(function() { + return (this.fs.realpath = sinon + .stub() + .callsArgWith( + 1, + { code: 'ENOENT' }, + `${this.settings.path.compilesDir}/${this.req.params.project_id}/unknown.pdf` + )) + }) - describe("sending a normal file through", function() { - beforeEach(function() { - return this.fs.realpath = sinon.stub().callsArgWith(1, null, `${this.settings.path.compilesDir}/${this.req.params.project_id}/output.pdf`); - }); + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) - return it("should call next", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(200); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res, done); - }); - }); + describe('with a symlink file', function() { + beforeEach(function() { + return (this.fs.realpath = sinon + .stub() + .callsArgWith(1, null, `/etc/${this.req.params.project_id}/output.pdf`)) + }) + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) - describe("with a missing file", function() { - beforeEach(function() { - return this.fs.realpath = sinon.stub().callsArgWith(1, {code: 'ENOENT'}, `${this.settings.path.compilesDir}/${this.req.params.project_id}/unknown.pdf`); - }); + describe('with a relative file', function() { + beforeEach(function() { + return (this.req.url = '/12345/../67890/output.pdf') + }) - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) + describe('with a unnormalized file containing .', function() { + beforeEach(function() { + return (this.req.url = '/12345/foo/./output.pdf') + }) - describe("with a symlink file", function() { - beforeEach(function() { - return this.fs.realpath = sinon.stub().callsArgWith(1, null, `/etc/${this.req.params.project_id}/output.pdf`); - }); + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + 
return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); + describe('with a file containing an empty path', function() { + beforeEach(function() { + return (this.req.url = '/12345/foo//output.pdf') + }) + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) - describe("with a relative file", function() { - beforeEach(function() { - return this.req.url = "/12345/../67890/output.pdf"; - }); + describe('with a non-project file', function() { + beforeEach(function() { + return (this.req.url = '/.foo/output.pdf') + }) - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) + describe('with a file outside the compiledir', function() { + beforeEach(function() { + return (this.req.url = '/../bar/output.pdf') + }) - describe("with a unnormalized file containing .", function() { - beforeEach(function() { - return this.req.url = "/12345/foo/./output.pdf"; - }); + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); + describe('with a file with no leading /', function() { + beforeEach(function() { + return (this.req.url = './../bar/output.pdf') + }) + return it('should send a 404', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(404) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) - describe("with a file containing an empty path", function() { - beforeEach(function() { - return this.req.url = "/12345/foo//output.pdf"; - }); + describe('with a github style path', function() { + beforeEach(function() { + this.req.url = '/henryoswald-latex_example/output/output.log' + return (this.fs.realpath = sinon + .stub() + .callsArgWith( + 1, + null, + `${this.settings.path.compilesDir}/henryoswald-latex_example/output/output.log` + )) + }) - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); + return it('should call next', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(200) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res, done) + }) + }) - describe("with a non-project file", function() { - beforeEach(function() { - return this.req.url = "/.foo/output.pdf"; - }); - - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - 
resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); - - describe("with a file outside the compiledir", function() { - beforeEach(function() { - return this.req.url = "/../bar/output.pdf"; - }); - - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); - - - describe("with a file with no leading /", function() { - beforeEach(function() { - return this.req.url = "./../bar/output.pdf"; - }); - - return it("should send a 404", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(404); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); - - describe("with a github style path", function() { - beforeEach(function() { - this.req.url = "/henryoswald-latex_example/output/output.log"; - return this.fs.realpath = sinon.stub().callsArgWith(1, null, `${this.settings.path.compilesDir}/henryoswald-latex_example/output/output.log`); - }); - - return it("should call next", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(200); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res, done); - }); - }); - - return describe("with an error from fs.realpath", function() { - - beforeEach(function() { - return this.fs.realpath = sinon.stub().callsArgWith(1, "error"); - }); - - return it("should send a 500", function(done){ - this.res.sendStatus = function(resCode){ - resCode.should.equal(500); - return done(); - }; - return this.StaticServerForbidSymlinks(this.req, this.res); - }); - }); -}); + return describe('with an error from fs.realpath', function() { + beforeEach(function() { + return (this.fs.realpath = sinon.stub().callsArgWith(1, 'error')) + }) + return it('should send a 500', function(done) { + this.res.sendStatus = function(resCode) { + resCode.should.equal(500) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) +}) diff --git a/test/unit/js/TikzManager.js b/test/unit/js/TikzManager.js index f35d261..1a9874c 100644 --- a/test/unit/js/TikzManager.js +++ b/test/unit/js/TikzManager.js @@ -8,148 +8,180 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/TikzManager'); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join( + __dirname, + '../../../app/js/TikzManager' +) describe('TikzManager', function() { - beforeEach(function() { - return this.TikzManager = SandboxedModule.require(modulePath, { requires: { - "./ResourceWriter": (this.ResourceWriter = {}), - "./SafeReader": (this.SafeReader = {}), - "fs": (this.fs = {}), - "logger-sharelatex": (this.logger = {log() {}}) - } - });}); + beforeEach(function() { + return (this.TikzManager = SandboxedModule.require(modulePath, { + requires: { + './ResourceWriter': (this.ResourceWriter = {}), + './SafeReader': (this.SafeReader = {}), + fs: (this.fs = {}), + 'logger-sharelatex': (this.logger = { log() {} }) + } + })) + }) - describe("checkMainFile", function() { - 
beforeEach(function() { - this.compileDir = "compile-dir"; - this.mainFile = "main.tex"; - return this.callback = sinon.stub(); - }); + describe('checkMainFile', function() { + beforeEach(function() { + this.compileDir = 'compile-dir' + this.mainFile = 'main.tex' + return (this.callback = sinon.stub()) + }) - describe("if there is already an output.tex file in the resources", function() { - beforeEach(function() { - this.resources = [{path:"main.tex"},{path:"output.tex"}]; - return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); - }); + describe('if there is already an output.tex file in the resources', function() { + beforeEach(function() { + this.resources = [{ path: 'main.tex' }, { path: 'output.tex' }] + return this.TikzManager.checkMainFile( + this.compileDir, + this.mainFile, + this.resources, + this.callback + ) + }) - return it("should call the callback with false ", function() { - return this.callback.calledWithExactly(null, false) - .should.equal(true); - }); - }); + return it('should call the callback with false ', function() { + return this.callback.calledWithExactly(null, false).should.equal(true) + }) + }) - return describe("if there is no output.tex file in the resources", function() { - beforeEach(function() { - this.resources = [{path:"main.tex"}]; - return this.ResourceWriter.checkPath = sinon.stub() - .withArgs(this.compileDir, this.mainFile) - .callsArgWith(2, null, `${this.compileDir}/${this.mainFile}`); - }); + return describe('if there is no output.tex file in the resources', function() { + beforeEach(function() { + this.resources = [{ path: 'main.tex' }] + return (this.ResourceWriter.checkPath = sinon + .stub() + .withArgs(this.compileDir, this.mainFile) + .callsArgWith(2, null, `${this.compileDir}/${this.mainFile}`)) + }) - describe("and the main file contains tikzexternalize", function() { - beforeEach(function() { - this.SafeReader.readFile = sinon.stub() - .withArgs(`${this.compileDir}/${this.mainFile}`) - .callsArgWith(3, null, "hello \\tikzexternalize"); - return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); - }); + describe('and the main file contains tikzexternalize', function() { + beforeEach(function() { + this.SafeReader.readFile = sinon + .stub() + .withArgs(`${this.compileDir}/${this.mainFile}`) + .callsArgWith(3, null, 'hello \\tikzexternalize') + return this.TikzManager.checkMainFile( + this.compileDir, + this.mainFile, + this.resources, + this.callback + ) + }) - it("should look at the file on disk", function() { - return this.SafeReader.readFile - .calledWith(`${this.compileDir}/${this.mainFile}`) - .should.equal(true); - }); + it('should look at the file on disk', function() { + return this.SafeReader.readFile + .calledWith(`${this.compileDir}/${this.mainFile}`) + .should.equal(true) + }) - return it("should call the callback with true ", function() { - return this.callback.calledWithExactly(null, true) - .should.equal(true); - }); - }); + return it('should call the callback with true ', function() { + return this.callback.calledWithExactly(null, true).should.equal(true) + }) + }) - describe("and the main file does not contain tikzexternalize", function() { - beforeEach(function() { - this.SafeReader.readFile = sinon.stub() - .withArgs(`${this.compileDir}/${this.mainFile}`) - .callsArgWith(3, null, "hello"); - return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); - }); + describe('and the main file does not 
contain tikzexternalize', function() { + beforeEach(function() { + this.SafeReader.readFile = sinon + .stub() + .withArgs(`${this.compileDir}/${this.mainFile}`) + .callsArgWith(3, null, 'hello') + return this.TikzManager.checkMainFile( + this.compileDir, + this.mainFile, + this.resources, + this.callback + ) + }) - it("should look at the file on disk", function() { - return this.SafeReader.readFile - .calledWith(`${this.compileDir}/${this.mainFile}`) - .should.equal(true); - }); + it('should look at the file on disk', function() { + return this.SafeReader.readFile + .calledWith(`${this.compileDir}/${this.mainFile}`) + .should.equal(true) + }) - return it("should call the callback with false", function() { - return this.callback.calledWithExactly(null, false) - .should.equal(true); - }); - }); + return it('should call the callback with false', function() { + return this.callback.calledWithExactly(null, false).should.equal(true) + }) + }) - return describe("and the main file contains \\usepackage{pstool}", function() { - beforeEach(function() { - this.SafeReader.readFile = sinon.stub() - .withArgs(`${this.compileDir}/${this.mainFile}`) - .callsArgWith(3, null, "hello \\usepackage[random-options]{pstool}"); - return this.TikzManager.checkMainFile(this.compileDir, this.mainFile, this.resources, this.callback); - }); + return describe('and the main file contains \\usepackage{pstool}', function() { + beforeEach(function() { + this.SafeReader.readFile = sinon + .stub() + .withArgs(`${this.compileDir}/${this.mainFile}`) + .callsArgWith(3, null, 'hello \\usepackage[random-options]{pstool}') + return this.TikzManager.checkMainFile( + this.compileDir, + this.mainFile, + this.resources, + this.callback + ) + }) - it("should look at the file on disk", function() { - return this.SafeReader.readFile - .calledWith(`${this.compileDir}/${this.mainFile}`) - .should.equal(true); - }); + it('should look at the file on disk', function() { + return this.SafeReader.readFile + .calledWith(`${this.compileDir}/${this.mainFile}`) + .should.equal(true) + }) - return it("should call the callback with true ", function() { - return this.callback.calledWithExactly(null, true) - .should.equal(true); - }); - }); - }); - }); + return it('should call the callback with true ', function() { + return this.callback.calledWithExactly(null, true).should.equal(true) + }) + }) + }) + }) - return describe("injectOutputFile", function() { - beforeEach(function() { - this.rootDir = "/mock"; - this.filename = "filename.tex"; - this.callback = sinon.stub(); - this.content = `\ + return describe('injectOutputFile', function() { + beforeEach(function() { + this.rootDir = '/mock' + this.filename = 'filename.tex' + this.callback = sinon.stub() + this.content = `\ \\documentclass{article} \\usepackage{tikz} \\tikzexternalize \\begin{document} Hello world \\end{document}\ -`; - this.fs.readFile = sinon.stub().callsArgWith(2, null, this.content); - this.fs.writeFile = sinon.stub().callsArg(3); - this.ResourceWriter.checkPath = sinon.stub().callsArgWith(2, null, `${this.rootDir}/${this.filename}`); - return this.TikzManager.injectOutputFile(this.rootDir, this.filename, this.callback); - }); +` + this.fs.readFile = sinon.stub().callsArgWith(2, null, this.content) + this.fs.writeFile = sinon.stub().callsArg(3) + this.ResourceWriter.checkPath = sinon + .stub() + .callsArgWith(2, null, `${this.rootDir}/${this.filename}`) + return this.TikzManager.injectOutputFile( + this.rootDir, + this.filename, + this.callback + ) + }) - it("sould check the path", 
function() { - return this.ResourceWriter.checkPath.calledWith(this.rootDir, this.filename) - .should.equal(true); - }); + it('sould check the path', function() { + return this.ResourceWriter.checkPath + .calledWith(this.rootDir, this.filename) + .should.equal(true) + }) - it("should read the file", function() { - return this.fs.readFile - .calledWith(`${this.rootDir}/${this.filename}`, "utf8") - .should.equal(true); - }); + it('should read the file', function() { + return this.fs.readFile + .calledWith(`${this.rootDir}/${this.filename}`, 'utf8') + .should.equal(true) + }) - it("should write out the same file as output.tex", function() { - return this.fs.writeFile - .calledWith(`${this.rootDir}/output.tex`, this.content, {flag: 'wx'}) - .should.equal(true); - }); + it('should write out the same file as output.tex', function() { + return this.fs.writeFile + .calledWith(`${this.rootDir}/output.tex`, this.content, { flag: 'wx' }) + .should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); -}); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) +}) diff --git a/test/unit/js/UrlCacheTests.js b/test/unit/js/UrlCacheTests.js index 7f02450..f056a6e 100644 --- a/test/unit/js/UrlCacheTests.js +++ b/test/unit/js/UrlCacheTests.js @@ -10,259 +10,347 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/UrlCache'); -const { EventEmitter } = require("events"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join(__dirname, '../../../app/js/UrlCache') +const { EventEmitter } = require('events') -describe("UrlCache", function() { - beforeEach(function() { - this.callback = sinon.stub(); - this.url = "www.example.com/file"; - this.project_id = "project-id-123"; - return this.UrlCache = SandboxedModule.require(modulePath, { requires: { - "./db" : {}, - "./UrlFetcher" : (this.UrlFetcher = {}), - "logger-sharelatex": (this.logger = {log: sinon.stub()}), - "settings-sharelatex": (this.Settings = { path: {clsiCacheDir: "/cache/dir"} }), - "fs": (this.fs = {}) - } - });}); - - describe("_doesUrlNeedDownloading", function() { - beforeEach(function() { - this.lastModified = new Date(); - return this.lastModifiedRoundedToSeconds = new Date(Math.floor(this.lastModified.getTime() / 1000) * 1000); - }); +describe('UrlCache', function() { + beforeEach(function() { + this.callback = sinon.stub() + this.url = 'www.example.com/file' + this.project_id = 'project-id-123' + return (this.UrlCache = SandboxedModule.require(modulePath, { + requires: { + './db': {}, + './UrlFetcher': (this.UrlFetcher = {}), + 'logger-sharelatex': (this.logger = { log: sinon.stub() }), + 'settings-sharelatex': (this.Settings = { + path: { clsiCacheDir: '/cache/dir' } + }), + fs: (this.fs = {}) + } + })) + }) - describe("when URL does not exist in cache", function() { - beforeEach(function() { - this.UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, null); - return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); - }); + describe('_doesUrlNeedDownloading', 
function() { + beforeEach(function() { + this.lastModified = new Date() + return (this.lastModifiedRoundedToSeconds = new Date( + Math.floor(this.lastModified.getTime() / 1000) * 1000 + )) + }) - return it("should return the callback with true", function() { - return this.callback.calledWith(null, true).should.equal(true); - }); - }); + describe('when URL does not exist in cache', function() { + beforeEach(function() { + this.UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, null) + return this.UrlCache._doesUrlNeedDownloading( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - return describe("when URL does exist in cache", function() { - beforeEach(function() { - this.urlDetails = {}; - return this.UrlCache._findUrlDetails = sinon.stub().callsArgWith(2, null, this.urlDetails); - }); + return it('should return the callback with true', function() { + return this.callback.calledWith(null, true).should.equal(true) + }) + }) - describe("when the modified date is more recent than the cached modified date", function() { - beforeEach(function() { - this.urlDetails.lastModified = new Date(this.lastModified.getTime() - 1000); - return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); - }); + return describe('when URL does exist in cache', function() { + beforeEach(function() { + this.urlDetails = {} + return (this.UrlCache._findUrlDetails = sinon + .stub() + .callsArgWith(2, null, this.urlDetails)) + }) - it("should get the url details", function() { - return this.UrlCache._findUrlDetails - .calledWith(this.project_id, this.url) - .should.equal(true); - }); + describe('when the modified date is more recent than the cached modified date', function() { + beforeEach(function() { + this.urlDetails.lastModified = new Date( + this.lastModified.getTime() - 1000 + ) + return this.UrlCache._doesUrlNeedDownloading( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - return it("should return the callback with true", function() { - return this.callback.calledWith(null, true).should.equal(true); - }); - }); + it('should get the url details', function() { + return this.UrlCache._findUrlDetails + .calledWith(this.project_id, this.url) + .should.equal(true) + }) - describe("when the cached modified date is more recent than the modified date", function() { - beforeEach(function() { - this.urlDetails.lastModified = new Date(this.lastModified.getTime() + 1000); - return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); - }); + return it('should return the callback with true', function() { + return this.callback.calledWith(null, true).should.equal(true) + }) + }) - return it("should return the callback with false", function() { - return this.callback.calledWith(null, false).should.equal(true); - }); - }); + describe('when the cached modified date is more recent than the modified date', function() { + beforeEach(function() { + this.urlDetails.lastModified = new Date( + this.lastModified.getTime() + 1000 + ) + return this.UrlCache._doesUrlNeedDownloading( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - describe("when the cached modified date is equal to the modified date", function() { - beforeEach(function() { - this.urlDetails.lastModified = this.lastModified; - return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); - }); + return it('should return the callback with 
false', function() { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) - return it("should return the callback with false", function() { - return this.callback.calledWith(null, false).should.equal(true); - }); - }); + describe('when the cached modified date is equal to the modified date', function() { + beforeEach(function() { + this.urlDetails.lastModified = this.lastModified + return this.UrlCache._doesUrlNeedDownloading( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - describe("when the provided modified date does not exist", function() { - beforeEach(function() { - this.lastModified = null; - return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); - }); + return it('should return the callback with false', function() { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) - return it("should return the callback with true", function() { - return this.callback.calledWith(null, true).should.equal(true); - }); - }); + describe('when the provided modified date does not exist', function() { + beforeEach(function() { + this.lastModified = null + return this.UrlCache._doesUrlNeedDownloading( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - return describe("when the URL does not have a modified date", function() { - beforeEach(function() { - this.urlDetails.lastModified = null; - return this.UrlCache._doesUrlNeedDownloading(this.project_id, this.url, this.lastModified, this.callback); - }); + return it('should return the callback with true', function() { + return this.callback.calledWith(null, true).should.equal(true) + }) + }) - return it("should return the callback with true", function() { - return this.callback.calledWith(null, true).should.equal(true); - }); - }); - }); - }); + return describe('when the URL does not have a modified date', function() { + beforeEach(function() { + this.urlDetails.lastModified = null + return this.UrlCache._doesUrlNeedDownloading( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - describe("_ensureUrlIsInCache", function() { - beforeEach(function() { - this.UrlFetcher.pipeUrlToFile = sinon.stub().callsArg(2); - return this.UrlCache._updateOrCreateUrlDetails = sinon.stub().callsArg(3); - }); - - describe("when the URL needs updating", function() { - beforeEach(function() { - this.UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, true); - return this.UrlCache._ensureUrlIsInCache(this.project_id, this.url, this.lastModified, this.callback); - }); + return it('should return the callback with true', function() { + return this.callback.calledWith(null, true).should.equal(true) + }) + }) + }) + }) - it("should check that the url needs downloading", function() { - return this.UrlCache._doesUrlNeedDownloading - .calledWith(this.project_id, this.url, this.lastModifiedRoundedToSeconds) - .should.equal(true); - }); + describe('_ensureUrlIsInCache', function() { + beforeEach(function() { + this.UrlFetcher.pipeUrlToFile = sinon.stub().callsArg(2) + return (this.UrlCache._updateOrCreateUrlDetails = sinon + .stub() + .callsArg(3)) + }) - it("should download the URL to the cache file", function() { - return this.UrlFetcher.pipeUrlToFile - .calledWith(this.url, this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) - .should.equal(true); - }); - + describe('when the URL needs updating', function() { + beforeEach(function() { + this.UrlCache._doesUrlNeedDownloading = 
sinon + .stub() + .callsArgWith(3, null, true) + return this.UrlCache._ensureUrlIsInCache( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - it("should update the database entry", function() { - return this.UrlCache._updateOrCreateUrlDetails - .calledWith(this.project_id, this.url, this.lastModifiedRoundedToSeconds) - .should.equal(true); - }); + it('should check that the url needs downloading', function() { + return this.UrlCache._doesUrlNeedDownloading + .calledWith( + this.project_id, + this.url, + this.lastModifiedRoundedToSeconds + ) + .should.equal(true) + }) - return it("should return the callback with the cache file path", function() { - return this.callback - .calledWith(null, this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) - .should.equal(true); - }); - }); + it('should download the URL to the cache file', function() { + return this.UrlFetcher.pipeUrlToFile + .calledWith( + this.url, + this.UrlCache._cacheFilePathForUrl(this.project_id, this.url) + ) + .should.equal(true) + }) - return describe("when the URL does not need updating", function() { - beforeEach(function() { - this.UrlCache._doesUrlNeedDownloading = sinon.stub().callsArgWith(3, null, false); - return this.UrlCache._ensureUrlIsInCache(this.project_id, this.url, this.lastModified, this.callback); - }); - - it("should not download the URL to the cache file", function() { - return this.UrlFetcher.pipeUrlToFile - .called.should.equal(false); - }); + it('should update the database entry', function() { + return this.UrlCache._updateOrCreateUrlDetails + .calledWith( + this.project_id, + this.url, + this.lastModifiedRoundedToSeconds + ) + .should.equal(true) + }) - return it("should return the callback with the cache file path", function() { - return this.callback - .calledWith(null, this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) - .should.equal(true); - }); - }); - }); + return it('should return the callback with the cache file path', function() { + return this.callback + .calledWith( + null, + this.UrlCache._cacheFilePathForUrl(this.project_id, this.url) + ) + .should.equal(true) + }) + }) - describe("downloadUrlToFile", function() { - beforeEach(function() { - this.cachePath = "path/to/cached/url"; - this.destPath = "path/to/destination"; - this.UrlCache._copyFile = sinon.stub().callsArg(2); - this.UrlCache._ensureUrlIsInCache = sinon.stub().callsArgWith(3, null, this.cachePath); - return this.UrlCache.downloadUrlToFile(this.project_id, this.url, this.destPath, this.lastModified, this.callback); - }); + return describe('when the URL does not need updating', function() { + beforeEach(function() { + this.UrlCache._doesUrlNeedDownloading = sinon + .stub() + .callsArgWith(3, null, false) + return this.UrlCache._ensureUrlIsInCache( + this.project_id, + this.url, + this.lastModified, + this.callback + ) + }) - it("should ensure the URL is downloaded and updated in the cache", function() { - return this.UrlCache._ensureUrlIsInCache - .calledWith(this.project_id, this.url, this.lastModified) - .should.equal(true); - }); + it('should not download the URL to the cache file', function() { + return this.UrlFetcher.pipeUrlToFile.called.should.equal(false) + }) - it("should copy the file to the new location", function() { - return this.UrlCache._copyFile - .calledWith(this.cachePath, this.destPath) - .should.equal(true); - }); + return it('should return the callback with the cache file path', function() { + return this.callback + .calledWith( + null, + 
this.UrlCache._cacheFilePathForUrl(this.project_id, this.url) + ) + .should.equal(true) + }) + }) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + describe('downloadUrlToFile', function() { + beforeEach(function() { + this.cachePath = 'path/to/cached/url' + this.destPath = 'path/to/destination' + this.UrlCache._copyFile = sinon.stub().callsArg(2) + this.UrlCache._ensureUrlIsInCache = sinon + .stub() + .callsArgWith(3, null, this.cachePath) + return this.UrlCache.downloadUrlToFile( + this.project_id, + this.url, + this.destPath, + this.lastModified, + this.callback + ) + }) - describe("_deleteUrlCacheFromDisk", function() { - beforeEach(function() { - this.fs.unlink = sinon.stub().callsArg(1); - return this.UrlCache._deleteUrlCacheFromDisk(this.project_id, this.url, this.callback); - }); + it('should ensure the URL is downloaded and updated in the cache', function() { + return this.UrlCache._ensureUrlIsInCache + .calledWith(this.project_id, this.url, this.lastModified) + .should.equal(true) + }) - it("should delete the cache file", function() { - return this.fs.unlink - .calledWith(this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)) - .should.equal(true); - }); + it('should copy the file to the new location', function() { + return this.UrlCache._copyFile + .calledWith(this.cachePath, this.destPath) + .should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - describe("_clearUrlFromCache", function() { - beforeEach(function() { - this.UrlCache._deleteUrlCacheFromDisk = sinon.stub().callsArg(2); - this.UrlCache._clearUrlDetails = sinon.stub().callsArg(2); - return this.UrlCache._clearUrlFromCache(this.project_id, this.url, this.callback); - }); + describe('_deleteUrlCacheFromDisk', function() { + beforeEach(function() { + this.fs.unlink = sinon.stub().callsArg(1) + return this.UrlCache._deleteUrlCacheFromDisk( + this.project_id, + this.url, + this.callback + ) + }) - it("should delete the file on disk", function() { - return this.UrlCache._deleteUrlCacheFromDisk - .calledWith(this.project_id, this.url) - .should.equal(true); - }); + it('should delete the cache file', function() { + return this.fs.unlink + .calledWith( + this.UrlCache._cacheFilePathForUrl(this.project_id, this.url) + ) + .should.equal(true) + }) - it("should clear the entry in the database", function() { - return this.UrlCache._clearUrlDetails - .calledWith(this.project_id, this.url) - .should.equal(true); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + describe('_clearUrlFromCache', function() { + beforeEach(function() { + this.UrlCache._deleteUrlCacheFromDisk = sinon.stub().callsArg(2) + this.UrlCache._clearUrlDetails = sinon.stub().callsArg(2) + return this.UrlCache._clearUrlFromCache( + this.project_id, + this.url, + this.callback + ) + }) - return describe("clearProject", function() { - beforeEach(function() { - this.urls = [ - "www.example.com/file1", - "www.example.com/file2" - ]; - this.UrlCache._findAllUrlsInProject = sinon.stub().callsArgWith(1, null, this.urls); - this.UrlCache._clearUrlFromCache = sinon.stub().callsArg(2); - return 
this.UrlCache.clearProject(this.project_id, this.callback); - }); + it('should delete the file on disk', function() { + return this.UrlCache._deleteUrlCacheFromDisk + .calledWith(this.project_id, this.url) + .should.equal(true) + }) - it("should clear the cache for each url in the project", function() { - return Array.from(this.urls).map((url) => - this.UrlCache._clearUrlFromCache - .calledWith(this.project_id, url) - .should.equal(true)); - }); + it('should clear the entry in the database', function() { + return this.UrlCache._clearUrlDetails + .calledWith(this.project_id, this.url) + .should.equal(true) + }) - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); -}); - - + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) + return describe('clearProject', function() { + beforeEach(function() { + this.urls = ['www.example.com/file1', 'www.example.com/file2'] + this.UrlCache._findAllUrlsInProject = sinon + .stub() + .callsArgWith(1, null, this.urls) + this.UrlCache._clearUrlFromCache = sinon.stub().callsArg(2) + return this.UrlCache.clearProject(this.project_id, this.callback) + }) + + it('should clear the cache for each url in the project', function() { + return Array.from(this.urls).map(url => + this.UrlCache._clearUrlFromCache + .calledWith(this.project_id, url) + .should.equal(true) + ) + }) + + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) +}) diff --git a/test/unit/js/UrlFetcherTests.js b/test/unit/js/UrlFetcherTests.js index 453a386..e5ce52b 100644 --- a/test/unit/js/UrlFetcherTests.js +++ b/test/unit/js/UrlFetcherTests.js @@ -8,152 +8,165 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const SandboxedModule = require('sandboxed-module'); -const sinon = require('sinon'); -require('chai').should(); -const modulePath = require('path').join(__dirname, '../../../app/js/UrlFetcher'); -const { EventEmitter } = require("events"); +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +require('chai').should() +const modulePath = require('path').join(__dirname, '../../../app/js/UrlFetcher') +const { EventEmitter } = require('events') -describe("UrlFetcher", function() { - beforeEach(function() { - this.callback = sinon.stub(); - this.url = "https://www.example.com/file/here?query=string"; - return this.UrlFetcher = SandboxedModule.require(modulePath, { requires: { - request: { defaults: (this.defaults = sinon.stub().returns(this.request = {})) - }, - fs: (this.fs = {}), - "logger-sharelatex": (this.logger = { log: sinon.stub(), error: sinon.stub() }), - "settings-sharelatex": (this.settings = {}) - } - });}); +describe('UrlFetcher', function() { + beforeEach(function() { + this.callback = sinon.stub() + this.url = 'https://www.example.com/file/here?query=string' + return (this.UrlFetcher = SandboxedModule.require(modulePath, { + requires: { + request: { + defaults: (this.defaults = sinon.stub().returns((this.request = {}))) + }, + fs: (this.fs = {}), + 'logger-sharelatex': (this.logger = { + log: sinon.stub(), + error: sinon.stub() + }), + 'settings-sharelatex': (this.settings = {}) + } + })) + }) - it("should turn off the cookie jar in request", function() { - return this.defaults.calledWith({jar: false}) - .should.equal(true); - }); + it('should turn off the 
cookie jar in request', function() { + return this.defaults.calledWith({ jar: false }).should.equal(true) + }) - describe("rewrite url domain if filestoreDomainOveride is set", function() { - beforeEach(function() { - this.path = "/path/to/file/on/disk"; - this.request.get = sinon.stub().returns(this.urlStream = new EventEmitter); - this.urlStream.pipe = sinon.stub(); - this.urlStream.pause = sinon.stub(); - this.urlStream.resume = sinon.stub(); - this.fs.createWriteStream = sinon.stub().returns(this.fileStream = new EventEmitter); - return this.fs.unlink = (file, callback) => callback(); - }); + describe('rewrite url domain if filestoreDomainOveride is set', function() { + beforeEach(function() { + this.path = '/path/to/file/on/disk' + this.request.get = sinon + .stub() + .returns((this.urlStream = new EventEmitter())) + this.urlStream.pipe = sinon.stub() + this.urlStream.pause = sinon.stub() + this.urlStream.resume = sinon.stub() + this.fs.createWriteStream = sinon + .stub() + .returns((this.fileStream = new EventEmitter())) + return (this.fs.unlink = (file, callback) => callback()) + }) - it("should use the normal domain when override not set", function(done){ - this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { - this.request.get.args[0][0].url.should.equal(this.url); - return done(); - }); - this.res = {statusCode: 200}; - this.urlStream.emit("response", this.res); - this.urlStream.emit("end"); - return this.fileStream.emit("finish"); - }); + it('should use the normal domain when override not set', function(done) { + this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { + this.request.get.args[0][0].url.should.equal(this.url) + return done() + }) + this.res = { statusCode: 200 } + this.urlStream.emit('response', this.res) + this.urlStream.emit('end') + return this.fileStream.emit('finish') + }) + return it('should use override domain when filestoreDomainOveride is set', function(done) { + this.settings.filestoreDomainOveride = '192.11.11.11' + this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { + this.request.get.args[0][0].url.should.equal( + '192.11.11.11/file/here?query=string' + ) + return done() + }) + this.res = { statusCode: 200 } + this.urlStream.emit('response', this.res) + this.urlStream.emit('end') + return this.fileStream.emit('finish') + }) + }) - return it("should use override domain when filestoreDomainOveride is set", function(done){ - this.settings.filestoreDomainOveride = "192.11.11.11"; - this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { - this.request.get.args[0][0].url.should.equal("192.11.11.11/file/here?query=string"); - return done(); - }); - this.res = {statusCode: 200}; - this.urlStream.emit("response", this.res); - this.urlStream.emit("end"); - return this.fileStream.emit("finish"); - }); - }); + return describe('pipeUrlToFile', function() { + beforeEach(function(done) { + this.path = '/path/to/file/on/disk' + this.request.get = sinon + .stub() + .returns((this.urlStream = new EventEmitter())) + this.urlStream.pipe = sinon.stub() + this.urlStream.pause = sinon.stub() + this.urlStream.resume = sinon.stub() + this.fs.createWriteStream = sinon + .stub() + .returns((this.fileStream = new EventEmitter())) + this.fs.unlink = (file, callback) => callback() + return done() + }) - return describe("pipeUrlToFile", function() { - beforeEach(function(done){ - this.path = "/path/to/file/on/disk"; - this.request.get = sinon.stub().returns(this.urlStream = new EventEmitter); - this.urlStream.pipe = sinon.stub(); - this.urlStream.pause = 
sinon.stub(); - this.urlStream.resume = sinon.stub(); - this.fs.createWriteStream = sinon.stub().returns(this.fileStream = new EventEmitter); - this.fs.unlink = (file, callback) => callback(); - return done(); - }); + describe('successfully', function() { + beforeEach(function(done) { + this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { + this.callback() + return done() + }) + this.res = { statusCode: 200 } + this.urlStream.emit('response', this.res) + this.urlStream.emit('end') + return this.fileStream.emit('finish') + }) - describe("successfully", function() { - beforeEach(function(done){ - this.UrlFetcher.pipeUrlToFile(this.url, this.path, () => { - this.callback(); - return done(); - }); - this.res = {statusCode: 200}; - this.urlStream.emit("response", this.res); - this.urlStream.emit("end"); - return this.fileStream.emit("finish"); - }); + it('should request the URL', function() { + return this.request.get + .calledWith(sinon.match({ url: this.url })) + .should.equal(true) + }) + it('should open the file for writing', function() { + return this.fs.createWriteStream + .calledWith(this.path) + .should.equal(true) + }) - it("should request the URL", function() { - return this.request.get - .calledWith(sinon.match({"url": this.url})) - .should.equal(true); - }); + it('should pipe the URL to the file', function() { + return this.urlStream.pipe + .calledWith(this.fileStream) + .should.equal(true) + }) - it("should open the file for writing", function() { - return this.fs.createWriteStream - .calledWith(this.path) - .should.equal(true); - }); + return it('should call the callback', function() { + return this.callback.called.should.equal(true) + }) + }) - it("should pipe the URL to the file", function() { - return this.urlStream.pipe - .calledWith(this.fileStream) - .should.equal(true); - }); - - return it("should call the callback", function() { - return this.callback.called.should.equal(true); - }); - }); + describe('with non success status code', function() { + beforeEach(function(done) { + this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => { + this.callback(err) + return done() + }) + this.res = { statusCode: 404 } + this.urlStream.emit('response', this.res) + return this.urlStream.emit('end') + }) - describe("with non success status code", function() { - beforeEach(function(done){ - this.UrlFetcher.pipeUrlToFile(this.url, this.path, err=> { - this.callback(err); - return done(); - }); - this.res = {statusCode: 404}; - this.urlStream.emit("response", this.res); - return this.urlStream.emit("end"); - }); + return it('should call the callback with an error', function() { + return this.callback + .calledWith(new Error('URL returned non-success status code: 404')) + .should.equal(true) + }) + }) - return it("should call the callback with an error", function() { - return this.callback - .calledWith(new Error("URL returned non-success status code: 404")) - .should.equal(true); - }); - }); + return describe('with error', function() { + beforeEach(function(done) { + this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => { + this.callback(err) + return done() + }) + return this.urlStream.emit( + 'error', + (this.error = new Error('something went wrong')) + ) + }) - return describe("with error", function() { - beforeEach(function(done){ - this.UrlFetcher.pipeUrlToFile(this.url, this.path, err=> { - this.callback(err); - return done(); - }); - return this.urlStream.emit("error", (this.error = new Error("something went wrong"))); - }); - - it("should call the callback with the 
error", function() { - return this.callback - .calledWith(this.error) - .should.equal(true); - }); - - return it("should only call the callback once, even if end is called", function() { - this.urlStream.emit("end"); - return this.callback.calledOnce.should.equal(true); - }); - }); - }); -}); + it('should call the callback with the error', function() { + return this.callback.calledWith(this.error).should.equal(true) + }) + return it('should only call the callback once, even if end is called', function() { + this.urlStream.emit('end') + return this.callback.calledOnce.should.equal(true) + }) + }) + }) +}) From a2a3fddd5474b075303c45f78a16031684871de2 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:15:54 +0100 Subject: [PATCH 15/24] decaffeinate: Rename BrokenLatexFileTests.coffee and 9 other files from .coffee to .js --- .../{BrokenLatexFileTests.coffee => BrokenLatexFileTests.js} | 0 .../coffee/{DeleteOldFilesTest.coffee => DeleteOldFilesTest.js} | 0 .../{ExampleDocumentTests.coffee => ExampleDocumentTests.js} | 0 .../{SimpleLatexFileTests.coffee => SimpleLatexFileTests.js} | 0 test/acceptance/coffee/{SynctexTests.coffee => SynctexTests.js} | 0 test/acceptance/coffee/{TimeoutTests.coffee => TimeoutTests.js} | 0 .../coffee/{UrlCachingTests.coffee => UrlCachingTests.js} | 0 .../coffee/{WordcountTests.coffee => WordcountTests.js} | 0 test/acceptance/coffee/helpers/{Client.coffee => Client.js} | 0 test/acceptance/coffee/helpers/{ClsiApp.coffee => ClsiApp.js} | 0 10 files changed, 0 insertions(+), 0 deletions(-) rename test/acceptance/coffee/{BrokenLatexFileTests.coffee => BrokenLatexFileTests.js} (100%) rename test/acceptance/coffee/{DeleteOldFilesTest.coffee => DeleteOldFilesTest.js} (100%) rename test/acceptance/coffee/{ExampleDocumentTests.coffee => ExampleDocumentTests.js} (100%) rename test/acceptance/coffee/{SimpleLatexFileTests.coffee => SimpleLatexFileTests.js} (100%) rename test/acceptance/coffee/{SynctexTests.coffee => SynctexTests.js} (100%) rename test/acceptance/coffee/{TimeoutTests.coffee => TimeoutTests.js} (100%) rename test/acceptance/coffee/{UrlCachingTests.coffee => UrlCachingTests.js} (100%) rename test/acceptance/coffee/{WordcountTests.coffee => WordcountTests.js} (100%) rename test/acceptance/coffee/helpers/{Client.coffee => Client.js} (100%) rename test/acceptance/coffee/helpers/{ClsiApp.coffee => ClsiApp.js} (100%) diff --git a/test/acceptance/coffee/BrokenLatexFileTests.coffee b/test/acceptance/coffee/BrokenLatexFileTests.js similarity index 100% rename from test/acceptance/coffee/BrokenLatexFileTests.coffee rename to test/acceptance/coffee/BrokenLatexFileTests.js diff --git a/test/acceptance/coffee/DeleteOldFilesTest.coffee b/test/acceptance/coffee/DeleteOldFilesTest.js similarity index 100% rename from test/acceptance/coffee/DeleteOldFilesTest.coffee rename to test/acceptance/coffee/DeleteOldFilesTest.js diff --git a/test/acceptance/coffee/ExampleDocumentTests.coffee b/test/acceptance/coffee/ExampleDocumentTests.js similarity index 100% rename from test/acceptance/coffee/ExampleDocumentTests.coffee rename to test/acceptance/coffee/ExampleDocumentTests.js diff --git a/test/acceptance/coffee/SimpleLatexFileTests.coffee b/test/acceptance/coffee/SimpleLatexFileTests.js similarity index 100% rename from test/acceptance/coffee/SimpleLatexFileTests.coffee rename to test/acceptance/coffee/SimpleLatexFileTests.js diff --git a/test/acceptance/coffee/SynctexTests.coffee b/test/acceptance/coffee/SynctexTests.js similarity index 100% rename from 
test/acceptance/coffee/SynctexTests.coffee rename to test/acceptance/coffee/SynctexTests.js diff --git a/test/acceptance/coffee/TimeoutTests.coffee b/test/acceptance/coffee/TimeoutTests.js similarity index 100% rename from test/acceptance/coffee/TimeoutTests.coffee rename to test/acceptance/coffee/TimeoutTests.js diff --git a/test/acceptance/coffee/UrlCachingTests.coffee b/test/acceptance/coffee/UrlCachingTests.js similarity index 100% rename from test/acceptance/coffee/UrlCachingTests.coffee rename to test/acceptance/coffee/UrlCachingTests.js diff --git a/test/acceptance/coffee/WordcountTests.coffee b/test/acceptance/coffee/WordcountTests.js similarity index 100% rename from test/acceptance/coffee/WordcountTests.coffee rename to test/acceptance/coffee/WordcountTests.js diff --git a/test/acceptance/coffee/helpers/Client.coffee b/test/acceptance/coffee/helpers/Client.js similarity index 100% rename from test/acceptance/coffee/helpers/Client.coffee rename to test/acceptance/coffee/helpers/Client.js diff --git a/test/acceptance/coffee/helpers/ClsiApp.coffee b/test/acceptance/coffee/helpers/ClsiApp.js similarity index 100% rename from test/acceptance/coffee/helpers/ClsiApp.coffee rename to test/acceptance/coffee/helpers/ClsiApp.js From 955749a7c410895fadb34edf88a929ae754c8356 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:16:00 +0100 Subject: [PATCH 16/24] decaffeinate: Convert BrokenLatexFileTests.coffee and 9 other files to JS --- .../acceptance/coffee/BrokenLatexFileTests.js | 100 +++-- test/acceptance/coffee/DeleteOldFilesTest.js | 77 ++-- .../acceptance/coffee/ExampleDocumentTests.js | 281 +++++++----- .../acceptance/coffee/SimpleLatexFileTests.js | 84 ++-- test/acceptance/coffee/SynctexTests.js | 87 ++-- test/acceptance/coffee/TimeoutTests.js | 70 +-- test/acceptance/coffee/UrlCachingTests.js | 418 ++++++++++-------- test/acceptance/coffee/WordcountTests.js | 76 ++-- test/acceptance/coffee/helpers/Client.js | 230 ++++++---- test/acceptance/coffee/helpers/ClsiApp.js | 66 ++- 10 files changed, 883 insertions(+), 606 deletions(-) diff --git a/test/acceptance/coffee/BrokenLatexFileTests.js b/test/acceptance/coffee/BrokenLatexFileTests.js index 8ab4344..5aea625 100644 --- a/test/acceptance/coffee/BrokenLatexFileTests.js +++ b/test/acceptance/coffee/BrokenLatexFileTests.js @@ -1,48 +1,70 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -ClsiApp = require "./helpers/ClsiApp" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const ClsiApp = require("./helpers/ClsiApp"); -describe "Broken LaTeX file", -> - before (done)-> - @broken_request = - resources: [ - path: "main.tex" - content: ''' - \\documentclass{articl % :( - \\begin{documen % :( - Broken - \\end{documen % :( - ''' +describe("Broken LaTeX file", function() { + before(function(done){ + this.broken_request = { + resources: [{ + path: "main.tex", + content: `\ +\\documentclass{articl % :( +\\begin{documen % :( +Broken +\\end{documen % :(\ +` + } ] - @correct_request = - resources: [ - path: "main.tex" - content: ''' - \\documentclass{article} - \\begin{document} - Hello world - \\end{document} - ''' + }; + this.correct_request = { + resources: [{ + path: "main.tex", + content: `\ +\\documentclass{article} 
+\\begin{document} +Hello world +\\end{document}\ +` + } ] - ClsiApp.ensureRunning done + }; + return ClsiApp.ensureRunning(done); + }); - describe "on first run", -> - before (done) -> - @project_id = Client.randomId() - Client.compile @project_id, @broken_request, (@error, @res, @body) => done() + describe("on first run", function() { + before(function(done) { + this.project_id = Client.randomId(); + return Client.compile(this.project_id, this.broken_request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); + }); - it "should return a failure status", -> - @body.compile.status.should.equal "failure" + return it("should return a failure status", function() { + return this.body.compile.status.should.equal("failure"); + }); + }); - describe "on second run", -> - before (done) -> - @project_id = Client.randomId() - Client.compile @project_id, @correct_request, () => - Client.compile @project_id, @broken_request, (@error, @res, @body) => - done() + return describe("on second run", function() { + before(function(done) { + this.project_id = Client.randomId(); + return Client.compile(this.project_id, this.correct_request, () => { + return Client.compile(this.project_id, this.broken_request, (error, res, body) => { + this.error = error; + this.res = res; + this.body = body; + return done(); + }); + }); + }); - it "should return a failure status", -> - @body.compile.status.should.equal "failure" + return it("should return a failure status", function() { + return this.body.compile.status.should.equal("failure"); + }); + }); +}); diff --git a/test/acceptance/coffee/DeleteOldFilesTest.js b/test/acceptance/coffee/DeleteOldFilesTest.js index 1cb6776..d6958c2 100644 --- a/test/acceptance/coffee/DeleteOldFilesTest.js +++ b/test/acceptance/coffee/DeleteOldFilesTest.js @@ -1,36 +1,55 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -ClsiApp = require "./helpers/ClsiApp" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const ClsiApp = require("./helpers/ClsiApp"); -describe "Deleting Old Files", -> - before (done)-> - @request = - resources: [ - path: "main.tex" - content: ''' - \\documentclass{article} - \\begin{document} - Hello world - \\end{document} - ''' +describe("Deleting Old Files", function() { + before(function(done){ + this.request = { + resources: [{ + path: "main.tex", + content: `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +` + } ] - ClsiApp.ensureRunning done + }; + return ClsiApp.ensureRunning(done); + }); - describe "on first run", -> - before (done) -> - @project_id = Client.randomId() - Client.compile @project_id, @request, (@error, @res, @body) => done() + return describe("on first run", function() { + before(function(done) { + this.project_id = Client.randomId(); + return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); + }); - it "should return a success status", -> - @body.compile.status.should.equal "success" + it("should return a success status", function() { + return this.body.compile.status.should.equal("success"); + }); - describe "after file has been deleted", -> - before (done) -> - @request.resources 
= [] - Client.compile @project_id, @request, (@error, @res, @body) => - done() + return describe("after file has been deleted", function() { + before(function(done) { + this.request.resources = []; + return Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error; + this.res = res; + this.body = body; + return done(); + }); + }); - it "should return a failure status", -> - @body.compile.status.should.equal "failure" + return it("should return a failure status", function() { + return this.body.compile.status.should.equal("failure"); + }); + }); + }); +}); diff --git a/test/acceptance/coffee/ExampleDocumentTests.js b/test/acceptance/coffee/ExampleDocumentTests.js index f8e4a77..fe89970 100644 --- a/test/acceptance/coffee/ExampleDocumentTests.js +++ b/test/acceptance/coffee/ExampleDocumentTests.js @@ -1,129 +1,182 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -fs = require "fs" -ChildProcess = require "child_process" -ClsiApp = require "./helpers/ClsiApp" -logger = require("logger-sharelatex") -Path = require("path") -fixturePath = (path) -> Path.normalize(__dirname + "/../fixtures/" + path) -process = require "process" -console.log process.pid, process.ppid, process.getuid(),process.getgroups(), "PID" -try - console.log "creating tmp directory", fixturePath("tmp") - fs.mkdirSync(fixturePath("tmp")) -catch err - console.log err, fixturePath("tmp"), "unable to create fixture tmp path" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const fs = require("fs"); +const ChildProcess = require("child_process"); +const ClsiApp = require("./helpers/ClsiApp"); +const logger = require("logger-sharelatex"); +const Path = require("path"); +const fixturePath = path => Path.normalize(__dirname + "/../fixtures/" + path); +const process = require("process"); +console.log(process.pid, process.ppid, process.getuid(),process.getgroups(), "PID"); +try { + console.log("creating tmp directory", fixturePath("tmp")); + fs.mkdirSync(fixturePath("tmp")); +} catch (error) { + const err = error; + console.log(err, fixturePath("tmp"), "unable to create fixture tmp path"); +} -MOCHA_LATEX_TIMEOUT = 60 * 1000 +const MOCHA_LATEX_TIMEOUT = 60 * 1000; -convertToPng = (pdfPath, pngPath, callback = (error) ->) -> - command = "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}" - console.log "COMMAND" - console.log command - convert = ChildProcess.exec command - stdout = "" - convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString() - convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString() - convert.on "exit", () -> - callback() +const convertToPng = function(pdfPath, pngPath, callback) { + if (callback == null) { callback = function(error) {}; } + const command = `convert ${fixturePath(pdfPath)} ${fixturePath(pngPath)}`; + console.log("COMMAND"); + console.log(command); + const convert = ChildProcess.exec(command); + const stdout = ""; + convert.stdout.on("data", chunk => console.log("STDOUT", chunk.toString())); + convert.stderr.on("data", chunk => console.log("STDERR", chunk.toString())); + 
return convert.on("exit", () => callback()); +}; -compare = (originalPath, generatedPath, callback = (error, same) ->) -> - diff_file = "#{fixturePath(generatedPath)}-diff.png" - proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{diff_file}" - stderr = "" - proc.stderr.on "data", (chunk) -> stderr += chunk - proc.on "exit", () -> - if stderr.trim() == "0 (0)" - # remove output diff if test matches expected image - fs.unlink diff_file, (err) -> - if err - throw err - callback null, true - else - console.log "compare result", stderr - callback null, false +const compare = function(originalPath, generatedPath, callback) { + if (callback == null) { callback = function(error, same) {}; } + const diff_file = `${fixturePath(generatedPath)}-diff.png`; + const proc = ChildProcess.exec(`compare -metric mae ${fixturePath(originalPath)} ${fixturePath(generatedPath)} ${diff_file}`); + let stderr = ""; + proc.stderr.on("data", chunk => stderr += chunk); + return proc.on("exit", function() { + if (stderr.trim() === "0 (0)") { + // remove output diff if test matches expected image + fs.unlink(diff_file, function(err) { + if (err) { + throw err; + } + }); + return callback(null, true); + } else { + console.log("compare result", stderr); + return callback(null, false); + } + }); +}; -checkPdfInfo = (pdfPath, callback = (error, output) ->) -> - proc = ChildProcess.exec "pdfinfo #{fixturePath(pdfPath)}" - stdout = "" - proc.stdout.on "data", (chunk) -> stdout += chunk - proc.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString() - proc.on "exit", () -> - if stdout.match(/Optimized:\s+yes/) - callback null, true - else - callback null, false +const checkPdfInfo = function(pdfPath, callback) { + if (callback == null) { callback = function(error, output) {}; } + const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`); + let stdout = ""; + proc.stdout.on("data", chunk => stdout += chunk); + proc.stderr.on("data", chunk => console.log("STDERR", chunk.toString())); + return proc.on("exit", function() { + if (stdout.match(/Optimized:\s+yes/)) { + return callback(null, true); + } else { + return callback(null, false); + } + }); +}; -compareMultiplePages = (project_id, callback = (error) ->) -> - compareNext = (page_no, callback) -> - path = "tmp/#{project_id}-source-#{page_no}.png" - fs.stat fixturePath(path), (error, stat) -> - if error? - callback() - else - compare "tmp/#{project_id}-source-#{page_no}.png", "tmp/#{project_id}-generated-#{page_no}.png", (error, same) => - throw error if error? 
- same.should.equal true - compareNext page_no + 1, callback - compareNext 0, callback +const compareMultiplePages = function(project_id, callback) { + if (callback == null) { callback = function(error) {}; } + var compareNext = function(page_no, callback) { + const path = `tmp/${project_id}-source-${page_no}.png`; + return fs.stat(fixturePath(path), function(error, stat) { + if (error != null) { + return callback(); + } else { + return compare(`tmp/${project_id}-source-${page_no}.png`, `tmp/${project_id}-generated-${page_no}.png`, (error, same) => { + if (error != null) { throw error; } + same.should.equal(true); + return compareNext(page_no + 1, callback); + }); + } + }); + }; + return compareNext(0, callback); +}; -comparePdf = (project_id, example_dir, callback = (error) ->) -> - console.log "CONVERT" - console.log "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png" - convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) => - throw error if error? - convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) => - throw error if error? - fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) => - if error? - compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) => - throw error if error? - same.should.equal true - callback() - else - compareMultiplePages project_id, (error) -> - throw error if error? - callback() +const comparePdf = function(project_id, example_dir, callback) { + if (callback == null) { callback = function(error) {}; } + console.log("CONVERT"); + console.log(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`); + return convertToPng(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`, error => { + if (error != null) { throw error; } + return convertToPng(`examples/${example_dir}/output.pdf`, `tmp/${project_id}-source.png`, error => { + if (error != null) { throw error; } + return fs.stat(fixturePath(`tmp/${project_id}-source-0.png`), (error, stat) => { + if (error != null) { + return compare(`tmp/${project_id}-source.png`, `tmp/${project_id}-generated.png`, (error, same) => { + if (error != null) { throw error; } + same.should.equal(true); + return callback(); + }); + } else { + return compareMultiplePages(project_id, function(error) { + if (error != null) { throw error; } + return callback(); + }); + } + }); + }); + }); +}; -downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) -> - writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf")) - request.get(url).pipe(writeStream) - console.log("writing file out", fixturePath("tmp/#{project_id}.pdf")) - writeStream.on "close", () => - checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) => - throw error if error? 
- optimised.should.equal true - comparePdf project_id, example_dir, callback +const downloadAndComparePdf = function(project_id, example_dir, url, callback) { + if (callback == null) { callback = function(error) {}; } + const writeStream = fs.createWriteStream(fixturePath(`tmp/${project_id}.pdf`)); + request.get(url).pipe(writeStream); + console.log("writing file out", fixturePath(`tmp/${project_id}.pdf`)); + return writeStream.on("close", () => { + return checkPdfInfo(`tmp/${project_id}.pdf`, (error, optimised) => { + if (error != null) { throw error; } + optimised.should.equal(true); + return comparePdf(project_id, example_dir, callback); + }); + }); +}; -Client.runServer(4242, fixturePath("examples")) +Client.runServer(4242, fixturePath("examples")); -describe "Example Documents", -> - before (done) -> - ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () -> - ClsiApp.ensureRunning done +describe("Example Documents", function() { + before(done => + ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on("exit", () => ClsiApp.ensureRunning(done)) + ); - for example_dir in fs.readdirSync fixturePath("examples") - do (example_dir) -> - describe example_dir, -> - before -> - @project_id = Client.randomId() + "_" + example_dir + return Array.from(fs.readdirSync(fixturePath("examples"))).map((example_dir) => + (example_dir => + describe(example_dir, function() { + before(function() { + return this.project_id = Client.randomId() + "_" + example_dir; + }); - it "should generate the correct pdf", (done) -> - this.timeout(MOCHA_LATEX_TIMEOUT) - Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => - if error || body?.compile?.status is "failure" - console.log "DEBUG: error", error, "body", JSON.stringify(body) - pdf = Client.getOutputFile body, "pdf" - downloadAndComparePdf(@project_id, example_dir, pdf.url, done) + it("should generate the correct pdf", function(done) { + this.timeout(MOCHA_LATEX_TIMEOUT); + return Client.compileDirectory(this.project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => { + if (error || (__guard__(body != null ? body.compile : undefined, x => x.status) === "failure")) { + console.log("DEBUG: error", error, "body", JSON.stringify(body)); + } + const pdf = Client.getOutputFile(body, "pdf"); + return downloadAndComparePdf(this.project_id, example_dir, pdf.url, done); + }); + }); - it "should generate the correct pdf on the second run as well", (done) -> - this.timeout(MOCHA_LATEX_TIMEOUT) - Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => - if error || body?.compile?.status is "failure" - console.log "DEBUG: error", error, "body", JSON.stringify(body) - pdf = Client.getOutputFile body, "pdf" - downloadAndComparePdf(@project_id, example_dir, pdf.url, done) + return it("should generate the correct pdf on the second run as well", function(done) { + this.timeout(MOCHA_LATEX_TIMEOUT); + return Client.compileDirectory(this.project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => { + if (error || (__guard__(body != null ? body.compile : undefined, x => x.status) === "failure")) { + console.log("DEBUG: error", error, "body", JSON.stringify(body)); + } + const pdf = Client.getOutputFile(body, "pdf"); + return downloadAndComparePdf(this.project_id, example_dir, pdf.url, done); + }); + }); + }) + )(example_dir)); +}); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? 
transform(value) : undefined; +} \ No newline at end of file diff --git a/test/acceptance/coffee/SimpleLatexFileTests.js b/test/acceptance/coffee/SimpleLatexFileTests.js index 95b667b..79789e8 100644 --- a/test/acceptance/coffee/SimpleLatexFileTests.js +++ b/test/acceptance/coffee/SimpleLatexFileTests.js @@ -1,41 +1,57 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -ClsiApp = require "./helpers/ClsiApp" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const ClsiApp = require("./helpers/ClsiApp"); -describe "Simple LaTeX file", -> - before (done) -> - @project_id = Client.randomId() - @request = - resources: [ - path: "main.tex" - content: ''' - \\documentclass{article} - \\begin{document} - Hello world - \\end{document} - ''' +describe("Simple LaTeX file", function() { + before(function(done) { + this.project_id = Client.randomId(); + this.request = { + resources: [{ + path: "main.tex", + content: `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +` + } ] - ClsiApp.ensureRunning => - Client.compile @project_id, @request, (@error, @res, @body) => done() + }; + return ClsiApp.ensureRunning(() => { + return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); + }); + }); - it "should return the PDF", -> - pdf = Client.getOutputFile(@body, "pdf") - pdf.type.should.equal "pdf" + it("should return the PDF", function() { + const pdf = Client.getOutputFile(this.body, "pdf"); + return pdf.type.should.equal("pdf"); + }); - it "should return the log", -> - log = Client.getOutputFile(@body, "log") - log.type.should.equal "log" + it("should return the log", function() { + const log = Client.getOutputFile(this.body, "log"); + return log.type.should.equal("log"); + }); - it "should provide the pdf for download", (done) -> - pdf = Client.getOutputFile(@body, "pdf") - request.get pdf.url, (error, res, body) -> - res.statusCode.should.equal 200 - done() + it("should provide the pdf for download", function(done) { + const pdf = Client.getOutputFile(this.body, "pdf"); + return request.get(pdf.url, function(error, res, body) { + res.statusCode.should.equal(200); + return done(); + }); + }); - it "should provide the log for download", (done) -> - log = Client.getOutputFile(@body, "pdf") - request.get log.url, (error, res, body) -> - res.statusCode.should.equal 200 - done() + return it("should provide the log for download", function(done) { + const log = Client.getOutputFile(this.body, "pdf"); + return request.get(log.url, function(error, res, body) { + res.statusCode.should.equal(200); + return done(); + }); + }); +}); diff --git a/test/acceptance/coffee/SynctexTests.js b/test/acceptance/coffee/SynctexTests.js index 685d292..b0ac688 100644 --- a/test/acceptance/coffee/SynctexTests.js +++ b/test/acceptance/coffee/SynctexTests.js @@ -1,41 +1,58 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -expect = require("chai").expect -ClsiApp = require "./helpers/ClsiApp" -crypto = require("crypto") +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks 
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const { expect } = require("chai"); +const ClsiApp = require("./helpers/ClsiApp"); +const crypto = require("crypto"); -describe "Syncing", -> - before (done) -> - content = ''' - \\documentclass{article} - \\begin{document} - Hello world - \\end{document} - ''' - @request = - resources: [ - path: "main.tex" - content: content +describe("Syncing", function() { + before(function(done) { + const content = `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +`; + this.request = { + resources: [{ + path: "main.tex", + content + } ] - @project_id = Client.randomId() - ClsiApp.ensureRunning => - Client.compile @project_id, @request, (@error, @res, @body) => done() + }; + this.project_id = Client.randomId(); + return ClsiApp.ensureRunning(() => { + return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); + }); + }); - describe "from code to pdf", -> - it "should return the correct location", (done) -> - Client.syncFromCode @project_id, "main.tex", 3, 5, (error, pdfPositions) -> - throw error if error? - expect(pdfPositions).to.deep.equal( + describe("from code to pdf", () => + it("should return the correct location", function(done) { + return Client.syncFromCode(this.project_id, "main.tex", 3, 5, function(error, pdfPositions) { + if (error != null) { throw error; } + expect(pdfPositions).to.deep.equal({ pdf: [ { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } ] - ) - done() + }); + return done(); + }); + }) + ); - describe "from pdf to code", -> - it "should return the correct location", (done) -> - Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) => - throw error if error? 
- expect(codePositions).to.deep.equal( + return describe("from pdf to code", () => + it("should return the correct location", function(done) { + return Client.syncFromPdf(this.project_id, 1, 100, 200, (error, codePositions) => { + if (error != null) { throw error; } + expect(codePositions).to.deep.equal({ code: [ { file: 'main.tex', line: 3, column: -1 } ] - ) - done() + }); + return done(); + }); + }) + ); +}); diff --git a/test/acceptance/coffee/TimeoutTests.js b/test/acceptance/coffee/TimeoutTests.js index b274dd5..39d18ed 100644 --- a/test/acceptance/coffee/TimeoutTests.js +++ b/test/acceptance/coffee/TimeoutTests.js @@ -1,34 +1,48 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -ClsiApp = require "./helpers/ClsiApp" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const ClsiApp = require("./helpers/ClsiApp"); -describe "Timed out compile", -> - before (done) -> - @request = - options: - timeout: 10 #seconds - resources: [ - path: "main.tex" - content: ''' - \\documentclass{article} - \\begin{document} - \\def\\x{Hello!\\par\\x} - \\x - \\end{document} - ''' +describe("Timed out compile", function() { + before(function(done) { + this.request = { + options: { + timeout: 10 + }, //seconds + resources: [{ + path: "main.tex", + content: `\ +\\documentclass{article} +\\begin{document} +\\def\\x{Hello!\\par\\x} +\\x +\\end{document}\ +` + } ] - @project_id = Client.randomId() - ClsiApp.ensureRunning => - Client.compile @project_id, @request, (@error, @res, @body) => done() + }; + this.project_id = Client.randomId(); + return ClsiApp.ensureRunning(() => { + return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); + }); + }); - it "should return a timeout error", -> - @body.compile.error.should.equal "container timed out" + it("should return a timeout error", function() { + return this.body.compile.error.should.equal("container timed out"); + }); - it "should return a timedout status", -> - @body.compile.status.should.equal "timedout" + it("should return a timedout status", function() { + return this.body.compile.status.should.equal("timedout"); + }); - it "should return the log output file name", -> - outputFilePaths = @body.compile.outputFiles.map((x) => x.path) - outputFilePaths.should.include('output.log') + return it("should return the log output file name", function() { + const outputFilePaths = this.body.compile.outputFiles.map(x => x.path); + return outputFilePaths.should.include('output.log'); + }); +}); diff --git a/test/acceptance/coffee/UrlCachingTests.js b/test/acceptance/coffee/UrlCachingTests.js index cef7446..3fe947f 100644 --- a/test/acceptance/coffee/UrlCachingTests.js +++ b/test/acceptance/coffee/UrlCachingTests.js @@ -1,222 +1,280 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -sinon = require "sinon" -ClsiApp = require "./helpers/ClsiApp" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = 
require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const sinon = require("sinon"); +const ClsiApp = require("./helpers/ClsiApp"); -host = "localhost" +const host = "localhost"; -Server = - run: () -> - express = require "express" - app = express() +const Server = { + run() { + const express = require("express"); + const app = express(); - staticServer = express.static __dirname + "/../fixtures/" - app.get "/:random_id/*", (req, res, next) => - @getFile(req.url) - req.url = "/" + req.params[0] - staticServer(req, res, next) + const staticServer = express.static(__dirname + "/../fixtures/"); + app.get("/:random_id/*", (req, res, next) => { + this.getFile(req.url); + req.url = `/${req.params[0]}`; + return staticServer(req, res, next); + }); - app.listen 31415, host + return app.listen(31415, host); + }, - getFile: () -> + getFile() {}, - randomId: () -> - Math.random().toString(16).slice(2) + randomId() { + return Math.random().toString(16).slice(2); + } +}; -Server.run() +Server.run(); -describe "Url Caching", -> - describe "Downloading an image for the first time", -> - before (done) -> - @project_id = Client.randomId() - @file = "#{Server.randomId()}/lion.png" - @request = +describe("Url Caching", function() { + describe("Downloading an image for the first time", function() { + before(function(done) { + this.project_id = Client.randomId(); + this.file = `${Server.randomId()}/lion.png`; + this.request = { resources: [{ - path: "main.tex" - content: ''' - \\documentclass{article} - \\usepackage{graphicx} - \\begin{document} - \\includegraphics{lion.png} - \\end{document} - ''' + path: "main.tex", + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +` }, { - path: "lion.png" - url: "http://#{host}:31415/#{@file}" + path: "lion.png", + url: `http://${host}:31415/${this.file}` }] + }; - sinon.spy Server, "getFile" - ClsiApp.ensureRunning => - Client.compile @project_id, @request, (@error, @res, @body) => done() + sinon.spy(Server, "getFile"); + return ClsiApp.ensureRunning(() => { + return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); + }); + }); - afterEach -> - Server.getFile.restore() + afterEach(() => Server.getFile.restore()); - it "should download the image", -> - Server.getFile - .calledWith("/" + @file) - .should.equal true + return it("should download the image", function() { + return Server.getFile + .calledWith(`/${this.file}`) + .should.equal(true); + }); + }); - describe "When an image is in the cache and the last modified date is unchanged", -> - before (done) -> - @project_id = Client.randomId() - @file = "#{Server.randomId()}/lion.png" - @request = + describe("When an image is in the cache and the last modified date is unchanged", function() { + before(function(done) { + this.project_id = Client.randomId(); + this.file = `${Server.randomId()}/lion.png`; + this.request = { resources: [{ - path: "main.tex" - content: ''' - \\documentclass{article} - \\usepackage{graphicx} - \\begin{document} - \\includegraphics{lion.png} - \\end{document} - ''' - }, @image_resource = { - path: "lion.png" - url: "http://#{host}:31415/#{@file}" + path: "main.tex", + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +` + }, (this.image_resource = { + path: "lion.png", + url: 
`http://${host}:31415/${this.file}`, modified: Date.now() - }] + })] + }; - Client.compile @project_id, @request, (@error, @res, @body) => - sinon.spy Server, "getFile" - Client.compile @project_id, @request, (@error, @res, @body) => - done() + return Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error; + this.res = res; + this.body = body; + sinon.spy(Server, "getFile"); + return Client.compile(this.project_id, this.request, (error1, res1, body1) => { + this.error = error1; + this.res = res1; + this.body = body1; + return done(); + }); + }); + }); - after -> - Server.getFile.restore() + after(() => Server.getFile.restore()); - it "should not download the image again", -> - Server.getFile.called.should.equal false + return it("should not download the image again", () => Server.getFile.called.should.equal(false)); + }); - describe "When an image is in the cache and the last modified date is advanced", -> - before (done) -> - @project_id = Client.randomId() - @file = "#{Server.randomId()}/lion.png" - @request = + describe("When an image is in the cache and the last modified date is advanced", function() { + before(function(done) { + this.project_id = Client.randomId(); + this.file = `${Server.randomId()}/lion.png`; + this.request = { resources: [{ - path: "main.tex" - content: ''' - \\documentclass{article} - \\usepackage{graphicx} - \\begin{document} - \\includegraphics{lion.png} - \\end{document} - ''' - }, @image_resource = { - path: "lion.png" - url: "http://#{host}:31415/#{@file}" - modified: @last_modified = Date.now() - }] + path: "main.tex", + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +` + }, (this.image_resource = { + path: "lion.png", + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + })] + }; - Client.compile @project_id, @request, (@error, @res, @body) => - sinon.spy Server, "getFile" - @image_resource.modified = new Date(@last_modified + 3000) - Client.compile @project_id, @request, (@error, @res, @body) => - done() + return Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error; + this.res = res; + this.body = body; + sinon.spy(Server, "getFile"); + this.image_resource.modified = new Date(this.last_modified + 3000); + return Client.compile(this.project_id, this.request, (error1, res1, body1) => { + this.error = error1; + this.res = res1; + this.body = body1; + return done(); + }); + }); + }); - afterEach -> - Server.getFile.restore() + afterEach(() => Server.getFile.restore()); - it "should download the image again", -> - Server.getFile.called.should.equal true + return it("should download the image again", () => Server.getFile.called.should.equal(true)); + }); - describe "When an image is in the cache and the last modified date is further in the past", -> - before (done) -> - @project_id = Client.randomId() - @file = "#{Server.randomId()}/lion.png" - @request = + describe("When an image is in the cache and the last modified date is further in the past", function() { + before(function(done) { + this.project_id = Client.randomId(); + this.file = `${Server.randomId()}/lion.png`; + this.request = { resources: [{ - path: "main.tex" - content: ''' - \\documentclass{article} - \\usepackage{graphicx} - \\begin{document} - \\includegraphics{lion.png} - \\end{document} - ''' - }, @image_resource = { - path: "lion.png" - url: "http://#{host}:31415/#{@file}" - modified: @last_modified = 
Date.now() - }] + path: "main.tex", + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +` + }, (this.image_resource = { + path: "lion.png", + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + })] + }; - Client.compile @project_id, @request, (@error, @res, @body) => - sinon.spy Server, "getFile" - @image_resource.modified = new Date(@last_modified - 3000) - Client.compile @project_id, @request, (@error, @res, @body) => - done() + return Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error; + this.res = res; + this.body = body; + sinon.spy(Server, "getFile"); + this.image_resource.modified = new Date(this.last_modified - 3000); + return Client.compile(this.project_id, this.request, (error1, res1, body1) => { + this.error = error1; + this.res = res1; + this.body = body1; + return done(); + }); + }); + }); - afterEach -> - Server.getFile.restore() + afterEach(() => Server.getFile.restore()); - it "should not download the image again", -> - Server.getFile.called.should.equal false + return it("should not download the image again", () => Server.getFile.called.should.equal(false)); + }); - describe "When an image is in the cache and the last modified date is not specified", -> - before (done) -> - @project_id = Client.randomId() - @file = "#{Server.randomId()}/lion.png" - @request = + describe("When an image is in the cache and the last modified date is not specified", function() { + before(function(done) { + this.project_id = Client.randomId(); + this.file = `${Server.randomId()}/lion.png`; + this.request = { resources: [{ - path: "main.tex" - content: ''' - \\documentclass{article} - \\usepackage{graphicx} - \\begin{document} - \\includegraphics{lion.png} - \\end{document} - ''' - }, @image_resource = { - path: "lion.png" - url: "http://#{host}:31415/#{@file}" - modified: @last_modified = Date.now() - }] + path: "main.tex", + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +` + }, (this.image_resource = { + path: "lion.png", + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + })] + }; - Client.compile @project_id, @request, (@error, @res, @body) => - sinon.spy Server, "getFile" - delete @image_resource.modified - Client.compile @project_id, @request, (@error, @res, @body) => - done() + return Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error; + this.res = res; + this.body = body; + sinon.spy(Server, "getFile"); + delete this.image_resource.modified; + return Client.compile(this.project_id, this.request, (error1, res1, body1) => { + this.error = error1; + this.res = res1; + this.body = body1; + return done(); + }); + }); + }); - afterEach -> - Server.getFile.restore() + afterEach(() => Server.getFile.restore()); - it "should download the image again", -> - Server.getFile.called.should.equal true + return it("should download the image again", () => Server.getFile.called.should.equal(true)); + }); - describe "After clearing the cache", -> - before (done) -> - @project_id = Client.randomId() - @file = "#{Server.randomId()}/lion.png" - @request = + return describe("After clearing the cache", function() { + before(function(done) { + this.project_id = Client.randomId(); + this.file = `${Server.randomId()}/lion.png`; + this.request = { resources: [{ - path: "main.tex" - content: ''' - 
\\documentclass{article} - \\usepackage{graphicx} - \\begin{document} - \\includegraphics{lion.png} - \\end{document} - ''' - }, @image_resource = { - path: "lion.png" - url: "http://#{host}:31415/#{@file}" - modified: @last_modified = Date.now() - }] + path: "main.tex", + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +` + }, (this.image_resource = { + path: "lion.png", + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + })] + }; - Client.compile @project_id, @request, (error) => - throw error if error? - Client.clearCache @project_id, (error, res, body) => - throw error if error? - sinon.spy Server, "getFile" - Client.compile @project_id, @request, (@error, @res, @body) => - done() + return Client.compile(this.project_id, this.request, error => { + if (error != null) { throw error; } + return Client.clearCache(this.project_id, (error, res, body) => { + if (error != null) { throw error; } + sinon.spy(Server, "getFile"); + return Client.compile(this.project_id, this.request, (error1, res1, body1) => { + this.error = error1; + this.res = res1; + this.body = body1; + return done(); + }); + }); + }); + }); - afterEach -> - Server.getFile.restore() + afterEach(() => Server.getFile.restore()); - it "should download the image again", -> - Server.getFile.called.should.equal true + return it("should download the image again", () => Server.getFile.called.should.equal(true)); + }); +}); diff --git a/test/acceptance/coffee/WordcountTests.js b/test/acceptance/coffee/WordcountTests.js index abace06..8c87a7c 100644 --- a/test/acceptance/coffee/WordcountTests.js +++ b/test/acceptance/coffee/WordcountTests.js @@ -1,38 +1,52 @@ -Client = require "./helpers/Client" -request = require "request" -require("chai").should() -expect = require("chai").expect -path = require("path") -fs = require("fs") -ClsiApp = require "./helpers/ClsiApp" +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require("./helpers/Client"); +const request = require("request"); +require("chai").should(); +const { expect } = require("chai"); +const path = require("path"); +const fs = require("fs"); +const ClsiApp = require("./helpers/ClsiApp"); -describe "Syncing", -> - before (done) -> - @request = - resources: [ - path: "main.tex" +describe("Syncing", function() { + before(function(done) { + this.request = { + resources: [{ + path: "main.tex", content: fs.readFileSync(path.join(__dirname,"../fixtures/naugty_strings.txt"),"utf-8") + } ] - @project_id = Client.randomId() - ClsiApp.ensureRunning => - Client.compile @project_id, @request, (@error, @res, @body) => done() + }; + this.project_id = Client.randomId(); + return ClsiApp.ensureRunning(() => { + return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); + }); + }); - describe "wordcount file", -> - it "should return wordcount info", (done) -> - Client.wordcount @project_id, "main.tex", (error, result) -> - throw error if error? 
- expect(result).to.deep.equal( + return describe("wordcount file", () => + it("should return wordcount info", function(done) { + return Client.wordcount(this.project_id, "main.tex", function(error, result) { + if (error != null) { throw error; } + expect(result).to.deep.equal({ texcount: { - encode: "utf8" - textWords: 2281 - headWords: 2 - outside: 0 - headers: 2 - elements: 0 - mathInline: 6 - mathDisplay: 0 - errors: 0 + encode: "utf8", + textWords: 2281, + headWords: 2, + outside: 0, + headers: 2, + elements: 0, + mathInline: 6, + mathDisplay: 0, + errors: 0, messages: "" } - ) - done() + }); + return done(); + }); + }) + ); +}); diff --git a/test/acceptance/coffee/helpers/Client.js b/test/acceptance/coffee/helpers/Client.js index 3913170..4b85413 100644 --- a/test/acceptance/coffee/helpers/Client.js +++ b/test/acceptance/coffee/helpers/Client.js @@ -1,105 +1,147 @@ -request = require "request" -fs = require "fs" -Settings = require "settings-sharelatex" +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let Client; +const request = require("request"); +const fs = require("fs"); +const Settings = require("settings-sharelatex"); -host = "localhost" +const host = "localhost"; -module.exports = Client = - host: Settings.apis.clsi.url +module.exports = (Client = { + host: Settings.apis.clsi.url, - randomId: () -> - Math.random().toString(16).slice(2) + randomId() { + return Math.random().toString(16).slice(2); + }, - compile: (project_id, data, callback = (error, res, body) ->) -> - request.post { - url: "#{@host}/project/#{project_id}/compile" - json: + compile(project_id, data, callback) { + if (callback == null) { callback = function(error, res, body) {}; } + return request.post({ + url: `${this.host}/project/${project_id}/compile`, + json: { compile: data - }, callback - - clearCache: (project_id, callback = (error, res, body) ->) -> - request.del "#{@host}/project/#{project_id}", callback - - getOutputFile: (response, type) -> - for file in response.compile.outputFiles - if file.type == type and file.url.match("output.#{type}") - return file - return null - - runServer: (port, directory) -> - express = require("express") - app = express() - app.use express.static(directory) - console.log("starting test server on", port, host) - app.listen(port, host).on "error", (error) -> - console.error "error starting server:", error.message - process.exit(1) - - - syncFromCode: (project_id, file, line, column, callback = (error, pdfPositions) ->) -> - request.get { - url: "#{@host}/project/#{project_id}/sync/code" - qs: { - file: file - line: line - column: column } - }, (error, response, body) -> - return callback(error) if error? 
- callback null, JSON.parse(body) + }, callback); + }, - syncFromPdf: (project_id, page, h, v, callback = (error, pdfPositions) ->) -> - request.get { - url: "#{@host}/project/#{project_id}/sync/pdf" - qs: { - page: page, - h: h, v: v + clearCache(project_id, callback) { + if (callback == null) { callback = function(error, res, body) {}; } + return request.del(`${this.host}/project/${project_id}`, callback); + }, + + getOutputFile(response, type) { + for (let file of Array.from(response.compile.outputFiles)) { + if ((file.type === type) && file.url.match(`output.${type}`)) { + return file; } - }, (error, response, body) -> - return callback(error) if error? - callback null, JSON.parse(body) + } + return null; + }, - compileDirectory: (project_id, baseDirectory, directory, serverPort, callback = (error, res, body) ->) -> - resources = [] - entities = fs.readdirSync("#{baseDirectory}/#{directory}") - rootResourcePath = "main.tex" - while (entities.length > 0) - entity = entities.pop() - stat = fs.statSync("#{baseDirectory}/#{directory}/#{entity}") - if stat.isDirectory() - entities = entities.concat fs.readdirSync("#{baseDirectory}/#{directory}/#{entity}").map (subEntity) -> - if subEntity == "main.tex" - rootResourcePath = "#{entity}/#{subEntity}" - return "#{entity}/#{subEntity}" - else if stat.isFile() and entity != "output.pdf" - extension = entity.split(".").pop() - if ["tex", "bib", "cls", "sty", "pdf_tex", "Rtex", "ist", "md", "Rmd"].indexOf(extension) > -1 - resources.push - path: entity - content: fs.readFileSync("#{baseDirectory}/#{directory}/#{entity}").toString() - else if ["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1 - resources.push - path: entity - url: "http://#{host}:#{serverPort}/#{directory}/#{entity}" + runServer(port, directory) { + const express = require("express"); + const app = express(); + app.use(express.static(directory)); + console.log("starting test server on", port, host); + return app.listen(port, host).on("error", function(error) { + console.error("error starting server:", error.message); + return process.exit(1); + }); + }, + + + syncFromCode(project_id, file, line, column, callback) { + if (callback == null) { callback = function(error, pdfPositions) {}; } + return request.get({ + url: `${this.host}/project/${project_id}/sync/code`, + qs: { + file, + line, + column + } + }, function(error, response, body) { + if (error != null) { return callback(error); } + return callback(null, JSON.parse(body)); + }); + }, + + syncFromPdf(project_id, page, h, v, callback) { + if (callback == null) { callback = function(error, pdfPositions) {}; } + return request.get({ + url: `${this.host}/project/${project_id}/sync/pdf`, + qs: { + page, + h, v + } + }, function(error, response, body) { + if (error != null) { return callback(error); } + return callback(null, JSON.parse(body)); + }); + }, + + compileDirectory(project_id, baseDirectory, directory, serverPort, callback) { + if (callback == null) { callback = function(error, res, body) {}; } + const resources = []; + let entities = fs.readdirSync(`${baseDirectory}/${directory}`); + let rootResourcePath = "main.tex"; + while (entities.length > 0) { + var entity = entities.pop(); + const stat = fs.statSync(`${baseDirectory}/${directory}/${entity}`); + if (stat.isDirectory()) { + entities = entities.concat(fs.readdirSync(`${baseDirectory}/${directory}/${entity}`).map(function(subEntity) { + if (subEntity === "main.tex") { + rootResourcePath = `${entity}/${subEntity}`; + } + return `${entity}/${subEntity}`; + 
}) + ); + } else if (stat.isFile() && (entity !== "output.pdf")) { + const extension = entity.split(".").pop(); + if (["tex", "bib", "cls", "sty", "pdf_tex", "Rtex", "ist", "md", "Rmd"].indexOf(extension) > -1) { + resources.push({ + path: entity, + content: fs.readFileSync(`${baseDirectory}/${directory}/${entity}`).toString() + }); + } else if (["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1) { + resources.push({ + path: entity, + url: `http://${host}:${serverPort}/${directory}/${entity}`, modified: stat.mtime - - fs.readFile "#{baseDirectory}/#{directory}/options.json", (error, body) => - req = - resources: resources - rootResourcePath: rootResourcePath - - if !error? - body = JSON.parse body - req.options = body - - @compile project_id, req, callback - - wordcount: (project_id, file, callback = (error, pdfPositions) ->) -> - request.get { - url: "#{@host}/project/#{project_id}/wordcount" - qs: { - file: file + }); + } } - }, (error, response, body) -> - return callback(error) if error? - callback null, JSON.parse(body) + } + + return fs.readFile(`${baseDirectory}/${directory}/options.json`, (error, body) => { + const req = { + resources, + rootResourcePath + }; + + if ((error == null)) { + body = JSON.parse(body); + req.options = body; + } + + return this.compile(project_id, req, callback); + }); + }, + + wordcount(project_id, file, callback) { + if (callback == null) { callback = function(error, pdfPositions) {}; } + return request.get({ + url: `${this.host}/project/${project_id}/wordcount`, + qs: { + file + } + }, function(error, response, body) { + if (error != null) { return callback(error); } + return callback(null, JSON.parse(body)); + }); + } +}); diff --git a/test/acceptance/coffee/helpers/ClsiApp.js b/test/acceptance/coffee/helpers/ClsiApp.js index d9cd534..cad63ec 100644 --- a/test/acceptance/coffee/helpers/ClsiApp.js +++ b/test/acceptance/coffee/helpers/ClsiApp.js @@ -1,24 +1,46 @@ -app = require('../../../../app') -require("logger-sharelatex").logger.level("info") -logger = require("logger-sharelatex") -Settings = require("settings-sharelatex") +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const app = require('../../../../app'); +require("logger-sharelatex").logger.level("info"); +const logger = require("logger-sharelatex"); +const Settings = require("settings-sharelatex"); -module.exports = - running: false - initing: false - callbacks: [] - ensureRunning: (callback = (error) ->) -> - if @running - return callback() - else if @initing - @callbacks.push callback - else - @initing = true - @callbacks.push callback - app.listen Settings.internal?.clsi?.port, "localhost", (error) => - throw error if error? - @running = true - logger.log("clsi running in dev mode") +module.exports = { + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { callback = function(error) {}; } + if (this.running) { + return callback(); + } else if (this.initing) { + return this.callbacks.push(callback); + } else { + this.initing = true; + this.callbacks.push(callback); + return app.listen(__guard__(Settings.internal != null ? 
Settings.internal.clsi : undefined, x => x.port), "localhost", error => { + if (error != null) { throw error; } + this.running = true; + logger.log("clsi running in dev mode"); - for callback in @callbacks - callback() \ No newline at end of file + return (() => { + const result = []; + for (callback of Array.from(this.callbacks)) { + result.push(callback()); + } + return result; + })(); + }); + } + } +}; +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file From 95854a3abbb6a56132e692a08c975f222a3e1ae3 Mon Sep 17 00:00:00 2001 From: decaffeinate Date: Wed, 19 Feb 2020 12:16:07 +0100 Subject: [PATCH 17/24] decaffeinate: Run post-processing cleanups on BrokenLatexFileTests.coffee and 9 other files --- .../acceptance/coffee/BrokenLatexFileTests.js | 5 ++++ test/acceptance/coffee/DeleteOldFilesTest.js | 5 ++++ .../acceptance/coffee/ExampleDocumentTests.js | 22 ++++++++++----- .../acceptance/coffee/SimpleLatexFileTests.js | 9 ++++-- test/acceptance/coffee/SynctexTests.js | 17 ++++++----- test/acceptance/coffee/TimeoutTests.js | 7 ++++- test/acceptance/coffee/UrlCachingTests.js | 28 +++++++++++-------- test/acceptance/coffee/WordcountTests.js | 12 +++++--- test/acceptance/coffee/helpers/Client.js | 19 +++++++++---- test/acceptance/coffee/helpers/ClsiApp.js | 5 ++++ 10 files changed, 91 insertions(+), 38 deletions(-) diff --git a/test/acceptance/coffee/BrokenLatexFileTests.js b/test/acceptance/coffee/BrokenLatexFileTests.js index 5aea625..2db36c1 100644 --- a/test/acceptance/coffee/BrokenLatexFileTests.js +++ b/test/acceptance/coffee/BrokenLatexFileTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/acceptance/coffee/DeleteOldFilesTest.js b/test/acceptance/coffee/DeleteOldFilesTest.js index d6958c2..720b90f 100644 --- a/test/acceptance/coffee/DeleteOldFilesTest.js +++ b/test/acceptance/coffee/DeleteOldFilesTest.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns diff --git a/test/acceptance/coffee/ExampleDocumentTests.js b/test/acceptance/coffee/ExampleDocumentTests.js index fe89970..4c3080f 100644 --- a/test/acceptance/coffee/ExampleDocumentTests.js +++ b/test/acceptance/coffee/ExampleDocumentTests.js @@ -1,3 +1,12 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-path-concat, + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -45,10 +54,10 @@ const compare = function(originalPath, generatedPath, callback) { const proc = ChildProcess.exec(`compare -metric mae ${fixturePath(originalPath)} ${fixturePath(generatedPath)} ${diff_file}`); let stderr = ""; proc.stderr.on("data", chunk => stderr += chunk); - return proc.on("exit", function() { + return proc.on("exit", () => { if (stderr.trim() === "0 (0)") { // remove output diff if test matches expected image - fs.unlink(diff_file, function(err) { + fs.unlink(diff_file, (err) => { if (err) { throw err; } @@ -67,7 +76,7 @@ const checkPdfInfo = function(pdfPath, callback) { let stdout = ""; proc.stdout.on("data", chunk => stdout += chunk); proc.stderr.on("data", chunk => console.log("STDERR", chunk.toString())); - return proc.on("exit", function() { + return proc.on("exit", () => { if (stdout.match(/Optimized:\s+yes/)) { return callback(null, true); } else { @@ -80,7 +89,7 @@ const compareMultiplePages = function(project_id, callback) { if (callback == null) { callback = function(error) {}; } var compareNext = function(page_no, callback) { const path = `tmp/${project_id}-source-${page_no}.png`; - return fs.stat(fixturePath(path), function(error, stat) { + return fs.stat(fixturePath(path), (error, stat) => { if (error != null) { return callback(); } else { @@ -111,7 +120,7 @@ const comparePdf = function(project_id, example_dir, callback) { return callback(); }); } else { - return compareMultiplePages(project_id, function(error) { + return compareMultiplePages(project_id, (error) => { if (error != null) { throw error; } return callback(); }); @@ -138,8 +147,7 @@ const downloadAndComparePdf = function(project_id, example_dir, url, callback) { Client.runServer(4242, fixturePath("examples")); describe("Example Documents", function() { - before(done => - ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on("exit", () => ClsiApp.ensureRunning(done)) + before(function(done) { return ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on("exit", () => ClsiApp.ensureRunning(done)); } ); diff --git a/test/acceptance/coffee/SimpleLatexFileTests.js b/test/acceptance/coffee/SimpleLatexFileTests.js index 79789e8..d774301 100644 --- a/test/acceptance/coffee/SimpleLatexFileTests.js +++ b/test/acceptance/coffee/SimpleLatexFileTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -40,7 +45,7 @@ Hello world it("should provide the pdf for download", function(done) { const pdf = Client.getOutputFile(this.body, "pdf"); - return request.get(pdf.url, function(error, res, body) { + return request.get(pdf.url, (error, res, body) => { res.statusCode.should.equal(200); return done(); }); @@ -48,7 +53,7 @@ Hello world return it("should provide the log for download", function(done) { const log = Client.getOutputFile(this.body, "pdf"); - return request.get(log.url, function(error, res, body) { + return request.get(log.url, (error, res, body) => { res.statusCode.should.equal(200); return done(); }); diff --git a/test/acceptance/coffee/SynctexTests.js b/test/acceptance/coffee/SynctexTests.js index b0ac688..d8879eb 100644 --- a/test/acceptance/coffee/SynctexTests.js +++ b/test/acceptance/coffee/SynctexTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -32,20 +37,18 @@ Hello world }); }); - describe("from code to pdf", () => - it("should return the correct location", function(done) { - return Client.syncFromCode(this.project_id, "main.tex", 3, 5, function(error, pdfPositions) { + describe("from code to pdf", function() { return it("should return the correct location", function(done) { + return Client.syncFromCode(this.project_id, "main.tex", 3, 5, (error, pdfPositions) => { if (error != null) { throw error; } expect(pdfPositions).to.deep.equal({ pdf: [ { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } ] }); return done(); }); - }) + }); } ); - return describe("from pdf to code", () => - it("should return the correct location", function(done) { + return describe("from pdf to code", function() { return it("should return the correct location", function(done) { return Client.syncFromPdf(this.project_id, 1, 100, 200, (error, codePositions) => { if (error != null) { throw error; } expect(codePositions).to.deep.equal({ @@ -53,6 +56,6 @@ Hello world }); return done(); }); - }) + }); } ); }); diff --git a/test/acceptance/coffee/TimeoutTests.js b/test/acceptance/coffee/TimeoutTests.js index 39d18ed..7f8f848 100644 --- a/test/acceptance/coffee/TimeoutTests.js +++ b/test/acceptance/coffee/TimeoutTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -14,7 +19,7 @@ describe("Timed out compile", function() { this.request = { options: { timeout: 10 - }, //seconds + }, // seconds resources: [{ path: "main.tex", content: `\ diff --git a/test/acceptance/coffee/UrlCachingTests.js b/test/acceptance/coffee/UrlCachingTests.js index 3fe947f..7bb0a20 100644 --- a/test/acceptance/coffee/UrlCachingTests.js +++ b/test/acceptance/coffee/UrlCachingTests.js @@ -1,3 +1,9 @@ +/* eslint-disable + no-path-concat, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -63,7 +69,7 @@ describe("Url Caching", function() { }); }); - afterEach(() => Server.getFile.restore()); + afterEach(function() { return Server.getFile.restore(); }); return it("should download the image", function() { return Server.getFile @@ -107,9 +113,9 @@ describe("Url Caching", function() { }); }); - after(() => Server.getFile.restore()); + after(function() { return Server.getFile.restore(); }); - return it("should not download the image again", () => Server.getFile.called.should.equal(false)); + return it("should not download the image again", function() { return Server.getFile.called.should.equal(false); }); }); describe("When an image is in the cache and the last modified date is advanced", function() { @@ -148,9 +154,9 @@ describe("Url Caching", function() { }); }); - afterEach(() => Server.getFile.restore()); + afterEach(function() { return Server.getFile.restore(); }); - return it("should download the image again", () => Server.getFile.called.should.equal(true)); + return it("should download the image again", function() { return Server.getFile.called.should.equal(true); }); }); describe("When an image is in the cache and the last modified date is further in the past", function() { @@ -189,9 +195,9 @@ describe("Url Caching", function() { }); }); - afterEach(() => Server.getFile.restore()); + afterEach(function() { return Server.getFile.restore(); }); - return it("should not download the image again", () => Server.getFile.called.should.equal(false)); + return it("should not download the image again", function() { return Server.getFile.called.should.equal(false); }); }); describe("When an image is in the cache and the last modified date is not specified", function() { @@ -230,9 +236,9 @@ describe("Url Caching", function() { }); }); - afterEach(() => Server.getFile.restore()); + afterEach(function() { return Server.getFile.restore(); }); - return it("should download the image again", () => Server.getFile.called.should.equal(true)); + return it("should download the image again", function() { return Server.getFile.called.should.equal(true); }); }); return describe("After clearing the cache", function() { @@ -271,9 +277,9 @@ describe("Url Caching", function() { }); }); - afterEach(() => Server.getFile.restore()); + afterEach(function() { return Server.getFile.restore(); }); - return it("should download the image again", () => Server.getFile.called.should.equal(true)); + return it("should download the image again", function() { return Server.getFile.called.should.equal(true); }); }); }); diff --git a/test/acceptance/coffee/WordcountTests.js b/test/acceptance/coffee/WordcountTests.js index 8c87a7c..2f81e13 100644 --- a/test/acceptance/coffee/WordcountTests.js +++ b/test/acceptance/coffee/WordcountTests.js @@ -1,3 +1,8 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS102: Remove unnecessary code created because of implicit returns @@ -27,9 +32,8 @@ describe("Syncing", function() { }); }); - return describe("wordcount file", () => - it("should return wordcount info", function(done) { - return Client.wordcount(this.project_id, "main.tex", function(error, result) { + return describe("wordcount file", function() { return it("should return wordcount info", function(done) { + return Client.wordcount(this.project_id, "main.tex", (error, result) => { if (error != null) { throw error; } expect(result).to.deep.equal({ texcount: { @@ -47,6 +51,6 @@ describe("Syncing", function() { }); return done(); }); - }) + }); } ); }); diff --git a/test/acceptance/coffee/helpers/Client.js b/test/acceptance/coffee/helpers/Client.js index 4b85413..50e75d6 100644 --- a/test/acceptance/coffee/helpers/Client.js +++ b/test/acceptance/coffee/helpers/Client.js @@ -1,3 +1,10 @@ +/* eslint-disable + camelcase, + handle-callback-err, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. /* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from @@ -35,7 +42,7 @@ module.exports = (Client = { }, getOutputFile(response, type) { - for (let file of Array.from(response.compile.outputFiles)) { + for (const file of Array.from(response.compile.outputFiles)) { if ((file.type === type) && file.url.match(`output.${type}`)) { return file; } @@ -48,7 +55,7 @@ module.exports = (Client = { const app = express(); app.use(express.static(directory)); console.log("starting test server on", port, host); - return app.listen(port, host).on("error", function(error) { + return app.listen(port, host).on("error", (error) => { console.error("error starting server:", error.message); return process.exit(1); }); @@ -64,7 +71,7 @@ module.exports = (Client = { line, column } - }, function(error, response, body) { + }, (error, response, body) => { if (error != null) { return callback(error); } return callback(null, JSON.parse(body)); }); @@ -78,7 +85,7 @@ module.exports = (Client = { page, h, v } - }, function(error, response, body) { + }, (error, response, body) => { if (error != null) { return callback(error); } return callback(null, JSON.parse(body)); }); @@ -93,7 +100,7 @@ module.exports = (Client = { var entity = entities.pop(); const stat = fs.statSync(`${baseDirectory}/${directory}/${entity}`); if (stat.isDirectory()) { - entities = entities.concat(fs.readdirSync(`${baseDirectory}/${directory}/${entity}`).map(function(subEntity) { + entities = entities.concat(fs.readdirSync(`${baseDirectory}/${directory}/${entity}`).map((subEntity) => { if (subEntity === "main.tex") { rootResourcePath = `${entity}/${subEntity}`; } @@ -139,7 +146,7 @@ module.exports = (Client = { qs: { file } - }, function(error, response, body) { + }, (error, response, body) => { if (error != null) { return callback(error); } return callback(null, JSON.parse(body)); }); diff --git a/test/acceptance/coffee/helpers/ClsiApp.js b/test/acceptance/coffee/helpers/ClsiApp.js index cad63ec..bd3222d 100644 --- a/test/acceptance/coffee/helpers/ClsiApp.js +++ b/test/acceptance/coffee/helpers/ClsiApp.js @@ -1,3 +1,8 @@ +/* eslint-disable + handle-callback-err, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
/* * decaffeinate suggestions: * DS101: Remove unnecessary use of Array.from From 7996f4494247302c1eb5ff459e5ba9e50637fe2a Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:16:11 +0100 Subject: [PATCH 18/24] decaffeinate: rename test/acceptance/coffee to test/acceptance/js --- test/acceptance/{coffee => js}/BrokenLatexFileTests.js | 0 test/acceptance/{coffee => js}/DeleteOldFilesTest.js | 0 test/acceptance/{coffee => js}/ExampleDocumentTests.js | 0 test/acceptance/{coffee => js}/SimpleLatexFileTests.js | 0 test/acceptance/{coffee => js}/SynctexTests.js | 0 test/acceptance/{coffee => js}/TimeoutTests.js | 0 test/acceptance/{coffee => js}/UrlCachingTests.js | 0 test/acceptance/{coffee => js}/WordcountTests.js | 0 test/acceptance/{coffee => js}/helpers/Client.js | 0 test/acceptance/{coffee => js}/helpers/ClsiApp.js | 0 10 files changed, 0 insertions(+), 0 deletions(-) rename test/acceptance/{coffee => js}/BrokenLatexFileTests.js (100%) rename test/acceptance/{coffee => js}/DeleteOldFilesTest.js (100%) rename test/acceptance/{coffee => js}/ExampleDocumentTests.js (100%) rename test/acceptance/{coffee => js}/SimpleLatexFileTests.js (100%) rename test/acceptance/{coffee => js}/SynctexTests.js (100%) rename test/acceptance/{coffee => js}/TimeoutTests.js (100%) rename test/acceptance/{coffee => js}/UrlCachingTests.js (100%) rename test/acceptance/{coffee => js}/WordcountTests.js (100%) rename test/acceptance/{coffee => js}/helpers/Client.js (100%) rename test/acceptance/{coffee => js}/helpers/ClsiApp.js (100%) diff --git a/test/acceptance/coffee/BrokenLatexFileTests.js b/test/acceptance/js/BrokenLatexFileTests.js similarity index 100% rename from test/acceptance/coffee/BrokenLatexFileTests.js rename to test/acceptance/js/BrokenLatexFileTests.js diff --git a/test/acceptance/coffee/DeleteOldFilesTest.js b/test/acceptance/js/DeleteOldFilesTest.js similarity index 100% rename from test/acceptance/coffee/DeleteOldFilesTest.js rename to test/acceptance/js/DeleteOldFilesTest.js diff --git a/test/acceptance/coffee/ExampleDocumentTests.js b/test/acceptance/js/ExampleDocumentTests.js similarity index 100% rename from test/acceptance/coffee/ExampleDocumentTests.js rename to test/acceptance/js/ExampleDocumentTests.js diff --git a/test/acceptance/coffee/SimpleLatexFileTests.js b/test/acceptance/js/SimpleLatexFileTests.js similarity index 100% rename from test/acceptance/coffee/SimpleLatexFileTests.js rename to test/acceptance/js/SimpleLatexFileTests.js diff --git a/test/acceptance/coffee/SynctexTests.js b/test/acceptance/js/SynctexTests.js similarity index 100% rename from test/acceptance/coffee/SynctexTests.js rename to test/acceptance/js/SynctexTests.js diff --git a/test/acceptance/coffee/TimeoutTests.js b/test/acceptance/js/TimeoutTests.js similarity index 100% rename from test/acceptance/coffee/TimeoutTests.js rename to test/acceptance/js/TimeoutTests.js diff --git a/test/acceptance/coffee/UrlCachingTests.js b/test/acceptance/js/UrlCachingTests.js similarity index 100% rename from test/acceptance/coffee/UrlCachingTests.js rename to test/acceptance/js/UrlCachingTests.js diff --git a/test/acceptance/coffee/WordcountTests.js b/test/acceptance/js/WordcountTests.js similarity index 100% rename from test/acceptance/coffee/WordcountTests.js rename to test/acceptance/js/WordcountTests.js diff --git a/test/acceptance/coffee/helpers/Client.js b/test/acceptance/js/helpers/Client.js similarity index 100% rename from test/acceptance/coffee/helpers/Client.js rename to 
test/acceptance/js/helpers/Client.js diff --git a/test/acceptance/coffee/helpers/ClsiApp.js b/test/acceptance/js/helpers/ClsiApp.js similarity index 100% rename from test/acceptance/coffee/helpers/ClsiApp.js rename to test/acceptance/js/helpers/ClsiApp.js From 71a50dd11fb9f235e48c9a9d5f14b2a3c88ce177 Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:16:14 +0100 Subject: [PATCH 19/24] prettier: convert test/acceptance decaffeinated files to Prettier format --- test/acceptance/js/BrokenLatexFileTests.js | 115 ++--- test/acceptance/js/DeleteOldFilesTest.js | 93 ++-- test/acceptance/js/ExampleDocumentTests.js | 390 ++++++++++------ test/acceptance/js/SimpleLatexFileTests.js | 97 ++-- test/acceptance/js/SynctexTests.js | 116 +++-- test/acceptance/js/TimeoutTests.js | 75 +-- test/acceptance/js/UrlCachingTests.js | 503 ++++++++++++--------- test/acceptance/js/WordcountTests.js | 102 +++-- test/acceptance/js/helpers/Client.js | 306 +++++++------ test/acceptance/js/helpers/ClsiApp.js | 79 ++-- 10 files changed, 1105 insertions(+), 771 deletions(-) diff --git a/test/acceptance/js/BrokenLatexFileTests.js b/test/acceptance/js/BrokenLatexFileTests.js index 2db36c1..b34d23c 100644 --- a/test/acceptance/js/BrokenLatexFileTests.js +++ b/test/acceptance/js/BrokenLatexFileTests.js @@ -8,68 +8,81 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); -const request = require("request"); -require("chai").should(); -const ClsiApp = require("./helpers/ClsiApp"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const ClsiApp = require('./helpers/ClsiApp') -describe("Broken LaTeX file", function() { - before(function(done){ - this.broken_request = { - resources: [{ - path: "main.tex", - content: `\ +describe('Broken LaTeX file', function() { + before(function(done) { + this.broken_request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{articl % :( \\begin{documen % :( Broken \\end{documen % :(\ ` - } - ] - }; - this.correct_request = { - resources: [{ - path: "main.tex", - content: `\ + } + ] + } + this.correct_request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\begin{document} Hello world \\end{document}\ ` - } - ] - }; - return ClsiApp.ensureRunning(done); - }); - - describe("on first run", function() { - before(function(done) { - this.project_id = Client.randomId(); - return Client.compile(this.project_id, this.broken_request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); - }); + } + ] + } + return ClsiApp.ensureRunning(done) + }) - return it("should return a failure status", function() { - return this.body.compile.status.should.equal("failure"); - }); - }); + describe('on first run', function() { + before(function(done) { + this.project_id = Client.randomId() + return Client.compile( + this.project_id, + this.broken_request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) - return describe("on second run", function() { - before(function(done) { - this.project_id = Client.randomId(); - return Client.compile(this.project_id, this.correct_request, () => { - return Client.compile(this.project_id, this.broken_request, (error, res, body) => { - this.error = error; - this.res = res; - this.body = body; - return 
done(); - }); - }); - }); + return it('should return a failure status', function() { + return this.body.compile.status.should.equal('failure') + }) + }) - return it("should return a failure status", function() { - return this.body.compile.status.should.equal("failure"); - }); - }); -}); - - + return describe('on second run', function() { + before(function(done) { + this.project_id = Client.randomId() + return Client.compile(this.project_id, this.correct_request, () => { + return Client.compile( + this.project_id, + this.broken_request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + return it('should return a failure status', function() { + return this.body.compile.status.should.equal('failure') + }) + }) +}) diff --git a/test/acceptance/js/DeleteOldFilesTest.js b/test/acceptance/js/DeleteOldFilesTest.js index 720b90f..83d7c96 100644 --- a/test/acceptance/js/DeleteOldFilesTest.js +++ b/test/acceptance/js/DeleteOldFilesTest.js @@ -8,53 +8,66 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); -const request = require("request"); -require("chai").should(); -const ClsiApp = require("./helpers/ClsiApp"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const ClsiApp = require('./helpers/ClsiApp') -describe("Deleting Old Files", function() { - before(function(done){ - this.request = { - resources: [{ - path: "main.tex", - content: `\ +describe('Deleting Old Files', function() { + before(function(done) { + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\begin{document} Hello world \\end{document}\ ` - } - ] - }; - return ClsiApp.ensureRunning(done); - }); + } + ] + } + return ClsiApp.ensureRunning(done) + }) - return describe("on first run", function() { - before(function(done) { - this.project_id = Client.randomId(); - return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); - }); + return describe('on first run', function() { + before(function(done) { + this.project_id = Client.randomId() + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) - it("should return a success status", function() { - return this.body.compile.status.should.equal("success"); - }); + it('should return a success status', function() { + return this.body.compile.status.should.equal('success') + }) - return describe("after file has been deleted", function() { - before(function(done) { - this.request.resources = []; - return Client.compile(this.project_id, this.request, (error, res, body) => { - this.error = error; - this.res = res; - this.body = body; - return done(); - }); - }); - - return it("should return a failure status", function() { - return this.body.compile.status.should.equal("failure"); - }); - }); - }); -}); + return describe('after file has been deleted', function() { + before(function(done) { + this.request.resources = [] + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + return it('should return a failure status', function() { + return 
this.body.compile.status.should.equal('failure') + }) + }) + }) +}) diff --git a/test/acceptance/js/ExampleDocumentTests.js b/test/acceptance/js/ExampleDocumentTests.js index 4c3080f..110b5d6 100644 --- a/test/acceptance/js/ExampleDocumentTests.js +++ b/test/acceptance/js/ExampleDocumentTests.js @@ -15,176 +15,266 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); -const request = require("request"); -require("chai").should(); -const fs = require("fs"); -const ChildProcess = require("child_process"); -const ClsiApp = require("./helpers/ClsiApp"); -const logger = require("logger-sharelatex"); -const Path = require("path"); -const fixturePath = path => Path.normalize(__dirname + "/../fixtures/" + path); -const process = require("process"); -console.log(process.pid, process.ppid, process.getuid(),process.getgroups(), "PID"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const fs = require('fs') +const ChildProcess = require('child_process') +const ClsiApp = require('./helpers/ClsiApp') +const logger = require('logger-sharelatex') +const Path = require('path') +const fixturePath = path => Path.normalize(__dirname + '/../fixtures/' + path) +const process = require('process') +console.log( + process.pid, + process.ppid, + process.getuid(), + process.getgroups(), + 'PID' +) try { - console.log("creating tmp directory", fixturePath("tmp")); - fs.mkdirSync(fixturePath("tmp")); + console.log('creating tmp directory', fixturePath('tmp')) + fs.mkdirSync(fixturePath('tmp')) } catch (error) { - const err = error; - console.log(err, fixturePath("tmp"), "unable to create fixture tmp path"); + const err = error + console.log(err, fixturePath('tmp'), 'unable to create fixture tmp path') } -const MOCHA_LATEX_TIMEOUT = 60 * 1000; +const MOCHA_LATEX_TIMEOUT = 60 * 1000 const convertToPng = function(pdfPath, pngPath, callback) { - if (callback == null) { callback = function(error) {}; } - const command = `convert ${fixturePath(pdfPath)} ${fixturePath(pngPath)}`; - console.log("COMMAND"); - console.log(command); - const convert = ChildProcess.exec(command); - const stdout = ""; - convert.stdout.on("data", chunk => console.log("STDOUT", chunk.toString())); - convert.stderr.on("data", chunk => console.log("STDERR", chunk.toString())); - return convert.on("exit", () => callback()); -}; + if (callback == null) { + callback = function(error) {} + } + const command = `convert ${fixturePath(pdfPath)} ${fixturePath(pngPath)}` + console.log('COMMAND') + console.log(command) + const convert = ChildProcess.exec(command) + const stdout = '' + convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString())) + convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString())) + return convert.on('exit', () => callback()) +} const compare = function(originalPath, generatedPath, callback) { - if (callback == null) { callback = function(error, same) {}; } - const diff_file = `${fixturePath(generatedPath)}-diff.png`; - const proc = ChildProcess.exec(`compare -metric mae ${fixturePath(originalPath)} ${fixturePath(generatedPath)} ${diff_file}`); - let stderr = ""; - proc.stderr.on("data", chunk => stderr += chunk); - return proc.on("exit", () => { - if (stderr.trim() === "0 (0)") { - // remove output diff if test matches expected image - fs.unlink(diff_file, (err) => { - if (err) { - throw err; - } - }); - return 
callback(null, true); - } else { - console.log("compare result", stderr); - return callback(null, false); - } - }); -}; + if (callback == null) { + callback = function(error, same) {} + } + const diff_file = `${fixturePath(generatedPath)}-diff.png` + const proc = ChildProcess.exec( + `compare -metric mae ${fixturePath(originalPath)} ${fixturePath( + generatedPath + )} ${diff_file}` + ) + let stderr = '' + proc.stderr.on('data', chunk => (stderr += chunk)) + return proc.on('exit', () => { + if (stderr.trim() === '0 (0)') { + // remove output diff if test matches expected image + fs.unlink(diff_file, err => { + if (err) { + throw err + } + }) + return callback(null, true) + } else { + console.log('compare result', stderr) + return callback(null, false) + } + }) +} const checkPdfInfo = function(pdfPath, callback) { - if (callback == null) { callback = function(error, output) {}; } - const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`); - let stdout = ""; - proc.stdout.on("data", chunk => stdout += chunk); - proc.stderr.on("data", chunk => console.log("STDERR", chunk.toString())); - return proc.on("exit", () => { - if (stdout.match(/Optimized:\s+yes/)) { - return callback(null, true); - } else { - return callback(null, false); - } - }); -}; + if (callback == null) { + callback = function(error, output) {} + } + const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`) + let stdout = '' + proc.stdout.on('data', chunk => (stdout += chunk)) + proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString())) + return proc.on('exit', () => { + if (stdout.match(/Optimized:\s+yes/)) { + return callback(null, true) + } else { + return callback(null, false) + } + }) +} const compareMultiplePages = function(project_id, callback) { - if (callback == null) { callback = function(error) {}; } - var compareNext = function(page_no, callback) { - const path = `tmp/${project_id}-source-${page_no}.png`; - return fs.stat(fixturePath(path), (error, stat) => { - if (error != null) { - return callback(); - } else { - return compare(`tmp/${project_id}-source-${page_no}.png`, `tmp/${project_id}-generated-${page_no}.png`, (error, same) => { - if (error != null) { throw error; } - same.should.equal(true); - return compareNext(page_no + 1, callback); - }); - } - }); - }; - return compareNext(0, callback); -}; + if (callback == null) { + callback = function(error) {} + } + var compareNext = function(page_no, callback) { + const path = `tmp/${project_id}-source-${page_no}.png` + return fs.stat(fixturePath(path), (error, stat) => { + if (error != null) { + return callback() + } else { + return compare( + `tmp/${project_id}-source-${page_no}.png`, + `tmp/${project_id}-generated-${page_no}.png`, + (error, same) => { + if (error != null) { + throw error + } + same.should.equal(true) + return compareNext(page_no + 1, callback) + } + ) + } + }) + } + return compareNext(0, callback) +} const comparePdf = function(project_id, example_dir, callback) { - if (callback == null) { callback = function(error) {}; } - console.log("CONVERT"); - console.log(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`); - return convertToPng(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`, error => { - if (error != null) { throw error; } - return convertToPng(`examples/${example_dir}/output.pdf`, `tmp/${project_id}-source.png`, error => { - if (error != null) { throw error; } - return fs.stat(fixturePath(`tmp/${project_id}-source-0.png`), (error, stat) => { - if (error != null) { - return 
compare(`tmp/${project_id}-source.png`, `tmp/${project_id}-generated.png`, (error, same) => { - if (error != null) { throw error; } - same.should.equal(true); - return callback(); - }); - } else { - return compareMultiplePages(project_id, (error) => { - if (error != null) { throw error; } - return callback(); - }); - } - }); - }); - }); -}; + if (callback == null) { + callback = function(error) {} + } + console.log('CONVERT') + console.log(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`) + return convertToPng( + `tmp/${project_id}.pdf`, + `tmp/${project_id}-generated.png`, + error => { + if (error != null) { + throw error + } + return convertToPng( + `examples/${example_dir}/output.pdf`, + `tmp/${project_id}-source.png`, + error => { + if (error != null) { + throw error + } + return fs.stat( + fixturePath(`tmp/${project_id}-source-0.png`), + (error, stat) => { + if (error != null) { + return compare( + `tmp/${project_id}-source.png`, + `tmp/${project_id}-generated.png`, + (error, same) => { + if (error != null) { + throw error + } + same.should.equal(true) + return callback() + } + ) + } else { + return compareMultiplePages(project_id, error => { + if (error != null) { + throw error + } + return callback() + }) + } + } + ) + } + ) + } + ) +} const downloadAndComparePdf = function(project_id, example_dir, url, callback) { - if (callback == null) { callback = function(error) {}; } - const writeStream = fs.createWriteStream(fixturePath(`tmp/${project_id}.pdf`)); - request.get(url).pipe(writeStream); - console.log("writing file out", fixturePath(`tmp/${project_id}.pdf`)); - return writeStream.on("close", () => { - return checkPdfInfo(`tmp/${project_id}.pdf`, (error, optimised) => { - if (error != null) { throw error; } - optimised.should.equal(true); - return comparePdf(project_id, example_dir, callback); - }); - }); -}; + if (callback == null) { + callback = function(error) {} + } + const writeStream = fs.createWriteStream(fixturePath(`tmp/${project_id}.pdf`)) + request.get(url).pipe(writeStream) + console.log('writing file out', fixturePath(`tmp/${project_id}.pdf`)) + return writeStream.on('close', () => { + return checkPdfInfo(`tmp/${project_id}.pdf`, (error, optimised) => { + if (error != null) { + throw error + } + optimised.should.equal(true) + return comparePdf(project_id, example_dir, callback) + }) + }) +} -Client.runServer(4242, fixturePath("examples")); +Client.runServer(4242, fixturePath('examples')) -describe("Example Documents", function() { - before(function(done) { return ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on("exit", () => ClsiApp.ensureRunning(done)); } - ); +describe('Example Documents', function() { + before(function(done) { + return ChildProcess.exec('rm test/acceptance/fixtures/tmp/*').on( + 'exit', + () => ClsiApp.ensureRunning(done) + ) + }) + return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir => + (example_dir => + describe(example_dir, function() { + before(function() { + return (this.project_id = Client.randomId() + '_' + example_dir) + }) - return Array.from(fs.readdirSync(fixturePath("examples"))).map((example_dir) => - (example_dir => - describe(example_dir, function() { - before(function() { - return this.project_id = Client.randomId() + "_" + example_dir; - }); - - it("should generate the correct pdf", function(done) { - this.timeout(MOCHA_LATEX_TIMEOUT); - return Client.compileDirectory(this.project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => { - if (error || (__guard__(body != null 
? body.compile : undefined, x => x.status) === "failure")) { - console.log("DEBUG: error", error, "body", JSON.stringify(body)); - } - const pdf = Client.getOutputFile(body, "pdf"); - return downloadAndComparePdf(this.project_id, example_dir, pdf.url, done); - }); - }); - - return it("should generate the correct pdf on the second run as well", function(done) { - this.timeout(MOCHA_LATEX_TIMEOUT); - return Client.compileDirectory(this.project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => { - if (error || (__guard__(body != null ? body.compile : undefined, x => x.status) === "failure")) { - console.log("DEBUG: error", error, "body", JSON.stringify(body)); - } - const pdf = Client.getOutputFile(body, "pdf"); - return downloadAndComparePdf(this.project_id, example_dir, pdf.url, done); - }); - }); - }) - )(example_dir)); -}); - + it('should generate the correct pdf', function(done) { + this.timeout(MOCHA_LATEX_TIMEOUT) + return Client.compileDirectory( + this.project_id, + fixturePath('examples'), + example_dir, + 4242, + (error, res, body) => { + if ( + error || + __guard__( + body != null ? body.compile : undefined, + x => x.status + ) === 'failure' + ) { + console.log('DEBUG: error', error, 'body', JSON.stringify(body)) + } + const pdf = Client.getOutputFile(body, 'pdf') + return downloadAndComparePdf( + this.project_id, + example_dir, + pdf.url, + done + ) + } + ) + }) + return it('should generate the correct pdf on the second run as well', function(done) { + this.timeout(MOCHA_LATEX_TIMEOUT) + return Client.compileDirectory( + this.project_id, + fixturePath('examples'), + example_dir, + 4242, + (error, res, body) => { + if ( + error || + __guard__( + body != null ? body.compile : undefined, + x => x.status + ) === 'failure' + ) { + console.log('DEBUG: error', error, 'body', JSON.stringify(body)) + } + const pdf = Client.getOutputFile(body, 'pdf') + return downloadAndComparePdf( + this.project_id, + example_dir, + pdf.url, + done + ) + } + ) + }) + }))(example_dir) + ) +}) function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/test/acceptance/js/SimpleLatexFileTests.js b/test/acceptance/js/SimpleLatexFileTests.js index d774301..447e1b6 100644 --- a/test/acceptance/js/SimpleLatexFileTests.js +++ b/test/acceptance/js/SimpleLatexFileTests.js @@ -8,55 +8,64 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); -const request = require("request"); -require("chai").should(); -const ClsiApp = require("./helpers/ClsiApp"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const ClsiApp = require('./helpers/ClsiApp') -describe("Simple LaTeX file", function() { - before(function(done) { - this.project_id = Client.randomId(); - this.request = { - resources: [{ - path: "main.tex", - content: `\ +describe('Simple LaTeX file', function() { + before(function(done) { + this.project_id = Client.randomId() + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\begin{document} Hello world \\end{document}\ ` - } - ] - }; - return ClsiApp.ensureRunning(() => { - return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); - }); - }); + } + ] + } + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) - it("should return the PDF", function() { - const pdf = Client.getOutputFile(this.body, "pdf"); - return pdf.type.should.equal("pdf"); - }); - - it("should return the log", function() { - const log = Client.getOutputFile(this.body, "log"); - return log.type.should.equal("log"); - }); + it('should return the PDF', function() { + const pdf = Client.getOutputFile(this.body, 'pdf') + return pdf.type.should.equal('pdf') + }) - it("should provide the pdf for download", function(done) { - const pdf = Client.getOutputFile(this.body, "pdf"); - return request.get(pdf.url, (error, res, body) => { - res.statusCode.should.equal(200); - return done(); - }); - }); - - return it("should provide the log for download", function(done) { - const log = Client.getOutputFile(this.body, "pdf"); - return request.get(log.url, (error, res, body) => { - res.statusCode.should.equal(200); - return done(); - }); - }); -}); - + it('should return the log', function() { + const log = Client.getOutputFile(this.body, 'log') + return log.type.should.equal('log') + }) + + it('should provide the pdf for download', function(done) { + const pdf = Client.getOutputFile(this.body, 'pdf') + return request.get(pdf.url, (error, res, body) => { + res.statusCode.should.equal(200) + return done() + }) + }) + + return it('should provide the log for download', function(done) { + const log = Client.getOutputFile(this.body, 'pdf') + return request.get(log.url, (error, res, body) => { + res.statusCode.should.equal(200) + return done() + }) + }) +}) diff --git a/test/acceptance/js/SynctexTests.js b/test/acceptance/js/SynctexTests.js index d8879eb..4860c60 100644 --- a/test/acceptance/js/SynctexTests.js +++ b/test/acceptance/js/SynctexTests.js @@ -9,53 +9,83 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); 
-const request = require("request"); -require("chai").should(); -const { expect } = require("chai"); -const ClsiApp = require("./helpers/ClsiApp"); -const crypto = require("crypto"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const { expect } = require('chai') +const ClsiApp = require('./helpers/ClsiApp') +const crypto = require('crypto') -describe("Syncing", function() { - before(function(done) { - const content = `\ +describe('Syncing', function() { + before(function(done) { + const content = `\ \\documentclass{article} \\begin{document} Hello world \\end{document}\ -`; - this.request = { - resources: [{ - path: "main.tex", - content - } - ] - }; - this.project_id = Client.randomId(); - return ClsiApp.ensureRunning(() => { - return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); - }); - }); +` + this.request = { + resources: [ + { + path: 'main.tex', + content + } + ] + } + this.project_id = Client.randomId() + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) - describe("from code to pdf", function() { return it("should return the correct location", function(done) { - return Client.syncFromCode(this.project_id, "main.tex", 3, 5, (error, pdfPositions) => { - if (error != null) { throw error; } - expect(pdfPositions).to.deep.equal({ - pdf: [ { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } ] - }); - return done(); - }); - }); } - ); + describe('from code to pdf', function() { + return it('should return the correct location', function(done) { + return Client.syncFromCode( + this.project_id, + 'main.tex', + 3, + 5, + (error, pdfPositions) => { + if (error != null) { + throw error + } + expect(pdfPositions).to.deep.equal({ + pdf: [ + { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } + ] + }) + return done() + } + ) + }) + }) - return describe("from pdf to code", function() { return it("should return the correct location", function(done) { - return Client.syncFromPdf(this.project_id, 1, 100, 200, (error, codePositions) => { - if (error != null) { throw error; } - expect(codePositions).to.deep.equal({ - code: [ { file: 'main.tex', line: 3, column: -1 } ] - }); - return done(); - }); - }); } - ); -}); + return describe('from pdf to code', function() { + return it('should return the correct location', function(done) { + return Client.syncFromPdf( + this.project_id, + 1, + 100, + 200, + (error, codePositions) => { + if (error != null) { + throw error + } + expect(codePositions).to.deep.equal({ + code: [{ file: 'main.tex', line: 3, column: -1 }] + }) + return done() + } + ) + }) + }) +}) diff --git a/test/acceptance/js/TimeoutTests.js b/test/acceptance/js/TimeoutTests.js index 7f8f848..f6812e8 100644 --- a/test/acceptance/js/TimeoutTests.js +++ b/test/acceptance/js/TimeoutTests.js @@ -8,46 +8,55 @@ * DS102: Remove unnecessary code created because of implicit returns * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); -const request = require("request"); -require("chai").should(); -const ClsiApp = require("./helpers/ClsiApp"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const ClsiApp = 
require('./helpers/ClsiApp') - -describe("Timed out compile", function() { - before(function(done) { - this.request = { - options: { - timeout: 10 - }, // seconds - resources: [{ - path: "main.tex", - content: `\ +describe('Timed out compile', function() { + before(function(done) { + this.request = { + options: { + timeout: 10 + }, // seconds + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\begin{document} \\def\\x{Hello!\\par\\x} \\x \\end{document}\ ` - } - ] - }; - this.project_id = Client.randomId(); - return ClsiApp.ensureRunning(() => { - return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); - }); - }); + } + ] + } + this.project_id = Client.randomId() + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) - it("should return a timeout error", function() { - return this.body.compile.error.should.equal("container timed out"); - }); + it('should return a timeout error', function() { + return this.body.compile.error.should.equal('container timed out') + }) - it("should return a timedout status", function() { - return this.body.compile.status.should.equal("timedout"); - }); + it('should return a timedout status', function() { + return this.body.compile.status.should.equal('timedout') + }) - return it("should return the log output file name", function() { - const outputFilePaths = this.body.compile.outputFiles.map(x => x.path); - return outputFilePaths.should.include('output.log'); - }); -}); + return it('should return the log output file name', function() { + const outputFilePaths = this.body.compile.outputFiles.map(x => x.path) + return outputFilePaths.should.include('output.log') + }) +}) diff --git a/test/acceptance/js/UrlCachingTests.js b/test/acceptance/js/UrlCachingTests.js index 7bb0a20..4d62497 100644 --- a/test/acceptance/js/UrlCachingTests.js +++ b/test/acceptance/js/UrlCachingTests.js @@ -10,277 +10,364 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); -const request = require("request"); -require("chai").should(); -const sinon = require("sinon"); -const ClsiApp = require("./helpers/ClsiApp"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const sinon = require('sinon') +const ClsiApp = require('./helpers/ClsiApp') -const host = "localhost"; +const host = 'localhost' const Server = { - run() { - const express = require("express"); - const app = express(); + run() { + const express = require('express') + const app = express() - const staticServer = express.static(__dirname + "/../fixtures/"); - app.get("/:random_id/*", (req, res, next) => { - this.getFile(req.url); - req.url = `/${req.params[0]}`; - return staticServer(req, res, next); - }); + const staticServer = express.static(__dirname + '/../fixtures/') + app.get('/:random_id/*', (req, res, next) => { + this.getFile(req.url) + req.url = `/${req.params[0]}` + return staticServer(req, res, next) + }) - return app.listen(31415, host); - }, + return app.listen(31415, host) + }, - getFile() {}, + getFile() {}, - randomId() { - return Math.random().toString(16).slice(2); - } -}; + randomId() { + return Math.random() + .toString(16) + .slice(2) + } 
+} -Server.run(); +Server.run() -describe("Url Caching", function() { - describe("Downloading an image for the first time", function() { - before(function(done) { - this.project_id = Client.randomId(); - this.file = `${Server.randomId()}/lion.png`; - this.request = { - resources: [{ - path: "main.tex", - content: `\ +describe('Url Caching', function() { + describe('Downloading an image for the first time', function() { + before(function(done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\usepackage{graphicx} \\begin{document} \\includegraphics{lion.png} \\end{document}\ ` - }, { - path: "lion.png", - url: `http://${host}:31415/${this.file}` - }] - }; + }, + { + path: 'lion.png', + url: `http://${host}:31415/${this.file}` + } + ] + } - sinon.spy(Server, "getFile"); - return ClsiApp.ensureRunning(() => { - return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); - }); - }); + sinon.spy(Server, 'getFile') + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) - afterEach(function() { return Server.getFile.restore(); }); + afterEach(function() { + return Server.getFile.restore() + }) - return it("should download the image", function() { - return Server.getFile - .calledWith(`/${this.file}`) - .should.equal(true); - }); - }); - - describe("When an image is in the cache and the last modified date is unchanged", function() { - before(function(done) { - this.project_id = Client.randomId(); - this.file = `${Server.randomId()}/lion.png`; - this.request = { - resources: [{ - path: "main.tex", - content: `\ + return it('should download the image', function() { + return Server.getFile.calledWith(`/${this.file}`).should.equal(true) + }) + }) + + describe('When an image is in the cache and the last modified date is unchanged', function() { + before(function(done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\usepackage{graphicx} \\begin{document} \\includegraphics{lion.png} \\end{document}\ ` - }, (this.image_resource = { - path: "lion.png", - url: `http://${host}:31415/${this.file}`, - modified: Date.now() - })] - }; + }, + (this.image_resource = { + path: 'lion.png', + url: `http://${host}:31415/${this.file}`, + modified: Date.now() + }) + ] + } - return Client.compile(this.project_id, this.request, (error, res, body) => { - this.error = error; - this.res = res; - this.body = body; - sinon.spy(Server, "getFile"); - return Client.compile(this.project_id, this.request, (error1, res1, body1) => { - this.error = error1; - this.res = res1; - this.body = body1; - return done(); - }); - }); - }); + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) - after(function() { return Server.getFile.restore(); }); + after(function() { + return Server.getFile.restore() + }) - return it("should 
not download the image again", function() { return Server.getFile.called.should.equal(false); }); - }); + return it('should not download the image again', function() { + return Server.getFile.called.should.equal(false) + }) + }) - describe("When an image is in the cache and the last modified date is advanced", function() { - before(function(done) { - this.project_id = Client.randomId(); - this.file = `${Server.randomId()}/lion.png`; - this.request = { - resources: [{ - path: "main.tex", - content: `\ + describe('When an image is in the cache and the last modified date is advanced', function() { + before(function(done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\usepackage{graphicx} \\begin{document} \\includegraphics{lion.png} \\end{document}\ ` - }, (this.image_resource = { - path: "lion.png", - url: `http://${host}:31415/${this.file}`, - modified: (this.last_modified = Date.now()) - })] - }; + }, + (this.image_resource = { + path: 'lion.png', + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + }) + ] + } - return Client.compile(this.project_id, this.request, (error, res, body) => { - this.error = error; - this.res = res; - this.body = body; - sinon.spy(Server, "getFile"); - this.image_resource.modified = new Date(this.last_modified + 3000); - return Client.compile(this.project_id, this.request, (error1, res1, body1) => { - this.error = error1; - this.res = res1; - this.body = body1; - return done(); - }); - }); - }); + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + this.image_resource.modified = new Date(this.last_modified + 3000) + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) - afterEach(function() { return Server.getFile.restore(); }); + afterEach(function() { + return Server.getFile.restore() + }) - return it("should download the image again", function() { return Server.getFile.called.should.equal(true); }); - }); + return it('should download the image again', function() { + return Server.getFile.called.should.equal(true) + }) + }) - describe("When an image is in the cache and the last modified date is further in the past", function() { - before(function(done) { - this.project_id = Client.randomId(); - this.file = `${Server.randomId()}/lion.png`; - this.request = { - resources: [{ - path: "main.tex", - content: `\ + describe('When an image is in the cache and the last modified date is further in the past', function() { + before(function(done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\usepackage{graphicx} \\begin{document} \\includegraphics{lion.png} \\end{document}\ ` - }, (this.image_resource = { - path: "lion.png", - url: `http://${host}:31415/${this.file}`, - modified: (this.last_modified = Date.now()) - })] - }; + }, + (this.image_resource = { + path: 'lion.png', + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + }) + ] + } - return Client.compile(this.project_id, this.request, (error, res, body) => { - this.error = error; - this.res = res; - this.body = body; - 
sinon.spy(Server, "getFile"); - this.image_resource.modified = new Date(this.last_modified - 3000); - return Client.compile(this.project_id, this.request, (error1, res1, body1) => { - this.error = error1; - this.res = res1; - this.body = body1; - return done(); - }); - }); - }); + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + this.image_resource.modified = new Date(this.last_modified - 3000) + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) - afterEach(function() { return Server.getFile.restore(); }); + afterEach(function() { + return Server.getFile.restore() + }) - return it("should not download the image again", function() { return Server.getFile.called.should.equal(false); }); - }); + return it('should not download the image again', function() { + return Server.getFile.called.should.equal(false) + }) + }) - describe("When an image is in the cache and the last modified date is not specified", function() { - before(function(done) { - this.project_id = Client.randomId(); - this.file = `${Server.randomId()}/lion.png`; - this.request = { - resources: [{ - path: "main.tex", - content: `\ + describe('When an image is in the cache and the last modified date is not specified', function() { + before(function(done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\usepackage{graphicx} \\begin{document} \\includegraphics{lion.png} \\end{document}\ ` - }, (this.image_resource = { - path: "lion.png", - url: `http://${host}:31415/${this.file}`, - modified: (this.last_modified = Date.now()) - })] - }; + }, + (this.image_resource = { + path: 'lion.png', + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + }) + ] + } - return Client.compile(this.project_id, this.request, (error, res, body) => { - this.error = error; - this.res = res; - this.body = body; - sinon.spy(Server, "getFile"); - delete this.image_resource.modified; - return Client.compile(this.project_id, this.request, (error1, res1, body1) => { - this.error = error1; - this.res = res1; - this.body = body1; - return done(); - }); - }); - }); + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + delete this.image_resource.modified + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) - afterEach(function() { return Server.getFile.restore(); }); + afterEach(function() { + return Server.getFile.restore() + }) - return it("should download the image again", function() { return Server.getFile.called.should.equal(true); }); - }); - - return describe("After clearing the cache", function() { - before(function(done) { - this.project_id = Client.randomId(); - this.file = `${Server.randomId()}/lion.png`; - this.request = { - resources: [{ - path: "main.tex", - content: `\ + return it('should download the image again', function() { + return Server.getFile.called.should.equal(true) + }) + }) + + return describe('After clearing the cache', function() { + before(function(done) { + 
this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ \\documentclass{article} \\usepackage{graphicx} \\begin{document} \\includegraphics{lion.png} \\end{document}\ ` - }, (this.image_resource = { - path: "lion.png", - url: `http://${host}:31415/${this.file}`, - modified: (this.last_modified = Date.now()) - })] - }; + }, + (this.image_resource = { + path: 'lion.png', + url: `http://${host}:31415/${this.file}`, + modified: (this.last_modified = Date.now()) + }) + ] + } - return Client.compile(this.project_id, this.request, error => { - if (error != null) { throw error; } - return Client.clearCache(this.project_id, (error, res, body) => { - if (error != null) { throw error; } - sinon.spy(Server, "getFile"); - return Client.compile(this.project_id, this.request, (error1, res1, body1) => { - this.error = error1; - this.res = res1; - this.body = body1; - return done(); - }); - }); - }); - }); + return Client.compile(this.project_id, this.request, error => { + if (error != null) { + throw error + } + return Client.clearCache(this.project_id, (error, res, body) => { + if (error != null) { + throw error + } + sinon.spy(Server, 'getFile') + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + }) + }) + }) - afterEach(function() { return Server.getFile.restore(); }); + afterEach(function() { + return Server.getFile.restore() + }) - return it("should download the image again", function() { return Server.getFile.called.should.equal(true); }); - }); -}); - - + return it('should download the image again', function() { + return Server.getFile.called.should.equal(true) + }) + }) +}) diff --git a/test/acceptance/js/WordcountTests.js b/test/acceptance/js/WordcountTests.js index 2f81e13..8721857 100644 --- a/test/acceptance/js/WordcountTests.js +++ b/test/acceptance/js/WordcountTests.js @@ -9,48 +9,64 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const Client = require("./helpers/Client"); -const request = require("request"); -require("chai").should(); -const { expect } = require("chai"); -const path = require("path"); -const fs = require("fs"); -const ClsiApp = require("./helpers/ClsiApp"); +const Client = require('./helpers/Client') +const request = require('request') +require('chai').should() +const { expect } = require('chai') +const path = require('path') +const fs = require('fs') +const ClsiApp = require('./helpers/ClsiApp') -describe("Syncing", function() { - before(function(done) { - this.request = { - resources: [{ - path: "main.tex", - content: fs.readFileSync(path.join(__dirname,"../fixtures/naugty_strings.txt"),"utf-8") - } - ] - }; - this.project_id = Client.randomId(); - return ClsiApp.ensureRunning(() => { - return Client.compile(this.project_id, this.request, (error, res, body) => { this.error = error; this.res = res; this.body = body; return done(); }); - }); - }); +describe('Syncing', function() { + before(function(done) { + this.request = { + resources: [ + { + path: 'main.tex', + content: fs.readFileSync( + path.join(__dirname, '../fixtures/naugty_strings.txt'), + 'utf-8' + ) + } + ] + } + this.project_id = Client.randomId() + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + 
this.res = res + this.body = body + return done() + } + ) + }) + }) - return describe("wordcount file", function() { return it("should return wordcount info", function(done) { - return Client.wordcount(this.project_id, "main.tex", (error, result) => { - if (error != null) { throw error; } - expect(result).to.deep.equal({ - texcount: { - encode: "utf8", - textWords: 2281, - headWords: 2, - outside: 0, - headers: 2, - elements: 0, - mathInline: 6, - mathDisplay: 0, - errors: 0, - messages: "" - } - }); - return done(); - }); - }); } - ); -}); + return describe('wordcount file', function() { + return it('should return wordcount info', function(done) { + return Client.wordcount(this.project_id, 'main.tex', (error, result) => { + if (error != null) { + throw error + } + expect(result).to.deep.equal({ + texcount: { + encode: 'utf8', + textWords: 2281, + headWords: 2, + outside: 0, + headers: 2, + elements: 0, + mathInline: 6, + mathDisplay: 0, + errors: 0, + messages: '' + } + }) + return done() + }) + }) + }) +}) diff --git a/test/acceptance/js/helpers/Client.js b/test/acceptance/js/helpers/Client.js index 50e75d6..9f430e3 100644 --- a/test/acceptance/js/helpers/Client.js +++ b/test/acceptance/js/helpers/Client.js @@ -12,143 +12,197 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let Client; -const request = require("request"); -const fs = require("fs"); -const Settings = require("settings-sharelatex"); +let Client +const request = require('request') +const fs = require('fs') +const Settings = require('settings-sharelatex') -const host = "localhost"; +const host = 'localhost' -module.exports = (Client = { - host: Settings.apis.clsi.url, +module.exports = Client = { + host: Settings.apis.clsi.url, - randomId() { - return Math.random().toString(16).slice(2); - }, + randomId() { + return Math.random() + .toString(16) + .slice(2) + }, - compile(project_id, data, callback) { - if (callback == null) { callback = function(error, res, body) {}; } - return request.post({ - url: `${this.host}/project/${project_id}/compile`, - json: { - compile: data - } - }, callback); - }, + compile(project_id, data, callback) { + if (callback == null) { + callback = function(error, res, body) {} + } + return request.post( + { + url: `${this.host}/project/${project_id}/compile`, + json: { + compile: data + } + }, + callback + ) + }, - clearCache(project_id, callback) { - if (callback == null) { callback = function(error, res, body) {}; } - return request.del(`${this.host}/project/${project_id}`, callback); - }, + clearCache(project_id, callback) { + if (callback == null) { + callback = function(error, res, body) {} + } + return request.del(`${this.host}/project/${project_id}`, callback) + }, - getOutputFile(response, type) { - for (const file of Array.from(response.compile.outputFiles)) { - if ((file.type === type) && file.url.match(`output.${type}`)) { - return file; - } - } - return null; - }, + getOutputFile(response, type) { + for (const file of Array.from(response.compile.outputFiles)) { + if (file.type === type && file.url.match(`output.${type}`)) { + return file + } + } + return null + }, - runServer(port, directory) { - const express = require("express"); - const app = express(); - app.use(express.static(directory)); - console.log("starting test server on", port, host); - return app.listen(port, host).on("error", (error) => { - console.error("error starting server:", error.message); - return process.exit(1); - 
}); - }, + runServer(port, directory) { + const express = require('express') + const app = express() + app.use(express.static(directory)) + console.log('starting test server on', port, host) + return app.listen(port, host).on('error', error => { + console.error('error starting server:', error.message) + return process.exit(1) + }) + }, + syncFromCode(project_id, file, line, column, callback) { + if (callback == null) { + callback = function(error, pdfPositions) {} + } + return request.get( + { + url: `${this.host}/project/${project_id}/sync/code`, + qs: { + file, + line, + column + } + }, + (error, response, body) => { + if (error != null) { + return callback(error) + } + return callback(null, JSON.parse(body)) + } + ) + }, - syncFromCode(project_id, file, line, column, callback) { - if (callback == null) { callback = function(error, pdfPositions) {}; } - return request.get({ - url: `${this.host}/project/${project_id}/sync/code`, - qs: { - file, - line, - column - } - }, (error, response, body) => { - if (error != null) { return callback(error); } - return callback(null, JSON.parse(body)); - }); - }, + syncFromPdf(project_id, page, h, v, callback) { + if (callback == null) { + callback = function(error, pdfPositions) {} + } + return request.get( + { + url: `${this.host}/project/${project_id}/sync/pdf`, + qs: { + page, + h, + v + } + }, + (error, response, body) => { + if (error != null) { + return callback(error) + } + return callback(null, JSON.parse(body)) + } + ) + }, - syncFromPdf(project_id, page, h, v, callback) { - if (callback == null) { callback = function(error, pdfPositions) {}; } - return request.get({ - url: `${this.host}/project/${project_id}/sync/pdf`, - qs: { - page, - h, v - } - }, (error, response, body) => { - if (error != null) { return callback(error); } - return callback(null, JSON.parse(body)); - }); - }, + compileDirectory(project_id, baseDirectory, directory, serverPort, callback) { + if (callback == null) { + callback = function(error, res, body) {} + } + const resources = [] + let entities = fs.readdirSync(`${baseDirectory}/${directory}`) + let rootResourcePath = 'main.tex' + while (entities.length > 0) { + var entity = entities.pop() + const stat = fs.statSync(`${baseDirectory}/${directory}/${entity}`) + if (stat.isDirectory()) { + entities = entities.concat( + fs + .readdirSync(`${baseDirectory}/${directory}/${entity}`) + .map(subEntity => { + if (subEntity === 'main.tex') { + rootResourcePath = `${entity}/${subEntity}` + } + return `${entity}/${subEntity}` + }) + ) + } else if (stat.isFile() && entity !== 'output.pdf') { + const extension = entity.split('.').pop() + if ( + [ + 'tex', + 'bib', + 'cls', + 'sty', + 'pdf_tex', + 'Rtex', + 'ist', + 'md', + 'Rmd' + ].indexOf(extension) > -1 + ) { + resources.push({ + path: entity, + content: fs + .readFileSync(`${baseDirectory}/${directory}/${entity}`) + .toString() + }) + } else if ( + ['eps', 'ttf', 'png', 'jpg', 'pdf', 'jpeg'].indexOf(extension) > -1 + ) { + resources.push({ + path: entity, + url: `http://${host}:${serverPort}/${directory}/${entity}`, + modified: stat.mtime + }) + } + } + } - compileDirectory(project_id, baseDirectory, directory, serverPort, callback) { - if (callback == null) { callback = function(error, res, body) {}; } - const resources = []; - let entities = fs.readdirSync(`${baseDirectory}/${directory}`); - let rootResourcePath = "main.tex"; - while (entities.length > 0) { - var entity = entities.pop(); - const stat = fs.statSync(`${baseDirectory}/${directory}/${entity}`); - if 
(stat.isDirectory()) { - entities = entities.concat(fs.readdirSync(`${baseDirectory}/${directory}/${entity}`).map((subEntity) => { - if (subEntity === "main.tex") { - rootResourcePath = `${entity}/${subEntity}`; - } - return `${entity}/${subEntity}`; - }) - ); - } else if (stat.isFile() && (entity !== "output.pdf")) { - const extension = entity.split(".").pop(); - if (["tex", "bib", "cls", "sty", "pdf_tex", "Rtex", "ist", "md", "Rmd"].indexOf(extension) > -1) { - resources.push({ - path: entity, - content: fs.readFileSync(`${baseDirectory}/${directory}/${entity}`).toString() - }); - } else if (["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1) { - resources.push({ - path: entity, - url: `http://${host}:${serverPort}/${directory}/${entity}`, - modified: stat.mtime - }); - } - } - } + return fs.readFile( + `${baseDirectory}/${directory}/options.json`, + (error, body) => { + const req = { + resources, + rootResourcePath + } - return fs.readFile(`${baseDirectory}/${directory}/options.json`, (error, body) => { - const req = { - resources, - rootResourcePath - }; + if (error == null) { + body = JSON.parse(body) + req.options = body + } - if ((error == null)) { - body = JSON.parse(body); - req.options = body; - } + return this.compile(project_id, req, callback) + } + ) + }, - return this.compile(project_id, req, callback); - }); - }, - - wordcount(project_id, file, callback) { - if (callback == null) { callback = function(error, pdfPositions) {}; } - return request.get({ - url: `${this.host}/project/${project_id}/wordcount`, - qs: { - file - } - }, (error, response, body) => { - if (error != null) { return callback(error); } - return callback(null, JSON.parse(body)); - }); - } -}); + wordcount(project_id, file, callback) { + if (callback == null) { + callback = function(error, pdfPositions) {} + } + return request.get( + { + url: `${this.host}/project/${project_id}/wordcount`, + qs: { + file + } + }, + (error, response, body) => { + if (error != null) { + return callback(error) + } + return callback(null, JSON.parse(body)) + } + ) + } +} diff --git a/test/acceptance/js/helpers/ClsiApp.js b/test/acceptance/js/helpers/ClsiApp.js index bd3222d..f803846 100644 --- a/test/acceptance/js/helpers/ClsiApp.js +++ b/test/acceptance/js/helpers/ClsiApp.js @@ -12,40 +12,53 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -const app = require('../../../../app'); -require("logger-sharelatex").logger.level("info"); -const logger = require("logger-sharelatex"); -const Settings = require("settings-sharelatex"); +const app = require('../../../../app') +require('logger-sharelatex').logger.level('info') +const logger = require('logger-sharelatex') +const Settings = require('settings-sharelatex') module.exports = { - running: false, - initing: false, - callbacks: [], - ensureRunning(callback) { - if (callback == null) { callback = function(error) {}; } - if (this.running) { - return callback(); - } else if (this.initing) { - return this.callbacks.push(callback); - } else { - this.initing = true; - this.callbacks.push(callback); - return app.listen(__guard__(Settings.internal != null ? 
Settings.internal.clsi : undefined, x => x.port), "localhost", error => { - if (error != null) { throw error; } - this.running = true; - logger.log("clsi running in dev mode"); + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { + callback = function(error) {} + } + if (this.running) { + return callback() + } else if (this.initing) { + return this.callbacks.push(callback) + } else { + this.initing = true + this.callbacks.push(callback) + return app.listen( + __guard__( + Settings.internal != null ? Settings.internal.clsi : undefined, + x => x.port + ), + 'localhost', + error => { + if (error != null) { + throw error + } + this.running = true + logger.log('clsi running in dev mode') - return (() => { - const result = []; - for (callback of Array.from(this.callbacks)) { - result.push(callback()); - } - return result; - })(); - }); - } - } -}; + return (() => { + const result = [] + for (callback of Array.from(this.callbacks)) { + result.push(callback()) + } + return result + })() + } + ) + } + } +} function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} From 8694fce0c95b769f169844f0f1f6137304ed504c Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:16:15 +0100 Subject: [PATCH 20/24] decaffeinate: rename individual coffee files to js files --- app.coffee => app.js | 0 config/{settings.defaults.coffee => settings.defaults.js} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename app.coffee => app.js (100%) rename config/{settings.defaults.coffee => settings.defaults.js} (100%) diff --git a/app.coffee b/app.js similarity index 100% rename from app.coffee rename to app.js diff --git a/config/settings.defaults.coffee b/config/settings.defaults.js similarity index 100% rename from config/settings.defaults.coffee rename to config/settings.defaults.js From 62d20ee5f087d652fa54ad4aa0073f62fe3f8c24 Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:16:17 +0100 Subject: [PATCH 21/24] decaffeinate: convert individual files to js --- app.js | 428 ++++++++++++++++++++---------------- config/settings.defaults.js | 122 +++++----- 2 files changed, 311 insertions(+), 239 deletions(-) diff --git a/app.js b/app.js index 9bcdfeb..99427da 100644 --- a/app.js +++ b/app.js @@ -1,244 +1,298 @@ -Metrics = require "metrics-sharelatex" -Metrics.initialize("clsi") +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let tenMinutes; +const Metrics = require("metrics-sharelatex"); +Metrics.initialize("clsi"); -CompileController = require "./app/js/CompileController" -Settings = require "settings-sharelatex" -logger = require "logger-sharelatex" -logger.initialize("clsi") -if Settings.sentry?.dsn? - logger.initializeErrorReporting(Settings.sentry.dsn) +const CompileController = require("./app/js/CompileController"); +const Settings = require("settings-sharelatex"); +const logger = require("logger-sharelatex"); +logger.initialize("clsi"); +if ((Settings.sentry != null ? 
Settings.sentry.dsn : undefined) != null) { + logger.initializeErrorReporting(Settings.sentry.dsn); +} -smokeTest = require "smoke-test-sharelatex" -ContentTypeMapper = require "./app/js/ContentTypeMapper" -Errors = require './app/js/Errors' +const smokeTest = require("smoke-test-sharelatex"); +const ContentTypeMapper = require("./app/js/ContentTypeMapper"); +const Errors = require('./app/js/Errors'); -Path = require "path" -fs = require "fs" +const Path = require("path"); +const fs = require("fs"); -Metrics.open_sockets.monitor(logger) -Metrics.memory.monitor(logger) +Metrics.open_sockets.monitor(logger); +Metrics.memory.monitor(logger); -ProjectPersistenceManager = require "./app/js/ProjectPersistenceManager" -OutputCacheManager = require "./app/js/OutputCacheManager" +const ProjectPersistenceManager = require("./app/js/ProjectPersistenceManager"); +const OutputCacheManager = require("./app/js/OutputCacheManager"); -require("./app/js/db").sync() +require("./app/js/db").sync(); -express = require "express" -bodyParser = require "body-parser" -app = express() +const express = require("express"); +const bodyParser = require("body-parser"); +const app = express(); -Metrics.injectMetricsRoute(app) -app.use Metrics.http.monitor(logger) +Metrics.injectMetricsRoute(app); +app.use(Metrics.http.monitor(logger)); -# Compile requests can take longer than the default two -# minutes (including file download time), so bump up the -# timeout a bit. -TIMEOUT = 10 * 60 * 1000 -app.use (req, res, next) -> - req.setTimeout TIMEOUT - res.setTimeout TIMEOUT - res.removeHeader("X-Powered-By") - next() +// Compile requests can take longer than the default two +// minutes (including file download time), so bump up the +// timeout a bit. +const TIMEOUT = 10 * 60 * 1000; +app.use(function(req, res, next) { + req.setTimeout(TIMEOUT); + res.setTimeout(TIMEOUT); + res.removeHeader("X-Powered-By"); + return next(); +}); -app.param 'project_id', (req, res, next, project_id) -> - if project_id?.match /^[a-zA-Z0-9_-]+$/ - next() - else - next new Error("invalid project id") +app.param('project_id', function(req, res, next, project_id) { + if ((project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined)) { + return next(); + } else { + return next(new Error("invalid project id")); + } +}); -app.param 'user_id', (req, res, next, user_id) -> - if user_id?.match /^[0-9a-f]{24}$/ - next() - else - next new Error("invalid user id") +app.param('user_id', function(req, res, next, user_id) { + if ((user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined)) { + return next(); + } else { + return next(new Error("invalid user id")); + } +}); -app.param 'build_id', (req, res, next, build_id) -> - if build_id?.match OutputCacheManager.BUILD_REGEX - next() - else - next new Error("invalid build id #{build_id}") +app.param('build_id', function(req, res, next, build_id) { + if ((build_id != null ? 
build_id.match(OutputCacheManager.BUILD_REGEX) : undefined)) { + return next(); + } else { + return next(new Error(`invalid build id ${build_id}`)); + } +}); -app.post "/project/:project_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile -app.post "/project/:project_id/compile/stop", CompileController.stopCompile -app.delete "/project/:project_id", CompileController.clearCache +app.post("/project/:project_id/compile", bodyParser.json({limit: Settings.compileSizeLimit}), CompileController.compile); +app.post("/project/:project_id/compile/stop", CompileController.stopCompile); +app.delete("/project/:project_id", CompileController.clearCache); -app.get "/project/:project_id/sync/code", CompileController.syncFromCode -app.get "/project/:project_id/sync/pdf", CompileController.syncFromPdf -app.get "/project/:project_id/wordcount", CompileController.wordcount -app.get "/project/:project_id/status", CompileController.status +app.get("/project/:project_id/sync/code", CompileController.syncFromCode); +app.get("/project/:project_id/sync/pdf", CompileController.syncFromPdf); +app.get("/project/:project_id/wordcount", CompileController.wordcount); +app.get("/project/:project_id/status", CompileController.status); -# Per-user containers -app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile -app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile -app.delete "/project/:project_id/user/:user_id", CompileController.clearCache +// Per-user containers +app.post("/project/:project_id/user/:user_id/compile", bodyParser.json({limit: Settings.compileSizeLimit}), CompileController.compile); +app.post("/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile); +app.delete("/project/:project_id/user/:user_id", CompileController.clearCache); -app.get "/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode -app.get "/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf -app.get "/project/:project_id/user/:user_id/wordcount", CompileController.wordcount +app.get("/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode); +app.get("/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf); +app.get("/project/:project_id/user/:user_id/wordcount", CompileController.wordcount); -ForbidSymlinks = require "./app/js/StaticServerForbidSymlinks" +const ForbidSymlinks = require("./app/js/StaticServerForbidSymlinks"); -# create a static server which does not allow access to any symlinks -# avoids possible mismatch of root directory between middleware check -# and serving the files -staticServer = ForbidSymlinks express.static, Settings.path.compilesDir, setHeaders: (res, path, stat) -> - if Path.basename(path) == "output.pdf" - # Calculate an etag in the same way as nginx - # https://github.com/tj/send/issues/65 - etag = (path, stat) -> - '"' + Math.ceil(+stat.mtime / 1000).toString(16) + +// create a static server which does not allow access to any symlinks +// avoids possible mismatch of root directory between middleware check +// and serving the files +const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, { setHeaders(res, path, stat) { + if (Path.basename(path) === "output.pdf") { + // Calculate an etag in the same way as nginx + // https://github.com/tj/send/issues/65 + const etag = (path, stat) => + `"${Math.ceil(+stat.mtime / 
1000).toString(16)}` + '-' + Number(stat.size).toString(16) + '"' - res.set("Etag", etag(path, stat)) - res.set("Content-Type", ContentTypeMapper.map(path)) + ; + res.set("Etag", etag(path, stat)); + } + return res.set("Content-Type", ContentTypeMapper.map(path)); +} +} +); -app.get "/project/:project_id/user/:user_id/build/:build_id/output/*", (req, res, next) -> - # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) - req.url = "/#{req.params.project_id}-#{req.params.user_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}") - staticServer(req, res, next) +app.get("/project/:project_id/user/:user_id/build/:build_id/output/*", function(req, res, next) { + // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) + req.url = `/${req.params.project_id}-${req.params.user_id}/` + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`); + return staticServer(req, res, next); +}); -app.get "/project/:project_id/build/:build_id/output/*", (req, res, next) -> - # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) - req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}") - staticServer(req, res, next) +app.get("/project/:project_id/build/:build_id/output/*", function(req, res, next) { + // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) + req.url = `/${req.params.project_id}/` + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`); + return staticServer(req, res, next); +}); -app.get "/project/:project_id/user/:user_id/output/*", (req, res, next) -> - # for specific user get the path to the top level file - req.url = "/#{req.params.project_id}-#{req.params.user_id}/#{req.params[0]}" - staticServer(req, res, next) +app.get("/project/:project_id/user/:user_id/output/*", function(req, res, next) { + // for specific user get the path to the top level file + req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`; + return staticServer(req, res, next); +}); -app.get "/project/:project_id/output/*", (req, res, next) -> - if req.query?.build? && req.query.build.match(OutputCacheManager.BUILD_REGEX) - # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) - req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.query.build, "/#{req.params[0]}") - else - req.url = "/#{req.params.project_id}/#{req.params[0]}" - staticServer(req, res, next) +app.get("/project/:project_id/output/*", function(req, res, next) { + if (((req.query != null ? req.query.build : undefined) != null) && req.query.build.match(OutputCacheManager.BUILD_REGEX)) { + // for specific build get the path from the OutputCacheManager (e.g. 
.clsi/buildId) + req.url = `/${req.params.project_id}/` + OutputCacheManager.path(req.query.build, `/${req.params[0]}`); + } else { + req.url = `/${req.params.project_id}/${req.params[0]}`; + } + return staticServer(req, res, next); +}); -app.get "/oops", (req, res, next) -> - logger.error {err: "hello"}, "test error" - res.send "error\n" +app.get("/oops", function(req, res, next) { + logger.error({err: "hello"}, "test error"); + return res.send("error\n"); +}); -app.get "/status", (req, res, next) -> - res.send "CLSI is alive\n" +app.get("/status", (req, res, next) => res.send("CLSI is alive\n")); -resCacher = - contentType:(@setContentType)-> - send:(@code, @body)-> +const resCacher = { + contentType(setContentType){ + this.setContentType = setContentType; + }, + send(code, body){ + this.code = code; + this.body = body; + }, - #default the server to be down - code:500 - body:{} + //default the server to be down + code:500, + body:{}, setContentType:"application/json" +}; -if Settings.smokeTest - do runSmokeTest = -> - logger.log("running smoke tests") - smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher) - setTimeout(runSmokeTest, 30 * 1000) +if (Settings.smokeTest) { + let runSmokeTest; + (runSmokeTest = function() { + logger.log("running smoke tests"); + smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher); + return setTimeout(runSmokeTest, 30 * 1000); + })(); +} -app.get "/health_check", (req, res)-> - res.contentType(resCacher?.setContentType) - res.status(resCacher?.code).send(resCacher?.body) +app.get("/health_check", function(req, res){ + res.contentType(resCacher != null ? resCacher.setContentType : undefined); + return res.status(resCacher != null ? resCacher.code : undefined).send(resCacher != null ? resCacher.body : undefined); +}); -app.get "/smoke_test_force", (req, res)-> - smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res) +app.get("/smoke_test_force", (req, res)=> smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)); -profiler = require "v8-profiler-node8" -app.get "/profile", (req, res) -> - time = parseInt(req.query.time || "1000") - profiler.startProfiling("test") - setTimeout () -> - profile = profiler.stopProfiling("test") - res.json(profile) - , time +const profiler = require("v8-profiler-node8"); +app.get("/profile", function(req, res) { + const time = parseInt(req.query.time || "1000"); + profiler.startProfiling("test"); + return setTimeout(function() { + const profile = profiler.stopProfiling("test"); + return res.json(profile); + } + , time); +}); -app.get "/heapdump", (req, res)-> - require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.clsi.heapsnapshot', (err, filename)-> - res.send filename +app.get("/heapdump", (req, res)=> + require('heapdump').writeSnapshot(`/tmp/${Date.now()}.clsi.heapsnapshot`, (err, filename)=> res.send(filename)) +); -app.use (error, req, res, next) -> - if error instanceof Errors.NotFoundError - logger.warn {err: error, url: req.url}, "not found error" - return res.sendStatus(404) - else - logger.error {err: error, url: req.url}, "server error" - res.sendStatus(error?.statusCode || 500) +app.use(function(error, req, res, next) { + if (error instanceof Errors.NotFoundError) { + logger.warn({err: error, url: req.url}, "not found error"); + return res.sendStatus(404); + } else { + logger.error({err: error, url: req.url}, "server error"); + return res.sendStatus((error != null ? 
error.statusCode : undefined) || 500); + } +}); -net = require "net" -os = require "os" +const net = require("net"); +const os = require("os"); -STATE = "up" +let STATE = "up"; -loadTcpServer = net.createServer (socket) -> - socket.on "error", (err)-> - if err.code == "ECONNRESET" - # this always comes up, we don't know why - return - logger.err err:err, "error with socket on load check" - socket.destroy() +const loadTcpServer = net.createServer(function(socket) { + socket.on("error", function(err){ + if (err.code === "ECONNRESET") { + // this always comes up, we don't know why + return; + } + logger.err({err}, "error with socket on load check"); + return socket.destroy(); + }); - if STATE == "up" and Settings.internal.load_balancer_agent.report_load - currentLoad = os.loadavg()[0] + if ((STATE === "up") && Settings.internal.load_balancer_agent.report_load) { + let availableWorkingCpus; + const currentLoad = os.loadavg()[0]; - # staging clis's have 1 cpu core only - if os.cpus().length == 1 - availableWorkingCpus = 1 - else - availableWorkingCpus = os.cpus().length - 1 + // staging clis's have 1 cpu core only + if (os.cpus().length === 1) { + availableWorkingCpus = 1; + } else { + availableWorkingCpus = os.cpus().length - 1; + } - freeLoad = availableWorkingCpus - currentLoad - freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100) - if freeLoadPercentage <= 0 - freeLoadPercentage = 1 # when its 0 the server is set to drain and will move projects to different servers - socket.write("up, #{freeLoadPercentage}%\n", "ASCII") - socket.end() - else - socket.write("#{STATE}\n", "ASCII") - socket.end() + const freeLoad = availableWorkingCpus - currentLoad; + let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100); + if (freeLoadPercentage <= 0) { + freeLoadPercentage = 1; // when its 0 the server is set to drain and will move projects to different servers + } + socket.write(`up, ${freeLoadPercentage}%\n`, "ASCII"); + return socket.end(); + } else { + socket.write(`${STATE}\n`, "ASCII"); + return socket.end(); + } +}); -loadHttpServer = express() +const loadHttpServer = express(); -loadHttpServer.post "/state/up", (req, res, next) -> - STATE = "up" - logger.info "getting message to set server to down" - res.sendStatus 204 +loadHttpServer.post("/state/up", function(req, res, next) { + STATE = "up"; + logger.info("getting message to set server to down"); + return res.sendStatus(204); +}); -loadHttpServer.post "/state/down", (req, res, next) -> - STATE = "down" - logger.info "getting message to set server to down" - res.sendStatus 204 +loadHttpServer.post("/state/down", function(req, res, next) { + STATE = "down"; + logger.info("getting message to set server to down"); + return res.sendStatus(204); +}); -loadHttpServer.post "/state/maint", (req, res, next) -> - STATE = "maint" - logger.info "getting message to set server to maint" - res.sendStatus 204 +loadHttpServer.post("/state/maint", function(req, res, next) { + STATE = "maint"; + logger.info("getting message to set server to maint"); + return res.sendStatus(204); +}); -port = (Settings.internal?.clsi?.port or 3013) -host = (Settings.internal?.clsi?.host or "localhost") +const port = (__guard__(Settings.internal != null ? Settings.internal.clsi : undefined, x => x.port) || 3013); +const host = (__guard__(Settings.internal != null ? 
Settings.internal.clsi : undefined, x1 => x1.host) || "localhost"); -load_tcp_port = Settings.internal.load_balancer_agent.load_port -load_http_port = Settings.internal.load_balancer_agent.local_port +const load_tcp_port = Settings.internal.load_balancer_agent.load_port; +const load_http_port = Settings.internal.load_balancer_agent.local_port; -if !module.parent # Called directly - app.listen port, host, (error) -> - logger.info "CLSI starting up, listening on #{host}:#{port}" +if (!module.parent) { // Called directly + app.listen(port, host, error => logger.info(`CLSI starting up, listening on ${host}:${port}`)); - loadTcpServer.listen load_tcp_port, host, (error) -> - throw error if error? - logger.info "Load tcp agent listening on load port #{load_tcp_port}" + loadTcpServer.listen(load_tcp_port, host, function(error) { + if (error != null) { throw error; } + return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`); + }); - loadHttpServer.listen load_http_port, host, (error) -> - throw error if error? - logger.info "Load http agent listening on load port #{load_http_port}" + loadHttpServer.listen(load_http_port, host, function(error) { + if (error != null) { throw error; } + return logger.info(`Load http agent listening on load port ${load_http_port}`); + }); +} -module.exports = app +module.exports = app; -setInterval () -> - ProjectPersistenceManager.clearExpiredProjects() -, tenMinutes = 10 * 60 * 1000 +setInterval(() => ProjectPersistenceManager.clearExpiredProjects() +, (tenMinutes = 10 * 60 * 1000)); + +function __guard__(value, transform) { + return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; +} \ No newline at end of file diff --git a/config/settings.defaults.js b/config/settings.defaults.js index ad3f04d..5d0bb75 100644 --- a/config/settings.defaults.js +++ b/config/settings.defaults.js @@ -1,71 +1,89 @@ -Path = require "path" +const Path = require("path"); -module.exports = - # Options are passed to Sequelize. - # See http://sequelizejs.com/documentation#usage-options for details - mysql: - clsi: - database: "clsi" - username: "clsi" - dialect: "sqlite" - storage: process.env["SQLITE_PATH"] or Path.resolve(__dirname + "/../db.sqlite") - pool: - max: 1 +module.exports = { + // Options are passed to Sequelize. 
+ // See http://sequelizejs.com/documentation#usage-options for details + mysql: { + clsi: { + database: "clsi", + username: "clsi", + dialect: "sqlite", + storage: process.env["SQLITE_PATH"] || Path.resolve(__dirname + "/../db.sqlite"), + pool: { + max: 1, min: 1 - retry: + }, + retry: { max: 10 + } + } + }, - compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] or "7mb" + compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] || "7mb", - path: - compilesDir: Path.resolve(__dirname + "/../compiles") - clsiCacheDir: Path.resolve(__dirname + "/../cache") - synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id) + path: { + compilesDir: Path.resolve(__dirname + "/../compiles"), + clsiCacheDir: Path.resolve(__dirname + "/../cache"), + synctexBaseDir(project_id) { return Path.join(this.compilesDir, project_id); } + }, - internal: - clsi: - port: 3013 - host: process.env["LISTEN_ADDRESS"] or "localhost" + internal: { + clsi: { + port: 3013, + host: process.env["LISTEN_ADDRESS"] || "localhost" + }, - load_balancer_agent: - report_load:true - load_port: 3048 + load_balancer_agent: { + report_load:true, + load_port: 3048, local_port: 3049 - apis: - clsi: - url: "http://#{process.env['CLSI_HOST'] or 'localhost'}:3013" + } + }, + apis: { + clsi: { + url: `http://${process.env['CLSI_HOST'] || 'localhost'}:3013` + } + }, - smokeTest: process.env["SMOKE_TEST"] or false - project_cache_length_ms: 1000 * 60 * 60 * 24 - parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] or 1 - parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] or 1 - filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"] - texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"] - sentry: + smokeTest: process.env["SMOKE_TEST"] || false, + project_cache_length_ms: 1000 * 60 * 60 * 24, + parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] || 1, + parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] || 1, + filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"], + texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"], + sentry: { dsn: process.env['SENTRY_DSN'] + } +}; -if process.env["DOCKER_RUNNER"] - module.exports.clsi = - dockerRunner: process.env["DOCKER_RUNNER"] == "true" - docker: - image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1" - env: +if (process.env["DOCKER_RUNNER"]) { + let seccomp_profile_path; + module.exports.clsi = { + dockerRunner: process.env["DOCKER_RUNNER"] === "true", + docker: { + image: process.env["TEXLIVE_IMAGE"] || "quay.io/sharelatex/texlive-full:2017.1", + env: { HOME: "/tmp" - socketPath: "/var/run/docker.sock" - user: process.env["TEXLIVE_IMAGE_USER"] or "tex" - expireProjectAfterIdleMs: 24 * 60 * 60 * 1000 + }, + socketPath: "/var/run/docker.sock", + user: process.env["TEXLIVE_IMAGE_USER"] || "tex" + }, + expireProjectAfterIdleMs: 24 * 60 * 60 * 1000, checkProjectsIntervalMs: 10 * 60 * 1000 + }; - try - seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json") - module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path))) - catch error - console.log error, "could not load seccom profile from #{seccomp_profile_path}" + try { + seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json"); + module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path))); + } catch (error) { + 
console.log(error, `could not load seccom profile from ${seccomp_profile_path}`); + } - module.exports.path.synctexBaseDir = -> "/compile" + module.exports.path.synctexBaseDir = () => "/compile"; - module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"] + module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]; - module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"] + module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]; +} From de36ab663c3152cd33689130ae711e05a9c3cdb7 Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:16:18 +0100 Subject: [PATCH 22/24] prettier: convert individual decaffeinated files to Prettier format --- app.js | 519 ++++++++++++++++++++---------------- config/settings.defaults.js | 173 ++++++------ 2 files changed, 388 insertions(+), 304 deletions(-) diff --git a/app.js b/app.js index 99427da..c03fcd8 100644 --- a/app.js +++ b/app.js @@ -5,294 +5,367 @@ * DS207: Consider shorter variations of null checks * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md */ -let tenMinutes; -const Metrics = require("metrics-sharelatex"); -Metrics.initialize("clsi"); +let tenMinutes +const Metrics = require('metrics-sharelatex') +Metrics.initialize('clsi') -const CompileController = require("./app/js/CompileController"); -const Settings = require("settings-sharelatex"); -const logger = require("logger-sharelatex"); -logger.initialize("clsi"); +const CompileController = require('./app/js/CompileController') +const Settings = require('settings-sharelatex') +const logger = require('logger-sharelatex') +logger.initialize('clsi') if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) { - logger.initializeErrorReporting(Settings.sentry.dsn); + logger.initializeErrorReporting(Settings.sentry.dsn) } -const smokeTest = require("smoke-test-sharelatex"); -const ContentTypeMapper = require("./app/js/ContentTypeMapper"); -const Errors = require('./app/js/Errors'); +const smokeTest = require('smoke-test-sharelatex') +const ContentTypeMapper = require('./app/js/ContentTypeMapper') +const Errors = require('./app/js/Errors') -const Path = require("path"); -const fs = require("fs"); +const Path = require('path') +const fs = require('fs') +Metrics.open_sockets.monitor(logger) +Metrics.memory.monitor(logger) -Metrics.open_sockets.monitor(logger); -Metrics.memory.monitor(logger); +const ProjectPersistenceManager = require('./app/js/ProjectPersistenceManager') +const OutputCacheManager = require('./app/js/OutputCacheManager') -const ProjectPersistenceManager = require("./app/js/ProjectPersistenceManager"); -const OutputCacheManager = require("./app/js/OutputCacheManager"); +require('./app/js/db').sync() -require("./app/js/db").sync(); +const express = require('express') +const bodyParser = require('body-parser') +const app = express() -const express = require("express"); -const bodyParser = require("body-parser"); -const app = express(); - -Metrics.injectMetricsRoute(app); -app.use(Metrics.http.monitor(logger)); +Metrics.injectMetricsRoute(app) +app.use(Metrics.http.monitor(logger)) // Compile requests can take longer than the default two -// minutes (including file download time), so bump up the +// minutes (including file download time), so bump up the // timeout a bit. 
-const TIMEOUT = 10 * 60 * 1000; +const TIMEOUT = 10 * 60 * 1000 app.use(function(req, res, next) { - req.setTimeout(TIMEOUT); - res.setTimeout(TIMEOUT); - res.removeHeader("X-Powered-By"); - return next(); -}); + req.setTimeout(TIMEOUT) + res.setTimeout(TIMEOUT) + res.removeHeader('X-Powered-By') + return next() +}) app.param('project_id', function(req, res, next, project_id) { - if ((project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined)) { - return next(); - } else { - return next(new Error("invalid project id")); - } -}); + if (project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined) { + return next() + } else { + return next(new Error('invalid project id')) + } +}) app.param('user_id', function(req, res, next, user_id) { - if ((user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined)) { - return next(); - } else { - return next(new Error("invalid user id")); - } -}); + if (user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined) { + return next() + } else { + return next(new Error('invalid user id')) + } +}) app.param('build_id', function(req, res, next, build_id) { - if ((build_id != null ? build_id.match(OutputCacheManager.BUILD_REGEX) : undefined)) { - return next(); - } else { - return next(new Error(`invalid build id ${build_id}`)); - } -}); + if ( + build_id != null + ? build_id.match(OutputCacheManager.BUILD_REGEX) + : undefined + ) { + return next() + } else { + return next(new Error(`invalid build id ${build_id}`)) + } +}) +app.post( + '/project/:project_id/compile', + bodyParser.json({ limit: Settings.compileSizeLimit }), + CompileController.compile +) +app.post('/project/:project_id/compile/stop', CompileController.stopCompile) +app.delete('/project/:project_id', CompileController.clearCache) -app.post("/project/:project_id/compile", bodyParser.json({limit: Settings.compileSizeLimit}), CompileController.compile); -app.post("/project/:project_id/compile/stop", CompileController.stopCompile); -app.delete("/project/:project_id", CompileController.clearCache); - -app.get("/project/:project_id/sync/code", CompileController.syncFromCode); -app.get("/project/:project_id/sync/pdf", CompileController.syncFromPdf); -app.get("/project/:project_id/wordcount", CompileController.wordcount); -app.get("/project/:project_id/status", CompileController.status); +app.get('/project/:project_id/sync/code', CompileController.syncFromCode) +app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf) +app.get('/project/:project_id/wordcount', CompileController.wordcount) +app.get('/project/:project_id/status', CompileController.status) // Per-user containers -app.post("/project/:project_id/user/:user_id/compile", bodyParser.json({limit: Settings.compileSizeLimit}), CompileController.compile); -app.post("/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile); -app.delete("/project/:project_id/user/:user_id", CompileController.clearCache); +app.post( + '/project/:project_id/user/:user_id/compile', + bodyParser.json({ limit: Settings.compileSizeLimit }), + CompileController.compile +) +app.post( + '/project/:project_id/user/:user_id/compile/stop', + CompileController.stopCompile +) +app.delete('/project/:project_id/user/:user_id', CompileController.clearCache) -app.get("/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode); -app.get("/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf); -app.get("/project/:project_id/user/:user_id/wordcount", CompileController.wordcount); 
+app.get( + '/project/:project_id/user/:user_id/sync/code', + CompileController.syncFromCode +) +app.get( + '/project/:project_id/user/:user_id/sync/pdf', + CompileController.syncFromPdf +) +app.get( + '/project/:project_id/user/:user_id/wordcount', + CompileController.wordcount +) -const ForbidSymlinks = require("./app/js/StaticServerForbidSymlinks"); +const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks') // create a static server which does not allow access to any symlinks // avoids possible mismatch of root directory between middleware check // and serving the files -const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, { setHeaders(res, path, stat) { - if (Path.basename(path) === "output.pdf") { - // Calculate an etag in the same way as nginx - // https://github.com/tj/send/issues/65 - const etag = (path, stat) => - `"${Math.ceil(+stat.mtime / 1000).toString(16)}` + - '-' + Number(stat.size).toString(16) + '"' - ; - res.set("Etag", etag(path, stat)); - } - return res.set("Content-Type", ContentTypeMapper.map(path)); -} -} -); +const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, { + setHeaders(res, path, stat) { + if (Path.basename(path) === 'output.pdf') { + // Calculate an etag in the same way as nginx + // https://github.com/tj/send/issues/65 + const etag = (path, stat) => + `"${Math.ceil(+stat.mtime / 1000).toString(16)}` + + '-' + + Number(stat.size).toString(16) + + '"' + res.set('Etag', etag(path, stat)) + } + return res.set('Content-Type', ContentTypeMapper.map(path)) + } +}) -app.get("/project/:project_id/user/:user_id/build/:build_id/output/*", function(req, res, next) { - // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) - req.url = `/${req.params.project_id}-${req.params.user_id}/` + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`); - return staticServer(req, res, next); -}); +app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function( + req, + res, + next +) { + // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) + req.url = + `/${req.params.project_id}-${req.params.user_id}/` + + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`) + return staticServer(req, res, next) +}) -app.get("/project/:project_id/build/:build_id/output/*", function(req, res, next) { - // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) - req.url = `/${req.params.project_id}/` + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`); - return staticServer(req, res, next); -}); +app.get('/project/:project_id/build/:build_id/output/*', function( + req, + res, + next +) { + // for specific build get the path from the OutputCacheManager (e.g. 
.clsi/buildId) + req.url = + `/${req.params.project_id}/` + + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`) + return staticServer(req, res, next) +}) -app.get("/project/:project_id/user/:user_id/output/*", function(req, res, next) { - // for specific user get the path to the top level file - req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`; - return staticServer(req, res, next); -}); +app.get('/project/:project_id/user/:user_id/output/*', function( + req, + res, + next +) { + // for specific user get the path to the top level file + req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}` + return staticServer(req, res, next) +}) -app.get("/project/:project_id/output/*", function(req, res, next) { - if (((req.query != null ? req.query.build : undefined) != null) && req.query.build.match(OutputCacheManager.BUILD_REGEX)) { - // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) - req.url = `/${req.params.project_id}/` + OutputCacheManager.path(req.query.build, `/${req.params[0]}`); - } else { - req.url = `/${req.params.project_id}/${req.params[0]}`; - } - return staticServer(req, res, next); -}); +app.get('/project/:project_id/output/*', function(req, res, next) { + if ( + (req.query != null ? req.query.build : undefined) != null && + req.query.build.match(OutputCacheManager.BUILD_REGEX) + ) { + // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) + req.url = + `/${req.params.project_id}/` + + OutputCacheManager.path(req.query.build, `/${req.params[0]}`) + } else { + req.url = `/${req.params.project_id}/${req.params[0]}` + } + return staticServer(req, res, next) +}) -app.get("/oops", function(req, res, next) { - logger.error({err: "hello"}, "test error"); - return res.send("error\n"); -}); +app.get('/oops', function(req, res, next) { + logger.error({ err: 'hello' }, 'test error') + return res.send('error\n') +}) - -app.get("/status", (req, res, next) => res.send("CLSI is alive\n")); +app.get('/status', (req, res, next) => res.send('CLSI is alive\n')) const resCacher = { - contentType(setContentType){ - this.setContentType = setContentType; - }, - send(code, body){ - this.code = code; - this.body = body; - }, + contentType(setContentType) { + this.setContentType = setContentType + }, + send(code, body) { + this.code = code + this.body = body + }, - //default the server to be down - code:500, - body:{}, - setContentType:"application/json" -}; + // default the server to be down + code: 500, + body: {}, + setContentType: 'application/json' +} if (Settings.smokeTest) { - let runSmokeTest; - (runSmokeTest = function() { - logger.log("running smoke tests"); - smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher); - return setTimeout(runSmokeTest, 30 * 1000); - })(); + let runSmokeTest + ;(runSmokeTest = function() { + logger.log('running smoke tests') + smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))( + {}, + resCacher + ) + return setTimeout(runSmokeTest, 30 * 1000) + })() } -app.get("/health_check", function(req, res){ - res.contentType(resCacher != null ? resCacher.setContentType : undefined); - return res.status(resCacher != null ? resCacher.code : undefined).send(resCacher != null ? resCacher.body : undefined); -}); +app.get('/health_check', function(req, res) { + res.contentType(resCacher != null ? resCacher.setContentType : undefined) + return res + .status(resCacher != null ? 
resCacher.code : undefined) + .send(resCacher != null ? resCacher.body : undefined) +}) -app.get("/smoke_test_force", (req, res)=> smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)); +app.get('/smoke_test_force', (req, res) => + smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))( + req, + res + ) +) -const profiler = require("v8-profiler-node8"); -app.get("/profile", function(req, res) { - const time = parseInt(req.query.time || "1000"); - profiler.startProfiling("test"); - return setTimeout(function() { - const profile = profiler.stopProfiling("test"); - return res.json(profile); - } - , time); -}); +const profiler = require('v8-profiler-node8') +app.get('/profile', function(req, res) { + const time = parseInt(req.query.time || '1000') + profiler.startProfiling('test') + return setTimeout(function() { + const profile = profiler.stopProfiling('test') + return res.json(profile) + }, time) +}) -app.get("/heapdump", (req, res)=> - require('heapdump').writeSnapshot(`/tmp/${Date.now()}.clsi.heapsnapshot`, (err, filename)=> res.send(filename)) -); +app.get('/heapdump', (req, res) => + require('heapdump').writeSnapshot( + `/tmp/${Date.now()}.clsi.heapsnapshot`, + (err, filename) => res.send(filename) + ) +) app.use(function(error, req, res, next) { - if (error instanceof Errors.NotFoundError) { - logger.warn({err: error, url: req.url}, "not found error"); - return res.sendStatus(404); - } else { - logger.error({err: error, url: req.url}, "server error"); - return res.sendStatus((error != null ? error.statusCode : undefined) || 500); - } -}); + if (error instanceof Errors.NotFoundError) { + logger.warn({ err: error, url: req.url }, 'not found error') + return res.sendStatus(404) + } else { + logger.error({ err: error, url: req.url }, 'server error') + return res.sendStatus((error != null ? 
error.statusCode : undefined) || 500) + } +}) -const net = require("net"); -const os = require("os"); - -let STATE = "up"; +const net = require('net') +const os = require('os') +let STATE = 'up' const loadTcpServer = net.createServer(function(socket) { - socket.on("error", function(err){ - if (err.code === "ECONNRESET") { - // this always comes up, we don't know why - return; - } - logger.err({err}, "error with socket on load check"); - return socket.destroy(); - }); - - if ((STATE === "up") && Settings.internal.load_balancer_agent.report_load) { - let availableWorkingCpus; - const currentLoad = os.loadavg()[0]; + socket.on('error', function(err) { + if (err.code === 'ECONNRESET') { + // this always comes up, we don't know why + return + } + logger.err({ err }, 'error with socket on load check') + return socket.destroy() + }) - // staging clis's have 1 cpu core only - if (os.cpus().length === 1) { - availableWorkingCpus = 1; - } else { - availableWorkingCpus = os.cpus().length - 1; - } + if (STATE === 'up' && Settings.internal.load_balancer_agent.report_load) { + let availableWorkingCpus + const currentLoad = os.loadavg()[0] - const freeLoad = availableWorkingCpus - currentLoad; - let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100); - if (freeLoadPercentage <= 0) { - freeLoadPercentage = 1; // when its 0 the server is set to drain and will move projects to different servers - } - socket.write(`up, ${freeLoadPercentage}%\n`, "ASCII"); - return socket.end(); - } else { - socket.write(`${STATE}\n`, "ASCII"); - return socket.end(); - } -}); + // staging clis's have 1 cpu core only + if (os.cpus().length === 1) { + availableWorkingCpus = 1 + } else { + availableWorkingCpus = os.cpus().length - 1 + } -const loadHttpServer = express(); + const freeLoad = availableWorkingCpus - currentLoad + let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100) + if (freeLoadPercentage <= 0) { + freeLoadPercentage = 1 // when its 0 the server is set to drain and will move projects to different servers + } + socket.write(`up, ${freeLoadPercentage}%\n`, 'ASCII') + return socket.end() + } else { + socket.write(`${STATE}\n`, 'ASCII') + return socket.end() + } +}) -loadHttpServer.post("/state/up", function(req, res, next) { - STATE = "up"; - logger.info("getting message to set server to down"); - return res.sendStatus(204); -}); +const loadHttpServer = express() -loadHttpServer.post("/state/down", function(req, res, next) { - STATE = "down"; - logger.info("getting message to set server to down"); - return res.sendStatus(204); -}); +loadHttpServer.post('/state/up', function(req, res, next) { + STATE = 'up' + logger.info('getting message to set server to down') + return res.sendStatus(204) +}) -loadHttpServer.post("/state/maint", function(req, res, next) { - STATE = "maint"; - logger.info("getting message to set server to maint"); - return res.sendStatus(204); -}); - +loadHttpServer.post('/state/down', function(req, res, next) { + STATE = 'down' + logger.info('getting message to set server to down') + return res.sendStatus(204) +}) -const port = (__guard__(Settings.internal != null ? Settings.internal.clsi : undefined, x => x.port) || 3013); -const host = (__guard__(Settings.internal != null ? 
Settings.internal.clsi : undefined, x1 => x1.host) || "localhost"); +loadHttpServer.post('/state/maint', function(req, res, next) { + STATE = 'maint' + logger.info('getting message to set server to maint') + return res.sendStatus(204) +}) -const load_tcp_port = Settings.internal.load_balancer_agent.load_port; -const load_http_port = Settings.internal.load_balancer_agent.local_port; +const port = + __guard__( + Settings.internal != null ? Settings.internal.clsi : undefined, + x => x.port + ) || 3013 +const host = + __guard__( + Settings.internal != null ? Settings.internal.clsi : undefined, + x1 => x1.host + ) || 'localhost' -if (!module.parent) { // Called directly - app.listen(port, host, error => logger.info(`CLSI starting up, listening on ${host}:${port}`)); +const load_tcp_port = Settings.internal.load_balancer_agent.load_port +const load_http_port = Settings.internal.load_balancer_agent.local_port - loadTcpServer.listen(load_tcp_port, host, function(error) { - if (error != null) { throw error; } - return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`); - }); +if (!module.parent) { + // Called directly + app.listen(port, host, error => + logger.info(`CLSI starting up, listening on ${host}:${port}`) + ) - loadHttpServer.listen(load_http_port, host, function(error) { - if (error != null) { throw error; } - return logger.info(`Load http agent listening on load port ${load_http_port}`); - }); + loadTcpServer.listen(load_tcp_port, host, function(error) { + if (error != null) { + throw error + } + return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`) + }) + + loadHttpServer.listen(load_http_port, host, function(error) { + if (error != null) { + throw error + } + return logger.info( + `Load http agent listening on load port ${load_http_port}` + ) + }) } -module.exports = app; - -setInterval(() => ProjectPersistenceManager.clearExpiredProjects() -, (tenMinutes = 10 * 60 * 1000)); +module.exports = app +setInterval( + () => ProjectPersistenceManager.clearExpiredProjects(), + (tenMinutes = 10 * 60 * 1000) +) function __guard__(value, transform) { - return (typeof value !== 'undefined' && value !== null) ? transform(value) : undefined; -} \ No newline at end of file + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/config/settings.defaults.js b/config/settings.defaults.js index 5d0bb75..b0fd0cb 100644 --- a/config/settings.defaults.js +++ b/config/settings.defaults.js @@ -1,89 +1,100 @@ -const Path = require("path"); +const Path = require('path') module.exports = { - // Options are passed to Sequelize. - // See http://sequelizejs.com/documentation#usage-options for details - mysql: { - clsi: { - database: "clsi", - username: "clsi", - dialect: "sqlite", - storage: process.env["SQLITE_PATH"] || Path.resolve(__dirname + "/../db.sqlite"), - pool: { - max: 1, - min: 1 - }, - retry: { - max: 10 - } - } - }, + // Options are passed to Sequelize. 
+ // See http://sequelizejs.com/documentation#usage-options for details + mysql: { + clsi: { + database: 'clsi', + username: 'clsi', + dialect: 'sqlite', + storage: + process.env.SQLITE_PATH || Path.resolve(__dirname + '/../db.sqlite'), + pool: { + max: 1, + min: 1 + }, + retry: { + max: 10 + } + } + }, - compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] || "7mb", - - path: { - compilesDir: Path.resolve(__dirname + "/../compiles"), - clsiCacheDir: Path.resolve(__dirname + "/../cache"), - synctexBaseDir(project_id) { return Path.join(this.compilesDir, project_id); } - }, + compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb', - internal: { - clsi: { - port: 3013, - host: process.env["LISTEN_ADDRESS"] || "localhost" - }, - - load_balancer_agent: { - report_load:true, - load_port: 3048, - local_port: 3049 - } - }, - apis: { - clsi: { - url: `http://${process.env['CLSI_HOST'] || 'localhost'}:3013` - } - }, + path: { + compilesDir: Path.resolve(__dirname + '/../compiles'), + clsiCacheDir: Path.resolve(__dirname + '/../cache'), + synctexBaseDir(project_id) { + return Path.join(this.compilesDir, project_id) + } + }, - - smokeTest: process.env["SMOKE_TEST"] || false, - project_cache_length_ms: 1000 * 60 * 60 * 24, - parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] || 1, - parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] || 1, - filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"], - texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"], - sentry: { - dsn: process.env['SENTRY_DSN'] - } -}; + internal: { + clsi: { + port: 3013, + host: process.env.LISTEN_ADDRESS || 'localhost' + }, + load_balancer_agent: { + report_load: true, + load_port: 3048, + local_port: 3049 + } + }, + apis: { + clsi: { + url: `http://${process.env.CLSI_HOST || 'localhost'}:3013` + } + }, -if (process.env["DOCKER_RUNNER"]) { - let seccomp_profile_path; - module.exports.clsi = { - dockerRunner: process.env["DOCKER_RUNNER"] === "true", - docker: { - image: process.env["TEXLIVE_IMAGE"] || "quay.io/sharelatex/texlive-full:2017.1", - env: { - HOME: "/tmp" - }, - socketPath: "/var/run/docker.sock", - user: process.env["TEXLIVE_IMAGE_USER"] || "tex" - }, - expireProjectAfterIdleMs: 24 * 60 * 60 * 1000, - checkProjectsIntervalMs: 10 * 60 * 1000 - }; - - try { - seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json"); - module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path))); - } catch (error) { - console.log(error, `could not load seccom profile from ${seccomp_profile_path}`); - } - - module.exports.path.synctexBaseDir = () => "/compile"; - - module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]; - - module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]; + smokeTest: process.env.SMOKE_TEST || false, + project_cache_length_ms: 1000 * 60 * 60 * 24, + parallelFileDownloads: process.env.FILESTORE_PARALLEL_FILE_DOWNLOADS || 1, + parallelSqlQueryLimit: process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT || 1, + filestoreDomainOveride: process.env.FILESTORE_DOMAIN_OVERRIDE, + texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE, + sentry: { + dsn: process.env.SENTRY_DSN + } +} + +if (process.env.DOCKER_RUNNER) { + let seccomp_profile_path + module.exports.clsi = { + dockerRunner: process.env.DOCKER_RUNNER === 'true', + docker: { + image: + process.env.TEXLIVE_IMAGE || + 'quay.io/sharelatex/texlive-full:2017.1', + 
env: { + HOME: '/tmp' + }, + socketPath: '/var/run/docker.sock', + user: process.env.TEXLIVE_IMAGE_USER || 'tex' + }, + expireProjectAfterIdleMs: 24 * 60 * 60 * 1000, + checkProjectsIntervalMs: 10 * 60 * 1000 + } + + try { + seccomp_profile_path = Path.resolve( + __dirname + '/../seccomp/clsi-profile.json' + ) + module.exports.clsi.docker.seccomp_profile = JSON.stringify( + JSON.parse(require('fs').readFileSync(seccomp_profile_path)) + ) + } catch (error) { + console.log( + error, + `could not load seccom profile from ${seccomp_profile_path}` + ) + } + + module.exports.path.synctexBaseDir = () => '/compile' + + module.exports.path.sandboxedCompilesHostDir = + process.env.COMPILES_HOST_DIR + + module.exports.path.synctexBinHostPath = process.env.SYNCTEX_BIN_HOST_PATH } From 7e737bba4f9c2c4bca1aebb14d238dfeac06c1db Mon Sep 17 00:00:00 2001 From: mserranom Date: Wed, 19 Feb 2020 12:38:54 +0100 Subject: [PATCH 23/24] fixed test paths --- test/unit/js/DockerLockManagerTests.js | 2 +- test/unit/js/DockerRunnerTests.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/unit/js/DockerLockManagerTests.js b/test/unit/js/DockerLockManagerTests.js index 9dcf9dc..177d7a2 100644 --- a/test/unit/js/DockerLockManagerTests.js +++ b/test/unit/js/DockerLockManagerTests.js @@ -15,7 +15,7 @@ require('chai').should() require('coffee-script') const modulePath = require('path').join( __dirname, - '../../../app/coffee/DockerLockManager' + '../../../app/js/DockerLockManager' ) describe('LockManager', function() { diff --git a/test/unit/js/DockerRunnerTests.js b/test/unit/js/DockerRunnerTests.js index e43a044..597c5d3 100644 --- a/test/unit/js/DockerRunnerTests.js +++ b/test/unit/js/DockerRunnerTests.js @@ -20,7 +20,7 @@ const { expect } = require('chai') require('coffee-script') const modulePath = require('path').join( __dirname, - '../../../app/coffee/DockerRunner' + '../../../app/js/DockerRunner' ) const Path = require('path') From ee12573b0601b031bf5bac9e7cdb6d6b717a5f99 Mon Sep 17 00:00:00 2001 From: mserranom Date: Thu, 20 Feb 2020 17:24:28 +0100 Subject: [PATCH 24/24] added container monitor cleanup to fix hanging tests --- app/js/DockerRunner.js | 31 +++++++++++++++++++------- npm-shrinkwrap.json | 6 ----- package.json | 1 - test/unit/js/DockerLockManagerTests.js | 1 - test/unit/js/DockerRunnerTests.js | 5 ++++- 5 files changed, 27 insertions(+), 17 deletions(-) diff --git a/app/js/DockerRunner.js b/app/js/DockerRunner.js index 5ac234b..393ce3d 100644 --- a/app/js/DockerRunner.js +++ b/app/js/DockerRunner.js @@ -35,6 +35,9 @@ const usingSiblingContainers = () => x => x.sandboxedCompilesHostDir ) != null +let containerMonitorTimeout +let containerMonitorInterval + module.exports = DockerRunner = { ERR_NOT_DIRECTORY: new Error('not a directory'), ERR_TERMINATED: new Error('terminated'), @@ -646,17 +649,29 @@ module.exports = DockerRunner = { { maxAge: DockerRunner.MAX_CONTAINER_AGE }, 'starting container expiry' ) + + // guarantee only one monitor is running + DockerRunner.stopContainerMonitor() + // randomise the start time const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000) - return setTimeout( - () => - setInterval( - () => DockerRunner.destroyOldContainers(), - (oneHour = 60 * 60 * 1000) - ), + containerMonitorTimeout = setTimeout(() => { + containerMonitorInterval = setInterval( + () => DockerRunner.destroyOldContainers(), + (oneHour = 60 * 60 * 1000) + ) + }, randomDelay) + }, - randomDelay - ) + stopContainerMonitor() { + if (containerMonitorTimeout) { + 
clearTimeout(containerMonitorTimeout)
+      containerMonitorTimeout = undefined
+    }
+    if (containerMonitorInterval) {
+      clearInterval(containerMonitorInterval)
+      containerMonitorInterval = undefined
+    }
   }
 }
 
diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json
index 40fed3c..05e00cd 100644
--- a/npm-shrinkwrap.json
+++ b/npm-shrinkwrap.json
@@ -1536,12 +1536,6 @@
       "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.6.0.tgz",
       "integrity": "sha1-gIs5bhEPU9AhoZpO8fZb4OjjX6M="
     },
-    "coffeescript": {
-      "version": "1.6.0",
-      "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-1.6.0.tgz",
-      "integrity": "sha1-bdTeHrYveE2MjYCWdVLLpUf/2d4=",
-      "dev": true
-    },
     "color-convert": {
       "version": "1.9.3",
       "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
diff --git a/package.json b/package.json
index c38621a..87403ca 100644
--- a/package.json
+++ b/package.json
@@ -44,7 +44,6 @@
     "babel-eslint": "^10.0.3",
     "bunyan": "^0.22.1",
     "chai": "~1.8.1",
-    "coffeescript": "1.6.0",
     "eslint": "^6.6.0",
     "eslint-config-prettier": "^6.10.0",
     "eslint-config-standard": "^14.1.0",
diff --git a/test/unit/js/DockerLockManagerTests.js b/test/unit/js/DockerLockManagerTests.js
index 177d7a2..bc13c5a 100644
--- a/test/unit/js/DockerLockManagerTests.js
+++ b/test/unit/js/DockerLockManagerTests.js
@@ -12,7 +12,6 @@ const SandboxedModule = require('sandboxed-module')
 const sinon = require('sinon')
 require('chai').should()
 
-require('coffee-script')
 const modulePath = require('path').join(
   __dirname,
   '../../../app/js/DockerLockManager'
diff --git a/test/unit/js/DockerRunnerTests.js b/test/unit/js/DockerRunnerTests.js
index 597c5d3..d17d906 100644
--- a/test/unit/js/DockerRunnerTests.js
+++ b/test/unit/js/DockerRunnerTests.js
@@ -17,7 +17,6 @@ const SandboxedModule = require('sandboxed-module')
 const sinon = require('sinon')
 require('chai').should()
 const { expect } = require('chai')
-require('coffee-script')
 const modulePath = require('path').join(
   __dirname,
   '../../../app/js/DockerRunner'
@@ -89,6 +88,10 @@ describe('DockerRunner', function() {
     return (this.Settings.clsi.docker.env = { PATH: 'mock-path' })
   })
 
+  afterEach(function() {
+    this.DockerRunner.stopContainerMonitor()
+  })
+
   describe('run', function() {
     beforeEach(function(done) {
      this.DockerRunner._getContainerOptions = sinon