34 Commits

Author SHA1 Message Date
bb7dbcbf82 add possible rest api clients 2020-10-19 16:42:06 +02:00
41e82efd26 add texlive-full install command 2020-10-19 16:00:36 +02:00
f19eab4c55 update installation 2020-10-19 15:55:04 +02:00
01ad13b5b4 add link to original repo 2020-10-19 15:46:30 +02:00
b7242ba6dd add underscore dependency 2020-10-19 15:46:00 +02:00
Eric Mc Sween
a883084bba Merge pull request #193 from overleaf/dependabot-npm_and_yarn-node-fetch-2.6.1
Bump node-fetch from 2.6.0 to 2.6.1
2020-09-16 12:48:21 -04:00
Eric Mc Sween
0de0cc484d Merge pull request #191 from overleaf/dependabot-npm_and_yarn-bl-4.0.3
Bump bl from 4.0.1 to 4.0.3
2020-09-16 12:48:15 -04:00
Eric Mc Sween
1625126924 Merge pull request #190 from overleaf/dependabot-npm_and_yarn-lodash-4.17.20
Bump lodash from 4.17.15 to 4.17.20
2020-09-16 12:48:07 -04:00
Eric Mc Sween
ef90db025c Merge pull request #194 from overleaf/em-bump-node
Bump Node version to 10.22.1
2020-09-16 12:47:59 -04:00
Eric Mc Sween
1650a61831 Bump Node version to 10.22.1 2020-09-16 12:24:42 -04:00
Eric Mc Sween
cb96c04c21 Merge pull request #192 from overleaf/em-home-volume
Mount /home/tex in an anonymous volume
2020-09-14 09:50:17 -04:00
dependabot[bot]
610b04a738 Bump node-fetch from 2.6.0 to 2.6.1
Bumps [node-fetch](https://github.com/bitinn/node-fetch) from 2.6.0 to 2.6.1.
- [Release notes](https://github.com/bitinn/node-fetch/releases)
- [Changelog](https://github.com/node-fetch/node-fetch/blob/master/docs/CHANGELOG.md)
- [Commits](https://github.com/bitinn/node-fetch/compare/v2.6.0...v2.6.1)

Signed-off-by: dependabot[bot] <support@github.com>
2020-09-12 17:04:49 +00:00
Eric Mc Sween
0a05fa7abc Test anonymous volumes are removed with containers 2020-09-04 11:34:08 -04:00
Eric Mc Sween
c769ae63a2 Mount /home/tex in an anonymous volume
When we mount the container's root filesystem as read-only, mount
an anonymous volume in /home/tex so that it's writable. Our TeX Live
images have cached content in /home/tex. This content will automatically
get copied by Docker into this anonymous volume.
2020-09-04 08:49:56 -04:00
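A minimal dockerode sketch of the mechanism described above (option names are from the Docker Engine API; the image name and surrounding values are illustrative, not the service's actual configuration):

    // With a read-only root filesystem, declaring /home/tex as an anonymous
    // volume makes it writable: Docker creates a fresh volume and copies the
    // image's cached content from /home/tex into it at container creation.
    const containerOptions = {
      Image: 'texlive/texlive:2020', // illustrative image name
      Volumes: { '/home/tex': {} }, // anonymous, writable volume
      HostConfig: {
        ReadonlyRootfs: true,
        Tmpfs: { '/tmp': 'rw,noexec,nosuid,size=65536k' }
      }
    }

Removing the container with container.remove({ v: true }) also deletes the anonymous volume, which is what the _destroyContainer change in the diff below relies on.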
Eric Mc Sween
f4fb979c63 Decaf cleanup: normalize functions
Use function keyword for declarations and arrow functions for callbacks.
2020-09-03 15:58:16 -04:00
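A two-line sketch of the convention, with hypothetical names:

    // Declarations use the function keyword...
    function examineContainer(container) {
      return container.Id
    }
    // ...while callbacks use arrow functions.
    containers.forEach((container) => examineContainer(container))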
Eric Mc Sween
c9590c8cfa Use _.once() instead of ad hoc implementation 2020-09-03 15:52:09 -04:00
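For context, the two patterns side by side (the ad hoc version mirrors the code removed in the diff below; realCallback is a placeholder):

    const _ = require('lodash')

    // Ad hoc: delegate through a variable that is swapped for a no-op
    // after the first call, so the callback only ever fires once.
    let _callback = realCallback
    const callback = function (...args) {
      _callback(...args)
      _callback = function () {}
    }

    // lodash: _.once() returns a wrapper that invokes realCallback at most once.
    const callbackOnce = _.once(realCallback)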
Eric Mc Sween
d9b25cdb18 Fix container monitor cleanup function
The intent here is clearly to clear both the timeout and the interval.
2020-09-03 15:50:45 -04:00
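The fixed shape, as it appears in the DockerRunner.js diff below:

    if (containerMonitorTimeout) {
      clearTimeout(containerMonitorTimeout)
      containerMonitorTimeout = undefined
    }
    if (containerMonitorInterval) {
      clearInterval(containerMonitorInterval) // was: clearInterval(containerMonitorTimeout)
      containerMonitorInterval = undefined // was: containerMonitorTimeout = undefined
    }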
Eric Mc Sween
faad50ff4c Decaf cleanup: simplify variable declarations 2020-09-03 15:50:12 -04:00
Eric Mc Sween
c8371a8ea7 Do not instantiate errors at module load time
This prevents the right stack trace from being captured.
2020-09-03 15:34:19 -04:00
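A minimal illustration of why (hypothetical snippet):

    // An Error captures its stack trace when it is constructed, so a
    // module-level constant points every report at the require() site:
    const ERR_TIMED_OUT = new Error('container timed out')

    // Constructing the error where the failure occurs points the stack
    // trace at the actual call site instead:
    callback(new Error('container timed out'))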
Eric Mc Sween
1708e29faf Decaf cleanup: unused vars 2020-09-03 15:30:07 -04:00
Eric Mc Sween
5b92439356 Decaf cleanup: convert async function to sync
The examineOldContainer() function doesn't need to use callbacks since
it only does synchronous work.
2020-09-03 15:13:04 -04:00
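The resulting call shape, taken from the destroyOldContainers() change in the diff below:

    // Before: callback style, although the work is synchronous.
    DockerRunner.examineOldContainer(container, function (err, name, id, ttl) {
      // use name, id and ttl here
    })

    // After: a plain synchronous return value.
    const { name, id, ttl } = DockerRunner.examineOldContainer(container)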
Eric Mc Sween
d86a856997 Decaf cleanup: camel-case variables 2020-09-03 15:13:04 -04:00
Eric Mc Sween
2a31f8c8d7 Decaf cleanup: simplify null checks 2020-09-03 15:13:04 -04:00
Eric Mc Sween
f2734c86ca Decaf cleanup: remove default callbacks 2020-09-03 15:13:04 -04:00
Eric Mc Sween
73cf107029 Decaf cleanup: remove IIFEs 2020-09-03 15:13:04 -04:00
Eric Mc Sween
92fbb9581e Decaf cleanup: remove __guard__ 2020-09-02 17:06:35 -04:00
Eric Mc Sween
9ab5738ae3 Decaf cleanup: remove unnecessary returns 2020-09-02 16:58:41 -04:00
Eric Mc Sween
ca61354cfe Decaf cleanup: remove unnecessary Array.from() 2020-09-02 16:45:49 -04:00
dependabot[bot]
db1dbdb42f Bump bl from 4.0.1 to 4.0.3
Bumps [bl](https://github.com/rvagg/bl) from 4.0.1 to 4.0.3.
- [Release notes](https://github.com/rvagg/bl/releases)
- [Commits](https://github.com/rvagg/bl/compare/v4.0.1...v4.0.3)

Signed-off-by: dependabot[bot] <support@github.com>
2020-09-02 16:50:44 +00:00
dependabot[bot]
004550f6e6 Bump lodash from 4.17.15 to 4.17.20
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.15 to 4.17.20.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.15...4.17.20)

Signed-off-by: dependabot[bot] <support@github.com>
2020-08-14 05:11:31 +00:00
Christopher Hoskin
205f61ae47 Merge pull request #188 from overleaf/jpa-bump-dev-env-3-3-2-testing
[misc] bump the dev-env to 3.3.2
2020-08-13 11:40:36 +01:00
Christopher Hoskin
a16bb95c4e Merge pull request #189 from overleaf/csh-issue-1123-bump-logger-220
[misc] bump logger-sharelatex to version 2.2.0
2020-08-13 11:28:23 +01:00
Christopher Hoskin
57dfd9c01b [misc] bump logger-sharelatex to version 2.2.0 2020-08-12 15:11:21 +01:00
Jakob Ackermann
f4561c2fe2 [misc] bump the dev-env to 3.3.2 2020-08-10 17:01:11 +01:00
68 changed files with 2024 additions and 1932 deletions


@@ -8,7 +8,7 @@
     "prettier/standard"
   ],
   "parserOptions": {
-    "ecmaVersion": 2017
+    "ecmaVersion": 2018
   },
   "plugins": [
     "mocha",

.github/dependabot.yml (new file)

@@ -0,0 +1,23 @@
+version: 2
+updates:
+  - package-ecosystem: "npm"
+    directory: "/"
+    schedule:
+      interval: "daily"
+    pull-request-branch-name:
+      # Separate sections of the branch name with a hyphen
+      # Docker images use the branch name and do not support slashes in tags
+      # https://github.com/overleaf/google-ops/issues/822
+      # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
+      separator: "-"
+    # Block informal upgrades -- security upgrades use a separate queue.
+    # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
+    open-pull-requests-limit: 0
+    # currently assign team-magma to all dependabot PRs - this may change in
+    # future if we reorganise teams
+    labels:
+      - "dependencies"
+      - "Team-Magma"

.gitignore

@@ -11,3 +11,6 @@ db.sqlite-wal
 db.sqlite-shm
 config/*
 npm-debug.log
+
+# managed by dev-environment$ bin/update_build_scripts
+.npmrc

.nvmrc

@@ -1 +1 @@
-10.21.0
+10.22.1


@@ -2,7 +2,7 @@
 # Instead run bin/update_build_scripts from
 # https://github.com/sharelatex/sharelatex-dev-environment
-FROM node:10.21.0 as base
+FROM node:10.22.1 as base

 WORKDIR /app
 COPY install_deps.sh /app
@@ -15,12 +15,10 @@ FROM base as app
 #wildcard as some files may not be in all repos
 COPY package*.json npm-shrink*.json /app/
-RUN npm install --quiet
+RUN npm ci --quiet

 COPY . /app

 FROM base
 COPY --from=app /app /app

Jenkinsfile (deleted)

@@ -1,131 +0,0 @@
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""
pipeline {
agent any
environment {
GIT_PROJECT = "clsi"
JENKINS_WORKFLOW = "clsi-sharelatex"
TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT"
}
triggers {
pollSCM('* * * * *')
cron(cron_string)
}
stages {
stage('Install') {
steps {
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"pending\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build is underway\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
}
stage('Build') {
steps {
sh 'make build'
}
}
stage('Linting') {
steps {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format'
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint'
}
}
stage('Unit Tests') {
steps {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
}
}
stage('Acceptance Tests') {
steps {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
}
}
stage('Package and docker push') {
steps {
sh 'echo ${BUILD_NUMBER} > build_number.txt'
sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'
withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
}
sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
sh 'docker logout https://gcr.io/overleaf-ops'
}
}
stage('Publish to s3') {
steps {
sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
}
withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
// The deployment process uses this file to figure out the latest build
s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
}
}
}
}
post {
always {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
sh 'make clean'
}
success {
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"success\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build succeeded!\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
failure {
mail(from: "${EMAIL_ALERT_FROM}",
to: "${EMAIL_ALERT_TO}",
subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
body: "Build: ${BUILD_URL}")
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"failure\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build failed\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
}
// The options directive is for configuration that applies to the whole job.
options {
// we'd like to make sure remove old builds, so we don't fill up our storage!
buildDiscarder(logRotator(numToKeepStr:'50'))
// And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
timeout(time: 30, unit: 'MINUTES')
}
}


@@ -25,13 +25,13 @@ clean:
 	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

 format:
-	$(DOCKER_COMPOSE) run --rm test_unit npm run format
+	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format

 format_fix:
-	$(DOCKER_COMPOSE) run --rm test_unit npm run format:fix
+	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix

 lint:
-	$(DOCKER_COMPOSE) run --rm test_unit npm run lint
+	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint

 test: format lint test_unit test_acceptance


@@ -1,6 +1,8 @@
 overleaf/clsi
 ===============

+**Note:** Original repo here: https://github.com/overleaf/clsi
+
 A web api for compiling LaTeX documents in the cloud

 The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). The CLSI listens on the following ports by default:
@@ -34,20 +36,21 @@ The CLSI can be installed and set up as part of the entire [Overleaf stack](http
     $ git clone git@github.com:overleaf/clsi.git

-Then install the require npm modules:
+Then install the require npm modules and run:

     $ npm install
+    $ npm start

-Then compile the coffee script source files:
-
-    $ grunt install
-
-Finally, (after configuring your local database - see the Config section), run the CLSI service:
-
-    $ grunt run
-
 The CLSI should then be running at http://localhost:3013.

+**Note:** to install texlive-full on ubuntu:
+
+    $ sudo apt install texlive-full
+
+Possible REST API clients to test are:
+
+* Postman
+* Insomnia
+
 Config
 ------

app.js

@@ -134,17 +134,16 @@ const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
   }
 })

-app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function(
-  req,
-  res,
-  next
-) {
-  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
-  req.url =
-    `/${req.params.project_id}-${req.params.user_id}/` +
-    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
-  return staticServer(req, res, next)
-})
+app.get(
+  '/project/:project_id/user/:user_id/build/:build_id/output/*',
+  function (req, res, next) {
+    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
+    req.url =
+      `/${req.params.project_id}-${req.params.user_id}/` +
+      OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
+    return staticServer(req, res, next)
+  }
+)

 app.get('/project/:project_id/build/:build_id/output/*', function (
   req,
@@ -208,7 +207,7 @@ if (Settings.processLifespanLimitMs) {
 function runSmokeTest() {
   if (Settings.processTooOld) return
   logger.log('running smoke tests')
-  smokeTest.triggerRun(err => {
+  smokeTest.triggerRun((err) => {
     if (err) logger.error({ err }, 'smoke tests failed')
     setTimeout(runSmokeTest, 30 * 1000)
   })
@@ -301,12 +300,12 @@ loadHttpServer.post('/state/maint', function(req, res, next) {
 const port =
   __guard__(
     Settings.internal != null ? Settings.internal.clsi : undefined,
-    x => x.port
+    (x) => x.port
   ) || 3013
 const host =
   __guard__(
     Settings.internal != null ? Settings.internal.clsi : undefined,
-    x1 => x1.host
+    (x1) => x1.host
   ) || 'localhost'

 const loadTcpPort = Settings.internal.load_balancer_agent.load_port
@@ -314,7 +313,7 @@ const loadHttpPort = Settings.internal.load_balancer_agent.local_port
 if (!module.parent) {
   // Called directly
-  app.listen(port, host, error => {
+  app.listen(port, host, (error) => {
     if (error) {
       logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`)
     } else {


@@ -116,7 +116,7 @@ module.exports = CompileController = {
       compile: {
         status,
         error: (error != null ? error.message : undefined) || error,
-        outputFiles: outputFiles.map(file => ({
+        outputFiles: outputFiles.map((file) => ({
           url:
             `${Settings.apis.clsi.url}/project/${request.project_id}` +
             (request.user_id != null
@@ -197,17 +197,21 @@ module.exports = CompileController = {
     const v = parseFloat(req.query.v)
     const { project_id } = req.params
     const { user_id } = req.params
-    return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
-      error,
-      codePositions
-    ) {
-      if (error != null) {
-        return next(error)
-      }
-      return res.json({
-        code: codePositions
-      })
-    })
+    return CompileManager.syncFromPdf(
+      project_id,
+      user_id,
+      page,
+      h,
+      v,
+      function (error, codePositions) {
+        if (error != null) {
+          return next(error)
+        }
+        return res.json({
+          code: codePositions
+        })
+      }
+    )
   },

   wordcount(req, res, next) {


@@ -61,7 +61,7 @@ module.exports = CompileManager = {
       }
       return LockManager.runWithLock(
         lockFile,
-        releaseLock => CompileManager.doCompile(request, releaseLock),
+        (releaseLock) => CompileManager.doCompile(request, releaseLock),
         callback
       )
     })
@@ -120,7 +120,7 @@ module.exports = CompileManager = {
       }
     }

-    const createTikzFileIfRequired = callback =>
+    const createTikzFileIfRequired = (callback) =>
       TikzManager.checkMainFile(
         compileDir,
         request.rootResourcePath,
@@ -177,9 +177,9 @@ module.exports = CompileManager = {
           request.imageName != null
             ? request.imageName.match(/:(.*)/)
             : undefined,
-          x1 => x1[1]
+          (x1) => x1[1]
         ),
-        x => x.replace(/\./g, '-')
+        (x) => x.replace(/\./g, '-')
       ) || 'default'
     if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
       tag = 'other'
@@ -206,9 +206,7 @@ module.exports = CompileManager = {
       // request was for validation only
       let metric_key, metric_value
       if (request.check === 'validate') {
-        const result = (error != null
-          ? error.code
-          : undefined)
+        const result = (error != null ? error.code : undefined)
           ? 'fail'
           : 'pass'
         error = new Error('validation')
@@ -339,7 +337,7 @@ module.exports = CompileManager = {
     proc.on('error', callback)
     let stderr = ''
-    proc.stderr.setEncoding('utf8').on('data', chunk => (stderr += chunk))
+    proc.stderr.setEncoding('utf8').on('data', (chunk) => (stderr += chunk))

     return proc.on('close', function (code) {
       if (code === 0) {
@@ -360,7 +358,7 @@ module.exports = CompileManager = {
       if (err != null) {
         return callback(err)
       }
-      const allDirs = Array.from(files).map(file => Path.join(root, file))
+      const allDirs = Array.from(files).map((file) => Path.join(root, file))
       return callback(null, allDirs)
     })
   },
@@ -512,7 +510,7 @@ module.exports = CompileManager = {
     const timeout = 60 * 1000 // increased to allow for large projects
     const compileName = getCompileName(project_id, user_id)
     const compileGroup = 'synctex'
-    CompileManager._checkFileExists(directory, 'output.synctex.gz', error => {
+    CompileManager._checkFileExists(directory, 'output.synctex.gz', (error) => {
       if (error) {
         return callback(error)
       }


@@ -1,21 +1,3 @@
-/* eslint-disable
-    camelcase,
-    handle-callback-err,
-    no-return-assign,
-    no-unused-vars,
-*/
-// TODO: This file was created by bulk-decaffeinate.
-// Fix any style issues and re-enable lint.
-/*
- * decaffeinate suggestions:
- * DS101: Remove unnecessary use of Array.from
- * DS102: Remove unnecessary code created because of implicit returns
- * DS103: Rewrite code to no longer use __guard__
- * DS205: Consider reworking code to avoid use of IIFEs
- * DS207: Consider shorter variations of null checks
- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
- */
-let DockerRunner, oneHour
 const Settings = require('settings-sharelatex')
 const logger = require('logger-sharelatex')
 const Docker = require('dockerode')
@@ -27,25 +9,23 @@ const fs = require('fs')
 const Path = require('path')
 const _ = require('lodash')

+const ONE_HOUR_IN_MS = 60 * 60 * 1000
+
 logger.info('using docker runner')

-const usingSiblingContainers = () =>
-  __guard__(
-    Settings != null ? Settings.path : undefined,
-    x => x.sandboxedCompilesHostDir
-  ) != null
+function usingSiblingContainers() {
+  return (
+    Settings != null &&
+    Settings.path != null &&
+    Settings.path.sandboxedCompilesHostDir != null
+  )
+}

 let containerMonitorTimeout
 let containerMonitorInterval

-module.exports = DockerRunner = {
-  ERR_NOT_DIRECTORY: new Error('not a directory'),
-  ERR_TERMINATED: new Error('terminated'),
-  ERR_EXITED: new Error('exited'),
-  ERR_TIMED_OUT: new Error('container timed out'),
+const DockerRunner = {
   run(
-    project_id,
+    projectId,
     command,
     directory,
     image,
@@ -54,10 +34,6 @@
     compileGroup,
     callback
   ) {
-    let name
-    if (callback == null) {
-      callback = function(error, output) {}
-    }
     if (usingSiblingContainers()) {
       const _newPath = Settings.path.sandboxedCompilesHostDir
       logger.log(
@@ -74,16 +50,13 @@
       )
     }

-    const volumes = {}
-    volumes[directory] = '/compile'
+    const volumes = { [directory]: '/compile' }

-    command = Array.from(command).map(arg =>
-      __guardMethod__(arg.toString(), 'replace', o =>
-        o.replace('$COMPILE_DIR', '/compile')
-      )
+    command = command.map((arg) =>
+      arg.toString().replace('$COMPILE_DIR', '/compile')
     )

     if (image == null) {
-      ;({ image } = Settings.clsi.docker)
+      image = Settings.clsi.docker.image
     }

     if (
@@ -107,25 +80,27 @@
       compileGroup
     )
     const fingerprint = DockerRunner._fingerprintContainer(options)
-    options.name = name = `project-${project_id}-${fingerprint}`
+    const name = `project-${projectId}-${fingerprint}`
+    options.name = name

     // logOptions = _.clone(options)
     // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
-    logger.log({ project_id }, 'running docker container')
-    DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
-      error,
-      output
-    ) {
+    logger.log({ projectId }, 'running docker container')
+    DockerRunner._runAndWaitForContainer(
+      options,
+      volumes,
+      timeout,
+      (error, output) => {
        if (error && error.statusCode === 500) {
          logger.log(
-           { err: error, project_id },
+           { err: error, projectId },
            'error running container so destroying and retrying'
          )
-          return DockerRunner.destroyContainer(name, null, true, function(error) {
+          DockerRunner.destroyContainer(name, null, true, (error) => {
            if (error != null) {
              return callback(error)
            }
-            return DockerRunner._runAndWaitForContainer(
+            DockerRunner._runAndWaitForContainer(
              options,
              volumes,
              timeout,
@@ -133,108 +108,93 @@
            )
          })
        } else {
-          return callback(error, output)
+          callback(error, output)
        }
-    })
+      }
+    )

+    // pass back the container name to allow it to be killed
     return name
-  }, // pass back the container name to allow it to be killed
+  },

-  kill(container_id, callback) {
-    if (callback == null) {
-      callback = function(error) {}
-    }
-    logger.log({ container_id }, 'sending kill signal to container')
-    const container = dockerode.getContainer(container_id)
-    return container.kill(function(error) {
+  kill(containerId, callback) {
+    logger.log({ containerId }, 'sending kill signal to container')
+    const container = dockerode.getContainer(containerId)
+    container.kill((error) => {
       if (
         error != null &&
-        __guardMethod__(error != null ? error.message : undefined, 'match', o =>
-          o.match(/Cannot kill container .* is not running/)
-        )
+        error.message != null &&
+        error.message.match(/Cannot kill container .* is not running/)
       ) {
         logger.warn(
-          { err: error, container_id },
+          { err: error, containerId },
           'container not running, continuing'
         )
         error = null
       }
       if (error != null) {
-        logger.error({ err: error, container_id }, 'error killing container')
-        return callback(error)
+        logger.error({ err: error, containerId }, 'error killing container')
+        callback(error)
       } else {
-        return callback()
+        callback()
       }
     })
   },

   _runAndWaitForContainer(options, volumes, timeout, _callback) {
-    if (_callback == null) {
-      _callback = function(error, output) {}
-    }
-    const callback = function(...args) {
-      _callback(...Array.from(args || []))
-      // Only call the callback once
-      return (_callback = function() {})
-    }
+    const callback = _.once(_callback)

     const { name } = options

     let streamEnded = false
     let containerReturned = false
     let output = {}

-    const callbackIfFinished = function() {
+    function callbackIfFinished() {
       if (streamEnded && containerReturned) {
-        return callback(null, output)
+        callback(null, output)
       }
     }

-    const attachStreamHandler = function(error, _output) {
+    function attachStreamHandler(error, _output) {
       if (error != null) {
         return callback(error)
       }
       output = _output
       streamEnded = true
-      return callbackIfFinished()
+      callbackIfFinished()
     }

-    return DockerRunner.startContainer(
+    DockerRunner.startContainer(
       options,
       volumes,
       attachStreamHandler,
-      function(error, containerId) {
+      (error, containerId) => {
         if (error != null) {
           return callback(error)
         }

-        return DockerRunner.waitForContainer(name, timeout, function(
-          error,
-          exitCode
-        ) {
-          let err
+        DockerRunner.waitForContainer(name, timeout, (error, exitCode) => {
           if (error != null) {
             return callback(error)
           }
           if (exitCode === 137) {
             // exit status from kill -9
-            err = DockerRunner.ERR_TERMINATED
+            const err = new Error('terminated')
             err.terminated = true
             return callback(err)
           }
           if (exitCode === 1) {
             // exit status from chktex
-            err = DockerRunner.ERR_EXITED
+            const err = new Error('exited')
             err.code = exitCode
             return callback(err)
           }
           containerReturned = true
-          __guard__(
-            options != null ? options.HostConfig : undefined,
-            x => (x.SecurityOpt = null)
-          ) // small log line
-          logger.log({ err, exitCode, options }, 'docker container has exited')
-          return callbackIfFinished()
+          if (options != null && options.HostConfig != null) {
+            options.HostConfig.SecurityOpt = null
+          }
+          logger.log({ exitCode, options }, 'docker container has exited')
+          callbackIfFinished()
         })
       }
     )
@@ -248,13 +208,11 @@
     environment,
     compileGroup
   ) {
-    let m, year
-    let key, value, hostVol, dockerVol
     const timeoutInSeconds = timeout / 1000

     const dockerVolumes = {}
-    for (hostVol in volumes) {
-      dockerVol = volumes[hostVol]
+    for (const hostVol in volumes) {
+      const dockerVol = volumes[hostVol]
       dockerVolumes[dockerVol] = {}

       if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
@@ -265,17 +223,14 @@
     // merge settings and environment parameter
     const env = {}
     for (const src of [Settings.clsi.docker.env, environment || {}]) {
-      for (key in src) {
-        value = src[key]
+      for (const key in src) {
+        const value = src[key]
         env[key] = value
       }
     }

     // set the path based on the image year
-    if ((m = image.match(/:([0-9]+)\.[0-9]+/))) {
-      year = m[1]
-    } else {
-      year = '2014'
-    }
+    const match = image.match(/:([0-9]+)\.[0-9]+/)
+    const year = match ? match[1] : '2014'
     env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
     const options = {
       Cmd: command,
@@ -285,23 +240,11 @@
       NetworkDisabled: true,
       Memory: 1024 * 1024 * 1024 * 1024, // 1 Gb
       User: Settings.clsi.docker.user,
-      Env: (() => {
-        const result = []
-        for (key in env) {
-          value = env[key]
-          result.push(`${key}=${value}`)
-        }
-        return result
-      })(), // convert the environment hash to an array
+      Env: Object.entries(env).map(([key, value]) => `${key}=${value}`),
       HostConfig: {
-        Binds: (() => {
-          const result1 = []
-          for (hostVol in volumes) {
-            dockerVol = volumes[hostVol]
-            result1.push(`${hostVol}:${dockerVol}`)
-          }
-          return result1
-        })(),
+        Binds: Object.entries(volumes).map(
+          ([hostVol, dockerVol]) => `${hostVol}:${dockerVol}`
+        ),
         LogConfig: { Type: 'none', Config: {} },
         Ulimits: [
           {
@@ -315,10 +258,7 @@
       }
     }

-    if (
-      (Settings.path != null ? Settings.path.synctexBinHostPath : undefined) !=
-      null
-    ) {
+    if (Settings.path != null && Settings.path.synctexBinHostPath != null) {
       options.HostConfig.Binds.push(
         `${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
       )
@@ -337,6 +277,7 @@
     if (Settings.clsi.docker.Readonly) {
       options.HostConfig.ReadonlyRootfs = true
       options.HostConfig.Tmpfs = { '/tmp': 'rw,noexec,nosuid,size=65536k' }
+      options.Volumes['/home/tex'] = {}
     }

     // Allow per-compile group overriding of individual settings
@@ -345,8 +286,7 @@
       Settings.clsi.docker.compileGroupConfig[compileGroup]
     ) {
       const override = Settings.clsi.docker.compileGroupConfig[compileGroup]
-      let key
-      for (key in override) {
+      for (const key in override) {
         _.set(options, key, override[key])
       }
     }
@@ -357,25 +297,22 @@
   _fingerprintContainer(containerOptions) {
     // Yay, Hashing!
     const json = JSON.stringify(containerOptions)
-    return crypto
-      .createHash('md5')
-      .update(json)
-      .digest('hex')
+    return crypto.createHash('md5').update(json).digest('hex')
   },

   startContainer(options, volumes, attachStreamHandler, callback) {
-    return LockManager.runWithLock(
+    LockManager.runWithLock(
       options.name,
-      releaseLock =>
+      (releaseLock) =>
         // Check that volumes exist before starting the container.
         // When a container is started with volume pointing to a
         // non-existent directory then docker creates the directory but
         // with root ownership.
-        DockerRunner._checkVolumes(options, volumes, function(err) {
+        DockerRunner._checkVolumes(options, volumes, (err) => {
          if (err != null) {
            return releaseLock(err)
          }
-          return DockerRunner._startContainer(
+          DockerRunner._startContainer(
            options,
            volumes,
            attachStreamHandler,
@@ -389,93 +326,85 @@
   // Check that volumes exist and are directories
   _checkVolumes(options, volumes, callback) {
-    if (callback == null) {
-      callback = function(error, containerName) {}
-    }
     if (usingSiblingContainers()) {
       // Server Pro, with sibling-containers active, skip checks
       return callback(null)
     }

     const checkVolume = (path, cb) =>
-      fs.stat(path, function(err, stats) {
+      fs.stat(path, (err, stats) => {
         if (err != null) {
           return cb(err)
         }
-        if (!(stats != null ? stats.isDirectory() : undefined)) {
-          return cb(DockerRunner.ERR_NOT_DIRECTORY)
+        if (!stats.isDirectory()) {
+          return cb(new Error('not a directory'))
         }
-        return cb()
+        cb()
       })
     const jobs = []
     for (const vol in volumes) {
-      ;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
+      jobs.push((cb) => checkVolume(vol, cb))
     }
-    return async.series(jobs, callback)
+    async.series(jobs, callback)
   },

   _startContainer(options, volumes, attachStreamHandler, callback) {
-    if (callback == null) {
-      callback = function(error, output) {}
-    }
     callback = _.once(callback)
     const { name } = options

     logger.log({ container_name: name }, 'starting container')
     const container = dockerode.getContainer(name)

-    const createAndStartContainer = () =>
-      dockerode.createContainer(options, function(error, container) {
+    function createAndStartContainer() {
+      dockerode.createContainer(options, (error, container) => {
         if (error != null) {
           return callback(error)
         }
-        return startExistingContainer()
+        startExistingContainer()
       })
+    }

-    var startExistingContainer = () =>
+    function startExistingContainer() {
       DockerRunner.attachToContainer(
         options.name,
         attachStreamHandler,
-        function(error) {
+        (error) => {
           if (error != null) {
             return callback(error)
           }
-          return container.start(function(error) {
-            if (
-              error != null &&
-              (error != null ? error.statusCode : undefined) !== 304
-            ) {
-              // already running
-              return callback(error)
+          container.start((error) => {
+            if (error != null && error.statusCode !== 304) {
+              callback(error)
            } else {
-              return callback()
+              // already running
+              callback()
            }
          })
        }
      )
+    }

-    return container.inspect(function(error, stats) {
-      if ((error != null ? error.statusCode : undefined) === 404) {
-        return createAndStartContainer()
+    container.inspect((error, stats) => {
+      if (error != null && error.statusCode === 404) {
+        createAndStartContainer()
       } else if (error != null) {
         logger.err(
           { container_name: name, error },
           'unable to inspect container to start'
         )
-        return callback(error)
+        callback(error)
       } else {
-        return startExistingContainer()
+        startExistingContainer()
       }
     })
   },

   attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
     const container = dockerode.getContainer(containerId)
-    return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
-      error,
-      stream
-    ) {
+    container.attach({ stdout: 1, stderr: 1, stream: 1 }, (error, stream) => {
       if (error != null) {
         logger.error(
-          { err: error, container_id: containerId },
+          { err: error, containerId },
           'error attaching to container'
         )
         return attachStartCallback(error)
@@ -483,10 +412,10 @@
         attachStartCallback()
       }

-      logger.log({ container_id: containerId }, 'attached to container')
+      logger.log({ containerId }, 'attached to container')

       const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
-      const createStringOutputStream = function(name) {
+      function createStringOutputStream(name) {
         return {
           data: '',
           overflowed: false,
@@ -495,18 +424,18 @@
               return
             }
             if (this.data.length < MAX_OUTPUT) {
-              return (this.data += data)
+              this.data += data
             } else {
               logger.error(
                 {
-                  container_id: containerId,
+                  containerId,
                   length: this.data.length,
                   maxLen: MAX_OUTPUT
                 },
                 `${name} exceeds max size`
               )
               this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
-              return (this.overflowed = true)
+              this.overflowed = true
             }
           }
           // kill container if too much output
@@ -519,63 +448,52 @@
       container.modem.demuxStream(stream, stdout, stderr)

-      stream.on('error', err =>
+      stream.on('error', (err) =>
         logger.error(
-          { err, container_id: containerId },
+          { err, containerId },
           'error reading from container stream'
         )
       )

-      return stream.on('end', () =>
+      stream.on('end', () =>
         attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
       )
     })
   },

   waitForContainer(containerId, timeout, _callback) {
-    if (_callback == null) {
-      _callback = function(error, exitCode) {}
-    }
-    const callback = function(...args) {
-      _callback(...Array.from(args || []))
-      // Only call the callback once
-      return (_callback = function() {})
-    }
+    const callback = _.once(_callback)

     const container = dockerode.getContainer(containerId)

     let timedOut = false
-    const timeoutId = setTimeout(function() {
+    const timeoutId = setTimeout(() => {
       timedOut = true
-      logger.log(
-        { container_id: containerId },
-        'timeout reached, killing container'
-      )
-      return container.kill(function() {})
+      logger.log({ containerId }, 'timeout reached, killing container')
+      container.kill((err) => {
+        logger.warn({ err, containerId }, 'failed to kill container')
+      })
     }, timeout)

-    logger.log({ container_id: containerId }, 'waiting for docker container')
-    return container.wait(function(error, res) {
+    logger.log({ containerId }, 'waiting for docker container')
+    container.wait((error, res) => {
       if (error != null) {
         clearTimeout(timeoutId)
-        logger.error(
-          { err: error, container_id: containerId },
-          'error waiting for container'
-        )
+        logger.error({ err: error, containerId }, 'error waiting for container')
         return callback(error)
       }
       if (timedOut) {
         logger.log({ containerId }, 'docker container timed out')
-        error = DockerRunner.ERR_TIMED_OUT
+        error = new Error('container timed out')
         error.timedout = true
-        return callback(error)
+        callback(error)
       } else {
         clearTimeout(timeoutId)
         logger.log(
-          { container_id: containerId, exitCode: res.StatusCode },
+          { containerId, exitCode: res.StatusCode },
           'docker container returned'
         )
-        return callback(null, res.StatusCode)
+        callback(null, res.StatusCode)
       }
     })
   },
@@ -587,12 +505,9 @@
     // async exception, but if you delete by id it just does a normal
     // error callback. We fall back to deleting by name if no id is
     // supplied.
-    if (callback == null) {
-      callback = function(error) {}
-    }
-    return LockManager.runWithLock(
+    LockManager.runWithLock(
       containerName,
-      releaseLock =>
+      (releaseLock) =>
         DockerRunner._destroyContainer(
           containerId || containerName,
           shouldForce,
@@ -603,46 +518,31 @@
   },

   _destroyContainer(containerId, shouldForce, callback) {
-    if (callback == null) {
-      callback = function(error) {}
-    }
-    logger.log({ container_id: containerId }, 'destroying docker container')
+    logger.log({ containerId }, 'destroying docker container')
     const container = dockerode.getContainer(containerId)
-    return container.remove({ force: shouldForce === true }, function(error) {
-      if (
-        error != null &&
-        (error != null ? error.statusCode : undefined) === 404
-      ) {
+    container.remove({ force: shouldForce === true, v: true }, (error) => {
+      if (error != null && error.statusCode === 404) {
         logger.warn(
-          { err: error, container_id: containerId },
+          { err: error, containerId },
          'container not found, continuing'
        )
        error = null
      }
      if (error != null) {
-        logger.error(
-          { err: error, container_id: containerId },
-          'error destroying container'
-        )
+        logger.error({ err: error, containerId }, 'error destroying container')
      } else {
-        logger.log({ container_id: containerId }, 'destroyed container')
+        logger.log({ containerId }, 'destroyed container')
      }
-      return callback(error)
+      callback(error)
    })
  },

   // handle expiry of docker containers
-  MAX_CONTAINER_AGE:
-    Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000),
+  MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge || ONE_HOUR_IN_MS,

-  examineOldContainer(container, callback) {
-    if (callback == null) {
-      callback = function(error, name, id, ttl) {}
-    }
-    const name =
-      container.Name ||
-      (container.Names != null ? container.Names[0] : undefined)
+  examineOldContainer(container) {
+    const name = container.Name || (container.Names && container.Names[0])
     const created = container.Created * 1000 // creation time is returned in seconds
     const now = Date.now()
     const age = now - created
@@ -652,39 +552,29 @@
       { containerName: name, created, now, age, maxAge, ttl },
       'checking whether to destroy container'
     )
-    return callback(null, name, container.Id, ttl)
+    return { name, id: container.Id, ttl }
   },

   destroyOldContainers(callback) {
-    if (callback == null) {
-      callback = function(error) {}
-    }
-    return dockerode.listContainers({ all: true }, function(error, containers) {
+    dockerode.listContainers({ all: true }, (error, containers) => {
       if (error != null) {
         return callback(error)
       }
       const jobs = []
-      for (const container of Array.from(containers || [])) {
-        ;(container =>
-          DockerRunner.examineOldContainer(container, function(
-            err,
-            name,
-            id,
-            ttl
-          ) {
-            if (name.slice(0, 9) === '/project-' && ttl <= 0) {
-              // strip the / prefix
-              // the LockManager uses the plain container name
-              name = name.slice(1)
-              return jobs.push(cb =>
-                DockerRunner.destroyContainer(name, id, false, () => cb())
-              )
-            }
-          }))(container)
+      for (const container of containers) {
+        const { name, id, ttl } = DockerRunner.examineOldContainer(container)
+        if (name.slice(0, 9) === '/project-' && ttl <= 0) {
+          // strip the / prefix
+          // the LockManager uses the plain container name
+          const plainName = name.slice(1)
+          jobs.push((cb) =>
+            DockerRunner.destroyContainer(plainName, id, false, () => cb())
+          )
+        }
       }
       // Ignore errors because some containers get stuck but
       // will be destroyed next time
-      return async.series(jobs, callback)
+      async.series(jobs, callback)
     })
   },
@@ -701,8 +591,13 @@
     const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
     containerMonitorTimeout = setTimeout(() => {
       containerMonitorInterval = setInterval(
-        () => DockerRunner.destroyOldContainers(),
-        (oneHour = 60 * 60 * 1000)
+        () =>
+          DockerRunner.destroyOldContainers((err) => {
+            if (err) {
+              logger.error({ err }, 'failed to destroy old containers')
+            }
+          }),
+        ONE_HOUR_IN_MS
       )
     }, randomDelay)
   },
@@ -713,27 +608,12 @@
     containerMonitorTimeout = undefined
   }
   if (containerMonitorInterval) {
-    clearInterval(containerMonitorTimeout)
-    containerMonitorTimeout = undefined
+    clearInterval(containerMonitorInterval)
+    containerMonitorInterval = undefined
   }
 }

 DockerRunner.startContainerMonitor()

-function __guard__(value, transform) {
-  return typeof value !== 'undefined' && value !== null
-    ? transform(value)
-    : undefined
-}
-function __guardMethod__(obj, methodName, transform) {
-  if (
-    typeof obj !== 'undefined' &&
-    obj !== null &&
-    typeof obj[methodName] === 'function'
-  ) {
-    return transform(obj, methodName)
-  } else {
-    return undefined
-  }
-}
+module.exports = DockerRunner


@@ -96,13 +96,13 @@ module.exports = LatexRunner = {
         }
         const runs =
           __guard__(
-            __guard__(output != null ? output.stderr : undefined, x1 =>
+            __guard__(output != null ? output.stderr : undefined, (x1) =>
               x1.match(/^Run number \d+ of .*latex/gm)
             ),
-            x => x.length
+            (x) => x.length
           ) || 0
         const failed =
-          __guard__(output != null ? output.stdout : undefined, x2 =>
+          __guard__(output != null ? output.stdout : undefined, (x2) =>
             x2.match(/^Latexmk: Errors/m)
           ) != null
             ? 1
@@ -122,21 +122,21 @@
             stderr != null
               ? stderr.match(/Percent of CPU this job got: (\d+)/m)
               : undefined,
-            x3 => x3[1]
+            (x3) => x3[1]
           ) || 0
         timings['cpu-time'] =
           __guard__(
             stderr != null
               ? stderr.match(/User time.*: (\d+.\d+)/m)
               : undefined,
-            x4 => x4[1]
+            (x4) => x4[1]
           ) || 0
         timings['sys-time'] =
           __guard__(
             stderr != null
               ? stderr.match(/System time.*: (\d+.\d+)/m)
               : undefined,
-            x5 => x5[1]
+            (x5) => x5[1]
           ) || 0
         // record output files
         LatexRunner.writeLogOutput(project_id, directory, output, () => {
@@ -153,7 +153,7 @@
     // internal method for writing non-empty log files
     function _writeFile(file, content, cb) {
       if (content && content.length > 0) {
-        fs.writeFile(file, content, err => {
+        fs.writeFile(file, content, (err) => {
           if (err) {
             logger.error({ project_id, file }, 'error writing log file') // don't fail on error
           }
@@ -202,7 +202,7 @@
     return (
       __guard__(
         Settings != null ? Settings.clsi : undefined,
-        x => x.latexmkCommandPrefix
+        (x) => x.latexmkCommandPrefix
       ) || []
     ).concat(args)
   },


@@ -37,7 +37,7 @@ module.exports = CommandRunner = {
     } else {
       callback = _.once(callback)
     }
-    command = Array.from(command).map(arg =>
+    command = Array.from(command).map((arg) =>
       arg.toString().replace('$COMPILE_DIR', directory)
     )
     logger.log({ project_id, command, directory }, 'running command')
@@ -58,7 +58,7 @@
     const proc = spawn(command[0], command.slice(1), { cwd: directory, env })

     let stdout = ''
-    proc.stdout.setEncoding('utf8').on('data', data => (stdout += data))
+    proc.stdout.setEncoding('utf8').on('data', (data) => (stdout += data))

     proc.on('error', function (err) {
       logger.err(


@@ -99,13 +99,16 @@ module.exports = OutputCacheManager = {
       (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
       (Settings.clsi != null ? Settings.clsi.strace : undefined)
     ) {
-      OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
-        err
-      ) {
-        if (err != null) {
-          return logger.warn({ err }, 'erroring archiving log files')
-        }
-      })
+      OutputCacheManager.archiveLogs(
+        outputFiles,
+        compileDir,
+        buildId,
+        function (err) {
+          if (err != null) {
+            return logger.warn({ err }, 'erroring archiving log files')
+          }
+        }
+      )
     }
     // make the new cache directory
@@ -280,7 +283,7 @@
       // we can get the build time from the first part of the directory name DDDD-RRRR
       // DDDD is date and RRRR is random bytes
       const dirTime = parseInt(
-        __guard__(dir.split('-'), x => x[0]),
+        __guard__(dir.split('-'), (x) => x[0]),
         16
       )
       const age = currentTime - dirTime


@@ -44,7 +44,7 @@ module.exports = OutputFileFinder = {
       if (!incomingResources[file]) {
         outputFiles.push({
           path: file,
-          type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
+          type: __guard__(file.match(/\.([^\.]+)$/), (x) => x[1])
         })
       }
     }
@@ -87,7 +87,7 @@
     const proc = spawn('find', args)
     let stdout = ''
-    proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
+    proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
     proc.on('error', callback)
     return proc.on('close', function (code) {
       if (code !== 0) {


@@ -77,7 +77,7 @@ module.exports = OutputFileOptimiser = {
     const timer = new Metrics.Timer('qpdf')
     const proc = spawn('qpdf', args)
     let stdout = ''
-    proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
+    proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
     callback = _.once(callback) // avoid double call back for error and close event
     proc.on('error', function (err) {
       logger.warn({ err, args }, 'qpdf failed')


@@ -50,7 +50,7 @@ module.exports = ProjectPersistenceManager = {
     if (callback == null) {
       callback = function (error) {}
     }
-    const job = cb =>
+    const job = (cb) =>
       db.Project.findOrCreate({ where: { project_id } })
         .spread((project, created) =>
           project
@@ -74,8 +74,8 @@
         return callback(error)
       }
       logger.log({ project_ids }, 'clearing expired projects')
-      const jobs = Array.from(project_ids || []).map(project_id =>
-        (project_id => callback =>
+      const jobs = Array.from(project_ids || []).map((project_id) =>
+        ((project_id) => (callback) =>
           ProjectPersistenceManager.clearProjectFromCache(project_id, function (
             err
           ) {
@@ -91,7 +91,7 @@
         }
         return CompileManager.clearExpiredProjects(
           ProjectPersistenceManager.EXPIRY_TIMEOUT,
-          error => callback()
+          (error) => callback()
         )
       })
     })
@@ -148,7 +148,7 @@
       callback = function (error) {}
     }
     logger.log({ project_id }, 'clearing project from database')
-    const job = cb =>
+    const job = (cb) =>
       db.Project.destroy({ where: { project_id } })
         .then(() => cb())
         .error(cb)
@@ -166,10 +166,10 @@
       const q = {}
       q[db.op.lt] = keepProjectsFrom
       return db.Project.findAll({ where: { lastAccessed: q } })
-        .then(projects =>
+        .then((projects) =>
           cb(
             null,
-            projects.map(project => project.project_id)
+            projects.map((project) => project.project_id)
           )
         )
         .error(cb)


@@ -56,7 +56,9 @@ module.exports = ResourceStateManager = {
})
} else {
logger.log({ state, basePath }, 'writing sync state')
-const resourceList = Array.from(resources).map(resource => resource.path)
+const resourceList = Array.from(resources).map(
+(resource) => resource.path
+)
return fs.writeFile(
stateFile,
[...Array.from(resourceList), `stateHash:${state}`].join('\n'),
@@ -86,7 +88,7 @@ module.exports = ResourceStateManager = {
)
}
const array =
-__guard__(result != null ? result.toString() : undefined, x =>
+__guard__(result != null ? result.toString() : undefined, (x) =>
x.split('\n')
) || []
const adjustedLength = Math.max(array.length, 1)
@@ -102,7 +104,7 @@ module.exports = ResourceStateManager = {
new Errors.FilesOutOfSyncError('invalid state for incremental update')
)
} else {
-const resources = Array.from(resourceList).map(path => ({ path }))
+const resources = Array.from(resourceList).map((path) => ({ path }))
return callback(null, resources)
}
})
@@ -116,7 +118,7 @@ module.exports = ResourceStateManager = {
}
for (file of Array.from(resources || [])) {
for (const dir of Array.from(
-__guard__(file != null ? file.path : undefined, x => x.split('/'))
+__guard__(file != null ? file.path : undefined, (x) => x.split('/'))
)) {
if (dir === '..') {
return callback(new Error('relative path in resource file list'))
@@ -129,8 +131,8 @@ module.exports = ResourceStateManager = {
seenFile[file] = true
}
const missingFiles = Array.from(resources)
-.filter(resource => !seenFile[resource.path])
-.map(resource => resource.path)
+.filter((resource) => !seenFile[resource.path])
+.map((resource) => resource.path)
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
logger.err(
{ missingFiles, basePath, allFiles, resources },
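
Note: `__guard__`, seen throughout this diff, is the helper decaffeinate generates for CoffeeScript's soak operator (`result?.toString()`). Its usual shape, for reference:

    // Apply `transform` only when `value` is neither null nor undefined,
    // mirroring CoffeeScript's `?.` soak; otherwise yield undefined.
    function __guard__(value, transform) {
      return typeof value !== 'undefined' && value !== null
        ? transform(value)
        : undefined
    }

    // e.g. __guard__(result != null ? result.toString() : undefined, (x) =>
    //   x.split('\n')
    // ) || []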

---

@@ -109,13 +109,13 @@ module.exports = ResourceWriter = {
if (callback == null) {
callback = function (error) {}
}
-return this._createDirectory(basePath, error => {
+return this._createDirectory(basePath, (error) => {
if (error != null) {
return callback(error)
}
-const jobs = Array.from(resources).map(resource =>
-(resource => {
-return callback =>
+const jobs = Array.from(resources).map((resource) =>
+((resource) => {
+return (callback) =>
this._writeResourceToDisk(project_id, resource, basePath, callback)
})(resource)
)
@@ -127,17 +127,17 @@ module.exports = ResourceWriter = {
if (callback == null) {
callback = function (error) {}
}
-return this._createDirectory(basePath, error => {
+return this._createDirectory(basePath, (error) => {
if (error != null) {
return callback(error)
}
-return this._removeExtraneousFiles(resources, basePath, error => {
+return this._removeExtraneousFiles(resources, basePath, (error) => {
if (error != null) {
return callback(error)
}
-const jobs = Array.from(resources).map(resource =>
-(resource => {
-return callback =>
+const jobs = Array.from(resources).map((resource) =>
+((resource) => {
+return (callback) =>
this._writeResourceToDisk(
project_id,
resource,
@@ -242,7 +242,7 @@ module.exports = ResourceWriter = {
should_delete = true
}
if (should_delete) {
-return jobs.push(callback =>
+return jobs.push((callback) =>
ResourceWriter._deleteFileIfNotDirectory(
Path.join(basePath, path),
callback
@@ -303,7 +303,9 @@ module.exports = ResourceWriter = {
if (error != null) {
return callback(error)
}
-return fs.mkdir(Path.dirname(path), { recursive: true }, function(error) {
+return fs.mkdir(Path.dirname(path), { recursive: true }, function (
+error
+) {
if (error != null) {
return callback(error)
}
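
Note: the `((resource) => { return (callback) => … })(resource)` wrappers are decaffeinate artifacts of CoffeeScript's `do` keyword, pinning `resource` per iteration; with `const` scoping they are unnecessary. A sketch of the same jobs pattern without the IIFE, assuming the async library drains the array:

    const async = require('async')

    function writeResources(resources, writeOne, callback) {
      // Each job closes over its own `resource`; no IIFE needed with const.
      const jobs = resources.map((resource) => (cb) => writeOne(resource, cb))
      async.series(jobs, callback)
    }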

---

@@ -26,7 +26,7 @@ module.exports = ForbidSymlinks = function(staticFn, root, options) {
const basePath = Path.resolve(root)
return function (req, res, next) {
let file, project_id, result
-const path = __guard__(url.parse(req.url), x => x.pathname)
+const path = __guard__(url.parse(req.url), (x) => x.pathname)
// check that the path is of the form /project_id_or_name/path/to/file.log
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
project_id = result[1]

---

@@ -42,7 +42,10 @@ module.exports = TikzManager = {
if (error != null) {
return callback(error)
}
-return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
+return SafeReader.readFile(path, 65536, 'utf8', function (
+error,
+content
+) {
if (error != null) {
return callback(error)
}

---

@@ -60,8 +60,8 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
-const jobs = Array.from(urls || []).map(url =>
-(url => callback =>
+const jobs = Array.from(urls || []).map((url) =>
+((url) => (callback) =>
UrlCache._clearUrlFromCache(project_id, url, function (error) {
if (error != null) {
logger.error(
@@ -98,7 +98,7 @@ module.exports = UrlCache = {
return UrlFetcher.pipeUrlToFileWithRetry(
url,
UrlCache._cacheFilePathForUrl(project_id, url),
-error => {
+(error) => {
if (error != null) {
return callback(error)
}
@@ -106,7 +106,7 @@ module.exports = UrlCache = {
project_id,
url,
lastModified,
-error => {
+(error) => {
if (error != null) {
return callback(error)
}
@@ -153,14 +153,7 @@ module.exports = UrlCache = {
},
_cacheFileNameForUrl(project_id, url) {
-return (
-project_id +
-':' +
-crypto
-.createHash('md5')
-.update(url)
-.digest('hex')
-)
+return project_id + ':' + crypto.createHash('md5').update(url).digest('hex')
},
_cacheFilePathForUrl(project_id, url) {
@@ -197,7 +190,9 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
-return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
+return UrlCache._deleteUrlCacheFromDisk(project_id, url, function (
+error
+) {
if (error != null) {
return callback(error)
}
@@ -226,9 +221,9 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function (error, urlDetails) {}
}
-const job = cb =>
+const job = (cb) =>
db.UrlCache.findOne({ where: { url, project_id } })
-.then(urlDetails => cb(null, urlDetails))
+.then((urlDetails) => cb(null, urlDetails))
.error(cb)
return dbQueue.queue.push(job, callback)
},
@@ -237,7 +232,7 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function (error) {}
}
-const job = cb =>
+const job = (cb) =>
db.UrlCache.findOrCreate({ where: { url, project_id } })
.spread((urlDetails, created) =>
urlDetails
@@ -253,7 +248,7 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function (error) {}
}
-const job = cb =>
+const job = (cb) =>
db.UrlCache.destroy({ where: { url, project_id } })
.then(() => cb(null))
.error(cb)
@@ -264,12 +259,12 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function (error, urls) {}
}
-const job = cb =>
+const job = (cb) =>
db.UrlCache.findAll({ where: { project_id } })
-.then(urlEntries =>
+.then((urlEntries) =>
cb(
null,
-urlEntries.map(entry => entry.url)
+urlEntries.map((entry) => entry.url)
)
)
.error(cb)
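
Note: the reformat collapses `_cacheFileNameForUrl` to one line; the key scheme itself is unchanged. As a standalone sketch:

    const crypto = require('crypto')

    // Cache key: "<project_id>:<md5(url)>", so the same URL fetched by two
    // different projects never shares an on-disk cache file.
    function cacheFileNameForUrl(projectId, url) {
      return projectId + ':' + crypto.createHash('md5').update(url).digest('hex')
    }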

---

@@ -62,6 +62,6 @@ module.exports = {
return sequelize
.sync()
.then(() => logger.log('db sync complete'))
-.catch(err => console.log(err, 'error syncing'))
+.catch((err) => console.log(err, 'error syncing'))
}
}

---

@@ -1,11 +1,9 @@
clsi
---acceptance-creds=None
--data-dirs=cache,compiles,db
--dependencies=
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=TEXLIVE_IMAGE
---language=es
---node-version=10.21.0
+--node-version=10.22.1
--public-repo=True
---script-version=2.1.0
+--script-version=3.3.3

---

@@ -10,6 +10,7 @@ services:
command: npm run test:unit:_run
environment:
NODE_ENV: test
+NODE_OPTIONS: "--unhandled-rejections=strict"
test_acceptance:
@@ -25,6 +26,7 @@ services:
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
+NODE_OPTIONS: "--unhandled-rejections=strict"
TEXLIVE_IMAGE:
command: npm run test:acceptance:_run

---

@@ -15,7 +15,8 @@ services:
environment:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
-command: npm run test:unit
+NODE_OPTIONS: "--unhandled-rejections=strict"
+command: npm run --silent test:unit
test_acceptance:
build:
@@ -35,5 +36,6 @@ services:
MOCHA_GREP: ${MOCHA_GREP}
LOG_LEVEL: ERROR
NODE_ENV: test
-command: npm run test:acceptance
+NODE_OPTIONS: "--unhandled-rejections=strict"
+command: npm run --silent test:acceptance
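
Note: `--unhandled-rejections=strict` (available on the Node 10 line from 10.17) turns any unhandled promise rejection into an uncaught exception, so a stray rejection fails the test run instead of printing a deprecation warning. A one-line demonstration:

    // Run as: NODE_OPTIONS="--unhandled-rejections=strict" node demo.js
    // Strict mode exits non-zero here; the default mode only warns.
    Promise.reject(new Error('boom'))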

---

@@ -8,7 +8,6 @@
"execMap": {
"js": "npm run start"
},
-
"watch": [
"app/js/",
"app.js",

package-lock.json (generated): 749 lines changed. Diff suppressed because it is too large.

---

@@ -13,7 +13,7 @@
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"nodemon": "nodemon --config nodemon.json",
-"lint": "node_modules/.bin/eslint .",
+"lint": "node_modules/.bin/eslint --max-warnings 0 .",
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
},
@@ -27,8 +27,8 @@
"fs-extra": "^8.1.0",
"heapdump": "^0.3.15",
"lockfile": "^1.0.4",
-"lodash": "^4.17.15",
-"logger-sharelatex": "^1.9.1",
+"lodash": "^4.17.20",
+"logger-sharelatex": "^2.2.0",
"lynx": "0.2.0",
"metrics-sharelatex": "^2.6.0",
"mysql": "^2.18.1",
@@ -36,6 +36,7 @@
"sequelize": "^5.21.5",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
"sqlite3": "^4.1.1",
+"underscore": "^1.11.0",
"v8-profiler-node8": "^6.1.1",
"wrench": "~1.5.9"
},
@@ -59,7 +60,7 @@
"eslint-plugin-react": "^7.19.0",
"eslint-plugin-standard": "^4.0.1",
"mocha": "^7.1.0",
-"prettier": "^1.19.1",
+"prettier": "^2.0.0",
"prettier-eslint-cli": "^5.0.0",
"sandboxed-module": "^2.0.3",
"sinon": "~9.0.1",

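Note: the `(x) =>` churn throughout this diff follows from the prettier bump above: 2.0 changed the default `arrowParens` from "avoid" to "always". Keeping the old style would need an explicit override, e.g. a `.prettierrc.js` along these lines (a sketch; the repo may configure Prettier elsewhere):

    module.exports = {
      arrowParens: 'avoid', // Prettier 2.0 changed the default to 'always'
      semi: false, // matches the code style seen in this diff
      singleQuote: true,
    }
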
---

@@ -24,7 +24,7 @@ const ChildProcess = require('child_process')
const ClsiApp = require('./helpers/ClsiApp')
const logger = require('logger-sharelatex')
const Path = require('path')
-const fixturePath = path => {
+const fixturePath = (path) => {
if (path.slice(0, 3) === 'tmp') {
return '/tmp/clsi_acceptance_tests' + path.slice(3)
}
@@ -50,8 +50,8 @@ const convertToPng = function(pdfPath, pngPath, callback) {
console.log(command)
const convert = ChildProcess.exec(command)
const stdout = ''
-convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString()))
-convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
+convert.stdout.on('data', (chunk) => console.log('STDOUT', chunk.toString()))
+convert.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
return convert.on('exit', () => callback())
}
@@ -66,11 +66,11 @@ const compare = function(originalPath, generatedPath, callback) {
)} ${diff_file}`
)
let stderr = ''
-proc.stderr.on('data', chunk => (stderr += chunk))
+proc.stderr.on('data', (chunk) => (stderr += chunk))
return proc.on('exit', () => {
if (stderr.trim() === '0 (0)') {
// remove output diff if test matches expected image
-fs.unlink(diff_file, err => {
+fs.unlink(diff_file, (err) => {
if (err) {
throw err
}
@@ -89,8 +89,8 @@ const checkPdfInfo = function(pdfPath, callback) {
}
const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
let stdout = ''
-proc.stdout.on('data', chunk => (stdout += chunk))
-proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
+proc.stdout.on('data', (chunk) => (stdout += chunk))
+proc.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
return proc.on('exit', () => {
if (stdout.match(/Optimized:\s+yes/)) {
return callback(null, true)
@@ -136,14 +136,14 @@ const comparePdf = function(project_id, example_dir, callback) {
return convertToPng(
`tmp/${project_id}.pdf`,
`tmp/${project_id}-generated.png`,
-error => {
+(error) => {
if (error != null) {
throw error
}
return convertToPng(
`examples/${example_dir}/output.pdf`,
`tmp/${project_id}-source.png`,
-error => {
+(error) => {
if (error != null) {
throw error
}
@@ -163,7 +163,7 @@ const comparePdf = function(project_id, example_dir, callback) {
}
)
} else {
-return compareMultiplePages(project_id, error => {
+return compareMultiplePages(project_id, (error) => {
if (error != null) {
throw error
}
@@ -178,7 +178,12 @@ const comparePdf = function(project_id, example_dir, callback) {
)
}
-const downloadAndComparePdf = function(project_id, example_dir, url, callback) {
+const downloadAndComparePdf = function (
+project_id,
+example_dir,
+url,
+callback
+) {
if (callback == null) {
callback = function (error) {}
}
@@ -212,8 +217,9 @@ describe('Example Documents', function() {
fsExtra.remove(fixturePath('tmp'), done)
})
-return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir =>
-(example_dir =>
+return Array.from(fs.readdirSync(fixturePath('examples'))).map(
+(example_dir) =>
+((example_dir) =>
describe(example_dir, function () {
before(function () {
return (this.project_id = Client.randomId() + '_' + example_dir)
@@ -231,10 +237,15 @@ describe('Example Documents', function() {
error ||
__guard__(
body != null ? body.compile : undefined,
-x => x.status
+(x) => x.status
) === 'failure'
) {
-console.log('DEBUG: error', error, 'body', JSON.stringify(body))
+console.log(
+'DEBUG: error',
+error,
+'body',
+JSON.stringify(body)
+)
return done(new Error('Compile failed'))
}
const pdf = Client.getOutputFile(body, 'pdf')
@@ -260,10 +271,15 @@ describe('Example Documents', function() {
error ||
__guard__(
body != null ? body.compile : undefined,
-x => x.status
+(x) => x.status
) === 'failure'
) {
-console.log('DEBUG: error', error, 'body', JSON.stringify(body))
+console.log(
+'DEBUG: error',
+error,
+'body',
+JSON.stringify(body)
+)
return done(new Error('Compile failed'))
}
const pdf = Client.getOutputFile(body, 'pdf')

---

@@ -56,7 +56,7 @@ describe('Timed out compile', function() {
})
return it('should return the log output file name', function () {
-const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
+const outputFilePaths = this.body.compile.outputFiles.map((x) => x.path)
return outputFilePaths.should.include('output.log')
})
})

---

@@ -35,9 +35,7 @@ const Server = {
getFile() {},
randomId() {
-return Math.random()
-.toString(16)
-.slice(2)
+return Math.random().toString(16).slice(2)
}
}
@@ -338,7 +336,7 @@ describe('Url Caching', function() {
]
}
-return Client.compile(this.project_id, this.request, error => {
+return Client.compile(this.project_id, this.request, (error) => {
if (error != null) {
throw error
}

---

@@ -23,9 +23,7 @@ module.exports = Client = {
host: Settings.apis.clsi.url,
randomId() {
-return Math.random()
-.toString(16)
-.slice(2)
+return Math.random().toString(16).slice(2)
},
compile(project_id, data, callback) {
@@ -64,7 +62,7 @@ module.exports = Client = {
const app = express()
app.use(express.static(directory))
console.log('starting test server on', port, host)
-return app.listen(port, host).on('error', error => {
+return app.listen(port, host).on('error', (error) => {
console.error('error starting server:', error.message)
return process.exit(1)
})
@@ -130,7 +128,7 @@ module.exports = Client = {
entities = entities.concat(
fs
.readdirSync(`${baseDirectory}/${directory}/${entity}`)
-.map(subEntity => {
+.map((subEntity) => {
if (subEntity === 'main.tex') {
rootResourcePath = `${entity}/${subEntity}`
}
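
Note: the helper above doubles as a tiny static file server for the URL-caching acceptance tests. Reduced to its essentials (assuming express, as the diff shows):

    const express = require('express')

    function startStaticServer(directory, port, host) {
      const app = express()
      app.use(express.static(directory))
      // Fail fast if the port is taken rather than hanging the suite.
      return app.listen(port, host).on('error', (error) => {
        console.error('error starting server:', error.message)
        process.exit(1)
      })
    }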

---

@@ -35,10 +35,10 @@ module.exports = {
return app.listen(
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
-x => x.port
+(x) => x.port
),
'localhost',
-error => {
+(error) => {
if (error != null) {
throw error
}

---

@@ -17,7 +17,7 @@ const _ = require('lodash')
const concurentCompiles = 5
const totalCompiles = 50
-const buildUrl = path =>
+const buildUrl = (path) =>
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')
@@ -74,12 +74,12 @@ ${bodyContent}
)
}
-const jobs = _.map(__range__(1, totalCompiles, true), i => cb =>
+const jobs = _.map(__range__(1, totalCompiles, true), (i) => (cb) =>
makeRequest(i, cb)
)
const startTime = new Date()
-async.parallelLimit(jobs, concurentCompiles, err => {
+async.parallelLimit(jobs, concurentCompiles, (err) => {
if (err != null) {
console.error(err)
}

---

@@ -1,20 +1,20 @@
const request = require('request')
const Settings = require('settings-sharelatex')
-const buildUrl = path =>
+const buildUrl = (path) =>
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
const url = buildUrl(`project/smoketest-${process.pid}/compile`)
module.exports = {
sendNewResult(res) {
-this._run(error => this._sendResponse(res, error))
+this._run((error) => this._sendResponse(res, error))
},
sendLastResult(res) {
this._sendResponse(res, this._lastError)
},
triggerRun(cb) {
-this._run(error => {
+this._run((error) => {
this._lastError = error
cb(error)
})

---

@@ -115,7 +115,7 @@ describe('CompileController', function() {
compile: {
status: 'success',
error: null,
-outputFiles: this.output_files.map(file => {
+outputFiles: this.output_files.map((file) => {
return {
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
path: file.path,

---

@@ -374,7 +374,7 @@ describe('CompileManager', function() {
this.column = 3
this.file_name = 'main.tex'
this.child_process.execFile = sinon.stub()
-return (this.Settings.path.synctexBaseDir = project_id =>
+return (this.Settings.path.synctexBaseDir = (project_id) =>
`${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`)
})

---

@@ -36,7 +36,7 @@ describe('LockManager', function() {
this.callback = sinon.stub()
return this.LockManager.runWithLock(
'lock-one',
-releaseLock =>
+(releaseLock) =>
setTimeout(() => releaseLock(null, 'hello', 'world'), 100),
(err, ...args) => {
@@ -59,7 +59,7 @@ describe('LockManager', function() {
this.callback2 = sinon.stub()
this.LockManager.runWithLock(
'lock-one',
-releaseLock =>
+(releaseLock) =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100),
(err, ...args) => {
@@ -68,7 +68,7 @@ describe('LockManager', function() {
)
return this.LockManager.runWithLock(
'lock-two',
-releaseLock =>
+(releaseLock) =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200),
(err, ...args) => {
@@ -100,7 +100,7 @@ describe('LockManager', function() {
this.callback2 = sinon.stub()
this.LockManager.runWithLock(
'lock',
-releaseLock =>
+(releaseLock) =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100),
(err, ...args) => {
@@ -109,7 +109,7 @@ describe('LockManager', function() {
)
return this.LockManager.runWithLock(
'lock',
-releaseLock =>
+(releaseLock) =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200),
(err, ...args) => {
@@ -154,7 +154,7 @@ describe('LockManager', function() {
}
this.LockManager.runWithLock(
'lock',
-releaseLock =>
+(releaseLock) =>
setTimeout(
() => releaseLock(null, 'hello', 'world', 'one'),
1100
@@ -167,7 +167,7 @@ describe('LockManager', function() {
)
return this.LockManager.runWithLock(
'lock',
-releaseLock =>
+(releaseLock) =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100),
(err, ...args) => {
@@ -211,7 +211,7 @@ describe('LockManager', function() {
}
this.LockManager.runWithLock(
'lock',
-releaseLock =>
+(releaseLock) =>
setTimeout(
() => releaseLock(null, 'hello', 'world', 'one'),
1500
@@ -224,7 +224,7 @@ describe('LockManager', function() {
)
return this.LockManager.runWithLock(
'lock',
-releaseLock =>
+(releaseLock) =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100),
(err, ...args) => {
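
Note: the calling convention these tests exercise: `runWithLock(key, fn, callback)` hands `fn` a `releaseLock(err, ...results)` callback, and whatever `releaseLock` receives is forwarded to `callback` once the lock is freed, so competing callers on the same key run one after another. In outline:

    LockManager.runWithLock(
      'lock-one',
      (releaseLock) =>
        // do the protected work, then free the lock with the results
        setTimeout(() => releaseLock(null, 'hello', 'world'), 100),
      (err, ...args) => {
        // err === null, args === ['hello', 'world']
      }
    )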

---

@@ -738,7 +738,7 @@ describe('DockerRunner', function() {
this.DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds
this.listContainers.callsArgWith(1, null, this.containers)
this.DockerRunner.destroyContainer = sinon.stub().callsArg(3)
-return this.DockerRunner.destroyOldContainers(error => {
+return this.DockerRunner.destroyOldContainers((error) => {
this.callback(error)
return done()
})
@@ -785,7 +785,7 @@ describe('DockerRunner', function() {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
-err => {
+(err) => {
this.Docker.prototype.getContainer.callCount.should.equal(1)
this.Docker.prototype.getContainer
.calledWith(this.containerId)
@@ -799,10 +799,10 @@ describe('DockerRunner', function() {
return this.DockerRunner._destroyContainer(
this.containerId,
true,
-err => {
+(err) => {
this.fakeContainer.remove.callCount.should.equal(1)
this.fakeContainer.remove
-.calledWith({ force: true })
+.calledWith({ force: true, v: true })
.should.equal(true)
return done()
}
@@ -813,10 +813,10 @@ describe('DockerRunner', function() {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
-err => {
+(err) => {
this.fakeContainer.remove.callCount.should.equal(1)
this.fakeContainer.remove
-.calledWith({ force: false })
+.calledWith({ force: false, v: true })
.should.equal(true)
return done()
}
@@ -827,7 +827,7 @@ describe('DockerRunner', function() {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
-err => {
+(err) => {
expect(err).to.equal(null)
return done()
}
@@ -850,7 +850,7 @@ describe('DockerRunner', function() {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
-err => {
+(err) => {
expect(err).to.equal(null)
return done()
}
@@ -874,7 +874,7 @@ describe('DockerRunner', function() {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
-err => {
+(err) => {
expect(err).to.not.equal(null)
expect(err).to.equal(this.fakeError)
return done()
@@ -894,7 +894,7 @@ describe('DockerRunner', function() {
})
it('should get the container', function (done) {
-return this.DockerRunner.kill(this.containerId, err => {
+return this.DockerRunner.kill(this.containerId, (err) => {
this.Docker.prototype.getContainer.callCount.should.equal(1)
this.Docker.prototype.getContainer
.calledWith(this.containerId)
@@ -904,14 +904,14 @@ describe('DockerRunner', function() {
})
it('should try to force-destroy the container', function (done) {
-return this.DockerRunner.kill(this.containerId, err => {
+return this.DockerRunner.kill(this.containerId, (err) => {
this.fakeContainer.kill.callCount.should.equal(1)
return done()
})
})
it('should not produce an error', function (done) {
-return this.DockerRunner.kill(this.containerId, err => {
+return this.DockerRunner.kill(this.containerId, (err) => {
expect(err).to.equal(undefined)
return done()
})
@@ -932,7 +932,7 @@ describe('DockerRunner', function() {
})
return it('should not produce an error', function (done) {
-return this.DockerRunner.kill(this.containerId, err => {
+return this.DockerRunner.kill(this.containerId, (err) => {
expect(err).to.equal(undefined)
return done()
})
@@ -953,7 +953,7 @@ describe('DockerRunner', function() {
})
return it('should produce an error', function (done) {
-return this.DockerRunner.kill(this.containerId, err => {
+return this.DockerRunner.kill(this.containerId, (err) => {
expect(err).to.not.equal(undefined)
expect(err).to.equal(this.fakeError)
return done()
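
Note: the new `v: true` in the expected `remove()` options maps to the Docker Engine API's `DELETE /containers/{id}?v=1`, which deletes the container's anonymous volumes along with the container instead of leaking them. A sketch of the call under test, assuming dockerode:

    const Docker = require('dockerode')
    const docker = new Docker()

    function destroyContainer(containerId, shouldForce, callback) {
      // force: remove even if running; v: also remove anonymous volumes
      docker
        .getContainer(containerId)
        .remove({ force: shouldForce, v: true }, callback)
    }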

---

@@ -144,7 +144,7 @@ describe('LatexRunner', function() {
return it('should include the flags in the command', function () {
const command = this.CommandRunner.run.args[0][1]
const flags = command.filter(
-arg => arg === '-file-line-error' || arg === '-halt-on-error'
+(arg) => arg === '-file-line-error' || arg === '-halt-on-error'
)
flags.length.should.equal(2)
flags[0].should.equal('-file-line-error')

---

@@ -126,10 +126,7 @@ describe('OutputFileOptimiser', function() {
.stub()
.withArgs(this.fd)
.yields(null, 100, Buffer.from('hello /Linearized 1'))
-this.fs.close = sinon
-.stub()
-.withArgs(this.fd)
-.yields(null)
+this.fs.close = sinon.stub().withArgs(this.fd).yields(null)
return this.OutputFileOptimiser.checkIfPDFIsOptimised(
this.src,
this.callback

---

@@ -109,7 +109,7 @@ describe('ProjectPersistenceManager', function() {
})
it('should clear each expired project', function () {
-return Array.from(this.project_ids).map(project_id =>
+return Array.from(this.project_ids).map((project_id) =>
this.ProjectPersistenceManager.clearProjectFromCache
.calledWith(project_id)
.should.equal(true)

---

@@ -78,7 +78,7 @@ describe('ResourceWriter', function() {
})
it('should write each resource to disk', function () {
-return Array.from(this.resources).map(resource =>
+return Array.from(this.resources).map((resource) =>
this.ResourceWriter._writeResourceToDisk
.calledWith(this.project_id, resource, this.basePath)
.should.equal(true)
@@ -139,7 +139,7 @@ describe('ResourceWriter', function() {
})
it('should write each resource to disk', function () {
-return Array.from(this.resources).map(resource =>
+return Array.from(this.resources).map((resource) =>
this.ResourceWriter._writeResourceToDisk
.calledWith(this.project_id, resource, this.basePath)
.should.equal(true)

---

@@ -342,7 +342,7 @@ describe('UrlCache', function() {
})
it('should clear the cache for each url in the project', function () {
-return Array.from(this.urls).map(url =>
+return Array.from(this.urls).map((url) =>
this.UrlCache._clearUrlFromCache
.calledWith(this.project_id, url)
.should.equal(true)

---

@@ -40,7 +40,7 @@ describe('UrlFetcher', function() {
it('should call pipeUrlToFile', function (done) {
this.UrlFetcher.pipeUrlToFile.callsArgWith(2)
-this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
+this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
expect(err).to.equal(undefined)
this.UrlFetcher.pipeUrlToFile.called.should.equal(true)
done()
@@ -50,7 +50,7 @@ describe('UrlFetcher', function() {
it('should call pipeUrlToFile multiple times on error', function (done) {
const error = new Error("couldn't download file")
this.UrlFetcher.pipeUrlToFile.callsArgWith(2, error)
-this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
+this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
expect(err).to.equal(error)
this.UrlFetcher.pipeUrlToFile.callCount.should.equal(3)
done()
@@ -60,7 +60,7 @@ describe('UrlFetcher', function() {
it('should call pipeUrlToFile twice if only 1 error', function (done) {
this.UrlFetcher.pipeUrlToFile.onCall(0).callsArgWith(2, 'error')
this.UrlFetcher.pipeUrlToFile.onCall(1).callsArgWith(2)
-this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
+this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
expect(err).to.equal(undefined)
this.UrlFetcher.pipeUrlToFile.callCount.should.equal(2)
done()
@@ -167,7 +167,7 @@ describe('UrlFetcher', function() {
describe('with non success status code', function () {
beforeEach(function (done) {
-this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
+this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
this.callback(err)
return done()
})
@@ -188,7 +188,7 @@ describe('UrlFetcher', function() {
return describe('with error', function () {
beforeEach(function (done) {
-this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
+this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
this.callback(err)
return done()
})
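
Note: taken together, these cases pin down the retry policy: up to three attempts, stopping at the first success. A minimal sketch of a conforming implementation (names taken from the tests):

    function pipeUrlToFileWithRetry(url, filePath, callback) {
      let attempts = 0
      const attempt = () => {
        attempts++
        pipeUrlToFile(url, filePath, (err) => {
          // retry on error until the third attempt, then give up
          if (err != null && attempts < 3) {
            return attempt()
          }
          callback(err)
        })
      }
      attempt()
    }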