1 Commits

Author SHA1 Message Date
Brian Gough
ab8fb8351c create home and tmp directories in compile dir 2020-05-13 10:18:27 +01:00
70 changed files with 2265 additions and 2991 deletions

View File

@@ -8,7 +8,7 @@
"prettier/standard"
],
"parserOptions": {
"ecmaVersion": 2018
"ecmaVersion": 2017
},
"plugins": [
"mocha",

View File

@@ -1,23 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
pull-request-branch-name:
# Separate sections of the branch name with a hyphen
# Docker images use the branch name and do not support slashes in tags
# https://github.com/overleaf/google-ops/issues/822
# https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
separator: "-"
# Block informal upgrades -- security upgrades use a separate queue.
# https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
open-pull-requests-limit: 0
# currently assign team-magma to all dependabot PRs - this may change in
# future if we reorganise teams
labels:
- "dependencies"
- "Team-Magma"

3
.gitignore vendored
View File

@@ -11,6 +11,3 @@ db.sqlite-wal
db.sqlite-shm
config/*
npm-debug.log
# managed by dev-environment$ bin/update_build_scripts
.npmrc

2
.nvmrc
View File

@@ -1 +1 @@
10.22.1
10.19.0

View File

@@ -2,7 +2,7 @@
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
FROM node:10.22.1 as base
FROM node:10.19.0 as base
WORKDIR /app
COPY install_deps.sh /app
@@ -15,10 +15,12 @@ FROM base as app
#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/
RUN npm ci --quiet
RUN npm install --quiet
COPY . /app
FROM base
COPY --from=app /app /app

131
Jenkinsfile vendored Normal file
View File

@@ -0,0 +1,131 @@
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""
pipeline {
agent any
environment {
GIT_PROJECT = "clsi"
JENKINS_WORKFLOW = "clsi-sharelatex"
TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT"
}
triggers {
pollSCM('* * * * *')
cron(cron_string)
}
stages {
stage('Install') {
steps {
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"pending\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build is underway\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
}
stage('Build') {
steps {
sh 'make build'
}
}
stage('Linting') {
steps {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format'
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint'
}
}
stage('Unit Tests') {
steps {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
}
}
stage('Acceptance Tests') {
steps {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
}
}
stage('Package and docker push') {
steps {
sh 'echo ${BUILD_NUMBER} > build_number.txt'
sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'
withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
}
sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
sh 'docker logout https://gcr.io/overleaf-ops'
}
}
stage('Publish to s3') {
steps {
sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
}
withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
// The deployment process uses this file to figure out the latest build
s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
}
}
}
}
post {
always {
sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
sh 'make clean'
}
success {
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"success\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build succeeded!\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
failure {
mail(from: "${EMAIL_ALERT_FROM}",
to: "${EMAIL_ALERT_TO}",
subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
body: "Build: ${BUILD_URL}")
withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
sh "curl $GIT_API_URL \
--data '{ \
\"state\" : \"failure\", \
\"target_url\": \"$TARGET_URL\", \
\"description\": \"Your build failed\", \
\"context\": \"ci/jenkins\" }' \
-u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
}
}
}
// The options directive is for configuration that applies to the whole job.
options {
// we'd like to make sure remove old builds, so we don't fill up our storage!
buildDiscarder(logRotator(numToKeepStr:'50'))
// And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
timeout(time: 30, unit: 'MINUTES')
}
}

View File

@@ -25,13 +25,13 @@ clean:
docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
format:
$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format
$(DOCKER_COMPOSE) run --rm test_unit npm run format
format_fix:
$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix
$(DOCKER_COMPOSE) run --rm test_unit npm run format:fix
lint:
$(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint
$(DOCKER_COMPOSE) run --rm test_unit npm run lint
test: format lint test_unit test_acceptance

View File

@@ -1,8 +1,6 @@
overleaf/clsi
===============
**Note:** Original repo here: https://github.com/overleaf/clsi
A web api for compiling LaTeX documents in the cloud
The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). The CLSI listens on the following ports by default:
@@ -36,21 +34,20 @@ The CLSI can be installed and set up as part of the entire [Overleaf stack](http
$ git clone git@github.com:overleaf/clsi.git
Then install the required npm modules and run:
Then install the required npm modules:
$ npm install
$ npm start
Then compile the coffee script source files:
$ grunt install
Finally, (after configuring your local database - see the Config section), run the CLSI service:
$ grunt run
The CLSI should then be running at http://localhost:3013.
**Note:** to install texlive-full on ubuntu:
$ sudo apt install texlive-full
Possible REST API clients to test are:
* Postman
* Insomnia
Config
------

135
app.js
View File

@@ -5,7 +5,7 @@
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const tenMinutes = 10 * 60 * 1000
let tenMinutes
const Metrics = require('metrics-sharelatex')
Metrics.initialize('clsi')
@@ -17,7 +17,7 @@ if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
logger.initializeErrorReporting(Settings.sentry.dsn)
}
const smokeTest = require('./test/smoke/js/SmokeTests')
const smokeTest = require('smoke-test-sharelatex')
const ContentTypeMapper = require('./app/js/ContentTypeMapper')
const Errors = require('./app/js/Errors')
@@ -49,29 +49,31 @@ app.use(function (req, res, next) {
return next()
})
app.param('project_id', function (req, res, next, projectId) {
if (projectId != null ? projectId.match(/^[a-zA-Z0-9_-]+$/) : undefined) {
app.param('project_id', function(req, res, next, project_id) {
if (project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined) {
return next()
} else {
return next(new Error('invalid project id'))
}
})
app.param('user_id', function (req, res, next, userId) {
if (userId != null ? userId.match(/^[0-9a-f]{24}$/) : undefined) {
app.param('user_id', function(req, res, next, user_id) {
if (user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined) {
return next()
} else {
return next(new Error('invalid user id'))
}
})
app.param('build_id', function (req, res, next, buildId) {
app.param('build_id', function(req, res, next, build_id) {
if (
buildId != null ? buildId.match(OutputCacheManager.BUILD_REGEX) : undefined
build_id != null
? build_id.match(OutputCacheManager.BUILD_REGEX)
: undefined
) {
return next()
} else {
return next(new Error(`invalid build id ${buildId}`))
return next(new Error(`invalid build id ${build_id}`))
}
})
@@ -134,16 +136,17 @@ const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
}
})
app.get(
'/project/:project_id/user/:user_id/build/:build_id/output/*',
function (req, res, next) {
app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function(
req,
res,
next
) {
// for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url =
`/${req.params.project_id}-${req.params.user_id}/` +
OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
return staticServer(req, res, next)
}
)
})
app.get('/project/:project_id/build/:build_id/output/*', function(
req,
@@ -189,49 +192,69 @@ app.get('/oops', function (req, res, next) {
app.get('/status', (req, res, next) => res.send('CLSI is alive\n'))
Settings.processTooOld = false
const resCacher = {
contentType(setContentType) {
this.setContentType = setContentType
},
send(code, body) {
this.code = code
this.body = body
},
// default the server to be down
code: 500,
body: {},
setContentType: 'application/json'
}
let shutdownTime
if (Settings.processLifespanLimitMs) {
Settings.processLifespanLimitMs +=
Settings.processLifespanLimitMs * (Math.random() / 10)
logger.info(
'Lifespan limited to ',
Date.now() + Settings.processLifespanLimitMs
)
setTimeout(() => {
logger.log('shutting down, process is too old')
Settings.processTooOld = true
}, Settings.processLifespanLimitMs)
shutdownTime = Date.now() + Settings.processLifespanLimitMs
logger.info('Lifespan limited to ', shutdownTime)
}
function runSmokeTest() {
if (Settings.processTooOld) return
const checkIfProcessIsTooOld = function(cont) {
if (shutdownTime && shutdownTime < Date.now()) {
logger.log('shutting down, process is too old')
resCacher.send = function() {}
resCacher.code = 500
resCacher.body = { processToOld: true }
} else {
cont()
}
}
if (Settings.smokeTest) {
const runSmokeTest = function() {
checkIfProcessIsTooOld(function() {
logger.log('running smoke tests')
smokeTest.triggerRun((err) => {
if (err) logger.error({ err }, 'smoke tests failed')
setTimeout(runSmokeTest, 30 * 1000)
smokeTest.run(
require.resolve(__dirname + '/test/smoke/js/SmokeTests.js')
)({}, resCacher)
return setTimeout(runSmokeTest, 30 * 1000)
})
}
if (Settings.smokeTest) {
runSmokeTest()
}
app.get('/health_check', function(req, res) {
if (Settings.processTooOld) {
return res.status(500).json({ processTooOld: true })
}
smokeTest.sendLastResult(res)
res.contentType(resCacher.setContentType)
return res.status(resCacher.code).send(resCacher.body)
})
app.get('/smoke_test_force', (req, res) => smokeTest.sendNewResult(res))
app.get('/smoke_test_force', (req, res) =>
smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))(
req,
res
)
)
app.use(function(error, req, res, next) {
if (error instanceof Errors.NotFoundError) {
logger.log({ err: error, url: req.url }, 'not found error')
logger.warn({ err: error, url: req.url }, 'not found error')
return res.sendStatus(404)
} else if (error.code === 'EPIPE') {
// inspect container returns EPIPE when shutting down
return res.sendStatus(503) // send 503 Unavailable response
} else {
logger.error({ err: error, url: req.url }, 'server error')
return res.sendStatus((error != null ? error.statusCode : undefined) || 500)
@@ -300,48 +323,46 @@ loadHttpServer.post('/state/maint', function (req, res, next) {
const port =
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
(x) => x.port
x => x.port
) || 3013
const host =
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
(x1) => x1.host
x1 => x1.host
) || 'localhost'
const loadTcpPort = Settings.internal.load_balancer_agent.load_port
const loadHttpPort = Settings.internal.load_balancer_agent.local_port
const load_tcp_port = Settings.internal.load_balancer_agent.load_port
const load_http_port = Settings.internal.load_balancer_agent.local_port
if (!module.parent) {
// Called directly
app.listen(port, host, (error) => {
if (error) {
logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`)
} else {
app.listen(port, host, error =>
logger.info(`CLSI starting up, listening on ${host}:${port}`)
}
})
)
loadTcpServer.listen(loadTcpPort, host, function (error) {
loadTcpServer.listen(load_tcp_port, host, function(error) {
if (error != null) {
throw error
}
return logger.info(`Load tcp agent listening on load port ${loadTcpPort}`)
return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`)
})
loadHttpServer.listen(loadHttpPort, host, function (error) {
loadHttpServer.listen(load_http_port, host, function(error) {
if (error != null) {
throw error
}
return logger.info(`Load http agent listening on load port ${loadHttpPort}`)
return logger.info(
`Load http agent listening on load port ${load_http_port}`
)
})
}
module.exports = app
setInterval(() => {
ProjectPersistenceManager.refreshExpiryTimeout()
ProjectPersistenceManager.clearExpiredProjects()
}, tenMinutes)
setInterval(
() => ProjectPersistenceManager.clearExpiredProjects(),
(tenMinutes = 10 * 60 * 1000)
)
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null

View File

@@ -55,10 +55,6 @@ module.exports = CompileController = {
} else if (error instanceof Errors.FilesOutOfSyncError) {
code = 409 // Http 409 Conflict
status = 'retry'
} else if (error && error.code === 'EPIPE') {
// docker returns EPIPE when shutting down
code = 503 // send 503 Unavailable response
status = 'unavailable'
} else if (error != null ? error.terminated : undefined) {
status = 'terminated'
} else if (error != null ? error.validate : undefined) {
@@ -116,7 +112,7 @@ module.exports = CompileController = {
compile: {
status,
error: (error != null ? error.message : undefined) || error,
outputFiles: outputFiles.map((file) => ({
outputFiles: outputFiles.map(file => ({
url:
`${Settings.apis.clsi.url}/project/${request.project_id}` +
(request.user_id != null
@@ -197,21 +193,17 @@ module.exports = CompileController = {
const v = parseFloat(req.query.v)
const { project_id } = req.params
const { user_id } = req.params
return CompileManager.syncFromPdf(
project_id,
user_id,
page,
h,
v,
function (error, codePositions) {
return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
error,
codePositions
) {
if (error != null) {
return next(error)
}
return res.json({
code: codePositions
})
}
)
})
},
wordcount(req, res, next) {
@@ -222,15 +214,6 @@ module.exports = CompileController = {
const { project_id } = req.params
const { user_id } = req.params
const { image } = req.query
if (
image &&
Settings.clsi &&
Settings.clsi.docker &&
Settings.clsi.docker.allowedImages &&
!Settings.clsi.docker.allowedImages.includes(image)
) {
return res.status(400).send('invalid image')
}
logger.log({ image, file, project_id }, 'word count request')
return CompileManager.wordcount(project_id, user_id, file, image, function(

View File

@@ -53,15 +53,18 @@ module.exports = CompileManager = {
}
const compileDir = getCompileDir(request.project_id, request.user_id)
const lockFile = Path.join(compileDir, '.project-lock')
// create local home and tmp directories in the compile dir
const homeDir = Path.join(compileDir, '.project-home')
const tmpDir = Path.join(compileDir, '.project-tmp')
// use a .project-lock file in the compile directory to prevent
// simultaneous compiles
return fse.ensureDir(compileDir, function (error) {
async.each([compileDir, homeDir, tmpDir], fse.ensureDir, function (error) {
if (error != null) {
return callback(error)
}
return LockManager.runWithLock(
lockFile,
(releaseLock) => CompileManager.doCompile(request, releaseLock),
releaseLock => CompileManager.doCompile(request, releaseLock),
callback
)
})
@@ -120,7 +123,7 @@ module.exports = CompileManager = {
}
}
const createTikzFileIfRequired = (callback) =>
const createTikzFileIfRequired = callback =>
TikzManager.checkMainFile(
compileDir,
request.rootResourcePath,
@@ -142,10 +145,6 @@ module.exports = CompileManager = {
)
// set up environment variables for chktex
const env = {}
if (Settings.texliveOpenoutAny && Settings.texliveOpenoutAny !== '') {
// override default texlive openout_any environment variable
env.openout_any = Settings.texliveOpenoutAny
}
// only run chktex on LaTeX files (not knitr .Rtex files or any others)
const isLaTeXFile =
request.rootResourcePath != null
@@ -177,9 +176,9 @@ module.exports = CompileManager = {
request.imageName != null
? request.imageName.match(/:(.*)/)
: undefined,
(x1) => x1[1]
x1 => x1[1]
),
(x) => x.replace(/\./g, '-')
x => x.replace(/\./g, '-')
) || 'default'
if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
tag = 'other'
@@ -199,14 +198,15 @@ module.exports = CompileManager = {
timeout: request.timeout,
image: request.imageName,
flags: request.flags,
environment: env,
compileGroup: request.compileGroup
environment: env
},
function(error, output, stats, timings) {
// request was for validation only
let metric_key, metric_value
if (request.check === 'validate') {
const result = (error != null ? error.code : undefined)
const result = (error != null
? error.code
: undefined)
? 'fail'
: 'pass'
error = new Error('validation')
@@ -337,7 +337,7 @@ module.exports = CompileManager = {
proc.on('error', callback)
let stderr = ''
proc.stderr.setEncoding('utf8').on('data', (chunk) => (stderr += chunk))
proc.stderr.on('data', chunk => (stderr += chunk.toString()))
return proc.on('close', function(code) {
if (code === 0) {
@@ -358,7 +358,7 @@ module.exports = CompileManager = {
if (err != null) {
return callback(err)
}
const allDirs = Array.from(files).map((file) => Path.join(root, file))
const allDirs = Array.from(files).map(file => Path.join(root, file))
return callback(null, allDirs)
})
},
@@ -429,7 +429,15 @@ module.exports = CompileManager = {
const compileDir = getCompileDir(project_id, user_id)
const synctex_path = `${base_dir}/output.pdf`
const command = ['code', synctex_path, file_path, line, column]
CompileManager._runSynctex(project_id, user_id, command, function (
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
logger.err(
{ error, project_id, user_id, file_name },
'error ensuring dir for sync from code'
)
return callback(error)
}
return CompileManager._runSynctex(project_id, user_id, command, function(
error,
stdout
) {
@@ -440,7 +448,11 @@ module.exports = CompileManager = {
{ project_id, user_id, file_name, line, column, command, stdout },
'synctex code output'
)
return callback(null, CompileManager._parseSynctexFromCodeOutput(stdout))
return callback(
null,
CompileManager._parseSynctexFromCodeOutput(stdout)
)
})
})
},
@@ -453,7 +465,15 @@ module.exports = CompileManager = {
const base_dir = Settings.path.synctexBaseDir(compileName)
const synctex_path = `${base_dir}/output.pdf`
const command = ['pdf', synctex_path, page, h, v]
CompileManager._runSynctex(project_id, user_id, command, function (
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
logger.err(
{ error, project_id, user_id, file_name },
'error ensuring dir for sync to code'
)
return callback(error)
}
return CompileManager._runSynctex(project_id, user_id, command, function(
error,
stdout
) {
@@ -469,23 +489,29 @@ module.exports = CompileManager = {
CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
)
})
})
},
_checkFileExists(dir, filename, callback) {
_checkFileExists(path, callback) {
if (callback == null) {
callback = function(error) {}
}
const file = Path.join(dir, filename)
return fs.stat(dir, function (error, stats) {
const synctexDir = Path.dirname(path)
const synctexFile = Path.join(synctexDir, 'output.synctex.gz')
return fs.stat(synctexDir, function(error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(new Errors.NotFoundError('no output directory'))
return callback(
new Errors.NotFoundError('called synctex with no output directory')
)
}
if (error != null) {
return callback(error)
}
return fs.stat(file, function (error, stats) {
return fs.stat(synctexFile, function(error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(new Errors.NotFoundError('no output file'))
return callback(
new Errors.NotFoundError('called synctex with no output file')
)
}
if (error != null) {
return callback(error)
@@ -509,21 +535,13 @@ module.exports = CompileManager = {
const directory = getCompileDir(project_id, user_id)
const timeout = 60 * 1000 // increased to allow for large projects
const compileName = getCompileName(project_id, user_id)
const compileGroup = 'synctex'
CompileManager._checkFileExists(directory, 'output.synctex.gz', (error) => {
if (error) {
return callback(error)
}
return CommandRunner.run(
compileName,
command,
directory,
Settings.clsi && Settings.clsi.docker
? Settings.clsi.docker.image
: undefined,
Settings.clsi != null ? Settings.clsi.docker.image : undefined,
timeout,
{},
compileGroup,
function(error, output) {
if (error != null) {
logger.err(
@@ -535,7 +553,6 @@ module.exports = CompileManager = {
return callback(null, output.stdout)
}
)
})
},
_parseSynctexFromCodeOutput(output) {
@@ -588,7 +605,6 @@ module.exports = CompileManager = {
const compileDir = getCompileDir(project_id, user_id)
const timeout = 60 * 1000
const compileName = getCompileName(project_id, user_id)
const compileGroup = 'wordcount'
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
logger.err(
@@ -604,7 +620,6 @@ module.exports = CompileManager = {
image,
timeout,
{},
compileGroup,
function(error) {
if (error != null) {
return callback(error)

View File

@@ -1,3 +1,21 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DockerRunner, oneHour
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Docker = require('dockerode')
@@ -7,33 +25,30 @@ const async = require('async')
const LockManager = require('./DockerLockManager')
const fs = require('fs')
const Path = require('path')
const _ = require('lodash')
const _ = require('underscore')
const ONE_HOUR_IN_MS = 60 * 60 * 1000
logger.info('using docker runner')
function usingSiblingContainers() {
return (
Settings != null &&
Settings.path != null &&
Settings.path.sandboxedCompilesHostDir != null
)
}
const usingSiblingContainers = () =>
__guard__(
Settings != null ? Settings.path : undefined,
x => x.sandboxedCompilesHostDir
) != null
let containerMonitorTimeout
let containerMonitorInterval
const DockerRunner = {
run(
projectId,
command,
directory,
image,
timeout,
environment,
compileGroup,
callback
) {
module.exports = DockerRunner = {
ERR_NOT_DIRECTORY: new Error('not a directory'),
ERR_TERMINATED: new Error('terminated'),
ERR_EXITED: new Error('exited'),
ERR_TIMED_OUT: new Error('container timed out'),
run(project_id, command, directory, image, timeout, environment, callback) {
let name
if (callback == null) {
callback = function(error, output) {}
}
if (usingSiblingContainers()) {
const _newPath = Settings.path.sandboxedCompilesHostDir
logger.log(
@@ -50,20 +65,16 @@ const DockerRunner = {
)
}
const volumes = { [directory]: '/compile' }
const volumes = {}
volumes[directory] = '/compile'
command = command.map((arg) =>
arg.toString().replace('$COMPILE_DIR', '/compile')
command = Array.from(command).map(arg =>
__guardMethod__(arg.toString(), 'replace', o =>
o.replace('$COMPILE_DIR', '/compile')
)
)
if (image == null) {
image = Settings.clsi.docker.image
}
if (
Settings.clsi.docker.allowedImages &&
!Settings.clsi.docker.allowedImages.includes(image)
) {
return callback(new Error('image not allowed'))
;({ image } = Settings.clsi.docker)
}
if (Settings.texliveImageNameOveride != null) {
@@ -76,31 +87,28 @@ const DockerRunner = {
image,
volumes,
timeout,
environment,
compileGroup
environment
)
const fingerprint = DockerRunner._fingerprintContainer(options)
const name = `project-${projectId}-${fingerprint}`
options.name = name
options.name = name = `project-${project_id}-${fingerprint}`
// logOptions = _.clone(options)
// logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
logger.log({ projectId }, 'running docker container')
DockerRunner._runAndWaitForContainer(
options,
volumes,
timeout,
(error, output) => {
logger.log({ project_id }, 'running docker container')
DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
error,
output
) {
if (error && error.statusCode === 500) {
logger.log(
{ err: error, projectId },
{ err: error, project_id },
'error running container so destroying and retrying'
)
DockerRunner.destroyContainer(name, null, true, (error) => {
return DockerRunner.destroyContainer(name, null, true, function(error) {
if (error != null) {
return callback(error)
}
DockerRunner._runAndWaitForContainer(
return DockerRunner._runAndWaitForContainer(
options,
volumes,
timeout,
@@ -108,111 +116,121 @@ const DockerRunner = {
)
})
} else {
callback(error, output)
return callback(error, output)
}
}
)
})
// pass back the container name to allow it to be killed
return name
},
}, // pass back the container name to allow it to be killed
kill(containerId, callback) {
logger.log({ containerId }, 'sending kill signal to container')
const container = dockerode.getContainer(containerId)
container.kill((error) => {
kill(container_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ container_id }, 'sending kill signal to container')
const container = dockerode.getContainer(container_id)
return container.kill(function(error) {
if (
error != null &&
error.message != null &&
error.message.match(/Cannot kill container .* is not running/)
__guardMethod__(error != null ? error.message : undefined, 'match', o =>
o.match(/Cannot kill container .* is not running/)
)
) {
logger.warn(
{ err: error, containerId },
{ err: error, container_id },
'container not running, continuing'
)
error = null
}
if (error != null) {
logger.error({ err: error, containerId }, 'error killing container')
callback(error)
logger.error({ err: error, container_id }, 'error killing container')
return callback(error)
} else {
callback()
return callback()
}
})
},
_runAndWaitForContainer(options, volumes, timeout, _callback) {
const callback = _.once(_callback)
if (_callback == null) {
_callback = function(error, output) {}
}
const callback = function(...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
}
const { name } = options
let streamEnded = false
let containerReturned = false
let output = {}
function callbackIfFinished() {
const callbackIfFinished = function() {
if (streamEnded && containerReturned) {
callback(null, output)
return callback(null, output)
}
}
function attachStreamHandler(error, _output) {
const attachStreamHandler = function(error, _output) {
if (error != null) {
return callback(error)
}
output = _output
streamEnded = true
callbackIfFinished()
return callbackIfFinished()
}
DockerRunner.startContainer(
return DockerRunner.startContainer(
options,
volumes,
attachStreamHandler,
(error, containerId) => {
function(error, containerId) {
if (error != null) {
return callback(error)
}
DockerRunner.waitForContainer(name, timeout, (error, exitCode) => {
return DockerRunner.waitForContainer(name, timeout, function(
error,
exitCode
) {
let err
if (error != null) {
return callback(error)
}
if (exitCode === 137) {
// exit status from kill -9
const err = new Error('terminated')
err = DockerRunner.ERR_TERMINATED
err.terminated = true
return callback(err)
}
if (exitCode === 1) {
// exit status from chktex
const err = new Error('exited')
err = DockerRunner.ERR_EXITED
err.code = exitCode
return callback(err)
}
containerReturned = true
if (options != null && options.HostConfig != null) {
options.HostConfig.SecurityOpt = null
}
logger.log({ exitCode, options }, 'docker container has exited')
callbackIfFinished()
__guard__(
options != null ? options.HostConfig : undefined,
x => (x.SecurityOpt = null)
) // small log line
logger.log({ err, exitCode, options }, 'docker container has exited')
return callbackIfFinished()
})
}
)
},
_getContainerOptions(
command,
image,
volumes,
timeout,
environment,
compileGroup
) {
_getContainerOptions(command, image, volumes, timeout, environment) {
let m, year
let key, value, hostVol, dockerVol
const timeoutInSeconds = timeout / 1000
const dockerVolumes = {}
for (const hostVol in volumes) {
const dockerVol = volumes[hostVol]
for (hostVol in volumes) {
dockerVol = volumes[hostVol]
dockerVolumes[dockerVol] = {}
if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
@@ -223,14 +241,17 @@ const DockerRunner = {
// merge settings and environment parameter
const env = {}
for (const src of [Settings.clsi.docker.env, environment || {}]) {
for (const key in src) {
const value = src[key]
for (key in src) {
value = src[key]
env[key] = value
}
}
// set the path based on the image year
const match = image.match(/:([0-9]+)\.[0-9]+/)
const year = match ? match[1] : '2014'
if ((m = image.match(/:([0-9]+)\.[0-9]+/))) {
year = m[1]
} else {
year = '2014'
}
env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
const options = {
Cmd: command,
@@ -240,11 +261,23 @@ const DockerRunner = {
NetworkDisabled: true,
Memory: 1024 * 1024 * 1024 * 1024, // 1 Gb
User: Settings.clsi.docker.user,
Env: Object.entries(env).map(([key, value]) => `${key}=${value}`),
Env: (() => {
const result = []
for (key in env) {
value = env[key]
result.push(`${key}=${value}`)
}
return result
})(), // convert the environment hash to an array
HostConfig: {
Binds: Object.entries(volumes).map(
([hostVol, dockerVol]) => `${hostVol}:${dockerVol}`
),
Binds: (() => {
const result1 = []
for (hostVol in volumes) {
dockerVol = volumes[hostVol]
result1.push(`${hostVol}:${dockerVol}`)
}
return result1
})(),
LogConfig: { Type: 'none', Config: {} },
Ulimits: [
{
@@ -258,7 +291,10 @@ const DockerRunner = {
}
}
if (Settings.path != null && Settings.path.synctexBinHostPath != null) {
if (
(Settings.path != null ? Settings.path.synctexBinHostPath : undefined) !=
null
) {
options.HostConfig.Binds.push(
`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
)
@@ -274,45 +310,31 @@ const DockerRunner = {
options.HostConfig.Runtime = Settings.clsi.docker.runtime
}
if (Settings.clsi.docker.Readonly) {
options.HostConfig.ReadonlyRootfs = true
options.HostConfig.Tmpfs = { '/tmp': 'rw,noexec,nosuid,size=65536k' }
options.Volumes['/home/tex'] = {}
}
// Allow per-compile group overriding of individual settings
if (
Settings.clsi.docker.compileGroupConfig &&
Settings.clsi.docker.compileGroupConfig[compileGroup]
) {
const override = Settings.clsi.docker.compileGroupConfig[compileGroup]
for (const key in override) {
_.set(options, key, override[key])
}
}
return options
},
_fingerprintContainer(containerOptions) {
// Yay, Hashing!
const json = JSON.stringify(containerOptions)
return crypto.createHash('md5').update(json).digest('hex')
return crypto
.createHash('md5')
.update(json)
.digest('hex')
},
startContainer(options, volumes, attachStreamHandler, callback) {
LockManager.runWithLock(
return LockManager.runWithLock(
options.name,
(releaseLock) =>
releaseLock =>
// Check that volumes exist before starting the container.
// When a container is started with volume pointing to a
// non-existent directory then docker creates the directory but
// with root ownership.
DockerRunner._checkVolumes(options, volumes, (err) => {
DockerRunner._checkVolumes(options, volumes, function(err) {
if (err != null) {
return releaseLock(err)
}
DockerRunner._startContainer(
return DockerRunner._startContainer(
options,
volumes,
attachStreamHandler,
@@ -326,85 +348,93 @@ const DockerRunner = {
// Check that volumes exist and are directories
_checkVolumes(options, volumes, callback) {
if (callback == null) {
callback = function(error, containerName) {}
}
if (usingSiblingContainers()) {
// Server Pro, with sibling-containers active, skip checks
return callback(null)
}
const checkVolume = (path, cb) =>
fs.stat(path, (err, stats) => {
fs.stat(path, function(err, stats) {
if (err != null) {
return cb(err)
}
if (!stats.isDirectory()) {
return cb(new Error('not a directory'))
if (!(stats != null ? stats.isDirectory() : undefined)) {
return cb(DockerRunner.ERR_NOT_DIRECTORY)
}
cb()
return cb()
})
const jobs = []
for (const vol in volumes) {
jobs.push((cb) => checkVolume(vol, cb))
;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
}
async.series(jobs, callback)
return async.series(jobs, callback)
},
_startContainer(options, volumes, attachStreamHandler, callback) {
if (callback == null) {
callback = function(error, output) {}
}
callback = _.once(callback)
const { name } = options
logger.log({ container_name: name }, 'starting container')
const container = dockerode.getContainer(name)
function createAndStartContainer() {
dockerode.createContainer(options, (error, container) => {
const createAndStartContainer = () =>
dockerode.createContainer(options, function(error, container) {
if (error != null) {
return callback(error)
}
startExistingContainer()
return startExistingContainer()
})
}
function startExistingContainer() {
var startExistingContainer = () =>
DockerRunner.attachToContainer(
options.name,
attachStreamHandler,
(error) => {
function(error) {
if (error != null) {
return callback(error)
}
container.start((error) => {
if (error != null && error.statusCode !== 304) {
callback(error)
} else {
return container.start(function(error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) !== 304
) {
// already running
callback()
return callback(error)
} else {
return callback()
}
})
}
)
}
container.inspect((error, stats) => {
if (error != null && error.statusCode === 404) {
createAndStartContainer()
return container.inspect(function(error, stats) {
if ((error != null ? error.statusCode : undefined) === 404) {
return createAndStartContainer()
} else if (error != null) {
logger.err(
{ container_name: name, error },
'unable to inspect container to start'
)
callback(error)
return callback(error)
} else {
startExistingContainer()
return startExistingContainer()
}
})
},
attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
const container = dockerode.getContainer(containerId)
container.attach({ stdout: 1, stderr: 1, stream: 1 }, (error, stream) => {
return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
error,
stream
) {
if (error != null) {
logger.error(
{ err: error, containerId },
{ err: error, container_id: containerId },
'error attaching to container'
)
return attachStartCallback(error)
@@ -412,10 +442,10 @@ const DockerRunner = {
attachStartCallback()
}
logger.log({ containerId }, 'attached to container')
logger.log({ container_id: containerId }, 'attached to container')
const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
function createStringOutputStream(name) {
const createStringOutputStream = function(name) {
return {
data: '',
overflowed: false,
@@ -424,18 +454,18 @@ const DockerRunner = {
return
}
if (this.data.length < MAX_OUTPUT) {
this.data += data
return (this.data += data)
} else {
logger.error(
{
containerId,
container_id: containerId,
length: this.data.length,
maxLen: MAX_OUTPUT
},
`${name} exceeds max size`
)
this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
this.overflowed = true
return (this.overflowed = true)
}
}
// kill container if too much output
@@ -448,52 +478,63 @@ const DockerRunner = {
container.modem.demuxStream(stream, stdout, stderr)
stream.on('error', (err) =>
stream.on('error', err =>
logger.error(
{ err, containerId },
{ err, container_id: containerId },
'error reading from container stream'
)
)
stream.on('end', () =>
return stream.on('end', () =>
attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
)
})
},
waitForContainer(containerId, timeout, _callback) {
const callback = _.once(_callback)
if (_callback == null) {
_callback = function(error, exitCode) {}
}
const callback = function(...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
}
const container = dockerode.getContainer(containerId)
let timedOut = false
const timeoutId = setTimeout(() => {
const timeoutId = setTimeout(function() {
timedOut = true
logger.log({ containerId }, 'timeout reached, killing container')
container.kill((err) => {
logger.warn({ err, containerId }, 'failed to kill container')
})
logger.log(
{ container_id: containerId },
'timeout reached, killing container'
)
return container.kill(function() {})
}, timeout)
logger.log({ containerId }, 'waiting for docker container')
container.wait((error, res) => {
logger.log({ container_id: containerId }, 'waiting for docker container')
return container.wait(function(error, res) {
if (error != null) {
clearTimeout(timeoutId)
logger.error({ err: error, containerId }, 'error waiting for container')
logger.error(
{ err: error, container_id: containerId },
'error waiting for container'
)
return callback(error)
}
if (timedOut) {
logger.log({ containerId }, 'docker container timed out')
error = new Error('container timed out')
error = DockerRunner.ERR_TIMED_OUT
error.timedout = true
callback(error)
return callback(error)
} else {
clearTimeout(timeoutId)
logger.log(
{ containerId, exitCode: res.StatusCode },
{ container_id: containerId, exitCode: res.StatusCode },
'docker container returned'
)
callback(null, res.StatusCode)
return callback(null, res.StatusCode)
}
})
},
@@ -505,9 +546,12 @@ const DockerRunner = {
// async exception, but if you delete by id it just does a normal
// error callback. We fall back to deleting by name if no id is
// supplied.
LockManager.runWithLock(
if (callback == null) {
callback = function(error) {}
}
return LockManager.runWithLock(
containerName,
(releaseLock) =>
releaseLock =>
DockerRunner._destroyContainer(
containerId || containerName,
shouldForce,
@@ -518,31 +562,46 @@ const DockerRunner = {
},
_destroyContainer(containerId, shouldForce, callback) {
logger.log({ containerId }, 'destroying docker container')
if (callback == null) {
callback = function(error) {}
}
logger.log({ container_id: containerId }, 'destroying docker container')
const container = dockerode.getContainer(containerId)
container.remove({ force: shouldForce === true, v: true }, (error) => {
if (error != null && error.statusCode === 404) {
return container.remove({ force: shouldForce === true }, function(error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) === 404
) {
logger.warn(
{ err: error, containerId },
{ err: error, container_id: containerId },
'container not found, continuing'
)
error = null
}
if (error != null) {
logger.error({ err: error, containerId }, 'error destroying container')
logger.error(
{ err: error, container_id: containerId },
'error destroying container'
)
} else {
logger.log({ containerId }, 'destroyed container')
logger.log({ container_id: containerId }, 'destroyed container')
}
callback(error)
return callback(error)
})
},
// handle expiry of docker containers
MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge || ONE_HOUR_IN_MS,
MAX_CONTAINER_AGE:
Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000),
examineOldContainer(container, callback) {
const name = container.Name || (container.Names && container.Names[0])
if (callback == null) {
callback = function(error, name, id, ttl) {}
}
const name =
container.Name ||
(container.Names != null ? container.Names[0] : undefined)
const created = container.Created * 1000 // creation time is returned in seconds
const now = Date.now()
const age = now - created
@@ -552,29 +611,36 @@ const DockerRunner = {
{ containerName: name, created, now, age, maxAge, ttl },
'checking whether to destroy container'
)
return { name, id: container.Id, ttl }
return callback(null, name, container.Id, ttl)
},
destroyOldContainers(callback) {
dockerode.listContainers({ all: true }, (error, containers) => {
if (callback == null) {
callback = function(error) {}
}
return dockerode.listContainers({ all: true }, function(error, containers) {
if (error != null) {
return callback(error)
}
const jobs = []
for (const container of containers) {
const { name, id, ttl } = DockerRunner.examineOldContainer(container)
for (const container of Array.from(containers || [])) {
;(container =>
DockerRunner.examineOldContainer(container, function(
err,
name,
id,
ttl
) {
if (name.slice(0, 9) === '/project-' && ttl <= 0) {
// strip the / prefix
// the LockManager uses the plain container name
const plainName = name.slice(1)
jobs.push((cb) =>
DockerRunner.destroyContainer(plainName, id, false, () => cb())
return jobs.push(cb =>
DockerRunner.destroyContainer(name, id, false, () => cb())
)
}
}))(container)
}
// Ignore errors because some containers get stuck but
// will be destroyed next time
async.series(jobs, callback)
return async.series(jobs, callback)
})
},
@@ -591,13 +657,8 @@ const DockerRunner = {
const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
containerMonitorTimeout = setTimeout(() => {
containerMonitorInterval = setInterval(
() =>
DockerRunner.destroyOldContainers((err) => {
if (err) {
logger.error({ err }, 'failed to destroy old containers')
}
}),
ONE_HOUR_IN_MS
() => DockerRunner.destroyOldContainers(),
(oneHour = 60 * 60 * 1000)
)
}, randomDelay)
},
@@ -608,12 +669,27 @@ const DockerRunner = {
containerMonitorTimeout = undefined
}
if (containerMonitorInterval) {
clearInterval(containerMonitorInterval)
containerMonitorInterval = undefined
clearInterval(containerMonitorTimeout)
containerMonitorTimeout = undefined
}
}
}
DockerRunner.startContainerMonitor()
module.exports = DockerRunner
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
function __guardMethod__(obj, methodName, transform) {
if (
typeof obj !== 'undefined' &&
obj !== null &&
typeof obj[methodName] === 'function'
) {
return transform(obj, methodName)
} else {
return undefined
}
}

View File

@@ -19,7 +19,6 @@ const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')
const fs = require('fs')
const ProcessTable = {} // table of currently running jobs (pids or docker container names)
@@ -36,8 +35,7 @@ module.exports = LatexRunner = {
timeout,
image,
environment,
flags,
compileGroup
flags
} = options
if (!compiler) {
compiler = 'pdflatex'
@@ -47,15 +45,7 @@ module.exports = LatexRunner = {
} // milliseconds
logger.log(
{
directory,
compiler,
timeout,
mainFile,
environment,
flags,
compileGroup
},
{ directory, compiler, timeout, mainFile, environment, flags },
'starting compile'
)
@@ -88,7 +78,6 @@ module.exports = LatexRunner = {
image,
timeout,
environment,
compileGroup,
function(error, output) {
delete ProcessTable[id]
if (error != null) {
@@ -96,13 +85,13 @@ module.exports = LatexRunner = {
}
const runs =
__guard__(
__guard__(output != null ? output.stderr : undefined, (x1) =>
__guard__(output != null ? output.stderr : undefined, x1 =>
x1.match(/^Run number \d+ of .*latex/gm)
),
(x) => x.length
x => x.length
) || 0
const failed =
__guard__(output != null ? output.stdout : undefined, (x2) =>
__guard__(output != null ? output.stdout : undefined, x2 =>
x2.match(/^Latexmk: Errors/m)
) != null
? 1
@@ -122,55 +111,27 @@ module.exports = LatexRunner = {
stderr != null
? stderr.match(/Percent of CPU this job got: (\d+)/m)
: undefined,
(x3) => x3[1]
x3 => x3[1]
) || 0
timings['cpu-time'] =
__guard__(
stderr != null
? stderr.match(/User time.*: (\d+.\d+)/m)
: undefined,
(x4) => x4[1]
x4 => x4[1]
) || 0
timings['sys-time'] =
__guard__(
stderr != null
? stderr.match(/System time.*: (\d+.\d+)/m)
: undefined,
(x5) => x5[1]
x5 => x5[1]
) || 0
// record output files
LatexRunner.writeLogOutput(project_id, directory, output, () => {
return callback(error, output, stats, timings)
})
}
))
},
writeLogOutput(project_id, directory, output, callback) {
if (!output) {
return callback()
}
// internal method for writing non-empty log files
function _writeFile(file, content, cb) {
if (content && content.length > 0) {
fs.writeFile(file, content, (err) => {
if (err) {
logger.error({ project_id, file }, 'error writing log file') // don't fail on error
}
cb()
})
} else {
cb()
}
}
// write stdout and stderr, ignoring errors
_writeFile(Path.join(directory, 'output.stdout'), output.stdout, () => {
_writeFile(Path.join(directory, 'output.stderr'), output.stderr, () => {
callback()
})
})
},
killLatex(project_id, callback) {
if (callback == null) {
callback = function(error) {}
@@ -202,7 +163,7 @@ module.exports = LatexRunner = {
return (
__guard__(
Settings != null ? Settings.clsi : undefined,
(x) => x.latexmkCommandPrefix
x => x.latexmkCommandPrefix
) || []
).concat(args)
},

View File

@@ -15,29 +15,17 @@
*/
let CommandRunner
const { spawn } = require('child_process')
const _ = require('underscore')
const logger = require('logger-sharelatex')
logger.info('using standard command runner')
module.exports = CommandRunner = {
run(
project_id,
command,
directory,
image,
timeout,
environment,
compileGroup,
callback
) {
run(project_id, command, directory, image, timeout, environment, callback) {
let key, value
if (callback == null) {
callback = function(error) {}
} else {
callback = _.once(callback)
}
command = Array.from(command).map((arg) =>
command = Array.from(command).map(arg =>
arg.toString().replace('$COMPILE_DIR', directory)
)
logger.log({ project_id, command, directory }, 'running command')
@@ -58,7 +46,7 @@ module.exports = CommandRunner = {
const proc = spawn(command[0], command.slice(1), { cwd: directory, env })
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', (data) => (stdout += data))
proc.stdout.on('data', data => (stdout += data))
proc.on('error', function(err) {
logger.err(

View File

@@ -19,7 +19,7 @@ const fs = require('fs')
const fse = require('fs-extra')
const Path = require('path')
const logger = require('logger-sharelatex')
const _ = require('lodash')
const _ = require('underscore')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
@@ -99,16 +99,13 @@ module.exports = OutputCacheManager = {
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
(Settings.clsi != null ? Settings.clsi.strace : undefined)
) {
OutputCacheManager.archiveLogs(
outputFiles,
compileDir,
buildId,
function (err) {
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
err
) {
if (err != null) {
return logger.warn({ err }, 'erroring archiving log files')
}
}
)
})
}
// make the new cache directory
@@ -283,7 +280,7 @@ module.exports = OutputCacheManager = {
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(
__guard__(dir.split('-'), (x) => x[0]),
__guard__(dir.split('-'), x => x[0]),
16
)
const age = currentTime - dirTime

View File

@@ -44,7 +44,7 @@ module.exports = OutputFileFinder = {
if (!incomingResources[file]) {
outputFiles.push({
path: file,
type: __guard__(file.match(/\.([^\.]+)$/), (x) => x[1])
type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
})
}
}
@@ -87,7 +87,7 @@ module.exports = OutputFileFinder = {
const proc = spawn('find', args)
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
proc.stdout.on('data', chunk => (stdout += chunk.toString()))
proc.on('error', callback)
return proc.on('close', function(code) {
if (code !== 0) {

View File

@@ -19,7 +19,7 @@ const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('lodash')
const _ = require('underscore')
module.exports = OutputFileOptimiser = {
optimiseFile(src, dst, callback) {
@@ -45,7 +45,8 @@ module.exports = OutputFileOptimiser = {
checkIfPDFIsOptimised(file, callback) {
const SIZE = 16 * 1024 // check the header of the pdf
const result = Buffer.alloc(SIZE) // fills with zeroes by default
const result = new Buffer(SIZE)
result.fill(0) // prevent leakage of uninitialised buffer
return fs.open(file, 'r', function(err, fd) {
if (err != null) {
return callback(err)
@@ -77,7 +78,7 @@ module.exports = OutputFileOptimiser = {
const timer = new Metrics.Timer('qpdf')
const proc = spawn('qpdf', args)
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
proc.stdout.on('data', chunk => (stdout += chunk.toString()))
callback = _.once(callback) // avoid double call back for error and close event
proc.on('error', function(err) {
logger.warn({ err, args }, 'qpdf failed')

View File

@@ -20,37 +20,15 @@ const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')
const diskusage = require('diskusage')
module.exports = ProjectPersistenceManager = {
EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,
refreshExpiryTimeout(callback) {
if (callback == null) {
callback = function (error) {}
}
diskusage.check('/', function (err, stats) {
if (err) {
logger.err({ err: err }, 'error getting disk usage')
return callback(err)
}
const lowDisk = stats.available / stats.total < 0.1
const lowerExpiry = ProjectPersistenceManager.EXPIRY_TIMEOUT * 0.9
if (lowDisk && Settings.project_cache_length_ms / 2 < lowerExpiry) {
logger.warn(
{ stats: stats },
'disk running low on space, modifying EXPIRY_TIMEOUT'
)
ProjectPersistenceManager.EXPIRY_TIMEOUT = lowerExpiry
}
callback()
})
},
markProjectAsJustAccessed(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = (cb) =>
const job = cb =>
db.Project.findOrCreate({ where: { project_id } })
.spread((project, created) =>
project
@@ -74,8 +52,8 @@ module.exports = ProjectPersistenceManager = {
return callback(error)
}
logger.log({ project_ids }, 'clearing expired projects')
const jobs = Array.from(project_ids || []).map((project_id) =>
((project_id) => (callback) =>
const jobs = Array.from(project_ids || []).map(project_id =>
(project_id => callback =>
ProjectPersistenceManager.clearProjectFromCache(project_id, function(
err
) {
@@ -91,7 +69,7 @@ module.exports = ProjectPersistenceManager = {
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
(error) => callback()
error => callback()
)
})
})
@@ -148,7 +126,7 @@ module.exports = ProjectPersistenceManager = {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project from database')
const job = (cb) =>
const job = cb =>
db.Project.destroy({ where: { project_id } })
.then(() => cb())
.error(cb)
@@ -166,10 +144,10 @@ module.exports = ProjectPersistenceManager = {
const q = {}
q[db.op.lt] = keepProjectsFrom
return db.Project.findAll({ where: { lastAccessed: q } })
.then((projects) =>
.then(projects =>
cb(
null,
projects.map((project) => project.project_id)
projects.map(project => project.project_id)
)
)
.error(cb)

View File

@@ -61,13 +61,7 @@ module.exports = RequestParser = {
response.imageName = this._parseAttribute(
'imageName',
compile.options.imageName,
{
type: 'string',
validValues:
settings.clsi &&
settings.clsi.docker &&
settings.clsi.docker.allowedImages
}
{ type: 'string' }
)
response.draft = this._parseAttribute('draft', compile.options.draft, {
default: false,
@@ -80,17 +74,7 @@ module.exports = RequestParser = {
default: [],
type: 'object'
})
if (settings.allowedCompileGroups) {
response.compileGroup = this._parseAttribute(
'compileGroup',
compile.options.compileGroup,
{
validValues: settings.allowedCompileGroups,
default: '',
type: 'string'
}
)
}
// The syncType specifies whether the request contains all
// resources (full) or only those resources to be updated
// in-place (incremental).

View File

@@ -56,9 +56,7 @@ module.exports = ResourceStateManager = {
})
} else {
logger.log({ state, basePath }, 'writing sync state')
const resourceList = Array.from(resources).map(
(resource) => resource.path
)
const resourceList = Array.from(resources).map(resource => resource.path)
return fs.writeFile(
stateFile,
[...Array.from(resourceList), `stateHash:${state}`].join('\n'),
@@ -88,7 +86,7 @@ module.exports = ResourceStateManager = {
)
}
const array =
__guard__(result != null ? result.toString() : undefined, (x) =>
__guard__(result != null ? result.toString() : undefined, x =>
x.split('\n')
) || []
const adjustedLength = Math.max(array.length, 1)
@@ -104,7 +102,7 @@ module.exports = ResourceStateManager = {
new Errors.FilesOutOfSyncError('invalid state for incremental update')
)
} else {
const resources = Array.from(resourceList).map((path) => ({ path }))
const resources = Array.from(resourceList).map(path => ({ path }))
return callback(null, resources)
}
})
@@ -118,7 +116,7 @@ module.exports = ResourceStateManager = {
}
for (file of Array.from(resources || [])) {
for (const dir of Array.from(
__guard__(file != null ? file.path : undefined, (x) => x.split('/'))
__guard__(file != null ? file.path : undefined, x => x.split('/'))
)) {
if (dir === '..') {
return callback(new Error('relative path in resource file list'))
@@ -131,8 +129,8 @@ module.exports = ResourceStateManager = {
seenFile[file] = true
}
const missingFiles = Array.from(resources)
.filter((resource) => !seenFile[resource.path])
.map((resource) => resource.path)
.filter(resource => !seenFile[resource.path])
.map(resource => resource.path)
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
logger.err(
{ missingFiles, basePath, allFiles, resources },

View File

@@ -109,13 +109,13 @@ module.exports = ResourceWriter = {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, (error) => {
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map((resource) =>
((resource) => {
return (callback) =>
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(project_id, resource, basePath, callback)
})(resource)
)
@@ -127,17 +127,17 @@ module.exports = ResourceWriter = {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, (error) => {
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
return this._removeExtraneousFiles(resources, basePath, (error) => {
return this._removeExtraneousFiles(resources, basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map((resource) =>
((resource) => {
return (callback) =>
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(
project_id,
resource,
@@ -231,9 +231,7 @@ module.exports = ResourceWriter = {
path === 'output.pdf' ||
path === 'output.dvi' ||
path === 'output.log' ||
path === 'output.xdv' ||
path === 'output.stdout' ||
path === 'output.stderr'
path === 'output.xdv'
) {
should_delete = true
}
@@ -242,7 +240,7 @@ module.exports = ResourceWriter = {
should_delete = true
}
if (should_delete) {
return jobs.push((callback) =>
return jobs.push(callback =>
ResourceWriter._deleteFileIfNotDirectory(
Path.join(basePath, path),
callback
@@ -303,9 +301,7 @@ module.exports = ResourceWriter = {
if (error != null) {
return callback(error)
}
return fs.mkdir(Path.dirname(path), { recursive: true }, function (
error
) {
return fs.mkdir(Path.dirname(path), { recursive: true }, function(error) {
if (error != null) {
return callback(error)
}

View File

@@ -43,7 +43,7 @@ module.exports = SafeReader = {
}
return callback(null, ...Array.from(result))
})
const buff = Buffer.alloc(size) // fills with zeroes by default
const buff = new Buffer(size, 0) // fill with zeros
return fs.read(fd, buff, 0, buff.length, 0, function(
err,
bytesRead,

View File

@@ -26,7 +26,7 @@ module.exports = ForbidSymlinks = function (staticFn, root, options) {
const basePath = Path.resolve(root)
return function(req, res, next) {
let file, project_id, result
const path = __guard__(url.parse(req.url), (x) => x.pathname)
const path = __guard__(url.parse(req.url), x => x.pathname)
// check that the path is of the form /project_id_or_name/path/to/file.log
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
project_id = result[1]

View File

@@ -42,10 +42,7 @@ module.exports = TikzManager = {
if (error != null) {
return callback(error)
}
return SafeReader.readFile(path, 65536, 'utf8', function (
error,
content
) {
return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}

View File

@@ -60,8 +60,8 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
const jobs = Array.from(urls || []).map((url) =>
((url) => (callback) =>
const jobs = Array.from(urls || []).map(url =>
(url => callback =>
UrlCache._clearUrlFromCache(project_id, url, function(error) {
if (error != null) {
logger.error(
@@ -95,10 +95,10 @@ module.exports = UrlCache = {
}
if (needsDownloading) {
logger.log({ url, lastModified }, 'downloading URL')
return UrlFetcher.pipeUrlToFileWithRetry(
return UrlFetcher.pipeUrlToFile(
url,
UrlCache._cacheFilePathForUrl(project_id, url),
(error) => {
error => {
if (error != null) {
return callback(error)
}
@@ -106,7 +106,7 @@ module.exports = UrlCache = {
project_id,
url,
lastModified,
(error) => {
error => {
if (error != null) {
return callback(error)
}
@@ -153,7 +153,14 @@ module.exports = UrlCache = {
},
_cacheFileNameForUrl(project_id, url) {
return project_id + ':' + crypto.createHash('md5').update(url).digest('hex')
return (
project_id +
':' +
crypto
.createHash('md5')
.update(url)
.digest('hex')
)
},
_cacheFilePathForUrl(project_id, url) {
@@ -190,9 +197,7 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function (
error
) {
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
if (error != null) {
return callback(error)
}
@@ -221,9 +226,9 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function(error, urlDetails) {}
}
const job = (cb) =>
const job = cb =>
db.UrlCache.findOne({ where: { url, project_id } })
.then((urlDetails) => cb(null, urlDetails))
.then(urlDetails => cb(null, urlDetails))
.error(cb)
return dbQueue.queue.push(job, callback)
},
@@ -232,7 +237,7 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function(error) {}
}
const job = (cb) =>
const job = cb =>
db.UrlCache.findOrCreate({ where: { url, project_id } })
.spread((urlDetails, created) =>
urlDetails
@@ -248,7 +253,7 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function(error) {}
}
const job = (cb) =>
const job = cb =>
db.UrlCache.destroy({ where: { url, project_id } })
.then(() => cb(null))
.error(cb)
@@ -259,12 +264,12 @@ module.exports = UrlCache = {
if (callback == null) {
callback = function(error, urls) {}
}
const job = (cb) =>
const job = cb =>
db.UrlCache.findAll({ where: { project_id } })
.then((urlEntries) =>
.then(urlEntries =>
cb(
null,
urlEntries.map((entry) => entry.url)
urlEntries.map(entry => entry.url)
)
)
.error(cb)

View File

@@ -18,18 +18,10 @@ const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')
const async = require('async')
const oneMinute = 60 * 1000
module.exports = UrlFetcher = {
pipeUrlToFileWithRetry(url, filePath, callback) {
const doDownload = function (cb) {
UrlFetcher.pipeUrlToFile(url, filePath, cb)
}
async.retry(3, doDownload, callback)
},
pipeUrlToFile(url, filePath, _callback) {
if (_callback == null) {
_callback = function(error) {}

View File

@@ -10,7 +10,7 @@
*/
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('lodash')
const _ = require('underscore')
const logger = require('logger-sharelatex')
const options = _.extend({ logging: false }, Settings.mysql.clsi)
@@ -62,6 +62,6 @@ module.exports = {
return sequelize
.sync()
.then(() => logger.log('db sync complete'))
.catch((err) => console.log(err, 'error syncing'))
.catch(err => console.log(err, 'error syncing'))
}
}

View File

@@ -1,9 +1,11 @@
clsi
--acceptance-creds=None
--data-dirs=cache,compiles,db
--dependencies=
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=TEXLIVE_IMAGE
--node-version=10.22.1
--language=es
--node-version=10.19.0
--public-repo=True
--script-version=3.3.3
--script-version=2.1.0

View File

@@ -9,7 +9,7 @@ module.exports = {
username: 'clsi',
dialect: 'sqlite',
storage:
process.env.SQLITE_PATH || Path.resolve(__dirname, '../db/db.sqlite'),
process.env.SQLITE_PATH || Path.resolve(__dirname + '/../db/db.sqlite'),
pool: {
max: 1,
min: 1
@@ -26,10 +26,10 @@ module.exports = {
parseInt(process.env.PROCESS_LIFE_SPAN_LIMIT_MS) || 60 * 60 * 24 * 1000 * 2,
path: {
compilesDir: Path.resolve(__dirname, '../compiles'),
clsiCacheDir: Path.resolve(__dirname, '../cache'),
synctexBaseDir(projectId) {
return Path.join(this.compilesDir, projectId)
compilesDir: Path.resolve(__dirname + '/../compiles'),
clsiCacheDir: Path.resolve(__dirname + '/../cache'),
synctexBaseDir(project_id) {
return Path.join(this.compilesDir, project_id)
}
},
@@ -57,25 +57,13 @@ module.exports = {
parallelSqlQueryLimit: process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT || 1,
filestoreDomainOveride: process.env.FILESTORE_DOMAIN_OVERRIDE,
texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
texliveOpenoutAny: process.env.TEXLIVE_OPENOUT_ANY,
sentry: {
dsn: process.env.SENTRY_DSN
}
}
if (process.env.ALLOWED_COMPILE_GROUPS) {
try {
module.exports.allowedCompileGroups = process.env.ALLOWED_COMPILE_GROUPS.split(
' '
)
} catch (error) {
console.error(error, 'could not apply allowed compile group setting')
process.exit(1)
}
}
if (process.env.DOCKER_RUNNER) {
let seccompProfilePath
let seccomp_profile_path
module.exports.clsi = {
dockerRunner: process.env.DOCKER_RUNNER === 'true',
docker: {
@@ -83,61 +71,28 @@ if (process.env.DOCKER_RUNNER) {
image:
process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
env: {
HOME: '/tmp'
HOME: process.env.TEXLIVE_HOME || '/tmp',
TMPDIR: process.env.TEXLIVE_TMPDIR || '/tmp'
},
socketPath: '/var/run/docker.sock',
user: process.env.TEXLIVE_IMAGE_USER || 'tex'
},
optimiseInDocker: true,
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
checkProjectsIntervalMs: 10 * 60 * 1000
}
try {
// Override individual docker settings using path-based keys, e.g.:
// compileGroupDockerConfigs = {
// priority: { 'HostConfig.CpuShares': 100 }
// beta: { 'dotted.path.here', 'value'}
// }
const compileGroupConfig = JSON.parse(
process.env.COMPILE_GROUP_DOCKER_CONFIGS || '{}'
seccomp_profile_path = Path.resolve(
__dirname + '/../seccomp/clsi-profile.json'
)
// Automatically clean up wordcount and synctex containers
const defaultCompileGroupConfig = {
wordcount: { 'HostConfig.AutoRemove': true },
synctex: { 'HostConfig.AutoRemove': true }
}
module.exports.clsi.docker.compileGroupConfig = Object.assign(
defaultCompileGroupConfig,
compileGroupConfig
)
} catch (error) {
console.error(error, 'could not apply compile group docker configs')
process.exit(1)
}
try {
seccompProfilePath = Path.resolve(__dirname, '../seccomp/clsi-profile.json')
module.exports.clsi.docker.seccomp_profile = JSON.stringify(
JSON.parse(require('fs').readFileSync(seccompProfilePath))
JSON.parse(require('fs').readFileSync(seccomp_profile_path))
)
} catch (error) {
console.error(
console.log(
error,
`could not load seccomp profile from ${seccompProfilePath}`
`could not load seccom profile from ${seccomp_profile_path}`
)
process.exit(1)
}
if (process.env.ALLOWED_IMAGES) {
try {
module.exports.clsi.docker.allowedImages = process.env.ALLOWED_IMAGES.split(
' '
)
} catch (error) {
console.error(error, 'could not apply allowed images setting')
process.exit(1)
}
}
module.exports.path.synctexBaseDir = () => '/compile'

View File

@@ -3,7 +3,6 @@ version: "2.3"
services:
dev:
environment:
ALLOWED_IMAGES: "quay.io/sharelatex/texlive-full:2017.1"
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEXLIVE_IMAGE_USER: "tex"
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
@@ -19,7 +18,6 @@ services:
ci:
environment:
ALLOWED_IMAGES: ${TEXLIVE_IMAGE}
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEXLIVE_IMAGE_USER: "tex"
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee

View File

@@ -10,7 +10,6 @@ services:
command: npm run test:unit:_run
environment:
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
test_acceptance:
@@ -26,7 +25,6 @@ services:
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
TEXLIVE_IMAGE:
command: npm run test:acceptance:_run

View File

@@ -15,8 +15,7 @@ services:
environment:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
command: npm run --silent test:unit
command: npm run test:unit
test_acceptance:
build:
@@ -36,6 +35,5 @@ services:
MOCHA_GREP: ${MOCHA_GREP}
LOG_LEVEL: ERROR
NODE_ENV: test
NODE_OPTIONS: "--unhandled-rejections=strict"
command: npm run --silent test:acceptance
command: npm run test:acceptance

View File

@@ -8,6 +8,7 @@
"execMap": {
"js": "npm run start"
},
"watch": [
"app/js/",
"app.js",

855
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -13,7 +13,7 @@
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"nodemon": "nodemon --config nodemon.json",
"lint": "node_modules/.bin/eslint --max-warnings 0 .",
"lint": "node_modules/.bin/eslint .",
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
},
@@ -21,22 +21,21 @@
"dependencies": {
"async": "3.2.0",
"body-parser": "^1.19.0",
"diskusage": "^1.1.3",
"dockerode": "^3.1.0",
"express": "^4.17.1",
"fs-extra": "^8.1.0",
"heapdump": "^0.3.15",
"lockfile": "^1.0.4",
"lodash": "^4.17.20",
"logger-sharelatex": "^2.2.0",
"logger-sharelatex": "^1.9.1",
"lynx": "0.2.0",
"metrics-sharelatex": "^2.6.0",
"mysql": "^2.18.1",
"request": "^2.88.2",
"sequelize": "^5.21.5",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
"smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0",
"sqlite3": "^4.1.1",
"underscore": "^1.11.0",
"underscore": "^1.9.2",
"v8-profiler-node8": "^6.1.1",
"wrench": "~1.5.9"
},
@@ -60,7 +59,7 @@
"eslint-plugin-react": "^7.19.0",
"eslint-plugin-standard": "^4.0.1",
"mocha": "^7.1.0",
"prettier": "^2.0.0",
"prettier": "^1.19.1",
"prettier-eslint-cli": "^5.0.0",
"sandboxed-module": "^2.0.3",
"sinon": "~9.0.1",

View File

@@ -1,102 +0,0 @@
const Client = require('./helpers/Client')
const ClsiApp = require('./helpers/ClsiApp')
const { expect } = require('chai')
describe('AllowedImageNames', function () {
beforeEach(function (done) {
this.project_id = Client.randomId()
this.request = {
options: {
imageName: undefined
},
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
}
]
}
ClsiApp.ensureRunning(done)
})
describe('with a valid name', function () {
beforeEach(function (done) {
this.request.options.imageName = process.env.TEXLIVE_IMAGE
Client.compile(this.project_id, this.request, (error, res, body) => {
this.error = error
this.res = res
this.body = body
done(error)
})
})
it('should return success', function () {
expect(this.res.statusCode).to.equal(200)
})
it('should return a PDF', function () {
let pdf
try {
pdf = Client.getOutputFile(this.body, 'pdf')
} catch (e) {}
expect(pdf).to.exist
})
})
describe('with an invalid name', function () {
beforeEach(function (done) {
this.request.options.imageName = 'something/evil:1337'
Client.compile(this.project_id, this.request, (error, res, body) => {
this.error = error
this.res = res
this.body = body
done(error)
})
})
it('should return non success', function () {
expect(this.res.statusCode).to.not.equal(200)
})
it('should not return a PDF', function () {
let pdf
try {
pdf = Client.getOutputFile(this.body, 'pdf')
} catch (e) {}
expect(pdf).to.not.exist
})
})
describe('wordcount', function () {
beforeEach(function (done) {
Client.compile(this.project_id, this.request, done)
})
it('should error out with an invalid imageName', function () {
Client.wordcountWithImage(
this.project_id,
'main.tex',
'something/evil:1337',
(error, result) => {
expect(String(error)).to.include('statusCode=400')
}
)
})
it('should produce a texcout a valid imageName', function () {
Client.wordcountWithImage(
this.project_id,
'main.tex',
process.env.TEXLIVE_IMAGE,
(error, result) => {
expect(error).to.not.exist
expect(result).to.exist
expect(result.texcount).to.exist
}
)
})
})
})

View File

@@ -24,7 +24,7 @@ const ChildProcess = require('child_process')
const ClsiApp = require('./helpers/ClsiApp')
const logger = require('logger-sharelatex')
const Path = require('path')
const fixturePath = (path) => {
const fixturePath = path => {
if (path.slice(0, 3) === 'tmp') {
return '/tmp/clsi_acceptance_tests' + path.slice(3)
}
@@ -50,8 +50,8 @@ const convertToPng = function (pdfPath, pngPath, callback) {
console.log(command)
const convert = ChildProcess.exec(command)
const stdout = ''
convert.stdout.on('data', (chunk) => console.log('STDOUT', chunk.toString()))
convert.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString()))
convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
return convert.on('exit', () => callback())
}
@@ -66,11 +66,11 @@ const compare = function (originalPath, generatedPath, callback) {
)} ${diff_file}`
)
let stderr = ''
proc.stderr.on('data', (chunk) => (stderr += chunk))
proc.stderr.on('data', chunk => (stderr += chunk))
return proc.on('exit', () => {
if (stderr.trim() === '0 (0)') {
// remove output diff if test matches expected image
fs.unlink(diff_file, (err) => {
fs.unlink(diff_file, err => {
if (err) {
throw err
}
@@ -89,8 +89,8 @@ const checkPdfInfo = function (pdfPath, callback) {
}
const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
let stdout = ''
proc.stdout.on('data', (chunk) => (stdout += chunk))
proc.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
proc.stdout.on('data', chunk => (stdout += chunk))
proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
return proc.on('exit', () => {
if (stdout.match(/Optimized:\s+yes/)) {
return callback(null, true)
@@ -136,14 +136,14 @@ const comparePdf = function (project_id, example_dir, callback) {
return convertToPng(
`tmp/${project_id}.pdf`,
`tmp/${project_id}-generated.png`,
(error) => {
error => {
if (error != null) {
throw error
}
return convertToPng(
`examples/${example_dir}/output.pdf`,
`tmp/${project_id}-source.png`,
(error) => {
error => {
if (error != null) {
throw error
}
@@ -163,7 +163,7 @@ const comparePdf = function (project_id, example_dir, callback) {
}
)
} else {
return compareMultiplePages(project_id, (error) => {
return compareMultiplePages(project_id, error => {
if (error != null) {
throw error
}
@@ -178,12 +178,7 @@ const comparePdf = function (project_id, example_dir, callback) {
)
}
const downloadAndComparePdf = function (
project_id,
example_dir,
url,
callback
) {
const downloadAndComparePdf = function(project_id, example_dir, url, callback) {
if (callback == null) {
callback = function(error) {}
}
@@ -217,9 +212,8 @@ describe('Example Documents', function () {
fsExtra.remove(fixturePath('tmp'), done)
})
return Array.from(fs.readdirSync(fixturePath('examples'))).map(
(example_dir) =>
((example_dir) =>
return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir =>
(example_dir =>
describe(example_dir, function() {
before(function() {
return (this.project_id = Client.randomId() + '_' + example_dir)
@@ -237,16 +231,10 @@ describe('Example Documents', function () {
error ||
__guard__(
body != null ? body.compile : undefined,
(x) => x.status
x => x.status
) === 'failure'
) {
console.log(
'DEBUG: error',
error,
'body',
JSON.stringify(body)
)
return done(new Error('Compile failed'))
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
}
const pdf = Client.getOutputFile(body, 'pdf')
return downloadAndComparePdf(
@@ -271,16 +259,10 @@ describe('Example Documents', function () {
error ||
__guard__(
body != null ? body.compile : undefined,
(x) => x.status
x => x.status
) === 'failure'
) {
console.log(
'DEBUG: error',
error,
'body',
JSON.stringify(body)
)
return done(new Error('Compile failed'))
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
}
const pdf = Client.getOutputFile(body, 'pdf')
return downloadAndComparePdf(

View File

@@ -69,7 +69,7 @@ Hello world
})
})
describe('from pdf to code', function () {
return describe('from pdf to code', function() {
return it('should return the correct location', function(done) {
return Client.syncFromPdf(
this.project_id,
@@ -88,104 +88,4 @@ Hello world
)
})
})
describe('when the project directory is not available', function () {
before(function () {
this.other_project_id = Client.randomId()
})
describe('from code to pdf', function () {
it('should return a 404 response', function (done) {
return Client.syncFromCode(
this.other_project_id,
'main.tex',
3,
5,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
describe('from pdf to code', function () {
it('should return a 404 response', function (done) {
return Client.syncFromPdf(
this.other_project_id,
1,
100,
200,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
})
describe('when the synctex file is not available', function () {
before(function (done) {
this.broken_project_id = Client.randomId()
const content = 'this is not valid tex' // not a valid tex file
this.request = {
resources: [
{
path: 'main.tex',
content
}
]
}
Client.compile(
this.broken_project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
describe('from code to pdf', function () {
it('should return a 404 response', function (done) {
return Client.syncFromCode(
this.broken_project_id,
'main.tex',
3,
5,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
describe('from pdf to code', function () {
it('should return a 404 response', function (done) {
return Client.syncFromPdf(
this.broken_project_id,
1,
100,
200,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
})
})

View File

@@ -56,7 +56,7 @@ describe('Timed out compile', function () {
})
return it('should return the log output file name', function() {
const outputFilePaths = this.body.compile.outputFiles.map((x) => x.path)
const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
return outputFilePaths.should.include('output.log')
})
})

View File

@@ -11,6 +11,7 @@
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const sinon = require('sinon')
const ClsiApp = require('./helpers/ClsiApp')
@@ -35,7 +36,9 @@ const Server = {
getFile() {},
randomId() {
return Math.random().toString(16).slice(2)
return Math.random()
.toString(16)
.slice(2)
}
}
@@ -336,7 +339,7 @@ describe('Url Caching', function () {
]
}
return Client.compile(this.project_id, this.request, (error) => {
return Client.compile(this.project_id, this.request, error => {
if (error != null) {
throw error
}

View File

@@ -23,7 +23,9 @@ module.exports = Client = {
host: Settings.apis.clsi.url,
randomId() {
return Math.random().toString(16).slice(2)
return Math.random()
.toString(16)
.slice(2)
},
compile(project_id, data, callback) {
@@ -62,7 +64,7 @@ module.exports = Client = {
const app = express()
app.use(express.static(directory))
console.log('starting test server on', port, host)
return app.listen(port, host).on('error', (error) => {
return app.listen(port, host).on('error', error => {
console.error('error starting server:', error.message)
return process.exit(1)
})
@@ -79,14 +81,13 @@ module.exports = Client = {
file,
line,
column
},
json: true
}
},
(error, response, body) => {
if (error != null) {
return callback(error)
}
return callback(null, body)
return callback(null, JSON.parse(body))
}
)
},
@@ -102,14 +103,13 @@ module.exports = Client = {
page,
h,
v
},
json: true
}
},
(error, response, body) => {
if (error != null) {
return callback(error)
}
return callback(null, body)
return callback(null, JSON.parse(body))
}
)
},
@@ -128,7 +128,7 @@ module.exports = Client = {
entities = entities.concat(
fs
.readdirSync(`${baseDirectory}/${directory}/${entity}`)
.map((subEntity) => {
.map(subEntity => {
if (subEntity === 'main.tex') {
rootResourcePath = `${entity}/${subEntity}`
}
@@ -187,11 +187,6 @@ module.exports = Client = {
},
wordcount(project_id, file, callback) {
const image = undefined
Client.wordcountWithImage(project_id, file, image, callback)
},
wordcountWithImage(project_id, file, image, callback) {
if (callback == null) {
callback = function(error, pdfPositions) {}
}
@@ -199,7 +194,6 @@ module.exports = Client = {
{
url: `${this.host}/project/${project_id}/wordcount`,
qs: {
image,
file
}
},
@@ -207,9 +201,6 @@ module.exports = Client = {
if (error != null) {
return callback(error)
}
if (response.statusCode !== 200) {
return callback(new Error(`statusCode=${response.statusCode}`))
}
return callback(null, JSON.parse(body))
}
)

View File

@@ -35,10 +35,10 @@ module.exports = {
return app.listen(
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
(x) => x.port
x => x.port
),
'localhost',
(error) => {
error => {
if (error != null) {
throw error
}

View File

@@ -13,11 +13,11 @@ const request = require('request')
const Settings = require('settings-sharelatex')
const async = require('async')
const fs = require('fs')
const _ = require('lodash')
const _ = require('underscore')
const concurentCompiles = 5
const totalCompiles = 50
const buildUrl = (path) =>
const buildUrl = path =>
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')
@@ -74,12 +74,12 @@ ${bodyContent}
)
}
const jobs = _.map(__range__(1, totalCompiles, true), (i) => (cb) =>
const jobs = _.map(__range__(1, totalCompiles, true), i => cb =>
makeRequest(i, cb)
)
const startTime = new Date()
async.parallelLimit(jobs, concurentCompiles, (err) => {
async.parallelLimit(jobs, concurentCompiles, err => {
if (err != null) {
console.error(err)
}

View File

@@ -1,40 +1,31 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const chai = require('chai')
if (Object.prototype.should == null) {
chai.should()
}
const { expect } = chai
const request = require('request')
const Settings = require('settings-sharelatex')
const buildUrl = (path) =>
const buildUrl = path =>
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
const url = buildUrl(`project/smoketest-${process.pid}/compile`)
module.exports = {
sendNewResult(res) {
this._run((error) => this._sendResponse(res, error))
},
sendLastResult(res) {
this._sendResponse(res, this._lastError)
},
triggerRun(cb) {
this._run((error) => {
this._lastError = error
cb(error)
})
},
_lastError: new Error('SmokeTestsPending'),
_sendResponse(res, error) {
let code, body
if (error) {
code = 500
body = error.message
} else {
code = 200
body = 'OK'
}
res.contentType('text/plain')
res.status(code).send(body)
},
_run(done) {
request.post(
describe('Running a compile', function() {
before(function(done) {
return request.post(
{
url,
json: {
@@ -81,22 +72,29 @@ module.exports = {
}
},
(error, response, body) => {
if (error) return done(error)
if (!body || !body.compile || !body.compile.outputFiles) {
return done(new Error('response payload incomplete'))
}
let pdfFound = false
let logFound = false
for (const file of body.compile.outputFiles) {
if (file.type === 'pdf') pdfFound = true
if (file.type === 'log') logFound = true
}
if (!pdfFound) return done(new Error('no pdf returned'))
if (!logFound) return done(new Error('no log returned'))
done()
this.error = error
this.response = response
this.body = body
return done()
}
)
})
it('should return the pdf', function() {
for (const file of Array.from(this.body.compile.outputFiles)) {
if (file.type === 'pdf') {
return
}
}
throw new Error('no pdf returned')
})
return it('should return the log', function() {
for (const file of Array.from(this.body.compile.outputFiles)) {
if (file.type === 'log') {
return
}
}
throw new Error('no log returned')
})
})

View File

@@ -12,7 +12,6 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
require('chai').should()
const { expect } = require('chai')
const modulePath = require('path').join(
__dirname,
'../../../app/js/CompileController'
@@ -115,7 +114,7 @@ describe('CompileController', function () {
compile: {
status: 'success',
error: null,
outputFiles: this.output_files.map((file) => {
outputFiles: this.output_files.map(file => {
return {
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
path: file.path,
@@ -288,60 +287,21 @@ describe('CompileController', function () {
this.CompileManager.wordcount = sinon
.stub()
.callsArgWith(4, null, (this.texcount = ['mock-texcount']))
return this.CompileController.wordcount(this.req, this.res, this.next)
})
it('should return the word count of a file', function() {
this.CompileController.wordcount(this.req, this.res, this.next)
return this.CompileManager.wordcount
.calledWith(this.project_id, undefined, this.file, this.image)
.should.equal(true)
})
it('should return the texcount info', function () {
this.CompileController.wordcount(this.req, this.res, this.next)
return it('should return the texcount info', function() {
return this.res.json
.calledWith({
texcount: this.texcount
})
.should.equal(true)
})
describe('when allowedImages is set', function () {
beforeEach(function () {
this.Settings.clsi = { docker: {} }
this.Settings.clsi.docker.allowedImages = [
'repo/image:tag1',
'repo/image:tag2'
]
this.res.send = sinon.stub()
this.res.status = sinon.stub().returns({ send: this.res.send })
})
describe('with an invalid image', function () {
beforeEach(function () {
this.req.query.image = 'something/evil:1337'
this.CompileController.wordcount(this.req, this.res, this.next)
})
it('should return a 400', function () {
expect(this.res.status.calledWith(400)).to.equal(true)
})
it('should not run the query', function () {
expect(this.CompileManager.wordcount.called).to.equal(false)
})
})
describe('with a valid image', function () {
beforeEach(function () {
this.req.query.image = 'repo/image:tag1'
this.CompileController.wordcount(this.req, this.res, this.next)
})
it('should not return a 400', function () {
expect(this.res.status.calledWith(400)).to.equal(false)
})
it('should run the query', function () {
expect(this.CompileManager.wordcount.called).to.equal(true)
})
})
})
})
})

View File

@@ -160,8 +160,7 @@ describe('CompileManager', function () {
compiler: (this.compiler = 'pdflatex'),
timeout: (this.timeout = 42000),
imageName: (this.image = 'example.com/image'),
flags: (this.flags = ['-file-line-error']),
compileGroup: (this.compileGroup = 'compile-group')
flags: (this.flags = ['-file-line-error'])
}
this.env = {}
this.Settings.compileDir = 'compiles'
@@ -200,8 +199,7 @@ describe('CompileManager', function () {
timeout: this.timeout,
image: this.image,
flags: this.flags,
environment: this.env,
compileGroup: this.compileGroup
environment: this.env
})
.should.equal(true)
})
@@ -255,8 +253,7 @@ describe('CompileManager', function () {
CHKTEX_OPTIONS: '-nall -e9 -e10 -w15 -w16',
CHKTEX_EXIT_ON_ERROR: 1,
CHKTEX_ULIMIT_OPTIONS: '-t 5 -v 64000'
},
compileGroup: this.compileGroup
}
})
.should.equal(true)
})
@@ -278,8 +275,7 @@ describe('CompileManager', function () {
timeout: this.timeout,
image: this.image,
flags: this.flags,
environment: this.env,
compileGroup: this.compileGroup
environment: this.env
})
.should.equal(true)
})
@@ -298,7 +294,6 @@ describe('CompileManager', function () {
this.proc = new EventEmitter()
this.proc.stdout = new EventEmitter()
this.proc.stderr = new EventEmitter()
this.proc.stderr.setEncoding = sinon.stub().returns(this.proc.stderr)
this.child_process.spawn = sinon.stub().returns(this.proc)
this.CompileManager.clearProject(
this.project_id,
@@ -333,7 +328,6 @@ describe('CompileManager', function () {
this.proc = new EventEmitter()
this.proc.stdout = new EventEmitter()
this.proc.stderr = new EventEmitter()
this.proc.stderr.setEncoding = sinon.stub().returns(this.proc.stderr)
this.child_process.spawn = sinon.stub().returns(this.proc)
this.CompileManager.clearProject(
this.project_id,
@@ -374,7 +368,7 @@ describe('CompileManager', function () {
this.column = 3
this.file_name = 'main.tex'
this.child_process.execFile = sinon.stub()
return (this.Settings.path.synctexBaseDir = (project_id) =>
return (this.Settings.path.synctexBaseDir = project_id =>
`${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`)
})
@@ -388,7 +382,7 @@ describe('CompileManager', function () {
this.stdout = `NODE\t${this.page}\t${this.h}\t${this.v}\t${this.width}\t${this.height}\n`
this.CommandRunner.run = sinon
.stub()
.callsArgWith(7, null, { stdout: this.stdout })
.callsArgWith(6, null, { stdout: this.stdout })
return this.CompileManager.syncFromCode(
this.project_id,
this.user_id,
@@ -447,7 +441,7 @@ describe('CompileManager', function () {
this.stdout = `NODE\t${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}/${this.file_name}\t${this.line}\t${this.column}\n`
this.CommandRunner.run = sinon
.stub()
.callsArgWith(7, null, { stdout: this.stdout })
.callsArgWith(6, null, { stdout: this.stdout })
return this.CompileManager.syncFromPdf(
this.project_id,
this.user_id,
@@ -489,7 +483,7 @@ describe('CompileManager', function () {
return describe('wordcount', function() {
beforeEach(function() {
this.CommandRunner.run = sinon.stub().callsArg(7)
this.CommandRunner.run = sinon.stub().callsArg(6)
this.fs.readFile = sinon
.stub()
.callsArgWith(

View File

@@ -36,7 +36,7 @@ describe('LockManager', function () {
this.callback = sinon.stub()
return this.LockManager.runWithLock(
'lock-one',
(releaseLock) =>
releaseLock =>
setTimeout(() => releaseLock(null, 'hello', 'world'), 100),
(err, ...args) => {
@@ -59,7 +59,7 @@ describe('LockManager', function () {
this.callback2 = sinon.stub()
this.LockManager.runWithLock(
'lock-one',
(releaseLock) =>
releaseLock =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100),
(err, ...args) => {
@@ -68,7 +68,7 @@ describe('LockManager', function () {
)
return this.LockManager.runWithLock(
'lock-two',
(releaseLock) =>
releaseLock =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200),
(err, ...args) => {
@@ -100,7 +100,7 @@ describe('LockManager', function () {
this.callback2 = sinon.stub()
this.LockManager.runWithLock(
'lock',
(releaseLock) =>
releaseLock =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100),
(err, ...args) => {
@@ -109,7 +109,7 @@ describe('LockManager', function () {
)
return this.LockManager.runWithLock(
'lock',
(releaseLock) =>
releaseLock =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200),
(err, ...args) => {
@@ -154,7 +154,7 @@ describe('LockManager', function () {
}
this.LockManager.runWithLock(
'lock',
(releaseLock) =>
releaseLock =>
setTimeout(
() => releaseLock(null, 'hello', 'world', 'one'),
1100
@@ -167,7 +167,7 @@ describe('LockManager', function () {
)
return this.LockManager.runWithLock(
'lock',
(releaseLock) =>
releaseLock =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100),
(err, ...args) => {
@@ -211,7 +211,7 @@ describe('LockManager', function () {
}
this.LockManager.runWithLock(
'lock',
(releaseLock) =>
releaseLock =>
setTimeout(
() => releaseLock(null, 'hello', 'world', 'one'),
1500
@@ -224,7 +224,7 @@ describe('LockManager', function () {
)
return this.LockManager.runWithLock(
'lock',
(releaseLock) =>
releaseLock =>
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100),
(err, ...args) => {

View File

@@ -69,8 +69,7 @@ describe('DockerRunner', function () {
return runner(callback)
}
}
},
globals: { Math } // used by lodash
}
})
this.Docker = Docker
this.getContainer = Docker.prototype.getContainer
@@ -86,7 +85,6 @@ describe('DockerRunner', function () {
this.project_id = 'project-id-123'
this.volumes = { '/local/compile/directory': '/compile' }
this.Settings.clsi.docker.image = this.defaultImage = 'default-image'
this.compileGroup = 'compile-group'
return (this.Settings.clsi.docker.env = { PATH: 'mock-path' })
})
@@ -123,7 +121,6 @@ describe('DockerRunner', function () {
this.image,
this.timeout,
this.env,
this.compileGroup,
(err, output) => {
this.callback(err, output)
return done()
@@ -173,7 +170,6 @@ describe('DockerRunner', function () {
this.image,
this.timeout,
this.env,
this.compileGroup,
this.callback
)
})
@@ -222,7 +218,6 @@ describe('DockerRunner', function () {
this.image,
this.timeout,
this.env,
this.compileGroup,
this.callback
)
})
@@ -256,7 +251,6 @@ describe('DockerRunner', function () {
null,
this.timeout,
this.env,
this.compileGroup,
this.callback
)
})
@@ -273,7 +267,7 @@ describe('DockerRunner', function () {
})
})
describe('with image override', function () {
return describe('with image override', function() {
beforeEach(function() {
this.Settings.texliveImageNameOveride = 'overrideimage.com/something'
this.DockerRunner._runAndWaitForContainer = sinon
@@ -286,7 +280,6 @@ describe('DockerRunner', function () {
this.image,
this.timeout,
this.env,
this.compileGroup,
this.callback
)
})
@@ -296,120 +289,6 @@ describe('DockerRunner', function () {
return image.should.equal('overrideimage.com/something/image:2016.2')
})
})
describe('with image restriction', function () {
beforeEach(function () {
this.Settings.clsi.docker.allowedImages = [
'repo/image:tag1',
'repo/image:tag2'
]
this.DockerRunner._runAndWaitForContainer = sinon
.stub()
.callsArgWith(3, null, (this.output = 'mock-output'))
})
describe('with a valid image', function () {
beforeEach(function () {
this.DockerRunner.run(
this.project_id,
this.command,
this.directory,
'repo/image:tag1',
this.timeout,
this.env,
this.compileGroup,
this.callback
)
})
it('should setup the container', function () {
this.DockerRunner._getContainerOptions.called.should.equal(true)
})
})
describe('with a invalid image', function () {
beforeEach(function () {
this.DockerRunner.run(
this.project_id,
this.command,
this.directory,
'something/different:evil',
this.timeout,
this.env,
this.compileGroup,
this.callback
)
})
it('should call the callback with an error', function () {
const err = new Error('image not allowed')
this.callback.called.should.equal(true)
this.callback.args[0][0].message.should.equal(err.message)
})
it('should not setup the container', function () {
this.DockerRunner._getContainerOptions.called.should.equal(false)
})
})
})
})
describe('run with _getOptions', function () {
beforeEach(function (done) {
// this.DockerRunner._getContainerOptions = sinon
// .stub()
// .returns((this.options = { mockoptions: 'foo' }))
this.DockerRunner._fingerprintContainer = sinon
.stub()
.returns((this.fingerprint = 'fingerprint'))
this.name = `project-${this.project_id}-${this.fingerprint}`
this.command = ['mock', 'command', '--outdir=$COMPILE_DIR']
this.command_with_dir = ['mock', 'command', '--outdir=/compile']
this.timeout = 42000
return done()
})
describe('when a compile group config is set', function () {
beforeEach(function () {
this.Settings.clsi.docker.compileGroupConfig = {
'compile-group': {
'HostConfig.newProperty': 'new-property'
},
'other-group': { otherProperty: 'other-property' }
}
this.DockerRunner._runAndWaitForContainer = sinon
.stub()
.callsArgWith(3, null, (this.output = 'mock-output'))
return this.DockerRunner.run(
this.project_id,
this.command,
this.directory,
this.image,
this.timeout,
this.env,
this.compileGroup,
this.callback
)
})
it('should set the docker options for the compile group', function () {
const options = this.DockerRunner._runAndWaitForContainer.lastCall
.args[0]
return expect(options.HostConfig).to.deep.include({
Binds: ['/local/compile/directory:/compile:rw'],
LogConfig: { Type: 'none', Config: {} },
CapDrop: 'ALL',
SecurityOpt: ['no-new-privileges'],
newProperty: 'new-property'
})
})
return it('should call the callback', function () {
return this.callback.calledWith(null, this.output).should.equal(true)
})
})
})
describe('_runAndWaitForContainer', function() {
@@ -478,8 +357,8 @@ describe('DockerRunner', function () {
return this.DockerRunner.startContainer(
this.options,
this.volumes,
() => {},
this.callback
this.callback,
() => {}
)
})
@@ -738,7 +617,7 @@ describe('DockerRunner', function () {
this.DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds
this.listContainers.callsArgWith(1, null, this.containers)
this.DockerRunner.destroyContainer = sinon.stub().callsArg(3)
return this.DockerRunner.destroyOldContainers((error) => {
return this.DockerRunner.destroyOldContainers(error => {
this.callback(error)
return done()
})
@@ -751,19 +630,19 @@ describe('DockerRunner', function () {
it('should destroy old containers', function() {
this.DockerRunner.destroyContainer.callCount.should.equal(1)
return this.DockerRunner.destroyContainer
.calledWith('project-old-container-name', 'old-container-id')
.calledWith('/project-old-container-name', 'old-container-id')
.should.equal(true)
})
it('should not destroy new containers', function() {
return this.DockerRunner.destroyContainer
.calledWith('project-new-container-name', 'new-container-id')
.calledWith('/project-new-container-name', 'new-container-id')
.should.equal(false)
})
it('should not destroy non-project containers', function() {
return this.DockerRunner.destroyContainer
.calledWith('totally-not-a-project-container', 'some-random-id')
.calledWith('/totally-not-a-project-container', 'some-random-id')
.should.equal(false)
})
@@ -785,7 +664,7 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
this.Docker.prototype.getContainer.callCount.should.equal(1)
this.Docker.prototype.getContainer
.calledWith(this.containerId)
@@ -799,10 +678,10 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
true,
(err) => {
err => {
this.fakeContainer.remove.callCount.should.equal(1)
this.fakeContainer.remove
.calledWith({ force: true, v: true })
.calledWith({ force: true })
.should.equal(true)
return done()
}
@@ -813,10 +692,10 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
this.fakeContainer.remove.callCount.should.equal(1)
this.fakeContainer.remove
.calledWith({ force: false, v: true })
.calledWith({ force: false })
.should.equal(true)
return done()
}
@@ -827,7 +706,7 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
expect(err).to.equal(null)
return done()
}
@@ -850,7 +729,7 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
expect(err).to.equal(null)
return done()
}
@@ -874,7 +753,7 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
expect(err).to.not.equal(null)
expect(err).to.equal(this.fakeError)
return done()
@@ -894,7 +773,7 @@ describe('DockerRunner', function () {
})
it('should get the container', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
this.Docker.prototype.getContainer.callCount.should.equal(1)
this.Docker.prototype.getContainer
.calledWith(this.containerId)
@@ -904,14 +783,14 @@ describe('DockerRunner', function () {
})
it('should try to force-destroy the container', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
this.fakeContainer.kill.callCount.should.equal(1)
return done()
})
})
it('should not produce an error', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
expect(err).to.equal(undefined)
return done()
})
@@ -932,7 +811,7 @@ describe('DockerRunner', function () {
})
return it('should not produce an error', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
expect(err).to.equal(undefined)
return done()
})
@@ -953,7 +832,7 @@ describe('DockerRunner', function () {
})
return it('should produce an error', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
expect(err).to.not.equal(undefined)
expect(err).to.equal(this.fakeError)
return done()

View File

@@ -37,10 +37,7 @@ describe('LatexRunner', function () {
done() {}
})
},
'./CommandRunner': (this.CommandRunner = {}),
fs: (this.fs = {
writeFile: sinon.stub().callsArg(2)
})
'./CommandRunner': (this.CommandRunner = {})
}
})
@@ -48,7 +45,6 @@ describe('LatexRunner', function () {
this.mainFile = 'main-file.tex'
this.compiler = 'pdflatex'
this.image = 'example.com/image'
this.compileGroup = 'compile-group'
this.callback = sinon.stub()
this.project_id = 'project-id-123'
return (this.env = { foo: '123' })
@@ -56,10 +52,7 @@ describe('LatexRunner', function () {
return describe('runLatex', function() {
beforeEach(function() {
return (this.CommandRunner.run = sinon.stub().callsArgWith(7, null, {
stdout: 'this is stdout',
stderr: 'this is stderr'
}))
return (this.CommandRunner.run = sinon.stub().callsArg(6))
})
describe('normally', function() {
@@ -72,14 +65,13 @@ describe('LatexRunner', function () {
compiler: this.compiler,
timeout: (this.timeout = 42000),
image: this.image,
environment: this.env,
compileGroup: this.compileGroup
environment: this.env
},
this.callback
)
})
it('should run the latex command', function () {
return it('should run the latex command', function() {
return this.CommandRunner.run
.calledWith(
this.project_id,
@@ -87,20 +79,10 @@ describe('LatexRunner', function () {
this.directory,
this.image,
this.timeout,
this.env,
this.compileGroup
this.env
)
.should.equal(true)
})
it('should record the stdout and stderr', function () {
this.fs.writeFile
.calledWith(this.directory + '/' + 'output.stdout', 'this is stdout')
.should.equal(true)
this.fs.writeFile
.calledWith(this.directory + '/' + 'output.stderr', 'this is stderr')
.should.equal(true)
})
})
describe('with an .Rtex main file', function() {
@@ -144,7 +126,7 @@ describe('LatexRunner', function () {
return it('should include the flags in the command', function() {
const command = this.CommandRunner.run.args[0][1]
const flags = command.filter(
(arg) => arg === '-file-line-error' || arg === '-halt-on-error'
arg => arg === '-file-line-error' || arg === '-halt-on-error'
)
flags.length.should.equal(2)
flags[0].should.equal('-file-line-error')

View File

@@ -70,7 +70,6 @@ describe('OutputFileFinder', function () {
beforeEach(function() {
this.proc = new EventEmitter()
this.proc.stdout = new EventEmitter()
this.proc.stdout.setEncoding = sinon.stub().returns(this.proc.stdout)
this.spawn.returns(this.proc)
this.directory = '/base/dir'
return this.OutputFileFinder._getAllFiles(this.directory, this.callback)

View File

@@ -30,8 +30,7 @@ describe('OutputFileOptimiser', function () {
child_process: { spawn: (this.spawn = sinon.stub()) },
'logger-sharelatex': { log: sinon.stub(), warn: sinon.stub() },
'./Metrics': {}
},
globals: { Math } // used by lodash
}
})
this.directory = '/test/dir'
return (this.callback = sinon.stub())
@@ -125,8 +124,11 @@ describe('OutputFileOptimiser', function () {
this.fs.read = sinon
.stub()
.withArgs(this.fd)
.yields(null, 100, Buffer.from('hello /Linearized 1'))
this.fs.close = sinon.stub().withArgs(this.fd).yields(null)
.yields(null, 100, new Buffer('hello /Linearized 1'))
this.fs.close = sinon
.stub()
.withArgs(this.fd)
.yields(null)
return this.OutputFileOptimiser.checkIfPDFIsOptimised(
this.src,
this.callback
@@ -138,7 +140,7 @@ describe('OutputFileOptimiser', function () {
this.fs.read = sinon
.stub()
.withArgs(this.fd)
.yields(null, 100, Buffer.from('hello /Linearized 1'))
.yields(null, 100, new Buffer('hello /Linearized 1'))
return this.OutputFileOptimiser.checkIfPDFIsOptimised(
this.src,
this.callback
@@ -167,7 +169,7 @@ describe('OutputFileOptimiser', function () {
this.fs.read = sinon
.stub()
.withArgs(this.fd)
.yields(null, 100, Buffer.from('hello not linearized 1'))
.yields(null, 100, new Buffer('hello not linearized 1'))
return this.OutputFileOptimiser.checkIfPDFIsOptimised(
this.src,
this.callback

View File

@@ -14,7 +14,6 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
require('chai').should()
const assert = require('chai').assert
const modulePath = require('path').join(
__dirname,
'../../../app/js/ProjectPersistenceManager'
@@ -27,15 +26,7 @@ describe('ProjectPersistenceManager', function () {
requires: {
'./UrlCache': (this.UrlCache = {}),
'./CompileManager': (this.CompileManager = {}),
diskusage: (this.diskusage = { check: sinon.stub() }),
'logger-sharelatex': (this.logger = {
log: sinon.stub(),
warn: sinon.stub(),
err: sinon.stub()
}),
'settings-sharelatex': (this.settings = {
project_cache_length_ms: 1000
}),
'logger-sharelatex': (this.logger = { log: sinon.stub() }),
'./db': (this.db = {})
}
})
@@ -44,57 +35,6 @@ describe('ProjectPersistenceManager', function () {
return (this.user_id = '1234')
})
describe('refreshExpiryTimeout', function () {
it('should leave expiry alone if plenty of disk', function (done) {
this.diskusage.check.callsArgWith(1, null, {
available: 40,
total: 100
})
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(
this.settings.project_cache_length_ms
)
done()
})
})
it('should drop EXPIRY_TIMEOUT 10% if low disk usage', function (done) {
this.diskusage.check.callsArgWith(1, null, {
available: 5,
total: 100
})
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(900)
done()
})
})
it('should not drop EXPIRY_TIMEOUT to below 50% of project_cache_length_ms', function (done) {
this.diskusage.check.callsArgWith(1, null, {
available: 5,
total: 100
})
this.ProjectPersistenceManager.EXPIRY_TIMEOUT = 500
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(500)
done()
})
})
it('should not modify EXPIRY_TIMEOUT if there is an error getting disk values', function (done) {
this.diskusage.check.callsArgWith(1, 'Error', {
available: 5,
total: 100
})
this.ProjectPersistenceManager.refreshExpiryTimeout(() => {
this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(1000)
done()
})
})
})
describe('clearExpiredProjects', function() {
beforeEach(function() {
this.project_ids = ['project-id-1', 'project-id-2']
@@ -109,7 +49,7 @@ describe('ProjectPersistenceManager', function () {
})
it('should clear each expired project', function() {
return Array.from(this.project_ids).map((project_id) =>
return Array.from(this.project_ids).map(project_id =>
this.ProjectPersistenceManager.clearProjectFromCache
.calledWith(project_id)
.should.equal(true)

View File

@@ -114,48 +114,6 @@ describe('RequestParser', function () {
})
})
describe('when image restrictions are present', function () {
beforeEach(function () {
this.settings.clsi = { docker: {} }
this.settings.clsi.docker.allowedImages = [
'repo/name:tag1',
'repo/name:tag2'
]
})
describe('with imageName set to something invalid', function () {
beforeEach(function () {
const request = this.validRequest
request.compile.options.imageName = 'something/different:latest'
this.RequestParser.parse(request, (error, data) => {
this.error = error
this.data = data
})
})
it('should throw an error for imageName', function () {
expect(String(this.error)).to.include(
'imageName attribute should be one of'
)
})
})
describe('with imageName set to something valid', function () {
beforeEach(function () {
const request = this.validRequest
request.compile.options.imageName = 'repo/name:tag1'
this.RequestParser.parse(request, (error, data) => {
this.error = error
this.data = data
})
})
it('should set the imageName', function () {
this.data.imageName.should.equal('repo/name:tag1')
})
})
})
describe('with flags set', function() {
beforeEach(function() {
this.validRequest.compile.options.flags = ['-file-line-error']

View File

@@ -78,7 +78,7 @@ describe('ResourceWriter', function () {
})
it('should write each resource to disk', function() {
return Array.from(this.resources).map((resource) =>
return Array.from(this.resources).map(resource =>
this.ResourceWriter._writeResourceToDisk
.calledWith(this.project_id, resource, this.basePath)
.should.equal(true)
@@ -139,7 +139,7 @@ describe('ResourceWriter', function () {
})
it('should write each resource to disk', function() {
return Array.from(this.resources).map((resource) =>
return Array.from(this.resources).map(resource =>
this.ResourceWriter._writeResourceToDisk
.calledWith(this.project_id, resource, this.basePath)
.should.equal(true)
@@ -230,12 +230,6 @@ describe('ResourceWriter', function () {
{
path: '_markdown_main/30893013dec5d869a415610079774c2f.md.tex',
type: 'tex'
},
{
path: 'output.stdout'
},
{
path: 'output.stderr'
}
]
this.resources = 'mock-resources'
@@ -262,18 +256,6 @@ describe('ResourceWriter', function () {
.should.equal(true)
})
it('should delete the stdout log file', function () {
return this.ResourceWriter._deleteFileIfNotDirectory
.calledWith(path.join(this.basePath, 'output.stdout'))
.should.equal(true)
})
it('should delete the stderr log file', function () {
return this.ResourceWriter._deleteFileIfNotDirectory
.calledWith(path.join(this.basePath, 'output.stderr'))
.should.equal(true)
})
it('should delete the extra files', function() {
return this.ResourceWriter._deleteFileIfNotDirectory
.calledWith(path.join(this.basePath, 'extra/file.tex'))

View File

@@ -160,7 +160,7 @@ describe('UrlCache', function () {
describe('_ensureUrlIsInCache', function() {
beforeEach(function() {
this.UrlFetcher.pipeUrlToFileWithRetry = sinon.stub().callsArg(2)
this.UrlFetcher.pipeUrlToFile = sinon.stub().callsArg(2)
return (this.UrlCache._updateOrCreateUrlDetails = sinon
.stub()
.callsArg(3))
@@ -190,7 +190,7 @@ describe('UrlCache', function () {
})
it('should download the URL to the cache file', function() {
return this.UrlFetcher.pipeUrlToFileWithRetry
return this.UrlFetcher.pipeUrlToFile
.calledWith(
this.url,
this.UrlCache._cacheFilePathForUrl(this.project_id, this.url)
@@ -232,7 +232,7 @@ describe('UrlCache', function () {
})
it('should not download the URL to the cache file', function() {
return this.UrlFetcher.pipeUrlToFileWithRetry.called.should.equal(false)
return this.UrlFetcher.pipeUrlToFile.called.should.equal(false)
})
return it('should return the callback with the cache file path', function() {
@@ -342,7 +342,7 @@ describe('UrlCache', function () {
})
it('should clear the cache for each url in the project', function() {
return Array.from(this.urls).map((url) =>
return Array.from(this.urls).map(url =>
this.UrlCache._clearUrlFromCache
.calledWith(this.project_id, url)
.should.equal(true)

View File

@@ -33,42 +33,7 @@ describe('UrlFetcher', function () {
}
}))
})
describe('pipeUrlToFileWithRetry', function () {
this.beforeEach(function () {
this.UrlFetcher.pipeUrlToFile = sinon.stub()
})
it('should call pipeUrlToFile', function (done) {
this.UrlFetcher.pipeUrlToFile.callsArgWith(2)
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
expect(err).to.equal(undefined)
this.UrlFetcher.pipeUrlToFile.called.should.equal(true)
done()
})
})
it('should call pipeUrlToFile multiple times on error', function (done) {
const error = new Error("couldn't download file")
this.UrlFetcher.pipeUrlToFile.callsArgWith(2, error)
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
expect(err).to.equal(error)
this.UrlFetcher.pipeUrlToFile.callCount.should.equal(3)
done()
})
})
it('should call pipeUrlToFile twice if only 1 error', function (done) {
this.UrlFetcher.pipeUrlToFile.onCall(0).callsArgWith(2, 'error')
this.UrlFetcher.pipeUrlToFile.onCall(1).callsArgWith(2)
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
expect(err).to.equal(undefined)
this.UrlFetcher.pipeUrlToFile.callCount.should.equal(2)
done()
})
})
})
describe('pipeUrlToFile', function () {
it('should turn off the cookie jar in request', function() {
return this.defaults.calledWith({ jar: false }).should.equal(true)
})
@@ -167,7 +132,7 @@ describe('UrlFetcher', function () {
describe('with non success status code', function() {
beforeEach(function(done) {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
this.callback(err)
return done()
})
@@ -180,15 +145,13 @@ describe('UrlFetcher', function () {
this.callback.calledWith(sinon.match(Error)).should.equal(true)
const message = this.callback.args[0][0].message
expect(message).to.include(
'URL returned non-success status code: 404'
)
expect(message).to.include('URL returned non-success status code: 404')
})
})
return describe('with error', function() {
beforeEach(function(done) {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
this.callback(err)
return done()
})
@@ -209,4 +172,3 @@ describe('UrlFetcher', function () {
})
})
})
})