Merge pull request #149 from overleaf/msm-service-decaf
clsi decaffeination
@@ -5,5 +5,3 @@ gitrev
.npm
.nvmrc
nodemon.json
app.js
**/js/*
.eslintrc (new file, 65 lines)
@@ -0,0 +1,65 @@
// this file was auto-generated, do not edit it directly.
// instead run bin/update_build_scripts from
// https://github.com/sharelatex/sharelatex-dev-environment
// Version: 1.3.5
{
  "extends": [
    "standard",
    "prettier",
    "prettier/standard"
  ],
  "parserOptions": {
    "ecmaVersion": 2017
  },
  "plugins": [
    "mocha",
    "chai-expect",
    "chai-friendly"
  ],
  "env": {
    "node": true,
    "mocha": true
  },
  "rules": {
    // Swap the no-unused-expressions rule with a more chai-friendly one
    "no-unused-expressions": 0,
    "chai-friendly/no-unused-expressions": "error"
  },
  "overrides": [
    {
      // Test specific rules
      "files": ["test/**/*.js"],
      "globals": {
        "expect": true
      },
      "rules": {
        // mocha-specific rules
        "mocha/handle-done-callback": "error",
        "mocha/no-exclusive-tests": "error",
        "mocha/no-global-tests": "error",
        "mocha/no-identical-title": "error",
        "mocha/no-nested-tests": "error",
        "mocha/no-pending-tests": "error",
        "mocha/no-skipped-tests": "error",
        "mocha/no-mocha-arrows": "error",

        // chai-specific rules
        "chai-expect/missing-assertion": "error",
        "chai-expect/terminating-properties": "error",

        // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
        // we don't enforce this at the top-level - just in tests to manage `this` scope
        // based on mocha's context mechanism
        "mocha/prefer-arrow-callback": "error"
      }
    },
    {
      // Backend specific rules
      "files": ["app/**/*.js", "app.js", "index.js"],
      "rules": {
        // don't allow console.log in backend code
        "no-console": "error"
      }
    }
  ]
}
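A side note on the rule swap above (commentary, not part of the diff): chai assertions such as `expect(value).to.be.true` end in a bare property access, which the stock no-unused-expressions rule reports as dead code. A minimal sketch of the pattern this config is written for, with hypothetical names:

// hypothetical test file matched by the test/**/*.js override above
const { expect } = require('chai')

describe('ExampleTest', function() {
  it('uses a chai property assertion', function() {
    const isAlive = true
    // The stock no-unused-expressions rule would flag the next line,
    // since the property access has no visible side effect.
    // chai-friendly/no-unused-expressions recognises chai chains as assertions.
    expect(isAlive).to.be.true
  })
})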
.gitignore (vendored, 6 lines changed)
@@ -1,13 +1,7 @@
**.swp
node_modules
app/js
test/unit/js
test/smoke/js
test/acceptance/js
test/acceptance/fixtures/tmp
compiles
app.js
**/*.map
.DS_Store
*~
cache
.prettierrc (new file, 8 lines)
@@ -0,0 +1,8 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.3.5
{
  "semi": false,
  "singleQuote": true
}
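For illustration (not from the diff): with "semi": false and "singleQuote": true, running `npm run format:fix` rewrites statement style as sketched below; the require shown is just an example line.

// before formatting:
//   const logger = require("logger-sharelatex");
// after `npm run format:fix` with this .prettierrc:
const logger = require('logger-sharelatex')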
Dockerfile (hunk)
@@ -21,7 +21,6 @@ RUN npm install --quiet
COPY . /app

RUN npm run compile:all

FROM base
Jenkinsfile (vendored, 7 lines changed)
@@ -37,6 +37,13 @@ pipeline {
      }
    }

    stage('Linting') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint'
      }
    }

    stage('Unit Tests') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
Makefile (15 lines changed)
@@ -16,12 +16,17 @@ DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
clean:
	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	rm -f app.js
	rm -rf app/js
	rm -rf test/unit/js
	rm -rf test/acceptance/js

test: test_unit test_acceptance
format:
	$(DOCKER_COMPOSE) run --rm test_unit npm run format

format_fix:
	$(DOCKER_COMPOSE) run --rm test_unit npm run format:fix

lint:
	$(DOCKER_COMPOSE) run --rm test_unit npm run lint

test: format lint test_unit test_acceptance

test_unit:
	@[ ! -d test/unit ] && echo "clsi has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit
app.coffee (deleted, 244 lines)
@@ -1,244 +0,0 @@
Metrics = require "metrics-sharelatex"
Metrics.initialize("clsi")

CompileController = require "./app/js/CompileController"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
logger.initialize("clsi")
if Settings.sentry?.dsn?
  logger.initializeErrorReporting(Settings.sentry.dsn)

smokeTest = require "smoke-test-sharelatex"
ContentTypeMapper = require "./app/js/ContentTypeMapper"
Errors = require './app/js/Errors'

Path = require "path"
fs = require "fs"

Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)

ProjectPersistenceManager = require "./app/js/ProjectPersistenceManager"
OutputCacheManager = require "./app/js/OutputCacheManager"

require("./app/js/db").sync()

express = require "express"
bodyParser = require "body-parser"
app = express()

Metrics.injectMetricsRoute(app)
app.use Metrics.http.monitor(logger)

# Compile requests can take longer than the default two
# minutes (including file download time), so bump up the
# timeout a bit.
TIMEOUT = 10 * 60 * 1000
app.use (req, res, next) ->
  req.setTimeout TIMEOUT
  res.setTimeout TIMEOUT
  res.removeHeader("X-Powered-By")
  next()

app.param 'project_id', (req, res, next, project_id) ->
  if project_id?.match /^[a-zA-Z0-9_-]+$/
    next()
  else
    next new Error("invalid project id")

app.param 'user_id', (req, res, next, user_id) ->
  if user_id?.match /^[0-9a-f]{24}$/
    next()
  else
    next new Error("invalid user id")

app.param 'build_id', (req, res, next, build_id) ->
  if build_id?.match OutputCacheManager.BUILD_REGEX
    next()
  else
    next new Error("invalid build id #{build_id}")

app.post "/project/:project_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
app.post "/project/:project_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id", CompileController.clearCache

app.get "/project/:project_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/wordcount", CompileController.wordcount
app.get "/project/:project_id/status", CompileController.status

# Per-user containers
app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id/user/:user_id", CompileController.clearCache

app.get "/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/user/:user_id/wordcount", CompileController.wordcount

ForbidSymlinks = require "./app/js/StaticServerForbidSymlinks"

# create a static server which does not allow access to any symlinks
# avoids possible mismatch of root directory between middleware check
# and serving the files
staticServer = ForbidSymlinks express.static, Settings.path.compilesDir, setHeaders: (res, path, stat) ->
  if Path.basename(path) == "output.pdf"
    # Calculate an etag in the same way as nginx
    # https://github.com/tj/send/issues/65
    etag = (path, stat) ->
      '"' + Math.ceil(+stat.mtime / 1000).toString(16) +
      '-' + Number(stat.size).toString(16) + '"'
    res.set("Etag", etag(path, stat))
  res.set("Content-Type", ContentTypeMapper.map(path))

app.get "/project/:project_id/user/:user_id/build/:build_id/output/*", (req, res, next) ->
  # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url = "/#{req.params.project_id}-#{req.params.user_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
  staticServer(req, res, next)

app.get "/project/:project_id/build/:build_id/output/*", (req, res, next) ->
  # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
  staticServer(req, res, next)

app.get "/project/:project_id/user/:user_id/output/*", (req, res, next) ->
  # for specific user get the path to the top level file
  req.url = "/#{req.params.project_id}-#{req.params.user_id}/#{req.params[0]}"
  staticServer(req, res, next)

app.get "/project/:project_id/output/*", (req, res, next) ->
  if req.query?.build? && req.query.build.match(OutputCacheManager.BUILD_REGEX)
    # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
    req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.query.build, "/#{req.params[0]}")
  else
    req.url = "/#{req.params.project_id}/#{req.params[0]}"
  staticServer(req, res, next)

app.get "/oops", (req, res, next) ->
  logger.error {err: "hello"}, "test error"
  res.send "error\n"

app.get "/status", (req, res, next) ->
  res.send "CLSI is alive\n"

resCacher =
  contentType:(@setContentType)->
  send:(@code, @body)->

  #default the server to be down
  code:500
  body:{}
  setContentType:"application/json"

if Settings.smokeTest
  do runSmokeTest = ->
    logger.log("running smoke tests")
    smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher)
    setTimeout(runSmokeTest, 30 * 1000)

app.get "/health_check", (req, res)->
  res.contentType(resCacher?.setContentType)
  res.status(resCacher?.code).send(resCacher?.body)

app.get "/smoke_test_force", (req, res)->
  smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)

profiler = require "v8-profiler-node8"
app.get "/profile", (req, res) ->
  time = parseInt(req.query.time || "1000")
  profiler.startProfiling("test")
  setTimeout () ->
    profile = profiler.stopProfiling("test")
    res.json(profile)
  , time

app.get "/heapdump", (req, res)->
  require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.clsi.heapsnapshot', (err, filename)->
    res.send filename

app.use (error, req, res, next) ->
  if error instanceof Errors.NotFoundError
    logger.warn {err: error, url: req.url}, "not found error"
    return res.sendStatus(404)
  else
    logger.error {err: error, url: req.url}, "server error"
    res.sendStatus(error?.statusCode || 500)

net = require "net"
os = require "os"

STATE = "up"

loadTcpServer = net.createServer (socket) ->
  socket.on "error", (err)->
    if err.code == "ECONNRESET"
      # this always comes up, we don't know why
      return
    logger.err err:err, "error with socket on load check"
    socket.destroy()

  if STATE == "up" and Settings.internal.load_balancer_agent.report_load
    currentLoad = os.loadavg()[0]

    # staging clis's have 1 cpu core only
    if os.cpus().length == 1
      availableWorkingCpus = 1
    else
      availableWorkingCpus = os.cpus().length - 1

    freeLoad = availableWorkingCpus - currentLoad
    freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
    if freeLoadPercentage <= 0
      freeLoadPercentage = 1 # when its 0 the server is set to drain and will move projects to different servers
    socket.write("up, #{freeLoadPercentage}%\n", "ASCII")
    socket.end()
  else
    socket.write("#{STATE}\n", "ASCII")
    socket.end()

loadHttpServer = express()

loadHttpServer.post "/state/up", (req, res, next) ->
  STATE = "up"
  logger.info "getting message to set server to down"
  res.sendStatus 204

loadHttpServer.post "/state/down", (req, res, next) ->
  STATE = "down"
  logger.info "getting message to set server to down"
  res.sendStatus 204

loadHttpServer.post "/state/maint", (req, res, next) ->
  STATE = "maint"
  logger.info "getting message to set server to maint"
  res.sendStatus 204

port = (Settings.internal?.clsi?.port or 3013)
host = (Settings.internal?.clsi?.host or "localhost")

load_tcp_port = Settings.internal.load_balancer_agent.load_port
load_http_port = Settings.internal.load_balancer_agent.local_port

if !module.parent # Called directly
  app.listen port, host, (error) ->
    logger.info "CLSI starting up, listening on #{host}:#{port}"

  loadTcpServer.listen load_tcp_port, host, (error) ->
    throw error if error?
    logger.info "Load tcp agent listening on load port #{load_tcp_port}"

  loadHttpServer.listen load_http_port, host, (error) ->
    throw error if error?
    logger.info "Load http agent listening on load port #{load_http_port}"

module.exports = app

setInterval () ->
  ProjectPersistenceManager.clearExpiredProjects()
, tenMinutes = 10 * 60 * 1000
app.js (new file, 371 lines)
@@ -0,0 +1,371 @@
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let tenMinutes
const Metrics = require('metrics-sharelatex')
Metrics.initialize('clsi')

const CompileController = require('./app/js/CompileController')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
logger.initialize('clsi')
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
  logger.initializeErrorReporting(Settings.sentry.dsn)
}

const smokeTest = require('smoke-test-sharelatex')
const ContentTypeMapper = require('./app/js/ContentTypeMapper')
const Errors = require('./app/js/Errors')

const Path = require('path')
const fs = require('fs')

Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)

const ProjectPersistenceManager = require('./app/js/ProjectPersistenceManager')
const OutputCacheManager = require('./app/js/OutputCacheManager')

require('./app/js/db').sync()

const express = require('express')
const bodyParser = require('body-parser')
const app = express()

Metrics.injectMetricsRoute(app)
app.use(Metrics.http.monitor(logger))

// Compile requests can take longer than the default two
// minutes (including file download time), so bump up the
// timeout a bit.
const TIMEOUT = 10 * 60 * 1000
app.use(function(req, res, next) {
  req.setTimeout(TIMEOUT)
  res.setTimeout(TIMEOUT)
  res.removeHeader('X-Powered-By')
  return next()
})

app.param('project_id', function(req, res, next, project_id) {
  if (project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid project id'))
  }
})

app.param('user_id', function(req, res, next, user_id) {
  if (user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid user id'))
  }
})

app.param('build_id', function(req, res, next, build_id) {
  if (
    build_id != null
      ? build_id.match(OutputCacheManager.BUILD_REGEX)
      : undefined
  ) {
    return next()
  } else {
    return next(new Error(`invalid build id ${build_id}`))
  }
})

app.post(
  '/project/:project_id/compile',
  bodyParser.json({ limit: Settings.compileSizeLimit }),
  CompileController.compile
)
app.post('/project/:project_id/compile/stop', CompileController.stopCompile)
app.delete('/project/:project_id', CompileController.clearCache)

app.get('/project/:project_id/sync/code', CompileController.syncFromCode)
app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf)
app.get('/project/:project_id/wordcount', CompileController.wordcount)
app.get('/project/:project_id/status', CompileController.status)

// Per-user containers
app.post(
  '/project/:project_id/user/:user_id/compile',
  bodyParser.json({ limit: Settings.compileSizeLimit }),
  CompileController.compile
)
app.post(
  '/project/:project_id/user/:user_id/compile/stop',
  CompileController.stopCompile
)
app.delete('/project/:project_id/user/:user_id', CompileController.clearCache)

app.get(
  '/project/:project_id/user/:user_id/sync/code',
  CompileController.syncFromCode
)
app.get(
  '/project/:project_id/user/:user_id/sync/pdf',
  CompileController.syncFromPdf
)
app.get(
  '/project/:project_id/user/:user_id/wordcount',
  CompileController.wordcount
)

const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks')

// create a static server which does not allow access to any symlinks
// avoids possible mismatch of root directory between middleware check
// and serving the files
const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
  setHeaders(res, path, stat) {
    if (Path.basename(path) === 'output.pdf') {
      // Calculate an etag in the same way as nginx
      // https://github.com/tj/send/issues/65
      const etag = (path, stat) =>
        `"${Math.ceil(+stat.mtime / 1000).toString(16)}` +
        '-' +
        Number(stat.size).toString(16) +
        '"'
      res.set('Etag', etag(path, stat))
    }
    return res.set('Content-Type', ContentTypeMapper.map(path))
  }
})

app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function(
  req,
  res,
  next
) {
  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url =
    `/${req.params.project_id}-${req.params.user_id}/` +
    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
  return staticServer(req, res, next)
})

app.get('/project/:project_id/build/:build_id/output/*', function(
  req,
  res,
  next
) {
  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url =
    `/${req.params.project_id}/` +
    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
  return staticServer(req, res, next)
})

app.get('/project/:project_id/user/:user_id/output/*', function(
  req,
  res,
  next
) {
  // for specific user get the path to the top level file
  req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
  return staticServer(req, res, next)
})

app.get('/project/:project_id/output/*', function(req, res, next) {
  if (
    (req.query != null ? req.query.build : undefined) != null &&
    req.query.build.match(OutputCacheManager.BUILD_REGEX)
  ) {
    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
    req.url =
      `/${req.params.project_id}/` +
      OutputCacheManager.path(req.query.build, `/${req.params[0]}`)
  } else {
    req.url = `/${req.params.project_id}/${req.params[0]}`
  }
  return staticServer(req, res, next)
})

app.get('/oops', function(req, res, next) {
  logger.error({ err: 'hello' }, 'test error')
  return res.send('error\n')
})

app.get('/status', (req, res, next) => res.send('CLSI is alive\n'))

const resCacher = {
  contentType(setContentType) {
    this.setContentType = setContentType
  },
  send(code, body) {
    this.code = code
    this.body = body
  },

  // default the server to be down
  code: 500,
  body: {},
  setContentType: 'application/json'
}

if (Settings.smokeTest) {
  let runSmokeTest
  ;(runSmokeTest = function() {
    logger.log('running smoke tests')
    smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))(
      {},
      resCacher
    )
    return setTimeout(runSmokeTest, 30 * 1000)
  })()
}

app.get('/health_check', function(req, res) {
  res.contentType(resCacher != null ? resCacher.setContentType : undefined)
  return res
    .status(resCacher != null ? resCacher.code : undefined)
    .send(resCacher != null ? resCacher.body : undefined)
})

app.get('/smoke_test_force', (req, res) =>
  smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))(
    req,
    res
  )
)

const profiler = require('v8-profiler-node8')
app.get('/profile', function(req, res) {
  const time = parseInt(req.query.time || '1000')
  profiler.startProfiling('test')
  return setTimeout(function() {
    const profile = profiler.stopProfiling('test')
    return res.json(profile)
  }, time)
})

app.get('/heapdump', (req, res) =>
  require('heapdump').writeSnapshot(
    `/tmp/${Date.now()}.clsi.heapsnapshot`,
    (err, filename) => res.send(filename)
  )
)

app.use(function(error, req, res, next) {
  if (error instanceof Errors.NotFoundError) {
    logger.warn({ err: error, url: req.url }, 'not found error')
    return res.sendStatus(404)
  } else {
    logger.error({ err: error, url: req.url }, 'server error')
    return res.sendStatus((error != null ? error.statusCode : undefined) || 500)
  }
})

const net = require('net')
const os = require('os')

let STATE = 'up'

const loadTcpServer = net.createServer(function(socket) {
  socket.on('error', function(err) {
    if (err.code === 'ECONNRESET') {
      // this always comes up, we don't know why
      return
    }
    logger.err({ err }, 'error with socket on load check')
    return socket.destroy()
  })

  if (STATE === 'up' && Settings.internal.load_balancer_agent.report_load) {
    let availableWorkingCpus
    const currentLoad = os.loadavg()[0]

    // staging clis's have 1 cpu core only
    if (os.cpus().length === 1) {
      availableWorkingCpus = 1
    } else {
      availableWorkingCpus = os.cpus().length - 1
    }

    const freeLoad = availableWorkingCpus - currentLoad
    let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
    if (freeLoadPercentage <= 0) {
      freeLoadPercentage = 1 // when its 0 the server is set to drain and will move projects to different servers
    }
    socket.write(`up, ${freeLoadPercentage}%\n`, 'ASCII')
    return socket.end()
  } else {
    socket.write(`${STATE}\n`, 'ASCII')
    return socket.end()
  }
})

const loadHttpServer = express()

loadHttpServer.post('/state/up', function(req, res, next) {
  STATE = 'up'
  logger.info('getting message to set server to down')
  return res.sendStatus(204)
})

loadHttpServer.post('/state/down', function(req, res, next) {
  STATE = 'down'
  logger.info('getting message to set server to down')
  return res.sendStatus(204)
})

loadHttpServer.post('/state/maint', function(req, res, next) {
  STATE = 'maint'
  logger.info('getting message to set server to maint')
  return res.sendStatus(204)
})

const port =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    x => x.port
  ) || 3013
const host =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    x1 => x1.host
  ) || 'localhost'

const load_tcp_port = Settings.internal.load_balancer_agent.load_port
const load_http_port = Settings.internal.load_balancer_agent.local_port

if (!module.parent) {
  // Called directly
  app.listen(port, host, error =>
    logger.info(`CLSI starting up, listening on ${host}:${port}`)
  )

  loadTcpServer.listen(load_tcp_port, host, function(error) {
    if (error != null) {
      throw error
    }
    return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`)
  })

  loadHttpServer.listen(load_http_port, host, function(error) {
    if (error != null) {
      throw error
    }
    return logger.info(
      `Load http agent listening on load port ${load_http_port}`
    )
  })
}

module.exports = app

setInterval(
  () => ProjectPersistenceManager.clearExpiredProjects(),
  (tenMinutes = 10 * 60 * 1000)
)

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
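One note on the decaffeinated output above (commentary, not part of the diff): `__guard__` is decaffeinate's translation of CoffeeScript's soak operator `?.` for Node versions that predate optional chaining. A self-contained sketch of the equivalence, using a made-up `settings` object:

// CoffeeScript source:      port = Settings.internal?.clsi?.port or 3013
// Decaffeinated JS (above): __guard__(...) || 3013, as generated
// Modern JS (Node 14+):     Settings.internal?.clsi?.port || 3013

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}

const settings = { internal: {} } // hypothetical: no clsi section configured
const port =
  __guard__(
    settings.internal != null ? settings.internal.clsi : undefined,
    x => x.port
  ) || 3013
console.log(port) // 3013: falls back to the default without a TypeError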
deleted file (exports CommandRunner, 11 lines)
@@ -1,11 +0,0 @@
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"

if Settings.clsi?.dockerRunner == true
  commandRunnerPath = "./DockerRunner"
else
  commandRunnerPath = "./LocalCommandRunner"
logger.info commandRunnerPath:commandRunnerPath, "selecting command runner for clsi"
CommandRunner = require(commandRunnerPath)

module.exports = CommandRunner
deleted file (exports CompileController, 119 lines)
@@ -1,119 +0,0 @@
RequestParser = require "./RequestParser"
CompileManager = require "./CompileManager"
Settings = require "settings-sharelatex"
Metrics = require "./Metrics"
ProjectPersistenceManager = require "./ProjectPersistenceManager"
logger = require "logger-sharelatex"
Errors = require "./Errors"

module.exports = CompileController =
  compile: (req, res, next = (error) ->) ->
    timer = new Metrics.Timer("compile-request")
    RequestParser.parse req.body, (error, request) ->
      return next(error) if error?
      request.project_id = req.params.project_id
      request.user_id = req.params.user_id if req.params.user_id?
      ProjectPersistenceManager.markProjectAsJustAccessed request.project_id, (error) ->
        return next(error) if error?
        CompileManager.doCompileWithLock request, (error, outputFiles = []) ->
          if error instanceof Errors.AlreadyCompilingError
            code = 423 # Http 423 Locked
            status = "compile-in-progress"
          else if error instanceof Errors.FilesOutOfSyncError
            code = 409 # Http 409 Conflict
            status = "retry"
          else if error?.terminated
            status = "terminated"
          else if error?.validate
            status = "validation-#{error.validate}"
          else if error?.timedout
            status = "timedout"
            logger.log err: error, project_id: request.project_id, "timeout running compile"
          else if error?
            status = "error"
            code = 500
            logger.warn err: error, project_id: request.project_id, "error running compile"
          else
            status = "failure"
            for file in outputFiles
              if file.path?.match(/output\.pdf$/)
                status = "success"

            if status == "failure"
              logger.warn project_id: request.project_id, outputFiles:outputFiles, "project failed to compile successfully, no output.pdf generated"

          # log an error if any core files are found
          for file in outputFiles
            if file.path is "core"
              logger.error project_id:request.project_id, req:req, outputFiles:outputFiles, "core file found in output"

          if error?
            outputFiles = error.outputFiles || []

          timer.done()
          res.status(code or 200).send {
            compile:
              status: status
              error: error?.message or error
              outputFiles: outputFiles.map (file) ->
                url:
                  "#{Settings.apis.clsi.url}/project/#{request.project_id}" +
                  (if request.user_id? then "/user/#{request.user_id}" else "") +
                  (if file.build? then "/build/#{file.build}" else "") +
                  "/output/#{file.path}"
                path: file.path
                type: file.type
                build: file.build
          }

  stopCompile: (req, res, next) ->
    {project_id, user_id} = req.params
    CompileManager.stopCompile project_id, user_id, (error) ->
      return next(error) if error?
      res.sendStatus(204)

  clearCache: (req, res, next = (error) ->) ->
    ProjectPersistenceManager.clearProject req.params.project_id, req.params.user_id, (error) ->
      return next(error) if error?
      res.sendStatus(204) # No content

  syncFromCode: (req, res, next = (error) ->) ->
    file = req.query.file
    line = parseInt(req.query.line, 10)
    column = parseInt(req.query.column, 10)
    project_id = req.params.project_id
    user_id = req.params.user_id
    CompileManager.syncFromCode project_id, user_id, file, line, column, (error, pdfPositions) ->
      return next(error) if error?
      res.json {
        pdf: pdfPositions
      }

  syncFromPdf: (req, res, next = (error) ->) ->
    page = parseInt(req.query.page, 10)
    h = parseFloat(req.query.h)
    v = parseFloat(req.query.v)
    project_id = req.params.project_id
    user_id = req.params.user_id
    CompileManager.syncFromPdf project_id, user_id, page, h, v, (error, codePositions) ->
      return next(error) if error?
      res.json {
        code: codePositions
      }

  wordcount: (req, res, next = (error) ->) ->
    file = req.query.file || "main.tex"
    project_id = req.params.project_id
    user_id = req.params.user_id
    image = req.query.image
    logger.log {image, file, project_id}, "word count request"

    CompileManager.wordcount project_id, user_id, file, image, (error, result) ->
      return next(error) if error?
      res.json {
        texcount: result
      }

  status: (req, res, next = (error)-> )->
    res.send("OK")
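To make the controller's response format concrete, here is a sketch inferred from the code above (not output captured from a real compile; the placeholders are invented for illustration):

// hypothetical response body for a successful POST /project/:project_id/compile
// (on failure, compile.status would instead be e.g. "failure", "timedout" or "retry",
// and compile.error would carry the error message)
{
  "compile": {
    "status": "success",
    "outputFiles": [
      {
        "url": "<Settings.apis.clsi.url>/project/<project_id>/build/<build_id>/output/output.pdf",
        "path": "output.pdf",
        "type": "pdf",
        "build": "<build_id>"
      }
    ]
  }
}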
deleted file (exports CompileManager, 345 lines)
@@ -1,345 +0,0 @@
ResourceWriter = require "./ResourceWriter"
LatexRunner = require "./LatexRunner"
OutputFileFinder = require "./OutputFileFinder"
OutputCacheManager = require "./OutputCacheManager"
Settings = require("settings-sharelatex")
Path = require "path"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
child_process = require "child_process"
DraftModeManager = require "./DraftModeManager"
TikzManager = require "./TikzManager"
LockManager = require "./LockManager"
fs = require("fs")
fse = require "fs-extra"
os = require("os")
async = require "async"
Errors = require './Errors'
CommandRunner = require "./CommandRunner"

getCompileName = (project_id, user_id) ->
  if user_id? then "#{project_id}-#{user_id}" else project_id

getCompileDir = (project_id, user_id) ->
  Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))

module.exports = CompileManager =

  doCompileWithLock: (request, callback = (error, outputFiles) ->) ->
    compileDir = getCompileDir(request.project_id, request.user_id)
    lockFile = Path.join(compileDir, ".project-lock")
    # use a .project-lock file in the compile directory to prevent
    # simultaneous compiles
    fse.ensureDir compileDir, (error) ->
      return callback(error) if error?
      LockManager.runWithLock lockFile, (releaseLock) ->
        CompileManager.doCompile(request, releaseLock)
      , callback

  doCompile: (request, callback = (error, outputFiles) ->) ->
    compileDir = getCompileDir(request.project_id, request.user_id)
    timer = new Metrics.Timer("write-to-disk")
    logger.log project_id: request.project_id, user_id: request.user_id, "syncing resources to disk"
    ResourceWriter.syncResourcesToDisk request, compileDir, (error, resourceList) ->
      # NOTE: resourceList is insecure, it should only be used to exclude files from the output list
      if error? and error instanceof Errors.FilesOutOfSyncError
        logger.warn project_id: request.project_id, user_id: request.user_id, "files out of sync, please retry"
        return callback(error)
      else if error?
        logger.err err:error, project_id: request.project_id, user_id: request.user_id, "error writing resources to disk"
        return callback(error)
      logger.log project_id: request.project_id, user_id: request.user_id, time_taken: Date.now() - timer.start, "written files to disk"
      timer.done()

      injectDraftModeIfRequired = (callback) ->
        if request.draft
          DraftModeManager.injectDraftMode Path.join(compileDir, request.rootResourcePath), callback
        else
          callback()

      createTikzFileIfRequired = (callback) ->
        TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, needsMainFile) ->
          return callback(error) if error?
          if needsMainFile
            TikzManager.injectOutputFile compileDir, request.rootResourcePath, callback
          else
            callback()

      # set up environment variables for chktex
      env = {}
      # only run chktex on LaTeX files (not knitr .Rtex files or any others)
      isLaTeXFile = request.rootResourcePath?.match(/\.tex$/i)
      if request.check? and isLaTeXFile
        env['CHKTEX_OPTIONS'] = '-nall -e9 -e10 -w15 -w16'
        env['CHKTEX_ULIMIT_OPTIONS'] = '-t 5 -v 64000'
        if request.check is 'error'
          env['CHKTEX_EXIT_ON_ERROR'] = 1
        if request.check is 'validate'
          env['CHKTEX_VALIDATE'] = 1

      # apply a series of file modifications/creations for draft mode and tikz
      async.series [injectDraftModeIfRequired, createTikzFileIfRequired], (error) ->
        return callback(error) if error?
        timer = new Metrics.Timer("run-compile")
        # find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
        tag = request.imageName?.match(/:(.*)/)?[1]?.replace(/\./g,'-') or "default"
        tag = "other" if not request.project_id.match(/^[0-9a-f]{24}$/) # exclude smoke test
        Metrics.inc("compiles")
        Metrics.inc("compiles-with-image.#{tag}")
        compileName = getCompileName(request.project_id, request.user_id)
        LatexRunner.runLatex compileName, {
          directory: compileDir
          mainFile: request.rootResourcePath
          compiler: request.compiler
          timeout: request.timeout
          image: request.imageName
          flags: request.flags
          environment: env
        }, (error, output, stats, timings) ->
          # request was for validation only
          if request.check is "validate"
            result = if error?.code then "fail" else "pass"
            error = new Error("validation")
            error.validate = result
          # request was for compile, and failed on validation
          if request.check is "error" and error?.message is 'exited'
            error = new Error("compilation")
            error.validate = "fail"
          # compile was killed by user, was a validation, or a compile which failed validation
          if error?.terminated or error?.validate or error?.timedout
            OutputFileFinder.findOutputFiles resourceList, compileDir, (err, outputFiles) ->
              return callback(err) if err?
              error.outputFiles = outputFiles # return output files so user can check logs
              callback(error)
            return
          # compile completed normally
          return callback(error) if error?
          Metrics.inc("compiles-succeeded")
          for metric_key, metric_value of stats or {}
            Metrics.count(metric_key, metric_value)
          for metric_key, metric_value of timings or {}
            Metrics.timing(metric_key, metric_value)
          loadavg = os.loadavg?()
          Metrics.gauge("load-avg", loadavg[0]) if loadavg?
          ts = timer.done()
          logger.log {project_id: request.project_id, user_id: request.user_id, time_taken: ts, stats:stats, timings:timings, loadavg:loadavg}, "done compile"
          if stats?["latex-runs"] > 0
            Metrics.timing("run-compile-per-pass", ts / stats["latex-runs"])
          if stats?["latex-runs"] > 0 and timings?["cpu-time"] > 0
            Metrics.timing("run-compile-cpu-time-per-pass", timings["cpu-time"] / stats["latex-runs"])

          OutputFileFinder.findOutputFiles resourceList, compileDir, (error, outputFiles) ->
            return callback(error) if error?
            OutputCacheManager.saveOutputFiles outputFiles, compileDir, (error, newOutputFiles) ->
              callback null, newOutputFiles

  stopCompile: (project_id, user_id, callback = (error) ->) ->
    compileName = getCompileName(project_id, user_id)
    LatexRunner.killLatex compileName, callback

  clearProject: (project_id, user_id, _callback = (error) ->) ->
    callback = (error) ->
      _callback(error)
      _callback = () ->

    compileDir = getCompileDir(project_id, user_id)

    CompileManager._checkDirectory compileDir, (err, exists) ->
      return callback(err) if err?
      return callback() if not exists # skip removal if no directory present

      proc = child_process.spawn "rm", ["-r", compileDir]

      proc.on "error", callback

      stderr = ""
      proc.stderr.on "data", (chunk) -> stderr += chunk.toString()

      proc.on "close", (code) ->
        if code == 0
          return callback(null)
        else
          return callback(new Error("rm -r #{compileDir} failed: #{stderr}"))

  _findAllDirs: (callback = (error, allDirs) ->) ->
    root = Settings.path.compilesDir
    fs.readdir root, (err, files) ->
      return callback(err) if err?
      allDirs = (Path.join(root, file) for file in files)
      callback(null, allDirs)

  clearExpiredProjects: (max_cache_age_ms, callback = (error) ->) ->
    now = Date.now()
    # action for each directory
    expireIfNeeded = (checkDir, cb) ->
      fs.stat checkDir, (err, stats) ->
        return cb() if err? # ignore errors checking directory
        age = now - stats.mtime
        hasExpired = (age > max_cache_age_ms)
        if hasExpired then fse.remove(checkDir, cb) else cb()
    # iterate over all project directories
    CompileManager._findAllDirs (error, allDirs) ->
      return callback() if error?
      async.eachSeries allDirs, expireIfNeeded, callback

  _checkDirectory: (compileDir, callback = (error, exists) ->) ->
    fs.lstat compileDir, (err, stats) ->
      if err?.code is 'ENOENT'
        return callback(null, false) # directory does not exist
      else if err?
        logger.err {dir: compileDir, err:err}, "error on stat of project directory for removal"
        return callback(err)
      else if not stats?.isDirectory()
        logger.err {dir: compileDir, stats:stats}, "bad project directory for removal"
        return callback new Error("project directory is not directory")
      else
        callback(null, true) # directory exists

  syncFromCode: (project_id, user_id, file_name, line, column, callback = (error, pdfPositions) ->) ->
    # If LaTeX was run in a virtual environment, the file path that synctex expects
    # might not match the file path on the host. The .synctex.gz file however, will be accessed
    # wherever it is on the host.
    compileName = getCompileName(project_id, user_id)
    base_dir = Settings.path.synctexBaseDir(compileName)
    file_path = base_dir + "/" + file_name
    compileDir = getCompileDir(project_id, user_id)
    synctex_path = "#{base_dir}/output.pdf"
    command = ["code", synctex_path, file_path, line, column]
    fse.ensureDir compileDir, (error) ->
      if error?
        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
        return callback(error)
      CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
        return callback(error) if error?
        logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, command:command, stdout: stdout, "synctex code output"
        callback null, CompileManager._parseSynctexFromCodeOutput(stdout)

  syncFromPdf: (project_id, user_id, page, h, v, callback = (error, filePositions) ->) ->
    compileName = getCompileName(project_id, user_id)
    compileDir = getCompileDir(project_id, user_id)
    base_dir = Settings.path.synctexBaseDir(compileName)
    synctex_path = "#{base_dir}/output.pdf"
    command = ["pdf", synctex_path, page, h, v]
    fse.ensureDir compileDir, (error) ->
      if error?
        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync to code"
        return callback(error)
      CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
        return callback(error) if error?
        logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
        callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)

  _checkFileExists: (path, callback = (error) ->) ->
    synctexDir = Path.dirname(path)
    synctexFile = Path.join(synctexDir, "output.synctex.gz")
    fs.stat synctexDir, (error, stats) ->
      if error?.code is 'ENOENT'
        return callback(new Errors.NotFoundError("called synctex with no output directory"))
      return callback(error) if error?
      fs.stat synctexFile, (error, stats) ->
        if error?.code is 'ENOENT'
          return callback(new Errors.NotFoundError("called synctex with no output file"))
        return callback(error) if error?
        return callback(new Error("not a file")) if not stats?.isFile()
        callback()

  _runSynctex: (project_id, user_id, command, callback = (error, stdout) ->) ->
    seconds = 1000

    command.unshift("/opt/synctex")

    directory = getCompileDir(project_id, user_id)
    timeout = 60 * 1000 # increased to allow for large projects
    compileName = getCompileName(project_id, user_id)
    CommandRunner.run compileName, command, directory, Settings.clsi?.docker.image, timeout, {}, (error, output) ->
      if error?
        logger.err err:error, command:command, project_id:project_id, user_id:user_id, "error running synctex"
        return callback(error)
      callback(null, output.stdout)

  _parseSynctexFromCodeOutput: (output) ->
    results = []
    for line in output.split("\n")
      [node, page, h, v, width, height] = line.split("\t")
      if node == "NODE"
        results.push {
          page: parseInt(page, 10)
          h: parseFloat(h)
          v: parseFloat(v)
          height: parseFloat(height)
          width: parseFloat(width)
        }
    return results

  _parseSynctexFromPdfOutput: (output, base_dir) ->
    results = []
    for line in output.split("\n")
      [node, file_path, line, column] = line.split("\t")
      if node == "NODE"
        file = file_path.slice(base_dir.length + 1)
        results.push {
          file: file
          line: parseInt(line, 10)
          column: parseInt(column, 10)
        }
    return results

  wordcount: (project_id, user_id, file_name, image, callback = (error, pdfPositions) ->) ->
    logger.log project_id:project_id, user_id:user_id, file_name:file_name, image:image, "running wordcount"
    file_path = "$COMPILE_DIR/" + file_name
    command = [ "texcount", '-nocol', '-inc', file_path, "-out=" + file_path + ".wc"]
    compileDir = getCompileDir(project_id, user_id)
    timeout = 60 * 1000
    compileName = getCompileName(project_id, user_id)
    fse.ensureDir compileDir, (error) ->
      if error?
        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
        return callback(error)
      CommandRunner.run compileName, command, compileDir, image, timeout, {}, (error) ->
        return callback(error) if error?
        fs.readFile compileDir + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
          if err?
            #call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
            logger.err node_err:err, command:command, compileDir:compileDir, project_id:project_id, user_id:user_id, "error reading word count output"
            return callback(err)
          results = CompileManager._parseWordcountFromOutput(stdout)
          logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results"
          callback null, results

  _parseWordcountFromOutput: (output) ->
    results = {
      encode: ""
      textWords: 0
      headWords: 0
      outside: 0
      headers: 0
      elements: 0
      mathInline: 0
      mathDisplay: 0
      errors: 0
      messages: ""
    }
    for line in output.split("\n")
      [data, info] = line.split(":")
      if data.indexOf("Encoding") > -1
        results['encode'] = info.trim()
      if data.indexOf("in text") > -1
        results['textWords'] = parseInt(info, 10)
      if data.indexOf("in head") > -1
        results['headWords'] = parseInt(info, 10)
      if data.indexOf("outside") > -1
        results['outside'] = parseInt(info, 10)
      if data.indexOf("of head") > -1
        results['headers'] = parseInt(info, 10)
      if data.indexOf("Number of floats/tables/figures") > -1
        results['elements'] = parseInt(info, 10)
      if data.indexOf("Number of math inlines") > -1
        results['mathInline'] = parseInt(info, 10)
      if data.indexOf("Number of math displayed") > -1
        results['mathDisplay'] = parseInt(info, 10)
      if data is "(errors" # errors reported as (errors:123)
        results['errors'] = parseInt(info, 10)
      if line.indexOf("!!! ") > -1 # errors logged as !!! message !!!
        results['messages'] += line + "\n"
    return results
deleted file (exports ContentTypeMapper, 24 lines)
@@ -1,24 +0,0 @@
Path = require 'path'

# here we coerce html, css and js to text/plain,
# otherwise choose correct mime type based on file extension,
# falling back to octet-stream
module.exports = ContentTypeMapper =
  map: (path) ->
    switch Path.extname(path)
      when '.txt', '.html', '.js', '.css', '.svg'
        return 'text/plain'
      when '.csv'
        return 'text/csv'
      when '.pdf'
        return 'application/pdf'
      when '.png'
        return 'image/png'
      when '.jpg', '.jpeg'
        return 'image/jpeg'
      when '.tiff'
        return 'image/tiff'
      when '.gif'
        return 'image/gif'
      else
        return 'application/octet-stream'
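A quick usage sketch for the mapper above (hypothetical calls, not from the diff); the html/js/css downgrade to text/plain is deliberate, presumably so user-generated files are displayed rather than executed when served:

// hypothetical usage of the decaffeinated app/js/ContentTypeMapper
const ContentTypeMapper = require('./app/js/ContentTypeMapper')

console.log(ContentTypeMapper.map('output.pdf')) // 'application/pdf'
console.log(ContentTypeMapper.map('figure.png')) // 'image/png'
console.log(ContentTypeMapper.map('page.html'))  // 'text/plain' (coerced)
console.log(ContentTypeMapper.map('data.xyz'))   // 'application/octet-stream'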
deleted file (exports an async task queue, 13 lines)
@@ -1,13 +0,0 @@
async = require "async"
Settings = require "settings-sharelatex"
logger = require("logger-sharelatex")
queue = async.queue((task, cb)->
  task(cb)
, Settings.parallelSqlQueryLimit)

queue.drain = ()->
  logger.debug('all items have been processed')

module.exports =
  queue: queue
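For context (a hedged, self-contained sketch, not code from the diff): `async.queue` runs at most N tasks at once, and because the worker here just calls `task(cb)`, each item pushed is itself a function taking a completion callback. The concurrency value below stands in for `Settings.parallelSqlQueryLimit`:

// self-contained sketch of the queue pattern above
const async = require('async')

const queue = async.queue((task, cb) => task(cb), 2) // 2 stands in for parallelSqlQueryLimit

queue.push(done => {
  // one unit of work, e.g. a single SQL query
  setTimeout(() => done(null), 10)
})

queue.drain = () => console.log('all items have been processed')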
deleted file (exports LockManager, 56 lines)
@@ -1,56 +0,0 @@
logger = require "logger-sharelatex"

LockState = {} # locks for docker container operations, by container name

module.exports = LockManager =

  MAX_LOCK_HOLD_TIME: 15000 # how long we can keep a lock
  MAX_LOCK_WAIT_TIME: 10000 # how long we wait for a lock
  LOCK_TEST_INTERVAL: 1000 # retry time

  tryLock: (key, callback = (err, gotLock) ->) ->
    existingLock = LockState[key]
    if existingLock? # the lock is already taken, check how old it is
      lockAge = Date.now() - existingLock.created
      if lockAge < LockManager.MAX_LOCK_HOLD_TIME
        return callback(null, false) # we didn't get the lock, bail out
      else
        logger.error {key: key, lock: existingLock, age:lockAge}, "taking old lock by force"
    # take the lock
    LockState[key] = lockValue = {created: Date.now()}
    callback(null, true, lockValue)

  getLock: (key, callback = (error, lockValue) ->) ->
    startTime = Date.now()
    do attempt = () ->
      LockManager.tryLock key, (error, gotLock, lockValue) ->
        return callback(error) if error?
        if gotLock
          callback(null, lockValue)
        else if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME
          e = new Error("Lock timeout")
          e.key = key
          return callback(e)
        else
          setTimeout attempt, LockManager.LOCK_TEST_INTERVAL

  releaseLock: (key, lockValue, callback = (error) ->) ->
    existingLock = LockState[key]
    if existingLock is lockValue # lockValue is an object, so we can test by reference
      delete LockState[key] # our lock, so we can free it
      callback()
    else if existingLock? # lock exists but doesn't match ours
      logger.error {key:key, lock: existingLock}, "tried to release lock taken by force"
      callback()
    else
      logger.error {key:key, lock: existingLock}, "tried to release lock that has gone"
      callback()

  runWithLock: (key, runner = ( (releaseLock = (error) ->) -> ), callback = ( (error) -> )) ->
    LockManager.getLock key, (error, lockValue) ->
      return callback(error) if error?
      runner (error1, args...) ->
        LockManager.releaseLock key, lockValue, (error2) ->
          error = error1 or error2
          return callback(error) if error?
          callback(null, args...)
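The point of `runWithLock` above is that the caller's job always hands control back through `releaseLock`, so the lock is freed exactly once before the outer callback runs. A self-contained sketch of the calling pattern (the stub and all names are illustrative, not from the diff):

// stub standing in for the LockManager above; the real version does
// getLock -> runner -> releaseLock -> callback
const LockManager = {
  runWithLock(key, runner, callback) {
    runner(error => callback(error))
  }
}

LockManager.runWithLock(
  'project-1234-fingerprint', // hypothetical lock key (one per container name)
  releaseLock => {
    // the container operation; report success or failure via releaseLock
    setTimeout(() => releaseLock(null), 10)
  },
  err => {
    // runs only after the lock has been released
    if (err) console.error('container operation failed', err)
  }
)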
@@ -1,358 +0,0 @@
|
||||
Settings = require "settings-sharelatex"
|
||||
logger = require "logger-sharelatex"
|
||||
Docker = require("dockerode")
|
||||
dockerode = new Docker()
|
||||
crypto = require "crypto"
|
||||
async = require "async"
|
||||
LockManager = require "./DockerLockManager"
|
||||
fs = require "fs"
|
||||
Path = require 'path'
|
||||
_ = require "underscore"
|
||||
|
||||
logger.info "using docker runner"
|
||||
|
||||
usingSiblingContainers = () ->
|
||||
Settings?.path?.sandboxedCompilesHostDir?
|
||||
|
||||
module.exports = DockerRunner =
|
||||
ERR_NOT_DIRECTORY: new Error("not a directory")
|
||||
ERR_TERMINATED: new Error("terminated")
|
||||
ERR_EXITED: new Error("exited")
|
||||
ERR_TIMED_OUT: new Error("container timed out")
|
||||
|
||||
run: (project_id, command, directory, image, timeout, environment, callback = (error, output) ->) ->
|
||||
|
||||
if usingSiblingContainers()
|
||||
_newPath = Settings.path.sandboxedCompilesHostDir
|
||||
logger.log {path: _newPath}, "altering bind path for sibling containers"
|
||||
# Server Pro, example:
|
||||
# '/var/lib/sharelatex/data/compiles/<project-id>'
|
||||
# ... becomes ...
|
||||
# '/opt/sharelatex_data/data/compiles/<project-id>'
|
||||
directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory))
|
||||
|
||||
volumes = {}
|
||||
volumes[directory] = "/compile"
|
||||
|
||||
command = (arg.toString().replace?('$COMPILE_DIR', "/compile") for arg in command)
|
||||
if !image?
|
||||
image = Settings.clsi.docker.image
|
||||
|
||||
if Settings.texliveImageNameOveride?
|
||||
img = image.split("/")
|
||||
image = "#{Settings.texliveImageNameOveride}/#{img[2]}"
|
||||
|
||||
options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment)
|
||||
fingerprint = DockerRunner._fingerprintContainer(options)
|
||||
options.name = name = "project-#{project_id}-#{fingerprint}"
|
||||
|
||||
# logOptions = _.clone(options)
|
||||
# logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
|
||||
logger.log project_id: project_id, "running docker container"
|
||||
DockerRunner._runAndWaitForContainer options, volumes, timeout, (error, output) ->
|
||||
if error?.message?.match("HTTP code is 500")
|
||||
logger.log err: error, project_id: project_id, "error running container so destroying and retrying"
|
||||
DockerRunner.destroyContainer name, null, true, (error) ->
|
||||
return callback(error) if error?
|
||||
DockerRunner._runAndWaitForContainer options, volumes, timeout, callback
|
||||
else
|
||||
callback(error, output)
|
||||
|
||||
return name # pass back the container name to allow it to be killed
|
||||
|
||||
kill: (container_id, callback = (error) ->) ->
|
||||
logger.log container_id: container_id, "sending kill signal to container"
|
||||
container = dockerode.getContainer(container_id)
|
||||
container.kill (error) ->
|
||||
if error? and error?.message?.match?(/Cannot kill container .* is not running/)
|
||||
logger.warn err: error, container_id: container_id, "container not running, continuing"
|
||||
error = null
|
||||
if error?
|
||||
logger.error err: error, container_id: container_id, "error killing container"
|
||||
return callback(error)
|
||||
else
|
||||
callback()
|
||||
|
||||
_runAndWaitForContainer: (options, volumes, timeout, _callback = (error, output) ->) ->
|
||||
callback = (args...) ->
|
||||
_callback(args...)
|
||||
# Only call the callback once
|
||||
_callback = () ->
|
||||
|
||||
name = options.name
|
||||
|
||||
streamEnded = false
|
||||
containerReturned = false
|
||||
output = {}
|
||||
|
||||
callbackIfFinished = () ->
|
||||
if streamEnded and containerReturned
|
||||
callback(null, output)
|
||||
|
||||
attachStreamHandler = (error, _output) ->
|
||||
return callback(error) if error?
|
||||
output = _output
|
||||
streamEnded = true
|
||||
callbackIfFinished()
|
||||
|
||||
DockerRunner.startContainer options, volumes, attachStreamHandler, (error, containerId) ->
|
||||
return callback(error) if error?
|
||||
|
||||
DockerRunner.waitForContainer name, timeout, (error, exitCode) ->
|
||||
return callback(error) if error?
|
||||
if exitCode is 137 # exit status from kill -9
|
||||
err = DockerRunner.ERR_TERMINATED
|
||||
err.terminated = true
|
||||
return callback(err)
|
||||
if exitCode is 1 # exit status from chktex
|
||||
err = DockerRunner.ERR_EXITED
|
||||
err.code = exitCode
|
||||
return callback(err)
|
||||
containerReturned = true
|
||||
options?.HostConfig?.SecurityOpt = null #small log line
|
||||
logger.log err:err, exitCode:exitCode, options:options, "docker container has exited"
|
||||
callbackIfFinished()
|
||||
|
||||
	_getContainerOptions: (command, image, volumes, timeout, environment) ->
		timeoutInSeconds = timeout / 1000

		dockerVolumes = {}
		for hostVol, dockerVol of volumes
			dockerVolumes[dockerVol] = {}

			if volumes[hostVol].slice(-3).indexOf(":r") == -1
				volumes[hostVol] = "#{dockerVol}:rw"

		# merge settings and environment parameter
		env = {}
		for src in [Settings.clsi.docker.env, environment or {}]
			env[key] = value for key, value of src
		# set the path based on the image year
		if m = image.match /:([0-9]+)\.[0-9]+/
			year = m[1]
		else
			year = "2014"
		env['PATH'] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/#{year}/bin/x86_64-linux/"
		options =
			"Cmd" : command,
			"Image" : image
			"Volumes" : dockerVolumes
			"WorkingDir" : "/compile"
			"NetworkDisabled" : true
			"Memory" : 1024 * 1024 * 1024 * 1024 # 1 TB
			"User" : Settings.clsi.docker.user
			"Env" : ("#{key}=#{value}" for key, value of env) # convert the environment hash to an array
			"HostConfig" :
				"Binds": ("#{hostVol}:#{dockerVol}" for hostVol, dockerVol of volumes)
				"LogConfig": {"Type": "none", "Config": {}}
				"Ulimits": [{'Name': 'cpu', 'Soft': timeoutInSeconds+5, 'Hard': timeoutInSeconds+10}]
				"CapDrop": "ALL"
				"SecurityOpt": ["no-new-privileges"]

		if Settings.path?.synctexBinHostPath?
			options["HostConfig"]["Binds"].push("#{Settings.path.synctexBinHostPath}:/opt/synctex:ro")

		if Settings.clsi.docker.seccomp_profile?
			options.HostConfig.SecurityOpt.push "seccomp=#{Settings.clsi.docker.seccomp_profile}"

		return options

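	# Worked example (illustrative values, not from the original file): for
	# volumes = {"/data/compiles/project1": "/compile"} and a 60000ms timeout,
	# the options built above include
	#   "HostConfig": { "Binds": ["/data/compiles/project1:/compile:rw"],
	#                   "Ulimits": [{"Name": "cpu", "Soft": 65, "Hard": 70}], ... }
	# so the CPU-time ulimit acts as a backstop slightly beyond the compile timeout.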
	_fingerprintContainer: (containerOptions) ->
		# Yay, Hashing!
		json = JSON.stringify(containerOptions)
		return crypto.createHash("md5").update(json).digest("hex")

	startContainer: (options, volumes, attachStreamHandler, callback) ->
		LockManager.runWithLock options.name, (releaseLock) ->
			# Check that volumes exist before starting the container.
			# When a container is started with volume pointing to a
			# non-existent directory then docker creates the directory but
			# with root ownership.
			DockerRunner._checkVolumes options, volumes, (err) ->
				return releaseLock(err) if err?
				DockerRunner._startContainer options, volumes, attachStreamHandler, releaseLock
		, callback

	# Check that volumes exist and are directories
	_checkVolumes: (options, volumes, callback = (error, containerName) ->) ->
		if usingSiblingContainers()
			# Server Pro, with sibling-containers active, skip checks
			return callback(null)

		checkVolume = (path, cb) ->
			fs.stat path, (err, stats) ->
				return cb(err) if err?
				return cb(DockerRunner.ERR_NOT_DIRECTORY) if not stats?.isDirectory()
				cb()
		jobs = []
		for vol of volumes
			do (vol) ->
				jobs.push (cb) -> checkVolume(vol, cb)
		async.series jobs, callback

	_startContainer: (options, volumes, attachStreamHandler, callback = ((error, output) ->)) ->
		callback = _.once(callback)
		name = options.name

		logger.log {container_name: name}, "starting container"
		container = dockerode.getContainer(name)

		createAndStartContainer = ->
			dockerode.createContainer options, (error, container) ->
				return callback(error) if error?
				startExistingContainer()

		startExistingContainer = ->
			DockerRunner.attachToContainer options.name, attachStreamHandler, (error) ->
				return callback(error) if error?
				container.start (error) ->
					if error? and error?.statusCode != 304 # already running
						return callback(error)
					else
						callback()

		container.inspect (error, stats) ->
			if error?.statusCode == 404
				createAndStartContainer()
			else if error?
				logger.err {container_name: name, error: error}, "unable to inspect container to start"
				return callback(error)
			else
				startExistingContainer()

	attachToContainer: (containerId, attachStreamHandler, attachStartCallback) ->
		container = dockerode.getContainer(containerId)
		container.attach {stdout: 1, stderr: 1, stream: 1}, (error, stream) ->
			if error?
				logger.error err: error, container_id: containerId, "error attaching to container"
				return attachStartCallback(error)
			else
				attachStartCallback()

			logger.log container_id: containerId, "attached to container"

			MAX_OUTPUT = 1024 * 1024 # limit output to 1MB
			createStringOutputStream = (name) ->
				return {
					data: ""
					overflowed: false
					write: (data) ->
						return if @overflowed
						if @data.length < MAX_OUTPUT
							@data += data
						else
							logger.error container_id: containerId, length: @data.length, maxLen: MAX_OUTPUT, "#{name} exceeds max size"
							@data += "(...truncated at #{MAX_OUTPUT} chars...)"
							@overflowed = true
					# kill container if too much output
					# docker.containers.kill(containerId, () ->)
				}

			stdout = createStringOutputStream "stdout"
			stderr = createStringOutputStream "stderr"

			container.modem.demuxStream(stream, stdout, stderr)

			stream.on "error", (err) ->
				logger.error err: err, container_id: containerId, "error reading from container stream"

			stream.on "end", () ->
				attachStreamHandler null, {stdout: stdout.data, stderr: stderr.data}

	waitForContainer: (containerId, timeout, _callback = (error, exitCode) ->) ->
		callback = (args...) ->
			_callback(args...)
			# Only call the callback once
			_callback = () ->

		container = dockerode.getContainer(containerId)

		timedOut = false
		timeoutId = setTimeout () ->
			timedOut = true
			logger.log container_id: containerId, "timeout reached, killing container"
			container.kill(() ->)
		, timeout

		logger.log container_id: containerId, "waiting for docker container"
		container.wait (error, res) ->
			if error?
				clearTimeout timeoutId
				logger.error err: error, container_id: containerId, "error waiting for container"
				return callback(error)
			if timedOut
				logger.log containerId: containerId, "docker container timed out"
				error = DockerRunner.ERR_TIMED_OUT
				error.timedout = true
				callback error
			else
				clearTimeout timeoutId
				logger.log container_id: containerId, exitCode: res.StatusCode, "docker container returned"
				callback null, res.StatusCode

	destroyContainer: (containerName, containerId, shouldForce, callback = (error) ->) ->
		# We want the containerName for the lock and, ideally, the
		# containerId to delete. There is a bug in the docker.io module
		# where if you delete by name and there is an error, it throws an
		# async exception, but if you delete by id it just does a normal
		# error callback. We fall back to deleting by name if no id is
		# supplied.
		LockManager.runWithLock containerName, (releaseLock) ->
			DockerRunner._destroyContainer containerId or containerName, shouldForce, releaseLock
		, callback

	_destroyContainer: (containerId, shouldForce, callback = (error) ->) ->
		logger.log container_id: containerId, "destroying docker container"
		container = dockerode.getContainer(containerId)
		container.remove {force: shouldForce == true}, (error) ->
			if error? and error?.statusCode == 404
				logger.warn err: error, container_id: containerId, "container not found, continuing"
				error = null
			if error?
				logger.error err: error, container_id: containerId, "error destroying container"
			else
				logger.log container_id: containerId, "destroyed container"
			callback(error)

	# handle expiry of docker containers

	MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge or oneHour = 60 * 60 * 1000

	examineOldContainer: (container, callback = (error, name, id, ttl) ->) ->
		name = container.Name or container.Names?[0]
		created = container.Created * 1000 # creation time is returned in seconds
		now = Date.now()
		age = now - created
		maxAge = DockerRunner.MAX_CONTAINER_AGE
		ttl = maxAge - age
		logger.log {containerName: name, created: created, now: now, age: age, maxAge: maxAge, ttl: ttl}, "checking whether to destroy container"
		callback(null, name, container.Id, ttl)

	destroyOldContainers: (callback = (error) ->) ->
		dockerode.listContainers all: true, (error, containers) ->
			return callback(error) if error?
			jobs = []
			for container in containers or []
				do (container) ->
					DockerRunner.examineOldContainer container, (err, name, id, ttl) ->
						if name.slice(0, 9) == '/project-' && ttl <= 0
							jobs.push (cb) ->
								DockerRunner.destroyContainer name, id, false, () -> cb()
			# Ignore errors because some containers get stuck but
			# will be destroyed next time
			async.series jobs, callback

	startContainerMonitor: () ->
		logger.log {maxAge: DockerRunner.MAX_CONTAINER_AGE}, "starting container expiry"
		# randomise the start time
		randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
		setTimeout () ->
			setInterval () ->
				DockerRunner.destroyOldContainers()
			, oneHour = 60 * 60 * 1000
		, randomDelay

DockerRunner.startContainerMonitor()

@@ -1,24 +0,0 @@
fs = require "fs"
logger = require "logger-sharelatex"

module.exports = DraftModeManager =
	injectDraftMode: (filename, callback = (error) ->) ->
		fs.readFile filename, "utf8", (error, content) ->
			return callback(error) if error?
			# avoid adding draft mode more than once
			if content?.indexOf("\\documentclass\[draft") >= 0
				return callback()
			modified_content = DraftModeManager._injectDraftOption content
			logger.log {
				content: content.slice(0,1024), # \documentclass is normally very near the top
				modified_content: modified_content.slice(0,1024),
				filename
			}, "injected draft class"
			fs.writeFile filename, modified_content, callback

	_injectDraftOption: (content) ->
		content
			# With existing options (must be first, otherwise both are applied)
			.replace(/\\documentclass\[/g, "\\documentclass[draft,")
			# Without existing options
			.replace(/\\documentclass\{/g, "\\documentclass[draft]{")
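# A minimal illustration (annotation, not part of the original file) of what
# the two substitutions in _injectDraftOption do to a typical preamble:
#   \documentclass[12pt]{article}  ->  \documentclass[draft,12pt]{article}
#   \documentclass{article}        ->  \documentclass[draft]{article}
# Running the options-form replacement first is what stops the second pattern
# from also matching the already-rewritten line.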
@@ -1,25 +0,0 @@
NotFoundError = (message) ->
	error = new Error(message)
	error.name = "NotFoundError"
	error.__proto__ = NotFoundError.prototype
	return error
NotFoundError.prototype.__proto__ = Error.prototype

FilesOutOfSyncError = (message) ->
	error = new Error(message)
	error.name = "FilesOutOfSyncError"
	error.__proto__ = FilesOutOfSyncError.prototype
	return error
FilesOutOfSyncError.prototype.__proto__ = Error.prototype

AlreadyCompilingError = (message) ->
	error = new Error(message)
	error.name = "AlreadyCompilingError"
	error.__proto__ = AlreadyCompilingError.prototype
	return error
AlreadyCompilingError.prototype.__proto__ = Error.prototype

module.exports = Errors =
	NotFoundError: NotFoundError
	FilesOutOfSyncError: FilesOutOfSyncError
	AlreadyCompilingError: AlreadyCompilingError
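# Sketch (annotation, not part of the original file): the prototype juggling
# above keeps instanceof checks working for both the subclass and Error, e.g.
#   err = new Errors.NotFoundError("no such project")
#   err instanceof Errors.NotFoundError  # => true
#   err instanceof Error                 # => true
# The explicit `return error` also makes the constructors work without `new`.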
@@ -1,95 +0,0 @@
Path = require "path"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
CommandRunner = require "./CommandRunner"

ProcessTable = {} # table of currently running jobs (pids or docker container names)

module.exports = LatexRunner =
	runLatex: (project_id, options, callback = (error) ->) ->
		{directory, mainFile, compiler, timeout, image, environment, flags} = options
		compiler ||= "pdflatex"
		timeout ||= 60000 # milliseconds

		logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, flags: flags, "starting compile"

		# We want to run latexmk on the tex file which we will automatically
		# generate from the Rtex/Rmd/md file.
		mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex")

		if compiler == "pdflatex"
			command = LatexRunner._pdflatexCommand mainFile, flags
		else if compiler == "latex"
			command = LatexRunner._latexCommand mainFile, flags
		else if compiler == "xelatex"
			command = LatexRunner._xelatexCommand mainFile, flags
		else if compiler == "lualatex"
			command = LatexRunner._lualatexCommand mainFile, flags
		else
			return callback new Error("unknown compiler: #{compiler}")

		if Settings.clsi?.strace
			command = ["strace", "-o", "strace", "-ff"].concat(command)

		id = "#{project_id}" # record running project under this id

		ProcessTable[id] = CommandRunner.run project_id, command, directory, image, timeout, environment, (error, output) ->
			delete ProcessTable[id]
			return callback(error) if error?
			runs = output?.stderr?.match(/^Run number \d+ of .*latex/mg)?.length or 0
			failed = if output?.stdout?.match(/^Latexmk: Errors/m)? then 1 else 0
			# counters from latexmk output
			stats = {}
			stats["latexmk-errors"] = failed
			stats["latex-runs"] = runs
			stats["latex-runs-with-errors"] = if failed then runs else 0
			stats["latex-runs-#{runs}"] = 1
			stats["latex-runs-with-errors-#{runs}"] = if failed then 1 else 0
			# timing information from /usr/bin/time
			timings = {}
			stderr = output?.stderr
			timings["cpu-percent"] = stderr?.match(/Percent of CPU this job got: (\d+)/m)?[1] or 0
			timings["cpu-time"] = stderr?.match(/User time.*: (\d+.\d+)/m)?[1] or 0
			timings["sys-time"] = stderr?.match(/System time.*: (\d+.\d+)/m)?[1] or 0
			callback error, output, stats, timings

	killLatex: (project_id, callback = (error) ->) ->
		id = "#{project_id}"
		logger.log {id: id}, "killing running compile"
		if not ProcessTable[id]?
			logger.warn {id}, "no such project to kill"
			return callback(null)
		else
			CommandRunner.kill ProcessTable[id], callback

	_latexmkBaseCommand: (flags) ->
		args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1", "-interaction=batchmode"]
		if flags
			args = args.concat(flags)
		(Settings?.clsi?.latexmkCommandPrefix || []).concat(args)

	_pdflatexCommand: (mainFile, flags) ->
		LatexRunner._latexmkBaseCommand(flags).concat [
			"-pdf",
			Path.join("$COMPILE_DIR", mainFile)
		]

	_latexCommand: (mainFile, flags) ->
		LatexRunner._latexmkBaseCommand(flags).concat [
			"-pdfdvi",
			Path.join("$COMPILE_DIR", mainFile)
		]

	_xelatexCommand: (mainFile, flags) ->
		LatexRunner._latexmkBaseCommand(flags).concat [
			"-xelatex",
			Path.join("$COMPILE_DIR", mainFile)
		]

	_lualatexCommand: (mainFile, flags) ->
		LatexRunner._latexmkBaseCommand(flags).concat [
			"-lualatex",
			Path.join("$COMPILE_DIR", mainFile)
		]

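# For reference (constructed by hand from the helpers above, not taken from
# the original file), a pdflatex compile of main.tex builds a command array
# along the lines of:
#   ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR",
#    "-outdir=$COMPILE_DIR", "-synctex=1", "-interaction=batchmode",
#    "-pdf", "$COMPILE_DIR/main.tex"]
# with the $COMPILE_DIR placeholders substituted later by the CommandRunner.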
@@ -1,48 +0,0 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"

logger.info "using standard command runner"

module.exports = CommandRunner =
	run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
		command = (arg.toString().replace('$COMPILE_DIR', directory) for arg in command)
		logger.log project_id: project_id, command: command, directory: directory, "running command"
		logger.warn "timeouts and sandboxing are not enabled with CommandRunner"

		# merge environment settings
		env = {}
		env[key] = value for key, value of process.env
		env[key] = value for key, value of environment

		# run command as detached process so it has its own process group (which can be killed if needed)
		proc = spawn command[0], command.slice(1), cwd: directory, env: env

		stdout = ""
		proc.stdout.on "data", (data) ->
			stdout += data

		proc.on "error", (err) ->
			logger.err err: err, project_id: project_id, command: command, directory: directory, "error running command"
			callback(err)

		proc.on "close", (code, signal) ->
			logger.info code: code, signal: signal, project_id: project_id, "command exited"
			if signal is 'SIGTERM' # signal from kill method below
				err = new Error("terminated")
				err.terminated = true
				return callback(err)
			else if code is 1 # exit status from chktex
				err = new Error("exited")
				err.code = code
				return callback(err)
			else
				callback(null, {"stdout": stdout})

		return proc.pid # return process id to allow job to be killed if necessary

	kill: (pid, callback = (error) ->) ->
		try
			process.kill -pid # kill all processes in group
		catch err
			return callback(err)
		callback()
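# Note on the kill/-pid pattern above (annotation, not original code):
# process.kill(-pid) signals the whole process group, which only targets the
# spawned command's group if the child was started as a group leader, i.e.
# with spawn(..., detached: true) as the comment in run() describes; without
# that option the negative-pid form would signal the caller's own group.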
@@ -1,31 +0,0 @@
Settings = require('settings-sharelatex')
logger = require "logger-sharelatex"
Lockfile = require('lockfile') # from https://github.com/npm/lockfile
Errors = require "./Errors"
fs = require("fs")
Path = require("path")
module.exports = LockManager =
	LOCK_TEST_INTERVAL: 1000 # 1s between each test of the lock
	MAX_LOCK_WAIT_TIME: 15000 # 15s maximum time to spend trying to get the lock
	LOCK_STALE: 5*60*1000 # 5 mins time until lock auto expires

	runWithLock: (path, runner = ((releaseLock = (error) ->) ->), callback = ((error) ->)) ->
		lockOpts =
			wait: @MAX_LOCK_WAIT_TIME
			pollPeriod: @LOCK_TEST_INTERVAL
			stale: @LOCK_STALE
		Lockfile.lock path, lockOpts, (error) ->
			if error?.code is 'EEXIST'
				return callback new Errors.AlreadyCompilingError("compile in progress")
			else if error?
				fs.lstat path, (statLockErr, statLock) ->
					fs.lstat Path.dirname(path), (statDirErr, statDir) ->
						fs.readdir Path.dirname(path), (readdirErr, readdirDir) ->
							logger.err error: error, path: path, statLock: statLock, statLockErr: statLockErr, statDir: statDir, statDirErr: statDirErr, readdirErr: readdirErr, readdirDir: readdirDir, "unable to get lock"
							return callback(error)
			else
				runner (error1, args...) ->
					Lockfile.unlock path, (error2) ->
						error = error1 or error2
						return callback(error) if error?
						callback(null, args...)
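# Usage sketch (illustrative only; the lock path is hypothetical, not from
# the original file):
#   LockManager.runWithLock "/compiles/#{project_id}.lock", (releaseLock) ->
#     doCompile(releaseLock)
#   , (error) ->
#     # an AlreadyCompilingError lands here when the lockfile already exists
# The runner must call releaseLock exactly once so the lockfile is removed.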
@@ -1,2 +0,0 @@
module.exports = require "metrics-sharelatex"

@@ -1,199 +0,0 @@
async = require "async"
fs = require "fs"
fse = require "fs-extra"
Path = require "path"
logger = require "logger-sharelatex"
_ = require "underscore"
Settings = require "settings-sharelatex"
crypto = require "crypto"

OutputFileOptimiser = require "./OutputFileOptimiser"

module.exports = OutputCacheManager =
	CACHE_SUBDIR: '.cache/clsi'
	ARCHIVE_SUBDIR: '.archive/clsi'
	# build id is HEXDATE-HEXRANDOM from Date.now() and RandomBytes
	# for backwards compatibility, make the randombytes part optional
	BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/
	CACHE_LIMIT: 2 # maximum number of cache directories
	CACHE_AGE: 60*60*1000 # up to one hour old

	path: (buildId, file) ->
		# used by static server, given build id return '.cache/clsi/buildId'
		if buildId.match OutputCacheManager.BUILD_REGEX
			return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
		else
			# for invalid build id, return top level
			return file

	generateBuildId: (callback = (error, buildId) ->) ->
		# generate a secure build id from Date.now() and 8 random bytes in hex
		crypto.randomBytes 8, (err, buf) ->
			return callback(err) if err?
			random = buf.toString('hex')
			date = Date.now().toString(16)
			callback err, "#{date}-#{random}"

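	# Example (illustrative values, not from the original file): a build
	# generated at millisecond timestamp 0x16d0f9e5a10 with random bytes
	# 9f86d081884c7d65 gets the id
	#   "16d0f9e5a10-9f86d081884c7d65"
	# which matches BUILD_REGEX above; for ids of equal length the hex date
	# prefix also makes a descending sort of the cache dirs newest-first.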
	saveOutputFiles: (outputFiles, compileDir, callback = (error) ->) ->
		OutputCacheManager.generateBuildId (err, buildId) ->
			return callback(err) if err?
			OutputCacheManager.saveOutputFilesInBuildDir outputFiles, compileDir, buildId, callback

	saveOutputFilesInBuildDir: (outputFiles, compileDir, buildId, callback = (error) ->) ->
		# make a compileDir/CACHE_SUBDIR/build_id directory and
		# copy all the output files into it
		cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
		# Put the files into a new cache subdirectory
		cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId)
		# Is it a per-user compile? check if compile directory is PROJECTID-USERID
		perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/)

		# Archive logs in background
		if Settings.clsi?.archive_logs or Settings.clsi?.strace
			OutputCacheManager.archiveLogs outputFiles, compileDir, buildId, (err) ->
				if err?
					logger.warn err: err, "error archiving log files"

		# make the new cache directory
		fse.ensureDir cacheDir, (err) ->
			if err?
				logger.error err: err, directory: cacheDir, "error creating cache directory"
				callback(err, outputFiles)
			else
				# copy all the output files into the new cache directory
				results = []
				async.mapSeries outputFiles, (file, cb) ->
					# don't send dot files as output, express doesn't serve them
					if OutputCacheManager._fileIsHidden(file.path)
						logger.debug compileDir: compileDir, path: file.path, "ignoring dotfile in output"
						return cb()
					# copy other files into cache directory if valid
					newFile = _.clone(file)
					[src, dst] = [Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]
					OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
						return cb(err) if err?
						if !isSafe
							return cb()
						OutputCacheManager._checkIfShouldCopy src, (err, shouldCopy) ->
							return cb(err) if err?
							if !shouldCopy
								return cb()
							OutputCacheManager._copyFile src, dst, (err) ->
								return cb(err) if err?
								newFile.build = buildId # attach a build id if we cached the file
								results.push newFile
								cb()
				, (err) ->
					if err?
						# pass back the original files if we encountered *any* error
						callback(err, outputFiles)
						# clean up the directory we just created
						fse.remove cacheDir, (err) ->
							if err?
								logger.error err: err, dir: cacheDir, "error removing cache dir after failure"
					else
						# pass back the list of new files in the cache
						callback(err, results)
					# let file expiry run in the background, expire all previous files if per-user
					OutputCacheManager.expireOutputFiles cacheRoot, {keep: buildId, limit: if perUser then 1 else null}

	archiveLogs: (outputFiles, compileDir, buildId, callback = (error) ->) ->
		archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId)
		logger.log {dir: archiveDir}, "archiving log files for project"
		fse.ensureDir archiveDir, (err) ->
			return callback(err) if err?
			async.mapSeries outputFiles, (file, cb) ->
				[src, dst] = [Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]
				OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
					return cb(err) if err?
					return cb() if !isSafe
					OutputCacheManager._checkIfShouldArchive src, (err, shouldArchive) ->
						return cb(err) if err?
						return cb() if !shouldArchive
						OutputCacheManager._copyFile src, dst, cb
			, callback

	expireOutputFiles: (cacheRoot, options, callback = (error) ->) ->
		# look in compileDir for build dirs and delete if > N or age of mod time > T
		fs.readdir cacheRoot, (err, results) ->
			if err?
				return callback(null) if err.code == 'ENOENT' # cache directory doesn't exist yet
				logger.error err: err, project_id: cacheRoot, "error clearing cache"
				return callback(err)

			dirs = results.sort().reverse()
			currentTime = Date.now()

			isExpired = (dir, index) ->
				return false if options?.keep == dir
				# remove any directories over the requested (non-null) limit
				return true if options?.limit? and index > options.limit
				# remove any directories over the hard limit
				return true if index > OutputCacheManager.CACHE_LIMIT
				# we can get the build time from the first part of the directory name DDDD-RRRR
				# DDDD is date and RRRR is random bytes
				dirTime = parseInt(dir.split('-')?[0], 16)
				age = currentTime - dirTime
				return age > OutputCacheManager.CACHE_AGE

			toRemove = _.filter(dirs, isExpired)

			removeDir = (dir, cb) ->
				fse.remove Path.join(cacheRoot, dir), (err, result) ->
					logger.log cache: cacheRoot, dir: dir, "removed expired cache dir"
					if err?
						logger.error err: err, dir: dir, "cache remove error"
					cb(err, result)

			async.eachSeries toRemove, (dir, cb) ->
				removeDir dir, cb
			, callback

	_fileIsHidden: (path) ->
		return path?.match(/^\.|\/\./)?

	_checkFileIsSafe: (src, callback = (error, isSafe) ->) ->
		# check if we have a valid file to copy into the cache
		fs.stat src, (err, stats) ->
			if err?.code is 'ENOENT'
				logger.warn err: err, file: src, "file has disappeared before copying to build cache"
				callback(err, false)
			else if err?
				# some other problem reading the file
				logger.error err: err, file: src, "stat error for file in cache"
				callback(err, false)
			else if not stats.isFile()
				# other filetype - reject it
				logger.warn src: src, stat: stats, "nonfile output - refusing to copy to cache"
				callback(null, false)
			else
				# it's a plain file, ok to copy
				callback(null, true)

	_copyFile: (src, dst, callback) ->
		# copy output file into the cache
		fse.copy src, dst, (err) ->
			if err?.code is 'ENOENT'
				logger.warn err: err, file: src, "file has disappeared when copying to build cache"
				callback(err, false)
			else if err?
				logger.error err: err, src: src, dst: dst, "copy error for file in cache"
				callback(err)
			else
				if Settings.clsi?.optimiseInDocker
					# don't run any optimisations on the pdf when they are done
					# in the docker container
					callback()
				else
					# call the optimiser for the file too
					OutputFileOptimiser.optimiseFile src, dst, callback

	_checkIfShouldCopy: (src, callback = (err, shouldCopy) ->) ->
		return callback(null, !Path.basename(src).match(/^strace/))

	_checkIfShouldArchive: (src, callback = (err, shouldCopy) ->) ->
		if Path.basename(src).match(/^strace/)
			return callback(null, true)
		if Settings.clsi?.archive_logs and Path.basename(src) in ["output.log", "output.blg"]
			return callback(null, true)
		return callback(null, false)
@@ -1,50 +0,0 @@
async = require "async"
fs = require "fs"
Path = require "path"
spawn = require("child_process").spawn
logger = require "logger-sharelatex"

module.exports = OutputFileFinder =
	findOutputFiles: (resources, directory, callback = (error, outputFiles, allFiles) ->) ->
		incomingResources = {}
		for resource in resources
			incomingResources[resource.path] = true

		OutputFileFinder._getAllFiles directory, (error, allFiles = []) ->
			if error?
				logger.err err: error, "error finding all output files"
				return callback(error)
			outputFiles = []
			for file in allFiles
				if !incomingResources[file]
					outputFiles.push {
						path: file
						type: file.match(/\.([^\.]+)$/)?[1]
					}
			callback null, outputFiles, allFiles

	_getAllFiles: (directory, _callback = (error, fileList) ->) ->
		callback = (error, fileList) ->
			_callback(error, fileList)
			_callback = () ->

		# don't include clsi-specific files/directories in the output list
		EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive", "-o", "-name", ".project-*"]
		args = [directory, "(", EXCLUDE_DIRS..., ")", "-prune", "-o", "-type", "f", "-print"]
		logger.log args: args, "running find command"

		proc = spawn("find", args)
		stdout = ""
		proc.stdout.on "data", (chunk) ->
			stdout += chunk.toString()
		proc.on "error", callback
		proc.on "close", (code) ->
			if code != 0
				logger.warn {directory, code}, "find returned error, directory likely doesn't exist"
				return callback null, []
			fileList = stdout.trim().split("\n")
			fileList = fileList.map (file) ->
				# Strip leading directory
				Path.relative(directory, file)
			return callback null, fileList

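# For a compile directory /compile/abc the spawned process is equivalent to
# running (reconstructed from the args array above, not taken from the file):
#   find /compile/abc ( -name .cache -o -name .archive -o -name .project-* ) \
#        -prune -o -type f -print
# i.e. prune the clsi bookkeeping directories and print every remaining file;
# no shell is involved, so the parentheses need no quoting here.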
@@ -1,55 +0,0 @@
fs = require "fs"
Path = require "path"
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
_ = require "underscore"

module.exports = OutputFileOptimiser =

	optimiseFile: (src, dst, callback = (error) ->) ->
		# check output file (src) and see if we can optimise it, storing
		# the result in the build directory (dst)
		if src.match(/\/output\.pdf$/)
			OutputFileOptimiser.checkIfPDFIsOptimised src, (err, isOptimised) ->
				return callback(null) if err? or isOptimised
				OutputFileOptimiser.optimisePDF src, dst, callback
		else
			callback(null)

	checkIfPDFIsOptimised: (file, callback) ->
		SIZE = 16*1024 # check the header of the pdf
		result = new Buffer(SIZE)
		result.fill(0) # prevent leakage of uninitialised buffer
		fs.open file, "r", (err, fd) ->
			return callback(err) if err?
			fs.read fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) ->
				fs.close fd, (errClose) ->
					return callback(errRead) if errRead?
					return callback(errClose) if errClose?
					isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0
					callback(null, isOptimised)

	optimisePDF: (src, dst, callback = (error) ->) ->
		tmpOutput = dst + '.opt'
		args = ["--linearize", src, tmpOutput]
		logger.log args: args, "running qpdf command"

		timer = new Metrics.Timer("qpdf")
		proc = spawn("qpdf", args)
		stdout = ""
		proc.stdout.on "data", (chunk) ->
			stdout += chunk.toString()
		callback = _.once(callback) # avoid double call back for error and close event
		proc.on "error", (err) ->
			logger.warn {err, args}, "qpdf failed"
			callback(null) # ignore the error
		proc.on "close", (code) ->
			timer.done()
			if code != 0
				logger.warn {code, args}, "qpdf returned error"
				return callback(null) # ignore the error
			fs.rename tmpOutput, dst, (err) ->
				if err?
					logger.warn {tmpOutput, dst}, "failed to rename output of qpdf command"
				callback(null) # ignore the error
@@ -1,84 +0,0 @@
UrlCache = require "./UrlCache"
CompileManager = require "./CompileManager"
db = require "./db"
dbQueue = require "./DbQueue"
async = require "async"
logger = require "logger-sharelatex"
oneDay = 24 * 60 * 60 * 1000
Settings = require "settings-sharelatex"

module.exports = ProjectPersistenceManager =

	EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5

	markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
		job = (cb) ->
			db.Project.findOrCreate(where: {project_id: project_id})
				.spread(
					(project, created) ->
						project.updateAttributes(lastAccessed: new Date())
							.then(() -> cb())
							.error cb
				)
				.error cb
		dbQueue.queue.push(job, callback)

	clearExpiredProjects: (callback = (error) ->) ->
		ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->
			return callback(error) if error?
			logger.log project_ids: project_ids, "clearing expired projects"
			jobs = for project_id in (project_ids or [])
				do (project_id) ->
					(callback) ->
						ProjectPersistenceManager.clearProjectFromCache project_id, (err) ->
							if err?
								logger.error err: err, project_id: project_id, "error clearing project"
							callback()
			async.series jobs, (error) ->
				return callback(error) if error?
				CompileManager.clearExpiredProjects ProjectPersistenceManager.EXPIRY_TIMEOUT, (error) ->
					callback() # ignore any errors from deleting directories

	clearProject: (project_id, user_id, callback = (error) ->) ->
		logger.log project_id: project_id, user_id: user_id, "clearing project for user"
		CompileManager.clearProject project_id, user_id, (error) ->
			return callback(error) if error?
			ProjectPersistenceManager.clearProjectFromCache project_id, (error) ->
				return callback(error) if error?
				callback()

	clearProjectFromCache: (project_id, callback = (error) ->) ->
		logger.log project_id: project_id, "clearing project from cache"
		UrlCache.clearProject project_id, (error) ->
			if error?
				logger.err error: error, project_id: project_id, "error clearing project from cache"
				return callback(error)
			ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
				if error?
					logger.err error: error, project_id: project_id, "error clearing project from database"
				callback(error)

	_clearProjectFromDatabase: (project_id, callback = (error) ->) ->
		logger.log project_id: project_id, "clearing project from database"
		job = (cb) ->
			db.Project.destroy(where: {project_id: project_id})
				.then(() -> cb())
				.error cb
		dbQueue.queue.push(job, callback)

	_findExpiredProjectIds: (callback = (error, project_ids) ->) ->
		job = (cb) ->
			keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)
			q = {}
			q[db.op.lt] = keepProjectsFrom
			db.Project.findAll(where: {lastAccessed: q})
				.then((projects) ->
					cb null, projects.map((project) -> project.project_id)
				).error cb

		dbQueue.queue.push(job, callback)

logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"
@@ -1,128 +0,0 @@
settings = require("settings-sharelatex")

module.exports = RequestParser =
	VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
	MAX_TIMEOUT: 600

	parse: (body, callback = (error, data) ->) ->
		response = {}

		if !body.compile?
			return callback "top level object should have a compile attribute"

		compile = body.compile
		compile.options ||= {}

		try
			response.compiler = @_parseAttribute "compiler",
				compile.options.compiler,
				validValues: @VALID_COMPILERS
				default: "pdflatex"
				type: "string"
			response.timeout = @_parseAttribute "timeout",
				compile.options.timeout,
				default: RequestParser.MAX_TIMEOUT
				type: "number"
			response.imageName = @_parseAttribute "imageName",
				compile.options.imageName,
				type: "string"
			response.draft = @_parseAttribute "draft",
				compile.options.draft,
				default: false,
				type: "boolean"
			response.check = @_parseAttribute "check",
				compile.options.check,
				type: "string"
			response.flags = @_parseAttribute "flags",
				compile.options.flags,
				default: [],
				type: "object"

			# The syncType specifies whether the request contains all
			# resources (full) or only those resources to be updated
			# in-place (incremental).
			response.syncType = @_parseAttribute "syncType",
				compile.options.syncType,
				validValues: ["full", "incremental"]
				type: "string"

			# The syncState is an identifier passed in with the request
			# which has the property that it changes when any resource is
			# added, deleted, moved or renamed.
			#
			# on syncType full the syncState identifier is passed in and
			# stored
			#
			# on syncType incremental the syncState identifier must match
			# the stored value
			response.syncState = @_parseAttribute "syncState",
				compile.options.syncState,
				type: "string"

			if response.timeout > RequestParser.MAX_TIMEOUT
				response.timeout = RequestParser.MAX_TIMEOUT
			response.timeout = response.timeout * 1000 # milliseconds

			response.resources = (@_parseResource(resource) for resource in (compile.resources or []))

			rootResourcePath = @_parseAttribute "rootResourcePath",
				compile.rootResourcePath,
				default: "main.tex"
				type: "string"
			originalRootResourcePath = rootResourcePath
			sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath)
			response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath)

			for resource in response.resources
				if resource.path == originalRootResourcePath
					resource.path = sanitizedRootResourcePath
		catch error
			return callback error

		callback null, response

	_parseResource: (resource) ->
		if !resource.path? or typeof resource.path != "string"
			throw "all resources should have a path attribute"

		if resource.modified?
			modified = new Date(resource.modified)
			if isNaN(modified.getTime())
				throw "resource modified date could not be understood: #{resource.modified}"

		if !resource.url? and !resource.content?
			throw "all resources should have either a url or content attribute"
		if resource.content? and typeof resource.content != "string"
			throw "content attribute should be a string"
		if resource.url? and typeof resource.url != "string"
			throw "url attribute should be a string"

		return {
			path: resource.path
			modified: modified
			url: resource.url
			content: resource.content
		}

	_parseAttribute: (name, attribute, options) ->
		if attribute?
			if options.validValues?
				if options.validValues.indexOf(attribute) == -1
					throw "#{name} attribute should be one of: #{options.validValues.join(", ")}"
			if options.type?
				if typeof attribute != options.type
					throw "#{name} attribute should be a #{options.type}"
		else
			return options.default if options.default?
		return attribute

	_sanitizePath: (path) ->
		# See http://php.net/manual/en/function.escapeshellcmd.php
		path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, "")

	_checkPath: (path) ->
		# check that the request does not use a relative path
		for dir in path.split('/')
			if dir == '..'
				throw "relative path in root resource"
		return path
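# A hand-written example request body that this parser accepts (illustrative,
# not from the original file):
#   {
#     "compile": {
#       "options": { "compiler": "pdflatex", "timeout": 60,
#                    "syncType": "incremental", "syncState": "abc123" },
#       "rootResourcePath": "main.tex",
#       "resources": [
#         { "path": "main.tex", "content": "\\documentclass{article}..." },
#         { "path": "figure.png", "url": "http://example.com/figure.png",
#           "modified": 1577836800000 }
#       ]
#     }
#   }
# Timeouts are clamped to MAX_TIMEOUT (600s) and converted to milliseconds.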
@@ -1,72 +0,0 @@
Path = require "path"
fs = require "fs"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
Errors = require "./Errors"
SafeReader = require "./SafeReader"

module.exports = ResourceStateManager =

	# The sync state is an identifier which must match for an
	# incremental update to be allowed.
	#
	# The initial value is passed in and stored on a full
	# compile, along with the list of resources.
	#
	# Subsequent incremental compiles must come with the same value - if
	# not they will be rejected with a 409 Conflict response. The
	# previous list of resources is returned.
	#
	# An incremental compile can only update existing files with new
	# content. The sync state identifier must change if any docs or
	# files are moved, added, deleted or renamed.

	SYNC_STATE_FILE: ".project-sync-state"
	SYNC_STATE_MAX_SIZE: 128*1024

	saveProjectState: (state, resources, basePath, callback = (error) ->) ->
		stateFile = Path.join(basePath, @SYNC_STATE_FILE)
		if not state? # remove the file if no state passed in
			logger.log state: state, basePath: basePath, "clearing sync state"
			fs.unlink stateFile, (err) ->
				if err? and err.code isnt 'ENOENT'
					return callback(err)
				else
					return callback()
		else
			logger.log state: state, basePath: basePath, "writing sync state"
			resourceList = (resource.path for resource in resources)
			fs.writeFile stateFile, [resourceList..., "stateHash:#{state}"].join("\n"), callback

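	# Resulting file layout (reconstructed example, not from the original
	# file) for a project with two resources and state "abc123":
	#   main.tex
	#   images/logo.png
	#   stateHash:abc123
	# i.e. one resource path per line with the state hash as the final line,
	# which is exactly what checkProjectStateMatches destructures below.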
	checkProjectStateMatches: (state, basePath, callback = (error, resources) ->) ->
		stateFile = Path.join(basePath, @SYNC_STATE_FILE)
		size = @SYNC_STATE_MAX_SIZE
		SafeReader.readFile stateFile, size, 'utf8', (err, result, bytesRead) ->
			return callback(err) if err?
			if bytesRead is size
				logger.error file: stateFile, size: size, bytesRead: bytesRead, "project state file truncated"
			[resourceList..., oldState] = result?.toString()?.split("\n") or []
			newState = "stateHash:#{state}"
			logger.log state: state, oldState: oldState, basePath: basePath, stateMatches: (newState is oldState), "checking sync state"
			if newState isnt oldState
				return callback new Errors.FilesOutOfSyncError("invalid state for incremental update")
			else
				resources = ({path: path} for path in resourceList)
				callback(null, resources)

	checkResourceFiles: (resources, allFiles, basePath, callback = (error) ->) ->
		# check the paths are all relative to current directory
		for file in resources or []
			for dir in file?.path?.split('/')
				if dir == '..'
					return callback new Error("relative path in resource file list")
		# check if any of the input files are not present in list of files
		seenFile = {}
		for file in allFiles
			seenFile[file] = true
		missingFiles = (resource.path for resource in resources when not seenFile[resource.path])
		if missingFiles?.length > 0
			logger.err missingFiles: missingFiles, basePath: basePath, allFiles: allFiles, resources: resources, "missing input files for project"
			return callback new Errors.FilesOutOfSyncError("resource files missing in incremental update")
		else
			callback()
@@ -1,142 +0,0 @@
UrlCache = require "./UrlCache"
Path = require "path"
fs = require "fs"
async = require "async"
mkdirp = require "mkdirp"
OutputFileFinder = require "./OutputFileFinder"
ResourceStateManager = require "./ResourceStateManager"
Metrics = require "./Metrics"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")

parallelFileDownloads = settings.parallelFileDownloads or 1

module.exports = ResourceWriter =

	syncResourcesToDisk: (request, basePath, callback = (error, resourceList) ->) ->
		if request.syncType is "incremental"
			logger.log project_id: request.project_id, user_id: request.user_id, "incremental sync"
			ResourceStateManager.checkProjectStateMatches request.syncState, basePath, (error, resourceList) ->
				return callback(error) if error?
				ResourceWriter._removeExtraneousFiles resourceList, basePath, (error, outputFiles, allFiles) ->
					return callback(error) if error?
					ResourceStateManager.checkResourceFiles resourceList, allFiles, basePath, (error) ->
						return callback(error) if error?
						ResourceWriter.saveIncrementalResourcesToDisk request.project_id, request.resources, basePath, (error) ->
							return callback(error) if error?
							callback(null, resourceList)
		else
			logger.log project_id: request.project_id, user_id: request.user_id, "full sync"
			@saveAllResourcesToDisk request.project_id, request.resources, basePath, (error) ->
				return callback(error) if error?
				ResourceStateManager.saveProjectState request.syncState, request.resources, basePath, (error) ->
					return callback(error) if error?
					callback(null, request.resources)

	saveIncrementalResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
		@_createDirectory basePath, (error) =>
			return callback(error) if error?
			jobs = for resource in resources
				do (resource) =>
					(callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
			async.parallelLimit jobs, parallelFileDownloads, callback

	saveAllResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
		@_createDirectory basePath, (error) =>
			return callback(error) if error?
			@_removeExtraneousFiles resources, basePath, (error) =>
				return callback(error) if error?
				jobs = for resource in resources
					do (resource) =>
						(callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
				async.parallelLimit jobs, parallelFileDownloads, callback

	_createDirectory: (basePath, callback = (error) ->) ->
		fs.mkdir basePath, (err) ->
			if err?
				if err.code is 'EEXIST'
					return callback()
				else
					logger.log {err: err, dir: basePath}, "error creating directory"
					return callback(err)
			else
				return callback()

	_removeExtraneousFiles: (resources, basePath, _callback = (error, outputFiles, allFiles) ->) ->
		timer = new Metrics.Timer("unlink-output-files")
		callback = (error, result...) ->
			timer.done()
			_callback(error, result...)

		OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles, allFiles) ->
			return callback(error) if error?

			jobs = []
			for file in outputFiles or []
				do (file) ->
					path = file.path
					should_delete = true
					if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache
						should_delete = false
					if path.match(/^output-.*/) # Tikz cached figures (default case)
						should_delete = false
					if path.match(/\.(pdf|dpth|md5)$/) # Tikz cached figures (by extension)
						should_delete = false
					if path.match(/\.(pygtex|pygstyle)$/) or path.match(/(^|\/)_minted-[^\/]+\//) # minted files/directory
						should_delete = false
					if path.match(/\.md\.tex$/) or path.match(/(^|\/)_markdown_[^\/]+\//) # markdown files/directory
						should_delete = false
					if path.match(/-eps-converted-to\.pdf$/) # Epstopdf generated files
						should_delete = false
					if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv"
						should_delete = true
					if path == "output.tex" # created by TikzManager if present in output files
						should_delete = true
					if should_delete
						jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback

			async.series jobs, (error) ->
				return callback(error) if error?
				callback(null, outputFiles, allFiles)

	_deleteFileIfNotDirectory: (path, callback = (error) ->) ->
		fs.stat path, (error, stat) ->
			if error? and error.code is 'ENOENT'
				return callback()
			else if error?
				logger.err {err: error, path: path}, "error stating file in deleteFileIfNotDirectory"
				return callback(error)
			else if stat.isFile()
				fs.unlink path, (error) ->
					if error?
						logger.err {err: error, path: path}, "error removing file in deleteFileIfNotDirectory"
						callback(error)
					else
						callback()
			else
				callback()

	_writeResourceToDisk: (project_id, resource, basePath, callback = (error) ->) ->
		ResourceWriter.checkPath basePath, resource.path, (error, path) ->
			return callback(error) if error?
			mkdirp Path.dirname(path), (error) ->
				return callback(error) if error?
				# TODO: Don't overwrite file if it hasn't been modified
				if resource.url?
					UrlCache.downloadUrlToFile project_id, resource.url, path, resource.modified, (err) ->
						if err?
							logger.err err: err, project_id: project_id, path: path, resource_url: resource.url, modified: resource.modified, "error downloading file for resources"
						callback() # try and continue compiling even if http resource can not be downloaded at this time
				else
					fs.writeFile path, resource.content, callback

	checkPath: (basePath, resourcePath, callback) ->
		path = Path.normalize(Path.join(basePath, resourcePath))
		if (path.slice(0, basePath.length + 1) != basePath + "/")
			return callback new Error("resource path is outside root directory")
		else
			return callback(null, path)
@@ -1,25 +0,0 @@
fs = require "fs"
logger = require "logger-sharelatex"

module.exports = SafeReader =

	# safely read up to size bytes from a file and return result as a
	# string

	readFile: (file, size, encoding, callback = (error, result) ->) ->
		fs.open file, 'r', (err, fd) ->
			return callback() if err? and err.code is 'ENOENT'
			return callback(err) if err?

			# safely return always closing the file
			callbackWithClose = (err, result...) ->
				fs.close fd, (err1) ->
					return callback(err) if err?
					return callback(err1) if err1?
					callback(null, result...)

			buff = Buffer.alloc(size) # zero-filled, so no stale memory can leak
			fs.read fd, buff, 0, buff.length, 0, (err, bytesRead, buffer) ->
				return callbackWithClose(err) if err?
				result = buffer.toString(encoding, 0, bytesRead)
				callbackWithClose(null, result, bytesRead)
@@ -1,41 +0,0 @@
Path = require("path")
fs = require("fs")
Settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
url = require "url"

module.exports = ForbidSymlinks = (staticFn, root, options) ->
	expressStatic = staticFn root, options
	basePath = Path.resolve(root)
	return (req, res, next) ->
		path = url.parse(req.url)?.pathname
		# check that the path is of the form /project_id_or_name/path/to/file.log
		if result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/)
			project_id = result[1]
			file = result[2]
		else
			logger.warn path: path, "unrecognized file request"
			return res.sendStatus(404)
		# check that the file does not use a relative path
		for dir in file.split('/')
			if dir == '..'
				logger.warn path: path, "attempt to use a relative path"
				return res.sendStatus(404)
		# check that the requested path is normalized
		requestedFsPath = "#{basePath}/#{project_id}/#{file}"
		if requestedFsPath != Path.normalize(requestedFsPath)
			logger.error path: requestedFsPath, "requestedFsPath is not normalized"
			return res.sendStatus(404)
		# check that the requested path is not a symlink
		fs.realpath requestedFsPath, (err, realFsPath) ->
			if err?
				if err.code == 'ENOENT'
					return res.sendStatus(404)
				else
					logger.error err: err, requestedFsPath: requestedFsPath, realFsPath: realFsPath, path: req.params[0], project_id: req.params.project_id, "error checking file access"
					return res.sendStatus(500)
			else if requestedFsPath != realFsPath
				logger.warn requestedFsPath: requestedFsPath, realFsPath: realFsPath, path: req.params[0], project_id: req.params.project_id, "trying to access a different file (symlink), aborting"
				return res.sendStatus(404)
			else
				expressStatic(req, res, next)
@@ -1,37 +0,0 @@
fs = require "fs"
Path = require "path"
ResourceWriter = require "./ResourceWriter"
SafeReader = require "./SafeReader"
logger = require "logger-sharelatex"

# for \tikzexternalize or pstool to work the main file needs to match the
# jobname. Since we set the -jobname to output, we have to create a
# copy of the main file as 'output.tex'.

module.exports = TikzManager =

	checkMainFile: (compileDir, mainFile, resources, callback = (error, needsMainFile) ->) ->
		# if there's already an output.tex file, we don't want to touch it
		for resource in resources
			if resource.path is "output.tex"
				logger.log compileDir: compileDir, mainFile: mainFile, "output.tex already in resources"
				return callback(null, false)
		# if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
		ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
			return callback(error) if error?
			SafeReader.readFile path, 65536, "utf8", (error, content) ->
				return callback(error) if error?
				usesTikzExternalize = content?.indexOf("\\tikzexternalize") >= 0
				usesPsTool = content?.indexOf("{pstool}") >= 0
				logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize: usesTikzExternalize, usesPsTool: usesPsTool, "checked for packages needing main file as output.tex"
				needsMainFile = (usesTikzExternalize || usesPsTool)
				callback null, needsMainFile

	injectOutputFile: (compileDir, mainFile, callback = (error) ->) ->
		ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
			return callback(error) if error?
			fs.readFile path, "utf8", (error, content) ->
				return callback(error) if error?
				logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex as project uses packages which require it"
				# use wx flag to ensure that output file does not already exist
				fs.writeFile Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback
@@ -1,134 +0,0 @@
db = require("./db")
dbQueue = require "./DbQueue"
UrlFetcher = require("./UrlFetcher")
Settings = require("settings-sharelatex")
crypto = require("crypto")
fs = require("fs")
logger = require "logger-sharelatex"
async = require "async"

module.exports = UrlCache =
  downloadUrlToFile: (project_id, url, destPath, lastModified, callback = (error) ->) ->
    UrlCache._ensureUrlIsInCache project_id, url, lastModified, (error, pathToCachedUrl) =>
      return callback(error) if error?
      UrlCache._copyFile pathToCachedUrl, destPath, (error) ->
        if error?
          UrlCache._clearUrlDetails project_id, url, () ->
            callback(error)
        else
          callback(error)

  clearProject: (project_id, callback = (error) ->) ->
    UrlCache._findAllUrlsInProject project_id, (error, urls) ->
      logger.log project_id: project_id, url_count: urls.length, "clearing project URLs"
      return callback(error) if error?
      jobs = for url in (urls or [])
        do (url) ->
          (callback) ->
            UrlCache._clearUrlFromCache project_id, url, (error) ->
              if error?
                logger.error err: error, project_id: project_id, url: url, "error clearing project URL"
              callback()
      async.series jobs, callback

  _ensureUrlIsInCache: (project_id, url, lastModified, callback = (error, pathOnDisk) ->) ->
    if lastModified?
      # MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
      # So round down to seconds
      lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
    UrlCache._doesUrlNeedDownloading project_id, url, lastModified, (error, needsDownloading) =>
      return callback(error) if error?
      if needsDownloading
        logger.log url: url, lastModified: lastModified, "downloading URL"
        UrlFetcher.pipeUrlToFile url, UrlCache._cacheFilePathForUrl(project_id, url), (error) =>
          return callback(error) if error?
          UrlCache._updateOrCreateUrlDetails project_id, url, lastModified, (error) =>
            return callback(error) if error?
            callback null, UrlCache._cacheFilePathForUrl(project_id, url)
      else
        logger.log url: url, lastModified: lastModified, "URL is up to date in cache"
        callback null, UrlCache._cacheFilePathForUrl(project_id, url)

  _doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) ->
    if !lastModified?
      return callback null, true
    UrlCache._findUrlDetails project_id, url, (error, urlDetails) ->
      return callback(error) if error?
      if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime()
        return callback null, true
      else
        return callback null, false

  _cacheFileNameForUrl: (project_id, url) ->
    project_id + ":" + crypto.createHash("md5").update(url).digest("hex")

  _cacheFilePathForUrl: (project_id, url) ->
    "#{Settings.path.clsiCacheDir}/#{UrlCache._cacheFileNameForUrl(project_id, url)}"

  _copyFile: (from, to, _callback = (error) ->) ->
    callbackOnce = (error) ->
      if error?
        logger.error err: error, from: from, to: to, "error copying file from cache"
      _callback(error)
      _callback = () ->
    writeStream = fs.createWriteStream(to)
    readStream = fs.createReadStream(from)
    writeStream.on "error", callbackOnce
    readStream.on "error", callbackOnce
    writeStream.on "close", callbackOnce
    writeStream.on "open", () ->
      readStream.pipe(writeStream)

  _clearUrlFromCache: (project_id, url, callback = (error) ->) ->
    UrlCache._clearUrlDetails project_id, url, (error) ->
      return callback(error) if error?
      UrlCache._deleteUrlCacheFromDisk project_id, url, (error) ->
        return callback(error) if error?
        callback null

  _deleteUrlCacheFromDisk: (project_id, url, callback = (error) ->) ->
    fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), (error) ->
      if error? and error.code != 'ENOENT' # no error if the file isn't present
        return callback(error)
      else
        return callback()

  _findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
    job = (cb) ->
      db.UrlCache.find(where: { url: url, project_id: project_id })
        .then((urlDetails) -> cb null, urlDetails)
        .error cb
    dbQueue.queue.push job, callback

  _updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
    job = (cb) ->
      db.UrlCache.findOrCreate(where: {url: url, project_id: project_id})
        .spread(
          (urlDetails, created) ->
            urlDetails.updateAttributes(lastModified: lastModified)
              .then(() -> cb())
              .error(cb)
        )
        .error cb
    dbQueue.queue.push(job, callback)

  _clearUrlDetails: (project_id, url, callback = (error) ->) ->
    job = (cb) ->
      db.UrlCache.destroy(where: {url: url, project_id: project_id})
        .then(() -> cb null)
        .error cb
    dbQueue.queue.push(job, callback)

  _findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->
    job = (cb) ->
      db.UrlCache.findAll(where: { project_id: project_id })
        .then(
          (urlEntries) ->
            cb null, urlEntries.map((entry) -> entry.url)
        )
        .error cb
    dbQueue.queue.push(job, callback)
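Reviewer note: the cache layout used by _cacheFileNameForUrl/_cacheFilePathForUrl above, as a standalone JavaScript sketch (illustrative only; clsiCacheDir comes from settings):

const crypto = require('crypto')

// one cached copy per (project, url): '<clsiCacheDir>/<project_id>:<md5(url)>'
function cacheFilePathForUrl(clsiCacheDir, projectId, url) {
  const hash = crypto.createHash('md5').update(url).digest('hex')
  return `${clsiCacheDir}/${projectId}:${hash}`
}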
@@ -1,70 +0,0 @@
request = require("request").defaults(jar: false)
fs = require("fs")
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
URL = require('url')

oneMinute = 60 * 1000

module.exports = UrlFetcher =
  pipeUrlToFile: (url, filePath, _callback = (error) ->) ->
    callbackOnce = (error) ->
      clearTimeout timeoutHandler if timeoutHandler?
      _callback(error)
      _callback = () ->

    if settings.filestoreDomainOveride?
      p = URL.parse(url).path
      url = "#{settings.filestoreDomainOveride}#{p}"
    timeoutHandler = setTimeout () ->
      timeoutHandler = null
      logger.error url: url, filePath: filePath, "Timed out downloading file to cache"
      callbackOnce(new Error("Timed out downloading file to cache #{url}"))
    # FIXME: maybe need to close fileStream here
    , 3 * oneMinute

    logger.log url: url, filePath: filePath, "started downloading url to cache"
    urlStream = request.get({url: url, timeout: oneMinute})
    urlStream.pause() # stop data flowing until we are ready

    # attach handlers before setting up pipes
    urlStream.on "error", (error) ->
      logger.error err: error, url: url, filePath: filePath, "error downloading url"
      callbackOnce(error or new Error("Something went wrong downloading the URL #{url}"))

    urlStream.on "end", () ->
      logger.log url: url, filePath: filePath, "finished downloading file into cache"

    urlStream.on "response", (res) ->
      if res.statusCode >= 200 and res.statusCode < 300
        fileStream = fs.createWriteStream(filePath)

        # attach handlers before setting up pipes
        fileStream.on 'error', (error) ->
          logger.error err: error, url: url, filePath: filePath, "error writing file into cache"
          fs.unlink filePath, (err) ->
            if err?
              logger.err err: err, filePath: filePath, "error deleting file from cache"
            callbackOnce(error)

        fileStream.on 'finish', () ->
          logger.log url: url, filePath: filePath, "finished writing file into cache"
          callbackOnce()

        fileStream.on 'pipe', () ->
          logger.log url: url, filePath: filePath, "piping into filestream"

        urlStream.pipe(fileStream)
        urlStream.resume() # now we are ready to handle the data
      else
        logger.error statusCode: res.statusCode, url: url, filePath: filePath, "unexpected status code downloading url to cache"
        # https://nodejs.org/api/http.html#http_class_http_clientrequest
        # If you add a 'response' event handler, then you must consume
        # the data from the response object, either by calling
        # response.read() whenever there is a 'readable' event, or by
        # adding a 'data' handler, or by calling the .resume()
        # method. Until the data is consumed, the 'end' event will not
        # fire. Also, until the data is read it will consume memory
        # that can eventually lead to a 'process out of memory' error.
        urlStream.resume() # discard the data
        callbackOnce(new Error("URL returned non-success status code: #{res.statusCode} #{url}"))
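Reviewer note: the callbackOnce / `_callback = () ->` pattern above is the usual call-once guard for multi-event streams; the same idiom in plain JavaScript, as a sketch (not part of this diff):

// wrap a node-style callback so repeated error/close/end events are ignored
function once(fn) {
  let called = false
  return (...args) => {
    if (called) return
    called = true
    fn(...args)
  }
}

// usage: const done = once(callback)
// writeStream.on('error', done); writeStream.on('close', () => done())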
@@ -1,55 +0,0 @@
Sequelize = require("sequelize")
Settings = require("settings-sharelatex")
_ = require("underscore")
logger = require "logger-sharelatex"

options = _.extend {logging: false}, Settings.mysql.clsi

logger.log dbPath: Settings.mysql.clsi.storage, "connecting to db"

sequelize = new Sequelize(
  Settings.mysql.clsi.database,
  Settings.mysql.clsi.username,
  Settings.mysql.clsi.password,
  options
)

if Settings.mysql.clsi.dialect == "sqlite"
  logger.log "running PRAGMA journal_mode=WAL;"
  sequelize.query("PRAGMA journal_mode=WAL;")
  sequelize.query("PRAGMA synchronous=OFF;")
  sequelize.query("PRAGMA read_uncommitted = true;")

module.exports =
  UrlCache: sequelize.define("UrlCache", {
    url: Sequelize.STRING
    project_id: Sequelize.STRING
    lastModified: Sequelize.DATE
  }, {
    indexes: [
      {fields: ['url', 'project_id']},
      {fields: ['project_id']}
    ]
  })

  Project: sequelize.define("Project", {
    project_id: {type: Sequelize.STRING, primaryKey: true}
    lastAccessed: Sequelize.DATE
  }, {
    indexes: [
      {fields: ['lastAccessed']}
    ]
  })

  op: Sequelize.Op

  sync: () ->
    logger.log dbPath: Settings.mysql.clsi.storage, "syncing db schema"
    sequelize.sync()
      .then(->
        logger.log "db sync complete"
      ).catch((err) ->
        console.log err, "error syncing"
      )
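Reviewer note: sync() is what creates these tables at boot; an assumed call site (app.js is outside this excerpt, so this is a sketch, not the actual wiring):

const db = require('./app/js/db')

// run once at startup so the UrlCache and Project tables exist
db.sync()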
20 app/js/CommandRunner.js Normal file
@@ -0,0 +1,20 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
 * decaffeinate suggestions:
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let commandRunnerPath
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')

if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
  commandRunnerPath = './DockerRunner'
} else {
  commandRunnerPath = './LocalCommandRunner'
}
logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
const CommandRunner = require(commandRunnerPath)

module.exports = CommandRunner
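Reviewer note: the runner is selected by Settings.clsi.dockerRunner; an assumed settings fragment for reference (shape and image name are placeholders, not from this diff):

module.exports = {
  clsi: {
    dockerRunner: true, // true => './DockerRunner', false => './LocalCommandRunner'
    docker: { image: 'texlive-full:2017.1' } // placeholder image name
  }
}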
238 app/js/CompileController.js Normal file
@@ -0,0 +1,238 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CompileController
const RequestParser = require('./RequestParser')
const CompileManager = require('./CompileManager')
const Settings = require('settings-sharelatex')
const Metrics = require('./Metrics')
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
const logger = require('logger-sharelatex')
const Errors = require('./Errors')

module.exports = CompileController = {
  compile(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const timer = new Metrics.Timer('compile-request')
    return RequestParser.parse(req.body, function(error, request) {
      if (error != null) {
        return next(error)
      }
      request.project_id = req.params.project_id
      if (req.params.user_id != null) {
        request.user_id = req.params.user_id
      }
      return ProjectPersistenceManager.markProjectAsJustAccessed(
        request.project_id,
        function(error) {
          if (error != null) {
            return next(error)
          }
          return CompileManager.doCompileWithLock(request, function(
            error,
            outputFiles
          ) {
            let code, status
            if (outputFiles == null) {
              outputFiles = []
            }
            if (error instanceof Errors.AlreadyCompilingError) {
              code = 423 // Http 423 Locked
              status = 'compile-in-progress'
            } else if (error instanceof Errors.FilesOutOfSyncError) {
              code = 409 // Http 409 Conflict
              status = 'retry'
            } else if (error != null ? error.terminated : undefined) {
              status = 'terminated'
            } else if (error != null ? error.validate : undefined) {
              status = `validation-${error.validate}`
            } else if (error != null ? error.timedout : undefined) {
              status = 'timedout'
              logger.log(
                { err: error, project_id: request.project_id },
                'timeout running compile'
              )
            } else if (error != null) {
              status = 'error'
              code = 500
              logger.warn(
                { err: error, project_id: request.project_id },
                'error running compile'
              )
            } else {
              let file
              status = 'failure'
              for (file of Array.from(outputFiles)) {
                if (
                  file.path != null
                    ? file.path.match(/output\.pdf$/)
                    : undefined
                ) {
                  status = 'success'
                }
              }

              if (status === 'failure') {
                logger.warn(
                  { project_id: request.project_id, outputFiles },
                  'project failed to compile successfully, no output.pdf generated'
                )
              }

              // log an error if any core files are found
              for (file of Array.from(outputFiles)) {
                if (file.path === 'core') {
                  logger.error(
                    { project_id: request.project_id, req, outputFiles },
                    'core file found in output'
                  )
                }
              }
            }

            if (error != null) {
              outputFiles = error.outputFiles || []
            }

            timer.done()
            return res.status(code || 200).send({
              compile: {
                status,
                error: (error != null ? error.message : undefined) || error,
                outputFiles: outputFiles.map(file => ({
                  url:
                    `${Settings.apis.clsi.url}/project/${request.project_id}` +
                    (request.user_id != null
                      ? `/user/${request.user_id}`
                      : '') +
                    (file.build != null ? `/build/${file.build}` : '') +
                    `/output/${file.path}`,
                  path: file.path,
                  type: file.type,
                  build: file.build
                }))
              }
            })
          })
        }
      )
    })
  },

  stopCompile(req, res, next) {
    const { project_id, user_id } = req.params
    return CompileManager.stopCompile(project_id, user_id, function(error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  clearCache(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    return ProjectPersistenceManager.clearProject(
      req.params.project_id,
      req.params.user_id,
      function(error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  }, // No content

  syncFromCode(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const { file } = req.query
    const line = parseInt(req.query.line, 10)
    const column = parseInt(req.query.column, 10)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromCode(
      project_id,
      user_id,
      file,
      line,
      column,
      function(error, pdfPositions) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          pdf: pdfPositions
        })
      }
    )
  },

  syncFromPdf(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const page = parseInt(req.query.page, 10)
    const h = parseFloat(req.query.h)
    const v = parseFloat(req.query.v)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
      error,
      codePositions
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({
        code: codePositions
      })
    })
  },

  wordcount(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const file = req.query.file || 'main.tex'
    const { project_id } = req.params
    const { user_id } = req.params
    const { image } = req.query
    logger.log({ image, file, project_id }, 'word count request')

    return CompileManager.wordcount(project_id, user_id, file, image, function(
      error,
      result
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({
        texcount: result
      })
    })
  },

  status(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    return res.send('OK')
  }
}
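Reviewer note: how these handlers are typically mounted. The real routes live in app.js, which is not in this excerpt, so the paths below are assumptions, shown only to make the handler signatures concrete:

const express = require('express')
const CompileController = require('./app/js/CompileController')

const app = express()
app.post('/project/:project_id/compile', CompileController.compile)
app.post('/project/:project_id/compile/stop', CompileController.stopCompile)
app.delete('/project/:project_id', CompileController.clearCache)
app.get('/project/:project_id/sync/code', CompileController.syncFromCode)
app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf)
app.get('/project/:project_id/wordcount', CompileController.wordcount)
app.get('/status', CompileController.status)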
705 app/js/CompileManager.js Normal file
@@ -0,0 +1,705 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-undef,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CompileManager
const ResourceWriter = require('./ResourceWriter')
const LatexRunner = require('./LatexRunner')
const OutputFileFinder = require('./OutputFileFinder')
const OutputCacheManager = require('./OutputCacheManager')
const Settings = require('settings-sharelatex')
const Path = require('path')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const child_process = require('child_process')
const DraftModeManager = require('./DraftModeManager')
const TikzManager = require('./TikzManager')
const LockManager = require('./LockManager')
const fs = require('fs')
const fse = require('fs-extra')
const os = require('os')
const async = require('async')
const Errors = require('./Errors')
const CommandRunner = require('./CommandRunner')

const getCompileName = function(project_id, user_id) {
  if (user_id != null) {
    return `${project_id}-${user_id}`
  } else {
    return project_id
  }
}

const getCompileDir = (project_id, user_id) =>
  Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))

module.exports = CompileManager = {
  doCompileWithLock(request, callback) {
    if (callback == null) {
      callback = function(error, outputFiles) {}
    }
    const compileDir = getCompileDir(request.project_id, request.user_id)
    const lockFile = Path.join(compileDir, '.project-lock')
    // use a .project-lock file in the compile directory to prevent
    // simultaneous compiles
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        return callback(error)
      }
      return LockManager.runWithLock(
        lockFile,
        releaseLock => CompileManager.doCompile(request, releaseLock),
        callback
      )
    })
  },

  doCompile(request, callback) {
    if (callback == null) {
      callback = function(error, outputFiles) {}
    }
    const compileDir = getCompileDir(request.project_id, request.user_id)
    let timer = new Metrics.Timer('write-to-disk')
    logger.log(
      { project_id: request.project_id, user_id: request.user_id },
      'syncing resources to disk'
    )
    return ResourceWriter.syncResourcesToDisk(request, compileDir, function(
      error,
      resourceList
    ) {
      // NOTE: resourceList is insecure, it should only be used to exclude files from the output list
      if (error != null && error instanceof Errors.FilesOutOfSyncError) {
        logger.warn(
          { project_id: request.project_id, user_id: request.user_id },
          'files out of sync, please retry'
        )
        return callback(error)
      } else if (error != null) {
        logger.err(
          {
            err: error,
            project_id: request.project_id,
            user_id: request.user_id
          },
          'error writing resources to disk'
        )
        return callback(error)
      }
      logger.log(
        {
          project_id: request.project_id,
          user_id: request.user_id,
          time_taken: Date.now() - timer.start
        },
        'written files to disk'
      )
      timer.done()

      const injectDraftModeIfRequired = function(callback) {
        if (request.draft) {
          return DraftModeManager.injectDraftMode(
            Path.join(compileDir, request.rootResourcePath),
            callback
          )
        } else {
          return callback()
        }
      }

      const createTikzFileIfRequired = callback =>
        TikzManager.checkMainFile(
          compileDir,
          request.rootResourcePath,
          resourceList,
          function(error, needsMainFile) {
            if (error != null) {
              return callback(error)
            }
            if (needsMainFile) {
              return TikzManager.injectOutputFile(
                compileDir,
                request.rootResourcePath,
                callback
              )
            } else {
              return callback()
            }
          }
        )
      // set up environment variables for chktex
      const env = {}
      // only run chktex on LaTeX files (not knitr .Rtex files or any others)
      const isLaTeXFile =
        request.rootResourcePath != null
          ? request.rootResourcePath.match(/\.tex$/i)
          : undefined
      if (request.check != null && isLaTeXFile) {
        env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'
        env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'
        if (request.check === 'error') {
          env.CHKTEX_EXIT_ON_ERROR = 1
        }
        if (request.check === 'validate') {
          env.CHKTEX_VALIDATE = 1
        }
      }

      // apply a series of file modifications/creations for draft mode and tikz
      return async.series(
        [injectDraftModeIfRequired, createTikzFileIfRequired],
        function(error) {
          if (error != null) {
            return callback(error)
          }
          timer = new Metrics.Timer('run-compile')
          // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
          let tag =
            __guard__(
              __guard__(
                request.imageName != null
                  ? request.imageName.match(/:(.*)/)
                  : undefined,
                x1 => x1[1]
              ),
              x => x.replace(/\./g, '-')
            ) || 'default'
          if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
            tag = 'other'
          } // exclude smoke test
          Metrics.inc('compiles')
          Metrics.inc(`compiles-with-image.${tag}`)
          const compileName = getCompileName(
            request.project_id,
            request.user_id
          )
          return LatexRunner.runLatex(
            compileName,
            {
              directory: compileDir,
              mainFile: request.rootResourcePath,
              compiler: request.compiler,
              timeout: request.timeout,
              image: request.imageName,
              flags: request.flags,
              environment: env
            },
            function(error, output, stats, timings) {
              // request was for validation only
              let metric_key, metric_value
              if (request.check === 'validate') {
                const result = (error != null
                ? error.code
                : undefined)
                  ? 'fail'
                  : 'pass'
                error = new Error('validation')
                error.validate = result
              }
              // request was for compile, and failed on validation
              if (
                request.check === 'error' &&
                (error != null ? error.message : undefined) === 'exited'
              ) {
                error = new Error('compilation')
                error.validate = 'fail'
              }
              // compile was killed by user, was a validation, or a compile which failed validation
              if (
                (error != null ? error.terminated : undefined) ||
                (error != null ? error.validate : undefined) ||
                (error != null ? error.timedout : undefined)
              ) {
                OutputFileFinder.findOutputFiles(
                  resourceList,
                  compileDir,
                  function(err, outputFiles) {
                    if (err != null) {
                      return callback(err)
                    }
                    error.outputFiles = outputFiles // return output files so user can check logs
                    return callback(error)
                  }
                )
                return
              }
              // compile completed normally
              if (error != null) {
                return callback(error)
              }
              Metrics.inc('compiles-succeeded')
              const object = stats || {}
              for (metric_key in object) {
                metric_value = object[metric_key]
                Metrics.count(metric_key, metric_value)
              }
              const object1 = timings || {}
              for (metric_key in object1) {
                metric_value = object1[metric_key]
                Metrics.timing(metric_key, metric_value)
              }
              const loadavg =
                typeof os.loadavg === 'function' ? os.loadavg() : undefined
              if (loadavg != null) {
                Metrics.gauge('load-avg', loadavg[0])
              }
              const ts = timer.done()
              logger.log(
                {
                  project_id: request.project_id,
                  user_id: request.user_id,
                  time_taken: ts,
                  stats,
                  timings,
                  loadavg
                },
                'done compile'
              )
              if ((stats != null ? stats['latex-runs'] : undefined) > 0) {
                Metrics.timing('run-compile-per-pass', ts / stats['latex-runs'])
              }
              if (
                (stats != null ? stats['latex-runs'] : undefined) > 0 &&
                (timings != null ? timings['cpu-time'] : undefined) > 0
              ) {
                Metrics.timing(
                  'run-compile-cpu-time-per-pass',
                  timings['cpu-time'] / stats['latex-runs']
                )
              }

              return OutputFileFinder.findOutputFiles(
                resourceList,
                compileDir,
                function(error, outputFiles) {
                  if (error != null) {
                    return callback(error)
                  }
                  return OutputCacheManager.saveOutputFiles(
                    outputFiles,
                    compileDir,
                    (error, newOutputFiles) => callback(null, newOutputFiles)
                  )
                }
              )
            }
          )
        }
      )
    })
  },

  stopCompile(project_id, user_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const compileName = getCompileName(project_id, user_id)
    return LatexRunner.killLatex(compileName, callback)
  },

  clearProject(project_id, user_id, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callback = function(error) {
      _callback(error)
      return (_callback = function() {})
    }

    const compileDir = getCompileDir(project_id, user_id)

    return CompileManager._checkDirectory(compileDir, function(err, exists) {
      if (err != null) {
        return callback(err)
      }
      if (!exists) {
        return callback()
      } // skip removal if no directory present

      const proc = child_process.spawn('rm', ['-r', compileDir])

      proc.on('error', callback)

      let stderr = ''
      proc.stderr.on('data', chunk => (stderr += chunk.toString()))

      return proc.on('close', function(code) {
        if (code === 0) {
          return callback(null)
        } else {
          return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`))
        }
      })
    })
  },

  _findAllDirs(callback) {
    if (callback == null) {
      callback = function(error, allDirs) {}
    }
    const root = Settings.path.compilesDir
    return fs.readdir(root, function(err, files) {
      if (err != null) {
        return callback(err)
      }
      const allDirs = Array.from(files).map(file => Path.join(root, file))
      return callback(null, allDirs)
    })
  },

  clearExpiredProjects(max_cache_age_ms, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const now = Date.now()
    // action for each directory
    const expireIfNeeded = (checkDir, cb) =>
      fs.stat(checkDir, function(err, stats) {
        if (err != null) {
          return cb()
        } // ignore errors checking directory
        const age = now - stats.mtime
        const hasExpired = age > max_cache_age_ms
        if (hasExpired) {
          return fse.remove(checkDir, cb)
        } else {
          return cb()
        }
      })
    // iterate over all project directories
    return CompileManager._findAllDirs(function(error, allDirs) {
      if (error != null) {
        return callback()
      }
      return async.eachSeries(allDirs, expireIfNeeded, callback)
    })
  },

  _checkDirectory(compileDir, callback) {
    if (callback == null) {
      callback = function(error, exists) {}
    }
    return fs.lstat(compileDir, function(err, stats) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        return callback(null, false) // directory does not exist
      } else if (err != null) {
        logger.err(
          { dir: compileDir, err },
          'error on stat of project directory for removal'
        )
        return callback(err)
      } else if (!(stats != null ? stats.isDirectory() : undefined)) {
        logger.err(
          { dir: compileDir, stats },
          'bad project directory for removal'
        )
        return callback(new Error('project directory is not directory'))
      } else {
        return callback(null, true)
      }
    })
  }, // directory exists

  syncFromCode(project_id, user_id, file_name, line, column, callback) {
    // If LaTeX was run in a virtual environment, the file path that synctex expects
    // might not match the file path on the host. The .synctex.gz file however, will be accessed
    // wherever it is on the host.
    if (callback == null) {
      callback = function(error, pdfPositions) {}
    }
    const compileName = getCompileName(project_id, user_id)
    const base_dir = Settings.path.synctexBaseDir(compileName)
    const file_path = base_dir + '/' + file_name
    const compileDir = getCompileDir(project_id, user_id)
    const synctex_path = `${base_dir}/output.pdf`
    const command = ['code', synctex_path, file_path, line, column]
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        logger.err(
          { error, project_id, user_id, file_name },
          'error ensuring dir for sync from code'
        )
        return callback(error)
      }
      return CompileManager._runSynctex(project_id, user_id, command, function(
        error,
        stdout
      ) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { project_id, user_id, file_name, line, column, command, stdout },
          'synctex code output'
        )
        return callback(
          null,
          CompileManager._parseSynctexFromCodeOutput(stdout)
        )
      })
    })
  },

  syncFromPdf(project_id, user_id, page, h, v, callback) {
    if (callback == null) {
      callback = function(error, filePositions) {}
    }
    const compileName = getCompileName(project_id, user_id)
    const compileDir = getCompileDir(project_id, user_id)
    const base_dir = Settings.path.synctexBaseDir(compileName)
    const synctex_path = `${base_dir}/output.pdf`
    const command = ['pdf', synctex_path, page, h, v]
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        logger.err(
          { error, project_id, user_id, file_name },
          'error ensuring dir for sync to code'
        )
        return callback(error)
      }
      return CompileManager._runSynctex(project_id, user_id, command, function(
        error,
        stdout
      ) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { project_id, user_id, page, h, v, stdout },
          'synctex pdf output'
        )
        return callback(
          null,
          CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
        )
      })
    })
  },

  _checkFileExists(path, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const synctexDir = Path.dirname(path)
    const synctexFile = Path.join(synctexDir, 'output.synctex.gz')
    return fs.stat(synctexDir, function(error, stats) {
      if ((error != null ? error.code : undefined) === 'ENOENT') {
        return callback(
          new Errors.NotFoundError('called synctex with no output directory')
        )
      }
      if (error != null) {
        return callback(error)
      }
      return fs.stat(synctexFile, function(error, stats) {
        if ((error != null ? error.code : undefined) === 'ENOENT') {
          return callback(
            new Errors.NotFoundError('called synctex with no output file')
          )
        }
        if (error != null) {
          return callback(error)
        }
        if (!(stats != null ? stats.isFile() : undefined)) {
          return callback(new Error('not a file'))
        }
        return callback()
      })
    })
  },

  _runSynctex(project_id, user_id, command, callback) {
    if (callback == null) {
      callback = function(error, stdout) {}
    }
    const seconds = 1000

    command.unshift('/opt/synctex')

    const directory = getCompileDir(project_id, user_id)
    const timeout = 60 * 1000 // increased to allow for large projects
    const compileName = getCompileName(project_id, user_id)
    return CommandRunner.run(
      compileName,
      command,
      directory,
      Settings.clsi != null ? Settings.clsi.docker.image : undefined,
      timeout,
      {},
      function(error, output) {
        if (error != null) {
          logger.err(
            { err: error, command, project_id, user_id },
            'error running synctex'
          )
          return callback(error)
        }
        return callback(null, output.stdout)
      }
    )
  },

  _parseSynctexFromCodeOutput(output) {
    const results = []
    for (const line of Array.from(output.split('\n'))) {
      const [node, page, h, v, width, height] = Array.from(line.split('\t'))
      if (node === 'NODE') {
        results.push({
          page: parseInt(page, 10),
          h: parseFloat(h),
          v: parseFloat(v),
          height: parseFloat(height),
          width: parseFloat(width)
        })
      }
    }
    return results
  },

  _parseSynctexFromPdfOutput(output, base_dir) {
    const results = []
    for (let line of Array.from(output.split('\n'))) {
      let column, file_path, node
      ;[node, file_path, line, column] = Array.from(line.split('\t'))
      if (node === 'NODE') {
        const file = file_path.slice(base_dir.length + 1)
        results.push({
          file,
          line: parseInt(line, 10),
          column: parseInt(column, 10)
        })
      }
    }
    return results
  },

  wordcount(project_id, user_id, file_name, image, callback) {
    if (callback == null) {
      callback = function(error, pdfPositions) {}
    }
    logger.log({ project_id, user_id, file_name, image }, 'running wordcount')
    const file_path = `$COMPILE_DIR/${file_name}`
    const command = [
      'texcount',
      '-nocol',
      '-inc',
      file_path,
      `-out=${file_path}.wc`
    ]
    const compileDir = getCompileDir(project_id, user_id)
    const timeout = 60 * 1000
    const compileName = getCompileName(project_id, user_id)
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        logger.err(
          { error, project_id, user_id, file_name },
          'error ensuring dir for sync from code'
        )
        return callback(error)
      }
      return CommandRunner.run(
        compileName,
        command,
        compileDir,
        image,
        timeout,
        {},
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return fs.readFile(
            compileDir + '/' + file_name + '.wc',
            'utf-8',
            function(err, stdout) {
              if (err != null) {
                // call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
                logger.err(
                  { node_err: err, command, compileDir, project_id, user_id },
                  'error reading word count output'
                )
                return callback(err)
              }
              const results = CompileManager._parseWordcountFromOutput(stdout)
              logger.log(
                { project_id, user_id, wordcount: results },
                'word count results'
              )
              return callback(null, results)
            }
          )
        }
      )
    })
  },

  _parseWordcountFromOutput(output) {
    const results = {
      encode: '',
      textWords: 0,
      headWords: 0,
      outside: 0,
      headers: 0,
      elements: 0,
      mathInline: 0,
      mathDisplay: 0,
      errors: 0,
      messages: ''
    }
    for (const line of Array.from(output.split('\n'))) {
      const [data, info] = Array.from(line.split(':'))
      if (data.indexOf('Encoding') > -1) {
        results.encode = info.trim()
      }
      if (data.indexOf('in text') > -1) {
        results.textWords = parseInt(info, 10)
      }
      if (data.indexOf('in head') > -1) {
        results.headWords = parseInt(info, 10)
      }
      if (data.indexOf('outside') > -1) {
        results.outside = parseInt(info, 10)
      }
      if (data.indexOf('of head') > -1) {
        results.headers = parseInt(info, 10)
      }
      if (data.indexOf('Number of floats/tables/figures') > -1) {
        results.elements = parseInt(info, 10)
      }
      if (data.indexOf('Number of math inlines') > -1) {
        results.mathInline = parseInt(info, 10)
      }
      if (data.indexOf('Number of math displayed') > -1) {
        results.mathDisplay = parseInt(info, 10)
      }
      if (data === '(errors') {
        // errors reported as (errors:123)
        results.errors = parseInt(info, 10)
      }
      if (line.indexOf('!!! ') > -1) {
        // errors logged as !!! message !!!
        results.messages += line + '\n'
      }
    }
    return results
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
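Reviewer note: sample input/output for the two synctex parsers above (the values are invented for illustration; the /opt/synctex wrapper emits tab-separated NODE lines):

// 'code' direction: NODE<TAB>page<TAB>h<TAB>v<TAB>width<TAB>height
// CompileManager._parseSynctexFromCodeOutput('NODE\t1\t133.7\t610.6\t343.7\t11.9\n')
//   => [{ page: 1, h: 133.7, v: 610.6, height: 11.9, width: 343.7 }]

// 'pdf' direction: NODE<TAB>file_path<TAB>line<TAB>column
// CompileManager._parseSynctexFromPdfOutput('NODE\t/compile/main.tex\t42\t0\n', '/compile')
//   => [{ file: 'main.tex', line: 42, column: 0 }]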
38 app/js/ContentTypeMapper.js Normal file
@@ -0,0 +1,38 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let ContentTypeMapper
const Path = require('path')

// here we coerce html, css and js to text/plain,
// otherwise choose correct mime type based on file extension,
// falling back to octet-stream
module.exports = ContentTypeMapper = {
  map(path) {
    switch (Path.extname(path)) {
      case '.txt':
      case '.html':
      case '.js':
      case '.css':
      case '.svg':
        return 'text/plain'
      case '.csv':
        return 'text/csv'
      case '.pdf':
        return 'application/pdf'
      case '.png':
        return 'image/png'
      case '.jpg':
      case '.jpeg':
        return 'image/jpeg'
      case '.tiff':
        return 'image/tiff'
      case '.gif':
        return 'image/gif'
      default:
        return 'application/octet-stream'
    }
  }
}
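Reviewer note: quick behaviour check for the mapper (illustrative file names, not from this diff):

const ContentTypeMapper = require('./app/js/ContentTypeMapper')

ContentTypeMapper.map('output.pdf') // => 'application/pdf'
ContentTypeMapper.map('diagram.svg') // => 'text/plain' (deliberately inert, per the comment above)
ContentTypeMapper.map('output.log') // => 'application/octet-stream'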
18 app/js/DbQueue.js Normal file
@@ -0,0 +1,18 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const async = require('async')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const queue = async.queue(
  (task, cb) => task(cb),
  Settings.parallelSqlQueryLimit
)

queue.drain = () => logger.debug('all items have been processed')

module.exports = { queue }
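Reviewer note: a task pushed onto this queue is just a function taking a node-style callback, matching the UrlCache callers above; a sketch (illustrative only):

const dbQueue = require('./app/js/DbQueue')

// any function of the shape (cb) => cb(error, result) can be queued
const job = cb => cb(null, 'result')
dbQueue.queue.push(job, (error, result) => {
  // invoked when one of the Settings.parallelSqlQueryLimit worker slots frees up
})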
113 app/js/DockerLockManager.js Normal file
@@ -0,0 +1,113 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const logger = require('logger-sharelatex')

const LockState = {} // locks for docker container operations, by container name

module.exports = LockManager = {
  MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
  MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
  LOCK_TEST_INTERVAL: 1000, // retry time

  tryLock(key, callback) {
    let lockValue
    if (callback == null) {
      callback = function(err, gotLock) {}
    }
    const existingLock = LockState[key]
    if (existingLock != null) {
      // the lock is already taken, check how old it is
      const lockAge = Date.now() - existingLock.created
      if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
        return callback(null, false) // we didn't get the lock, bail out
      } else {
        logger.error(
          { key, lock: existingLock, age: lockAge },
          'taking old lock by force'
        )
      }
    }
    // take the lock
    LockState[key] = lockValue = { created: Date.now() }
    return callback(null, true, lockValue)
  },

  getLock(key, callback) {
    let attempt
    if (callback == null) {
      callback = function(error, lockValue) {}
    }
    const startTime = Date.now()
    return (attempt = () =>
      LockManager.tryLock(key, function(error, gotLock, lockValue) {
        if (error != null) {
          return callback(error)
        }
        if (gotLock) {
          return callback(null, lockValue)
        } else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
          const e = new Error('Lock timeout')
          e.key = key
          return callback(e)
        } else {
          return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
        }
      }))()
  },

  releaseLock(key, lockValue, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const existingLock = LockState[key]
    if (existingLock === lockValue) {
      // lockValue is an object, so we can test by reference
      delete LockState[key] // our lock, so we can free it
      return callback()
    } else if (existingLock != null) {
      // lock exists but doesn't match ours
      logger.error(
        { key, lock: existingLock },
        'tried to release lock taken by force'
      )
      return callback()
    } else {
      logger.error(
        { key, lock: existingLock },
        'tried to release lock that has gone'
      )
      return callback()
    }
  },

  runWithLock(key, runner, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return LockManager.getLock(key, function(error, lockValue) {
      if (error != null) {
        return callback(error)
      }
      return runner((error1, ...args) =>
        LockManager.releaseLock(key, lockValue, function(error2) {
          error = error1 || error2
          if (error != null) {
            return callback(error)
          }
          return callback(null, ...Array.from(args))
        })
      )
    })
  }
}
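Reviewer note: runWithLock hands the runner a release callback that must be called exactly once; a sketch with a hypothetical key and result (not from this diff):

const LockManager = require('./app/js/DockerLockManager')

LockManager.runWithLock(
  'project-abc123-fingerprint', // hypothetical container name
  releaseLock => {
    // ... perform the docker operation ...
    releaseLock(null, 'output') // frees the lock and forwards the result
  },
  (error, output) => {
    // called after the lock has been released
  }
)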
695 app/js/DockerRunner.js Normal file
@@ -0,0 +1,695 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DockerRunner, oneHour
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Docker = require('dockerode')
const dockerode = new Docker()
const crypto = require('crypto')
const async = require('async')
const LockManager = require('./DockerLockManager')
const fs = require('fs')
const Path = require('path')
const _ = require('underscore')

logger.info('using docker runner')

const usingSiblingContainers = () =>
  __guard__(
    Settings != null ? Settings.path : undefined,
    x => x.sandboxedCompilesHostDir
  ) != null

let containerMonitorTimeout
let containerMonitorInterval

module.exports = DockerRunner = {
  ERR_NOT_DIRECTORY: new Error('not a directory'),
  ERR_TERMINATED: new Error('terminated'),
  ERR_EXITED: new Error('exited'),
  ERR_TIMED_OUT: new Error('container timed out'),

  run(project_id, command, directory, image, timeout, environment, callback) {
    let name
    if (callback == null) {
      callback = function(error, output) {}
    }
    if (usingSiblingContainers()) {
      const _newPath = Settings.path.sandboxedCompilesHostDir
      logger.log(
        { path: _newPath },
        'altering bind path for sibling containers'
      )
      // Server Pro, example:
      // '/var/lib/sharelatex/data/compiles/<project-id>'
      // ... becomes ...
      // '/opt/sharelatex_data/data/compiles/<project-id>'
      directory = Path.join(
        Settings.path.sandboxedCompilesHostDir,
        Path.basename(directory)
      )
    }

    const volumes = {}
    volumes[directory] = '/compile'

    command = Array.from(command).map(arg =>
      __guardMethod__(arg.toString(), 'replace', o =>
        o.replace('$COMPILE_DIR', '/compile')
      )
    )
    if (image == null) {
      ;({ image } = Settings.clsi.docker)
    }

    if (Settings.texliveImageNameOveride != null) {
      const img = image.split('/')
      image = `${Settings.texliveImageNameOveride}/${img[2]}`
    }

    const options = DockerRunner._getContainerOptions(
      command,
      image,
      volumes,
      timeout,
      environment
    )
    const fingerprint = DockerRunner._fingerprintContainer(options)
    options.name = name = `project-${project_id}-${fingerprint}`

    // logOptions = _.clone(options)
    // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
    logger.log({ project_id }, 'running docker container')
    DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
      error,
      output
    ) {
      if (
        __guard__(error != null ? error.message : undefined, x =>
          x.match('HTTP code is 500')
        )
      ) {
        logger.log(
          { err: error, project_id },
          'error running container so destroying and retrying'
        )
        return DockerRunner.destroyContainer(name, null, true, function(error) {
          if (error != null) {
            return callback(error)
          }
          return DockerRunner._runAndWaitForContainer(
            options,
            volumes,
            timeout,
            callback
          )
        })
      } else {
        return callback(error, output)
      }
    })

    return name
  }, // pass back the container name to allow it to be killed

  kill(container_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ container_id }, 'sending kill signal to container')
    const container = dockerode.getContainer(container_id)
    return container.kill(function(error) {
      if (
        error != null &&
        __guardMethod__(error != null ? error.message : undefined, 'match', o =>
          o.match(/Cannot kill container .* is not running/)
        )
      ) {
        logger.warn(
          { err: error, container_id },
          'container not running, continuing'
        )
        error = null
      }
      if (error != null) {
        logger.error({ err: error, container_id }, 'error killing container')
        return callback(error)
      } else {
        return callback()
      }
    })
  },

  _runAndWaitForContainer(options, volumes, timeout, _callback) {
    if (_callback == null) {
      _callback = function(error, output) {}
    }
    const callback = function(...args) {
      _callback(...Array.from(args || []))
      // Only call the callback once
      return (_callback = function() {})
    }

    const { name } = options

    let streamEnded = false
    let containerReturned = false
    let output = {}

    const callbackIfFinished = function() {
      if (streamEnded && containerReturned) {
        return callback(null, output)
      }
    }

    const attachStreamHandler = function(error, _output) {
      if (error != null) {
        return callback(error)
      }
      output = _output
      streamEnded = true
      return callbackIfFinished()
    }

    return DockerRunner.startContainer(
      options,
      volumes,
      attachStreamHandler,
      function(error, containerId) {
        if (error != null) {
          return callback(error)
        }

        return DockerRunner.waitForContainer(name, timeout, function(
          error,
          exitCode
        ) {
          let err
          if (error != null) {
            return callback(error)
          }
          if (exitCode === 137) {
            // exit status from kill -9
            err = DockerRunner.ERR_TERMINATED
            err.terminated = true
            return callback(err)
          }
          if (exitCode === 1) {
            // exit status from chktex
            err = DockerRunner.ERR_EXITED
            err.code = exitCode
            return callback(err)
          }
          containerReturned = true
          __guard__(
            options != null ? options.HostConfig : undefined,
            x => (x.SecurityOpt = null)
          ) // small log line
          logger.log({ err, exitCode, options }, 'docker container has exited')
          return callbackIfFinished()
        })
      }
    )
  },

  _getContainerOptions(command, image, volumes, timeout, environment) {
    let m, year
    let key, value, hostVol, dockerVol
    const timeoutInSeconds = timeout / 1000

    const dockerVolumes = {}
    for (hostVol in volumes) {
      dockerVol = volumes[hostVol]
      dockerVolumes[dockerVol] = {}

      if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
        volumes[hostVol] = `${dockerVol}:rw`
      }
    }

    // merge settings and environment parameter
    const env = {}
    for (const src of [Settings.clsi.docker.env, environment || {}]) {
      for (key in src) {
        value = src[key]
        env[key] = value
      }
    }
    // set the path based on the image year
    if ((m = image.match(/:([0-9]+)\.[0-9]+/))) {
      year = m[1]
    } else {
      year = '2014'
    }
    env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
    const options = {
      Cmd: command,
      Image: image,
      Volumes: dockerVolumes,
      WorkingDir: '/compile',
      NetworkDisabled: true,
      Memory: 1024 * 1024 * 1024 * 1024, // 1 Gb
      User: Settings.clsi.docker.user,
      Env: (() => {
        const result = []
        for (key in env) {
          value = env[key]
          result.push(`${key}=${value}`)
        }
        return result
      })(), // convert the environment hash to an array
      HostConfig: {
        Binds: (() => {
          const result1 = []
          for (hostVol in volumes) {
            dockerVol = volumes[hostVol]
            result1.push(`${hostVol}:${dockerVol}`)
          }
          return result1
        })(),
        LogConfig: { Type: 'none', Config: {} },
        Ulimits: [
          {
            Name: 'cpu',
            Soft: timeoutInSeconds + 5,
            Hard: timeoutInSeconds + 10
          }
        ],
        CapDrop: 'ALL',
        SecurityOpt: ['no-new-privileges']
      }
    }

    if (
      (Settings.path != null ? Settings.path.synctexBinHostPath : undefined) !=
      null
    ) {
      options.HostConfig.Binds.push(
        `${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
      )
    }

    if (Settings.clsi.docker.seccomp_profile != null) {
      options.HostConfig.SecurityOpt.push(
        `seccomp=${Settings.clsi.docker.seccomp_profile}`
      )
    }

    return options
  },

  _fingerprintContainer(containerOptions) {
    // Yay, Hashing!
    const json = JSON.stringify(containerOptions)
    return crypto
      .createHash('md5')
      .update(json)
      .digest('hex')
  },

  startContainer(options, volumes, attachStreamHandler, callback) {
    return LockManager.runWithLock(
      options.name,
      releaseLock =>
        // Check that volumes exist before starting the container.
        // When a container is started with volume pointing to a
        // non-existent directory then docker creates the directory but
        // with root ownership.
        DockerRunner._checkVolumes(options, volumes, function(err) {
          if (err != null) {
            return releaseLock(err)
          }
          return DockerRunner._startContainer(
            options,
            volumes,
            attachStreamHandler,
            releaseLock
          )
        }),

      callback
    )
  },

  // Check that volumes exist and are directories
  _checkVolumes(options, volumes, callback) {
    if (callback == null) {
      callback = function(error, containerName) {}
    }
    if (usingSiblingContainers()) {
      // Server Pro, with sibling-containers active, skip checks
      return callback(null)
    }

    const checkVolume = (path, cb) =>
      fs.stat(path, function(err, stats) {
        if (err != null) {
          return cb(err)
        }
        if (!(stats != null ? stats.isDirectory() : undefined)) {
          return cb(DockerRunner.ERR_NOT_DIRECTORY)
        }
        return cb()
      })
    const jobs = []
    for (const vol in volumes) {
      ;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
    }
    return async.series(jobs, callback)
  },

  _startContainer(options, volumes, attachStreamHandler, callback) {
    if (callback == null) {
      callback = function(error, output) {}
    }
    callback = _.once(callback)
    const { name } = options

    logger.log({ container_name: name }, 'starting container')
    const container = dockerode.getContainer(name)

    const createAndStartContainer = () =>
      dockerode.createContainer(options, function(error, container) {
        if (error != null) {
          return callback(error)
        }
        return startExistingContainer()
      })
    var startExistingContainer = () =>
      DockerRunner.attachToContainer(
        options.name,
        attachStreamHandler,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return container.start(function(error) {
            if (
              error != null &&
              (error != null ? error.statusCode : undefined) !== 304
            ) {
              // already running
              return callback(error)
            } else {
              return callback()
            }
          })
        }
      )
    return container.inspect(function(error, stats) {
      if ((error != null ? error.statusCode : undefined) === 404) {
        return createAndStartContainer()
      } else if (error != null) {
        logger.err(
          { container_name: name, error },
          'unable to inspect container to start'
        )
        return callback(error)
      } else {
        return startExistingContainer()
      }
    })
  },

  attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
    const container = dockerode.getContainer(containerId)
    return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
      error,
      stream
    ) {
      if (error != null) {
        logger.error(
          { err: error, container_id: containerId },
          'error attaching to container'
        )
        return attachStartCallback(error)
      } else {
        attachStartCallback()
      }

      logger.log({ container_id: containerId }, 'attached to container')

      const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
      const createStringOutputStream = function(name) {
        return {
          data: '',
          overflowed: false,
          write(data) {
            if (this.overflowed) {
              return
            }
            if (this.data.length < MAX_OUTPUT) {
              return (this.data += data)
            } else {
              logger.error(
                {
                  container_id: containerId,
                  length: this.data.length,
                  maxLen: MAX_OUTPUT
                },
                `${name} exceeds max size`
              )
              this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
              return (this.overflowed = true)
            }
          }
          // kill container if too much output
          // docker.containers.kill(containerId, () ->)
        }
      }

      const stdout = createStringOutputStream('stdout')
      const stderr = createStringOutputStream('stderr')

      container.modem.demuxStream(stream, stdout, stderr)

      stream.on('error', err =>
        logger.error(
          { err, container_id: containerId },
          'error reading from container stream'
        )
      )

      return stream.on('end', () =>
        attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
      )
    })
  },

  waitForContainer(containerId, timeout, _callback) {
    if (_callback == null) {
      _callback = function(error, exitCode) {}
    }
    const callback = function(...args) {
      _callback(...Array.from(args || []))
      // Only call the callback once
      return (_callback = function() {})
    }

    const container = dockerode.getContainer(containerId)

    let timedOut = false
    const timeoutId = setTimeout(function() {
      timedOut = true
      logger.log(
        { container_id: containerId },
        'timeout reached, killing container'
      )
      return container.kill(function() {})
    }, timeout)

    logger.log({ container_id: containerId }, 'waiting for docker container')
    return container.wait(function(error, res) {
      if (error != null) {
        clearTimeout(timeoutId)
        logger.error(
          { err: error, container_id: containerId },
          'error waiting for container'
        )
        return callback(error)
      }
      if (timedOut) {
        logger.log({ containerId }, 'docker container timed out')
        error = DockerRunner.ERR_TIMED_OUT
        error.timedout = true
        return callback(error)
      } else {
        clearTimeout(timeoutId)
|
||||
logger.log(
|
||||
{ container_id: containerId, exitCode: res.StatusCode },
|
||||
'docker container returned'
|
||||
)
|
||||
return callback(null, res.StatusCode)
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
destroyContainer(containerName, containerId, shouldForce, callback) {
|
||||
// We want the containerName for the lock and, ideally, the
|
||||
// containerId to delete. There is a bug in the docker.io module
|
||||
// where if you delete by name and there is an error, it throws an
|
||||
// async exception, but if you delete by id it just does a normal
|
||||
// error callback. We fall back to deleting by name if no id is
|
||||
// supplied.
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
return LockManager.runWithLock(
|
||||
containerName,
|
||||
releaseLock =>
|
||||
DockerRunner._destroyContainer(
|
||||
containerId || containerName,
|
||||
shouldForce,
|
||||
releaseLock
|
||||
),
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
_destroyContainer(containerId, shouldForce, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
logger.log({ container_id: containerId }, 'destroying docker container')
|
||||
const container = dockerode.getContainer(containerId)
|
||||
return container.remove({ force: shouldForce === true }, function(error) {
|
||||
if (
|
||||
error != null &&
|
||||
(error != null ? error.statusCode : undefined) === 404
|
||||
) {
|
||||
logger.warn(
|
||||
{ err: error, container_id: containerId },
|
||||
'container not found, continuing'
|
||||
)
|
||||
error = null
|
||||
}
|
||||
if (error != null) {
|
||||
logger.error(
|
||||
{ err: error, container_id: containerId },
|
||||
'error destroying container'
|
||||
)
|
||||
} else {
|
||||
logger.log({ container_id: containerId }, 'destroyed container')
|
||||
}
|
||||
return callback(error)
|
||||
})
|
||||
},
|
||||
|
||||
// handle expiry of docker containers
|
||||
|
||||
MAX_CONTAINER_AGE:
|
||||
Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000),
|
||||
|
||||
examineOldContainer(container, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error, name, id, ttl) {}
|
||||
}
|
||||
const name =
|
||||
container.Name ||
|
||||
(container.Names != null ? container.Names[0] : undefined)
|
||||
const created = container.Created * 1000 // creation time is returned in seconds
|
||||
const now = Date.now()
|
||||
const age = now - created
|
||||
const maxAge = DockerRunner.MAX_CONTAINER_AGE
|
||||
const ttl = maxAge - age
|
||||
logger.log(
|
||||
{ containerName: name, created, now, age, maxAge, ttl },
|
||||
'checking whether to destroy container'
|
||||
)
|
||||
return callback(null, name, container.Id, ttl)
|
||||
},
|
||||
|
||||
destroyOldContainers(callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
return dockerode.listContainers({ all: true }, function(error, containers) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const jobs = []
|
||||
for (const container of Array.from(containers || [])) {
|
||||
;(container =>
|
||||
DockerRunner.examineOldContainer(container, function(
|
||||
err,
|
||||
name,
|
||||
id,
|
||||
ttl
|
||||
) {
|
||||
if (name.slice(0, 9) === '/project-' && ttl <= 0) {
|
||||
return jobs.push(cb =>
|
||||
DockerRunner.destroyContainer(name, id, false, () => cb())
|
||||
)
|
||||
}
|
||||
}))(container)
|
||||
}
|
||||
// Ignore errors because some containers get stuck but
|
||||
// will be destroyed next time
|
||||
return async.series(jobs, callback)
|
||||
})
|
||||
},
|
||||
|
||||
startContainerMonitor() {
|
||||
logger.log(
|
||||
{ maxAge: DockerRunner.MAX_CONTAINER_AGE },
|
||||
'starting container expiry'
|
||||
)
|
||||
|
||||
// guarantee only one monitor is running
|
||||
DockerRunner.stopContainerMonitor()
|
||||
|
||||
// randomise the start time
|
||||
const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
|
||||
containerMonitorTimeout = setTimeout(() => {
|
||||
containerMonitorInterval = setInterval(
|
||||
() => DockerRunner.destroyOldContainers(),
|
||||
(oneHour = 60 * 60 * 1000)
|
||||
)
|
||||
}, randomDelay)
|
||||
},
|
||||
|
||||
stopContainerMonitor() {
|
||||
if (containerMonitorTimeout) {
|
||||
clearTimeout(containerMonitorTimeout)
|
||||
containerMonitorTimeout = undefined
|
||||
}
|
||||
if (containerMonitorInterval) {
|
||||
clearInterval(containerMonitorTimeout)
|
||||
containerMonitorTimeout = undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
DockerRunner.startContainerMonitor()
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
||||
function __guardMethod__(obj, methodName, transform) {
|
||||
if (
|
||||
typeof obj !== 'undefined' &&
|
||||
obj !== null &&
|
||||
typeof obj[methodName] === 'function'
|
||||
) {
|
||||
return transform(obj, methodName)
|
||||
} else {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
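A quick sketch of the fingerprint behaviour above: identical option objects serialise to the same JSON, hence the same md5 digest, which is presumably what lets a later compile with the same settings reuse an existing container (hypothetical options, not part of the diff):

const a = DockerRunner._fingerprintContainer({ Image: 'texlive:2017.1' })
const b = DockerRunner._fingerprintContainer({ Image: 'texlive:2017.1' })
// a === b: same options -> same 32-char hex digest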
57
app/js/DraftModeManager.js
Normal file
@@ -0,0 +1,57 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DraftModeManager
const fs = require('fs')
const logger = require('logger-sharelatex')

module.exports = DraftModeManager = {
  injectDraftMode(filename, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.readFile(filename, 'utf8', function(error, content) {
      if (error != null) {
        return callback(error)
      }
      // avoid adding draft mode more than once
      if (
        (content != null
          ? content.indexOf('\\documentclass[draft')
          : undefined) >= 0
      ) {
        return callback()
      }
      const modified_content = DraftModeManager._injectDraftOption(content)
      logger.log(
        {
          content: content.slice(0, 1024), // \documentclass is normally very near the top
          modified_content: modified_content.slice(0, 1024),
          filename
        },
        'injected draft class'
      )
      return fs.writeFile(filename, modified_content, callback)
    })
  },

  _injectDraftOption(content) {
    return (
      content
        // With existing options (must be first, otherwise both are applied)
        .replace(/\\documentclass\[/g, '\\documentclass[draft,')
        // Without existing options
        .replace(/\\documentclass\{/g, '\\documentclass[draft]{')
    )
  }
}
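For illustration, what `_injectDraftOption` does to a preamble (hypothetical inputs; both replacements are global, so every \documentclass in the file is rewritten):

DraftModeManager._injectDraftOption('\\documentclass{article}')
// => '\\documentclass[draft]{article}'
DraftModeManager._injectDraftOption('\\documentclass[12pt]{article}')
// => '\\documentclass[draft,12pt]{article}'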
36
app/js/Errors.js
Normal file
@@ -0,0 +1,36 @@
/* eslint-disable
    no-proto,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors
var NotFoundError = function(message) {
  const error = new Error(message)
  error.name = 'NotFoundError'
  error.__proto__ = NotFoundError.prototype
  return error
}
NotFoundError.prototype.__proto__ = Error.prototype

var FilesOutOfSyncError = function(message) {
  const error = new Error(message)
  error.name = 'FilesOutOfSyncError'
  error.__proto__ = FilesOutOfSyncError.prototype
  return error
}
FilesOutOfSyncError.prototype.__proto__ = Error.prototype

var AlreadyCompilingError = function(message) {
  const error = new Error(message)
  error.name = 'AlreadyCompilingError'
  error.__proto__ = AlreadyCompilingError.prototype
  return error
}
AlreadyCompilingError.prototype.__proto__ = Error.prototype

module.exports = Errors = {
  NotFoundError,
  FilesOutOfSyncError,
  AlreadyCompilingError
}
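Because each factory rewires `__proto__`, the returned objects still satisfy `instanceof` checks even though the constructors return a plain `Error` (illustrative only):

const err = new Errors.NotFoundError('no such project')
// err instanceof Errors.NotFoundError => true
// err instanceof Error => true
// err.name => 'NotFoundError'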
204
app/js/LatexRunner.js
Normal file
@@ -0,0 +1,204 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LatexRunner
const Path = require('path')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')

const ProcessTable = {} // table of currently running jobs (pids or docker container names)

module.exports = LatexRunner = {
  runLatex(project_id, options, callback) {
    let command
    if (callback == null) {
      callback = function(error) {}
    }
    let {
      directory,
      mainFile,
      compiler,
      timeout,
      image,
      environment,
      flags
    } = options
    if (!compiler) {
      compiler = 'pdflatex'
    }
    if (!timeout) {
      timeout = 60000 // milliseconds
    }

    logger.log(
      { directory, compiler, timeout, mainFile, environment, flags },
      'starting compile'
    )

    // We want to run latexmk on the tex file which we will automatically
    // generate from the Rtex/Rmd/md file.
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex')

    if (compiler === 'pdflatex') {
      command = LatexRunner._pdflatexCommand(mainFile, flags)
    } else if (compiler === 'latex') {
      command = LatexRunner._latexCommand(mainFile, flags)
    } else if (compiler === 'xelatex') {
      command = LatexRunner._xelatexCommand(mainFile, flags)
    } else if (compiler === 'lualatex') {
      command = LatexRunner._lualatexCommand(mainFile, flags)
    } else {
      return callback(new Error(`unknown compiler: ${compiler}`))
    }

    if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
      command = ['strace', '-o', 'strace', '-ff'].concat(command)
    }

    const id = `${project_id}` // record running project under this id

    return (ProcessTable[id] = CommandRunner.run(
      project_id,
      command,
      directory,
      image,
      timeout,
      environment,
      function(error, output) {
        delete ProcessTable[id]
        if (error != null) {
          return callback(error)
        }
        const runs =
          __guard__(
            __guard__(output != null ? output.stderr : undefined, x1 =>
              x1.match(/^Run number \d+ of .*latex/gm)
            ),
            x => x.length
          ) || 0
        const failed =
          __guard__(output != null ? output.stdout : undefined, x2 =>
            x2.match(/^Latexmk: Errors/m)
          ) != null
            ? 1
            : 0
        // counters from latexmk output
        const stats = {}
        stats['latexmk-errors'] = failed
        stats['latex-runs'] = runs
        stats['latex-runs-with-errors'] = failed ? runs : 0
        stats[`latex-runs-${runs}`] = 1
        stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0
        // timing information from /usr/bin/time
        const timings = {}
        const stderr = output != null ? output.stderr : undefined
        timings['cpu-percent'] =
          __guard__(
            stderr != null
              ? stderr.match(/Percent of CPU this job got: (\d+)/m)
              : undefined,
            x3 => x3[1]
          ) || 0
        timings['cpu-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/User time.*: (\d+.\d+)/m)
              : undefined,
            x4 => x4[1]
          ) || 0
        timings['sys-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/System time.*: (\d+.\d+)/m)
              : undefined,
            x5 => x5[1]
          ) || 0
        return callback(error, output, stats, timings)
      }
    ))
  },

  killLatex(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const id = `${project_id}`
    logger.log({ id }, 'killing running compile')
    if (ProcessTable[id] == null) {
      logger.warn({ id }, 'no such project to kill')
      return callback(null)
    } else {
      return CommandRunner.kill(ProcessTable[id], callback)
    }
  },

  _latexmkBaseCommand(flags) {
    let args = [
      'latexmk',
      '-cd',
      '-f',
      '-jobname=output',
      '-auxdir=$COMPILE_DIR',
      '-outdir=$COMPILE_DIR',
      '-synctex=1',
      '-interaction=batchmode'
    ]
    if (flags) {
      args = args.concat(flags)
    }
    return (
      __guard__(
        Settings != null ? Settings.clsi : undefined,
        x => x.latexmkCommandPrefix
      ) || []
    ).concat(args)
  },

  _pdflatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdf',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _latexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdfdvi',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _xelatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-xelatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _lualatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-lualatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
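For reference, a sketch of the argv that `_pdflatexCommand('main.tex')` yields when no `latexmkCommandPrefix` is configured (assumed settings, illustration only):

// ['latexmk', '-cd', '-f', '-jobname=output',
//  '-auxdir=$COMPILE_DIR', '-outdir=$COMPILE_DIR',
//  '-synctex=1', '-interaction=batchmode',
//  '-pdf', '$COMPILE_DIR/main.tex']
// $COMPILE_DIR is substituted with the real directory by the CommandRunner.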
91
app/js/LocalCommandRunner.js
Normal file
@@ -0,0 +1,91 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CommandRunner
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')

logger.info('using standard command runner')

module.exports = CommandRunner = {
  run(project_id, command, directory, image, timeout, environment, callback) {
    let key, value
    if (callback == null) {
      callback = function(error) {}
    }
    command = Array.from(command).map(arg =>
      arg.toString().replace('$COMPILE_DIR', directory)
    )
    logger.log({ project_id, command, directory }, 'running command')
    logger.warn('timeouts and sandboxing are not enabled with CommandRunner')

    // merge environment settings
    const env = {}
    for (key in process.env) {
      value = process.env[key]
      env[key] = value
    }
    for (key in environment) {
      value = environment[key]
      env[key] = value
    }

    // run command as detached process so it has its own process group (which can be killed if needed)
    const proc = spawn(command[0], command.slice(1), {
      cwd: directory,
      env,
      detached: true // without this the child shares our process group and kill(-pid) below cannot target it
    })

    let stdout = ''
    proc.stdout.on('data', data => (stdout += data))

    proc.on('error', function(err) {
      logger.err(
        { err, project_id, command, directory },
        'error running command'
      )
      return callback(err)
    })

    proc.on('close', function(code, signal) {
      let err
      logger.info({ code, signal, project_id }, 'command exited')
      if (signal === 'SIGTERM') {
        // signal from kill method below
        err = new Error('terminated')
        err.terminated = true
        return callback(err)
      } else if (code === 1) {
        // exit status from chktex
        err = new Error('exited')
        err.code = code
        return callback(err)
      } else {
        return callback(null, { stdout: stdout })
      }
    })

    return proc.pid // return process id to allow job to be killed if necessary
  },

  kill(pid, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    try {
      process.kill(-pid) // kill all processes in group
    } catch (err) {
      return callback(err)
    }
    return callback()
  }
}
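A minimal usage sketch (hypothetical arguments; note that `image` and `timeout` are accepted but ignored by this local runner, as the warning above states):

const pid = CommandRunner.run(
  'project-123', // hypothetical project id
  ['echo', 'building in $COMPILE_DIR'],
  '/tmp/compiles/project-123',
  null, // image: unused here
  60000, // timeout: unused here
  {},
  (err, output) => console.log(err || output.stdout)
)
// pid can later be passed to CommandRunner.kill(pid, cb)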
72
app/js/LockManager.js
Normal file
@@ -0,0 +1,72 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
const Errors = require('./Errors')
const fs = require('fs')
const Path = require('path')
module.exports = LockManager = {
  LOCK_TEST_INTERVAL: 1000, // 1s between each test of the lock
  MAX_LOCK_WAIT_TIME: 15000, // 15s maximum time to spend trying to get the lock
  LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires

  runWithLock(path, runner, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const lockOpts = {
      wait: this.MAX_LOCK_WAIT_TIME,
      pollPeriod: this.LOCK_TEST_INTERVAL,
      stale: this.LOCK_STALE
    }
    return Lockfile.lock(path, lockOpts, function(error) {
      if ((error != null ? error.code : undefined) === 'EEXIST') {
        return callback(new Errors.AlreadyCompilingError('compile in progress'))
      } else if (error != null) {
        return fs.lstat(path, (statLockErr, statLock) =>
          fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
            fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) {
              logger.err(
                {
                  error,
                  path,
                  statLock,
                  statLockErr,
                  statDir,
                  statDirErr,
                  readdirErr,
                  readdirDir
                },
                'unable to get lock'
              )
              return callback(error)
            })
          )
        )
      } else {
        return runner((error1, ...args) =>
          Lockfile.unlock(path, function(error2) {
            error = error1 || error2
            if (error != null) {
              return callback(error)
            }
            return callback(null, ...Array.from(args))
          })
        )
      }
    })
  }
}
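Sketch of the calling convention: the runner receives a release function and must call it with its results, which are forwarded to the outer callback after the lockfile is removed (hypothetical paths and helper):

LockManager.runWithLock(
  '/tmp/compiles/project-123.lock', // hypothetical lock path
  releaseLock => doCompile((err, result) => releaseLock(err, result)), // doCompile is hypothetical
  (err, result) => {
    if (err instanceof Errors.AlreadyCompilingError) {
      // another compile already holds the lock (EEXIST above)
    }
  }
)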
3
app/js/Metrics.js
Normal file
@@ -0,0 +1,3 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
module.exports = require('metrics-sharelatex')
399
app/js/OutputCacheManager.js
Normal file
@@ -0,0 +1,399 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS104: Avoid inline assignments
 * DS204: Change includes calls to have a more natural evaluation order
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputCacheManager
const async = require('async')
const fs = require('fs')
const fse = require('fs-extra')
const Path = require('path')
const logger = require('logger-sharelatex')
const _ = require('underscore')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')

const OutputFileOptimiser = require('./OutputFileOptimiser')

module.exports = OutputCacheManager = {
  CACHE_SUBDIR: '.cache/clsi',
  ARCHIVE_SUBDIR: '.archive/clsi',
  // build id is HEXDATE-HEXRANDOM from Date.now() and crypto.randomBytes
  // for backwards compatibility, make the randombytes part optional
  BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/,
  CACHE_LIMIT: 2, // maximum number of cache directories
  CACHE_AGE: 60 * 60 * 1000, // up to one hour old

  path(buildId, file) {
    // used by static server, given build id return '.cache/clsi/buildId'
    if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
      return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
    } else {
      // for invalid build id, return top level
      return file
    }
  },

  generateBuildId(callback) {
    // generate a secure build id from Date.now() and 8 random bytes in hex
    if (callback == null) {
      callback = function(error, buildId) {}
    }
    return crypto.randomBytes(8, function(err, buf) {
      if (err != null) {
        return callback(err)
      }
      const random = buf.toString('hex')
      const date = Date.now().toString(16)
      return callback(err, `${date}-${random}`)
    })
  },

  saveOutputFiles(outputFiles, compileDir, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return OutputCacheManager.generateBuildId(function(err, buildId) {
      if (err != null) {
        return callback(err)
      }
      return OutputCacheManager.saveOutputFilesInBuildDir(
        outputFiles,
        compileDir,
        buildId,
        callback
      )
    })
  },

  saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
    // make a compileDir/CACHE_SUBDIR/build_id directory and
    // copy all the output files into it
    if (callback == null) {
      callback = function(error) {}
    }
    const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
    // Put the files into a new cache subdirectory
    const cacheDir = Path.join(
      compileDir,
      OutputCacheManager.CACHE_SUBDIR,
      buildId
    )
    // Is it a per-user compile? check if compile directory is PROJECTID-USERID
    const perUser = Path.basename(compileDir).match(
      /^[0-9a-f]{24}-[0-9a-f]{24}$/
    )

    // Archive logs in background
    if (
      (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
      (Settings.clsi != null ? Settings.clsi.strace : undefined)
    ) {
      OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
        err
      ) {
        if (err != null) {
          return logger.warn({ err }, 'error archiving log files')
        }
      })
    }

    // make the new cache directory
    return fse.ensureDir(cacheDir, function(err) {
      if (err != null) {
        logger.error(
          { err, directory: cacheDir },
          'error creating cache directory'
        )
        return callback(err, outputFiles)
      } else {
        // copy all the output files into the new cache directory
        const results = []
        return async.mapSeries(
          outputFiles,
          function(file, cb) {
            // don't send dot files as output, express doesn't serve them
            if (OutputCacheManager._fileIsHidden(file.path)) {
              logger.debug(
                { compileDir, path: file.path },
                'ignoring dotfile in output'
              )
              return cb()
            }
            // copy other files into cache directory if valid
            const newFile = _.clone(file)
            const [src, dst] = Array.from([
              Path.join(compileDir, file.path),
              Path.join(cacheDir, file.path)
            ])
            return OutputCacheManager._checkFileIsSafe(src, function(
              err,
              isSafe
            ) {
              if (err != null) {
                return cb(err)
              }
              if (!isSafe) {
                return cb()
              }
              return OutputCacheManager._checkIfShouldCopy(src, function(
                err,
                shouldCopy
              ) {
                if (err != null) {
                  return cb(err)
                }
                if (!shouldCopy) {
                  return cb()
                }
                return OutputCacheManager._copyFile(src, dst, function(err) {
                  if (err != null) {
                    return cb(err)
                  }
                  newFile.build = buildId // attach a build id if we cached the file
                  results.push(newFile)
                  return cb()
                })
              })
            })
          },
          function(err) {
            if (err != null) {
              // pass back the original files if we encountered *any* error
              callback(err, outputFiles)
              // clean up the directory we just created
              return fse.remove(cacheDir, function(err) {
                if (err != null) {
                  return logger.error(
                    { err, dir: cacheDir },
                    'error removing cache dir after failure'
                  )
                }
              })
            } else {
              // pass back the list of new files in the cache
              callback(err, results)
              // let file expiry run in the background, expire all previous files if per-user
              return OutputCacheManager.expireOutputFiles(cacheRoot, {
                keep: buildId,
                limit: perUser ? 1 : null
              })
            }
          }
        )
      }
    })
  },

  archiveLogs(outputFiles, compileDir, buildId, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const archiveDir = Path.join(
      compileDir,
      OutputCacheManager.ARCHIVE_SUBDIR,
      buildId
    )
    logger.log({ dir: archiveDir }, 'archiving log files for project')
    return fse.ensureDir(archiveDir, function(err) {
      if (err != null) {
        return callback(err)
      }
      return async.mapSeries(
        outputFiles,
        function(file, cb) {
          const [src, dst] = Array.from([
            Path.join(compileDir, file.path),
            Path.join(archiveDir, file.path)
          ])
          return OutputCacheManager._checkFileIsSafe(src, function(
            err,
            isSafe
          ) {
            if (err != null) {
              return cb(err)
            }
            if (!isSafe) {
              return cb()
            }
            return OutputCacheManager._checkIfShouldArchive(src, function(
              err,
              shouldArchive
            ) {
              if (err != null) {
                return cb(err)
              }
              if (!shouldArchive) {
                return cb()
              }
              return OutputCacheManager._copyFile(src, dst, cb)
            })
          })
        },
        callback
      )
    })
  },

  expireOutputFiles(cacheRoot, options, callback) {
    // look in compileDir for build dirs and delete if > N or age of mod time > T
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.readdir(cacheRoot, function(err, results) {
      if (err != null) {
        if (err.code === 'ENOENT') {
          // cache directory is empty
          return callback(null)
        }
        logger.error({ err, project_id: cacheRoot }, 'error clearing cache')
        return callback(err)
      }

      const dirs = results.sort().reverse()
      const currentTime = Date.now()

      const isExpired = function(dir, index) {
        if ((options != null ? options.keep : undefined) === dir) {
          return false
        }
        // remove any directories over the requested (non-null) limit
        if (
          (options != null ? options.limit : undefined) != null &&
          index > options.limit
        ) {
          return true
        }
        // remove any directories over the hard limit
        if (index > OutputCacheManager.CACHE_LIMIT) {
          return true
        }
        // we can get the build time from the first part of the directory name DDDD-RRRR
        // DDDD is date and RRRR is random bytes
        const dirTime = parseInt(
          __guard__(dir.split('-'), x => x[0]),
          16
        )
        const age = currentTime - dirTime
        return age > OutputCacheManager.CACHE_AGE
      }

      const toRemove = _.filter(dirs, isExpired)

      const removeDir = (dir, cb) =>
        fse.remove(Path.join(cacheRoot, dir), function(err, result) {
          logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
          if (err != null) {
            logger.error({ err, dir }, 'cache remove error')
          }
          return cb(err, result)
        })
      return async.eachSeries(
        toRemove,
        (dir, cb) => removeDir(dir, cb),
        callback
      )
    })
  },

  _fileIsHidden(path) {
    return (path != null ? path.match(/^\.|\/\./) : undefined) != null
  },

  _checkFileIsSafe(src, callback) {
    // check if we have a valid file to copy into the cache
    if (callback == null) {
      callback = function(error, isSafe) {}
    }
    return fs.stat(src, function(err, stats) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        logger.warn(
          { err, file: src },
          'file has disappeared before copying to build cache'
        )
        return callback(err, false)
      } else if (err != null) {
        // some other problem reading the file
        logger.error({ err, file: src }, 'stat error for file in cache')
        return callback(err, false)
      } else if (!stats.isFile()) {
        // other filetype - reject it
        logger.warn(
          { src, stat: stats },
          'nonfile output - refusing to copy to cache'
        )
        return callback(null, false)
      } else {
        // it's a plain file, ok to copy
        return callback(null, true)
      }
    })
  },

  _copyFile(src, dst, callback) {
    // copy output file into the cache
    return fse.copy(src, dst, function(err) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        logger.warn(
          { err, file: src },
          'file has disappeared when copying to build cache'
        )
        return callback(err, false)
      } else if (err != null) {
        logger.error({ err, src, dst }, 'copy error for file in cache')
        return callback(err)
      } else {
        if (
          Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined
        ) {
          // don't run any optimisations on the pdf when they are done
          // in the docker container
          return callback()
        } else {
          // call the optimiser for the file too
          return OutputFileOptimiser.optimiseFile(src, dst, callback)
        }
      }
    })
  },

  _checkIfShouldCopy(src, callback) {
    if (callback == null) {
      callback = function(err, shouldCopy) {}
    }
    return callback(null, !Path.basename(src).match(/^strace/))
  },

  _checkIfShouldArchive(src, callback) {
    let needle
    if (callback == null) {
      callback = function(err, shouldCopy) {}
    }
    if (Path.basename(src).match(/^strace/)) {
      return callback(null, true)
    }
    if (
      (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) &&
      ((needle = Path.basename(src)),
      ['output.log', 'output.blg'].includes(needle))
    ) {
      return callback(null, true)
    }
    return callback(null, false)
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
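A sketch of the build id and cache path shapes produced above (illustrative values only — the hex timestamp is roughly 11 chars today, the random suffix is 16 hex chars from 8 bytes):

OutputCacheManager.generateBuildId((err, buildId) => {
  // buildId looks like '<hex Date.now()>-<16 hex chars>'
  console.log(OutputCacheManager.path(buildId, 'output.pdf'))
  // => '.cache/clsi/<buildId>/output.pdf'
})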
115
app/js/OutputFileFinder.js
Normal file
@@ -0,0 +1,115 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
    no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileFinder
const async = require('async')
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')

module.exports = OutputFileFinder = {
  findOutputFiles(resources, directory, callback) {
    if (callback == null) {
      callback = function(error, outputFiles, allFiles) {}
    }
    const incomingResources = {}
    for (const resource of Array.from(resources)) {
      incomingResources[resource.path] = true
    }

    return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
      if (allFiles == null) {
        allFiles = []
      }
      if (error != null) {
        logger.err({ err: error }, 'error finding all output files')
        return callback(error)
      }
      const outputFiles = []
      for (const file of Array.from(allFiles)) {
        if (!incomingResources[file]) {
          outputFiles.push({
            path: file,
            type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
          })
        }
      }
      return callback(null, outputFiles, allFiles)
    })
  },

  _getAllFiles(directory, _callback) {
    if (_callback == null) {
      _callback = function(error, fileList) {}
    }
    const callback = function(error, fileList) {
      _callback(error, fileList)
      return (_callback = function() {})
    }

    // don't include clsi-specific files/directories in the output list
    const EXCLUDE_DIRS = [
      '-name',
      '.cache',
      '-o',
      '-name',
      '.archive',
      '-o',
      '-name',
      '.project-*'
    ]
    const args = [
      directory,
      '(',
      ...Array.from(EXCLUDE_DIRS),
      ')',
      '-prune',
      '-o',
      '-type',
      'f',
      '-print'
    ]
    logger.log({ args }, 'running find command')

    const proc = spawn('find', args)
    let stdout = ''
    proc.stdout.on('data', chunk => (stdout += chunk.toString()))
    proc.on('error', callback)
    return proc.on('close', function(code) {
      if (code !== 0) {
        logger.warn(
          { directory, code },
          "find returned error, directory likely doesn't exist"
        )
        return callback(null, [])
      }
      let fileList = stdout.trim().split('\n')
      // strip the leading directory from each path
      fileList = fileList.map(file => Path.relative(directory, file))
      return callback(null, fileList)
    })
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
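The spawned command is plain find(1); for a hypothetical compile directory `/compile` the args above are equivalent to this shell line (no shell is actually involved, so no quoting is needed in the args array):

// find /compile \( -name .cache -o -name .archive -o -name '.project-*' \) \
//   -prune -o -type f -print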
104
app/js/OutputFileOptimiser.js
Normal file
@@ -0,0 +1,104 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileOptimiser
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('underscore')

module.exports = OutputFileOptimiser = {
  optimiseFile(src, dst, callback) {
    // check output file (src) and see if we can optimise it, storing
    // the result in the build directory (dst)
    if (callback == null) {
      callback = function(error) {}
    }
    if (src.match(/\/output\.pdf$/)) {
      return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(
        err,
        isOptimised
      ) {
        if (err != null || isOptimised) {
          return callback(null)
        }
        return OutputFileOptimiser.optimisePDF(src, dst, callback)
      })
    } else {
      return callback(null)
    }
  },

  checkIfPDFIsOptimised(file, callback) {
    const SIZE = 16 * 1024 // check the header of the pdf
    const result = Buffer.alloc(SIZE) // zero-filled, prevents leakage of uninitialised memory
    return fs.open(file, 'r', function(err, fd) {
      if (err != null) {
        return callback(err)
      }
      return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
        fs.close(fd, function(errClose) {
          if (errRead != null) {
            return callback(errRead)
          }
          if (errClose != null) {
            // was checking the undefined variable `errReadClose`
            return callback(errClose)
          }
          const isOptimised =
            buffer.toString('ascii').indexOf('/Linearized 1') >= 0
          return callback(null, isOptimised)
        })
      )
    })
  },

  optimisePDF(src, dst, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const tmpOutput = dst + '.opt'
    const args = ['--linearize', src, tmpOutput]
    logger.log({ args }, 'running qpdf command')

    const timer = new Metrics.Timer('qpdf')
    const proc = spawn('qpdf', args)
    let stdout = ''
    proc.stdout.on('data', chunk => (stdout += chunk.toString()))
    callback = _.once(callback) // avoid double callback for error and close events
    proc.on('error', function(err) {
      logger.warn({ err, args }, 'qpdf failed')
      return callback(null) // ignore the error
    })
    return proc.on('close', function(code) {
      timer.done()
      if (code !== 0) {
        logger.warn({ code, args }, 'qpdf returned error')
        return callback(null) // ignore the error
      }
      return fs.rename(tmpOutput, dst, function(err) {
        if (err != null) {
          logger.warn(
            { err, tmpOutput, dst },
            'failed to rename output of qpdf command'
          )
        }
        return callback(null) // ignore the error
      })
    })
  }
}
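For orientation, the optimisation step shells out to qpdf; for a hypothetical source at `/compile/output.pdf` the spawn above is equivalent to:

// qpdf --linearize /compile/output.pdf <dst>.opt
// followed by fs.rename('<dst>.opt', '<dst>') on success;
// linearized PDFs then carry '/Linearized 1' in their first 16KB,
// which is exactly what checkIfPDFIsOptimised looks for.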
163
app/js/ProjectPersistenceManager.js
Normal file
@@ -0,0 +1,163 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ProjectPersistenceManager
const UrlCache = require('./UrlCache')
const CompileManager = require('./CompileManager')
const db = require('./db')
const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')

module.exports = ProjectPersistenceManager = {
  EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,

  markProjectAsJustAccessed(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.Project.findOrCreate({ where: { project_id } })
        .spread((project, created) =>
          project
            .updateAttributes({ lastAccessed: new Date() })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  clearExpiredProjects(callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ProjectPersistenceManager._findExpiredProjectIds(function(
      error,
      project_ids
    ) {
      if (error != null) {
        return callback(error)
      }
      logger.log({ project_ids }, 'clearing expired projects')
      const jobs = Array.from(project_ids || []).map(project_id =>
        (project_id => callback =>
          ProjectPersistenceManager.clearProjectFromCache(project_id, function(
            err
          ) {
            if (err != null) {
              logger.error({ err, project_id }, 'error clearing project')
            }
            return callback()
          }))(project_id)
      )
      return async.series(jobs, function(error) {
        if (error != null) {
          return callback(error)
        }
        // ignore any errors from deleting directories
        return CompileManager.clearExpiredProjects(
          ProjectPersistenceManager.EXPIRY_TIMEOUT,
          error => callback()
        )
      })
    })
  },

  clearProject(project_id, user_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id, user_id }, 'clearing project for user')
    return CompileManager.clearProject(project_id, user_id, function(error) {
      if (error != null) {
        return callback(error)
      }
      return ProjectPersistenceManager.clearProjectFromCache(
        project_id,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return callback()
        }
      )
    })
  },

  clearProjectFromCache(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id }, 'clearing project from cache')
    return UrlCache.clearProject(project_id, function(error) {
      if (error != null) {
        logger.err({ error, project_id }, 'error clearing project from cache')
        return callback(error)
      }
      return ProjectPersistenceManager._clearProjectFromDatabase(
        project_id,
        function(error) {
          if (error != null) {
            logger.err(
              { error, project_id },
              'error clearing project from database'
            )
          }
          return callback(error)
        }
      )
    })
  },

  _clearProjectFromDatabase(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id }, 'clearing project from database')
    const job = cb =>
      db.Project.destroy({ where: { project_id } })
        .then(() => cb())
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _findExpiredProjectIds(callback) {
    if (callback == null) {
      callback = function(error, project_ids) {}
    }
    const job = function(cb) {
      const keepProjectsFrom = new Date(
        Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
      )
      const q = {}
      q[db.op.lt] = keepProjectsFrom
      return db.Project.findAll({ where: { lastAccessed: q } })
        .then(projects =>
          cb(
            null,
            projects.map(project => project.project_id)
          )
        )
        .error(cb)
    }

    return dbQueue.queue.push(job, callback)
  }
}

logger.log(
  { EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
  'project assets kept timeout'
)
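A sketch of the expiry cutoff used by `_findExpiredProjectIds` (a Sequelize `op.lt` query on `lastAccessed`); with the default timeout of 2.5 days:

// keepProjectsFrom = now - 2.5 days
const keepProjectsFrom = new Date(Date.now() - 2.5 * 24 * 60 * 60 * 1000)
// effectively: SELECT ... FROM projects WHERE lastAccessed < :keepProjectsFrom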
217
app/js/RequestParser.js
Normal file
@@ -0,0 +1,217 @@
/* eslint-disable
    handle-callback-err,
    no-control-regex,
    no-throw-literal,
    no-unused-vars,
    no-useless-escape,
    standard/no-callback-literal,
    valid-typeof,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RequestParser
const settings = require('settings-sharelatex')

module.exports = RequestParser = {
  VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
  MAX_TIMEOUT: 600,

  parse(body, callback) {
    let resource
    if (callback == null) {
      callback = function(error, data) {}
    }
    const response = {}

    if (body.compile == null) {
      return callback('top level object should have a compile attribute')
    }

    const { compile } = body
    if (!compile.options) {
      compile.options = {}
    }

    try {
      response.compiler = this._parseAttribute(
        'compiler',
        compile.options.compiler,
        {
          validValues: this.VALID_COMPILERS,
          default: 'pdflatex',
          type: 'string'
        }
      )
      response.timeout = this._parseAttribute(
        'timeout',
        compile.options.timeout,
        {
          default: RequestParser.MAX_TIMEOUT,
          type: 'number'
        }
      )
      response.imageName = this._parseAttribute(
        'imageName',
        compile.options.imageName,
        { type: 'string' }
      )
      response.draft = this._parseAttribute('draft', compile.options.draft, {
        default: false,
        type: 'boolean'
      })
      response.check = this._parseAttribute('check', compile.options.check, {
        type: 'string'
      })
      response.flags = this._parseAttribute('flags', compile.options.flags, {
        default: [],
        type: 'object'
      })

      // The syncType specifies whether the request contains all
      // resources (full) or only those resources to be updated
      // in-place (incremental).
      response.syncType = this._parseAttribute(
        'syncType',
        compile.options.syncType,
        {
          validValues: ['full', 'incremental'],
          type: 'string'
        }
      )

      // The syncState is an identifier passed in with the request
      // which has the property that it changes when any resource is
      // added, deleted, moved or renamed.
      //
      // on syncType full the syncState identifier is passed in and
      // stored
      //
      // on syncType incremental the syncState identifier must match
      // the stored value
      response.syncState = this._parseAttribute(
        'syncState',
        compile.options.syncState,
        { type: 'string' }
      )

      if (response.timeout > RequestParser.MAX_TIMEOUT) {
        response.timeout = RequestParser.MAX_TIMEOUT
      }
      response.timeout = response.timeout * 1000 // milliseconds

      response.resources = (() => {
        const result = []
        for (resource of Array.from(compile.resources || [])) {
          result.push(this._parseResource(resource))
        }
        return result
      })()

      const rootResourcePath = this._parseAttribute(
        'rootResourcePath',
        compile.rootResourcePath,
        {
          default: 'main.tex',
          type: 'string'
        }
      )
      const originalRootResourcePath = rootResourcePath
      const sanitizedRootResourcePath = RequestParser._sanitizePath(
        rootResourcePath
      )
      response.rootResourcePath = RequestParser._checkPath(
        sanitizedRootResourcePath
      )

      for (resource of Array.from(response.resources)) {
        if (resource.path === originalRootResourcePath) {
          resource.path = sanitizedRootResourcePath
        }
      }
    } catch (error) {
      return callback(error)
    }

    return callback(null, response)
  },

  _parseResource(resource) {
    let modified
    if (resource.path == null || typeof resource.path !== 'string') {
      throw 'all resources should have a path attribute'
    }

    if (resource.modified != null) {
      modified = new Date(resource.modified)
      if (isNaN(modified.getTime())) {
        throw `resource modified date could not be understood: ${resource.modified}`
      }
    }

    if (resource.url == null && resource.content == null) {
      throw 'all resources should have either a url or content attribute'
    }
    if (resource.content != null && typeof resource.content !== 'string') {
      throw 'content attribute should be a string'
    }
    if (resource.url != null && typeof resource.url !== 'string') {
      throw 'url attribute should be a string'
    }

    return {
      path: resource.path,
      modified,
      url: resource.url,
      content: resource.content
    }
  },

  _parseAttribute(name, attribute, options) {
    if (attribute != null) {
      if (options.validValues != null) {
        if (options.validValues.indexOf(attribute) === -1) {
          throw `${name} attribute should be one of: ${options.validValues.join(
            ', '
          )}`
        }
      }
      if (options.type != null) {
        if (typeof attribute !== options.type) {
          throw `${name} attribute should be a ${options.type}`
        }
      }
    } else {
      if (options.default != null) {
        return options.default
      }
    }
    return attribute
  },

  _sanitizePath(path) {
    // See http://php.net/manual/en/function.escapeshellcmd.php
    return path.replace(
      /[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g,
      ''
    )
  },

  _checkPath(path) {
    // check that the request does not use a relative path
    for (const dir of Array.from(path.split('/'))) {
      if (dir === '..') {
        throw 'relative path in root resource'
      }
    }
    return path
  }
}
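For orientation, a minimal request body that parses successfully under these rules (hypothetical values; note the timeout is given in seconds and converted to milliseconds):

const body = {
  compile: {
    options: { compiler: 'pdflatex', timeout: 60, syncType: 'full' },
    rootResourcePath: 'main.tex',
    resources: [{ path: 'main.tex', content: '\\documentclass{article}...' }]
  }
}
// RequestParser.parse(body, (err, req) => ...) yields req.timeout === 60000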
154
app/js/ResourceStateManager.js
Normal file
@@ -0,0 +1,154 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS201: Simplify complex destructure assignments
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceStateManager
const Path = require('path')
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const Errors = require('./Errors')
const SafeReader = require('./SafeReader')

module.exports = ResourceStateManager = {
  // The sync state is an identifier which must match for an
  // incremental update to be allowed.
  //
  // The initial value is passed in and stored on a full
  // compile, along with the list of resources.
  //
  // Subsequent incremental compiles must come with the same value - if
  // not they will be rejected with a 409 Conflict response. The
  // previous list of resources is returned.
  //
  // An incremental compile can only update existing files with new
  // content. The sync state identifier must change if any docs or
  // files are moved, added, deleted or renamed.

  SYNC_STATE_FILE: '.project-sync-state',
  SYNC_STATE_MAX_SIZE: 128 * 1024,

  saveProjectState(state, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    if (state == null) {
      // remove the file if no state passed in
      logger.log({ state, basePath }, 'clearing sync state')
      return fs.unlink(stateFile, function(err) {
        if (err != null && err.code !== 'ENOENT') {
          return callback(err)
        } else {
          return callback()
        }
      })
    } else {
      logger.log({ state, basePath }, 'writing sync state')
      const resourceList = Array.from(resources).map(resource => resource.path)
      return fs.writeFile(
        stateFile,
        [...Array.from(resourceList), `stateHash:${state}`].join('\n'),
        callback
      )
    }
  },

  checkProjectStateMatches(state, basePath, callback) {
    if (callback == null) {
      callback = function(error, resources) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    const size = this.SYNC_STATE_MAX_SIZE
    return SafeReader.readFile(stateFile, size, 'utf8', function(
      err,
      result,
      bytesRead
    ) {
      if (err != null) {
        return callback(err)
      }
      if (bytesRead === size) {
        logger.error(
          { file: stateFile, size, bytesRead },
          'project state file truncated'
        )
      }
      const array =
        __guard__(result != null ? result.toString() : undefined, x =>
          x.split('\n')
        ) || []
      const adjustedLength = Math.max(array.length, 1)
      const resourceList = array.slice(0, adjustedLength - 1)
      const oldState = array[adjustedLength - 1]
      const newState = `stateHash:${state}`
      logger.log(
        { state, oldState, basePath, stateMatches: newState === oldState },
        'checking sync state'
      )
      if (newState !== oldState) {
        return callback(
          new Errors.FilesOutOfSyncError('invalid state for incremental update')
        )
      } else {
        const resources = Array.from(resourceList).map(path => ({ path }))
        return callback(null, resources)
      }
    })
  },

  checkResourceFiles(resources, allFiles, basePath, callback) {
    // check the paths are all relative to current directory
    let file
    if (callback == null) {
      callback = function(error) {}
    }
    for (file of Array.from(resources || [])) {
      for (const dir of Array.from(
        __guard__(file != null ? file.path : undefined, x => x.split('/'))
      )) {
        if (dir === '..') {
          return callback(new Error('relative path in resource file list'))
        }
      }
    }
    // check if any of the input files are not present in list of files
    const seenFile = {}
    for (file of Array.from(allFiles)) {
      seenFile[file] = true
    }
    const missingFiles = Array.from(resources)
      .filter(resource => !seenFile[resource.path])
      .map(resource => resource.path)
    if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
      logger.err(
        { missingFiles, basePath, allFiles, resources },
        'missing input files for project'
      )
      return callback(
        new Errors.FilesOutOfSyncError(
          'resource files missing in incremental update'
        )
      )
    } else {
      return callback()
    }
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
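A minimal sketch of the sync-state file the module above reads and writes; the file name comes from SYNC_STATE_FILE, and the resource paths and hash here are hypothetical, not part of the commit:

// .project-sync-state, as written by saveProjectState:
//
//   main.tex
//   images/lion.png
//   stateHash:ab12cd34
//
// checkProjectStateMatches re-reads the file, compares the trailing
// stateHash line with the incoming syncState, and only on a match returns
// the stored resource list to allow the incremental compile.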
352
app/js/ResourceWriter.js
Normal file
@@ -0,0 +1,352 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
    no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceWriter
const UrlCache = require('./UrlCache')
const Path = require('path')
const fs = require('fs')
const async = require('async')
const mkdirp = require('mkdirp')
const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')

const parallelFileDownloads = settings.parallelFileDownloads || 1

module.exports = ResourceWriter = {
  syncResourcesToDisk(request, basePath, callback) {
    if (callback == null) {
      callback = function(error, resourceList) {}
    }
    if (request.syncType === 'incremental') {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'incremental sync'
      )
      return ResourceStateManager.checkProjectStateMatches(
        request.syncState,
        basePath,
        function(error, resourceList) {
          if (error != null) {
            return callback(error)
          }
          return ResourceWriter._removeExtraneousFiles(
            resourceList,
            basePath,
            function(error, outputFiles, allFiles) {
              if (error != null) {
                return callback(error)
              }
              return ResourceStateManager.checkResourceFiles(
                resourceList,
                allFiles,
                basePath,
                function(error) {
                  if (error != null) {
                    return callback(error)
                  }
                  return ResourceWriter.saveIncrementalResourcesToDisk(
                    request.project_id,
                    request.resources,
                    basePath,
                    function(error) {
                      if (error != null) {
                        return callback(error)
                      }
                      return callback(null, resourceList)
                    }
                  )
                }
              )
            }
          )
        }
      )
    } else {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'full sync'
      )
      return this.saveAllResourcesToDisk(
        request.project_id,
        request.resources,
        basePath,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return ResourceStateManager.saveProjectState(
            request.syncState,
            request.resources,
            basePath,
            function(error) {
              if (error != null) {
                return callback(error)
              }
              return callback(null, request.resources)
            }
          )
        }
      )
    }
  },

  saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return this._createDirectory(basePath, error => {
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(resources).map(resource =>
        (resource => {
          return callback =>
            this._writeResourceToDisk(project_id, resource, basePath, callback)
        })(resource)
      )
      return async.parallelLimit(jobs, parallelFileDownloads, callback)
    })
  },

  saveAllResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return this._createDirectory(basePath, error => {
      if (error != null) {
        return callback(error)
      }
      return this._removeExtraneousFiles(resources, basePath, error => {
        if (error != null) {
          return callback(error)
        }
        const jobs = Array.from(resources).map(resource =>
          (resource => {
            return callback =>
              this._writeResourceToDisk(
                project_id,
                resource,
                basePath,
                callback
              )
          })(resource)
        )
        return async.parallelLimit(jobs, parallelFileDownloads, callback)
      })
    })
  },

  _createDirectory(basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.mkdir(basePath, function(err) {
      if (err != null) {
        if (err.code === 'EEXIST') {
          return callback()
        } else {
          logger.log({ err, dir: basePath }, 'error creating directory')
          return callback(err)
        }
      } else {
        return callback()
      }
    })
  },

  _removeExtraneousFiles(resources, basePath, _callback) {
    if (_callback == null) {
      _callback = function(error, outputFiles, allFiles) {}
    }
    const timer = new Metrics.Timer('unlink-output-files')
    const callback = function(error, ...result) {
      timer.done()
      return _callback(error, ...Array.from(result))
    }

    return OutputFileFinder.findOutputFiles(resources, basePath, function(
      error,
      outputFiles,
      allFiles
    ) {
      if (error != null) {
        return callback(error)
      }

      const jobs = []
      for (const file of Array.from(outputFiles || [])) {
        ;(function(file) {
          const { path } = file
          let should_delete = true
          if (
            path.match(/^output\./) ||
            path.match(/\.aux$/) ||
            path.match(/^cache\//)
          ) {
            // knitr cache
            should_delete = false
          }
          if (path.match(/^output-.*/)) {
            // Tikz cached figures (default case)
            should_delete = false
          }
          if (path.match(/\.(pdf|dpth|md5)$/)) {
            // Tikz cached figures (by extension)
            should_delete = false
          }
          if (
            path.match(/\.(pygtex|pygstyle)$/) ||
            path.match(/(^|\/)_minted-[^\/]+\//)
          ) {
            // minted files/directory
            should_delete = false
          }
          if (
            path.match(/\.md\.tex$/) ||
            path.match(/(^|\/)_markdown_[^\/]+\//)
          ) {
            // markdown files/directory
            should_delete = false
          }
          if (path.match(/-eps-converted-to\.pdf$/)) {
            // Epstopdf generated files
            should_delete = false
          }
          if (
            path === 'output.pdf' ||
            path === 'output.dvi' ||
            path === 'output.log' ||
            path === 'output.xdv'
          ) {
            should_delete = true
          }
          if (path === 'output.tex') {
            // created by TikzManager if present in output files
            should_delete = true
          }
          if (should_delete) {
            return jobs.push(callback =>
              ResourceWriter._deleteFileIfNotDirectory(
                Path.join(basePath, path),
                callback
              )
            )
          }
        })(file)
      }

      return async.series(jobs, function(error) {
        if (error != null) {
          return callback(error)
        }
        return callback(null, outputFiles, allFiles)
      })
    })
  },

  _deleteFileIfNotDirectory(path, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.stat(path, function(error, stat) {
      if (error != null && error.code === 'ENOENT') {
        return callback()
      } else if (error != null) {
        logger.err(
          { err: error, path },
          'error stating file in deleteFileIfNotDirectory'
        )
        return callback(error)
      } else if (stat.isFile()) {
        return fs.unlink(path, function(error) {
          if (error != null) {
            logger.err(
              { err: error, path },
              'error removing file in deleteFileIfNotDirectory'
            )
            return callback(error)
          } else {
            return callback()
          }
        })
      } else {
        return callback()
      }
    })
  },

  _writeResourceToDisk(project_id, resource, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ResourceWriter.checkPath(basePath, resource.path, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return mkdirp(Path.dirname(path), function(error) {
        if (error != null) {
          return callback(error)
        }
        // TODO: Don't overwrite file if it hasn't been modified
        if (resource.url != null) {
          return UrlCache.downloadUrlToFile(
            project_id,
            resource.url,
            path,
            resource.modified,
            function(err) {
              if (err != null) {
                logger.err(
                  {
                    err,
                    project_id,
                    path,
                    resource_url: resource.url,
                    modified: resource.modified
                  },
                  'error downloading file for resources'
                )
              }
              return callback()
            }
          ) // try and continue compiling even if http resource can not be downloaded at this time
        } else {
          const process = require('process')
          fs.writeFile(path, resource.content, callback)
          try {
            let result
            return (result = fs.lstatSync(path))
          } catch (e) {}
        }
      })
    })
  },

  checkPath(basePath, resourcePath, callback) {
    const path = Path.normalize(Path.join(basePath, resourcePath))
    if (path.slice(0, basePath.length + 1) !== basePath + '/') {
      return callback(new Error('resource path is outside root directory'))
    } else {
      return callback(null, path)
    }
  }
}
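A minimal usage sketch for the checkPath guard above; the base path and resource path are hypothetical, not part of the commit:

// Path.normalize collapses '..' segments, so a traversal attempt resolves
// outside the compile directory and is rejected before any file is written.
ResourceWriter.checkPath('/compiles/project-123', '../etc/passwd', function(
  error,
  path
) {
  // error.message === 'resource path is outside root directory'
})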
60
app/js/SafeReader.js
Normal file
@@ -0,0 +1,60 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
    node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let SafeReader
const fs = require('fs')
const logger = require('logger-sharelatex')

module.exports = SafeReader = {
  // safely read up to size bytes from a file and return result as a
  // string

  readFile(file, size, encoding, callback) {
    if (callback == null) {
      callback = function(error, result) {}
    }
    return fs.open(file, 'r', function(err, fd) {
      if (err != null && err.code === 'ENOENT') {
        return callback()
      }
      if (err != null) {
        return callback(err)
      }

      // safely return always closing the file
      const callbackWithClose = (err, ...result) =>
        fs.close(fd, function(err1) {
          if (err != null) {
            return callback(err)
          }
          if (err1 != null) {
            return callback(err1)
          }
          return callback(null, ...Array.from(result))
        })
      const buff = new Buffer(size, 0) // fill with zeros
      return fs.read(fd, buff, 0, buff.length, 0, function(
        err,
        bytesRead,
        buffer
      ) {
        if (err != null) {
          return callbackWithClose(err)
        }
        const result = buffer.toString(encoding, 0, bytesRead)
        return callbackWithClose(null, result, bytesRead)
      })
    })
  }
}
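The node/no-deprecated-api disable above is needed for the Buffer constructor; a minimal sketch of the non-deprecated equivalent, not part of the commit:

// Buffer.alloc returns a zero-filled buffer of the requested size, which is
// what the `new Buffer(size, 0)` call above intends.
const buff = Buffer.alloc(size)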
94
app/js/StaticServerForbidSymlinks.js
Normal file
@@ -0,0 +1,94 @@
/* eslint-disable
    camelcase,
    no-cond-assign,
    no-unused-vars,
    node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')

module.exports = ForbidSymlinks = function(staticFn, root, options) {
  const expressStatic = staticFn(root, options)
  const basePath = Path.resolve(root)
  return function(req, res, next) {
    let file, project_id, result
    const path = __guard__(url.parse(req.url), x => x.pathname)
    // check that the path is of the form /project_id_or_name/path/to/file.log
    if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
      project_id = result[1]
      file = result[2]
    } else {
      logger.warn({ path }, 'unrecognized file request')
      return res.sendStatus(404)
    }
    // check that the file does not use a relative path
    for (const dir of Array.from(file.split('/'))) {
      if (dir === '..') {
        logger.warn({ path }, 'attempt to use a relative path')
        return res.sendStatus(404)
      }
    }
    // check that the requested path is normalized
    const requestedFsPath = `${basePath}/${project_id}/${file}`
    if (requestedFsPath !== Path.normalize(requestedFsPath)) {
      logger.error(
        { path: requestedFsPath },
        'requestedFsPath is not normalized'
      )
      return res.sendStatus(404)
    }
    // check that the requested path is not a symlink
    return fs.realpath(requestedFsPath, function(err, realFsPath) {
      if (err != null) {
        if (err.code === 'ENOENT') {
          return res.sendStatus(404)
        } else {
          logger.error(
            {
              err,
              requestedFsPath,
              realFsPath,
              path: req.params[0],
              project_id: req.params.project_id
            },
            'error checking file access'
          )
          return res.sendStatus(500)
        }
      } else if (requestedFsPath !== realFsPath) {
        logger.warn(
          {
            requestedFsPath,
            realFsPath,
            path: req.params[0],
            project_id: req.params.project_id
          },
          'trying to access a different file (symlink), aborting'
        )
        return res.sendStatus(404)
      } else {
        return expressStatic(req, res, next)
      }
    })
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
94
app/js/TikzManager.js
Normal file
@@ -0,0 +1,94 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let TikzManager
const fs = require('fs')
const Path = require('path')
const ResourceWriter = require('./ResourceWriter')
const SafeReader = require('./SafeReader')
const logger = require('logger-sharelatex')

// for \tikzexternalize or pstool to work the main file needs to match the
// jobname. Since we set the -jobname to output, we have to create a
// copy of the main file as 'output.tex'.

module.exports = TikzManager = {
  checkMainFile(compileDir, mainFile, resources, callback) {
    // if there's already an output.tex file, we don't want to touch it
    if (callback == null) {
      callback = function(error, needsMainFile) {}
    }
    for (const resource of Array.from(resources)) {
      if (resource.path === 'output.tex') {
        logger.log({ compileDir, mainFile }, 'output.tex already in resources')
        return callback(null, false)
      }
    }
    // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
    return ResourceWriter.checkPath(compileDir, mainFile, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
        if (error != null) {
          return callback(error)
        }
        const usesTikzExternalize =
          (content != null
            ? content.indexOf('\\tikzexternalize')
            : undefined) >= 0
        const usesPsTool =
          (content != null ? content.indexOf('{pstool}') : undefined) >= 0
        logger.log(
          { compileDir, mainFile, usesTikzExternalize, usesPsTool },
          'checked for packages needing main file as output.tex'
        )
        const needsMainFile = usesTikzExternalize || usesPsTool
        return callback(null, needsMainFile)
      })
    })
  },

  injectOutputFile(compileDir, mainFile, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ResourceWriter.checkPath(compileDir, mainFile, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return fs.readFile(path, 'utf8', function(error, content) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { compileDir, mainFile },
          'copied file to output.tex as project uses packages which require it'
        )
        // use wx flag to ensure that output file does not already exist
        return fs.writeFile(
          Path.join(compileDir, 'output.tex'),
          content,
          { flag: 'wx' },
          callback
        )
      })
    })
  }
}
278
app/js/UrlCache.js
Normal file
@@ -0,0 +1,278 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
const async = require('async')

module.exports = UrlCache = {
  downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._ensureUrlIsInCache(
      project_id,
      url,
      lastModified,
      (error, pathToCachedUrl) => {
        if (error != null) {
          return callback(error)
        }
        return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
          if (error != null) {
            return UrlCache._clearUrlDetails(project_id, url, () =>
              callback(error)
            )
          } else {
            return callback(error)
          }
        })
      }
    )
  },

  clearProject(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
      logger.log(
        { project_id, url_count: urls.length },
        'clearing project URLs'
      )
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(urls || []).map(url =>
        (url => callback =>
          UrlCache._clearUrlFromCache(project_id, url, function(error) {
            if (error != null) {
              logger.error(
                { err: error, project_id, url },
                'error clearing project URL'
              )
            }
            return callback()
          }))(url)
      )
      return async.series(jobs, callback)
    })
  },

  _ensureUrlIsInCache(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error, pathOnDisk) {}
    }
    if (lastModified != null) {
      // MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
      // So round down to seconds
      lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
    }
    return UrlCache._doesUrlNeedDownloading(
      project_id,
      url,
      lastModified,
      (error, needsDownloading) => {
        if (error != null) {
          return callback(error)
        }
        if (needsDownloading) {
          logger.log({ url, lastModified }, 'downloading URL')
          return UrlFetcher.pipeUrlToFile(
            url,
            UrlCache._cacheFilePathForUrl(project_id, url),
            error => {
              if (error != null) {
                return callback(error)
              }
              return UrlCache._updateOrCreateUrlDetails(
                project_id,
                url,
                lastModified,
                error => {
                  if (error != null) {
                    return callback(error)
                  }
                  return callback(
                    null,
                    UrlCache._cacheFilePathForUrl(project_id, url)
                  )
                }
              )
            }
          )
        } else {
          logger.log({ url, lastModified }, 'URL is up to date in cache')
          return callback(null, UrlCache._cacheFilePathForUrl(project_id, url))
        }
      }
    )
  },

  _doesUrlNeedDownloading(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error, needsDownloading) {}
    }
    if (lastModified == null) {
      return callback(null, true)
    }
    return UrlCache._findUrlDetails(project_id, url, function(
      error,
      urlDetails
    ) {
      if (error != null) {
        return callback(error)
      }
      if (
        urlDetails == null ||
        urlDetails.lastModified == null ||
        urlDetails.lastModified.getTime() < lastModified.getTime()
      ) {
        return callback(null, true)
      } else {
        return callback(null, false)
      }
    })
  },

  _cacheFileNameForUrl(project_id, url) {
    return (
      project_id +
      ':' +
      crypto
        .createHash('md5')
        .update(url)
        .digest('hex')
    )
  },

  _cacheFilePathForUrl(project_id, url) {
    return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(
      project_id,
      url
    )}`
  },

  _copyFile(from, to, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callbackOnce = function(error) {
      if (error != null) {
        logger.error({ err: error, from, to }, 'error copying file from cache')
      }
      _callback(error)
      return (_callback = function() {})
    }
    const writeStream = fs.createWriteStream(to)
    const readStream = fs.createReadStream(from)
    writeStream.on('error', callbackOnce)
    readStream.on('error', callbackOnce)
    writeStream.on('close', callbackOnce)
    return writeStream.on('open', () => readStream.pipe(writeStream))
  },

  _clearUrlFromCache(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._clearUrlDetails(project_id, url, function(error) {
      if (error != null) {
        return callback(error)
      }
      return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
        if (error != null) {
          return callback(error)
        }
        return callback(null)
      })
    })
  },

  _deleteUrlCacheFromDisk(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(
      error
    ) {
      if (error != null && error.code !== 'ENOENT') {
        // no error if the file isn't present
        return callback(error)
      } else {
        return callback()
      }
    })
  },

  _findUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function(error, urlDetails) {}
    }
    const job = cb =>
      db.UrlCache.find({ where: { url, project_id } })
        .then(urlDetails => cb(null, urlDetails))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.UrlCache.findOrCreate({ where: { url, project_id } })
        .spread((urlDetails, created) =>
          urlDetails
            .updateAttributes({ lastModified })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _clearUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.UrlCache.destroy({ where: { url, project_id } })
        .then(() => cb(null))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _findAllUrlsInProject(project_id, callback) {
    if (callback == null) {
      callback = function(error, urls) {}
    }
    const job = cb =>
      db.UrlCache.findAll({ where: { project_id } })
        .then(urlEntries =>
          cb(
            null,
            urlEntries.map(entry => entry.url)
          )
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  }
}
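A minimal call sketch for the downloadUrlToFile entry point above; every value is hypothetical, not part of the commit:

// The cache checks the stored lastModified, re-downloads through UrlFetcher
// only when stale, then copies the cached file into the compile directory;
// a failed copy clears the DB row so the next compile fetches the URL again.
UrlCache.downloadUrlToFile(
  'project-123',
  'http://filestore.example.com/project/project-123/file/lion.png',
  '/compiles/project-123/lion.png',
  new Date('2020-02-01T00:00:00Z'),
  function(error) {
    // the image is now at destPath, or `error` explains why not
  }
)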
120
app/js/UrlFetcher.js
Normal file
@@ -0,0 +1,120 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
    node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlFetcher
const request = require('request').defaults({ jar: false })
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')

const oneMinute = 60 * 1000

module.exports = UrlFetcher = {
  pipeUrlToFile(url, filePath, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callbackOnce = function(error) {
      if (timeoutHandler != null) {
        clearTimeout(timeoutHandler)
      }
      _callback(error)
      return (_callback = function() {})
    }

    if (settings.filestoreDomainOveride != null) {
      const p = URL.parse(url).path
      url = `${settings.filestoreDomainOveride}${p}`
    }
    var timeoutHandler = setTimeout(
      function() {
        timeoutHandler = null
        logger.error({ url, filePath }, 'Timed out downloading file to cache')
        return callbackOnce(
          new Error(`Timed out downloading file to cache ${url}`)
        )
      },
      // FIXME: maybe need to close fileStream here
      3 * oneMinute
    )

    logger.log({ url, filePath }, 'started downloading url to cache')
    const urlStream = request.get({ url, timeout: oneMinute })
    urlStream.pause() // stop data flowing until we are ready

    // attach handlers before setting up pipes
    urlStream.on('error', function(error) {
      logger.error({ err: error, url, filePath }, 'error downloading url')
      return callbackOnce(
        error || new Error(`Something went wrong downloading the URL ${url}`)
      )
    })

    urlStream.on('end', () =>
      logger.log({ url, filePath }, 'finished downloading file into cache')
    )

    return urlStream.on('response', function(res) {
      if (res.statusCode >= 200 && res.statusCode < 300) {
        const fileStream = fs.createWriteStream(filePath)

        // attach handlers before setting up pipes
        fileStream.on('error', function(error) {
          logger.error(
            { err: error, url, filePath },
            'error writing file into cache'
          )
          return fs.unlink(filePath, function(err) {
            if (err != null) {
              logger.err({ err, filePath }, 'error deleting file from cache')
            }
            return callbackOnce(error)
          })
        })

        fileStream.on('finish', function() {
          logger.log({ url, filePath }, 'finished writing file into cache')
          return callbackOnce()
        })

        fileStream.on('pipe', () =>
          logger.log({ url, filePath }, 'piping into filestream')
        )

        urlStream.pipe(fileStream)
        return urlStream.resume() // now we are ready to handle the data
      } else {
        logger.error(
          { statusCode: res.statusCode, url, filePath },
          'unexpected status code downloading url to cache'
        )
        // https://nodejs.org/api/http.html#http_class_http_clientrequest
        // If you add a 'response' event handler, then you must consume
        // the data from the response object, either by calling
        // response.read() whenever there is a 'readable' event, or by
        // adding a 'data' handler, or by calling the .resume()
        // method. Until the data is consumed, the 'end' event will not
        // fire. Also, until the data is read it will consume memory
        // that can eventually lead to a 'process out of memory' error.
        urlStream.resume() // discard the data
        return callbackOnce(
          new Error(
            `URL returned non-success status code: ${res.statusCode} ${url}`
          )
        )
      }
    })
  }
}
67
app/js/db.js
Normal file
@@ -0,0 +1,67 @@
/* eslint-disable
    no-console,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('underscore')
const logger = require('logger-sharelatex')

const options = _.extend({ logging: false }, Settings.mysql.clsi)

logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db')

const sequelize = new Sequelize(
  Settings.mysql.clsi.database,
  Settings.mysql.clsi.username,
  Settings.mysql.clsi.password,
  options
)

if (Settings.mysql.clsi.dialect === 'sqlite') {
  logger.log('running PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA synchronous=OFF;')
  sequelize.query('PRAGMA read_uncommitted = true;')
}

module.exports = {
  UrlCache: sequelize.define(
    'UrlCache',
    {
      url: Sequelize.STRING,
      project_id: Sequelize.STRING,
      lastModified: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
    }
  ),

  Project: sequelize.define(
    'Project',
    {
      project_id: { type: Sequelize.STRING, primaryKey: true },
      lastAccessed: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['lastAccessed'] }]
    }
  ),

  op: Sequelize.Op,

  sync() {
    logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema')
    return sequelize
      .sync()
      .then(() => logger.log('db sync complete'))
      .catch(err => console.log(err, 'error syncing'))
  }
}
@@ -1,6 +1,6 @@
clsi
--public-repo=True
--language=coffeescript
--language=es
--env-add=
--node-version=10.19.0
--acceptance-creds=None
@@ -1,71 +0,0 @@
Path = require "path"

module.exports =
  # Options are passed to Sequelize.
  # See http://sequelizejs.com/documentation#usage-options for details
  mysql:
    clsi:
      database: "clsi"
      username: "clsi"
      dialect: "sqlite"
      storage: process.env["SQLITE_PATH"] or Path.resolve(__dirname + "/../db.sqlite")
      pool:
        max: 1
        min: 1
      retry:
        max: 10

  compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] or "7mb"

  path:
    compilesDir: Path.resolve(__dirname + "/../compiles")
    clsiCacheDir: Path.resolve(__dirname + "/../cache")
    synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)

  internal:
    clsi:
      port: 3013
      host: process.env["LISTEN_ADDRESS"] or "localhost"

    load_balancer_agent:
      report_load:true
      load_port: 3048
      local_port: 3049
  apis:
    clsi:
      url: "http://#{process.env['CLSI_HOST'] or 'localhost'}:3013"


  smokeTest: process.env["SMOKE_TEST"] or false
  project_cache_length_ms: 1000 * 60 * 60 * 24
  parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] or 1
  parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] or 1
  filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"]
  texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"]
  sentry:
    dsn: process.env['SENTRY_DSN']


if process.env["DOCKER_RUNNER"]
  module.exports.clsi =
    dockerRunner: process.env["DOCKER_RUNNER"] == "true"
    docker:
      image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
      env:
        HOME: "/tmp"
      socketPath: "/var/run/docker.sock"
      user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
    checkProjectsIntervalMs: 10 * 60 * 1000

  try
    seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json")
    module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path)))
  catch error
    console.log error, "could not load seccom profile from #{seccomp_profile_path}"

  module.exports.path.synctexBaseDir = -> "/compile"

  module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]

  module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]
100
config/settings.defaults.js
Normal file
@@ -0,0 +1,100 @@
const Path = require('path')

module.exports = {
  // Options are passed to Sequelize.
  // See http://sequelizejs.com/documentation#usage-options for details
  mysql: {
    clsi: {
      database: 'clsi',
      username: 'clsi',
      dialect: 'sqlite',
      storage:
        process.env.SQLITE_PATH || Path.resolve(__dirname + '/../db.sqlite'),
      pool: {
        max: 1,
        min: 1
      },
      retry: {
        max: 10
      }
    }
  },

  compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',

  path: {
    compilesDir: Path.resolve(__dirname + '/../compiles'),
    clsiCacheDir: Path.resolve(__dirname + '/../cache'),
    synctexBaseDir(project_id) {
      return Path.join(this.compilesDir, project_id)
    }
  },

  internal: {
    clsi: {
      port: 3013,
      host: process.env.LISTEN_ADDRESS || 'localhost'
    },

    load_balancer_agent: {
      report_load: true,
      load_port: 3048,
      local_port: 3049
    }
  },
  apis: {
    clsi: {
      url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`
    }
  },

  smokeTest: process.env.SMOKE_TEST || false,
  project_cache_length_ms: 1000 * 60 * 60 * 24,
  parallelFileDownloads: process.env.FILESTORE_PARALLEL_FILE_DOWNLOADS || 1,
  parallelSqlQueryLimit: process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT || 1,
  filestoreDomainOveride: process.env.FILESTORE_DOMAIN_OVERRIDE,
  texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
  sentry: {
    dsn: process.env.SENTRY_DSN
  }
}

if (process.env.DOCKER_RUNNER) {
  let seccomp_profile_path
  module.exports.clsi = {
    dockerRunner: process.env.DOCKER_RUNNER === 'true',
    docker: {
      image:
        process.env.TEXLIVE_IMAGE ||
        'quay.io/sharelatex/texlive-full:2017.1',
      env: {
        HOME: '/tmp'
      },
      socketPath: '/var/run/docker.sock',
      user: process.env.TEXLIVE_IMAGE_USER || 'tex'
    },
    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
    checkProjectsIntervalMs: 10 * 60 * 1000
  }

  try {
    seccomp_profile_path = Path.resolve(
      __dirname + '/../seccomp/clsi-profile.json'
    )
    module.exports.clsi.docker.seccomp_profile = JSON.stringify(
      JSON.parse(require('fs').readFileSync(seccomp_profile_path))
    )
  } catch (error) {
    console.log(
      error,
      `could not load seccomp profile from ${seccomp_profile_path}`
    )
  }

  module.exports.path.synctexBaseDir = () => '/compile'

  module.exports.path.sandboxedCompilesHostDir =
    process.env.COMPILES_HOST_DIR

  module.exports.path.synctexBinHostPath = process.env.SYNCTEX_BIN_HOST_PATH
}
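A minimal sketch of how the Docker block above is driven by the environment; the values are hypothetical, not part of the commit:

// DOCKER_RUNNER=true enables the sandboxed runner and, in the block above,
// pins synctexBaseDir to '/compile':
//   DOCKER_RUNNER=true
//   TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1
//   TEXLIVE_IMAGE_USER=tex
//   COMPILES_HOST_DIR=/var/lib/clsi/compiles        (hypothetical host path)
//   SYNCTEX_BIN_HOST_PATH=/var/lib/clsi/bin/synctex (hypothetical host path)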
@@ -10,10 +10,9 @@
  },

  "watch": [
    "app/coffee/",
    "app.coffee",
    "app/js/",
    "app.js",
    "config/"
  ],
  "ext": "coffee"

  "ext": "js"
}
3090
npm-shrinkwrap.json
generated
File diff suppressed because it is too large
37
package.json
@@ -7,17 +7,15 @@
    "url": "https://github.com/sharelatex/clsi-sharelatex.git"
  },
  "scripts": {
    "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')",
    "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js",
    "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
    "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js",
    "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP",
    "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee",
    "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee",
    "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests",
    "start": "node $NODE_APP_OPTIONS app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
    "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
    "nodemon": "nodemon --config nodemon.json",
    "compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee"
    "lint": "node_modules/.bin/eslint .",
    "format": "node_modules/.bin/prettier-eslint '**/*.js' --list-different",
    "format:fix": "node_modules/.bin/prettier-eslint '**/*.js' --write"
  },
  "author": "James Allen <james@sharelatex.com>",
  "dependencies": {
@@ -43,10 +41,27 @@
    "wrench": "~1.5.4"
  },
  "devDependencies": {
    "babel-eslint": "^10.0.3",
    "bunyan": "^0.22.1",
    "chai": "~1.8.1",
    "coffeescript": "1.6.0",
    "eslint": "^6.6.0",
    "eslint-config-prettier": "^6.10.0",
    "eslint-config-standard": "^14.1.0",
    "eslint-config-standard-jsx": "^8.1.0",
    "eslint-config-standard-react": "^9.2.0",
    "eslint-plugin-chai-expect": "^2.1.0",
    "eslint-plugin-chai-friendly": "^0.5.0",
    "eslint-plugin-import": "^2.20.1",
    "eslint-plugin-jsx-a11y": "^6.2.3",
    "eslint-plugin-mocha": "^6.2.2",
    "eslint-plugin-node": "^11.0.0",
    "eslint-plugin-prettier": "^3.1.2",
    "eslint-plugin-promise": "^4.2.1",
    "eslint-plugin-react": "^7.18.3",
    "eslint-plugin-standard": "^4.0.1",
    "mocha": "^4.0.1",
    "prettier": "^1.19.1",
    "prettier-eslint-cli": "^5.0.0",
    "sandboxed-module": "~0.3.0",
    "sinon": "~1.7.3",
    "timekeeper": "0.0.4"
@@ -1,48 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"

describe "Broken LaTeX file", ->
  before (done)->
    @broken_request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{articl % :(
          \\begin{documen % :(
          Broken
          \\end{documen % :(
        '''
      ]
    @correct_request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]
    ClsiApp.ensureRunning done

  describe "on first run", ->
    before (done) ->
      @project_id = Client.randomId()
      Client.compile @project_id, @broken_request, (@error, @res, @body) => done()

    it "should return a failure status", ->
      @body.compile.status.should.equal "failure"

  describe "on second run", ->
    before (done) ->
      @project_id = Client.randomId()
      Client.compile @project_id, @correct_request, () =>
        Client.compile @project_id, @broken_request, (@error, @res, @body) =>
          done()

    it "should return a failure status", ->
      @body.compile.status.should.equal "failure"
@@ -1,36 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"

describe "Deleting Old Files", ->
  before (done)->
    @request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]
    ClsiApp.ensureRunning done

  describe "on first run", ->
    before (done) ->
      @project_id = Client.randomId()
      Client.compile @project_id, @request, (@error, @res, @body) => done()

    it "should return a success status", ->
      @body.compile.status.should.equal "success"

  describe "after file has been deleted", ->
    before (done) ->
      @request.resources = []
      Client.compile @project_id, @request, (@error, @res, @body) =>
        done()

    it "should return a failure status", ->
      @body.compile.status.should.equal "failure"
@@ -1,129 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
fs = require "fs"
ChildProcess = require "child_process"
ClsiApp = require "./helpers/ClsiApp"
logger = require("logger-sharelatex")
Path = require("path")
fixturePath = (path) -> Path.normalize(__dirname + "/../fixtures/" + path)
process = require "process"
console.log process.pid, process.ppid, process.getuid(),process.getgroups(), "PID"
try
  console.log "creating tmp directory", fixturePath("tmp")
  fs.mkdirSync(fixturePath("tmp"))
catch err
  console.log err, fixturePath("tmp"), "unable to create fixture tmp path"

MOCHA_LATEX_TIMEOUT = 60 * 1000

convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
  command = "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
  console.log "COMMAND"
  console.log command
  convert = ChildProcess.exec command
  stdout = ""
  convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString()
  convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
  convert.on "exit", () ->
    callback()

compare = (originalPath, generatedPath, callback = (error, same) ->) ->
  diff_file = "#{fixturePath(generatedPath)}-diff.png"
  proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{diff_file}"
  stderr = ""
  proc.stderr.on "data", (chunk) -> stderr += chunk
  proc.on "exit", () ->
    if stderr.trim() == "0 (0)"
      # remove output diff if test matches expected image
      fs.unlink diff_file, (err) ->
        if err
          throw err
      callback null, true
    else
      console.log "compare result", stderr
      callback null, false

checkPdfInfo = (pdfPath, callback = (error, output) ->) ->
  proc = ChildProcess.exec "pdfinfo #{fixturePath(pdfPath)}"
  stdout = ""
  proc.stdout.on "data", (chunk) -> stdout += chunk
  proc.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
  proc.on "exit", () ->
    if stdout.match(/Optimized:\s+yes/)
      callback null, true
    else
      callback null, false

compareMultiplePages = (project_id, callback = (error) ->) ->
  compareNext = (page_no, callback) ->
    path = "tmp/#{project_id}-source-#{page_no}.png"
    fs.stat fixturePath(path), (error, stat) ->
      if error?
        callback()
      else
        compare "tmp/#{project_id}-source-#{page_no}.png", "tmp/#{project_id}-generated-#{page_no}.png", (error, same) =>
          throw error if error?
          same.should.equal true
          compareNext page_no + 1, callback
  compareNext 0, callback

comparePdf = (project_id, example_dir, callback = (error) ->) ->
  console.log "CONVERT"
  console.log "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png"
  convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
    throw error if error?
    convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
      throw error if error?
      fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) =>
        if error?
          compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) =>
            throw error if error?
            same.should.equal true
            callback()
        else
          compareMultiplePages project_id, (error) ->
            throw error if error?
            callback()

downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
  writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
  request.get(url).pipe(writeStream)
  console.log("writing file out", fixturePath("tmp/#{project_id}.pdf"))
  writeStream.on "close", () =>
    checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) =>
      throw error if error?
      optimised.should.equal true
      comparePdf project_id, example_dir, callback

Client.runServer(4242, fixturePath("examples"))

describe "Example Documents", ->
  before (done) ->
    ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () ->
      ClsiApp.ensureRunning done


  for example_dir in fs.readdirSync fixturePath("examples")
    do (example_dir) ->
      describe example_dir, ->
        before ->
          @project_id = Client.randomId() + "_" + example_dir

        it "should generate the correct pdf", (done) ->
          this.timeout(MOCHA_LATEX_TIMEOUT)
          Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
            if error || body?.compile?.status is "failure"
              console.log "DEBUG: error", error, "body", JSON.stringify(body)
            pdf = Client.getOutputFile body, "pdf"
            downloadAndComparePdf(@project_id, example_dir, pdf.url, done)

        it "should generate the correct pdf on the second run as well", (done) ->
          this.timeout(MOCHA_LATEX_TIMEOUT)
          Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
            if error || body?.compile?.status is "failure"
              console.log "DEBUG: error", error, "body", JSON.stringify(body)
            pdf = Client.getOutputFile body, "pdf"
            downloadAndComparePdf(@project_id, example_dir, pdf.url, done)
@@ -1,41 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"

describe "Simple LaTeX file", ->
  before (done) ->
    @project_id = Client.randomId()
    @request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]
    ClsiApp.ensureRunning =>
      Client.compile @project_id, @request, (@error, @res, @body) => done()

  it "should return the PDF", ->
    pdf = Client.getOutputFile(@body, "pdf")
    pdf.type.should.equal "pdf"

  it "should return the log", ->
    log = Client.getOutputFile(@body, "log")
    log.type.should.equal "log"

  it "should provide the pdf for download", (done) ->
    pdf = Client.getOutputFile(@body, "pdf")
    request.get pdf.url, (error, res, body) ->
      res.statusCode.should.equal 200
      done()

  it "should provide the log for download", (done) ->
    log = Client.getOutputFile(@body, "pdf")
    request.get log.url, (error, res, body) ->
      res.statusCode.should.equal 200
      done()
@@ -1,41 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
expect = require("chai").expect
ClsiApp = require "./helpers/ClsiApp"
crypto = require("crypto")

describe "Syncing", ->
  before (done) ->
    content = '''
      \\documentclass{article}
      \\begin{document}
      Hello world
      \\end{document}
    '''
    @request =
      resources: [
        path: "main.tex"
        content: content
      ]
    @project_id = Client.randomId()
    ClsiApp.ensureRunning =>
      Client.compile @project_id, @request, (@error, @res, @body) => done()

  describe "from code to pdf", ->
    it "should return the correct location", (done) ->
      Client.syncFromCode @project_id, "main.tex", 3, 5, (error, pdfPositions) ->
        throw error if error?
        expect(pdfPositions).to.deep.equal(
          pdf: [ { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } ]
        )
        done()

  describe "from pdf to code", ->
    it "should return the correct location", (done) ->
      Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) =>
        throw error if error?
        expect(codePositions).to.deep.equal(
          code: [ { file: 'main.tex', line: 3, column: -1 } ]
        )
        done()
@@ -1,34 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
ClsiApp = require "./helpers/ClsiApp"

describe "Timed out compile", ->
  before (done) ->
    @request =
      options:
        timeout: 10 # seconds
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          \\def\\x{Hello!\\par\\x}
          \\x
          \\end{document}
        '''
      ]
    @project_id = Client.randomId()
    ClsiApp.ensureRunning =>
      Client.compile @project_id, @request, (@error, @res, @body) => done()

  it "should return a timeout error", ->
    @body.compile.error.should.equal "container timed out"

  it "should return a timedout status", ->
    @body.compile.status.should.equal "timedout"

  it "should return the log output file name", ->
    outputFilePaths = @body.compile.outputFiles.map((x) => x.path)
    outputFilePaths.should.include('output.log')

@@ -1,222 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
sinon = require "sinon"
ClsiApp = require "./helpers/ClsiApp"

host = "localhost"

Server =
  run: () ->
    express = require "express"
    app = express()

    staticServer = express.static __dirname + "/../fixtures/"
    app.get "/:random_id/*", (req, res, next) =>
      @getFile(req.url)
      req.url = "/" + req.params[0]
      staticServer(req, res, next)

    app.listen 31415, host

  getFile: () ->

  randomId: () ->
    Math.random().toString(16).slice(2)

Server.run()

describe "Url Caching", ->
  describe "Downloading an image for the first time", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
        }]

      sinon.spy Server, "getFile"
      ClsiApp.ensureRunning =>
        Client.compile @project_id, @request, (@error, @res, @body) => done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image", ->
      Server.getFile
        .calledWith("/" + @file)
        .should.equal true

  describe "When an image is in the cache and the last modified date is unchanged", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    after ->
      Server.getFile.restore()

    it "should not download the image again", ->
      Server.getFile.called.should.equal false

  describe "When an image is in the cache and the last modified date is advanced", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        @image_resource.modified = new Date(@last_modified + 3000)
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image again", ->
      Server.getFile.called.should.equal true

  describe "When an image is in the cache and the last modified date is further in the past", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        @image_resource.modified = new Date(@last_modified - 3000)
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    afterEach ->
      Server.getFile.restore()

    it "should not download the image again", ->
      Server.getFile.called.should.equal false

  describe "When an image is in the cache and the last modified date is not specified", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        delete @image_resource.modified
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image again", ->
      Server.getFile.called.should.equal true

  describe "After clearing the cache", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (error) =>
        throw error if error?
        Client.clearCache @project_id, (error, res, body) =>
          throw error if error?
          sinon.spy Server, "getFile"
          Client.compile @project_id, @request, (@error, @res, @body) =>
            done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image again", ->
      Server.getFile.called.should.equal true

@@ -1,38 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
expect = require("chai").expect
path = require("path")
fs = require("fs")
ClsiApp = require "./helpers/ClsiApp"

describe "Syncing", ->
  before (done) ->
    @request =
      resources: [
        path: "main.tex"
        content: fs.readFileSync(path.join(__dirname, "../fixtures/naugty_strings.txt"), "utf-8")
      ]
    @project_id = Client.randomId()
    ClsiApp.ensureRunning =>
      Client.compile @project_id, @request, (@error, @res, @body) => done()

  describe "wordcount file", ->
    it "should return wordcount info", (done) ->
      Client.wordcount @project_id, "main.tex", (error, result) ->
        throw error if error?
        expect(result).to.deep.equal(
          texcount: {
            encode: "utf8"
            textWords: 2281
            headWords: 2
            outside: 0
            headers: 2
            elements: 0
            mathInline: 6
            mathDisplay: 0
            errors: 0
            messages: ""
          }
        )
        done()

@@ -1,105 +0,0 @@
request = require "request"
fs = require "fs"
Settings = require "settings-sharelatex"

host = "localhost"

module.exports = Client =
  host: Settings.apis.clsi.url

  randomId: () ->
    Math.random().toString(16).slice(2)

  compile: (project_id, data, callback = (error, res, body) ->) ->
    request.post {
      url: "#{@host}/project/#{project_id}/compile"
      json:
        compile: data
    }, callback

  clearCache: (project_id, callback = (error, res, body) ->) ->
    request.del "#{@host}/project/#{project_id}", callback

  getOutputFile: (response, type) ->
    for file in response.compile.outputFiles
      if file.type == type and file.url.match("output.#{type}")
        return file
    return null

  runServer: (port, directory) ->
    express = require("express")
    app = express()
    app.use express.static(directory)
    console.log("starting test server on", port, host)
    app.listen(port, host).on "error", (error) ->
      console.error "error starting server:", error.message
      process.exit(1)

  syncFromCode: (project_id, file, line, column, callback = (error, pdfPositions) ->) ->
    request.get {
      url: "#{@host}/project/#{project_id}/sync/code"
      qs: {
        file: file
        line: line
        column: column
      }
    }, (error, response, body) ->
      return callback(error) if error?
      callback null, JSON.parse(body)

  syncFromPdf: (project_id, page, h, v, callback = (error, pdfPositions) ->) ->
    request.get {
      url: "#{@host}/project/#{project_id}/sync/pdf"
      qs: {
        page: page,
        h: h, v: v
      }
    }, (error, response, body) ->
      return callback(error) if error?
      callback null, JSON.parse(body)

  compileDirectory: (project_id, baseDirectory, directory, serverPort, callback = (error, res, body) ->) ->
    resources = []
    entities = fs.readdirSync("#{baseDirectory}/#{directory}")
    rootResourcePath = "main.tex"
    while (entities.length > 0)
      entity = entities.pop()
      stat = fs.statSync("#{baseDirectory}/#{directory}/#{entity}")
      if stat.isDirectory()
        entities = entities.concat fs.readdirSync("#{baseDirectory}/#{directory}/#{entity}").map (subEntity) ->
          if subEntity == "main.tex"
            rootResourcePath = "#{entity}/#{subEntity}"
          return "#{entity}/#{subEntity}"
      else if stat.isFile() and entity != "output.pdf"
        extension = entity.split(".").pop()
        if ["tex", "bib", "cls", "sty", "pdf_tex", "Rtex", "ist", "md", "Rmd"].indexOf(extension) > -1
          resources.push
            path: entity
            content: fs.readFileSync("#{baseDirectory}/#{directory}/#{entity}").toString()
        else if ["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1
          resources.push
            path: entity
            url: "http://#{host}:#{serverPort}/#{directory}/#{entity}"
            modified: stat.mtime

    fs.readFile "#{baseDirectory}/#{directory}/options.json", (error, body) =>
      req =
        resources: resources
        rootResourcePath: rootResourcePath

      if !error?
        body = JSON.parse body
        req.options = body

      @compile project_id, req, callback

  wordcount: (project_id, file, callback = (error, pdfPositions) ->) ->
    request.get {
      url: "#{@host}/project/#{project_id}/wordcount"
      qs: {
        file: file
      }
    }, (error, response, body) ->
      return callback(error) if error?
      callback null, JSON.parse(body)

@@ -1,24 +0,0 @@
app = require('../../../../app')
require("logger-sharelatex").logger.level("info")
logger = require("logger-sharelatex")
Settings = require("settings-sharelatex")

module.exports =
  running: false
  initing: false
  callbacks: []
  ensureRunning: (callback = (error) ->) ->
    if @running
      return callback()
    else if @initing
      @callbacks.push callback
    else
      @initing = true
      @callbacks.push callback
      app.listen Settings.internal?.clsi?.port, "localhost", (error) =>
        throw error if error?
        @running = true
        logger.log("clsi running in dev mode")

        for callback in @callbacks
          callback()

88
test/acceptance/js/BrokenLatexFileTests.js
Normal file
@@ -0,0 +1,88 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')

describe('Broken LaTeX file', function() {
  before(function(done) {
    this.broken_request = {
      resources: [
        {
          path: 'main.tex',
          content: `\
\\documentclass{articl % :(
\\begin{documen % :(
Broken
\\end{documen % :(\
`
        }
      ]
    }
    this.correct_request = {
      resources: [
        {
          path: 'main.tex',
          content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
        }
      ]
    }
    return ClsiApp.ensureRunning(done)
  })

  describe('on first run', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      return Client.compile(
        this.project_id,
        this.broken_request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          return done()
        }
      )
    })

    return it('should return a failure status', function() {
      return this.body.compile.status.should.equal('failure')
    })
  })

  return describe('on second run', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      return Client.compile(this.project_id, this.correct_request, () => {
        return Client.compile(
          this.project_id,
          this.broken_request,
          (error, res, body) => {
            this.error = error
            this.res = res
            this.body = body
            return done()
          }
        )
      })
    })

    return it('should return a failure status', function() {
      return this.body.compile.status.should.equal('failure')
    })
  })
})

73
test/acceptance/js/DeleteOldFilesTest.js
Normal file
@@ -0,0 +1,73 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')

describe('Deleting Old Files', function() {
  before(function(done) {
    this.request = {
      resources: [
        {
          path: 'main.tex',
          content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
        }
      ]
    }
    return ClsiApp.ensureRunning(done)
  })

  return describe('on first run', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          return done()
        }
      )
    })

    it('should return a success status', function() {
      return this.body.compile.status.should.equal('success')
    })

    return describe('after file has been deleted', function() {
      before(function(done) {
        this.request.resources = []
        return Client.compile(
          this.project_id,
          this.request,
          (error, res, body) => {
            this.error = error
            this.res = res
            this.body = body
            return done()
          }
        )
      })

      return it('should return a failure status', function() {
        return this.body.compile.status.should.equal('failure')
      })
    })
  })
})

280
test/acceptance/js/ExampleDocumentTests.js
Normal file
@@ -0,0 +1,280 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-path-concat,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const fs = require('fs')
const ChildProcess = require('child_process')
const ClsiApp = require('./helpers/ClsiApp')
const logger = require('logger-sharelatex')
const Path = require('path')
const fixturePath = path => Path.normalize(__dirname + '/../fixtures/' + path)
const process = require('process')
console.log(
  process.pid,
  process.ppid,
  process.getuid(),
  process.getgroups(),
  'PID'
)
try {
  console.log('creating tmp directory', fixturePath('tmp'))
  fs.mkdirSync(fixturePath('tmp'))
} catch (error) {
  const err = error
  console.log(err, fixturePath('tmp'), 'unable to create fixture tmp path')
}

const MOCHA_LATEX_TIMEOUT = 60 * 1000

const convertToPng = function(pdfPath, pngPath, callback) {
  if (callback == null) {
    callback = function(error) {}
  }
  const command = `convert ${fixturePath(pdfPath)} ${fixturePath(pngPath)}`
  console.log('COMMAND')
  console.log(command)
  const convert = ChildProcess.exec(command)
  const stdout = ''
  convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString()))
  convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
  return convert.on('exit', () => callback())
}

const compare = function(originalPath, generatedPath, callback) {
  if (callback == null) {
    callback = function(error, same) {}
  }
  const diff_file = `${fixturePath(generatedPath)}-diff.png`
  const proc = ChildProcess.exec(
    `compare -metric mae ${fixturePath(originalPath)} ${fixturePath(
      generatedPath
    )} ${diff_file}`
  )
  let stderr = ''
  proc.stderr.on('data', chunk => (stderr += chunk))
  return proc.on('exit', () => {
    if (stderr.trim() === '0 (0)') {
      // remove output diff if test matches expected image
      fs.unlink(diff_file, err => {
        if (err) {
          throw err
        }
      })
      return callback(null, true)
    } else {
      console.log('compare result', stderr)
      return callback(null, false)
    }
  })
}

const checkPdfInfo = function(pdfPath, callback) {
  if (callback == null) {
    callback = function(error, output) {}
  }
  const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
  let stdout = ''
  proc.stdout.on('data', chunk => (stdout += chunk))
  proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
  return proc.on('exit', () => {
    if (stdout.match(/Optimized:\s+yes/)) {
      return callback(null, true)
    } else {
      return callback(null, false)
    }
  })
}

|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
var compareNext = function(page_no, callback) {
|
||||
const path = `tmp/${project_id}-source-${page_no}.png`
|
||||
return fs.stat(fixturePath(path), (error, stat) => {
|
||||
if (error != null) {
|
||||
return callback()
|
||||
} else {
|
||||
return compare(
|
||||
`tmp/${project_id}-source-${page_no}.png`,
|
||||
`tmp/${project_id}-generated-${page_no}.png`,
|
||||
(error, same) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
same.should.equal(true)
|
||||
return compareNext(page_no + 1, callback)
|
||||
}
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
return compareNext(0, callback)
|
||||
}
|
||||
|
||||
const comparePdf = function(project_id, example_dir, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
console.log('CONVERT')
|
||||
console.log(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`)
|
||||
return convertToPng(
|
||||
`tmp/${project_id}.pdf`,
|
||||
`tmp/${project_id}-generated.png`,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return convertToPng(
|
||||
`examples/${example_dir}/output.pdf`,
|
||||
`tmp/${project_id}-source.png`,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return fs.stat(
|
||||
fixturePath(`tmp/${project_id}-source-0.png`),
|
||||
(error, stat) => {
|
||||
if (error != null) {
|
||||
return compare(
|
||||
`tmp/${project_id}-source.png`,
|
||||
`tmp/${project_id}-generated.png`,
|
||||
(error, same) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
same.should.equal(true)
|
||||
return callback()
|
||||
}
|
||||
)
|
||||
} else {
|
||||
return compareMultiplePages(project_id, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return callback()
|
||||
})
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
const downloadAndComparePdf = function(project_id, example_dir, url, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
const writeStream = fs.createWriteStream(fixturePath(`tmp/${project_id}.pdf`))
|
||||
request.get(url).pipe(writeStream)
|
||||
console.log('writing file out', fixturePath(`tmp/${project_id}.pdf`))
|
||||
return writeStream.on('close', () => {
|
||||
return checkPdfInfo(`tmp/${project_id}.pdf`, (error, optimised) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
optimised.should.equal(true)
|
||||
return comparePdf(project_id, example_dir, callback)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
Client.runServer(4242, fixturePath('examples'))
|
||||
|
||||
describe('Example Documents', function() {
|
||||
before(function(done) {
|
||||
return ChildProcess.exec('rm test/acceptance/fixtures/tmp/*').on(
|
||||
'exit',
|
||||
() => ClsiApp.ensureRunning(done)
|
||||
)
|
||||
})
|
||||
|
||||
return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir =>
|
||||
(example_dir =>
|
||||
describe(example_dir, function() {
|
||||
before(function() {
|
||||
return (this.project_id = Client.randomId() + '_' + example_dir)
|
||||
})
|
||||
|
||||
it('should generate the correct pdf', function(done) {
|
||||
this.timeout(MOCHA_LATEX_TIMEOUT)
|
||||
return Client.compileDirectory(
|
||||
this.project_id,
|
||||
fixturePath('examples'),
|
||||
example_dir,
|
||||
4242,
|
||||
(error, res, body) => {
|
||||
if (
|
||||
error ||
|
||||
__guard__(
|
||||
body != null ? body.compile : undefined,
|
||||
x => x.status
|
||||
) === 'failure'
|
||||
) {
|
||||
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
|
||||
}
|
||||
const pdf = Client.getOutputFile(body, 'pdf')
|
||||
return downloadAndComparePdf(
|
||||
this.project_id,
|
||||
example_dir,
|
||||
pdf.url,
|
||||
done
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should generate the correct pdf on the second run as well', function(done) {
|
||||
this.timeout(MOCHA_LATEX_TIMEOUT)
|
||||
return Client.compileDirectory(
|
||||
this.project_id,
|
||||
fixturePath('examples'),
|
||||
example_dir,
|
||||
4242,
|
||||
(error, res, body) => {
|
||||
if (
|
||||
error ||
|
||||
__guard__(
|
||||
body != null ? body.compile : undefined,
|
||||
x => x.status
|
||||
) === 'failure'
|
||||
) {
|
||||
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
|
||||
}
|
||||
const pdf = Client.getOutputFile(body, 'pdf')
|
||||
return downloadAndComparePdf(
|
||||
this.project_id,
|
||||
example_dir,
|
||||
pdf.url,
|
||||
done
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
}))(example_dir)
|
||||
)
|
||||
})
|
||||
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
||||
71
test/acceptance/js/SimpleLatexFileTests.js
Normal file
@@ -0,0 +1,71 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')

describe('Simple LaTeX file', function() {
  before(function(done) {
    this.project_id = Client.randomId()
    this.request = {
      resources: [
        {
          path: 'main.tex',
          content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
        }
      ]
    }
    return ClsiApp.ensureRunning(() => {
      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          return done()
        }
      )
    })
  })

  it('should return the PDF', function() {
    const pdf = Client.getOutputFile(this.body, 'pdf')
    return pdf.type.should.equal('pdf')
  })

  it('should return the log', function() {
    const log = Client.getOutputFile(this.body, 'log')
    return log.type.should.equal('log')
  })

  it('should provide the pdf for download', function(done) {
    const pdf = Client.getOutputFile(this.body, 'pdf')
    return request.get(pdf.url, (error, res, body) => {
      res.statusCode.should.equal(200)
      return done()
    })
  })

  return it('should provide the log for download', function(done) {
    const log = Client.getOutputFile(this.body, 'log')
    return request.get(log.url, (error, res, body) => {
      res.statusCode.should.equal(200)
      return done()
    })
  })
})

91
test/acceptance/js/SynctexTests.js
Normal file
@@ -0,0 +1,91 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const { expect } = require('chai')
const ClsiApp = require('./helpers/ClsiApp')
const crypto = require('crypto')

describe('Syncing', function() {
  before(function(done) {
    const content = `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
    this.request = {
      resources: [
        {
          path: 'main.tex',
          content
        }
      ]
    }
    this.project_id = Client.randomId()
    return ClsiApp.ensureRunning(() => {
      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          return done()
        }
      )
    })
  })

  describe('from code to pdf', function() {
    return it('should return the correct location', function(done) {
      return Client.syncFromCode(
        this.project_id,
        'main.tex',
        3,
        5,
        (error, pdfPositions) => {
          if (error != null) {
            throw error
          }
          expect(pdfPositions).to.deep.equal({
            pdf: [
              { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 }
            ]
          })
          return done()
        }
      )
    })
  })

  return describe('from pdf to code', function() {
    return it('should return the correct location', function(done) {
      return Client.syncFromPdf(
        this.project_id,
        1,
        100,
        200,
        (error, codePositions) => {
          if (error != null) {
            throw error
          }
          expect(codePositions).to.deep.equal({
            code: [{ file: 'main.tex', line: 3, column: -1 }]
          })
          return done()
        }
      )
    })
  })
})

62
test/acceptance/js/TimeoutTests.js
Normal file
@@ -0,0 +1,62 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')

describe('Timed out compile', function() {
  before(function(done) {
    this.request = {
      options: {
        timeout: 10
      }, // seconds
      resources: [
        {
          path: 'main.tex',
          content: `\
\\documentclass{article}
\\begin{document}
\\def\\x{Hello!\\par\\x}
\\x
\\end{document}\
`
        }
      ]
    }
    this.project_id = Client.randomId()
    return ClsiApp.ensureRunning(() => {
      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          return done()
        }
      )
    })
  })

  it('should return a timeout error', function() {
    return this.body.compile.error.should.equal('container timed out')
  })

  it('should return a timedout status', function() {
    return this.body.compile.status.should.equal('timedout')
  })

  return it('should return the log output file name', function() {
    const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
    return outputFilePaths.should.include('output.log')
  })
})

373
test/acceptance/js/UrlCachingTests.js
Normal file
@@ -0,0 +1,373 @@
/* eslint-disable
    no-path-concat,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const sinon = require('sinon')
const ClsiApp = require('./helpers/ClsiApp')

const host = 'localhost'

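// Minimal static server for the test fixtures; getFile() is a no-op hook
// that the tests below wrap with sinon.spy to observe download requests.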
const Server = {
  run() {
    const express = require('express')
    const app = express()

    const staticServer = express.static(__dirname + '/../fixtures/')
    app.get('/:random_id/*', (req, res, next) => {
      this.getFile(req.url)
      req.url = `/${req.params[0]}`
      return staticServer(req, res, next)
    })

    return app.listen(31415, host)
  },

  getFile() {},

  randomId() {
    return Math.random()
      .toString(16)
      .slice(2)
  }
}

Server.run()

describe('Url Caching', function() {
  describe('Downloading an image for the first time', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      this.file = `${Server.randomId()}/lion.png`
      this.request = {
        resources: [
          {
            path: 'main.tex',
            content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
          },
          {
            path: 'lion.png',
            url: `http://${host}:31415/${this.file}`
          }
        ]
      }

      sinon.spy(Server, 'getFile')
      return ClsiApp.ensureRunning(() => {
        return Client.compile(
          this.project_id,
          this.request,
          (error, res, body) => {
            this.error = error
            this.res = res
            this.body = body
            return done()
          }
        )
      })
    })

    afterEach(function() {
      return Server.getFile.restore()
    })

    return it('should download the image', function() {
      return Server.getFile.calledWith(`/${this.file}`).should.equal(true)
    })
  })

  describe('When an image is in the cache and the last modified date is unchanged', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      this.file = `${Server.randomId()}/lion.png`
      this.request = {
        resources: [
          {
            path: 'main.tex',
            content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
          },
          (this.image_resource = {
            path: 'lion.png',
            url: `http://${host}:31415/${this.file}`,
            modified: Date.now()
          })
        ]
      }

      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          sinon.spy(Server, 'getFile')
          return Client.compile(
            this.project_id,
            this.request,
            (error1, res1, body1) => {
              this.error = error1
              this.res = res1
              this.body = body1
              return done()
            }
          )
        }
      )
    })

    after(function() {
      return Server.getFile.restore()
    })

    return it('should not download the image again', function() {
      return Server.getFile.called.should.equal(false)
    })
  })

  describe('When an image is in the cache and the last modified date is advanced', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      this.file = `${Server.randomId()}/lion.png`
      this.request = {
        resources: [
          {
            path: 'main.tex',
            content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
          },
          (this.image_resource = {
            path: 'lion.png',
            url: `http://${host}:31415/${this.file}`,
            modified: (this.last_modified = Date.now())
          })
        ]
      }

      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          sinon.spy(Server, 'getFile')
          this.image_resource.modified = new Date(this.last_modified + 3000)
          return Client.compile(
            this.project_id,
            this.request,
            (error1, res1, body1) => {
              this.error = error1
              this.res = res1
              this.body = body1
              return done()
            }
          )
        }
      )
    })

    afterEach(function() {
      return Server.getFile.restore()
    })

    return it('should download the image again', function() {
      return Server.getFile.called.should.equal(true)
    })
  })

  describe('When an image is in the cache and the last modified date is further in the past', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      this.file = `${Server.randomId()}/lion.png`
      this.request = {
        resources: [
          {
            path: 'main.tex',
            content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
          },
          (this.image_resource = {
            path: 'lion.png',
            url: `http://${host}:31415/${this.file}`,
            modified: (this.last_modified = Date.now())
          })
        ]
      }

      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          sinon.spy(Server, 'getFile')
          this.image_resource.modified = new Date(this.last_modified - 3000)
          return Client.compile(
            this.project_id,
            this.request,
            (error1, res1, body1) => {
              this.error = error1
              this.res = res1
              this.body = body1
              return done()
            }
          )
        }
      )
    })

    afterEach(function() {
      return Server.getFile.restore()
    })

    return it('should not download the image again', function() {
      return Server.getFile.called.should.equal(false)
    })
  })

  describe('When an image is in the cache and the last modified date is not specified', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      this.file = `${Server.randomId()}/lion.png`
      this.request = {
        resources: [
          {
            path: 'main.tex',
            content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
          },
          (this.image_resource = {
            path: 'lion.png',
            url: `http://${host}:31415/${this.file}`,
            modified: (this.last_modified = Date.now())
          })
        ]
      }

      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          sinon.spy(Server, 'getFile')
          delete this.image_resource.modified
          return Client.compile(
            this.project_id,
            this.request,
            (error1, res1, body1) => {
              this.error = error1
              this.res = res1
              this.body = body1
              return done()
            }
          )
        }
      )
    })

    afterEach(function() {
      return Server.getFile.restore()
    })

    return it('should download the image again', function() {
      return Server.getFile.called.should.equal(true)
    })
  })

  return describe('After clearing the cache', function() {
    before(function(done) {
      this.project_id = Client.randomId()
      this.file = `${Server.randomId()}/lion.png`
      this.request = {
        resources: [
          {
            path: 'main.tex',
            content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
          },
          (this.image_resource = {
            path: 'lion.png',
            url: `http://${host}:31415/${this.file}`,
            modified: (this.last_modified = Date.now())
          })
        ]
      }

      return Client.compile(this.project_id, this.request, error => {
        if (error != null) {
          throw error
        }
        return Client.clearCache(this.project_id, (error, res, body) => {
          if (error != null) {
            throw error
          }
          sinon.spy(Server, 'getFile')
          return Client.compile(
            this.project_id,
            this.request,
            (error1, res1, body1) => {
              this.error = error1
              this.res = res1
              this.body = body1
              return done()
            }
          )
        })
      })
    })

    afterEach(function() {
      return Server.getFile.restore()
    })

    return it('should download the image again', function() {
      return Server.getFile.called.should.equal(true)
    })
  })
})

72
test/acceptance/js/WordcountTests.js
Normal file
@@ -0,0 +1,72 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const { expect } = require('chai')
const path = require('path')
const fs = require('fs')
const ClsiApp = require('./helpers/ClsiApp')

describe('Syncing', function() {
  before(function(done) {
    this.request = {
      resources: [
        {
          path: 'main.tex',
          content: fs.readFileSync(
            path.join(__dirname, '../fixtures/naugty_strings.txt'),
            'utf-8'
          )
        }
      ]
    }
    this.project_id = Client.randomId()
    return ClsiApp.ensureRunning(() => {
      return Client.compile(
        this.project_id,
        this.request,
        (error, res, body) => {
          this.error = error
          this.res = res
          this.body = body
          return done()
        }
      )
    })
  })

  return describe('wordcount file', function() {
    return it('should return wordcount info', function(done) {
      return Client.wordcount(this.project_id, 'main.tex', (error, result) => {
        if (error != null) {
          throw error
        }
        expect(result).to.deep.equal({
          texcount: {
            encode: 'utf8',
            textWords: 2281,
            headWords: 2,
            outside: 0,
            headers: 2,
            elements: 0,
            mathInline: 6,
            mathDisplay: 0,
            errors: 0,
            messages: ''
          }
        })
        return done()
      })
    })
  })
})

208
test/acceptance/js/helpers/Client.js
Normal file
@@ -0,0 +1,208 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let Client
const request = require('request')
const fs = require('fs')
const Settings = require('settings-sharelatex')

const host = 'localhost'

module.exports = Client = {
  host: Settings.apis.clsi.url,

  randomId() {
    return Math.random()
      .toString(16)
      .slice(2)
  },

  compile(project_id, data, callback) {
    if (callback == null) {
      callback = function(error, res, body) {}
    }
    return request.post(
      {
        url: `${this.host}/project/${project_id}/compile`,
        json: {
          compile: data
        }
      },
      callback
    )
  },

  clearCache(project_id, callback) {
    if (callback == null) {
      callback = function(error, res, body) {}
    }
    return request.del(`${this.host}/project/${project_id}`, callback)
  },

  getOutputFile(response, type) {
    for (const file of Array.from(response.compile.outputFiles)) {
      if (file.type === type && file.url.match(`output.${type}`)) {
        return file
      }
    }
    return null
  },

  runServer(port, directory) {
    const express = require('express')
    const app = express()
    app.use(express.static(directory))
    console.log('starting test server on', port, host)
    return app.listen(port, host).on('error', error => {
      console.error('error starting server:', error.message)
      return process.exit(1)
    })
  },

  syncFromCode(project_id, file, line, column, callback) {
    if (callback == null) {
      callback = function(error, pdfPositions) {}
    }
    return request.get(
      {
        url: `${this.host}/project/${project_id}/sync/code`,
        qs: {
          file,
          line,
          column
        }
      },
      (error, response, body) => {
        if (error != null) {
          return callback(error)
        }
        return callback(null, JSON.parse(body))
      }
    )
  },

  syncFromPdf(project_id, page, h, v, callback) {
    if (callback == null) {
      callback = function(error, pdfPositions) {}
    }
    return request.get(
      {
        url: `${this.host}/project/${project_id}/sync/pdf`,
        qs: {
          page,
          h,
          v
        }
      },
      (error, response, body) => {
        if (error != null) {
          return callback(error)
        }
        return callback(null, JSON.parse(body))
      }
    )
  },

  compileDirectory(project_id, baseDirectory, directory, serverPort, callback) {
    if (callback == null) {
      callback = function(error, res, body) {}
    }
    const resources = []
    let entities = fs.readdirSync(`${baseDirectory}/${directory}`)
    let rootResourcePath = 'main.tex'
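    // Walk one level of the example directory: text files are inlined as
    // content resources, images are referenced by URL from the test server.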
    while (entities.length > 0) {
      var entity = entities.pop()
      const stat = fs.statSync(`${baseDirectory}/${directory}/${entity}`)
      if (stat.isDirectory()) {
        entities = entities.concat(
          fs
            .readdirSync(`${baseDirectory}/${directory}/${entity}`)
            .map(subEntity => {
              if (subEntity === 'main.tex') {
                rootResourcePath = `${entity}/${subEntity}`
              }
              return `${entity}/${subEntity}`
            })
        )
      } else if (stat.isFile() && entity !== 'output.pdf') {
        const extension = entity.split('.').pop()
        if (
          [
            'tex',
            'bib',
            'cls',
            'sty',
            'pdf_tex',
            'Rtex',
            'ist',
            'md',
            'Rmd'
          ].indexOf(extension) > -1
        ) {
          resources.push({
            path: entity,
            content: fs
              .readFileSync(`${baseDirectory}/${directory}/${entity}`)
              .toString()
          })
        } else if (
          ['eps', 'ttf', 'png', 'jpg', 'pdf', 'jpeg'].indexOf(extension) > -1
        ) {
          resources.push({
            path: entity,
            url: `http://${host}:${serverPort}/${directory}/${entity}`,
            modified: stat.mtime
          })
        }
      }
    }

    return fs.readFile(
      `${baseDirectory}/${directory}/options.json`,
      (error, body) => {
        const req = {
          resources,
          rootResourcePath
        }

        if (error == null) {
          body = JSON.parse(body)
          req.options = body
        }

        return this.compile(project_id, req, callback)
      }
    )
  },

  wordcount(project_id, file, callback) {
    if (callback == null) {
      callback = function(error, pdfPositions) {}
    }
    return request.get(
      {
        url: `${this.host}/project/${project_id}/wordcount`,
        qs: {
          file
        }
      },
      (error, response, body) => {
        if (error != null) {
          return callback(error)
        }
        return callback(null, JSON.parse(body))
      }
    )
  }
}

64
test/acceptance/js/helpers/ClsiApp.js
Normal file
@@ -0,0 +1,64 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const app = require('../../../../app')
require('logger-sharelatex').logger.level('info')
const logger = require('logger-sharelatex')
const Settings = require('settings-sharelatex')

module.exports = {
  running: false,
  initing: false,
  callbacks: [],
  ensureRunning(callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    if (this.running) {
      return callback()
    } else if (this.initing) {
      return this.callbacks.push(callback)
    } else {
      this.initing = true
      this.callbacks.push(callback)
      return app.listen(
        __guard__(
          Settings.internal != null ? Settings.internal.clsi : undefined,
          x => x.port
        ),
        'localhost',
        error => {
          if (error != null) {
            throw error
          }
          this.running = true
          logger.log('clsi running in dev mode')

          return (() => {
            const result = []
            for (callback of Array.from(this.callbacks)) {
              result.push(callback())
            }
            return result
          })()
        }
      )
    }
  }
}
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}

@@ -1,217 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/CompileController'
tk = require("timekeeper")

describe "CompileController", ->
  beforeEach ->
    @CompileController = SandboxedModule.require modulePath, requires:
      "./CompileManager": @CompileManager = {}
      "./RequestParser": @RequestParser = {}
      "settings-sharelatex": @Settings =
        apis:
          clsi:
            url: "http://clsi.example.com"
      "./ProjectPersistenceManager": @ProjectPersistenceManager = {}
      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err: sinon.stub(), warn: sinon.stub() }
    @Settings.externalUrl = "http://www.example.com"
    @req = {}
    @res = {}
    @next = sinon.stub()

  describe "compile", ->
    beforeEach ->
      @req.body = {
        compile: "mock-body"
      }
      @req.params =
        project_id: @project_id = "project-id-123"
      @request = {
        compile: "mock-parsed-request"
      }
      @request_with_project_id =
        compile: @request.compile
        project_id: @project_id
      @output_files = [{
        path: "output.pdf"
        type: "pdf"
        build: 1234
      }, {
        path: "output.log"
        type: "log"
        build: 1234
      }]
      @RequestParser.parse = sinon.stub().callsArgWith(1, null, @request)
      @ProjectPersistenceManager.markProjectAsJustAccessed = sinon.stub().callsArg(1)
      @res.status = sinon.stub().returnsThis()
      @res.send = sinon.stub()

    describe "successfully", ->
      beforeEach ->
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, @output_files)
        @CompileController.compile @req, @res

      it "should parse the request", ->
        @RequestParser.parse
          .calledWith(@req.body)
          .should.equal true

      it "should run the compile for the specified project", ->
        @CompileManager.doCompileWithLock
          .calledWith(@request_with_project_id)
          .should.equal true

      it "should mark the project as accessed", ->
        @ProjectPersistenceManager.markProjectAsJustAccessed
          .calledWith(@project_id)
          .should.equal true

      it "should return the JSON response", ->
        @res.status.calledWith(200).should.equal true
        @res.send
          .calledWith(
            compile:
              status: "success"
              error: null
              outputFiles: @output_files.map (file) =>
                url: "#{@Settings.apis.clsi.url}/project/#{@project_id}/build/#{file.build}/output/#{file.path}"
                path: file.path
                type: file.type
                build: file.build
          )
          .should.equal true

    describe "with an error", ->
      beforeEach ->
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, new Error(@message = "error message"), null)
        @CompileController.compile @req, @res

      it "should return the JSON response with the error", ->
        @res.status.calledWith(500).should.equal true
        @res.send
          .calledWith(
            compile:
              status: "error"
              error: @message
              outputFiles: []
          )
          .should.equal true

    describe "when the request times out", ->
      beforeEach ->
        @error = new Error(@message = "container timed out")
        @error.timedout = true
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, @error, null)
        @CompileController.compile @req, @res

      it "should return the JSON response with the timeout status", ->
        @res.status.calledWith(200).should.equal true
        @res.send
          .calledWith(
            compile:
              status: "timedout"
              error: @message
              outputFiles: []
          )
          .should.equal true

    describe "when the request returns no output files", ->
      beforeEach ->
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, [])
        @CompileController.compile @req, @res

      it "should return the JSON response with the failure status", ->
        @res.status.calledWith(200).should.equal true
        @res.send
          .calledWith(
            compile:
              error: null
              status: "failure"
              outputFiles: []
          )
          .should.equal true

  describe "syncFromCode", ->
    beforeEach ->
      @file = "main.tex"
      @line = 42
      @column = 5
      @project_id = "mock-project-id"
      @req.params =
        project_id: @project_id
      @req.query =
        file: @file
        line: @line.toString()
        column: @column.toString()
      @res.json = sinon.stub()

      @CompileManager.syncFromCode = sinon.stub().callsArgWith(5, null, @pdfPositions = ["mock-positions"])
      @CompileController.syncFromCode @req, @res, @next

    it "should find the corresponding location in the PDF", ->
      @CompileManager.syncFromCode
        .calledWith(@project_id, undefined, @file, @line, @column)
        .should.equal true

    it "should return the positions", ->
      @res.json
        .calledWith(
          pdf: @pdfPositions
        )
        .should.equal true

  describe "syncFromPdf", ->
    beforeEach ->
      @page = 5
      @h = 100.23
      @v = 45.67
      @project_id = "mock-project-id"
      @req.params =
        project_id: @project_id
      @req.query =
        page: @page.toString()
        h: @h.toString()
        v: @v.toString()
      @res.json = sinon.stub()

      @CompileManager.syncFromPdf = sinon.stub().callsArgWith(5, null, @codePositions = ["mock-positions"])
      @CompileController.syncFromPdf @req, @res, @next

    it "should find the corresponding location in the code", ->
      @CompileManager.syncFromPdf
        .calledWith(@project_id, undefined, @page, @h, @v)
        .should.equal true

    it "should return the positions", ->
      @res.json
        .calledWith(
          code: @codePositions
        )
        .should.equal true

  describe "wordcount", ->
    beforeEach ->
      @file = "main.tex"
      @project_id = "mock-project-id"
      @req.params =
        project_id: @project_id
      @req.query =
        file: @file
        image: @image = "example.com/image"
      @res.json = sinon.stub()

      @CompileManager.wordcount = sinon.stub().callsArgWith(4, null, @texcount = ["mock-texcount"])
      @CompileController.wordcount @req, @res, @next

    it "should return the word count of a file", ->
      @CompileManager.wordcount
        .calledWith(@project_id, undefined, @file, @image)
        .should.equal true

    it "should return the texcount info", ->
      @res.json
        .calledWith(
          texcount: @texcount
        )
        .should.equal true
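Throughout the deleted suite, node-style callbacks are driven synchronously with sinon's `callsArgWith(index, ...args)`, which invokes the stub's argument at position `index` with the supplied error/result pair. A minimal sketch of the pattern (the method name and arguments are hypothetical):

const sinon = require('sinon')

// Stub a manager method whose second argument (index 1) is callback(error, outputFiles)
const doCompileWithLock = sinon.stub().callsArgWith(1, null, ['output.pdf'])

// The stub fires the callback synchronously with (null, ['output.pdf'])
doCompileWithLock({ project_id: 'project-id-123' }, (error, outputFiles) => {
  console.log(error, outputFiles) // => null [ 'output.pdf' ]
})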
@@ -1,356 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/CompileManager'
tk = require("timekeeper")
EventEmitter = require("events").EventEmitter
Path = require "path"

describe "CompileManager", ->
  beforeEach ->
    @CompileManager = SandboxedModule.require modulePath, requires:
      "./LatexRunner": @LatexRunner = {}
      "./ResourceWriter": @ResourceWriter = {}
      "./OutputFileFinder": @OutputFileFinder = {}
      "./OutputCacheManager": @OutputCacheManager = {}
      "settings-sharelatex": @Settings =
        path:
          compilesDir: "/compiles/dir"
          synctexBaseDir: -> "/compile"
        clsi:
          docker:
            image: "SOMEIMAGE"

      "logger-sharelatex": @logger = { log: sinon.stub(), info: -> }
      "child_process": @child_process = {}
      "./CommandRunner": @CommandRunner = {}
      "./DraftModeManager": @DraftModeManager = {}
      "./TikzManager": @TikzManager = {}
      "./LockManager": @LockManager = {}
      "fs": @fs = {}
      "fs-extra": @fse = { ensureDir: sinon.stub().callsArg(1) }
    @callback = sinon.stub()
    @project_id = "project-id-123"
    @user_id = "1234"

  describe "doCompileWithLock", ->
    beforeEach ->
      @request =
        resources: @resources = "mock-resources"
        project_id: @project_id
        user_id: @user_id
      @output_files = ["foo", "bar"]
      @Settings.compileDir = "compiles"
      @compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
      @CompileManager.doCompile = sinon.stub().callsArgWith(1, null, @output_files)
      @LockManager.runWithLock = (lockFile, runner, callback) ->
        runner (err, result...) ->
          callback(err, result...)

    describe "when the project is not locked", ->
      beforeEach ->
        @CompileManager.doCompileWithLock @request, @callback

      it "should ensure that the compile directory exists", ->
        @fse.ensureDir.calledWith(@compileDir)
          .should.equal true

      it "should call doCompile with the request", ->
        @CompileManager.doCompile
          .calledWith(@request)
          .should.equal true

      it "should call the callback with the output files", ->
        @callback.calledWithExactly(null, @output_files)
          .should.equal true

    describe "when the project is locked", ->
      beforeEach ->
        @error = new Error("locked")
        @LockManager.runWithLock = (lockFile, runner, callback) =>
          callback(@error)
        @CompileManager.doCompileWithLock @request, @callback

      it "should ensure that the compile directory exists", ->
        @fse.ensureDir.calledWith(@compileDir)
          .should.equal true

      it "should not call doCompile with the request", ->
        @CompileManager.doCompile
          .called.should.equal false

      it "should call the callback with the error", ->
        @callback.calledWithExactly(@error)
          .should.equal true

  describe "doCompile", ->
    beforeEach ->
      @output_files = [{
        path: "output.log"
        type: "log"
      }, {
        path: "output.pdf"
        type: "pdf"
      }]
      @build_files = [{
        path: "output.log"
        type: "log"
        build: 1234
      }, {
        path: "output.pdf"
        type: "pdf"
        build: 1234
      }]
      @request =
        resources: @resources = "mock-resources"
        rootResourcePath: @rootResourcePath = "main.tex"
        project_id: @project_id
        user_id: @user_id
        compiler: @compiler = "pdflatex"
        timeout: @timeout = 42000
        imageName: @image = "example.com/image"
        flags: @flags = ["-file-line-error"]
      @env = {}
      @Settings.compileDir = "compiles"
      @compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
      @ResourceWriter.syncResourcesToDisk = sinon.stub().callsArgWith(2, null, @resources)
      @LatexRunner.runLatex = sinon.stub().callsArg(2)
      @OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files)
      @OutputCacheManager.saveOutputFiles = sinon.stub().callsArgWith(2, null, @build_files)
      @DraftModeManager.injectDraftMode = sinon.stub().callsArg(1)
      @TikzManager.checkMainFile = sinon.stub().callsArg(3, false)

    describe "normally", ->
      beforeEach ->
        @CompileManager.doCompile @request, @callback

      it "should write the resources to disk", ->
        @ResourceWriter.syncResourcesToDisk
          .calledWith(@request, @compileDir)
          .should.equal true

      it "should run LaTeX", ->
        @LatexRunner.runLatex
          .calledWith("#{@project_id}-#{@user_id}", {
            directory: @compileDir
            mainFile: @rootResourcePath
            compiler: @compiler
            timeout: @timeout
            image: @image
            flags: @flags
            environment: @env
          })
          .should.equal true

      it "should find the output files", ->
        @OutputFileFinder.findOutputFiles
          .calledWith(@resources, @compileDir)
          .should.equal true

      it "should return the output files", ->
        @callback.calledWith(null, @build_files).should.equal true

      it "should not inject draft mode by default", ->
        @DraftModeManager.injectDraftMode.called.should.equal false

    describe "with draft mode", ->
      beforeEach ->
        @request.draft = true
        @CompileManager.doCompile @request, @callback

      it "should inject the draft mode header", ->
        @DraftModeManager.injectDraftMode
          .calledWith(@compileDir + "/" + @rootResourcePath)
          .should.equal true

    describe "with a check option", ->
      beforeEach ->
        @request.check = "error"
        @CompileManager.doCompile @request, @callback

      it "should run chktex", ->
        @LatexRunner.runLatex
          .calledWith("#{@project_id}-#{@user_id}", {
            directory: @compileDir
            mainFile: @rootResourcePath
            compiler: @compiler
            timeout: @timeout
            image: @image
            flags: @flags
            environment: { 'CHKTEX_OPTIONS': '-nall -e9 -e10 -w15 -w16', 'CHKTEX_EXIT_ON_ERROR': 1, 'CHKTEX_ULIMIT_OPTIONS': '-t 5 -v 64000' }
          })
          .should.equal true

    describe "with a knitr file and check options", ->
      beforeEach ->
        @request.rootResourcePath = "main.Rtex"
        @request.check = "error"
        @CompileManager.doCompile @request, @callback

      it "should not run chktex", ->
        @LatexRunner.runLatex
          .calledWith("#{@project_id}-#{@user_id}", {
            directory: @compileDir
            mainFile: "main.Rtex"
            compiler: @compiler
            timeout: @timeout
            image: @image
            flags: @flags
            environment: @env
          })
          .should.equal true

  describe "clearProject", ->
    describe "successfully", ->
      beforeEach ->
        @Settings.compileDir = "compiles"
        @fs.lstat = sinon.stub().callsArgWith(1, null, { isDirectory: () -> true })
        @proc = new EventEmitter()
        @proc.stdout = new EventEmitter()
        @proc.stderr = new EventEmitter()
        @child_process.spawn = sinon.stub().returns(@proc)
        @CompileManager.clearProject @project_id, @user_id, @callback
        @proc.emit "close", 0

      it "should remove the project directory", ->
        @child_process.spawn
          .calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"])
          .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

    describe "with a non-success status code", ->
      beforeEach ->
        @Settings.compileDir = "compiles"
        @fs.lstat = sinon.stub().callsArgWith(1, null, { isDirectory: () -> true })
        @proc = new EventEmitter()
        @proc.stdout = new EventEmitter()
        @proc.stderr = new EventEmitter()
        @child_process.spawn = sinon.stub().returns(@proc)
        @CompileManager.clearProject @project_id, @user_id, @callback
        @proc.stderr.emit "data", @error = "oops"
        @proc.emit "close", 1

      it "should remove the project directory", ->
        @child_process.spawn
          .calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"])
          .should.equal true

      it "should call the callback with an error from the stderr", ->
        @callback
          .calledWith(new Error())
          .should.equal true

        @callback.args[0][0].message.should.equal "rm -r #{@Settings.path.compilesDir}/#{@project_id}-#{@user_id} failed: #{@error}"

  describe "syncing", ->
    beforeEach ->
      @page = 1
      @h = 42.23
      @v = 87.56
      @width = 100.01
      @height = 234.56
      @line = 5
      @column = 3
      @file_name = "main.tex"
      @child_process.execFile = sinon.stub()
      @Settings.path.synctexBaseDir = (project_id) => "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"

    describe "syncFromCode", ->
      beforeEach ->
        @fs.stat = sinon.stub().callsArgWith(1, null, { isFile: () -> true })
        @stdout = "NODE\t#{@page}\t#{@h}\t#{@v}\t#{@width}\t#{@height}\n"
        @CommandRunner.run = sinon.stub().callsArgWith(6, null, { stdout: @stdout })
        @CompileManager.syncFromCode @project_id, @user_id, @file_name, @line, @column, @callback

      it "should execute the synctex binary", ->
        bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
        synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
        file_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}"
        @CommandRunner.run
          .calledWith(
            "#{@project_id}-#{@user_id}",
            ['/opt/synctex', 'code', synctex_path, file_path, @line, @column],
            "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}",
            @Settings.clsi.docker.image,
            60000,
            {}
          ).should.equal true

      it "should call the callback with the parsed output", ->
        @callback
          .calledWith(null, [{
            page: @page
            h: @h
            v: @v
            height: @height
            width: @width
          }])
          .should.equal true

    describe "syncFromPdf", ->
      beforeEach ->
        @fs.stat = sinon.stub().callsArgWith(1, null, { isFile: () -> true })
        @stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}\t#{@line}\t#{@column}\n"
        @CommandRunner.run = sinon.stub().callsArgWith(6, null, { stdout: @stdout })
        @CompileManager.syncFromPdf @project_id, @user_id, @page, @h, @v, @callback

      it "should execute the synctex binary", ->
        bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
        synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
        @CommandRunner.run
          .calledWith(
            "#{@project_id}-#{@user_id}",
            ['/opt/synctex', "pdf", synctex_path, @page, @h, @v],
            "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}",
            @Settings.clsi.docker.image,
            60000,
            {}).should.equal true

      it "should call the callback with the parsed output", ->
        @callback
          .calledWith(null, [{
            file: @file_name
            line: @line
            column: @column
          }])
          .should.equal true

  describe "wordcount", ->
    beforeEach ->
      @CommandRunner.run = sinon.stub().callsArg(6)
      @fs.readFile = sinon.stub().callsArgWith(2, null, @stdout = "Encoding: ascii\nWords in text: 2")
      @callback = sinon.stub()

      @project_id
      @timeout = 60 * 1000
      @file_name = "main.tex"
      @Settings.path.compilesDir = "/local/compile/directory"
      @image = "example.com/image"

      @CompileManager.wordcount @project_id, @user_id, @file_name, @image, @callback

    it "should run the texcount command", ->
      @directory = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
      @file_path = "$COMPILE_DIR/#{@file_name}"
      @command = ["texcount", "-nocol", "-inc", @file_path, "-out=" + @file_path + ".wc"]

      @CommandRunner.run
        .calledWith("#{@project_id}-#{@user_id}", @command, @directory, @image, @timeout, {})
        .should.equal true

    it "should call the callback with the parsed output", ->
      @callback
        .calledWith(null, {
          encode: "ascii"
          textWords: 2
          headWords: 0
          outside: 0
          headers: 0
          elements: 0
          mathInline: 0
          mathDisplay: 0
          errors: 0
          messages: ""
        })
        .should.equal true
@@ -1,55 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/ContentTypeMapper'

describe 'ContentTypeMapper', ->

  beforeEach ->
    @ContentTypeMapper = SandboxedModule.require modulePath

  describe 'map', ->

    it 'should map .txt to text/plain', ->
      content_type = @ContentTypeMapper.map('example.txt')
      content_type.should.equal 'text/plain'

    it 'should map .csv to text/csv', ->
      content_type = @ContentTypeMapper.map('example.csv')
      content_type.should.equal 'text/csv'

    it 'should map .pdf to application/pdf', ->
      content_type = @ContentTypeMapper.map('example.pdf')
      content_type.should.equal 'application/pdf'

    it 'should fall back to octet-stream', ->
      content_type = @ContentTypeMapper.map('example.unknown')
      content_type.should.equal 'application/octet-stream'

    describe 'coercing web files to plain text', ->

      it 'should map .js to plain text', ->
        content_type = @ContentTypeMapper.map('example.js')
        content_type.should.equal 'text/plain'

      it 'should map .html to plain text', ->
        content_type = @ContentTypeMapper.map('example.html')
        content_type.should.equal 'text/plain'

      it 'should map .css to plain text', ->
        content_type = @ContentTypeMapper.map('example.css')
        content_type.should.equal 'text/plain'

    describe 'image files', ->

      it 'should map .png to image/png', ->
        content_type = @ContentTypeMapper.map('example.png')
        content_type.should.equal 'image/png'

      it 'should map .jpeg to image/jpeg', ->
        content_type = @ContentTypeMapper.map('example.jpeg')
        content_type.should.equal 'image/jpeg'

      it 'should map .svg to text/plain to protect against XSS (SVG can execute JS)', ->
        content_type = @ContentTypeMapper.map('example.svg')
        content_type.should.equal 'text/plain'
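These assertions pin down the mapper's whole contract: known document and image extensions map to their natural types, anything unrecognised falls back to `application/octet-stream`, and browser-executable formats (`.js`, `.html`, `.css`, `.svg`) are deliberately coerced to `text/plain` so a served user upload can never run as script. A sketch consistent with the tests (not the module's actual implementation):

// Sketch of the mapping the tests above describe; the real module may differ
const TYPES = {
  txt: 'text/plain',
  csv: 'text/csv',
  pdf: 'application/pdf',
  png: 'image/png',
  jpeg: 'image/jpeg',
  // web formats served as plain text to protect against XSS
  js: 'text/plain',
  html: 'text/plain',
  css: 'text/plain',
  svg: 'text/plain'
}

function map(path) {
  const extension = path.split('.').pop()
  return TYPES[extension] || 'application/octet-stream'
}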
@@ -1,145 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
require "coffee-script"
modulePath = require('path').join __dirname, '../../../app/coffee/DockerLockManager'

describe "LockManager", ->
  beforeEach ->
    @LockManager = SandboxedModule.require modulePath, requires:
      "settings-sharelatex": @Settings =
        clsi: docker: {}
      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }

  describe "runWithLock", ->
    describe "with a single lock", ->
      beforeEach (done) ->
        @callback = sinon.stub()
        @LockManager.runWithLock "lock-one", (releaseLock) ->
          setTimeout () ->
            releaseLock(null, "hello", "world")
          , 100
        , (err, args...) =>
          @callback(err, args...)
          done()

      it "should call the callback", ->
        @callback.calledWith(null, "hello", "world").should.equal true

    describe "with two locks", ->
      beforeEach (done) ->
        @callback1 = sinon.stub()
        @callback2 = sinon.stub()
        @LockManager.runWithLock "lock-one", (releaseLock) ->
          setTimeout () ->
            releaseLock(null, "hello", "world", "one")
          , 100
        , (err, args...) =>
          @callback1(err, args...)
        @LockManager.runWithLock "lock-two", (releaseLock) ->
          setTimeout () ->
            releaseLock(null, "hello", "world", "two")
          , 200
        , (err, args...) =>
          @callback2(err, args...)
          done()

      it "should call the first callback", ->
        @callback1.calledWith(null, "hello", "world", "one").should.equal true

      it "should call the second callback", ->
        @callback2.calledWith(null, "hello", "world", "two").should.equal true

    describe "with lock contention", ->
      describe "where the first lock is released quickly", ->
        beforeEach (done) ->
          @LockManager.MAX_LOCK_WAIT_TIME = 1000
          @LockManager.LOCK_TEST_INTERVAL = 100
          @callback1 = sinon.stub()
          @callback2 = sinon.stub()
          @LockManager.runWithLock "lock", (releaseLock) ->
            setTimeout () ->
              releaseLock(null, "hello", "world", "one")
            , 100
          , (err, args...) =>
            @callback1(err, args...)
          @LockManager.runWithLock "lock", (releaseLock) ->
            setTimeout () ->
              releaseLock(null, "hello", "world", "two")
            , 200
          , (err, args...) =>
            @callback2(err, args...)
            done()

        it "should call the first callback", ->
          @callback1.calledWith(null, "hello", "world", "one").should.equal true

        it "should call the second callback", ->
          @callback2.calledWith(null, "hello", "world", "two").should.equal true

      describe "where the first lock is held longer than the waiting time", ->
        beforeEach (done) ->
          @LockManager.MAX_LOCK_HOLD_TIME = 10000
          @LockManager.MAX_LOCK_WAIT_TIME = 1000
          @LockManager.LOCK_TEST_INTERVAL = 100
          @callback1 = sinon.stub()
          @callback2 = sinon.stub()
          doneOne = doneTwo = false
          finish = (key) ->
            doneOne = true if key is 1
            doneTwo = true if key is 2
            done() if doneOne and doneTwo
          @LockManager.runWithLock "lock", (releaseLock) ->
            setTimeout () ->
              releaseLock(null, "hello", "world", "one")
            , 1100
          , (err, args...) =>
            @callback1(err, args...)
            finish(1)
          @LockManager.runWithLock "lock", (releaseLock) ->
            setTimeout () ->
              releaseLock(null, "hello", "world", "two")
            , 100
          , (err, args...) =>
            @callback2(err, args...)
            finish(2)

        it "should call the first callback", ->
          @callback1.calledWith(null, "hello", "world", "one").should.equal true

        it "should call the second callback with an error", ->
          error = sinon.match.instanceOf Error
          @callback2.calledWith(error).should.equal true

      describe "where the first lock is held longer than the max holding time", ->
        beforeEach (done) ->
          @LockManager.MAX_LOCK_HOLD_TIME = 1000
          @LockManager.MAX_LOCK_WAIT_TIME = 2000
          @LockManager.LOCK_TEST_INTERVAL = 100
          @callback1 = sinon.stub()
          @callback2 = sinon.stub()
          doneOne = doneTwo = false
          finish = (key) ->
            doneOne = true if key is 1
            doneTwo = true if key is 2
            done() if doneOne and doneTwo
          @LockManager.runWithLock "lock", (releaseLock) ->
            setTimeout () ->
              releaseLock(null, "hello", "world", "one")
            , 1500
          , (err, args...) =>
            @callback1(err, args...)
            finish(1)
          @LockManager.runWithLock "lock", (releaseLock) ->
            setTimeout () ->
              releaseLock(null, "hello", "world", "two")
            , 100
          , (err, args...) =>
            @callback2(err, args...)
            finish(2)

        it "should call the first callback", ->
          @callback1.calledWith(null, "hello", "world", "one").should.equal true

        it "should call the second callback", ->
          @callback2.calledWith(null, "hello", "world", "two").should.equal true
@@ -1,509 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
expect = require('chai').expect
require "coffee-script"
modulePath = require('path').join __dirname, '../../../app/coffee/DockerRunner'
Path = require "path"

describe "DockerRunner", ->
  beforeEach ->
    @container = container = {}
    @DockerRunner = SandboxedModule.require modulePath, requires:
      "settings-sharelatex": @Settings =
        clsi: docker: {}
        path: {}
      "logger-sharelatex": @logger = {
        log: sinon.stub(),
        error: sinon.stub(),
        info: sinon.stub(),
        warn: sinon.stub()
      }
      "dockerode": class Docker
        getContainer: sinon.stub().returns(container)
        createContainer: sinon.stub().yields(null, container)
        listContainers: sinon.stub()
      "fs": @fs = { stat: sinon.stub().yields(null, { isDirectory: () -> true }) }
      "./Metrics":
        Timer: class Timer
          done: () ->
      "./LockManager":
        runWithLock: (key, runner, callback) -> runner(callback)
    @Docker = Docker
    @getContainer = Docker::getContainer
    @createContainer = Docker::createContainer
    @listContainers = Docker::listContainers

    @directory = "/local/compile/directory"
    @mainFile = "main-file.tex"
    @compiler = "pdflatex"
    @image = "example.com/sharelatex/image:2016.2"
    @env = {}
    @callback = sinon.stub()
    @project_id = "project-id-123"
    @volumes =
      "/local/compile/directory": "/compile"
    @Settings.clsi.docker.image = @defaultImage = "default-image"
    @Settings.clsi.docker.env = PATH: "mock-path"

  describe "run", ->
    beforeEach (done) ->
      @DockerRunner._getContainerOptions = sinon.stub().returns(@options = { mockoptions: "foo" })
      @DockerRunner._fingerprintContainer = sinon.stub().returns(@fingerprint = "fingerprint")

      @name = "project-#{@project_id}-#{@fingerprint}"

      @command = ["mock", "command", "--outdir=$COMPILE_DIR"]
      @command_with_dir = ["mock", "command", "--outdir=/compile"]
      @timeout = 42000
      done()

    describe "successfully", ->
      beforeEach (done) ->
        @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
        @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, (err, output) =>
          @callback(err, output)
          done()

      it "should generate the options for the container", ->
        @DockerRunner._getContainerOptions
          .calledWith(@command_with_dir, @image, @volumes, @timeout)
          .should.equal true

      it "should generate the fingerprint from the returned options", ->
        @DockerRunner._fingerprintContainer
          .calledWith(@options)
          .should.equal true

      it "should do the run", ->
        @DockerRunner._runAndWaitForContainer
          .calledWith(@options, @volumes, @timeout)
          .should.equal true

      it "should call the callback", ->
        @callback.calledWith(null, @output).should.equal true

    describe 'when path.sandboxedCompilesHostDir is set', ->

      beforeEach ->
        @Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles'
        @directory = '/var/lib/sharelatex/data/compiles/xyz'
        @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
        @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback

      it 'should re-write the bind directory', ->
        volumes = @DockerRunner._runAndWaitForContainer.lastCall.args[1]
        expect(volumes).to.deep.equal {
          '/some/host/dir/compiles/xyz': '/compile'
        }

      it "should call the callback", ->
        @callback.calledWith(null, @output).should.equal true

    describe "when the run throws an error", ->
      beforeEach ->
        firstTime = true
        @output = "mock-output"
        @DockerRunner._runAndWaitForContainer = (options, volumes, timeout, callback = (error, output) ->) =>
          if firstTime
            firstTime = false
            callback new Error("HTTP code is 500 which indicates error: server error")
          else
            callback(null, @output)
        sinon.spy @DockerRunner, "_runAndWaitForContainer"
        @DockerRunner.destroyContainer = sinon.stub().callsArg(3)
        @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback

      it "should do the run twice", ->
        @DockerRunner._runAndWaitForContainer
          .calledTwice.should.equal true

      it "should destroy the container in between", ->
        @DockerRunner.destroyContainer
          .calledWith(@name, null)
          .should.equal true

      it "should call the callback", ->
        @callback.calledWith(null, @output).should.equal true

    describe "with no image", ->
      beforeEach ->
        @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
        @DockerRunner.run @project_id, @command, @directory, null, @timeout, @env, @callback

      it "should use the default image", ->
        @DockerRunner._getContainerOptions
          .calledWith(@command_with_dir, @defaultImage, @volumes, @timeout)
          .should.equal true

    describe "with image override", ->
      beforeEach ->
        @Settings.texliveImageNameOveride = "overrideimage.com/something"
        @DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
        @DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback

      it "should use the override and keep the tag", ->
        image = @DockerRunner._getContainerOptions.args[0][1]
        image.should.equal "overrideimage.com/something/image:2016.2"

  describe "_runAndWaitForContainer", ->
    beforeEach ->
      @options = { mockoptions: "foo", name: @name = "mock-name" }
      @DockerRunner.startContainer = (options, volumes, attachStreamHandler, callback) =>
        attachStreamHandler(null, @output = "mock-output")
        callback(null, @containerId = "container-id")
      sinon.spy @DockerRunner, "startContainer"
      @DockerRunner.waitForContainer = sinon.stub().callsArgWith(2, null, @exitCode = 42)
      @DockerRunner._runAndWaitForContainer @options, @volumes, @timeout, @callback

    it "should create/start the container", ->
      @DockerRunner.startContainer
        .calledWith(@options, @volumes)
        .should.equal true

    it "should wait for the container to finish", ->
      @DockerRunner.waitForContainer
        .calledWith(@name, @timeout)
        .should.equal true

    it "should call the callback with the output", ->
      @callback.calledWith(null, @output).should.equal true

  describe "startContainer", ->
    beforeEach ->
      @attachStreamHandler = sinon.stub()
      @attachStreamHandler.cock = true
      @options = { mockoptions: "foo", name: "mock-name" }
      @container.inspect = sinon.stub().callsArgWith(0)
      @DockerRunner.attachToContainer = (containerId, attachStreamHandler, cb) =>
        attachStreamHandler()
        cb()
      sinon.spy @DockerRunner, "attachToContainer"

    describe "when the container exists", ->
      beforeEach ->
        @container.inspect = sinon.stub().callsArgWith(0)
        @container.start = sinon.stub().yields()

        @DockerRunner.startContainer @options, @volumes, @callback, ->

      it "should start the container with the given name", ->
        @getContainer
          .calledWith(@options.name)
          .should.equal true
        @container.start
          .called
          .should.equal true

      it "should not try to create the container", ->
        @createContainer.called.should.equal false

      it "should attach to the container", ->
        @DockerRunner.attachToContainer.called.should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

      it "should attach before the container starts", ->
        sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start)

    describe "when the container does not exist", ->
      beforeEach () ->
        exists = false
        @container.start = sinon.stub().yields()
        @container.inspect = sinon.stub().callsArgWith(0, { statusCode: 404 })
        @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

      it "should create the container", ->
        @createContainer
          .calledWith(@options)
          .should.equal true

      it "should call the callback and stream handler", ->
        @attachStreamHandler.called.should.equal true
        @callback.called.should.equal true

      it "should attach to the container", ->
        @DockerRunner.attachToContainer.called.should.equal true

      it "should attach before the container starts", ->
        sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start)

    describe "when the container is already running", ->
      beforeEach ->
        error = new Error("HTTP code is 304 which indicates error: server error - start: Cannot start container #{@name}: The container MOCKID is already running.")
        error.statusCode = 304
        @container.start = sinon.stub().yields(error)
        @container.inspect = sinon.stub().callsArgWith(0)
        @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

      it "should not try to create the container", ->
        @createContainer.called.should.equal false

      it "should call the callback and stream handler without an error", ->
        @attachStreamHandler.called.should.equal true
        @callback.called.should.equal true

    describe "when a volume does not exist", ->
      beforeEach () ->
        @fs.stat = sinon.stub().yields(new Error("no such path"))
        @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

      it "should not try to create the container", ->
        @createContainer.called.should.equal false

      it "should call the callback with an error", ->
        @callback.calledWith(new Error()).should.equal true

    describe "when a volume exists but is not a directory", ->
      beforeEach ->
        @fs.stat = sinon.stub().yields(null, { isDirectory: () -> return false })
        @DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

      it "should not try to create the container", ->
        @createContainer.called.should.equal false

      it "should call the callback with an error", ->
        @callback.calledWith(new Error()).should.equal true

    describe "when a volume does not exist, but sibling-containers are used", ->
      beforeEach ->
        @fs.stat = sinon.stub().yields(new Error("no such path"))
        @Settings.path.sandboxedCompilesHostDir = '/some/path'
        @container.start = sinon.stub().yields()
        @DockerRunner.startContainer @options, @volumes, @callback

      afterEach ->
        delete @Settings.path.sandboxedCompilesHostDir

      it "should start the container with the given name", ->
        @getContainer
          .calledWith(@options.name)
          .should.equal true
        @container.start
          .called
          .should.equal true

      it "should not try to create the container", ->
        @createContainer.called.should.equal false

      it "should call the callback", ->
        @callback.called.should.equal true
        @callback.calledWith(new Error()).should.equal false

    describe "when the container tries to be created, but already has been (race condition)", ->

  describe "waitForContainer", ->
    beforeEach ->
      @containerId = "container-id"
      @timeout = 5000
      @container.wait = sinon.stub().yields(null, StatusCode: @statusCode = 42)
      @container.kill = sinon.stub().yields()

    describe "when the container returns in time", ->
      beforeEach ->
        @DockerRunner.waitForContainer @containerId, @timeout, @callback

      it "should wait for the container", ->
        @getContainer
          .calledWith(@containerId)
          .should.equal true
        @container.wait
          .called
          .should.equal true

      it "should call the callback with the exit code", ->
        @callback
          .calledWith(null, @statusCode)
          .should.equal true

    describe "when the container does not return before the timeout", ->
      beforeEach (done) ->
        @container.wait = (callback = (error, exitCode) ->) ->
          setTimeout () ->
            callback(null, StatusCode: 42)
          , 100
        @timeout = 5
        @DockerRunner.waitForContainer @containerId, @timeout, (args...) =>
          @callback(args...)
          done()

      it "should call kill on the container", ->
        @getContainer
          .calledWith(@containerId)
          .should.equal true
        @container.kill
          .called
          .should.equal true

      it "should call the callback with an error", ->
        error = new Error("container timed out")
        error.timedout = true
        @callback
          .calledWith(error)
          .should.equal true

  describe "destroyOldContainers", ->
    beforeEach (done) ->
      oneHourInSeconds = 60 * 60
      oneHourInMilliseconds = oneHourInSeconds * 1000
      nowInSeconds = Date.now() / 1000
      @containers = [{
        Name: "/project-old-container-name"
        Id: "old-container-id"
        Created: nowInSeconds - oneHourInSeconds - 100
      }, {
        Name: "/project-new-container-name"
        Id: "new-container-id"
        Created: nowInSeconds - oneHourInSeconds + 100
      }, {
        Name: "/totally-not-a-project-container"
        Id: "some-random-id"
        Created: nowInSeconds - (2 * oneHourInSeconds)
      }]
      @DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds
      @listContainers.callsArgWith(1, null, @containers)
      @DockerRunner.destroyContainer = sinon.stub().callsArg(3)
      @DockerRunner.destroyOldContainers (error) =>
        @callback(error)
        done()

    it "should list all containers", ->
      @listContainers
        .calledWith(all: true)
        .should.equal true

    it "should destroy old containers", ->
      @DockerRunner.destroyContainer
        .callCount
        .should.equal 1
      @DockerRunner.destroyContainer
        .calledWith("/project-old-container-name", "old-container-id")
        .should.equal true

    it "should not destroy new containers", ->
      @DockerRunner.destroyContainer
        .calledWith("/project-new-container-name", "new-container-id")
        .should.equal false

    it "should not destroy non-project containers", ->
      @DockerRunner.destroyContainer
        .calledWith("/totally-not-a-project-container", "some-random-id")
        .should.equal false

    it "should call the callback", ->
      @callback.called.should.equal true

  describe '_destroyContainer', ->
    beforeEach ->
      @containerId = 'some_id'
      @fakeContainer =
        remove: sinon.stub().callsArgWith(1, null)
      @Docker::getContainer = sinon.stub().returns(@fakeContainer)

    it 'should get the container', (done) ->
      @DockerRunner._destroyContainer @containerId, false, (err) =>
        @Docker::getContainer.callCount.should.equal 1
        @Docker::getContainer.calledWith(@containerId).should.equal true
        done()

    it 'should try to force-destroy the container when shouldForce=true', (done) ->
      @DockerRunner._destroyContainer @containerId, true, (err) =>
        @fakeContainer.remove.callCount.should.equal 1
        @fakeContainer.remove.calledWith({ force: true }).should.equal true
        done()

    it 'should not try to force-destroy the container when shouldForce=false', (done) ->
      @DockerRunner._destroyContainer @containerId, false, (err) =>
        @fakeContainer.remove.callCount.should.equal 1
        @fakeContainer.remove.calledWith({ force: false }).should.equal true
        done()

    it 'should not produce an error', (done) ->
      @DockerRunner._destroyContainer @containerId, false, (err) =>
        expect(err).to.equal null
        done()

    describe 'when the container is already gone', ->
      beforeEach ->
        @fakeError = new Error('woops')
        @fakeError.statusCode = 404
        @fakeContainer =
          remove: sinon.stub().callsArgWith(1, @fakeError)
        @Docker::getContainer = sinon.stub().returns(@fakeContainer)

      it 'should not produce an error', (done) ->
        @DockerRunner._destroyContainer @containerId, false, (err) =>
          expect(err).to.equal null
          done()

    describe 'when container.destroy produces an error', ->
      beforeEach ->
        @fakeError = new Error('woops')
        @fakeError.statusCode = 500
        @fakeContainer =
          remove: sinon.stub().callsArgWith(1, @fakeError)
        @Docker::getContainer = sinon.stub().returns(@fakeContainer)

      it 'should produce an error', (done) ->
        @DockerRunner._destroyContainer @containerId, false, (err) =>
          expect(err).to.not.equal null
          expect(err).to.equal @fakeError
          done()

  describe 'kill', ->
    beforeEach ->
      @containerId = 'some_id'
      @fakeContainer =
        kill: sinon.stub().callsArgWith(0, null)
      @Docker::getContainer = sinon.stub().returns(@fakeContainer)

    it 'should get the container', (done) ->
      @DockerRunner.kill @containerId, (err) =>
        @Docker::getContainer.callCount.should.equal 1
        @Docker::getContainer.calledWith(@containerId).should.equal true
        done()

    it 'should try to force-destroy the container', (done) ->
      @DockerRunner.kill @containerId, (err) =>
        @fakeContainer.kill.callCount.should.equal 1
        done()

    it 'should not produce an error', (done) ->
      @DockerRunner.kill @containerId, (err) =>
        expect(err).to.equal undefined
        done()

    describe 'when the container is not actually running', ->
      beforeEach ->
        @fakeError = new Error('woops')
        @fakeError.statusCode = 500
        @fakeError.message = 'Cannot kill container <whatever> is not running'
        @fakeContainer =
          kill: sinon.stub().callsArgWith(0, @fakeError)
        @Docker::getContainer = sinon.stub().returns(@fakeContainer)

      it 'should not produce an error', (done) ->
        @DockerRunner.kill @containerId, (err) =>
          expect(err).to.equal undefined
          done()

    describe 'when container.kill produces a legitimate error', ->
      beforeEach ->
        @fakeError = new Error('woops')
        @fakeError.statusCode = 500
        @fakeError.message = 'Totally legitimate reason to throw an error'
        @fakeContainer =
          kill: sinon.stub().callsArgWith(0, @fakeError)
        @Docker::getContainer = sinon.stub().returns(@fakeContainer)

      it 'should produce an error', (done) ->
        @DockerRunner.kill @containerId, (err) =>
          expect(err).to.not.equal undefined
          expect(err).to.equal @fakeError
          done()
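'should do the run twice' and 'should destroy the container in between' encode the runner's recovery strategy: one failed `_runAndWaitForContainer` attempt triggers a container destroy followed by exactly one retry. A rough sketch of that control flow (simplified; the `destroyContainer` argument order is inferred from the stubs above):

// Simplified sketch of the retry-once behaviour the tests assert
function runWithRetry(runner, options, volumes, timeout, callback) {
  runner._runAndWaitForContainer(options, volumes, timeout, (error, output) => {
    if (error == null) {
      return callback(null, output)
    }
    // First failure: remove the (possibly stale) container, then retry once
    runner.destroyContainer(options.name, null, true, () => {
      runner._runAndWaitForContainer(options, volumes, timeout, callback)
    })
  })
}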
@@ -1,61 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/DraftModeManager'

describe 'DraftModeManager', ->
  beforeEach ->
    @DraftModeManager = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "logger-sharelatex": @logger = { log: () -> }

  describe "_injectDraftOption", ->
    it "should add draft option into documentclass with existing options", ->
      @DraftModeManager
        ._injectDraftOption('''
          \\documentclass[a4paper,foo=bar]{article}
        ''')
        .should.equal('''
          \\documentclass[draft,a4paper,foo=bar]{article}
        ''')

    it "should add draft option into documentclass with no options", ->
      @DraftModeManager
        ._injectDraftOption('''
          \\documentclass{article}
        ''')
        .should.equal('''
          \\documentclass[draft]{article}
        ''')

  describe "injectDraftMode", ->
    beforeEach ->
      @filename = "/mock/filename.tex"
      @callback = sinon.stub()
      content = '''
        \\documentclass{article}
        \\begin{document}
        Hello world
        \\end{document}
      '''
      @fs.readFile = sinon.stub().callsArgWith(2, null, content)
      @fs.writeFile = sinon.stub().callsArg(2)
      @DraftModeManager.injectDraftMode @filename, @callback

    it "should read the file", ->
      @fs.readFile
        .calledWith(@filename, "utf8")
        .should.equal true

    it "should write the modified file", ->
      @fs.writeFile
        .calledWith(@filename, """
          \\documentclass[draft]{article}
          \\begin{document}
          Hello world
          \\end{document}
        """)
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true
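The two `_injectDraftOption` cases fully determine the rewrite: an existing option list gains a leading `draft,`, and a bare `\documentclass` gains `[draft]`. A regex sketch that satisfies both assertions (the module's real implementation may differ):

// Sketch: satisfies both test cases above
function injectDraftOption(content) {
  return content
    // \documentclass[a4paper,...] -> \documentclass[draft,a4paper,...]
    .replace(/\\documentclass\[/g, '\\documentclass[draft,')
    // \documentclass{article} -> \documentclass[draft]{article}
    .replace(/\\documentclass\{/g, '\\documentclass[draft]{')
}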
@@ -1,79 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/LatexRunner'
Path = require "path"

describe "LatexRunner", ->
  beforeEach ->
    @LatexRunner = SandboxedModule.require modulePath, requires:
      "settings-sharelatex": @Settings =
        docker:
          socketPath: "/var/run/docker.sock"
      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
      "./Metrics":
        Timer: class Timer
          done: () ->
      "./CommandRunner": @CommandRunner = {}

    @directory = "/local/compile/directory"
    @mainFile = "main-file.tex"
    @compiler = "pdflatex"
    @image = "example.com/image"
    @callback = sinon.stub()
    @project_id = "project-id-123"
    @env = { 'foo': '123' }

  describe "runLatex", ->
    beforeEach ->
      @CommandRunner.run = sinon.stub().callsArg(6)

    describe "normally", ->
      beforeEach ->
        @LatexRunner.runLatex @project_id,
          directory: @directory
          mainFile: @mainFile
          compiler: @compiler
          timeout: @timeout = 42000
          image: @image
          environment: @env
        @callback

      it "should run the latex command", ->
        @CommandRunner.run
          .calledWith(@project_id, sinon.match.any, @directory, @image, @timeout, @env)
          .should.equal true

    describe "with an .Rtex main file", ->
      beforeEach ->
        @LatexRunner.runLatex @project_id,
          directory: @directory
          mainFile: "main-file.Rtex"
          compiler: @compiler
          image: @image
          timeout: @timeout = 42000
        @callback

      it "should run the latex command on the equivalent .tex file", ->
        command = @CommandRunner.run.args[0][1]
        mainFile = command.slice(-1)[0]
        mainFile.should.equal "$COMPILE_DIR/main-file.tex"

    describe "with a flags option", ->
      beforeEach ->
        @LatexRunner.runLatex @project_id,
          directory: @directory
          mainFile: @mainFile
          compiler: @compiler
          image: @image
          timeout: @timeout = 42000
          flags: ["-file-line-error", "-halt-on-error"]
        @callback

      it "should include the flags in the command", ->
        command = @CommandRunner.run.args[0][1]
        flags = command.filter (arg) ->
          (arg == "-file-line-error") || (arg == "-halt-on-error")
        flags.length.should.equal 2
        flags[0].should.equal "-file-line-error"
        flags[1].should.equal "-halt-on-error"
@@ -1,57 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/LockManager'
Path = require "path"
Errors = require "../../../app/js/Errors"

describe "DockerLockManager", ->
  beforeEach ->
    @LockManager = SandboxedModule.require modulePath, requires:
      "settings-sharelatex": {}
      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err: -> }
      "fs":
        lstat: sinon.stub().callsArgWith(1)
        readdir: sinon.stub().callsArgWith(1)
      "lockfile": @Lockfile = {}
    @lockFile = "/local/compile/directory/.project-lock"

  describe "runWithLock", ->
    beforeEach ->
      @runner = sinon.stub().callsArgWith(0, null, "foo", "bar")
      @callback = sinon.stub()

    describe "normally", ->
      beforeEach ->
        @Lockfile.lock = sinon.stub().callsArgWith(2, null)
        @Lockfile.unlock = sinon.stub().callsArgWith(1, null)
        @LockManager.runWithLock @lockFile, @runner, @callback

      it "should run the compile", ->
        @runner
          .calledWith()
          .should.equal true

      it "should call the callback with the response from the compile", ->
        @callback
          .calledWithExactly(null, "foo", "bar")
          .should.equal true

    describe "when the project is locked", ->
      beforeEach ->
        @error = new Error()
        @error.code = "EEXIST"
        @Lockfile.lock = sinon.stub().callsArgWith(2, @error)
        @Lockfile.unlock = sinon.stub().callsArgWith(1, null)
        @LockManager.runWithLock @lockFile, @runner, @callback

      it "should not run the compile", ->
        @runner
          .called
          .should.equal false

      it "should return an error", ->
        error = new Errors.AlreadyCompilingError()
        @callback
          .calledWithExactly(error)
          .should.equal true
@@ -1,68 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/OutputFileFinder'
path = require "path"
expect = require("chai").expect
EventEmitter = require("events").EventEmitter

describe "OutputFileFinder", ->
  beforeEach ->
    @OutputFileFinder = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "child_process": spawn: @spawn = sinon.stub()
      "logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() }
    @directory = "/test/dir"
    @callback = sinon.stub()

  describe "findOutputFiles", ->
    beforeEach ->
      @resource_path = "resource/path.tex"
      @output_paths = ["output.pdf", "extra/file.tex"]
      @all_paths = @output_paths.concat [@resource_path]
      @resources = [
        path: @resource_path = "resource/path.tex"
      ]
      @OutputFileFinder._getAllFiles = sinon.stub().callsArgWith(1, null, @all_paths)
      @OutputFileFinder.findOutputFiles @resources, @directory, (error, @outputFiles) =>

    it "should only return the output files, not directories or resource paths", ->
      expect(@outputFiles).to.deep.equal [{
        path: "output.pdf"
        type: "pdf"
      }, {
        path: "extra/file.tex"
        type: "tex"
      }]

  describe "_getAllFiles", ->
    beforeEach ->
      @proc = new EventEmitter()
      @proc.stdout = new EventEmitter()
      @spawn.returns @proc
      @directory = "/base/dir"
      @OutputFileFinder._getAllFiles @directory, @callback

    describe "successfully", ->
      beforeEach ->
        @proc.stdout.emit(
          "data",
          ["/base/dir/main.tex", "/base/dir/chapters/chapter1.tex"].join("\n") + "\n"
        )
        @proc.emit "close", 0

      it "should call the callback with the relative file paths", ->
        @callback.calledWith(
          null,
          ["main.tex", "chapters/chapter1.tex"]
        ).should.equal true

    describe "when the directory doesn't exist", ->
      beforeEach ->
        @proc.emit "close", 1

      it "should call the callback with a blank array", ->
        @callback.calledWith(
          null,
          []
        ).should.equal true
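The deep-equal expectation above captures the module's contract: every path returned by `_getAllFiles` counts as an output file unless it was one of the incoming resources, and each entry's `type` is simply its file extension. A compact sketch of that filter (illustrative only):

// Sketch of the filtering the test describes
function filterOutputFiles(resources, allPaths) {
  const incoming = new Set(resources.map(resource => resource.path))
  return allPaths
    .filter(path => !incoming.has(path))
    .map(path => ({ path, type: path.split('.').pop() }))
}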
@@ -1,103 +0,0 @@
|
||||
SandboxedModule = require('sandboxed-module')
|
||||
sinon = require('sinon')
|
||||
require('chai').should()
|
||||
modulePath = require('path').join __dirname, '../../../app/js/OutputFileOptimiser'
|
||||
path = require "path"
|
||||
expect = require("chai").expect
|
||||
EventEmitter = require("events").EventEmitter
|
||||
|
||||
describe "OutputFileOptimiser", ->
|
||||
beforeEach ->
|
||||
@OutputFileOptimiser = SandboxedModule.require modulePath, requires:
|
||||
"fs": @fs = {}
|
||||
"path": @Path = {}
|
||||
"child_process": spawn: @spawn = sinon.stub()
|
||||
"logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() }
|
||||
"./Metrics" : {}
|
||||
@directory = "/test/dir"
|
||||
@callback = sinon.stub()
|
||||
|
||||
describe "optimiseFile", ->
|
||||
beforeEach ->
|
||||
@src = "./output.pdf"
|
||||
@dst = "./output.pdf"
|
||||
|
||||
describe "when the file is not a pdf file", ->
|
||||
beforeEach (done)->
|
||||
@src = "./output.log"
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false)
|
||||
@OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null)
|
||||
@OutputFileOptimiser.optimiseFile @src, @dst, done
|
||||
|
||||
it "should not check if the file is optimised", ->
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal false
|
||||
|
||||
it "should not optimise the file", ->
|
||||
@OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal false
|
||||
|
||||
describe "when the pdf file is not optimised", ->
|
||||
beforeEach (done) ->
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false)
|
||||
@OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null)
|
||||
@OutputFileOptimiser.optimiseFile @src, @dst, done
|
||||
|
||||
it "should check if the pdf is optimised", ->
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal true
|
||||
|
||||
it "should optimise the pdf", ->
|
||||
@OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal true
|
||||
|
||||
describe "when the pdf file is optimised", ->
|
||||
beforeEach (done) ->
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, true)
|
||||
@OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null)
|
||||
@OutputFileOptimiser.optimiseFile @src, @dst, done
|
||||
|
||||
it "should check if the pdf is optimised", ->
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal true
|
||||
|
||||
it "should not optimise the pdf", ->
|
||||
@OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal false
|
||||
|
||||
describe "checkIfPDFISOptimised", ->
|
||||
beforeEach () ->
|
||||
@callback = sinon.stub()
|
||||
@fd = 1234
|
||||
@fs.open = sinon.stub().yields(null, @fd)
|
||||
@fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello /Linearized 1"))
|
||||
@fs.close = sinon.stub().withArgs(@fd).yields(null)
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback
|
||||
|
||||
describe "for a linearised file", ->
|
||||
beforeEach () ->
|
||||
@fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello /Linearized 1"))
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback
|
||||
|
||||
it "should open the file", ->
|
||||
@fs.open.calledWith(@src, "r").should.equal true
|
||||
|
||||
it "should read the header", ->
|
||||
@fs.read.calledWith(@fd).should.equal true
|
||||
|
||||
it "should close the file", ->
|
||||
@fs.close.calledWith(@fd).should.equal true
|
||||
|
||||
it "should call the callback with a true result", ->
|
||||
@callback.calledWith(null, true).should.equal true
|
||||
|
||||
describe "for an unlinearised file", ->
|
||||
beforeEach () ->
|
||||
@fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello not linearized 1"))
|
||||
@OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback
|
||||
|
||||
it "should open the file", ->
|
||||
@fs.open.calledWith(@src, "r").should.equal true
|
||||
|
||||
it "should read the header", ->
|
||||
@fs.read.calledWith(@fd).should.equal true
|
||||
|
||||
it "should close the file", ->
|
||||
@fs.close.calledWith(@fd).should.equal true
|
||||
|
||||
it "should call the callback with a false result", ->
|
||||
@callback.calledWith(null, false).should.equal true
|
||||
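For reference, the linearised-file case above might read roughly as follows once decaffeinated to plain JavaScript under the repo's new ESLint/Prettier setup. This is a sketch only, assuming the module layout is unchanged; the actual converted file in test/unit/js may differ. Note that Buffer.from replaces the deprecated new Buffer() constructor used in the CoffeeScript original.

// Sketch only: the real converted test may differ.
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const path = require('path')
require('chai').should()

const modulePath = path.join(__dirname, '../../../app/js/OutputFileOptimiser')

describe('OutputFileOptimiser', function() {
  beforeEach(function() {
    // mirror the sandboxed-module setup from the CoffeeScript original
    this.OutputFileOptimiser = SandboxedModule.require(modulePath, {
      requires: {
        fs: (this.fs = {}),
        path: (this.Path = {}),
        child_process: { spawn: (this.spawn = sinon.stub()) },
        'logger-sharelatex': { log: sinon.stub(), warn: sinon.stub() },
        './Metrics': {}
      }
    })
    this.src = './output.pdf'
    this.callback = sinon.stub()
  })

  describe('checkIfPDFIsOptimised', function() {
    describe('for a linearised file', function() {
      beforeEach(function() {
        this.fd = 1234
        this.fs.open = sinon.stub().yields(null, this.fd)
        // Buffer.from replaces the deprecated new Buffer() above
        this.fs.read = sinon
          .stub()
          .withArgs(this.fd)
          .yields(null, 100, Buffer.from('hello /Linearized 1'))
        this.fs.close = sinon.stub().withArgs(this.fd).yields(null)
        this.OutputFileOptimiser.checkIfPDFIsOptimised(this.src, this.callback)
      })

      it('should call the callback with a true result', function() {
        this.callback.calledWith(null, true).should.equal(true)
      })
    })
  })
})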
@@ -1,62 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/ProjectPersistenceManager'
tk = require("timekeeper")

describe "ProjectPersistenceManager", ->
  beforeEach ->
    @ProjectPersistenceManager = SandboxedModule.require modulePath, requires:
      "./UrlCache": @UrlCache = {}
      "./CompileManager": @CompileManager = {}
      "logger-sharelatex": @logger = { log: sinon.stub() }
      "./db": @db = {}
    @callback = sinon.stub()
    @project_id = "project-id-123"
    @user_id = "1234"

  describe "clearExpiredProjects", ->
    beforeEach ->
      @project_ids = [
        "project-id-1"
        "project-id-2"
      ]
      @ProjectPersistenceManager._findExpiredProjectIds = sinon.stub().callsArgWith(0, null, @project_ids)
      @ProjectPersistenceManager.clearProjectFromCache = sinon.stub().callsArg(1)
      @CompileManager.clearExpiredProjects = sinon.stub().callsArg(1)
      @ProjectPersistenceManager.clearExpiredProjects @callback

    it "should clear each expired project", ->
      for project_id in @project_ids
        @ProjectPersistenceManager.clearProjectFromCache
          .calledWith(project_id)
          .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "clearProject", ->
    beforeEach ->
      @ProjectPersistenceManager._clearProjectFromDatabase = sinon.stub().callsArg(1)
      @UrlCache.clearProject = sinon.stub().callsArg(1)
      @CompileManager.clearProject = sinon.stub().callsArg(2)
      @ProjectPersistenceManager.clearProject @project_id, @user_id, @callback

    it "should clear the project from the database", ->
      @ProjectPersistenceManager._clearProjectFromDatabase
        .calledWith(@project_id)
        .should.equal true

    it "should clear all the cached Urls for the project", ->
      @UrlCache.clearProject
        .calledWith(@project_id)
        .should.equal true

    it "should clear the project compile folder", ->
      @CompileManager.clearProject
        .calledWith(@project_id, @user_id)
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true
@@ -1,279 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
expect = require('chai').expect
modulePath = require('path').join __dirname, '../../../app/js/RequestParser'
tk = require("timekeeper")

describe "RequestParser", ->
  beforeEach ->
    tk.freeze()
    @callback = sinon.stub()
    @validResource =
      path: "main.tex"
      date: "12:00 01/02/03"
      content: "Hello world"
    @validRequest =
      compile:
        token: "token-123"
        options:
          imageName: "basicImageName/here:2017-1"
          compiler: "pdflatex"
          timeout: 42
        resources: []
    @RequestParser = SandboxedModule.require modulePath, requires:
      "settings-sharelatex": @settings = {}

  afterEach ->
    tk.reset()

  describe "without a top level object", ->
    beforeEach ->
      @RequestParser.parse [], @callback

    it "should return an error", ->
      @callback.calledWith("top level object should have a compile attribute")
        .should.equal true

  describe "without a compile attribute", ->
    beforeEach ->
      @RequestParser.parse {}, @callback

    it "should return an error", ->
      @callback.calledWith("top level object should have a compile attribute")
        .should.equal true

  describe "without a valid compiler", ->
    beforeEach ->
      @validRequest.compile.options.compiler = "not-a-compiler"
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("compiler attribute should be one of: pdflatex, latex, xelatex, lualatex")
        .should.equal true

  describe "without a compiler specified", ->
    beforeEach ->
      delete @validRequest.compile.options.compiler
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the compiler to pdflatex by default", ->
      @data.compiler.should.equal "pdflatex"

  describe "with imageName set", ->
    beforeEach ->
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the imageName", ->
      @data.imageName.should.equal "basicImageName/here:2017-1"

  describe "with flags set", ->
    beforeEach ->
      @validRequest.compile.options.flags = ["-file-line-error"]
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the flags attribute", ->
      expect(@data.flags).to.deep.equal ["-file-line-error"]

  describe "with flags not specified", ->
    beforeEach ->
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should have an empty flags list", ->
      expect(@data.flags).to.deep.equal []

  describe "without a timeout specified", ->
    beforeEach ->
      delete @validRequest.compile.options.timeout
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the timeout to MAX_TIMEOUT", ->
      @data.timeout.should.equal @RequestParser.MAX_TIMEOUT * 1000

  describe "with a timeout larger than the maximum", ->
    beforeEach ->
      @validRequest.compile.options.timeout = @RequestParser.MAX_TIMEOUT + 1
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the timeout to MAX_TIMEOUT", ->
      @data.timeout.should.equal @RequestParser.MAX_TIMEOUT * 1000

  describe "with a timeout", ->
    beforeEach ->
      @RequestParser.parse @validRequest, (error, @data) =>

    it "should set the timeout (in milliseconds)", ->
      @data.timeout.should.equal @validRequest.compile.options.timeout * 1000

  describe "with a resource without a path", ->
    beforeEach ->
      delete @validResource.path
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("all resources should have a path attribute")
        .should.equal true

  describe "with a resource with a path", ->
    beforeEach ->
      @validResource.path = @path = "test.tex"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the path in the parsed response", ->
      @data.resources[0].path.should.equal @path

  describe "with a resource with a malformed modified date", ->
    beforeEach ->
      @validResource.modified = "not-a-date"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback
        .calledWith(
          "resource modified date could not be understood: " +
            @validResource.modified
        )
        .should.equal true

  describe "with a resource with a valid date", ->
    beforeEach ->
      @date = "12:00 01/02/03"
      @validResource.modified = @date
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the date as a JavaScript Date object", ->
      (@data.resources[0].modified instanceof Date).should.equal true
      @data.resources[0].modified.getTime().should.equal Date.parse(@date)

  describe "with a resource without either a content or URL attribute", ->
    beforeEach ->
      delete @validResource.url
      delete @validResource.content
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("all resources should have either a url or content attribute")
        .should.equal true

  describe "with a resource where the content is not a string", ->
    beforeEach ->
      @validResource.content = []
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("content attribute should be a string")
        .should.equal true

  describe "with a resource where the url is not a string", ->
    beforeEach ->
      @validResource.url = []
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("url attribute should be a string")
        .should.equal true

  describe "with a resource with a url", ->
    beforeEach ->
      @validResource.url = @url = "www.example.com"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the url in the parsed response", ->
      @data.resources[0].url.should.equal @url

  describe "with a resource with a content attribute", ->
    beforeEach ->
      @validResource.content = @content = "Hello world"
      @validRequest.compile.resources.push @validResource
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the content in the parsed response", ->
      @data.resources[0].content.should.equal @content

  describe "without a root resource path", ->
    beforeEach ->
      delete @validRequest.compile.rootResourcePath
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should set the root resource path to 'main.tex' by default", ->
      @data.rootResourcePath.should.equal "main.tex"

  describe "with a root resource path", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = @path = "test.tex"
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the root resource path in the parsed response", ->
      @data.rootResourcePath.should.equal @path

  describe "with a root resource path that is not a string", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = []
      @RequestParser.parse @validRequest, @callback

    it "should return an error", ->
      @callback.calledWith("rootResourcePath attribute should be a string")
        .should.equal true

  describe "with a root resource path that needs escaping", ->
    beforeEach ->
      @badPath = "`rm -rf foo`.tex"
      @goodPath = "rm -rf foo.tex"
      @validRequest.compile.rootResourcePath = @badPath
      @validRequest.compile.resources.push {
        path: @badPath
        date: "12:00 01/02/03"
        content: "Hello world"
      }
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the escaped resource", ->
      @data.rootResourcePath.should.equal @goodPath

    it "should also escape the resource path", ->
      @data.resources[0].path.should.equal @goodPath

  describe "with a root resource path that has a relative path", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = "foo/../../bar.tex"
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return an error", ->
      @callback.calledWith("relative path in root resource")
        .should.equal true

  describe "with a root resource path that has an unescaped + relative path", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = "foo/#../bar.tex"
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return an error", ->
      @callback.calledWith("relative path in root resource")
        .should.equal true

  describe "with an unknown syncType", ->
    beforeEach ->
      @validRequest.compile.options.syncType = "unexpected"
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return an error", ->
      @callback.calledWith("syncType attribute should be one of: full, incremental")
        .should.equal true
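One CoffeeScript idiom in the file above has no direct JavaScript equivalent: `(error, @data) =>` assigns the callback's second argument straight onto the test context. A hedged sketch of how such a case might be written after decaffeination (names and paths taken from the original above, not from the converted file, which may differ):

// Sketch only: illustrates the `(error, @data) =>` translation.
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const path = require('path')
require('chai').should()

const modulePath = path.join(__dirname, '../../../app/js/RequestParser')

describe('RequestParser', function() {
  beforeEach(function() {
    this.validRequest = {
      compile: {
        token: 'token-123',
        options: {
          imageName: 'basicImageName/here:2017-1',
          compiler: 'pdflatex',
          timeout: 42
        },
        resources: []
      }
    }
    this.RequestParser = SandboxedModule.require(modulePath, {
      requires: { 'settings-sharelatex': (this.settings = {}) }
    })
  })

  describe('without a compiler specified', function() {
    beforeEach(function() {
      delete this.validRequest.compile.options.compiler
      // explicit assignment where CoffeeScript wrote `(error, @data) =>`;
      // the arrow function inherits mocha's context `this` from beforeEach
      this.RequestParser.parse(this.validRequest, (error, data) => {
        this.data = data
      })
    })

    it('should set the compiler to pdflatex by default', function() {
      this.data.compiler.should.equal('pdflatex')
    })
  })
})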
@@ -1,109 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
should = require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/ResourceStateManager'
Path = require "path"
Errors = require "../../../app/js/Errors"

describe "ResourceStateManager", ->
  beforeEach ->
    @ResourceStateManager = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "logger-sharelatex": { log: sinon.stub(), err: sinon.stub() }
      "./SafeReader": @SafeReader = {}
    @basePath = "/path/to/write/files/to"
    @resources = [
      { path: "resource-1-mock" }
      { path: "resource-2-mock" }
      { path: "resource-3-mock" }
    ]
    @state = "1234567890"
    @resourceFileName = "#{@basePath}/.project-sync-state"
    @resourceFileContents = "#{@resources[0].path}\n#{@resources[1].path}\n#{@resources[2].path}\nstateHash:#{@state}"
    @callback = sinon.stub()

  describe "saveProjectState", ->
    beforeEach ->
      @fs.writeFile = sinon.stub().callsArg(2)

    describe "when the state is specified", ->
      beforeEach ->
        @ResourceStateManager.saveProjectState(@state, @resources, @basePath, @callback)

      it "should write the resource list to disk", ->
        @fs.writeFile
          .calledWith(@resourceFileName, @resourceFileContents)
          .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

    describe "when the state is undefined", ->
      beforeEach ->
        @state = undefined
        @fs.unlink = sinon.stub().callsArg(1)
        @ResourceStateManager.saveProjectState(@state, @resources, @basePath, @callback)

      it "should unlink the resource file", ->
        @fs.unlink
          .calledWith(@resourceFileName)
          .should.equal true

      it "should not write the resource list to disk", ->
        @fs.writeFile.called.should.equal false

      it "should call the callback", ->
        @callback.called.should.equal true

  describe "checkProjectStateMatches", ->
    describe "when the state matches", ->
      beforeEach ->
        @SafeReader.readFile = sinon.stub().callsArgWith(3, null, @resourceFileContents)
        @ResourceStateManager.checkProjectStateMatches(@state, @basePath, @callback)

      it "should read the resource file", ->
        @SafeReader.readFile
          .calledWith(@resourceFileName)
          .should.equal true

      it "should call the callback with the results", ->
        @callback.calledWithMatch(null, @resources).should.equal true

    describe "when the state does not match", ->
      beforeEach ->
        @SafeReader.readFile = sinon.stub().callsArgWith(3, null, @resourceFileContents)
        @ResourceStateManager.checkProjectStateMatches("not-the-original-state", @basePath, @callback)

      it "should call the callback with an error", ->
        error = new Errors.FilesOutOfSyncError("invalid state for incremental update")
        @callback.calledWith(error).should.equal true

  describe "checkResourceFiles", ->
    describe "when all the files are present", ->
      beforeEach ->
        @allFiles = [@resources[0].path, @resources[1].path, @resources[2].path]
        @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback)

      it "should call the callback", ->
        @callback.calledWithExactly().should.equal true

    describe "when there is a missing file", ->
      beforeEach ->
        @allFiles = [@resources[0].path, @resources[1].path]
        @fs.stat = sinon.stub().callsArgWith(1, new Error())
        @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback)

      it "should call the callback with an error", ->
        error = new Errors.FilesOutOfSyncError("resource files missing in incremental update")
        @callback.calledWith(error).should.equal true

    describe "when a resource contains a relative path", ->
      beforeEach ->
        @resources[0].path = "../foo/bar.tex"
        @allFiles = [@resources[0].path, @resources[1].path, @resources[2].path]
        @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback)

      it "should call the callback with an error", ->
        @callback.calledWith(new Error("relative path in resource file list")).should.equal true
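One detail of these tests worth preserving through the conversion: the assertions match on freshly constructed error objects (`new Errors.FilesOutOfSyncError(...)`), which works because sinon's `calledWith` compares arguments structurally rather than by identity. A hedged JavaScript sketch of the state-mismatch case, with module paths assumed from the original above rather than taken from the converted file:

// Sketch only: the real converted test may differ.
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const path = require('path')
require('chai').should()

const modulePath = path.join(__dirname, '../../../app/js/ResourceStateManager')
const Errors = require('../../../app/js/Errors')

describe('ResourceStateManager', function() {
  beforeEach(function() {
    this.ResourceStateManager = SandboxedModule.require(modulePath, {
      requires: {
        fs: (this.fs = {}),
        'logger-sharelatex': { log: sinon.stub(), err: sinon.stub() },
        './SafeReader': (this.SafeReader = {})
      }
    })
    this.basePath = '/path/to/write/files/to'
    this.state = '1234567890'
    this.resourceFileContents = `resource-1-mock\nstateHash:${this.state}`
    this.callback = sinon.stub()
  })

  describe('checkProjectStateMatches', function() {
    describe('when the state does not match', function() {
      beforeEach(function() {
        this.SafeReader.readFile = sinon
          .stub()
          .callsArgWith(3, null, this.resourceFileContents)
        this.ResourceStateManager.checkProjectStateMatches(
          'not-the-original-state',
          this.basePath,
          this.callback
        )
      })

      it('should call the callback with an error', function() {
        // a fresh error instance matches: sinon compares structure, not identity
        const error = new Errors.FilesOutOfSyncError(
          'invalid state for incremental update'
        )
        this.callback.calledWith(error).should.equal(true)
      })
    })
  })
})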