Compare commits: v0.2.0...pandoc-hac
263 Commits
| SHA1 |
|---|
| bfe31098cb |
| a1f9305047 |
| c66d76af2d |
| b8c82620e5 |
| 3b905353d0 |
| af6a402a87 |
| 3c639959f4 |
| 039d5e01ec |
| 1d38dd3a92 |
| 12c1dc632a |
| 7a6294081d |
| 7d8a18c46c |
| a0d5e6a54b |
| f58ef67875 |
| 6d42e18088 |
| ef0db41dae |
| 3692570df0 |
| 8255997fad |
| 360e8220ce |
| 23f4f2175c |
| eb35cab72d |
| 48b2548533 |
| 86cc30d8fa |
| 60ad425205 |
| d63f339fc4 |
| 1da918e13c |
| d1aa1d84fb |
| 88eafdf575 |
| d8858cfadd |
| fd0cbb2c52 |
| bd5a0ef36f |
| 1388093866 |
| c3e3e3d8ac |
| 23fec68111 |
| dbeff9a7b8 |
| f11468b595 |
| 0930b1cd8f |
| a36ec7f54e |
| eaa99c7274 |
| b0f879d652 |
| 8305268848 |
| aa5eeb0903 |
| 2af05030f2 |
| d04f93855b |
| a2c97e6f9a |
| acab9d45a0 |
| 0fac2655f7 |
| c1ca32184f |
| 97d7d76e61 |
| d865fda6a9 |
| 3d053a2e34 |
| faa2a325cb |
| b42347ea08 |
| d5b3101637 |
| c1d1f93453 |
| fc1782e74c |
| 6921cf25b8 |
| 0b9ddb8efe |
| e8064f12a1 |
| e4aad90f33 |
| a8aaf58e64 |
| 5b5f7b0690 |
| 2b610030d5 |
| 00ddfdf42b |
| c25e96bbc3 |
| 4eb8c107c9 |
| 86fa940c97 |
| 7cd81ac3df |
| fdc22c9cd2 |
| c3fe17d0b6 |
| 206adc2d04 |
| 6542ce20b6 |
| b4be40d061 |
| 11898b897e |
| 74c26120b2 |
| 7e1d3d98e7 |
| d5e0ab5a6f |
| 4c105e7826 |
| cd5adaff51 |
| e5081df2a9 |
| 104ce81ebd |
| 08fd440df5 |
| 11cd569ed9 |
| 472531f617 |
| ea34a1a89d |
| 2e91f07014 |
| 6f322583f7 |
| a74f4ac1a6 |
| aa1dd2bf05 |
| 8e2584bab4 |
| f8530da626 |
| 2edc015663 |
| f94e9989ec |
| c62f8b4854 |
| 2d389130cc |
| aafa691119 |
| a98b2b8032 |
| 398ba5ae34 |
| a1613eac5a |
| 3526fde665 |
| e1b44beb3f |
| 17b16dadcd |
| eb1364f249 |
| 834ad57312 |
| 19dfaa7d55 |
| b529b8add3 |
| 7ccc9500ed |
| 750576d1b0 |
| 021d848819 |
| 8803762081 |
| 5af137f60b |
| f059948e27 |
| 7a7c2ee992 |
| efe5e22b4c |
| 03d1936fde |
| a0969ec839 |
| fdab7763a2 |
| 57a5cfa9cb |
| bfb27e6c25 |
| d4d3048719 |
| 29594fd0f7 |
| a50582fd7c |
| 08f0955817 |
| bc1b8f4b2f |
| 599977c3e0 |
| 071b2269b3 |
| fde8149579 |
| 6b7e33bbc6 |
| 2898a82de8 |
| 5b71b849ca |
| 6cb5926c21 |
| 3cffb61c74 |
| 5705455ce1 |
| 71fb15e0ee |
| 819c642b8d |
| 20cb52793d |
| e507bd6394 |
| 444b3586a7 |
| e25ebd296e |
| 5090ad5c41 |
| bc73f719b2 |
| d238f73e29 |
| ea484da9f4 |
| b76a81e98b |
| f00be9018d |
| 146138f65c |
| 654a43655f |
| b9d6db6caf |
| 03e837c1f4 |
| 420db18a03 |
| dab92967c8 |
| 0530e21246 |
| 9e53c0b99e |
| 61089eca40 |
| 4827aec30b |
| 0900340282 |
| f7b4883397 |
| 79b3d2172b |
| 9f49dc8554 |
| ee170b4e67 |
| 47105190be |
| 98fb2cab99 |
| 4128dc6fdd |
| 4a2b2a8707 |
| 095e16e953 |
| 3a73971b42 |
| 748caeee7d |
| cd7ed6ce66 |
| 2200ac2cf2 |
| 928ffc96e6 |
| ade3da7e0d |
| e66b1ecdea |
| c6744caeeb |
| 189648e39a |
| 8da29e6024 |
| 664e908378 |
| 14837a57ec |
| 6524439699 |
| a7c7f2697f |
| fdf274fb82 |
| 69666bef60 |
| cd8e60195c |
| d6808c11cc |
| 133f522e7b |
| d29416fc77 |
| c486d6c215 |
| 8bb12f4d99 |
| e4ffc94de8 |
| 0b8435e358 |
| 801f09e7ed |
| 603b3d617c |
| b97627d6d8 |
| da02661d53 |
| 6e017ecaf1 |
| 0887fe3a72 |
| 226e6c87b1 |
| 8c42a353e1 |
| 78b88683fc |
| ac3b7a571a |
| cda1e301f6 |
| da324a8dd0 |
| b2f687c061 |
| 2c3b1126b0 |
| 22f730c3e9 |
| 2e97bcba3a |
| 0da85d5d03 |
| 3379577499 |
| 855169b571 |
| 6b107bd20a |
| a2c2fc3a51 |
| f8ae215c1e |
| d26c6b933e |
| 4496ddddfd |
| 434e00cb74 |
| f92c70935b |
| 51f87c5f79 |
| 143913c67f |
| dfd2bc31ef |
| e70bd3ae8e |
| 0a5ca6b0fa |
| 834668b033 |
| 35240fbd4d |
| 5f7cd5ece5 |
| 6860d2be6c |
| 3c021fd4c9 |
| f453f954e4 |
| cd499fa4e5 |
| 7799e0bfdd |
| 6ca8c10734 |
| 84cba7365f |
| 11be12fc8e |
| 3e70c0f8e4 |
| 558e9ae22b |
| 83e373d7e1 |
| 24fc9391c3 |
| 7ff56c4793 |
| 665dbff75a |
| 5d6fb4579a |
| bd036534e5 |
| 3dcd4af62e |
| fe46a96fd2 |
| 8fcbec5c0f |
| fbb00ebf2f |
| 6117cac1fd |
| d949d4ac32 |
| 6af22cf184 |
| 9f104a4f57 |
| 595bfe09ac |
| e64b08fcbe |
| dcfe1118d4 |
| 89acd36dde |
| a3383f11a1 |
| 2df886e330 |
| d96605d5e8 |
| 03b75b12cf |
| 86cf05c732 |
| 4497352a3a |
| 601a3e4805 |
| 0ea28710f5 |
| 2b5e7be964 |
| c178458223 |
| 3ed29b3489 |
| 29be2dc700 |
@@ -1,8 +1,5 @@
language: node_js

node_js:
  - "0.10"

before_install:
  - npm install -g grunt-cli

@@ -46,6 +46,11 @@ module.exports = (grunt) ->
    app:
      src: "app.js"

    mkdir:
      all:
        options:
          create: ["cache", "compiles"]

    mochaTest:
      unit:
        options:
@@ -70,6 +75,7 @@ module.exports = (grunt) ->
  grunt.loadNpmTasks 'grunt-shell'
  grunt.loadNpmTasks 'grunt-execute'
  grunt.loadNpmTasks 'grunt-bunyan'
  grunt.loadNpmTasks 'grunt-mkdir'

  grunt.registerTask 'compile:bin', () ->
    callback = @async()
@@ -93,6 +99,6 @@ module.exports = (grunt) ->

  grunt.registerTask 'install', 'compile:app'

  grunt.registerTask 'default', ['run']
  grunt.registerTask 'default', ['mkdir', 'run']
101 Jenkinsfile (vendored, new file)
@@ -0,0 +1,101 @@
pipeline {

  agent any

  triggers {
    pollSCM('* * * * *')
    cron('@daily')
  }

  stages {
    stage('Clean') {
      steps {
        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
        sh 'rm -fr node_modules'
      }
    }
    stage('Install') {
      agent {
        docker {
          image 'node:6.11.2'
          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
          reuseNode true
        }
      }
      steps {
        sh 'git config --global core.logallrefupdates false'
        sh 'rm -fr node_modules'
        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: '_docker-runner'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/docker-runner-sharelatex']]])
        sh 'npm install ./_docker-runner'
        sh 'rm -fr ./_docker-runner ./_docker-runner@tmp'
        sh 'npm install'
        sh 'npm rebuild'
        sh 'npm install --quiet grunt-cli'
      }
    }
    stage('Compile and Test') {
      agent {
        docker {
          image 'node:6.11.2'
          reuseNode true
        }
      }
      steps {
        sh 'node_modules/.bin/grunt compile:app'
        sh 'node_modules/.bin/grunt compile:acceptance_tests'
        sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
      }
    }
    stage('Acceptance Tests') {
      environment {
        TEXLIVE_IMAGE="quay.io/sharelatex/texlive-full:2017.1"
      }
      steps {
        sh 'mkdir -p compiles cache'
        // Not yet running, due to volumes/sibling containers
        sh 'docker container prune -f'
        sh 'docker pull $TEXLIVE_IMAGE'
        sh 'docker pull sharelatex/acceptance-test-runner:clsi-6.11.2'
        sh 'docker run --rm -e SIBLING_CONTAINER_USER=root -e SANDBOXED_COMPILES_HOST_DIR=$(pwd)/compiles -e SANDBOXED_COMPILES_SIBLING_CONTAINERS=true -e TEXLIVE_IMAGE=$TEXLIVE_IMAGE -v /var/run/docker.sock:/var/run/docker.sock -v $(pwd):/app sharelatex/acceptance-test-runner:clsi-6.11.2'
        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
        sh 'rm -r compiles cache server.log db.sqlite config/settings.defaults.coffee'
      }
    }
    stage('Package') {
      steps {
        sh 'echo ${BUILD_NUMBER} > build_number.txt'
        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
        sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
      }
    }
    stage('Publish') {
      steps {
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
          // The deployment process uses this file to figure out the latest build
          s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
        }
      }
    }
  }

  post {
    failure {
      mail(from: "${EMAIL_ALERT_FROM}",
           to: "${EMAIL_ALERT_TO}",
           subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
           body: "Build: ${BUILD_URL}")
    }
  }

  // The options directive is for configuration that applies to the whole job.
  options {
    // we'd like to make sure we remove old builds, so we don't fill up our storage!
    buildDiscarder(logRotator(numToKeepStr:'50'))

    // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
    timeout(time: 30, unit: 'MINUTES')
  }
}
72 app.coffee
@@ -2,7 +2,12 @@ CompileController = require "./app/js/CompileController"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
logger.initialize("clsi")
if Settings.sentry?.dsn?
  logger.initializeErrorReporting(Settings.sentry.dsn)

smokeTest = require "smoke-test-sharelatex"
ContentTypeMapper = require "./app/js/ContentTypeMapper"
Errors = require './app/js/Errors'

Path = require "path"
fs = require "fs"
@@ -32,12 +37,42 @@ app.use (req, res, next) ->
  res.setTimeout TIMEOUT
  next()

app.param 'project_id', (req, res, next, project_id) ->
  if project_id?.match /^[a-zA-Z0-9_-]+$/
    next()
  else
    next new Error("invalid project id")

app.param 'user_id', (req, res, next, user_id) ->
  if user_id?.match /^[0-9a-f]{24}$/
    next()
  else
    next new Error("invalid user id")

app.param 'build_id', (req, res, next, build_id) ->
  if build_id?.match OutputCacheManager.BUILD_REGEX
    next()
  else
    next new Error("invalid build id #{build_id}")

app.post "/project/:project_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
app.post "/project/:project_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id", CompileController.clearCache

app.get "/project/:project_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/wordcount", CompileController.wordcount
app.get "/project/:project_id/status", CompileController.status

# Per-user containers
app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id/user/:user_id", CompileController.clearCache

app.get "/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/user/:user_id/wordcount", CompileController.wordcount

ForbidSymlinks = require "./app/js/StaticServerForbidSymlinks"

@@ -46,17 +81,28 @@ ForbidSymlinks = require "./app/js/StaticServerForbidSymlinks"
# and serving the files
staticServer = ForbidSymlinks express.static, Settings.path.compilesDir, setHeaders: (res, path, stat) ->
  if Path.basename(path) == "output.pdf"
    res.set("Content-Type", "application/pdf")
    # Calculate an etag in the same way as nginx
    # https://github.com/tj/send/issues/65
    etag = (path, stat) ->
      '"' + Math.ceil(+stat.mtime / 1000).toString(16) +
        '-' + Number(stat.size).toString(16) + '"'
    res.set("Etag", etag(path, stat))
  else
    # Force plain treatment of other file types to prevent hosting of HTTP/JS files
    # that could be used in same-origin/XSS attacks.
    res.set("Content-Type", "text/plain")
    res.set("Content-Type", ContentTypeMapper.map(path))

app.get "/project/:project_id/user/:user_id/build/:build_id/output/*", (req, res, next) ->
  # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url = "/#{req.params.project_id}-#{req.params.user_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
  staticServer(req, res, next)

app.get "/project/:project_id/build/:build_id/output/*", (req, res, next) ->
  # for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
  staticServer(req, res, next)

app.get "/project/:project_id/user/:user_id/output/*", (req, res, next) ->
  # for specific user get the path to the top level file
  req.url = "/#{req.params.project_id}-#{req.params.user_id}/#{req.params[0]}"
  staticServer(req, res, next)

app.get "/project/:project_id/output/*", (req, res, next) ->
  if req.query?.build? && req.query.build.match(OutputCacheManager.BUILD_REGEX)
@@ -66,6 +112,11 @@ app.get "/project/:project_id/output/*", (req, res, next) ->
    req.url = "/#{req.params.project_id}/#{req.params[0]}"
  staticServer(req, res, next)

app.get "/oops", (req, res, next) ->
  logger.error {err: "hello"}, "test error"
  res.send "error\n"

app.get "/status", (req, res, next) ->
  res.send "CLSI is alive\n"

@@ -82,7 +133,7 @@ if Settings.smokeTest
  do runSmokeTest = ->
    logger.log("running smoke tests")
    smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher)
    setTimeout(runSmokeTest, 20 * 1000)
    setTimeout(runSmokeTest, 30 * 1000)

app.get "/health_check", (req, res)->
  res.contentType(resCacher?.setContentType)
@@ -102,8 +153,12 @@ app.get "/heapdump", (req, res)->
  res.send filename

app.use (error, req, res, next) ->
  logger.error err: error, "server error"
  res.sendStatus(error?.statusCode || 500)
  if error instanceof Errors.NotFoundError
    logger.warn {err: error, url: req.url}, "not found error"
    return res.sendStatus(404)
  else
    logger.error {err: error, url: req.url}, "server error"
    res.sendStatus(error?.statusCode || 500)

app.listen port = (Settings.internal?.clsi?.port or 3013), host = (Settings.internal?.clsi?.host or "localhost"), (error) ->
  logger.info "CLSI starting up, listening on #{host}:#{port}"
@@ -111,3 +166,4 @@ app.listen port = (Settings.internal?.clsi?.port or 3013), host = (Settings.inte
setInterval () ->
  ProjectPersistenceManager.clearExpiredProjects()
, tenMinutes = 10 * 60 * 1000
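For reference, the nginx-compatible etag above is simply the file's mtime in whole seconds and its size in bytes, both in hex. A minimal standalone sketch of the helper (the stat values are made up):

```coffee
# Standalone version of the etag helper used above; format matches nginx:
# '"<mtime in seconds, hex>-<size in bytes, hex>"'
etag = (stat) ->
  '"' + Math.ceil(+stat.mtime / 1000).toString(16) +
    '-' + Number(stat.size).toString(16) + '"'

# Hypothetical stat, purely to show the shape of the result:
fakeStat = { mtime: new Date(1000 * 0x59000000), size: 0x10000 }
console.log etag(fakeStat)  # => '"59000000-10000"'
```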
@@ -1,12 +1,44 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"

logger.info "using standard command runner"

module.exports = CommandRunner =
  run: (project_id, command, directory, timeout, callback = (error) ->) ->
  run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
    command = (arg.replace('$COMPILE_DIR', directory) for arg in command)
    logger.log project_id: project_id, command: command, directory: directory, "running command"
    logger.warn "timeouts and sandboxing are not enabled with CommandRunner"

    proc = spawn command[0], command.slice(1), stdio: "inherit", cwd: directory
    proc.on "close", () ->
      callback()
    # merge environment settings
    env = {}
    env[key] = value for key, value of process.env
    env[key] = value for key, value of environment

    # run command as detached process so it has its own process group (which can be killed if needed)
    proc = spawn command[0], command.slice(1), stdio: "inherit", cwd: directory, detached: true, env: env

    proc.on "error", (err)->
      logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command"
      callback(err)

    proc.on "close", (code, signal) ->
      logger.info code:code, signal:signal, project_id:project_id, "command exited"
      if signal is 'SIGTERM' # signal from kill method below
        err = new Error("terminated")
        err.terminated = true
        return callback(err)
      else if code is 1 # exit status from chktex
        err = new Error("exited")
        err.code = code
        return callback(err)
      else
        callback()

    return proc.pid # return process id to allow job to be killed if necessary

  kill: (pid, callback = (error) ->) ->
    try
      process.kill -pid # kill all processes in group
    catch err
      return callback(err)
    callback()
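The widened `run` signature above threads an image name and extra environment through to the runner and returns the detached process's pid so the whole process group can be killed. A minimal sketch of a call site (all argument values are illustrative):

```coffee
CommandRunner = require "./app/js/CommandRunner"

# Illustrative values; this local runner ignores `image`, which only matters
# for the docker-based runner selected via Settings.clsi.commandRunner.
pid = CommandRunner.run "example-project", ["echo", "compiling in", "$COMPILE_DIR"],
  "/tmp/compiles/example-project", "quay.io/sharelatex/texlive-full:2017.1", 60 * 1000, {}, (error) ->
    console.log "command finished", error?.message

# A stuck job can later be stopped via the process group:
# CommandRunner.kill pid, (error) -> console.log "killed"
```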
@@ -4,6 +4,7 @@ Settings = require "settings-sharelatex"
Metrics = require "./Metrics"
ProjectPersistenceManager = require "./ProjectPersistenceManager"
logger = require "logger-sharelatex"
Errors = require "./Errors"

module.exports = CompileController =
  compile: (req, res, next = (error) ->) ->
@@ -11,35 +12,64 @@ module.exports = CompileController =
    RequestParser.parse req.body, (error, request) ->
      return next(error) if error?
      request.project_id = req.params.project_id
      request.user_id = req.params.user_id if req.params.user_id?
      ProjectPersistenceManager.markProjectAsJustAccessed request.project_id, (error) ->
        return next(error) if error?
        CompileManager.doCompile request, (error, outputFiles = []) ->
          if error?
            logger.error err: error, project_id: request.project_id, "error running compile"
        CompileManager.doCompileWithLock request, (error, outputFiles = []) ->
          if error instanceof Errors.AlreadyCompilingError
            code = 423 # Http 423 Locked
            status = "compile-in-progress"
          else if error instanceof Errors.FilesOutOfSyncError
            code = 409 # Http 409 Conflict
            status = "retry"
          else if error?.terminated
            status = "terminated"
          else if error?.validate
            status = "validation-#{error.validate}"
          else if error?
            if error.timedout
              status = "timedout"
              logger.log err: error, project_id: request.project_id, "timeout running compile"
            else
              status = "error"
              code = 500
              logger.error err: error, project_id: request.project_id, "error running compile"
          else
            status = "failure"
            for file in outputFiles
              if file.path?.match(/output\.pdf$/)
                status = "success"
              if file.path?.match(/output\.html$/)
                status = "success"
            # log an error if any core files are found
            for file in outputFiles
              if file.path is "core"
                logger.error project_id:request.project_id, req:req, outputFiles:outputFiles, "core file found in output"

          timer.done()
          res.status(code or 200).send {
            compile:
              status: status
              error: error?.message or error
              error: error?.message or error
              outputFiles: outputFiles.map (file) ->
                url: "#{Settings.apis.clsi.url}/project/#{request.project_id}/output/#{file.path}"
                url:
                  "#{Settings.apis.clsi.url}/project/#{request.project_id}" +
                  (if request.user_id? then "/user/#{request.user_id}" else "") +
                  (if file.build? then "/build/#{file.build}" else "") +
                  "/output/#{file.path}"
                path: file.path
                type: file.type
                build: file.build
          }

  stopCompile: (req, res, next) ->
    {project_id, user_id} = req.params
    CompileManager.stopCompile project_id, user_id, (error) ->
      return next(error) if error?
      res.sendStatus(204)

  clearCache: (req, res, next = (error) ->) ->
    ProjectPersistenceManager.clearProject req.params.project_id, (error) ->
    ProjectPersistenceManager.clearProject req.params.project_id, req.params.user_id, (error) ->
      return next(error) if error?
      res.sendStatus(204) # No content

@@ -48,8 +78,9 @@ module.exports = CompileController =
    line = parseInt(req.query.line, 10)
    column = parseInt(req.query.column, 10)
    project_id = req.params.project_id
    user_id = req.params.user_id

    CompileManager.syncFromCode project_id, file, line, column, (error, pdfPositions) ->
    CompileManager.syncFromCode project_id, user_id, file, line, column, (error, pdfPositions) ->
      return next(error) if error?
      res.send JSON.stringify {
        pdf: pdfPositions
@@ -60,8 +91,9 @@ module.exports = CompileController =
    h = parseFloat(req.query.h)
    v = parseFloat(req.query.v)
    project_id = req.params.project_id
    user_id = req.params.user_id

    CompileManager.syncFromPdf project_id, page, h, v, (error, codePositions) ->
    CompileManager.syncFromPdf project_id, user_id, page, h, v, (error, codePositions) ->
      return next(error) if error?
      res.send JSON.stringify {
        code: codePositions
@@ -70,9 +102,16 @@ module.exports = CompileController =
  wordcount: (req, res, next = (error) ->) ->
    file = req.query.file || "main.tex"
    project_id = req.params.project_id
    user_id = req.params.user_id
    image = req.query.image
    logger.log {image, file, project_id}, "word count request"

    CompileManager.wordcount project_id, file, (error, result) ->
    CompileManager.wordcount project_id, user_id, file, image, (error, result) ->
      return next(error) if error?
      res.send JSON.stringify {
        texcount: result
      }

  status: (req, res, next = (error)-> )->
    res.send("OK")
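Taken together, a successful compile response now carries per-user, per-build output URLs. A sketch of the resulting JSON shape (all identifier values below are hypothetical):

```coffee
# Hypothetical values standing in for Settings.apis.clsi.url and the request ids
clsiUrl   = "http://localhost:3013"
projectId = "507f1f77bcf86cd799439011"
userId    = "507f191e810c19729de860ea"
buildId   = "15e5b2c3d4a-9f8e7d6c5b4a3f2e"

# Shape of the body sent by `compile` above on success:
response =
  compile:
    status: "success"
    error: null
    outputFiles: [
      url: "#{clsiUrl}/project/#{projectId}/user/#{userId}/build/#{buildId}/output/output.pdf"
      path: "output.pdf"
      type: "pdf"
      build: buildId
    ]
console.log JSON.stringify(response, null, 2)
```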
@@ -7,82 +7,247 @@ Path = require "path"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
child_process = require "child_process"
CommandRunner = require(Settings.clsi?.commandRunner or "./CommandRunner")
DraftModeManager = require "./DraftModeManager"
TikzManager = require "./TikzManager"
LockManager = require "./LockManager"
fs = require("fs")
fse = require "fs-extra"
os = require("os")
async = require "async"
Errors = require './Errors'

commandRunner = Settings.clsi?.commandRunner or "./CommandRunner"
logger.info commandRunner:commandRunner, "selecting command runner for clsi"
CommandRunner = require(commandRunner)

getCompileName = (project_id, user_id) ->
  if user_id? then "#{project_id}-#{user_id}" else project_id

getCompileDir = (project_id, user_id) ->
  Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))

module.exports = CompileManager =

  doCompileWithLock: (request, callback = (error, outputFiles) ->) ->
    compileDir = getCompileDir(request.project_id, request.user_id)
    lockFile = Path.join(compileDir, ".project-lock")
    # use a .project-lock file in the compile directory to prevent
    # simultaneous compiles
    fse.ensureDir compileDir, (error) ->
      return callback(error) if error?
      LockManager.runWithLock lockFile, (releaseLock) ->
        CompileManager.doCompile(request, releaseLock)
      , callback

  doCompile: (request, callback = (error, outputFiles) ->) ->
    compileDir = Path.join(Settings.path.compilesDir, request.project_id)
    compileDir = getCompileDir(request.project_id, request.user_id)

    timer = new Metrics.Timer("write-to-disk")
    logger.log project_id: request.project_id, "starting compile"
    ResourceWriter.syncResourcesToDisk request.project_id, request.resources, compileDir, (error) ->
      return callback(error) if error?
      logger.log project_id: request.project_id, time_taken: Date.now() - timer.start, "written files to disk"
    logger.log project_id: request.project_id, user_id: request.user_id, "syncing resources to disk"
    ResourceWriter.syncResourcesToDisk request, compileDir, (error, resourceList) ->
      # NOTE: resourceList is insecure, it should only be used to exclude files from the output list
      if error? and error instanceof Errors.FilesOutOfSyncError
        logger.warn project_id: request.project_id, user_id: request.user_id, "files out of sync, please retry"
        return callback(error)
      else if error?
        logger.err err:error, project_id: request.project_id, user_id: request.user_id, "error writing resources to disk"
        return callback(error)
      logger.log project_id: request.project_id, user_id: request.user_id, time_taken: Date.now() - timer.start, "written files to disk"
      timer.done()

      timer = new Metrics.Timer("run-compile")
      Metrics.inc("compiles")
      LatexRunner.runLatex request.project_id, {
        directory: compileDir
        mainFile: request.rootResourcePath
        compiler: request.compiler
        timeout: request.timeout
      }, (error) ->
        return callback(error) if error?
        logger.log project_id: request.project_id, time_taken: Date.now() - timer.start, "done compile"
        timer.done()
      injectDraftModeIfRequired = (callback) ->
        if request.draft
          DraftModeManager.injectDraftMode Path.join(compileDir, request.rootResourcePath), callback
        else
          callback()

        OutputFileFinder.findOutputFiles request.resources, compileDir, (error, outputFiles) ->
      createTikzFileIfRequired = (callback) ->
        TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, usesTikzExternalize) ->
          return callback(error) if error?
          OutputCacheManager.saveOutputFiles outputFiles, compileDir, (error, newOutputFiles) ->
            callback null, newOutputFiles
          if usesTikzExternalize
            TikzManager.injectOutputFile compileDir, request.rootResourcePath, callback
          else
            callback()

  clearProject: (project_id, _callback = (error) ->) ->
      # set up environment variables for chktex
      env = {}
      # only run chktex on LaTeX files (not knitr .Rtex files or any others)
      isLaTeXFile = request.rootResourcePath?.match(/\.tex$/i)
      if request.check? and isLaTeXFile
        env['CHKTEX_OPTIONS'] = '-nall -e9 -e10 -w15 -w16'
        env['CHKTEX_ULIMIT_OPTIONS'] = '-t 5 -v 64000'
        if request.check is 'error'
          env['CHKTEX_EXIT_ON_ERROR'] = 1
        if request.check is 'validate'
          env['CHKTEX_VALIDATE'] = 1

      # apply a series of file modifications/creations for draft mode and tikz
      async.series [injectDraftModeIfRequired, createTikzFileIfRequired], (error) ->
        return callback(error) if error?
        timer = new Metrics.Timer("run-compile")
        # find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
        tag = request.imageName?.match(/:(.*)/)?[1]?.replace(/\./g,'-') or "default"
        tag = "other" if not request.project_id.match(/^[0-9a-f]{24}$/) # exclude smoke test
        Metrics.inc("compiles")
        Metrics.inc("compiles-with-image.#{tag}")
        compileName = getCompileName(request.project_id, request.user_id)
        LatexRunner.runLatex compileName, {
          directory: compileDir
          mainFile: request.rootResourcePath
          compiler: request.compiler
          timeout: request.timeout
          image: request.imageName
          environment: env
        }, (error, output, stats, timings) ->
          # request was for validation only
          if request.check is "validate"
            result = if error?.code then "fail" else "pass"
            error = new Error("validation")
            error.validate = result
          # request was for compile, and failed on validation
          if request.check is "error" and error?.message is 'exited'
            error = new Error("compilation")
            error.validate = "fail"
          # compile was killed by user, was a validation, or a compile which failed validation
          if error?.terminated or error?.validate
            OutputFileFinder.findOutputFiles resourceList, compileDir, (err, outputFiles) ->
              return callback(err) if err?
              callback(error, outputFiles) # return output files so user can check logs
            return
          # compile completed normally
          return callback(error) if error?
          Metrics.inc("compiles-succeeded")
          for metric_key, metric_value of stats or {}
            Metrics.count(metric_key, metric_value)
          for metric_key, metric_value of timings or {}
            Metrics.timing(metric_key, metric_value)
          loadavg = os.loadavg?()
          Metrics.gauge("load-avg", loadavg[0]) if loadavg?
          ts = timer.done()
          logger.log {project_id: request.project_id, user_id: request.user_id, time_taken: ts, stats:stats, timings:timings, loadavg:loadavg}, "done compile"
          if stats?["latex-runs"] > 0
            Metrics.timing("run-compile-per-pass", ts / stats["latex-runs"])
          if stats?["latex-runs"] > 0 and timings?["cpu-time"] > 0
            Metrics.timing("run-compile-cpu-time-per-pass", timings["cpu-time"] / stats["latex-runs"])

          OutputFileFinder.findOutputFiles resourceList, compileDir, (error, outputFiles) ->
            return callback(error) if error?
            OutputCacheManager.saveOutputFiles outputFiles, compileDir, (error, newOutputFiles) ->
              callback null, newOutputFiles

  stopCompile: (project_id, user_id, callback = (error) ->) ->
    compileName = getCompileName(project_id, user_id)
    LatexRunner.killLatex compileName, callback

  clearProject: (project_id, user_id, _callback = (error) ->) ->
    callback = (error) ->
      _callback(error)
      _callback = () ->

    compileDir = Path.join(Settings.path.compilesDir, project_id)
    proc = child_process.spawn "rm", ["-r", compileDir]
    compileDir = getCompileDir(project_id, user_id)

    proc.on "error", callback
    CompileManager._checkDirectory compileDir, (err, exists) ->
      return callback(err) if err?
      return callback() if not exists # skip removal if no directory present

      stderr = ""
      proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
      proc = child_process.spawn "rm", ["-r", compileDir]

      proc.on "close", (code) ->
        if code == 0
          return callback(null)
      proc.on "error", callback

      stderr = ""
      proc.stderr.on "data", (chunk) -> stderr += chunk.toString()

      proc.on "close", (code) ->
        if code == 0
          return callback(null)
        else
          return callback(new Error("rm -r #{compileDir} failed: #{stderr}"))

  _findAllDirs: (callback = (error, allDirs) ->) ->
    root = Settings.path.compilesDir
    fs.readdir root, (err, files) ->
      return callback(err) if err?
      allDirs = (Path.join(root, file) for file in files)
      callback(null, allDirs)

  clearExpiredProjects: (max_cache_age_ms, callback = (error) ->) ->
    now = Date.now()
    # action for each directory
    expireIfNeeded = (checkDir, cb) ->
      fs.stat checkDir, (err, stats) ->
        return cb() if err? # ignore errors checking directory
        age = now - stats.mtime
        hasExpired = (age > max_cache_age_ms)
        if hasExpired then fse.remove(checkDir, cb) else cb()
    # iterate over all project directories
    CompileManager._findAllDirs (error, allDirs) ->
      return callback() if error?
      async.eachSeries allDirs, expireIfNeeded, callback

  _checkDirectory: (compileDir, callback = (error, exists) ->) ->
    fs.lstat compileDir, (err, stats) ->
      if err?.code is 'ENOENT'
        return callback(null, false) # directory does not exist
      else if err?
        logger.err {dir: compileDir, err:err}, "error on stat of project directory for removal"
        return callback(err)
      else if not stats?.isDirectory()
        logger.err {dir: compileDir, stats:stats}, "bad project directory for removal"
        return callback new Error("project directory is not directory")
      else
        return callback(new Error("rm -r #{compileDir} failed: #{stderr}"))
        callback(null, true) # directory exists

  syncFromCode: (project_id, file_name, line, column, callback = (error, pdfPositions) ->) ->
  syncFromCode: (project_id, user_id, file_name, line, column, callback = (error, pdfPositions) ->) ->
    # If LaTeX was run in a virtual environment, the file path that synctex expects
    # might not match the file path on the host. The .synctex.gz file however, will be accessed
    # wherever it is on the host.
    base_dir = Settings.path.synctexBaseDir(project_id)
    compileName = getCompileName(project_id, user_id)
    base_dir = Settings.path.synctexBaseDir(compileName)
    file_path = base_dir + "/" + file_name
    synctex_path = Path.join(Settings.path.compilesDir, project_id, "output.pdf")
    compileDir = getCompileDir(project_id, user_id)
    synctex_path = Path.join(compileDir, "output.pdf")
    CompileManager._runSynctex ["code", synctex_path, file_path, line, column], (error, stdout) ->
      return callback(error) if error?
      logger.log project_id: project_id, file_name: file_name, line: line, column: column, stdout: stdout, "synctex code output"
      logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, stdout: stdout, "synctex code output"
      callback null, CompileManager._parseSynctexFromCodeOutput(stdout)

  syncFromPdf: (project_id, page, h, v, callback = (error, filePositions) ->) ->
    base_dir = Settings.path.synctexBaseDir(project_id)
    synctex_path = Path.join(Settings.path.compilesDir, project_id, "output.pdf")
  syncFromPdf: (project_id, user_id, page, h, v, callback = (error, filePositions) ->) ->
    compileName = getCompileName(project_id, user_id)
    base_dir = Settings.path.synctexBaseDir(compileName)
    compileDir = getCompileDir(project_id, user_id)
    synctex_path = Path.join(compileDir, "output.pdf")
    CompileManager._runSynctex ["pdf", synctex_path, page, h, v], (error, stdout) ->
      return callback(error) if error?
      logger.log project_id: project_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
      logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
      callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)

  _checkFileExists: (path, callback = (error) ->) ->
    synctexDir = Path.dirname(path)
    synctexFile = Path.join(synctexDir, "output.synctex.gz")
    fs.stat synctexDir, (error, stats) ->
      if error?.code is 'ENOENT'
        return callback(new Errors.NotFoundError("called synctex with no output directory"))
      return callback(error) if error?
      fs.stat synctexFile, (error, stats) ->
        if error?.code is 'ENOENT'
          return callback(new Errors.NotFoundError("called synctex with no output file"))
        return callback(error) if error?
        return callback(new Error("not a file")) if not stats?.isFile()
        callback()

  _runSynctex: (args, callback = (error, stdout) ->) ->
    bin_path = Path.resolve(__dirname + "/../../bin/synctex")
    seconds = 1000
    child_process.execFile bin_path, args, timeout: 10 * seconds, (error, stdout, stderr) ->
    outputFilePath = args[1]
    CompileManager._checkFileExists outputFilePath, (error) ->
      return callback(error) if error?
      callback(null, stdout)
      if Settings.clsi?.synctexCommandWrapper?
        [bin_path, args] = Settings.clsi?.synctexCommandWrapper bin_path, args
      child_process.execFile bin_path, args, timeout: 10 * seconds, (error, stdout, stderr) ->
        if error?
          logger.err err:error, args:args, "error running synctex"
          return callback(error)
        callback(null, stdout)

  _parseSynctexFromCodeOutput: (output) ->
    results = []
@@ -111,17 +276,23 @@ module.exports = CompileManager =
    }
    return results

  wordcount: (project_id, file_name, callback = (error, pdfPositions) ->) ->
    logger.log project_id:project_id, file_name:file_name, "running wordcount"
  wordcount: (project_id, user_id, file_name, image, callback = (error, pdfPositions) ->) ->
    logger.log project_id:project_id, user_id:user_id, file_name:file_name, image:image, "running wordcount"
    file_path = "$COMPILE_DIR/" + file_name
    command = [ "texcount", '-inc', file_path, "-out=" + file_path + ".wc"]
    directory = Path.join(Settings.path.compilesDir, project_id)
    command = [ "texcount", '-nocol', '-inc', file_path, "-out=" + file_path + ".wc"]
    directory = getCompileDir(project_id, user_id)
    timeout = 10 * 1000
    compileName = getCompileName(project_id, user_id)

    CommandRunner.run project_id, command, directory, timeout, (error) ->
    CommandRunner.run compileName, command, directory, image, timeout, {}, (error) ->
      return callback(error) if error?
      stdout = fs.readFileSync(directory + "/" + file_name + ".wc", "utf-8")
      callback null, CompileManager._parseWordcountFromOutput(stdout)
      fs.readFile directory + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
        if err?
          logger.err err:err, command:command, directory:directory, project_id:project_id, user_id:user_id, "error reading word count output"
          return callback(err)
        results = CompileManager._parseWordcountFromOutput(stdout)
        logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results"
        callback null, results

  _parseWordcountFromOutput: (output) ->
    results = {
@@ -133,6 +304,8 @@ module.exports = CompileManager =
      elements: 0
      mathInline: 0
      mathDisplay: 0
      errors: 0
      messages: ""
    }
    for line in output.split("\n")
      [data, info] = line.split(":")
@@ -152,4 +325,8 @@ module.exports = CompileManager =
      results['mathInline'] = parseInt(info, 10)
      if data.indexOf("Number of math displayed") > -1
        results['mathDisplay'] = parseInt(info, 10)
      if data is "(errors" # errors reported as (errors:123)
        results['errors'] = parseInt(info, 10)
      if line.indexOf("!!! ") > -1 # errors logged as !!! message !!!
        results['messages'] += line + "\n"
    return results
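The wordcount parser consumes `texcount` report lines of the `Label: value` form; a small sketch exercising only the patterns visible in the hunk above (the sample lines are a hypothetical texcount fragment):

```coffee
CompileManager = require "./app/js/CompileManager"

# Hypothetical texcount output fragment; only the patterns matched above
# ("Number of math displayed", "(errors:N)" and "!!! ... !!!") are relied on.
sample = """
  Number of math displayed: 4
  (errors:1)
  !!! Error: Something went wrong !!!
"""
results = CompileManager._parseWordcountFromOutput(sample)
console.log results.mathDisplay, results.errors  # => 4 1
console.log results.messages                     # => the "!!! ... !!!" line
```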
24 app/coffee/ContentTypeMapper.coffee (new file)
@@ -0,0 +1,24 @@
Path = require 'path'

# here we coerce html, css and js to text/plain,
# otherwise choose correct mime type based on file extension,
# falling back to octet-stream
module.exports = ContentTypeMapper =
  map: (path) ->
    switch Path.extname(path)
      when '.txt', '.html', '.js', '.css', '.svg'
        return 'text/plain'
      when '.csv'
        return 'text/csv'
      when '.pdf'
        return 'application/pdf'
      when '.png'
        return 'image/png'
      when '.jpg', '.jpeg'
        return 'image/jpeg'
      when '.tiff'
        return 'image/tiff'
      when '.gif'
        return 'image/gif'
      else
        return 'application/octet-stream'
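Behaviour of the mapper above, for illustration (the paths are made up):

```coffee
ContentTypeMapper = require "./app/js/ContentTypeMapper"

console.log ContentTypeMapper.map("figures/plot.png")  # => 'image/png'
console.log ContentTypeMapper.map("output.html")       # => 'text/plain' (deliberately downgraded)
console.log ContentTypeMapper.map("data.dat")          # => 'application/octet-stream' (fallback)
```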
24 app/coffee/DraftModeManager.coffee (new file)
@@ -0,0 +1,24 @@
fs = require "fs"
logger = require "logger-sharelatex"

module.exports = DraftModeManager =
  injectDraftMode: (filename, callback = (error) ->) ->
    fs.readFile filename, "utf8", (error, content) ->
      return callback(error) if error?
      # avoid adding draft mode more than once
      if content?.indexOf("\\documentclass\[draft") >= 0
        return callback()
      modified_content = DraftModeManager._injectDraftOption content
      logger.log {
        content: content.slice(0,1024), # \documentclass is normally v near the top
        modified_content: modified_content.slice(0,1024),
        filename
      }, "injected draft class"
      fs.writeFile filename, modified_content, callback

  _injectDraftOption: (content) ->
    content
      # With existing options (must be first, otherwise both are applied)
      .replace(/\\documentclass\[/g, "\\documentclass[draft,")
      # Without existing options
      .replace(/\\documentclass\{/g, "\\documentclass[draft]{")
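The two replacements above cover both `\documentclass` forms, and as the comment notes, the order matters. For illustration:

```coffee
DraftModeManager = require "./app/js/DraftModeManager"

# With an existing option list, "draft" is prepended to it:
console.log DraftModeManager._injectDraftOption("\\documentclass[11pt]{article}")
# => \documentclass[draft,11pt]{article}

# Without options, a [draft] block is inserted:
console.log DraftModeManager._injectDraftOption("\\documentclass{article}")
# => \documentclass[draft]{article}
```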
25 app/coffee/Errors.coffee (new file)
@@ -0,0 +1,25 @@
NotFoundError = (message) ->
  error = new Error(message)
  error.name = "NotFoundError"
  error.__proto__ = NotFoundError.prototype
  return error
NotFoundError.prototype.__proto__ = Error.prototype

FilesOutOfSyncError = (message) ->
  error = new Error(message)
  error.name = "FilesOutOfSyncError"
  error.__proto__ = FilesOutOfSyncError.prototype
  return error
FilesOutOfSyncError.prototype.__proto__ = Error.prototype

AlreadyCompilingError = (message) ->
  error = new Error(message)
  error.name = "AlreadyCompilingError"
  error.__proto__ = AlreadyCompilingError.prototype
  return error
AlreadyCompilingError.prototype.__proto__ = Error.prototype

module.exports = Errors =
  NotFoundError: NotFoundError
  FilesOutOfSyncError: FilesOutOfSyncError
  AlreadyCompilingError: AlreadyCompilingError
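Setting the prototype chain this way keeps `instanceof` working across modules, which is how CompileController distinguishes lock and sync failures earlier in this diff. For illustration:

```coffee
Errors = require "./app/js/Errors"

error = new Errors.AlreadyCompilingError("compile in progress")
console.log error instanceof Errors.AlreadyCompilingError  # => true
console.log error instanceof Error                         # => true
console.log error.name                                     # => 'AlreadyCompilingError'
```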
@@ -4,17 +4,19 @@ logger = require "logger-sharelatex"
Metrics = require "./Metrics"
CommandRunner = require(Settings.clsi?.commandRunner or "./CommandRunner")

ProcessTable = {} # table of currently running jobs (pids or docker container names)

module.exports = LatexRunner =
  runLatex: (project_id, options, callback = (error) ->) ->
    {directory, mainFile, compiler, timeout} = options
    {directory, mainFile, compiler, timeout, image, environment} = options
    compiler ||= "pdflatex"
    timeout ||= 60000 # milliseconds

    logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, "starting compile"
    logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, "starting compile"

    # We want to run latexmk on the tex file which we will automatically
    # generate from the Rtex/Rmd/md file.
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex")
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".md")

    if compiler == "pdflatex"
      command = LatexRunner._pdflatexCommand mainFile
@@ -27,31 +29,72 @@ module.exports = LatexRunner =
    else
      return callback new Error("unknown compiler: #{compiler}")

    CommandRunner.run project_id, command, directory, timeout, callback
    if Settings.clsi?.strace
      command = ["strace", "-o", "strace", "-ff"].concat(command)

  _latexmkBaseCommand: [ "latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR"]
    # ignore the above and make a pandoc command
    console.log(mainFile)
    console.log(image)
    image = "ivotron/pandoc"
    command = ["-o", "$COMPILE_DIR/output.html", "/compile/" + mainFile]

    id = "#{project_id}" # record running project under this id

    ProcessTable[id] = CommandRunner.run project_id, command, directory, image, timeout, environment, (error, output) ->
      delete ProcessTable[id]
      return callback(error) if error?
      runs = output?.stderr?.match(/^Run number \d+ of .*latex/mg)?.length or 0
      failed = if output?.stdout?.match(/^Latexmk: Errors/m)? then 1 else 0
      # counters from latexmk output
      stats = {}
      stats["latexmk-errors"] = failed
      stats["latex-runs"] = runs
      stats["latex-runs-with-errors"] = if failed then runs else 0
      stats["latex-runs-#{runs}"] = 1
      stats["latex-runs-with-errors-#{runs}"] = if failed then 1 else 0
      # timing information from /usr/bin/time
      timings = {}
      stderr = output?.stderr
      timings["cpu-percent"] = stderr?.match(/Percent of CPU this job got: (\d+)/m)?[1] or 0
      timings["cpu-time"] = stderr?.match(/User time.*: (\d+.\d+)/m)?[1] or 0
      timings["sys-time"] = stderr?.match(/System time.*: (\d+.\d+)/m)?[1] or 0
      callback error, output, stats, timings

  killLatex: (project_id, callback = (error) ->) ->
    id = "#{project_id}"
    logger.log {id:id}, "killing running compile"
    if not ProcessTable[id]?
      logger.warn {id}, "no such project to kill"
      return callback(null)
    else
      CommandRunner.kill ProcessTable[id], callback

  _latexmkBaseCommand: (Settings?.clsi?.latexmkCommandPrefix || []).concat([
    "latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR",
    "-synctex=1","-interaction=batchmode"
  ])

  _pdflatexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-pdf", "-e", "$pdflatex='pdflatex -synctex=1 -interaction=batchmode %O %S'",
      "-pdf",
      Path.join("$COMPILE_DIR", mainFile)
    ]

  _latexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-pdfdvi", "-e", "$latex='latex -synctex=1 -interaction=batchmode %O %S'",
      "-pdfdvi",
      Path.join("$COMPILE_DIR", mainFile)
    ]

  _xelatexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-xelatex", "-e", "$pdflatex='xelatex -synctex=1 -interaction=batchmode %O %S'",
      "-xelatex",
      Path.join("$COMPILE_DIR", mainFile)
    ]

  _lualatexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-pdf", "-e", "$pdflatex='lualatex -synctex=1 -interaction=batchmode %O %S'",
      "-lualatex",
      Path.join("$COMPILE_DIR", mainFile)
    ]
23 app/coffee/LockManager.coffee (new file)
@@ -0,0 +1,23 @@
Settings = require('settings-sharelatex')
logger = require "logger-sharelatex"
Lockfile = require('lockfile') # from https://github.com/npm/lockfile
Errors = require "./Errors"

module.exports = LockManager =
  LOCK_TEST_INTERVAL: 1000 # 1s between each test of the lock
  MAX_LOCK_WAIT_TIME: 15000 # 15s maximum time to spend trying to get the lock
  LOCK_STALE: 5*60*1000 # 5 mins time until lock auto expires

  runWithLock: (path, runner = ((releaseLock = (error) ->) ->), callback = ((error) ->)) ->
    lockOpts =
      wait: @MAX_LOCK_WAIT_TIME
      pollPeriod: @LOCK_TEST_INTERVAL
      stale: @LOCK_STALE
    Lockfile.lock path, lockOpts, (error) ->
      return callback new Errors.AlreadyCompilingError("compile in progress") if error?.code is 'EEXIST'
      return callback(error) if error?
      runner (error1, args...) ->
        Lockfile.unlock path, (error2) ->
          error = error1 or error2
          return callback(error) if error?
          callback(null, args...)
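A minimal sketch of `runWithLock` in use, mirroring `CompileManager.doCompileWithLock` above (the lock path and work function are illustrative):

```coffee
LockManager = require "./app/js/LockManager"

# The runner gets a release callback; whatever it passes to releaseLock is
# forwarded to the final callback once the lockfile has been removed.
LockManager.runWithLock "/tmp/compiles/example/.project-lock", (releaseLock) ->
  doCompileWork = (cb) -> cb(null, ["output.pdf"])
  doCompileWork releaseLock
, (error, outputFiles) ->
  if error?.name is "AlreadyCompilingError"
    console.log "another compile holds the lock"
  else
    console.log "compile finished:", outputFiles
```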
@@ -4,12 +4,17 @@ fse = require "fs-extra"
|
||||
Path = require "path"
|
||||
logger = require "logger-sharelatex"
|
||||
_ = require "underscore"
|
||||
Settings = require "settings-sharelatex"
|
||||
crypto = require "crypto"
|
||||
|
||||
OutputFileOptimiser = require "./OutputFileOptimiser"
|
||||
|
||||
module.exports = OutputCacheManager =
|
||||
CACHE_SUBDIR: '.cache/clsi'
|
||||
BUILD_REGEX: /^[0-9a-f]+$/ # build id is Date.now() converted to hex
|
||||
ARCHIVE_SUBDIR: '.archive/clsi'
|
||||
# build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes
|
||||
# for backwards compatibility, make the randombytes part optional
|
||||
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/
|
||||
CACHE_LIMIT: 2 # maximum number of cache directories
|
||||
CACHE_AGE: 60*60*1000 # up to one hour old
|
||||
|
||||
@@ -21,40 +26,33 @@ module.exports = OutputCacheManager =
|
||||
# for invalid build id, return top level
|
||||
return file
|
||||
|
||||
generateBuildId: (callback = (error, buildId) ->) ->
|
||||
# generate a secure build id from Date.now() and 8 random bytes in hex
|
||||
crypto.randomBytes 8, (err, buf) ->
|
||||
return callback(err) if err?
|
||||
random = buf.toString('hex')
|
||||
date = Date.now().toString(16)
|
||||
callback err, "#{date}-#{random}"
|
||||
|
||||
saveOutputFiles: (outputFiles, compileDir, callback = (error) ->) ->
|
||||
OutputCacheManager.generateBuildId (err, buildId) ->
|
||||
return callback(err) if err?
|
||||
OutputCacheManager.saveOutputFilesInBuildDir outputFiles, compileDir, buildId, callback
|
||||
|
||||
saveOutputFilesInBuildDir: (outputFiles, compileDir, buildId, callback = (error) ->) ->
|
||||
# make a compileDir/CACHE_SUBDIR/build_id directory and
|
||||
# copy all the output files into it
|
||||
cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
|
||||
# Put the files into a new cache subdirectory
|
||||
buildId = Date.now().toString(16)
|
||||
cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId)
|
||||
# let file expiry run in the background
|
||||
OutputCacheManager.expireOutputFiles cacheRoot, {keep: buildId}
|
||||
# Is it a per-user compile? check if compile directory is PROJECTID-USERID
|
||||
perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/)
|
||||
|
||||
checkFile = (src, callback) ->
|
||||
# check if we have a valid file to copy into the cache
|
||||
fs.stat src, (err, stats) ->
|
||||
# Archive logs in background
|
||||
if Settings.clsi?.archive_logs or Settings.clsi?.strace
|
||||
OutputCacheManager.archiveLogs outputFiles, compileDir, buildId, (err) ->
|
||||
if err?
|
||||
# some problem reading the file
|
||||
logger.error err: err, file: src, "stat error for file in cache"
|
||||
callback(err)
|
||||
else if not stats.isFile()
|
||||
# other filetype - reject it
|
||||
logger.error err: err, src: src, dst: dst, stat: stats, "nonfile output - refusing to copy to cache"
|
||||
callback(new Error("output file is not a file"), file)
|
||||
else
|
||||
# it's a plain file, ok to copy
|
||||
callback(null)
|
||||
|
||||
copyFile = (src, dst, callback) ->
|
||||
# copy output file into the cache
|
||||
fse.copy src, dst, (err) ->
|
||||
if err?
|
||||
logger.error err: err, src: src, dst: dst, "copy error for file in cache"
|
||||
callback(err)
|
||||
else
|
||||
# call the optimiser for the file too
|
||||
OutputFileOptimiser.optimiseFile src, dst, callback
|
||||
logger.warn err:err, "erroring archiving log files"
|
||||
|
||||
# make the new cache directory
|
||||
fse.ensureDir cacheDir, (err) ->
|
||||
@@ -63,21 +61,57 @@ module.exports = OutputCacheManager =
|
||||
callback(err, outputFiles)
|
||||
else
|
||||
# copy all the output files into the new cache directory
|
||||
results = []
|
||||
async.mapSeries outputFiles, (file, cb) ->
|
||||
# don't send dot files as output, express doesn't serve them
|
||||
if OutputCacheManager._fileIsHidden(file.path)
  logger.warn compileDir: compileDir, path: file.path, "ignoring dotfile in output"
  return cb()
# copy other files into cache directory if valid
newFile = _.clone(file)
[src, dst] = [Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]
checkFile src, (err) ->
copyFile src, dst, (err) ->
  if not err?
OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
  return cb(err) if err?
  if !isSafe
    return cb()
  OutputCacheManager._checkIfShouldCopy src, (err, shouldCopy) ->
    return cb(err) if err?
    if !shouldCopy
      return cb()
    OutputCacheManager._copyFile src, dst, (err) ->
      return cb(err) if err?
      newFile.build = buildId # attach a build id if we cached the file
      cb(err, newFile)
, (err, results) ->
      results.push newFile
      cb()
, (err) ->
  if err?
    # pass back the original files if we encountered *any* error
    callback(err, outputFiles)
    # clean up the directory we just created
    fse.remove cacheDir, (err) ->
      if err?
        logger.error err: err, dir: cacheDir, "error removing cache dir after failure"
  else
    # pass back the list of new files in the cache
    callback(err, results)
    # let file expiry run in the background, expire all previous files if per-user
    OutputCacheManager.expireOutputFiles cacheRoot, {keep: buildId, limit: if perUser then 1 else null}

archiveLogs: (outputFiles, compileDir, buildId, callback = (error) ->) ->
  archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId)
  logger.log {dir: archiveDir}, "archiving log files for project"
  fse.ensureDir archiveDir, (err) ->
    return callback(err) if err?
    async.mapSeries outputFiles, (file, cb) ->
      [src, dst] = [Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]
      OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
        return cb(err) if err?
        return cb() if !isSafe
        OutputCacheManager._checkIfShouldArchive src, (err, shouldArchive) ->
          return cb(err) if err?
          return cb() if !shouldArchive
          OutputCacheManager._copyFile src, dst, cb
    , callback

expireOutputFiles: (cacheRoot, options, callback = (error) ->) ->
  # look in compileDir for build dirs and delete if > N or age of mod time > T
@@ -92,10 +126,13 @@ module.exports = OutputCacheManager =

  isExpired = (dir, index) ->
    return false if options?.keep == dir
    # remove any directories over the requested (non-null) limit
    return true if options?.limit? and index > options.limit
    # remove any directories over the hard limit
    return true if index > OutputCacheManager.CACHE_LIMIT
    # we can get the build time from the directory name
    dirTime = parseInt(dir, 16)
    # we can get the build time from the first part of the directory name DDDD-RRRR
    # DDDD is date and RRRR is random bytes
    dirTime = parseInt(dir.split('-')?[0], 16)
    age = currentTime - dirTime
    return age > OutputCacheManager.CACHE_AGE

@@ -111,3 +148,52 @@ module.exports = OutputCacheManager =
  async.eachSeries toRemove, (dir, cb) ->
    removeDir dir, cb
  , callback

_fileIsHidden: (path) ->
  return path?.match(/^\.|\/\./)?

_checkFileIsSafe: (src, callback = (error, isSafe) ->) ->
  # check if we have a valid file to copy into the cache
  fs.stat src, (err, stats) ->
    if err?.code is 'ENOENT'
      logger.warn err: err, file: src, "file has disappeared before copying to build cache"
      callback(err, false)
    else if err?
      # some other problem reading the file
      logger.error err: err, file: src, "stat error for file in cache"
      callback(err, false)
    else if not stats.isFile()
      # other filetype - reject it
      logger.warn src: src, stat: stats, "nonfile output - refusing to copy to cache"
      callback(null, false)
    else
      # it's a plain file, ok to copy
      callback(null, true)

_copyFile: (src, dst, callback) ->
  # copy output file into the cache
  fse.copy src, dst, (err) ->
    if err?.code is 'ENOENT'
      logger.warn err: err, file: src, "file has disappeared when copying to build cache"
      callback(err, false)
    else if err?
      logger.error err: err, src: src, dst: dst, "copy error for file in cache"
      callback(err)
    else
      if Settings.clsi?.optimiseInDocker
        # don't run any optimisations on the pdf when they are done
        # in the docker container
        callback()
      else
        # call the optimiser for the file too
        OutputFileOptimiser.optimiseFile src, dst, callback

_checkIfShouldCopy: (src, callback = (err, shouldCopy) ->) ->
  return callback(null, !Path.basename(src).match(/^strace/))

_checkIfShouldArchive: (src, callback = (err, shouldCopy) ->) ->
  if Path.basename(src).match(/^strace/)
    return callback(null, true)
  if Settings.clsi?.archive_logs and Path.basename(src) in ["output.log", "output.blg"]
    return callback(null, true)
  return callback(null, false)
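For reference, the DDDD-RRRR naming that isExpired relies on can be sketched as below. generateBuildId is a hypothetical helper shown only to make the hex-timestamp parsing above concrete; the real generator lives elsewhere in OutputCacheManager.

  crypto = require "crypto"

  # a minimal sketch, assuming the DDDD-RRRR layout described in the comments above
  generateBuildId = ->
    date = Date.now().toString(16)                  # DDDD: hex timestamp
    random = crypto.randomBytes(8).toString("hex")  # RRRR: random bytes
    "#{date}-#{random}"

  # age computation matching the parse in isExpired
  buildDirAge = (dir) ->
    Date.now() - parseInt(dir.split("-")[0], 16)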
@@ -5,7 +5,7 @@ spawn = require("child_process").spawn
logger = require "logger-sharelatex"

module.exports = OutputFileFinder =
  findOutputFiles: (resources, directory, callback = (error, outputFiles) ->) ->
  findOutputFiles: (resources, directory, callback = (error, outputFiles, allFiles) ->) ->
    incomingResources = {}
    for resource in resources
      incomingResources[resource.path] = true
@@ -13,8 +13,9 @@ module.exports = OutputFileFinder =
    logger.log directory: directory, "getting output files"

    OutputFileFinder._getAllFiles directory, (error, allFiles = []) ->
      return callback(error) if error?
      jobs = []
      if error?
        logger.err err: error, "error finding all output files"
        return callback(error)
      outputFiles = []
      for file in allFiles
        if !incomingResources[file]
@@ -22,14 +23,16 @@ module.exports = OutputFileFinder =
            path: file
            type: file.match(/\.([^\.]+)$/)?[1]
          }
      callback null, outputFiles
      callback null, outputFiles, allFiles

  _getAllFiles: (directory, _callback = (error, fileList) ->) ->
    callback = (error, fileList) ->
      _callback(error, fileList)
      _callback = () ->

    args = [directory, "-name", ".cache", "-prune", "-o", "-type", "f", "-print"]
    # don't include clsi-specific files/directories in the output list
    EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive", "-o", "-name", ".project-*"]
    args = [directory, "(", EXCLUDE_DIRS..., ")", "-prune", "-o", "-type", "f", "-print"]
    logger.log args: args, "running find command"

    proc = spawn("find", args)
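The two args arrays above correspond to shell invocations roughly like the following (illustrative; in an interactive shell the parentheses and the * would need quoting):

  find <directory> -name .cache -prune -o -type f -print
  find <directory> ( -name .cache -o -name .archive -o -name .project-* ) -prune -o -type f -print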
@@ -2,6 +2,7 @@ fs = require "fs"
Path = require "path"
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
_ = require "underscore"

module.exports = OutputFileOptimiser =
@@ -10,15 +11,31 @@ module.exports = OutputFileOptimiser =
    # check output file (src) and see if we can optimise it, storing
    # the result in the build directory (dst)
    if src.match(/\/output\.pdf$/)
      OutputFileOptimiser.optimisePDF src, dst, callback
      OutputFileOptimiser.checkIfPDFIsOptimised src, (err, isOptimised) ->
        return callback(null) if err? or isOptimised
        OutputFileOptimiser.optimisePDF src, dst, callback
    else
      callback(null)

  checkIfPDFIsOptimised: (file, callback) ->
    SIZE = 16 * 1024 # check the header of the pdf
    result = new Buffer(SIZE)
    result.fill(0) # prevent leakage of uninitialised buffer
    fs.open file, "r", (err, fd) ->
      return callback(err) if err?
      fs.read fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) ->
        fs.close fd, (errClose) ->
          return callback(errRead) if errRead?
          return callback(errClose) if errClose?
          isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0
          callback(null, isOptimised)

  optimisePDF: (src, dst, callback = (error) ->) ->
    tmpOutput = dst + '.opt'
    args = ["--linearize", src, tmpOutput]
    logger.log args: args, "running qpdf command"

    timer = new Metrics.Timer("qpdf")
    proc = spawn("qpdf", args)
    stdout = ""
    proc.stdout.on "data", (chunk) ->
@@ -28,6 +45,7 @@ module.exports = OutputFileOptimiser =
      logger.warn {err, args}, "qpdf failed"
      callback(null) # ignore the error
    proc.on "close", (code) ->
      timer.done()
      if code != 0
        logger.warn {code, args}, "qpdf returned error"
        return callback(null) # ignore the error
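A linearized ("fast web view") PDF declares /Linearized 1 in its first object, which is why reading only the first 16 KB is enough here. Illustrative usage (path hypothetical):

  OutputFileOptimiser.checkIfPDFIsOptimised "/compiles/some-project/output.pdf", (err, isOptimised) ->
    console.log "already linearized?", isOptimised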
@@ -3,9 +3,12 @@ CompileManager = require "./CompileManager"
db = require "./db"
async = require "async"
logger = require "logger-sharelatex"
oneDay = 24 * 60 * 60 * 1000
Settings = require "settings-sharelatex"

module.exports = ProjectPersistenceManager =
  EXPIRY_TIMEOUT: oneDay = 24 * 60 * 60 * 1000 #ms

  EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5

  markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
    db.Project.findOrCreate(where: {project_id: project_id})
@@ -24,21 +27,30 @@ module.exports = ProjectPersistenceManager =
      jobs = for project_id in (project_ids or [])
        do (project_id) ->
          (callback) ->
            ProjectPersistenceManager.clearProject project_id, (err) ->
            ProjectPersistenceManager.clearProjectFromCache project_id, (err) ->
              if err?
                logger.error err: err, project_id: project_id, "error clearing project"
              callback()
      async.series jobs, callback

  clearProject: (project_id, callback = (error) ->) ->
    logger.log project_id: project_id, "clearing project"
    CompileManager.clearProject project_id, (error) ->
      return callback(error) if error?
      UrlCache.clearProject project_id, (error) ->
      async.series jobs, (error) ->
        return callback(error) if error?
        ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
          return callback(error) if error?
          callback()
        CompileManager.clearExpiredProjects ProjectPersistenceManager.EXPIRY_TIMEOUT, (error) ->
          callback() # ignore any errors from deleting directories

  clearProject: (project_id, user_id, callback = (error) ->) ->
    logger.log project_id: project_id, user_id: user_id, "clearing project for user"
    CompileManager.clearProject project_id, user_id, (error) ->
      return callback(error) if error?
      ProjectPersistenceManager.clearProjectFromCache project_id, (error) ->
        return callback(error) if error?
        callback()

  clearProjectFromCache: (project_id, callback = (error) ->) ->
    logger.log project_id: project_id, "clearing project from cache"
    UrlCache.clearProject project_id, (error) ->
      return callback(error) if error?
      ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
        return callback(error) if error?
        callback()

  _clearProjectFromDatabase: (project_id, callback = (error) ->) ->
    db.Project.destroy(where: {project_id: project_id})
@@ -50,3 +62,5 @@ module.exports = ProjectPersistenceManager =
      .then((projects) ->
        callback null, projects.map((project) -> project.project_id)
      ).error callback

logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"
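Since EXPIRY_TIMEOUT now reads Settings.project_cache_length_ms, the retention window can be tuned from the settings file; a minimal sketch (the 12-hour value is only an example):

  # settings file sketch: keep compiled projects for 12 hours instead of 2.5 days
  module.exports =
    project_cache_length_ms: 12 * 60 * 60 * 1000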
@@ -21,6 +21,37 @@ module.exports = RequestParser =
        compile.options.timeout
        default: RequestParser.MAX_TIMEOUT
        type: "number"
      response.imageName = @_parseAttribute "imageName",
        compile.options.imageName,
        type: "string"
      response.draft = @_parseAttribute "draft",
        compile.options.draft,
        default: false,
        type: "boolean"
      response.check = @_parseAttribute "check",
        compile.options.check,
        type: "string"

      # The syncType specifies whether the request contains all
      # resources (full) or only those resources to be updated
      # in-place (incremental).
      response.syncType = @_parseAttribute "syncType",
        compile.options.syncType,
        validValues: ["full", "incremental"]
        type: "string"

      # The syncState is an identifier passed in with the request
      # which has the property that it changes when any resource is
      # added, deleted, moved or renamed.
      #
      # on syncType full the syncState identifier is passed in and
      # stored
      #
      # on syncType incremental the syncState identifier must match
      # the stored value
      response.syncState = @_parseAttribute "syncState",
        compile.options.syncState,
        type: "string"

      if response.timeout > RequestParser.MAX_TIMEOUT
        response.timeout = RequestParser.MAX_TIMEOUT
@@ -32,7 +63,13 @@ module.exports = RequestParser =
        compile.rootResourcePath
        default: "main.tex"
        type: "string"
      response.rootResourcePath = RequestParser._sanitizePath(rootResourcePath)
      originalRootResourcePath = rootResourcePath
      sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath)
      response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath)

      for resource in response.resources
        if resource.path == originalRootResourcePath
          resource.path = sanitizedRootResourcePath
    catch error
      return callback error

@@ -71,9 +108,15 @@ module.exports = RequestParser =
        throw "#{name} attribute should be a #{options.type}"
    else
      return options.default if options.default?
      throw "Default not implemented"
    return attribute

  _sanitizePath: (path) ->
    # See http://php.net/manual/en/function.escapeshellcmd.php
    path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, "")

  _checkPath: (path) ->
    # check that the request does not use a relative path
    for dir in path.split('/')
      if dir == '..'
        throw "relative path in root resource"
    return path
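Putting the new attributes together, a compile request using the incremental protocol would look roughly like this (all values hypothetical; this mirrors the shape the parser reads from compile.options):

  req =
    compile:
      options:
        compiler: "pdflatex"
        timeout: 60
        draft: false
        check: "error"
        syncType: "incremental"
        syncState: "a6d2e0b4"
      rootResourcePath: "main.tex"
      resources: [
        path: "main.tex"
        content: "\\documentclass{article}..."
      ]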
72 app/coffee/ResourceStateManager.coffee Normal file
@@ -0,0 +1,72 @@
Path = require "path"
fs = require "fs"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
Errors = require "./Errors"
SafeReader = require "./SafeReader"

module.exports = ResourceStateManager =

  # The sync state is an identifier which must match for an
  # incremental update to be allowed.
  #
  # The initial value is passed in and stored on a full
  # compile, along with the list of resources.
  #
  # Subsequent incremental compiles must come with the same value - if
  # not they will be rejected with a 409 Conflict response. The
  # previous list of resources is returned.
  #
  # An incremental compile can only update existing files with new
  # content. The sync state identifier must change if any docs or
  # files are moved, added, deleted or renamed.

  SYNC_STATE_FILE: ".project-sync-state"
  SYNC_STATE_MAX_SIZE: 128 * 1024

  saveProjectState: (state, resources, basePath, callback = (error) ->) ->
    stateFile = Path.join(basePath, @SYNC_STATE_FILE)
    if not state? # remove the file if no state passed in
      logger.log state: state, basePath: basePath, "clearing sync state"
      fs.unlink stateFile, (err) ->
        if err? and err.code isnt 'ENOENT'
          return callback(err)
        else
          return callback()
    else
      logger.log state: state, basePath: basePath, "writing sync state"
      resourceList = (resource.path for resource in resources)
      fs.writeFile stateFile, [resourceList..., "stateHash:#{state}"].join("\n"), callback

  checkProjectStateMatches: (state, basePath, callback = (error, resources) ->) ->
    stateFile = Path.join(basePath, @SYNC_STATE_FILE)
    size = @SYNC_STATE_MAX_SIZE
    SafeReader.readFile stateFile, size, 'utf8', (err, result, bytesRead) ->
      return callback(err) if err?
      if bytesRead is size
        logger.error file: stateFile, size: size, bytesRead: bytesRead, "project state file truncated"
      [resourceList..., oldState] = result?.toString()?.split("\n") or []
      newState = "stateHash:#{state}"
      logger.log state: state, oldState: oldState, basePath: basePath, stateMatches: (newState is oldState), "checking sync state"
      if newState isnt oldState
        return callback new Errors.FilesOutOfSyncError("invalid state for incremental update")
      else
        resources = ({path: path} for path in resourceList)
        callback(null, resources)

  checkResourceFiles: (resources, allFiles, basePath, callback = (error) ->) ->
    # check the paths are all relative to current directory
    for file in resources or []
      for dir in file?.path?.split('/')
        if dir == '..'
          return callback new Error("relative path in resource file list")
    # check if any of the input files are not present in list of files
    seenFile = {}
    for file in allFiles
      seenFile[file] = true
    missingFiles = (resource.path for resource in resources when not seenFile[resource.path])
    if missingFiles?.length > 0
      logger.err missingFiles: missingFiles, basePath: basePath, allFiles: allFiles, resources: resources, "missing input files for project"
      return callback new Errors.FilesOutOfSyncError("resource files missing in incremental update")
    else
      callback()
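Given saveProjectState above, the on-disk .project-sync-state file is simply the resource list followed by the hash line; for example (paths and hash hypothetical):

  chapters/introduction.tex
  figures/plot.pdf
  main.tex
  stateHash:a6d2e0b4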
@@ -4,25 +4,71 @@ fs = require "fs"
async = require "async"
mkdirp = require "mkdirp"
OutputFileFinder = require "./OutputFileFinder"
ResourceStateManager = require "./ResourceStateManager"
Metrics = require "./Metrics"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")

parallelFileDownloads = settings.parallelFileDownloads or 1

module.exports = ResourceWriter =
  syncResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
    @_removeExtraneousFiles resources, basePath, (error) =>

  syncResourcesToDisk: (request, basePath, callback = (error, resourceList) ->) ->
    if request.syncType is "incremental"
      logger.log project_id: request.project_id, user_id: request.user_id, "incremental sync"
      ResourceStateManager.checkProjectStateMatches request.syncState, basePath, (error, resourceList) ->
        return callback(error) if error?
        ResourceWriter._removeExtraneousFiles resourceList, basePath, (error, outputFiles, allFiles) ->
          return callback(error) if error?
          ResourceStateManager.checkResourceFiles resourceList, allFiles, basePath, (error) ->
            return callback(error) if error?
            ResourceWriter.saveIncrementalResourcesToDisk request.project_id, request.resources, basePath, (error) ->
              return callback(error) if error?
              callback(null, resourceList)
    else
      logger.log project_id: request.project_id, user_id: request.user_id, "full sync"
      @saveAllResourcesToDisk request.project_id, request.resources, basePath, (error) ->
        return callback(error) if error?
        ResourceStateManager.saveProjectState request.syncState, request.resources, basePath, (error) ->
          return callback(error) if error?
          callback(null, request.resources)

  saveIncrementalResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
    @_createDirectory basePath, (error) =>
      return callback(error) if error?
      jobs = for resource in resources
        do (resource) =>
          (callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
      async.series jobs, callback
      async.parallelLimit jobs, parallelFileDownloads, callback

  _removeExtraneousFiles: (resources, basePath, _callback = (error) ->) ->
  saveAllResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
    @_createDirectory basePath, (error) =>
      return callback(error) if error?
      @_removeExtraneousFiles resources, basePath, (error) =>
        return callback(error) if error?
        jobs = for resource in resources
          do (resource) =>
            (callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
        async.parallelLimit jobs, parallelFileDownloads, callback

  _createDirectory: (basePath, callback = (error) ->) ->
    fs.mkdir basePath, (err) ->
      if err?
        if err.code is 'EEXIST'
          return callback()
        else
          logger.log {err: err, dir: basePath}, "error creating directory"
          return callback(err)
      else
        return callback()

  _removeExtraneousFiles: (resources, basePath, _callback = (error, outputFiles, allFiles) ->) ->
    timer = new Metrics.Timer("unlink-output-files")
    callback = (error) ->
    callback = (error, result...) ->
      timer.done()
      _callback(error)
      _callback(error, result...)

    OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles) ->
    OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles, allFiles) ->
      return callback(error) if error?

      jobs = []
@@ -30,37 +76,55 @@ module.exports = ResourceWriter =
        do (file) ->
          path = file.path
          should_delete = true
          if path.match(/^output\./) or path.match(/\.aux$/)
          if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache
            should_delete = false
          if path == "output.pdf" or path == "output.dvi" or path == "output.log"
          if path.match(/^output-.*/) # Tikz cached figures
            should_delete = false
          if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv"
            should_delete = true
          if path == "output.tex" # created by TikzManager if present in output files
            should_delete = true
          if should_delete
            jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback

      async.series jobs, callback
      async.series jobs, (error) ->
        return callback(error) if error?
        callback(null, outputFiles, allFiles)

  _deleteFileIfNotDirectory: (path, callback = (error) ->) ->
    fs.stat path, (error, stat) ->
      return callback(error) if error?
      if stat.isFile()
        fs.unlink path, callback
      if error? and error.code is 'ENOENT'
        return callback()
      else if error?
        logger.err {err: error, path: path}, "error stating file in deleteFileIfNotDirectory"
        return callback(error)
      else if stat.isFile()
        fs.unlink path, (error) ->
          if error?
            logger.err {err: error, path: path}, "error removing file in deleteFileIfNotDirectory"
            callback(error)
          else
            callback()
      else
        callback()

  _writeResourceToDisk: (project_id, resource, basePath, callback = (error) ->) ->
    path = Path.normalize(Path.join(basePath, resource.path))
    if (path.slice(0, basePath.length) != basePath)
      return callback new Error("resource path is outside root directory")

    mkdirp Path.dirname(path), (error) ->
    ResourceWriter.checkPath basePath, resource.path, (error, path) ->
      return callback(error) if error?
      # TODO: Don't overwrite file if it hasn't been modified
      if resource.url?
        UrlCache.downloadUrlToFile project_id, resource.url, path, resource.modified, (err) ->
          if err?
            logger.err err: err, project_id: project_id, path: path, resource_url: resource.url, modified: resource.modified, "error downloading file for resources"
          callback() #try and continue compiling even if http resource can not be downloaded at this time
      else
        fs.writeFile path, resource.content, callback

      mkdirp Path.dirname(path), (error) ->
        return callback(error) if error?
        # TODO: Don't overwrite file if it hasn't been modified
        if resource.url?
          UrlCache.downloadUrlToFile project_id, resource.url, path, resource.modified, (err) ->
            if err?
              logger.err err: err, project_id: project_id, path: path, resource_url: resource.url, modified: resource.modified, "error downloading file for resources"
            callback() #try and continue compiling even if http resource can not be downloaded at this time
        else
          fs.writeFile path, resource.content, callback

  checkPath: (basePath, resourcePath, callback) ->
    path = Path.normalize(Path.join(basePath, resourcePath))
    if (path.slice(0, basePath.length + 1) != basePath + "/")
      return callback new Error("resource path is outside root directory")
    else
      return callback(null, path)
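Illustrative behaviour of checkPath (base path hypothetical): a normalized path must stay strictly inside the compile directory, so traversal via .. is rejected while ordinary subdirectory paths pass through.

  ResourceWriter.checkPath "/compiles/project-1234", "../other/main.tex", (error, path) ->
    # error: "resource path is outside root directory"

  ResourceWriter.checkPath "/compiles/project-1234", "figures/plot.tex", (error, path) ->
    # path: "/compiles/project-1234/figures/plot.tex"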
25 app/coffee/SafeReader.coffee Normal file
@@ -0,0 +1,25 @@
fs = require "fs"
logger = require "logger-sharelatex"

module.exports = SafeReader =

  # safely read up to size bytes from a file and return result as a
  # string

  readFile: (file, size, encoding, callback = (error, result) ->) ->
    fs.open file, 'r', (err, fd) ->
      return callback() if err? and err.code is 'ENOENT'
      return callback(err) if err?

      # safely return always closing the file
      callbackWithClose = (err, result...) ->
        fs.close fd, (err1) ->
          return callback(err) if err?
          return callback(err1) if err1?
          callback(null, result...)

      buff = new Buffer(size)
      buff.fill(0) # prevent leakage of uninitialised memory
      fs.read fd, buff, 0, buff.length, 0, (err, bytesRead, buffer) ->
        return callbackWithClose(err) if err?
        result = buffer.toString(encoding, 0, bytesRead)
        callbackWithClose(null, result, bytesRead)
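Illustrative usage (file path hypothetical): a missing file produces neither an error nor a result, per the ENOENT branch above, and bytesRead lets callers detect truncation as ResourceStateManager does.

  SafeReader = require "./SafeReader"

  SafeReader.readFile "/tmp/maybe-there.txt", 1024, "utf8", (error, result, bytesRead) ->
    console.log error?, bytesRead, result?.length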
@@ -29,10 +29,10 @@ module.exports = ForbidSymlinks = (staticFn, root, options) ->
      # check that the requested path is not a symlink
      fs.realpath requestedFsPath, (err, realFsPath) ->
        if err?
          logger.warn err: err, requestedFsPath: requestedFsPath, realFsPath: realFsPath, path: req.params[0], project_id: req.params.project_id, "error checking file access"
          if err.code == 'ENOENT'
            return res.sendStatus(404)
          else
            logger.error err: err, requestedFsPath: requestedFsPath, realFsPath: realFsPath, path: req.params[0], project_id: req.params.project_id, "error checking file access"
            return res.sendStatus(500)
        else if requestedFsPath != realFsPath
          logger.warn requestedFsPath: requestedFsPath, realFsPath: realFsPath, path: req.params[0], project_id: req.params.project_id, "trying to access a different file (symlink), aborting"
35 app/coffee/TikzManager.coffee Normal file
@@ -0,0 +1,35 @@
fs = require "fs"
Path = require "path"
ResourceWriter = require "./ResourceWriter"
SafeReader = require "./SafeReader"
logger = require "logger-sharelatex"

# for \tikzexternalize to work the main file needs to match the
# jobname. Since we set the -jobname to output, we have to create a
# copy of the main file as 'output.tex'.

module.exports = TikzManager =

  checkMainFile: (compileDir, mainFile, resources, callback = (error, usesTikzExternalize) ->) ->
    # if there's already an output.tex file, we don't want to touch it
    for resource in resources
      if resource.path is "output.tex"
        logger.log compileDir: compileDir, mainFile: mainFile, "output.tex already in resources"
        return callback(null, false)
    # if there's no output.tex, see if we are using tikz/pgf in the main file
    ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
      return callback(error) if error?
      SafeReader.readFile path, 65536, "utf8", (error, content) ->
        return callback(error) if error?
        usesTikzExternalize = content?.indexOf("\\tikzexternalize") >= 0
        logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize: usesTikzExternalize, "checked for tikzexternalize"
        callback null, usesTikzExternalize

  injectOutputFile: (compileDir, mainFile, callback = (error) ->) ->
    ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
      return callback(error) if error?
      fs.readFile path, "utf8", (error, content) ->
        return callback(error) if error?
        logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex for tikz"
        # use wx flag to ensure that output file does not already exist
        fs.writeFile Path.join(compileDir, "output.tex"), content, {flag: 'wx'}, callback
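A sketch of a main file that checkMainFile would flag, causing injectOutputFile to copy it to output.tex before the compile (content illustrative):

  \documentclass{article}
  \usepackage{tikz}
  \usetikzlibrary{external}
  \tikzexternalize
  \begin{document}
  \begin{tikzpicture}\draw (0,0) -- (1,1);\end{tikzpicture}
  \end{document}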
@@ -87,7 +87,11 @@ module.exports = UrlCache =
        callback null

  _deleteUrlCacheFromDisk: (project_id, url, callback = (error) ->) ->
    fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), callback
    fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), (error) ->
      if error? and error.code != 'ENOENT' # no error if the file isn't present
        return callback(error)
      else
        return callback()

  _findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
    db.UrlCache.find(where: { url: url, project_id: project_id })
@@ -16,24 +16,29 @@ module.exports =
    clsiCacheDir: Path.resolve(__dirname + "/../cache")
    synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)

  # clsi:
  #   commandRunner: "docker-runner-sharelatex"
  #   docker:
  #     image: "quay.io/sharelatex/texlive-full"
  #     env:
  #       PATH: "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/2013/bin/x86_64-linux/"
  #       HOME: "/tmp"
  #     modem:
  #       socketPath: false
  #     user: "tex"

  internal:
    clsi:
      port: 3013
      host: "localhost"
      host: process.env["LISTEN_ADDRESS"] or "localhost"

  apis:
    clsi:
      url: "http://localhost:3013"

  smokeTest: false
  project_cache_length_ms: 1000 * 60 * 60 * 24
  parallelFileDownloads: 1

if process.env["COMMAND_RUNNER"]
  module.exports.clsi =
    commandRunner: process.env["COMMAND_RUNNER"]
    docker:
      image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
      env:
        HOME: "/tmp"
      socketPath: "/var/run/docker.sock"
      user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
    checkProjectsIntervalMs: 10 * 60 * 1000
  module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]
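The environment hooks above make the sandboxed-compiles setup configurable without editing the file; an illustrative shell setup (values are examples only):

  export LISTEN_ADDRESS=0.0.0.0
  export COMMAND_RUNNER=docker-runner-sharelatex
  export TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1
  export TEXLIVE_IMAGE_USER=tex
  export COMPILES_HOST_DIR=/var/clsi/compiles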
26 package.json
@@ -6,26 +6,32 @@
    "type": "git",
    "url": "https://github.com/sharelatex/clsi-sharelatex.git"
  },
  "scripts": {
    "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee",
    "start": "npm run compile:app && node app.js"
  },
  "author": "James Allen <james@sharelatex.com>",
  "dependencies": {
    "async": "0.2.9",
    "body-parser": "^1.2.0",
    "express": "^4.2.0",
    "fs-extra": "^0.16.3",
    "grunt-mkdir": "^1.0.0",
    "heapdump": "^0.3.5",
    "lockfile": "^1.0.3",
    "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.4",
    "lynx": "0.0.11",
    "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0",
    "mkdirp": "0.3.5",
    "mysql": "2.6.2",
    "request": "~2.21.0",
    "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0",
    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
    "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.3.0",
    "request": "^2.21.0",
    "sequelize": "^2.1.3",
    "wrench": "~1.5.4",
    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
    "smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0",
    "sqlite3": "~2.2.0",
    "express": "^4.2.0",
    "body-parser": "^1.2.0",
    "fs-extra": "^0.16.3",
    "sqlite3": "~3.1.8",
    "underscore": "^1.8.2",
    "v8-profiler": "^5.2.4",
    "heapdump": "^0.3.5"
    "wrench": "~1.5.4"
  },
  "devDependencies": {
    "mocha": "1.10.0",
@@ -12,18 +12,35 @@ catch e

convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
  convert = ChildProcess.exec "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
  stdout = ""
  convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString()
  convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
  convert.on "exit", () ->
    callback()

compare = (originalPath, generatedPath, callback = (error, same) ->) ->
  proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{fixturePath("tmp/diff.png")}"
  diff_file = "#{fixturePath(generatedPath)}-diff.png"
  proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{diff_file}"
  stderr = ""
  proc.stderr.on "data", (chunk) -> stderr += chunk
  proc.on "exit", () ->
    if stderr.trim() == "0 (0)"
      fs.unlink diff_file # remove output diff if test matches expected image
      callback null, true
    else
      console.log stderr
      console.log "compare result", stderr
      callback null, false

checkPdfInfo = (pdfPath, callback = (error, output) ->) ->
  proc = ChildProcess.exec "pdfinfo #{fixturePath(pdfPath)}"
  stdout = ""
  proc.stdout.on "data", (chunk) -> stdout += chunk
  proc.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
  proc.on "exit", () ->
    if stdout.match(/Optimized:\s+yes/)
      callback null, true
    else
      console.log "pdfinfo result", stdout
      callback null, false

compareMultiplePages = (project_id, callback = (error) ->) ->
@@ -39,24 +56,30 @@ compareMultiplePages = (project_id, callback = (error) ->) ->
      compareNext page_no + 1, callback
  compareNext 0, callback

comparePdf = (project_id, example_dir, callback = (error) ->) ->
  convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
    throw error if error?
    convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
      throw error if error?
      fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) =>
        if error?
          compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) =>
            throw error if error?
            same.should.equal true
            callback()
        else
          compareMultiplePages project_id, (error) ->
            throw error if error?
            callback()

downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
  writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
  request.get(url).pipe(writeStream)
  writeStream.on "close", () =>
    convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
    checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) =>
      throw error if error?
      convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
        throw error if error?
        fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) =>
          if error?
            compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) =>
              throw error if error?
              same.should.equal true
              callback()
          else
            compareMultiplePages project_id, (error) ->
              throw error if error?
              callback()
      optimised.should.equal true
      comparePdf project_id, example_dir, callback

Client.runServer(4242, fixturePath("examples"))

@@ -68,15 +91,19 @@ describe "Example Documents", ->
    do (example_dir) ->
      describe example_dir, ->
        before ->
          @project_id = Client.randomId()
          @project_id = Client.randomId() + "_" + example_dir

        it "should generate the correct pdf", (done) ->
          Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
            if error || body?.compile?.status is "failure"
              console.log "DEBUG: error", error, "body", JSON.stringify(body)
            pdf = Client.getOutputFile body, "pdf"
            downloadAndComparePdf(@project_id, example_dir, pdf.url, done)

        it "should generate the correct pdf on the second run as well", (done) ->
          Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
            if error || body?.compile?.status is "failure"
              console.log "DEBUG: error", error, "body", JSON.stringify(body)
            pdf = Client.getOutputFile body, "pdf"
            downloadAndComparePdf(@project_id, example_dir, pdf.url, done)
@@ -6,13 +6,14 @@ describe "Timed out compile", ->
  before (done) ->
    @request =
      options:
        timeout: 0.01 #seconds
        timeout: 1 #seconds
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\input{|"sleep 10"}
          \\end{document}
        '''
      ]
@@ -29,6 +29,8 @@ describe "Syncing", ->
          elements: 0
          mathInline: 6
          mathDisplay: 0
          errors: 0
          messages: ""
        }
      )
      done()
@@ -30,7 +30,10 @@ module.exports = Client =
    express = require("express")
    app = express()
    app.use express.static(directory)
    app.listen(port, host)
    app.listen(port, host).on "error", (error) ->
      console.error "error starting server:", error.message
      process.exit(1)

  syncFromCode: (project_id, file, line, column, callback = (error, pdfPositions) ->) ->
    request.get {
Binary file not shown.
12 test/acceptance/fixtures/examples/fontawesome/main.tex Normal file
@@ -0,0 +1,12 @@
\documentclass{article}
\usepackage{fontawesome}

\begin{document}
Cloud \faCloud

Cog \faCog

Database \faDatabase

Leaf \faLeaf
\end{document}
BIN test/acceptance/fixtures/examples/fontawesome/output.pdf Normal file
Binary file not shown.
@@ -0,0 +1,16 @@
\documentclass{article}
\usepackage{fontspec}
\defaultfontfeatures{Extension = .otf} % this is needed because
                                       % fontawesome package loads by
                                       % font name only
\usepackage{fontawesome}

\begin{document}
Cloud \faCloud

Cog \faCog

Database \faDatabase

Leaf \faLeaf
\end{document}
@@ -0,0 +1,3 @@
{
  "compiler": "xelatex"
}
BIN test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdf Normal file
Binary file not shown.
14 test/acceptance/fixtures/examples/hebrew/main.tex Normal file
@@ -0,0 +1,14 @@
\documentclass{article}
\usepackage[utf8x]{inputenc}
\usepackage[hebrew,english]{babel}

\begin{document}
\selectlanguage{hebrew}

כדי לכתוב משהו באנגלית חייבים להשתמש במקרו הבא וכאן

ממשיכים לכתוב בעברית. טקסט נוסחאות תמיד יהיה בכיוון שמאל-לימין

\selectlanguage{english}
This is a test.
\end{document}
BIN test/acceptance/fixtures/examples/hebrew/output.pdf Normal file
Binary file not shown.
35 test/acceptance/fixtures/examples/knitr_utf8/main.Rtex Normal file
@@ -0,0 +1,35 @@
\documentclass{article}
\usepackage[utf8]{inputenc}
\usepackage[spanish]{babel}

\begin{document}

\tableofcontents

\vspace{2cm} %Add a 2cm space

\begin{abstract}
Este es un breve resumen del contenido del
documento escrito en español.
\end{abstract}

\section{Sección Introductoria}
Esta es la primera sección, podemos agregar
algunos elementos adicionales y todo será
escrito correctamente. Más aún, si una palabra
es demaciado larga y tiene que ser truncada,
babel tratará de truncarla correctamente
dependiendo del idioma.

\section{Sección con teoremas}
Esta sección es para ver que pasa con los comandos
que definen texto

%% chunk options: cache this chunk
%% begin.rcode my-cache, cache=TRUE
% set.seed(123)
% x = runif(10)
% sd(x) # standard deviation
%% end.rcode

\end{document}
BIN test/acceptance/fixtures/examples/knitr_utf8/output.pdf Normal file
Binary file not shown.
@@ -1,4 +1,4 @@
\documentclass{article}
\documentclass[a4paper]{article}

\usepackage{graphicx}
Binary file not shown.
66 test/acceptance/fixtures/examples/tikz_feynman/main.tex Normal file
@@ -0,0 +1,66 @@
\RequirePackage{luatex85}
\documentclass[tikz]{standalone}

\usepackage[compat=1.1.0]{tikz-feynman}

\begin{document}
\feynmandiagram [horizontal=a to b] {
  i1 -- [fermion] a -- [fermion] i2,
  a -- [photon] b,
  f1 -- [fermion] b -- [fermion] f2,
};

\feynmandiagram [horizontal=a to b] {
  i1 [particle=\(e^{-}\)] -- [fermion] a -- [fermion] i2 [particle=\(e^{+}\)],
  a -- [photon, edge label=\(\gamma\), momentum'=\(k\)] b,
  f1 [particle=\(\mu^{+}\)] -- [fermion] b -- [fermion] f2 [particle=\(\mu^{-}\)],
};

\feynmandiagram [large, vertical=e to f] {
  a -- [fermion] b -- [photon, momentum=\(k\)] c -- [fermion] d,
  b -- [fermion, momentum'=\(p_{1}\)] e -- [fermion, momentum'=\(p_{2}\)] c,
  e -- [gluon] f,
  h -- [fermion] f -- [fermion] i,
};

\begin{tikzpicture}
  \begin{feynman}
    \vertex (a1) {\(\overline b\)};
    \vertex[right=1cm of a1] (a2);
    \vertex[right=1cm of a2] (a3);
    \vertex[right=1cm of a3] (a4) {\(b\)};
    \vertex[right=1cm of a4] (a5);
    \vertex[right=2cm of a5] (a6) {\(u\)};

    \vertex[below=2em of a1] (b1) {\(d\)};
    \vertex[right=1cm of b1] (b2);
    \vertex[right=1cm of b2] (b3);
    \vertex[right=1cm of b3] (b4) {\(\overline d\)};
    \vertex[below=2em of a6] (b5) {\(\overline d\)};

    \vertex[above=of a6] (c1) {\(\overline u\)};
    \vertex[above=2em of c1] (c3) {\(d\)};
    \vertex at ($(c1)!0.5!(c3) - (1cm, 0)$) (c2);

    \diagram* {
      {[edges=fermion]
        (b1) -- (b2) -- (a2) -- (a1),
        (b5) -- (b4) -- (b3) -- (a3) -- (a4) -- (a5) -- (a6),
      },
      (a2) -- [boson, edge label=\(W\)] (a3),
      (b2) -- [boson, edge label'=\(W\)] (b3),

      (c1) -- [fermion, out=180, in=-45] (c2) -- [fermion, out=45, in=180] (c3),
      (a5) -- [boson, bend left, edge label=\(W^{-}\)] (c2),
    };

    \draw [decoration={brace}, decorate] (b1.south west) -- (a1.north west)
      node [pos=0.5, left] {\(B^{0}\)};
    \draw [decoration={brace}, decorate] (c3.north east) -- (c1.south east)
      node [pos=0.5, right] {\(\pi^{-}\)};
    \draw [decoration={brace}, decorate] (a6.north east) -- (b5.south east)
      node [pos=0.5, right] {\(\pi^{+}\)};
  \end{feynman}
\end{tikzpicture}

\end{document}
@@ -0,0 +1,3 @@
{
  "compiler": "lualatex"
}
BIN test/acceptance/fixtures/examples/tikz_feynman/output.pdf Normal file
Binary file not shown.
23 test/acceptance/scripts/full-test.sh Executable file
@@ -0,0 +1,23 @@
#!/bin/bash -x

export SHARELATEX_CONFIG=`pwd`/test/acceptance/scripts/settings.test.coffee

echo ">> Starting server..."

grunt --no-color >server.log 2>&1 &

echo ">> Server started"

sleep 5

echo ">> Running acceptance tests..."
grunt --no-color mochaTest:acceptance
_test_exit_code=$?

echo ">> Killing server"

kill %1

echo ">> Done"

exit $_test_exit_code
47 test/acceptance/scripts/settings.test.coffee Normal file
@@ -0,0 +1,47 @@
Path = require "path"

module.exports =
  # Options are passed to Sequelize.
  # See http://sequelizejs.com/documentation#usage-options for details
  mysql:
    clsi:
      database: "clsi"
      username: "clsi"
      password: null
      dialect: "sqlite"
      storage: Path.resolve("db.sqlite")

  path:
    compilesDir: Path.resolve(__dirname + "/../../../compiles")
    clsiCacheDir: Path.resolve(__dirname + "/../../../cache")
    #synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)
    synctexBaseDir: () -> "/compile"
    sandboxedCompilesHostDir: process.env['SANDBOXED_COMPILES_HOST_DIR']

  clsi:
    #strace: true
    #archive_logs: true
    commandRunner: "docker-runner-sharelatex"
    latexmkCommandPrefix: ["/usr/bin/time", "-v"] # on Linux
    docker:
      image: process.env.TEXLIVE_IMAGE || "texlive-full:2017.1-opt"
      env:
        PATH: "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/2017/bin/x86_64-linux/"
        HOME: "/tmp"
      modem:
        socketPath: false
      user: process.env.SIBLING_CONTAINER_USER || "111"

  internal:
    clsi:
      port: 3013
      load_port: 3044
      host: "localhost"

  apis:
    clsi:
      url: "http://localhost:3013"

  smokeTest: false
  project_cache_length_ms: 1000 * 60 * 60 * 24
  parallelFileDownloads: 1
@@ -6,19 +6,48 @@ Settings = require "settings-sharelatex"

buildUrl = (path) -> "http://#{Settings.internal.clsi.host}:#{Settings.internal.clsi.port}/#{path}"

url = buildUrl("project/smoketest-#{process.pid}/compile")

describe "Running a compile", ->
  before (done) ->
    request.post {
      url: buildUrl("project/smoketest/compile")
      url: url
      json:
        compile:
          resources: [
            path: "main.tex"
            content: """
              \\documentclass{article}
              \\begin{document}
              Hello world
              \\end{document}
              % Membrane-like surface
              % Author: Yotam Avital
              \\documentclass{article}
              \\usepackage{tikz}
              \\usetikzlibrary{calc,fadings,decorations.pathreplacing}
              \\begin{document}
              \\begin{tikzpicture}
                \\def\\nuPi{3.1459265}
                \\foreach \\i in {5,4,...,2}{% This one doesn't matter
                  \\foreach \\j in {3,2,...,0}{% This will create a membrane
                                              % with the front lipids visible
                    % top layer
                    \\pgfmathsetmacro{\\dx}{rand*0.1}% A random variance in the x coordinate
                    \\pgfmathsetmacro{\\dy}{rand*0.1}% A random variance in the y coordinate,
                                                    % gives a height fill to the lipid
                    \\pgfmathsetmacro{\\rot}{rand*0.1}% A random variance in the
                                                     % molecule orientation
                    \\shade[ball color=red] ({\\i+\\dx+\\rot},{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)}) circle(0.45);
                    \\shade[ball color=gray] (\\i+\\dx,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-0.9}) circle(0.45);
                    \\shade[ball color=gray] (\\i+\\dx-\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-1.8}) circle(0.45);
                    % bottom layer
                    \\pgfmathsetmacro{\\dx}{rand*0.1}
                    \\pgfmathsetmacro{\\dy}{rand*0.1}
                    \\pgfmathsetmacro{\\rot}{rand*0.1}
                    \\shade[ball color=gray] (\\i+\\dx+\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-2.8}) circle(0.45);
                    \\shade[ball color=gray] (\\i+\\dx,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-3.7}) circle(0.45);
                    \\shade[ball color=red] (\\i+\\dx-\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-4.6}) circle(0.45);
                  }
                }
              \\end{tikzpicture}
              \\end{document}
            """
          ]
    }, (@error, @response, @body) =>
@@ -49,7 +49,7 @@ describe "CompileController", ->

    describe "successfully", ->
      beforeEach ->
        @CompileManager.doCompile = sinon.stub().callsArgWith(1, null, @output_files)
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, @output_files)
        @CompileController.compile @req, @res

      it "should parse the request", ->
@@ -58,7 +58,7 @@ describe "CompileController", ->
          .should.equal true

      it "should run the compile for the specified project", ->
        @CompileManager.doCompile
        @CompileManager.doCompileWithLock
          .calledWith(@request_with_project_id)
          .should.equal true

@@ -75,7 +75,8 @@ describe "CompileController", ->
            status: "success"
            error: null
            outputFiles: @output_files.map (file) =>
              url: "#{@Settings.apis.clsi.url}/project/#{@project_id}/output/#{file.path}"
              url: "#{@Settings.apis.clsi.url}/project/#{@project_id}/build/#{file.build}/output/#{file.path}"
              path: file.path
              type: file.type
              build: file.build
        )
@@ -83,7 +84,7 @@ describe "CompileController", ->

    describe "with an error", ->
      beforeEach ->
        @CompileManager.doCompile = sinon.stub().callsArgWith(1, new Error(@message = "error message"), null)
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, new Error(@message = "error message"), null)
        @CompileController.compile @req, @res

      it "should return the JSON response with the error", ->
@@ -101,7 +102,7 @@ describe "CompileController", ->
      beforeEach ->
        @error = new Error(@message = "container timed out")
        @error.timedout = true
        @CompileManager.doCompile = sinon.stub().callsArgWith(1, @error, null)
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, @error, null)
        @CompileController.compile @req, @res

      it "should return the JSON response with the timeout status", ->
@@ -117,7 +118,7 @@ describe "CompileController", ->

    describe "when the request returns no output files", ->
      beforeEach ->
        @CompileManager.doCompile = sinon.stub().callsArgWith(1, null, [])
        @CompileManager.doCompileWithLock = sinon.stub().callsArgWith(1, null, [])
        @CompileController.compile @req, @res

      it "should return the JSON response with the failure status", ->
@@ -145,12 +146,12 @@ describe "CompileController", ->
        column: @column.toString()
      @res.send = sinon.stub()

      @CompileManager.syncFromCode = sinon.stub().callsArgWith(4, null, @pdfPositions = ["mock-positions"])
      @CompileManager.syncFromCode = sinon.stub().callsArgWith(5, null, @pdfPositions = ["mock-positions"])
      @CompileController.syncFromCode @req, @res, @next

    it "should find the corresponding location in the PDF", ->
      @CompileManager.syncFromCode
        .calledWith(@project_id, @file, @line, @column)
        .calledWith(@project_id, undefined, @file, @line, @column)
        .should.equal true

    it "should return the positions", ->
@@ -174,12 +175,12 @@ describe "CompileController", ->
        v: @v.toString()
      @res.send = sinon.stub()

      @CompileManager.syncFromPdf = sinon.stub().callsArgWith(4, null, @codePositions = ["mock-positions"])
      @CompileManager.syncFromPdf = sinon.stub().callsArgWith(5, null, @codePositions = ["mock-positions"])
      @CompileController.syncFromPdf @req, @res, @next

    it "should find the corresponding location in the code", ->
      @CompileManager.syncFromPdf
        .calledWith(@project_id, @page, @h, @v)
        .calledWith(@project_id, undefined, @page, @h, @v)
        .should.equal true

    it "should return the positions", ->
@@ -197,14 +198,15 @@ describe "CompileController", ->
        project_id: @project_id
      @req.query =
        file: @file
        image: @image = "example.com/image"
      @res.send = sinon.stub()

      @CompileManager.wordcount = sinon.stub().callsArgWith(2, null, @texcount = ["mock-texcount"])
      @CompileManager.wordcount = sinon.stub().callsArgWith(4, null, @texcount = ["mock-texcount"])
      @CompileController.wordcount @req, @res, @next

    it "should return the word count of a file", ->
      @CompileManager.wordcount
        .calledWith(@project_id, @file)
        .calledWith(@project_id, undefined, @file, @image)
        .should.equal true

    it "should return the texcount info", ->
@@ -14,12 +14,66 @@ describe "CompileManager", ->
|
||||
"./OutputFileFinder": @OutputFileFinder = {}
|
||||
"./OutputCacheManager": @OutputCacheManager = {}
|
||||
"settings-sharelatex": @Settings = { path: compilesDir: "/compiles/dir" }
|
||||
"logger-sharelatex": @logger = { log: sinon.stub() }
|
||||
"logger-sharelatex": @logger = { log: sinon.stub() , info:->}
|
||||
"child_process": @child_process = {}
|
||||
"./CommandRunner": @CommandRunner = {}
|
||||
"./DraftModeManager": @DraftModeManager = {}
|
||||
"./TikzManager": @TikzManager = {}
|
||||
"./LockManager": @LockManager = {}
|
||||
"fs": @fs = {}
|
||||
"fs-extra": @fse = { ensureDir: sinon.stub().callsArg(1) }
|
||||
@callback = sinon.stub()
|
||||
|
||||
describe "doCompileWithLock", ->
|
||||
beforeEach ->
|
||||
@request =
|
||||
resources: @resources = "mock-resources"
|
||||
project_id: @project_id = "project-id-123"
|
||||
user_id: @user_id = "1234"
|
||||
@output_files = ["foo", "bar"]
|
||||
@Settings.compileDir = "compiles"
|
||||
@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
|
||||
@CompileManager.doCompile = sinon.stub().callsArgWith(1, null, @output_files)
|
||||
@LockManager.runWithLock = (lockFile, runner, callback) ->
|
||||
runner (err, result...) ->
|
||||
callback(err, result...)
|
||||
|
||||
describe "when the project is not locked", ->
|
||||
beforeEach ->
|
||||
@CompileManager.doCompileWithLock @request, @callback
|
||||
|
||||
it "should ensure that the compile directory exists", ->
|
||||
@fse.ensureDir.calledWith(@compileDir)
|
||||
.should.equal true
|
||||
|
||||
it "should call doCompile with the request", ->
|
||||
@CompileManager.doCompile
|
||||
.calledWith(@request)
|
||||
.should.equal true
|
||||
|
||||
it "should call the callback with the output files", ->
|
||||
@callback.calledWithExactly(null, @output_files)
|
||||
.should.equal true
|
||||
|
||||
describe "when the project is locked", ->
|
||||
beforeEach ->
|
||||
@error = new Error("locked")
|
||||
@LockManager.runWithLock = (lockFile, runner, callback) =>
|
||||
callback(@error)
|
||||
@CompileManager.doCompileWithLock @request, @callback
|
||||
|
||||
it "should ensure that the compile directory exists", ->
|
||||
@fse.ensureDir.calledWith(@compileDir)
|
||||
.should.equal true
|
||||
|
||||
it "should not call doCompile with the request", ->
|
||||
@CompileManager.doCompile
|
||||
.called.should.equal false
|
||||
|
||||
it "should call the callback with the error", ->
|
||||
@callback.calledWithExactly(@error)
|
||||
.should.equal true
|
||||
|
||||
describe "doCompile", ->
|
||||
beforeEach ->
|
||||
@output_files = [{
|
||||
@@ -42,53 +96,112 @@ describe "CompileManager", ->
|
||||
resources: @resources = "mock-resources"
|
||||
rootResourcePath: @rootResourcePath = "main.tex"
|
||||
project_id: @project_id = "project-id-123"
|
||||
user_id: @user_id = "1234"
|
||||
compiler: @compiler = "pdflatex"
|
||||
timeout: @timeout = 42000
|
||||
imageName: @image = "example.com/image"
|
||||
@env = {}
|
||||
@Settings.compileDir = "compiles"
|
||||
@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}"
|
||||
@ResourceWriter.syncResourcesToDisk = sinon.stub().callsArg(3)
|
||||
@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
|
||||
@ResourceWriter.syncResourcesToDisk = sinon.stub().callsArgWith(2, null, @resources)
|
||||
@LatexRunner.runLatex = sinon.stub().callsArg(2)
|
||||
@OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files)
|
||||
@OutputCacheManager.saveOutputFiles = sinon.stub().callsArgWith(2, null, @build_files)
|
||||
@CompileManager.doCompile @request, @callback
|
||||
@DraftModeManager.injectDraftMode = sinon.stub().callsArg(1)
|
||||
@TikzManager.checkMainFile = sinon.stub().callsArg(3, false)
|
||||
|
||||
it "should write the resources to disk", ->
|
||||
@ResourceWriter.syncResourcesToDisk
|
||||
.calledWith(@project_id, @resources, @compileDir)
|
||||
.should.equal true
|
||||
describe "normally", ->
|
||||
beforeEach ->
|
||||
@CompileManager.doCompile @request, @callback
|
||||
|
||||
it "should run LaTeX", ->
|
||||
@LatexRunner.runLatex
|
||||
.calledWith(@project_id, {
|
||||
directory: @compileDir
|
||||
mainFile: @rootResourcePath
|
||||
compiler: @compiler
|
||||
timeout: @timeout
|
||||
})
|
||||
.should.equal true
|
||||
it "should write the resources to disk", ->
|
||||
@ResourceWriter.syncResourcesToDisk
|
||||
.calledWith(@request, @compileDir)
|
||||
.should.equal true
|
||||
|
||||
it "should find the output files", ->
|
||||
@OutputFileFinder.findOutputFiles
|
||||
.calledWith(@resources, @compileDir)
|
||||
.should.equal true
|
||||
it "should run LaTeX", ->
|
||||
@LatexRunner.runLatex
|
||||
.calledWith("#{@project_id}-#{@user_id}", {
|
||||
directory: @compileDir
|
||||
mainFile: @rootResourcePath
|
||||
compiler: @compiler
|
||||
timeout: @timeout
|
||||
image: @image
|
||||
environment: @env
|
||||
})
|
||||
.should.equal true
|
||||
|
||||
it "should return the output files", ->
|
||||
@callback.calledWith(null, @build_files).should.equal true
|
||||
it "should find the output files", ->
|
||||
@OutputFileFinder.findOutputFiles
|
||||
.calledWith(@resources, @compileDir)
|
||||
.should.equal true
|
||||
|
||||
it "should return the output files", ->
|
||||
@callback.calledWith(null, @build_files).should.equal true
|
||||
|
||||
it "should not inject draft mode by default", ->
|
||||
@DraftModeManager.injectDraftMode.called.should.equal false
|
||||
|
||||
describe "with draft mode", ->
|
||||
beforeEach ->
|
||||
@request.draft = true
|
||||
@CompileManager.doCompile @request, @callback
|
||||
|
||||
it "should inject the draft mode header", ->
|
||||
@DraftModeManager.injectDraftMode
|
||||
.calledWith(@compileDir + "/" + @rootResourcePath)
|
||||
.should.equal true
|
||||
|
||||
describe "with a check option", ->
|
||||
beforeEach ->
|
||||
@request.check = "error"
|
||||
@CompileManager.doCompile @request, @callback
|
||||
|
||||
it "should run chktex", ->
|
||||
@LatexRunner.runLatex
|
||||
.calledWith("#{@project_id}-#{@user_id}", {
|
||||
directory: @compileDir
|
||||
mainFile: @rootResourcePath
|
||||
compiler: @compiler
|
||||
timeout: @timeout
|
||||
image: @image
|
||||
environment: {'CHKTEX_OPTIONS': '-nall -e9 -e10 -w15 -w16', 'CHKTEX_EXIT_ON_ERROR':1, 'CHKTEX_ULIMIT_OPTIONS': '-t 5 -v 64000'}
|
||||
})
|
||||
.should.equal true
|
||||
|
||||
describe "with a knitr file and check options", ->
|
||||
beforeEach ->
|
||||
@request.rootResourcePath = "main.Rtex"
|
||||
@request.check = "error"
|
||||
@CompileManager.doCompile @request, @callback
|
||||
|
||||
it "should not run chktex", ->
|
||||
@LatexRunner.runLatex
|
||||
.calledWith("#{@project_id}-#{@user_id}", {
|
||||
directory: @compileDir
|
||||
mainFile: "main.Rtex"
|
||||
compiler: @compiler
|
||||
timeout: @timeout
|
||||
image: @image
|
||||
environment: @env
|
||||
})
|
||||
.should.equal true
|
||||
|
||||
describe "clearProject", ->
|
||||
describe "succesfully", ->
|
||||
beforeEach ->
|
||||
@Settings.compileDir = "compiles"
|
||||
@fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory: ()->true})
|
||||
@proc = new EventEmitter()
|
||||
@proc.stdout = new EventEmitter()
|
||||
@proc.stderr = new EventEmitter()
|
||||
@child_process.spawn = sinon.stub().returns(@proc)
|
||||
@CompileManager.clearProject @project_id, @callback
|
||||
@CompileManager.clearProject @project_id, @user_id, @callback
|
||||
@proc.emit "close", 0
|
||||
|
||||
it "should remove the project directory", ->
|
||||
@child_process.spawn
|
||||
.calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}"])
|
||||
.calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"])
|
||||
.should.equal true
|
||||
|
||||
it "should call the callback", ->
|
||||
@@ -97,17 +210,18 @@ describe "CompileManager", ->
|
||||
describe "with a non-success status code", ->
|
||||
beforeEach ->
|
||||
@Settings.compileDir = "compiles"
|
||||
@fs.lstat = sinon.stub().callsArgWith(1, null,{isDirectory: ()->true})
|
||||
@proc = new EventEmitter()
|
||||
@proc.stdout = new EventEmitter()
|
||||
@proc.stderr = new EventEmitter()
|
||||
@child_process.spawn = sinon.stub().returns(@proc)
|
||||
@CompileManager.clearProject @project_id, @callback
|
||||
@CompileManager.clearProject @project_id, @user_id, @callback
|
||||
@proc.stderr.emit "data", @error = "oops"
|
||||
@proc.emit "close", 1
|
||||
|
||||
it "should remove the project directory", ->
|
||||
@child_process.spawn
|
||||
.calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}"])
|
||||
.calledWith("rm", ["-r", "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"])
|
||||
.should.equal true
|
||||
|
||||
it "should call the callback with an error from the stderr", ->
|
||||
@@ -115,7 +229,7 @@ describe "CompileManager", ->
|
||||
.calledWith(new Error())
|
||||
.should.equal true
|
||||
|
||||
@callback.args[0][0].message.should.equal "rm -r #{@Settings.path.compilesDir}/#{@project_id} failed: #{@error}"
|
||||
@callback.args[0][0].message.should.equal "rm -r #{@Settings.path.compilesDir}/#{@project_id}-#{@user_id} failed: #{@error}"
|
||||
|
||||
describe "syncing", ->
|
||||
beforeEach ->
|
||||
@@ -128,17 +242,18 @@ describe "CompileManager", ->
|
||||
@column = 3
|
||||
@file_name = "main.tex"
|
||||
@child_process.execFile = sinon.stub()
|
||||
@Settings.path.synctexBaseDir = (project_id) => "#{@Settings.path.compilesDir}/#{@project_id}"
|
||||
@Settings.path.synctexBaseDir = (project_id) => "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
|
||||
|
||||
describe "syncFromCode", ->
|
||||
beforeEach ->
|
||||
@fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true})
|
||||
@child_process.execFile.callsArgWith(3, null, @stdout = "NODE\t#{@page}\t#{@h}\t#{@v}\t#{@width}\t#{@height}\n", "")
|
||||
@CompileManager.syncFromCode @project_id, @file_name, @line, @column, @callback
|
||||
@CompileManager.syncFromCode @project_id, @user_id, @file_name, @line, @column, @callback
|
||||
|
||||
it "should execute the synctex binary", ->
|
||||
bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
|
||||
synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}/output.pdf"
|
||||
file_path = "#{@Settings.path.compilesDir}/#{@project_id}/#{@file_name}"
|
||||
synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
|
||||
file_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}"
|
||||
@child_process.execFile
|
||||
.calledWith(bin_path, ["code", synctex_path, file_path, @line, @column], timeout: 10000)
|
||||
.should.equal true
|
||||
@@ -156,12 +271,13 @@ describe "CompileManager", ->
|
||||
|
||||
describe "syncFromPdf", ->
|
||||
beforeEach ->
|
||||
@child_process.execFile.callsArgWith(3, null, @stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}/#{@file_name}\t#{@line}\t#{@column}\n", "")
|
||||
@CompileManager.syncFromPdf @project_id, @page, @h, @v, @callback
|
||||
@fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true})
|
||||
@child_process.execFile.callsArgWith(3, null, @stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}\t#{@line}\t#{@column}\n", "")
|
||||
@CompileManager.syncFromPdf @project_id, @user_id, @page, @h, @v, @callback
|
||||
|
||||
it "should execute the synctex binary", ->
|
||||
bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
|
||||
synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}/output.pdf"
|
||||
synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
|
||||
@child_process.execFile
|
||||
.calledWith(bin_path, ["pdf", synctex_path, @page, @h, @v], timeout: 10000)
|
||||
.should.equal true
|
||||
@@ -177,24 +293,25 @@ describe "CompileManager", ->
|
||||
|
||||
describe "wordcount", ->
|
||||
beforeEach ->
|
||||
@CommandRunner.run = sinon.stub().callsArg(4)
|
||||
@fs.readFileSync = sinon.stub().returns @stdout = "Encoding: ascii\nWords in text: 2"
|
||||
@CommandRunner.run = sinon.stub().callsArg(6)
|
||||
@fs.readFile = sinon.stub().callsArgWith(2, null, @stdout = "Encoding: ascii\nWords in text: 2")
|
||||
@callback = sinon.stub()
|
||||
|
||||
@project_id = "project-id-123"
|
||||
@timeout = 10 * 1000
|
||||
@file_name = "main.tex"
|
||||
@Settings.path.compilesDir = "/local/compile/directory"
|
||||
@image = "example.com/image"
|
||||
|
||||
@CompileManager.wordcount @project_id, @file_name, @callback
|
||||
@CompileManager.wordcount @project_id, @user_id, @file_name, @image, @callback
|
||||
|
||||
it "should run the texcount command", ->
|
||||
@directory = "#{@Settings.path.compilesDir}/#{@project_id}"
|
||||
@directory = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
|
||||
@file_path = "$COMPILE_DIR/#{@file_name}"
|
||||
@command =[ "texcount", "-inc", @file_path, "-out=" + @file_path + ".wc"]
|
||||
@command =[ "texcount", "-nocol", "-inc", @file_path, "-out=" + @file_path + ".wc"]
|
||||
|
||||
@CommandRunner.run
|
||||
.calledWith(@project_id, @command, @directory, @timeout)
|
||||
.calledWith("#{@project_id}-#{@user_id}", @command, @directory, @image, @timeout, {})
|
||||
.should.equal true
|
||||
|
||||
it "should call the callback with the parsed output", ->
|
||||
@@ -208,5 +325,7 @@ describe "CompileManager", ->
|
||||
elements: 0
|
||||
mathInline: 0
|
||||
mathDisplay: 0
|
||||
errors: 0
|
||||
messages: ""
|
||||
})
|
||||
.should.equal true
|
||||
|
||||
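The wordcount hunks switch CommandRunner.run to the six-argument form and read the texcount report asynchronously. A sketch of the parsing step implied by the "Encoding: ascii\nWords in text: 2" stub and the fields asserted above (the exact field list in CompileManager is an assumption):

# Hypothetical parser for texcount's .wc report; unmapped fields keep defaults.
parseTexcountOutput = (output) ->
  results =
    encode: ""
    textWords: 0
    elements: 0
    mathInline: 0
    mathDisplay: 0
    errors: 0
    messages: ""
  for line in output.split("\n")
    [key, value] = line.split(":")
    continue unless value?
    switch key.trim()
      when "Encoding" then results.encode = value.trim()
      when "Words in text" then results.textWords = parseInt(value, 10)
  results

# parseTexcountOutput("Encoding: ascii\nWords in text: 2")
# => { encode: "ascii", textWords: 2, elements: 0, ... }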
55 test/unit/coffee/ContentTypeMapperTests.coffee Normal file
@@ -0,0 +1,55 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/ContentTypeMapper'

describe 'ContentTypeMapper', ->

  beforeEach ->
    @ContentTypeMapper = SandboxedModule.require modulePath

  describe 'map', ->

    it 'should map .txt to text/plain', ->
      content_type = @ContentTypeMapper.map('example.txt')
      content_type.should.equal 'text/plain'

    it 'should map .csv to text/csv', ->
      content_type = @ContentTypeMapper.map('example.csv')
      content_type.should.equal 'text/csv'

    it 'should map .pdf to application/pdf', ->
      content_type = @ContentTypeMapper.map('example.pdf')
      content_type.should.equal 'application/pdf'

    it 'should fall back to octet-stream', ->
      content_type = @ContentTypeMapper.map('example.unknown')
      content_type.should.equal 'application/octet-stream'

    describe 'coercing web files to plain text', ->

      it 'should map .js to plain text', ->
        content_type = @ContentTypeMapper.map('example.js')
        content_type.should.equal 'text/plain'

      it 'should map .html to plain text', ->
        content_type = @ContentTypeMapper.map('example.html')
        content_type.should.equal 'text/plain'

      it 'should map .css to plain text', ->
        content_type = @ContentTypeMapper.map('example.css')
        content_type.should.equal 'text/plain'

    describe 'image files', ->

      it 'should map .png to image/png', ->
        content_type = @ContentTypeMapper.map('example.png')
        content_type.should.equal 'image/png'

      it 'should map .jpeg to image/jpeg', ->
        content_type = @ContentTypeMapper.map('example.jpeg')
        content_type.should.equal 'image/jpeg'

      it 'should map .svg to text/plain to protect against XSS (SVG can execute JS)', ->
        content_type = @ContentTypeMapper.map('example.svg')
        content_type.should.equal 'text/plain'
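The mapper these tests pin down is a small extension table; a minimal sketch consistent with every expectation above (the real app/js/ContentTypeMapper may be organised differently):

Path = require "path"

# Hypothetical sketch — extension-to-content-type table inferred from the tests.
module.exports = ContentTypeMapper =
  map: (path) ->
    switch Path.extname(path)
      when '.txt' then 'text/plain'
      when '.csv' then 'text/csv'
      when '.pdf' then 'application/pdf'
      when '.png' then 'image/png'
      when '.jpeg' then 'image/jpeg'
      # js/html/css/svg are served as plain text so the browser will not
      # execute them (SVG in particular can embed scripts).
      when '.js', '.html', '.css', '.svg' then 'text/plain'
      else 'application/octet-stream'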
61 test/unit/coffee/DraftModeManagerTests.coffee Normal file
@@ -0,0 +1,61 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/DraftModeManager'

describe 'DraftModeManager', ->
  beforeEach ->
    @DraftModeManager = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "logger-sharelatex": @logger = {log: () ->}

  describe "_injectDraftOption", ->
    it "should add draft option into documentclass with existing options", ->
      @DraftModeManager
        ._injectDraftOption('''
          \\documentclass[a4paper,foo=bar]{article}
        ''')
        .should.equal('''
          \\documentclass[draft,a4paper,foo=bar]{article}
        ''')

    it "should add draft option into documentclass with no options", ->
      @DraftModeManager
        ._injectDraftOption('''
          \\documentclass{article}
        ''')
        .should.equal('''
          \\documentclass[draft]{article}
        ''')

  describe "injectDraftMode", ->
    beforeEach ->
      @filename = "/mock/filename.tex"
      @callback = sinon.stub()
      content = '''
        \\documentclass{article}
        \\begin{document}
        Hello world
        \\end{document}
      '''
      @fs.readFile = sinon.stub().callsArgWith(2, null, content)
      @fs.writeFile = sinon.stub().callsArg(2)
      @DraftModeManager.injectDraftMode @filename, @callback

    it "should read the file", ->
      @fs.readFile
        .calledWith(@filename, "utf8")
        .should.equal true

    it "should write the modified file", ->
      @fs.writeFile
        .calledWith(@filename, """
          \\documentclass[draft]{article}
          \\begin{document}
          Hello world
          \\end{document}
        """)
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true
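Both cases reduce to a pair of string rewrites on the \documentclass line. A minimal sketch consistent with the assertions above (the actual module body is assumed):

# Hypothetical sketch of the rewrite; the first replace handles the
# with-options case, so the second can no longer match the same line.
module.exports = DraftModeManager =
  # \documentclass[a4paper]{article} -> \documentclass[draft,a4paper]{article}
  # \documentclass{article}          -> \documentclass[draft]{article}
  _injectDraftOption: (content) ->
    content
      .replace(/\\documentclass\[/g, "\\documentclass[draft,")
      .replace(/\\documentclass\{/g, "\\documentclass[draft]{")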
@@ -19,12 +19,14 @@ describe "LatexRunner", ->

    @directory = "/local/compile/directory"
    @mainFile = "main-file.tex"
    @compiler = "pdflatex"
    @image = "example.com/image"
    @callback = sinon.stub()
    @project_id = "project-id-123"
    @env = {'foo': '123'}

  describe "runLatex", ->
    beforeEach ->
      @CommandRunner.run = sinon.stub().callsArg(4)
      @CommandRunner.run = sinon.stub().callsArg(6)

    describe "normally", ->
      beforeEach ->

@@ -33,11 +35,13 @@ describe "LatexRunner", ->

          mainFile: @mainFile
          compiler: @compiler
          timeout: @timeout = 42000
          image: @image
          environment: @env
        @callback

      it "should run the latex command", ->
        @CommandRunner.run
          .calledWith(@project_id, sinon.match.any, @directory, @timeout)
          .calledWith(@project_id, sinon.match.any, @directory, @image, @timeout, @env)
          .should.equal true

    describe "with an .Rtex main file", ->

@@ -46,6 +50,7 @@ describe "LatexRunner", ->

          directory: @directory
          mainFile: "main-file.Rtex"
          compiler: @compiler
          image: @image
          timeout: @timeout = 42000
        @callback
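These hunks track a wider signature change: CommandRunner.run now also takes the docker image name and an environment map. A sketch of the six-argument shape the stubs expect (callsArg(6) fires the seventh argument, the callback); this is an assumed local runner, not the project's actual CommandRunner:

{spawn} = require "child_process"

module.exports = CommandRunner =
  run: (project_id, command, directory, image, timeout, environment, callback) ->
    # A local runner can ignore `image`; a docker-based runner would use it
    # to pick the container image.
    command = (arg.toString().replace("$COMPILE_DIR", directory) for arg in command)
    env = {}
    env[k] = v for k, v of process.env
    env[k] = v for k, v of environment
    proc = spawn command[0], command.slice(1), cwd: directory, env: env
    killTimer = setTimeout (-> proc.kill()), timeout
    proc.on "close", (code) ->
      clearTimeout killTimer
      if code is 0 then callback() else callback(new Error("exit status #{code}"))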
54 test/unit/coffee/LockManager.coffee Normal file
@@ -0,0 +1,54 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/LockManager'
Path = require "path"
Errors = require "../../../app/js/Errors"

describe "LockManager", ->
  beforeEach ->
    @LockManager = SandboxedModule.require modulePath, requires:
      "settings-sharelatex": {}
      "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
      "lockfile": @Lockfile = {}
    @lockFile = "/local/compile/directory/.project-lock"

  describe "runWithLock", ->
    beforeEach ->
      @runner = sinon.stub().callsArgWith(0, null, "foo", "bar")
      @callback = sinon.stub()

    describe "normally", ->
      beforeEach ->
        @Lockfile.lock = sinon.stub().callsArgWith(2, null)
        @Lockfile.unlock = sinon.stub().callsArgWith(1, null)
        @LockManager.runWithLock @lockFile, @runner, @callback

      it "should run the compile", ->
        @runner
          .calledWith()
          .should.equal true

      it "should call the callback with the response from the compile", ->
        @callback
          .calledWithExactly(null, "foo", "bar")
          .should.equal true

    describe "when the project is locked", ->
      beforeEach ->
        @error = new Error()
        @error.code = "EEXIST"
        @Lockfile.lock = sinon.stub().callsArgWith(2, @error)
        @Lockfile.unlock = sinon.stub().callsArgWith(1, null)
        @LockManager.runWithLock @lockFile, @runner, @callback

      it "should not run the compile", ->
        @runner
          .called
          .should.equal false

      it "should return an error", ->
        error = new Errors.AlreadyCompilingError()
        @callback
          .calledWithExactly(error)
          .should.equal true
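A minimal sketch of runWithLock over the lockfile package, matching the stubbed lock/unlock calls above (lock options and the error message are assumptions):

Lockfile = require "lockfile"
Errors = require "./Errors"

module.exports = LockManager =
  runWithLock: (lockFile, runner, callback = (error) ->) ->
    Lockfile.lock lockFile, {}, (error) ->
      # lockfile signals an already-held lock with EEXIST.
      if error?.code is "EEXIST"
        return callback(new Errors.AlreadyCompilingError("compile in progress"))
      return callback(error) if error?
      runner (error, args...) ->
        # Always release the lock before reporting the compile result.
        Lockfile.unlock lockFile, () ->
          callback(error, args...)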
103 test/unit/coffee/OutputFileOptimiserTests.coffee Normal file
@@ -0,0 +1,103 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/OutputFileOptimiser'
path = require "path"
expect = require("chai").expect
EventEmitter = require("events").EventEmitter

describe "OutputFileOptimiser", ->
  beforeEach ->
    @OutputFileOptimiser = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "path": @Path = {}
      "child_process": spawn: @spawn = sinon.stub()
      "logger-sharelatex": { log: sinon.stub(), warn: sinon.stub() }
      "./Metrics": {}
    @directory = "/test/dir"
    @callback = sinon.stub()

  describe "optimiseFile", ->
    beforeEach ->
      @src = "./output.pdf"
      @dst = "./output.pdf"

    describe "when the file is not a pdf file", ->
      beforeEach (done) ->
        @src = "./output.log"
        @OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false)
        @OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null)
        @OutputFileOptimiser.optimiseFile @src, @dst, done

      it "should not check if the file is optimised", ->
        @OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal false

      it "should not optimise the file", ->
        @OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal false

    describe "when the pdf file is not optimised", ->
      beforeEach (done) ->
        @OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, false)
        @OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null)
        @OutputFileOptimiser.optimiseFile @src, @dst, done

      it "should check if the pdf is optimised", ->
        @OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal true

      it "should optimise the pdf", ->
        @OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal true

    describe "when the pdf file is optimised", ->
      beforeEach (done) ->
        @OutputFileOptimiser.checkIfPDFIsOptimised = sinon.stub().callsArgWith(1, null, true)
        @OutputFileOptimiser.optimisePDF = sinon.stub().callsArgWith(2, null)
        @OutputFileOptimiser.optimiseFile @src, @dst, done

      it "should check if the pdf is optimised", ->
        @OutputFileOptimiser.checkIfPDFIsOptimised.calledWith(@src).should.equal true

      it "should not optimise the pdf", ->
        @OutputFileOptimiser.optimisePDF.calledWith(@src, @dst).should.equal false

  describe "checkIfPDFIsOptimised", ->
    beforeEach ->
      @callback = sinon.stub()
      @fd = 1234
      @fs.open = sinon.stub().yields(null, @fd)
      @fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello /Linearized 1"))
      @fs.close = sinon.stub().withArgs(@fd).yields(null)
      @OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback

    describe "for a linearised file", ->
      beforeEach ->
        @fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello /Linearized 1"))
        @OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback

      it "should open the file", ->
        @fs.open.calledWith(@src, "r").should.equal true

      it "should read the header", ->
        @fs.read.calledWith(@fd).should.equal true

      it "should close the file", ->
        @fs.close.calledWith(@fd).should.equal true

      it "should call the callback with a true result", ->
        @callback.calledWith(null, true).should.equal true

    describe "for an unlinearised file", ->
      beforeEach ->
        @fs.read = sinon.stub().withArgs(@fd).yields(null, 100, new Buffer("hello not linearized 1"))
        @OutputFileOptimiser.checkIfPDFIsOptimised @src, @callback

      it "should open the file", ->
        @fs.open.calledWith(@src, "r").should.equal true

      it "should read the header", ->
        @fs.read.calledWith(@fd).should.equal true

      it "should close the file", ->
        @fs.close.calledWith(@fd).should.equal true

      it "should call the callback with a false result", ->
        @callback.calledWith(null, false).should.equal true
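The linearisation check these tests describe reads the start of the PDF and looks for the /Linearized marker. A sketch of that check (buffer size and encoding are assumptions; the stubs above only fix the fs call sequence):

fs = require "fs"

# Hypothetical sketch: open, read the header, close, then report whether the
# "/Linearized" marker appears in the first bytes of the file.
checkIfPDFIsOptimised = (file, callback) ->
  SIZE = 100 # bytes of header to inspect
  buffer = new Buffer(SIZE)
  fs.open file, "r", (error, fd) ->
    return callback(error) if error?
    fs.read fd, buffer, 0, SIZE, 0, (error, bytesRead, buffer) ->
      fs.close fd, () ->
        return callback(error) if error?
        isOptimised = buffer.toString("ascii").indexOf("/Linearized 1") >= 0
        callback(null, isOptimised)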
@@ -13,6 +13,7 @@ describe "ProjectPersistenceManager", ->

      "./db": @db = {}
    @callback = sinon.stub()
    @project_id = "project-id-123"
    @user_id = "1234"

  describe "clearExpiredProjects", ->
    beforeEach ->

@@ -21,12 +22,13 @@ describe "ProjectPersistenceManager", ->

        "project-id-2"
      ]
      @ProjectPersistenceManager._findExpiredProjectIds = sinon.stub().callsArgWith(0, null, @project_ids)
      @ProjectPersistenceManager.clearProject = sinon.stub().callsArg(1)
      @ProjectPersistenceManager.clearProjectFromCache = sinon.stub().callsArg(1)
      @CompileManager.clearExpiredProjects = sinon.stub().callsArg(1)
      @ProjectPersistenceManager.clearExpiredProjects @callback

    it "should clear each expired project", ->
      for project_id in @project_ids
        @ProjectPersistenceManager.clearProject
        @ProjectPersistenceManager.clearProjectFromCache
          .calledWith(project_id)
          .should.equal true

@@ -37,8 +39,8 @@ describe "ProjectPersistenceManager", ->

    beforeEach ->
      @ProjectPersistenceManager._clearProjectFromDatabase = sinon.stub().callsArg(1)
      @UrlCache.clearProject = sinon.stub().callsArg(1)
      @CompileManager.clearProject = sinon.stub().callsArg(1)
      @ProjectPersistenceManager.clearProject @project_id, @callback
      @CompileManager.clearProject = sinon.stub().callsArg(2)
      @ProjectPersistenceManager.clearProject @project_id, @user_id, @callback

    it "should clear the project from the database", ->
      @ProjectPersistenceManager._clearProjectFromDatabase

@@ -52,7 +54,7 @@ describe "ProjectPersistenceManager", ->

    it "should clear the project compile folder", ->
      @CompileManager.clearProject
        .calledWith(@project_id)
        .calledWith(@project_id, @user_id)
        .should.equal true

    it "should call the callback", ->
@@ -206,11 +206,49 @@ describe "RequestParser", ->

  describe "with a root resource path that needs escaping", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = "`rm -rf foo`.tex"
      @badPath = "`rm -rf foo`.tex"
      @goodPath = "rm -rf foo.tex"
      @validRequest.compile.rootResourcePath = @badPath
      @validRequest.compile.resources.push {
        path: @badPath
        date: "12:00 01/02/03"
        content: "Hello world"
      }
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return the escaped resource", ->
      @data.rootResourcePath.should.equal "rm -rf foo.tex"
      @data.rootResourcePath.should.equal @goodPath

    it "should also escape the resource path", ->
      @data.resources[0].path.should.equal @goodPath

  describe "with a root resource path that has a relative path", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = "foo/../../bar.tex"
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return an error", ->
      @callback.calledWith("relative path in root resource")
        .should.equal true

  describe "with a root resource path that has unescaped + relative path", ->
    beforeEach ->
      @validRequest.compile.rootResourcePath = "foo/#../bar.tex"
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return an error", ->
      @callback.calledWith("relative path in root resource")
        .should.equal true

  describe "with an unknown syncType", ->
    beforeEach ->
      @validRequest.compile.options.syncType = "unexpected"
      @RequestParser.parse @validRequest, @callback
      @data = @callback.args[0][1]

    it "should return an error", ->
      @callback.calledWith("syncType attribute should be one of: full, incremental")
        .should.equal true
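The escaping cases imply a two-step rule: strip characters outside a conservative whitelist, then reject any path that still contains a ".." component. A sketch under those assumptions (the whitelist regex and helper names are illustrative, not the actual RequestParser):

# Hypothetical helpers matching the three path cases above.
_sanitizePath = (path) ->
  # Drop anything outside a conservative whitelist; backticks and '#' go away.
  path.replace(/[^a-zA-Z0-9_"'\/\\. ()\-]/g, "")

_checkPath = (path) ->
  # Reject any "../" style component left after sanitising.
  for part in path.split("/")
    throw new Error("relative path in root resource") if part is ".."
  path

# "`rm -rf foo`.tex" -> "rm -rf foo.tex"                       (accepted)
# "foo/#../bar.tex"  -> "foo/../bar.tex" -> rejected as relative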
109 test/unit/coffee/ResourceStateManagerTests.coffee Normal file
@@ -0,0 +1,109 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
should = require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/ResourceStateManager'
Path = require "path"
Errors = require "../../../app/js/Errors"

describe "ResourceStateManager", ->
  beforeEach ->
    @ResourceStateManager = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()}
      "./SafeReader": @SafeReader = {}
    @basePath = "/path/to/write/files/to"
    @resources = [
      {path: "resource-1-mock"}
      {path: "resource-2-mock"}
      {path: "resource-3-mock"}
    ]
    @state = "1234567890"
    @resourceFileName = "#{@basePath}/.project-sync-state"
    @resourceFileContents = "#{@resources[0].path}\n#{@resources[1].path}\n#{@resources[2].path}\nstateHash:#{@state}"
    @callback = sinon.stub()

  describe "saveProjectState", ->
    beforeEach ->
      @fs.writeFile = sinon.stub().callsArg(2)

    describe "when the state is specified", ->
      beforeEach ->
        @ResourceStateManager.saveProjectState(@state, @resources, @basePath, @callback)

      it "should write the resource list to disk", ->
        @fs.writeFile
          .calledWith(@resourceFileName, @resourceFileContents)
          .should.equal true

      it "should call the callback", ->
        @callback.called.should.equal true

    describe "when the state is undefined", ->
      beforeEach ->
        @state = undefined
        @fs.unlink = sinon.stub().callsArg(1)
        @ResourceStateManager.saveProjectState(@state, @resources, @basePath, @callback)

      it "should unlink the resource file", ->
        @fs.unlink
          .calledWith(@resourceFileName)
          .should.equal true

      it "should not write the resource list to disk", ->
        @fs.writeFile.called.should.equal false

      it "should call the callback", ->
        @callback.called.should.equal true

  describe "checkProjectStateMatches", ->

    describe "when the state matches", ->
      beforeEach ->
        @SafeReader.readFile = sinon.stub().callsArgWith(3, null, @resourceFileContents)
        @ResourceStateManager.checkProjectStateMatches(@state, @basePath, @callback)

      it "should read the resource file", ->
        @SafeReader.readFile
          .calledWith(@resourceFileName)
          .should.equal true

      it "should call the callback with the results", ->
        @callback.calledWithMatch(null, @resources).should.equal true

    describe "when the state does not match", ->
      beforeEach ->
        @SafeReader.readFile = sinon.stub().callsArgWith(3, null, @resourceFileContents)
        @ResourceStateManager.checkProjectStateMatches("not-the-original-state", @basePath, @callback)

      it "should call the callback with an error", ->
        error = new Errors.FilesOutOfSyncError("invalid state for incremental update")
        @callback.calledWith(error).should.equal true

  describe "checkResourceFiles", ->
    describe "when all the files are present", ->
      beforeEach ->
        @allFiles = [@resources[0].path, @resources[1].path, @resources[2].path]
        @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback)

      it "should call the callback", ->
        @callback.calledWithExactly().should.equal true

    describe "when there is a missing file", ->
      beforeEach ->
        @allFiles = [@resources[0].path, @resources[1].path]
        @fs.stat = sinon.stub().callsArgWith(1, new Error())
        @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback)

      it "should call the callback with an error", ->
        error = new Errors.FilesOutOfSyncError("resource files missing in incremental update")
        @callback.calledWith(error).should.equal true

    describe "when a resource contains a relative path", ->
      beforeEach ->
        @resources[0].path = "../foo/bar.tex"
        @allFiles = [@resources[0].path, @resources[1].path, @resources[2].path]
        @ResourceStateManager.checkResourceFiles(@resources, @allFiles, @basePath, @callback)

      it "should call the callback with an error", ->
        @callback.calledWith(new Error("relative path in resource file list")).should.equal true
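From @resourceFileContents, the on-disk format of .project-sync-state is simply the resource paths, one per line, followed by a "stateHash:" line. A sketch of the save side under that assumption (helper names are illustrative):

fs = require "fs"
Path = require "path"

# Hypothetical sketch of the .project-sync-state format used above:
# one resource path per line, then "stateHash:<state>".
buildStateFile = (state, resources) ->
  (resource.path for resource in resources).concat("stateHash:#{state}").join("\n")

saveProjectState = (state, resources, basePath, callback) ->
  stateFile = Path.join(basePath, ".project-sync-state")
  # No state means a full sync: drop any stale state file instead of writing.
  return fs.unlink(stateFile, callback) if !state?
  fs.writeFile stateFile, buildStateFile(state, resources), callback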
@@ -7,11 +7,15 @@ path = require "path"

describe "ResourceWriter", ->
  beforeEach ->
    @ResourceWriter = SandboxedModule.require modulePath, requires:
      "fs": @fs = {}
      "fs": @fs =
        mkdir: sinon.stub().callsArg(1)
        unlink: sinon.stub().callsArg(1)
      "./ResourceStateManager": @ResourceStateManager = {}
      "wrench": @wrench = {}
      "./UrlCache": @UrlCache = {}
      "mkdirp": @mkdirp = sinon.stub().callsArg(1)
      "./OutputFileFinder": @OutputFileFinder = {}
      "logger-sharelatex": {log: sinon.stub(), err: sinon.stub()}
      "./Metrics": @Metrics =
        Timer: class Timer
          done: sinon.stub()

@@ -19,7 +23,7 @@ describe "ResourceWriter", ->

    @basePath = "/path/to/write/files/to"
    @callback = sinon.stub()

  describe "syncResourcesToDisk", ->
  describe "syncResourcesToDisk on a full request", ->
    beforeEach ->
      @resources = [
        "resource-1-mock"

@@ -28,7 +32,12 @@ describe "ResourceWriter", ->

      ]
      @ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
      @ResourceWriter._removeExtraneousFiles = sinon.stub().callsArg(2)
      @ResourceWriter.syncResourcesToDisk(@project_id, @resources, @basePath, @callback)
      @ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
      @ResourceWriter.syncResourcesToDisk({
        project_id: @project_id
        syncState: @syncState = "0123456789abcdef"
        resources: @resources
      }, @basePath, @callback)

    it "should remove old files", ->
      @ResourceWriter._removeExtraneousFiles

@@ -41,9 +50,77 @@ describe "ResourceWriter", ->

        .calledWith(@project_id, resource, @basePath)
        .should.equal true

    it "should store the sync state and resource list", ->
      @ResourceStateManager.saveProjectState
        .calledWith(@syncState, @resources, @basePath)
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "syncResourcesToDisk on an incremental update", ->
    beforeEach ->
      @resources = [
        "resource-1-mock"
      ]
      @ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3)
      @ResourceWriter._removeExtraneousFiles = sinon.stub().callsArgWith(2, null, @outputFiles = [], @allFiles = [])
      @ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, null, @resources)
      @ResourceStateManager.saveProjectState = sinon.stub().callsArg(3)
      @ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3)
      @ResourceWriter.syncResourcesToDisk({
        project_id: @project_id,
        syncType: "incremental",
        syncState: @syncState = "1234567890abcdef",
        resources: @resources
      }, @basePath, @callback)

    it "should check the sync state matches", ->
      @ResourceStateManager.checkProjectStateMatches
        .calledWith(@syncState, @basePath)
        .should.equal true

    it "should remove old files", ->
      @ResourceWriter._removeExtraneousFiles
        .calledWith(@resources, @basePath)
        .should.equal true

    it "should check each resource exists", ->
      @ResourceStateManager.checkResourceFiles
        .calledWith(@resources, @allFiles, @basePath)
        .should.equal true

    it "should write each resource to disk", ->
      for resource in @resources
        @ResourceWriter._writeResourceToDisk
          .calledWith(@project_id, resource, @basePath)
          .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true

  describe "syncResourcesToDisk on an incremental update when the state does not match", ->
    beforeEach ->
      @resources = [
        "resource-1-mock"
      ]
      @ResourceStateManager.checkProjectStateMatches = sinon.stub().callsArgWith(2, @error = new Error())
      @ResourceWriter.syncResourcesToDisk({
        project_id: @project_id,
        syncType: "incremental",
        syncState: @syncState = "1234567890abcdef",
        resources: @resources
      }, @basePath, @callback)

    it "should check whether the sync state matches", ->
      @ResourceStateManager.checkProjectStateMatches
        .calledWith(@syncState, @basePath)
        .should.equal true

    it "should call the callback with an error", ->
      @callback.calledWith(@error).should.equal true

  describe "_removeExtraneousFiles", ->
    beforeEach ->
      @output_files = [{

@@ -55,6 +132,8 @@ describe "ResourceWriter", ->

      }, {
        path: "extra.aux"
        type: "aux"
      }, {
        path: "cache/_chunk1"
      }]
      @resources = "mock-resources"
      @OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files)

@@ -81,6 +160,11 @@ describe "ResourceWriter", ->

        .calledWith(path.join(@basePath, "extra.aux"))
        .should.equal false

    it "should not delete the knitr cache file", ->
      @ResourceWriter._deleteFileIfNotDirectory
        .calledWith(path.join(@basePath, "cache/_chunk1"))
        .should.equal false

    it "should call the callback", ->
      @callback.called.should.equal true

@@ -150,6 +234,27 @@ describe "ResourceWriter", ->

      .calledWith(new Error("resource path is outside root directory"))
      .should.equal true

  describe "checkPath", ->
    describe "with a valid path", ->
      beforeEach ->
        @ResourceWriter.checkPath("foo", "bar", @callback)

      it "should return the joined path", ->
        @callback.calledWith(null, "foo/bar")
          .should.equal true

    describe "with an invalid path", ->
      beforeEach ->
        @ResourceWriter.checkPath("foo", "baz/../../bar", @callback)

      it "should return an error", ->
        @callback.calledWith(new Error("resource path is outside root directory"))
          .should.equal true

    describe "with another invalid path matching on a prefix", ->
      beforeEach ->
        @ResourceWriter.checkPath("foo", "../foobar/baz", @callback)

      it "should return an error", ->
        @callback.calledWith(new Error("resource path is outside root directory"))
          .should.equal true
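The third checkPath case is the interesting one: path.join("foo", "../foobar/baz") normalises to "foobar/baz", which begins with the string "foo" but lies outside the "foo" directory, so a bare prefix test would pass it. A sketch of a check that handles all three cases (the implementation details are assumed):

path = require "path"

# Hypothetical sketch of checkPath: join, normalise, and require the result to
# stay inside the base directory. Comparing against basePath + "/" (rather
# than just basePath) is what makes "../foobar/baz" fail for base "foo".
checkPath = (basePath, resourcePath, callback) ->
  joined = path.normalize(path.join(basePath, resourcePath))
  if joined.slice(0, basePath.length + 1) isnt basePath + "/"
    return callback(new Error("resource path is outside root directory"))
  callback(null, joined)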
101 test/unit/coffee/TikzManager.coffee Normal file
@@ -0,0 +1,101 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
modulePath = require('path').join __dirname, '../../../app/js/TikzManager'

describe 'TikzManager', ->
  beforeEach ->
    @TikzManager = SandboxedModule.require modulePath, requires:
      "./ResourceWriter": @ResourceWriter = {}
      "./SafeReader": @SafeReader = {}
      "fs": @fs = {}
      "logger-sharelatex": @logger = {log: () ->}

  describe "checkMainFile", ->
    beforeEach ->
      @compileDir = "compile-dir"
      @mainFile = "main.tex"
      @callback = sinon.stub()

    describe "if there is already an output.tex file in the resources", ->
      beforeEach ->
        @resources = [{path: "main.tex"}, {path: "output.tex"}]
        @TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback

      it "should call the callback with false", ->
        @callback.calledWithExactly(null, false)
          .should.equal true

    describe "if there is no output.tex file in the resources", ->
      beforeEach ->
        @resources = [{path: "main.tex"}]
        @ResourceWriter.checkPath = sinon.stub()
          .withArgs(@compileDir, @mainFile)
          .callsArgWith(2, null, "#{@compileDir}/#{@mainFile}")

      describe "and the main file contains tikzexternalize", ->
        beforeEach ->
          @SafeReader.readFile = sinon.stub()
            .withArgs("#{@compileDir}/#{@mainFile}")
            .callsArgWith(3, null, "hello \\tikzexternalize")
          @TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback

        it "should look at the file on disk", ->
          @SafeReader.readFile
            .calledWith("#{@compileDir}/#{@mainFile}")
            .should.equal true

        it "should call the callback with true", ->
          @callback.calledWithExactly(null, true)
            .should.equal true

      describe "and the main file does not contain tikzexternalize", ->
        beforeEach ->
          @SafeReader.readFile = sinon.stub()
            .withArgs("#{@compileDir}/#{@mainFile}")
            .callsArgWith(3, null, "hello")
          @TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback

        it "should look at the file on disk", ->
          @SafeReader.readFile
            .calledWith("#{@compileDir}/#{@mainFile}")
            .should.equal true

        it "should call the callback with false", ->
          @callback.calledWithExactly(null, false)
            .should.equal true

  describe "injectOutputFile", ->
    beforeEach ->
      @rootDir = "/mock"
      @filename = "filename.tex"
      @callback = sinon.stub()
      @content = '''
        \\documentclass{article}
        \\usepackage{tikz}
        \\tikzexternalize
        \\begin{document}
        Hello world
        \\end{document}
      '''
      @fs.readFile = sinon.stub().callsArgWith(2, null, @content)
      @fs.writeFile = sinon.stub().callsArg(3)
      @ResourceWriter.checkPath = sinon.stub().callsArgWith(2, null, "#{@rootDir}/#{@filename}")
      @TikzManager.injectOutputFile @rootDir, @filename, @callback

    it "should check the path", ->
      @ResourceWriter.checkPath.calledWith(@rootDir, @filename)
        .should.equal true

    it "should read the file", ->
      @fs.readFile
        .calledWith("#{@rootDir}/#{@filename}", "utf8")
        .should.equal true

    it "should write out the same file as output.tex", ->
      @fs.writeFile
        .calledWith("#{@rootDir}/output.tex", @content, {flag: 'wx'})
        .should.equal true

    it "should call the callback", ->
      @callback.called.should.equal true
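A sketch of injectOutputFile matching the calls stubbed above (the chaining and error handling are assumptions). The 'wx' flag makes the write fail rather than clobber an existing output.tex, so a file supplied by the user always wins; checkMainFile has already ruled out an output.tex in the resource list by the time this runs:

fs = require "fs"
Path = require "path"
ResourceWriter = require "./ResourceWriter"

# Hypothetical sketch: validate the path, read the main file, copy it to
# output.tex for tikzexternalize without overwriting an existing file.
injectOutputFile = (rootDir, filename, callback) ->
  ResourceWriter.checkPath rootDir, filename, (error, filePath) ->
    return callback(error) if error?
    fs.readFile filePath, "utf8", (error, content) ->
      return callback(error) if error?
      fs.writeFile Path.join(rootDir, "output.tex"), content, {flag: "wx"}, (error) ->
        callback(error)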