7 Commits

Author       SHA1         Message                         Date
hugh-obrien  bfe31098cb   send html                       2018-07-09 14:37:54 +01:00
hugh-obrien  a1f9305047   hard coded a word doc           2018-07-09 14:02:39 +01:00
hugh-obrien  c66d76af2d   make it do pdf                  2018-07-09 13:52:18 +01:00
hugh-obrien  b8c82620e5   main file hack                  2018-07-09 13:45:57 +01:00
hugh-obrien  3b905353d0   main file fix nonsence          2018-07-09 11:40:40 +01:00
hugh-obrien  af6a402a87   fix array nonsense              2018-07-09 11:37:57 +01:00
hugh-obrien  3c639959f4   hack in the command and image   2018-07-09 11:35:19 +01:00
68 changed files with 456 additions and 6253 deletions


@@ -1,9 +0,0 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
app.js
**/js/*


@@ -1,38 +0,0 @@
<!-- BUG REPORT TEMPLATE -->
## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->
1.
2.
3.
## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->
## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->
## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->
## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->
* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:
## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->
## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->
-
-


@@ -1,45 +0,0 @@
<!-- Please review https://github.com/overleaf/write_latex/blob/master/.github/CONTRIBUTING.md for guidance on what is expected in each section. -->
### Description
#### Screenshots
#### Related Issues / PRs
### Review
#### Potential Impact
#### Manual Testing Performed
- [ ]
- [ ]
#### Accessibility
### Deployment
#### Deployment Checklist
- [ ] Update documentation not included in the PR (if any)
- [ ]
#### Metrics and Monitoring
#### Who Needs to Know?

.gitignore vendored (5 changed lines)

@@ -7,13 +7,10 @@ test/acceptance/js
 test/acceptance/fixtures/tmp
 compiles
 app.js
-**/*.map
 .DS_Store
 *~
 cache
 .vagrant
 db.sqlite
-db.sqlite-wal
-db.sqlite-shm
 config/*
-npm-debug.log
+bin/synctex

.nvmrc (2 changed lines)

@@ -1 +1 @@
-10.15.0
+6.11.2


@@ -1,35 +0,0 @@
# This viminfo file was generated by Vim 7.4.
# You may edit it if you're careful!
# Value of 'encoding' when this file was written
*encoding=latin1
# hlsearch on (H) or off (h):
~h
# Command Line History (newest to oldest):
:x
# Search String History (newest to oldest):
# Expression History (newest to oldest):
# Input Line History (newest to oldest):
# Input Line History (newest to oldest):
# Registers:
# File marks:
'0 1 0 ~/hello
# Jumplist (newest first):
-' 1 0 ~/hello
# History of marks within files (newest to oldest):
> ~/hello
" 1 0
^ 1 1
. 1 0
+ 1 0


@@ -1,27 +0,0 @@
FROM node:10.15.0 as app
WORKDIR /app
#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/
RUN npm install --quiet
COPY . /app
RUN npm run compile:all
FROM node:10.15.0
RUN \
apt -y update && \
apt -y install moreutils
COPY --from=app /app /app
WORKDIR /app
RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
ENTRYPOINT ["/bin/bash", "entrypoint.sh"]
CMD ["node", "--expose-gc", "app.js"]

Gruntfile.coffee (new file, 104 lines)

@@ -0,0 +1,104 @@
spawn = require("child_process").spawn

module.exports = (grunt) ->
  grunt.initConfig
    coffee:
      app_src:
        expand: true,
        flatten: true,
        cwd: "app"
        src: ['coffee/*.coffee'],
        dest: 'app/js/',
        ext: '.js'

      app:
        src: "app.coffee"
        dest: "app.js"

      unit_tests:
        expand: true
        cwd: "test/unit/coffee"
        src: ["**/*.coffee"]
        dest: "test/unit/js/"
        ext: ".js"

      acceptance_tests:
        expand: true
        cwd: "test/acceptance/coffee"
        src: ["**/*.coffee"]
        dest: "test/acceptance/js/"
        ext: ".js"

      smoke_tests:
        expand: true
        cwd: "test/smoke/coffee"
        src: ["**/*.coffee"]
        dest: "test/smoke/js"
        ext: ".js"

    clean:
      app: ["app/js/"]
      unit_tests: ["test/unit/js"]
      acceptance_tests: ["test/acceptance/js"]
      smoke_tests: ["test/smoke/js"]

    execute:
      app:
        src: "app.js"

    mkdir:
      all:
        options:
          create: ["cache", "compiles"]

    mochaTest:
      unit:
        options:
          reporter: "spec"
          grep: grunt.option("grep")
        src: ["test/unit/js/**/*.js"]
      acceptance:
        options:
          reporter: "spec"
          timeout: 40000
          grep: grunt.option("grep")
        src: ["test/acceptance/js/**/*.js"]
      smoke:
        options:
          reported: "spec"
          timeout: 10000
        src: ["test/smoke/js/**/*.js"]

  grunt.loadNpmTasks 'grunt-contrib-coffee'
  grunt.loadNpmTasks 'grunt-contrib-clean'
  grunt.loadNpmTasks 'grunt-mocha-test'
  grunt.loadNpmTasks 'grunt-shell'
  grunt.loadNpmTasks 'grunt-execute'
  grunt.loadNpmTasks 'grunt-bunyan'
  grunt.loadNpmTasks 'grunt-mkdir'

  grunt.registerTask 'compile:bin', () ->
    callback = @async()
    proc = spawn "cc", [
      "-o", "bin/synctex", "-Isrc/synctex",
      "src/synctex.c", "src/synctex/synctex_parser.c", "src/synctex/synctex_parser_utils.c", "-lz"
    ], stdio: "inherit"
    proc.on "close", callback

  grunt.registerTask 'compile:app', ['clean:app', 'coffee:app', 'coffee:app_src', 'coffee:smoke_tests', 'compile:bin']
  grunt.registerTask 'run', ['compile:app', 'bunyan', 'execute']

  grunt.registerTask 'compile:unit_tests', ['clean:unit_tests', 'coffee:unit_tests']
  grunt.registerTask 'test:unit', ['compile:app', 'compile:unit_tests', 'mochaTest:unit']

  grunt.registerTask 'compile:acceptance_tests', ['clean:acceptance_tests', 'coffee:acceptance_tests']
  grunt.registerTask 'test:acceptance', ['compile:acceptance_tests', 'mochaTest:acceptance']

  grunt.registerTask 'compile:smoke_tests', ['clean:smoke_tests', 'coffee:smoke_tests']
  grunt.registerTask 'test:smoke', ['compile:smoke_tests', 'mochaTest:smoke']

  grunt.registerTask 'install', 'compile:app'

  grunt.registerTask 'default', ['mkdir', 'run']
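The task graph above reduces to a few entry points. A sketch of a local development loop, assuming grunt-cli is available on the PATH:

    npm install            # installs grunt and the grunt-* plugins used above
    grunt compile:app      # clean, compile CoffeeScript, and cc bin/synctex
    grunt test:unit        # compile:app, compile the unit tests, run mochaTest:unit
    grunt                  # default task: mkdir cache/compiles, then compile and run app.js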

Jenkinsfile vendored (116 changed lines)

@@ -1,75 +1,79 @@
-String cron_string = BRANCH_NAME == "master" ? "@daily" : ""
 pipeline {
   agent any
-  environment {
-    GIT_PROJECT = "clsi"
-    JENKINS_WORKFLOW = "clsi-sharelatex"
-    TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
-    GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT"
-  }
   triggers {
     pollSCM('* * * * *')
-    cron(cron_string)
+    cron('@daily')
   }
   stages {
-    stage('Install') {
+    stage('Clean') {
       steps {
-        withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
-          sh "curl $GIT_API_URL \
-            --data '{ \
-            \"state\" : \"pending\", \
-            \"target_url\": \"$TARGET_URL\", \
-            \"description\": \"Your build is underway\", \
-            \"context\": \"ci/jenkins\" }' \
-            -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
-        }
+        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
+        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
+        sh 'rm -fr node_modules'
       }
     }
-    stage('Build') {
+    stage('Install') {
+      agent {
+        docker {
+          image 'node:6.11.2'
+          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
+          reuseNode true
+        }
+      }
       steps {
-        sh 'make build'
+        sh 'git config --global core.logallrefupdates false'
+        sh 'rm -fr node_modules'
+        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: '_docker-runner'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/docker-runner-sharelatex']]])
+        sh 'npm install ./_docker-runner'
+        sh 'rm -fr ./_docker-runner ./_docker-runner@tmp'
+        sh 'npm install'
+        sh 'npm rebuild'
+        sh 'npm install --quiet grunt-cli'
       }
     }
-    stage('Unit Tests') {
+    stage('Compile and Test') {
+      agent {
+        docker {
+          image 'node:6.11.2'
+          reuseNode true
+        }
+      }
       steps {
-        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
+        sh 'node_modules/.bin/grunt compile:app'
+        sh 'node_modules/.bin/grunt compile:acceptance_tests'
+        sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
      }
    }
    stage('Acceptance Tests') {
+      environment {
+        TEXLIVE_IMAGE="quay.io/sharelatex/texlive-full:2017.1"
+      }
      steps {
-        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
+        sh 'mkdir -p compiles cache'
+        // Not yet running, due to volumes/sibling containers
+        sh 'docker container prune -f'
+        sh 'docker pull $TEXLIVE_IMAGE'
+        sh 'docker pull sharelatex/acceptance-test-runner:clsi-6.11.2'
+        sh 'docker run --rm -e SIBLING_CONTAINER_USER=root -e SANDBOXED_COMPILES_HOST_DIR=$(pwd)/compiles -e SANDBOXED_COMPILES_SIBLING_CONTAINERS=true -e TEXLIVE_IMAGE=$TEXLIVE_IMAGE -v /var/run/docker.sock:/var/run/docker.sock -v $(pwd):/app sharelatex/acceptance-test-runner:clsi-6.11.2'
+        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
+        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
+        sh 'rm -r compiles cache server.log db.sqlite config/settings.defaults.coffee'
      }
    }
-    stage('Package') {
+    stage('Package and docker push') {
      steps {
        sh 'echo ${BUILD_NUMBER} > build_number.txt'
        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
-        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'
+        sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
+        withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
+          sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
+        }
+        sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
+        sh 'docker logout https://gcr.io/overleaf-ops'
      }
    }
-    stage('Publish') {
+    stage('Publish to s3') {
      steps {
+        sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
-        }
-        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          // The deployment process uses this file to figure out the latest build
          s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
        }
@@ -78,37 +82,11 @@ pipeline {
  }
  post {
-    always {
-      sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
-      sh 'make clean'
-    }
-    success {
-      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
-        sh "curl $GIT_API_URL \
-          --data '{ \
-          \"state\" : \"success\", \
-          \"target_url\": \"$TARGET_URL\", \
-          \"description\": \"Your build succeeded!\", \
-          \"context\": \"ci/jenkins\" }' \
-          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
-      }
-    }
    failure {
      mail(from: "${EMAIL_ALERT_FROM}",
           to: "${EMAIL_ALERT_TO}",
           subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
           body: "Build: ${BUILD_URL}")
-      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
-        sh "curl $GIT_API_URL \
-          --data '{ \
-          \"state\" : \"failure\", \
-          \"target_url\": \"$TARGET_URL\", \
-          \"description\": \"Your build failed\", \
-          \"context\": \"ci/jenkins\" }' \
-          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
-      }
    }
  }


@@ -1,51 +0,0 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.22

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = clsi
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker-compose ${DOCKER_COMPOSE_FLAGS}

clean:
	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	rm -f app.js
	rm -rf app/js
	rm -rf test/unit/js
	rm -rf test/acceptance/js

test: test_unit test_acceptance

test_unit:
	@[ ! -d test/unit ] && echo "clsi has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit

test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run

test_acceptance_run:
	@[ ! -d test/acceptance ] && echo "clsi has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance

test_clean:
	$(DOCKER_COMPOSE) down -v -t 0

test_acceptance_pre_run:
	@[ ! -f test/acceptance/js/scripts/pre-run ] && echo "clsi has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run

build:
	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		.

tar:
	$(DOCKER_COMPOSE) up tar

publish:
	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

.PHONY: clean test test_unit test_acceptance test_clean build publish
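These targets wrapped docker-compose; typical invocations (taken from how the old Jenkinsfile called them, with the CI compose file swapped in) looked like:

    DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit
    make build
    DOCKER_REPO=gcr.io/overleaf-ops make publish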


@@ -1,38 +1,16 @@
-overleaf/clsi
+clsi-sharelatex
 ===============
 A web api for compiling LaTeX documents in the cloud
-The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). The CLSI listens on the following ports by default:
-* TCP/3009 - the RESTful interface
-* TCP/3048 - reports load information
-* TCP/3049 - HTTP interface to control the CLSI service
-These defaults can be modified in `config/settings.defaults.coffee`.
-The provided `Dockerfile` builds a docker image which has the docker command line tools installed. The configuration in `docker-compose-config.yml` mounts the docker socket, in order that the CLSI container can talk to the docker host it is running in. This allows it to spin up `sibling containers` running an image with a TeX distribution installed to perform the actual compiles.
-The CLSI can be configured through the following environment variables:
-* `DOCKER_RUNNER` - Set to true to use sibling containers
-* `SYNCTEX_BIN_HOST_PATH` - Path to SyncTeX binary
-* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
-* `SQLITE_PATH` - Path to SQLite database
-* `TEXLIVE_IMAGE` - The TEXLIVE docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
-* `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TEXLIVE image. Defaults to `tex`
-* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the docker image e.g. `gcr.io/overleaf-ops`
-* `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g. `http://$FILESTORE_HOST:3009`
-* `STATSD_HOST` - The address of the Statsd service (used by the metrics module)
-* `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
-* `SMOKE_TEST` - Whether to run smoke tests
+[![Build Status](https://travis-ci.org/sharelatex/clsi-sharelatex.png?branch=master)](https://travis-ci.org/sharelatex/clsi-sharelatex)
 Installation
 ------------
-The CLSI can be installed and set up as part of the entire [Overleaf stack](https://github.com/overleaf/overleaf) (complete with front end editor and document storage), or it can be run as a standalone service. To run is as a standalone service, first checkout this repository:
-    $ git clone git@github.com:overleaf/clsi.git
+The CLSI can be installed and set up as part of the entire [ShareLaTeX stack](https://github.com/sharelatex/sharelatex) (complete with front end editor and document storage), or it can be run as a standalone service. To run is as a standalone service, first checkout this repository:
+    $ git clone git@github.com:sharelatex/clsi-sharelatex.git
 Then install the require npm modules:
@@ -114,4 +92,4 @@ License
 The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.
-Copyright (c) Overleaf, 2014-2019.
+Copyright (c) ShareLaTeX, 2014.
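For reference, a compile request against a locally running CLSI looks roughly like the following. The port is the 3013 default from this revision's app.coffee; the project id, document content, and the exact JSON schema are illustrative assumptions based on the CLSI's documented request format:

    curl -X POST -H 'Content-Type: application/json' \
      http://localhost:3013/project/demo-project/compile -d '{
        "compile": {
          "resources": [
            { "path": "main.tex",
              "content": "\\documentclass{article}\\begin{document}Hello\\end{document}" }
          ]
        }
      }'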


@@ -1,6 +1,3 @@
-Metrics = require "metrics-sharelatex"
-Metrics.initialize("clsi")
 CompileController = require "./app/js/CompileController"
 Settings = require "settings-sharelatex"
 logger = require "logger-sharelatex"
@@ -15,7 +12,8 @@ Errors = require './app/js/Errors'
 Path = require "path"
 fs = require "fs"
+Metrics = require "metrics-sharelatex"
+Metrics.initialize("clsi")
 Metrics.open_sockets.monitor(logger)
 Metrics.memory.monitor(logger)
@@ -28,17 +26,15 @@ express = require "express"
 bodyParser = require "body-parser"
 app = express()
-Metrics.injectMetricsRoute(app)
 app.use Metrics.http.monitor(logger)
 # Compile requests can take longer than the default two
 # minutes (including file download time), so bump up the
 # timeout a bit.
-TIMEOUT = 10 * 60 * 1000
+TIMEOUT = 6 * 60 * 1000
 app.use (req, res, next) ->
   req.setTimeout TIMEOUT
   res.setTimeout TIMEOUT
-  res.removeHeader("X-Powered-By")
   next()
 app.param 'project_id', (req, res, next, project_id) ->
@@ -60,7 +56,7 @@ app.param 'build_id', (req, res, next, build_id) ->
     next new Error("invalid build id #{build_id}")
-app.post "/project/:project_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
+app.post "/project/:project_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
 app.post "/project/:project_id/compile/stop", CompileController.stopCompile
 app.delete "/project/:project_id", CompileController.clearCache
@@ -70,7 +66,7 @@ app.get "/project/:project_id/wordcount", CompileController.wordcount
 app.get "/project/:project_id/status", CompileController.status
 # Per-user containers
-app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
+app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
 app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile
 app.delete "/project/:project_id/user/:user_id", CompileController.clearCache
@@ -143,10 +139,7 @@ app.get "/health_check", (req, res)->
   res.contentType(resCacher?.setContentType)
   res.status(resCacher?.code).send(resCacher?.body)
-app.get "/smoke_test_force", (req, res)->
-  smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)
-profiler = require "v8-profiler-node8"
+profiler = require "v8-profiler"
 app.get "/profile", (req, res) ->
   time = parseInt(req.query.time || "1000")
   profiler.startProfiling("test")
@@ -167,76 +160,8 @@ app.use (error, req, res, next) ->
   logger.error {err: error, url: req.url}, "server error"
   res.sendStatus(error?.statusCode || 500)
-net = require "net"
-os = require "os"
-STATE = "up"
-loadTcpServer = net.createServer (socket) ->
-  socket.on "error", (err)->
-    if err.code == "ECONNRESET"
-      # this always comes up, we don't know why
-      return
-    logger.err err:err, "error with socket on load check"
-    socket.destroy()
-  if STATE == "up" and Settings.internal.load_balancer_agent.report_load
-    currentLoad = os.loadavg()[0]
-    # staging clis's have 1 cpu core only
-    if os.cpus().length == 1
-      availableWorkingCpus = 1
-    else
-      availableWorkingCpus = os.cpus().length - 1
-    freeLoad = availableWorkingCpus - currentLoad
-    freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
-    if freeLoadPercentage <= 0
-      freeLoadPercentage = 1 # when its 0 the server is set to drain and will move projects to different servers
-    socket.write("up, #{freeLoadPercentage}%\n", "ASCII")
-    socket.end()
-  else
-    socket.write("#{STATE}\n", "ASCII")
-    socket.end()
-loadHttpServer = express()
-loadHttpServer.post "/state/up", (req, res, next) ->
-  STATE = "up"
-  logger.info "getting message to set server to down"
-  res.sendStatus 204
-loadHttpServer.post "/state/down", (req, res, next) ->
-  STATE = "down"
-  logger.info "getting message to set server to down"
-  res.sendStatus 204
-loadHttpServer.post "/state/maint", (req, res, next) ->
-  STATE = "maint"
-  logger.info "getting message to set server to maint"
-  res.sendStatus 204
-port = (Settings.internal?.clsi?.port or 3013)
-host = (Settings.internal?.clsi?.host or "localhost")
-load_tcp_port = Settings.internal.load_balancer_agent.load_port
-load_http_port = Settings.internal.load_balancer_agent.local_port
-if !module.parent # Called directly
-  app.listen port, host, (error) ->
-    logger.info "CLSI starting up, listening on #{host}:#{port}"
-  loadTcpServer.listen load_tcp_port, host, (error) ->
-    throw error if error?
-    logger.info "Load tcp agent listening on load port #{load_tcp_port}"
-  loadHttpServer.listen load_http_port, host, (error) ->
-    throw error if error?
-    logger.info "Load http agent listening on load port #{load_http_port}"
-module.exports = app
+app.listen port = (Settings.internal?.clsi?.port or 3013), host = (Settings.internal?.clsi?.host or "localhost"), (error) ->
+  logger.info "CLSI starting up, listening on #{host}:#{port}"
 setInterval () ->
   ProjectPersistenceManager.clearExpiredProjects()
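The load-balancer agent deleted above spoke a one-line TCP protocol plus a small HTTP control surface. Poking it looked roughly like this, with the port variables standing in for the settings values load_port and local_port:

    echo | nc localhost "$LOAD_TCP_PORT"                  # replies e.g. "up, 85%"
    curl -X POST localhost:"$LOAD_HTTP_PORT"/state/maint  # flips the reported state to "maint"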


@@ -1,11 +1,44 @@
-Settings = require "settings-sharelatex"
+spawn = require("child_process").spawn
 logger = require "logger-sharelatex"
-if Settings.clsi?.dockerRunner == true
-  commandRunnerPath = "./DockerRunner"
-else
-  commandRunnerPath = "./LocalCommandRunner"
-logger.info commandRunnerPath:commandRunnerPath, "selecting command runner for clsi"
-CommandRunner = require(commandRunnerPath)
-module.exports = CommandRunner
+logger.info "using standard command runner"
+module.exports = CommandRunner =
+  run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
+    command = (arg.replace('$COMPILE_DIR', directory) for arg in command)
+    logger.log project_id: project_id, command: command, directory: directory, "running command"
+    logger.warn "timeouts and sandboxing are not enabled with CommandRunner"
+    # merge environment settings
+    env = {}
+    env[key] = value for key, value of process.env
+    env[key] = value for key, value of environment
+    # run command as detached process so it has its own process group (which can be killed if needed)
+    proc = spawn command[0], command.slice(1), stdio: "inherit", cwd: directory, detached: true, env: env
+    proc.on "error", (err)->
+      logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command"
+      callback(err)
+    proc.on "close", (code, signal) ->
+      logger.info code:code, signal:signal, project_id:project_id, "command exited"
+      if signal is 'SIGTERM' # signal from kill method below
+        err = new Error("terminated")
+        err.terminated = true
+        return callback(err)
+      else if code is 1 # exit status from chktex
+        err = new Error("exited")
+        err.code = code
+        return callback(err)
+      else
+        callback()
+    return proc.pid # return process id to allow job to be killed if necessary
+  kill: (pid, callback = (error) ->) ->
+    try
+      process.kill -pid # kill all processes in group
+    catch err
+      return callback(err)
+    callback()
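The detached spawn plus process.kill(-pid) pair above is the standard process-group idiom: spawning detached makes the child the leader of a new group, and signalling the negated group id reaches every descendant. A shell sketch of the same behaviour, with the command name illustrative:

    setsid sh -c 'long_running_compile & wait' &   # new process group; leader pid == group id
    PGID=$!
    kill -TERM -- -"$PGID"                         # negative id signals the whole group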


@@ -33,17 +33,14 @@ module.exports = CompileController =
       else
         status = "error"
         code = 500
-        logger.warn err: error, project_id: request.project_id, "error running compile"
+        logger.error err: error, project_id: request.project_id, "error running compile"
     else
       status = "failure"
       for file in outputFiles
         if file.path?.match(/output\.pdf$/)
           status = "success"
-      if status == "failure"
-        logger.warn project_id: request.project_id, outputFiles:outputFiles, "project failed to compile successfully, no output.pdf generated"
+        if file.path?.match(/output\.html$/)
+          status = "success"
       # log an error if any core files are found
       for file in outputFiles
         if file.path is "core"
@@ -53,7 +50,7 @@
     res.status(code or 200).send {
       compile:
         status: status
         error: error?.message or error
         outputFiles: outputFiles.map (file) ->
           url:
             "#{Settings.apis.clsi.url}/project/#{request.project_id}" +
@@ -82,9 +79,10 @@
     column = parseInt(req.query.column, 10)
     project_id = req.params.project_id
     user_id = req.params.user_id
     CompileManager.syncFromCode project_id, user_id, file, line, column, (error, pdfPositions) ->
       return next(error) if error?
-      res.json {
+      res.send JSON.stringify {
         pdf: pdfPositions
       }
@@ -94,9 +92,10 @@
     v = parseFloat(req.query.v)
     project_id = req.params.project_id
     user_id = req.params.user_id
     CompileManager.syncFromPdf project_id, user_id, page, h, v, (error, codePositions) ->
       return next(error) if error?
-      res.json {
+      res.send JSON.stringify {
         code: codePositions
       }
@@ -109,7 +108,7 @@
     CompileManager.wordcount project_id, user_id, file, image, (error, result) ->
       return next(error) if error?
-      res.json {
+      res.send JSON.stringify {
         texcount: result
       }


@@ -15,7 +15,10 @@ fse = require "fs-extra"
 os = require("os")
 async = require "async"
 Errors = require './Errors'
-CommandRunner = require "./CommandRunner"
+commandRunner = Settings.clsi?.commandRunner or "./CommandRunner"
+logger.info commandRunner:commandRunner, "selecting command runner for clsi"
+CommandRunner = require(commandRunner)
 getCompileName = (project_id, user_id) ->
   if user_id? then "#{project_id}-#{user_id}" else project_id
@@ -38,6 +41,7 @@ module.exports = CompileManager =
   doCompile: (request, callback = (error, outputFiles) ->) ->
     compileDir = getCompileDir(request.project_id, request.user_id)
     timer = new Metrics.Timer("write-to-disk")
     logger.log project_id: request.project_id, user_id: request.user_id, "syncing resources to disk"
     ResourceWriter.syncResourcesToDisk request, compileDir, (error, resourceList) ->
@@ -58,9 +62,9 @@
         callback()
     createTikzFileIfRequired = (callback) ->
-      TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, needsMainFile) ->
+      TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, usesTikzExternalize) ->
         return callback(error) if error?
-        if needsMainFile
+        if usesTikzExternalize
           TikzManager.injectOutputFile compileDir, request.rootResourcePath, callback
         else
           callback()
@@ -93,7 +97,6 @@
       compiler: request.compiler
       timeout: request.timeout
      image: request.imageName
-      flags: request.flags
      environment: env
    }, (error, output, stats, timings) ->
      # request was for validation only
@@ -131,7 +134,7 @@
      return callback(error) if error?
      OutputCacheManager.saveOutputFiles outputFiles, compileDir, (error, newOutputFiles) ->
        callback null, newOutputFiles
  stopCompile: (project_id, user_id, callback = (error) ->) ->
    compileName = getCompileName(project_id, user_id)
    LatexRunner.killLatex compileName, callback
@@ -202,31 +205,21 @@
    base_dir = Settings.path.synctexBaseDir(compileName)
    file_path = base_dir + "/" + file_name
    compileDir = getCompileDir(project_id, user_id)
-    synctex_path = "#{base_dir}/output.pdf"
-    command = ["code", synctex_path, file_path, line, column]
-    fse.ensureDir compileDir, (error) ->
-      if error?
-        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
-        return callback(error)
-      CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
-        return callback(error) if error?
-        logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, command:command, stdout: stdout, "synctex code output"
-        callback null, CompileManager._parseSynctexFromCodeOutput(stdout)
+    synctex_path = Path.join(compileDir, "output.pdf")
+    CompileManager._runSynctex ["code", synctex_path, file_path, line, column], (error, stdout) ->
+      return callback(error) if error?
+      logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, stdout: stdout, "synctex code output"
+      callback null, CompileManager._parseSynctexFromCodeOutput(stdout)
  syncFromPdf: (project_id, user_id, page, h, v, callback = (error, filePositions) ->) ->
    compileName = getCompileName(project_id, user_id)
-    compileDir = getCompileDir(project_id, user_id)
    base_dir = Settings.path.synctexBaseDir(compileName)
-    synctex_path = "#{base_dir}/output.pdf"
-    command = ["pdf", synctex_path, page, h, v]
-    fse.ensureDir compileDir, (error) ->
-      if error?
-        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync to code"
-        return callback(error)
-      CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
-        return callback(error) if error?
-        logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
-        callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
+    compileDir = getCompileDir(project_id, user_id)
+    synctex_path = Path.join(compileDir, "output.pdf")
+    CompileManager._runSynctex ["pdf", synctex_path, page, h, v], (error, stdout) ->
+      return callback(error) if error?
+      logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
+      callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
  _checkFileExists: (path, callback = (error) ->) ->
    synctexDir = Path.dirname(path)
@@ -242,19 +235,19 @@
    return callback(new Error("not a file")) if not stats?.isFile()
    callback()
-  _runSynctex: (project_id, user_id, command, callback = (error, stdout) ->) ->
+  _runSynctex: (args, callback = (error, stdout) ->) ->
+    bin_path = Path.resolve(__dirname + "/../../bin/synctex")
    seconds = 1000
-    command.unshift("/opt/synctex")
-    directory = getCompileDir(project_id, user_id)
-    timeout = 60 * 1000 # increased to allow for large projects
-    compileName = getCompileName(project_id, user_id)
-    CommandRunner.run compileName, command, directory, Settings.clsi?.docker.image, timeout, {}, (error, output) ->
-      if error?
-        logger.err err:error, command:command, project_id:project_id, user_id:user_id, "error running synctex"
-        return callback(error)
-      callback(null, output.stdout)
+    outputFilePath = args[1]
+    CompileManager._checkFileExists outputFilePath, (error) ->
+      return callback(error) if error?
+      if Settings.clsi?.synctexCommandWrapper?
+        [bin_path, args] = Settings.clsi?.synctexCommandWrapper bin_path, args
+      child_process.execFile bin_path, args, timeout: 10 * seconds, (error, stdout, stderr) ->
+        if error?
+          logger.err err:error, args:args, "error running synctex"
+          return callback(error)
+        callback(null, stdout)
@@ -283,28 +276,23 @@
    }
    return results
  wordcount: (project_id, user_id, file_name, image, callback = (error, pdfPositions) ->) ->
    logger.log project_id:project_id, user_id:user_id, file_name:file_name, image:image, "running wordcount"
    file_path = "$COMPILE_DIR/" + file_name
    command = [ "texcount", '-nocol', '-inc', file_path, "-out=" + file_path + ".wc"]
-    compileDir = getCompileDir(project_id, user_id)
-    timeout = 60 * 1000
+    directory = getCompileDir(project_id, user_id)
+    timeout = 10 * 1000
    compileName = getCompileName(project_id, user_id)
-    fse.ensureDir compileDir, (error) ->
-      if error?
-        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
-        return callback(error)
-      CommandRunner.run compileName, command, compileDir, image, timeout, {}, (error) ->
-        return callback(error) if error?
-        fs.readFile compileDir + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
-          if err?
-            #call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
-            logger.err node_err:err, command:command, compileDir:compileDir, project_id:project_id, user_id:user_id, "error reading word count output"
-            return callback(err)
-          results = CompileManager._parseWordcountFromOutput(stdout)
-          logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results"
-          callback null, results
+    CommandRunner.run compileName, command, directory, image, timeout, {}, (error) ->
+      return callback(error) if error?
+      fs.readFile directory + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
+        if err?
+          logger.err err:err, command:command, directory:directory, project_id:project_id, user_id:user_id, "error reading word count output"
+          return callback(err)
+        results = CompileManager._parseWordcountFromOutput(stdout)
+        logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results"
+        callback null, results
  _parseWordcountFromOutput: (output) ->
    results = {
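With this change, _runSynctex shells out to the locally built bin/synctex (the Gruntfile's compile:bin target) instead of running SyncTeX inside the compile container. Illustrative calls matching the two argument vectors above, with made-up project paths and coordinates:

    ./bin/synctex code compiles/demo-project/output.pdf compiles/demo-project/main.tex 42 8
    ./bin/synctex pdf compiles/demo-project/output.pdf 1 100.5 200.5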


@@ -1,13 +0,0 @@
async = require "async"
Settings = require "settings-sharelatex"
logger = require("logger-sharelatex")

queue = async.queue((task, cb)->
  task(cb)
, Settings.parallelSqlQueryLimit)

queue.drain = ()->
  logger.debug('all items have been processed')

module.exports =
  queue: queue


@@ -1,56 +0,0 @@
logger = require "logger-sharelatex"

LockState = {} # locks for docker container operations, by container name

module.exports = LockManager =
  MAX_LOCK_HOLD_TIME: 15000 # how long we can keep a lock
  MAX_LOCK_WAIT_TIME: 10000 # how long we wait for a lock
  LOCK_TEST_INTERVAL: 1000 # retry time

  tryLock: (key, callback = (err, gotLock) ->) ->
    existingLock = LockState[key]
    if existingLock? # the lock is already taken, check how old it is
      lockAge = Date.now() - existingLock.created
      if lockAge < LockManager.MAX_LOCK_HOLD_TIME
        return callback(null, false) # we didn't get the lock, bail out
      else
        logger.error {key: key, lock: existingLock, age:lockAge}, "taking old lock by force"
    # take the lock
    LockState[key] = lockValue = {created: Date.now()}
    callback(null, true, lockValue)

  getLock: (key, callback = (error, lockValue) ->) ->
    startTime = Date.now()
    do attempt = () ->
      LockManager.tryLock key, (error, gotLock, lockValue) ->
        return callback(error) if error?
        if gotLock
          callback(null, lockValue)
        else if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME
          e = new Error("Lock timeout")
          e.key = key
          return callback(e)
        else
          setTimeout attempt, LockManager.LOCK_TEST_INTERVAL

  releaseLock: (key, lockValue, callback = (error) ->) ->
    existingLock = LockState[key]
    if existingLock is lockValue # lockValue is an object, so we can test by reference
      delete LockState[key] # our lock, so we can free it
      callback()
    else if existingLock? # lock exists but doesn't match ours
      logger.error {key:key, lock: existingLock}, "tried to release lock taken by force"
      callback()
    else
      logger.error {key:key, lock: existingLock}, "tried to release lock that has gone"
      callback()

  runWithLock: (key, runner = ( (releaseLock = (error) ->) -> ), callback = ( (error) -> )) ->
    LockManager.getLock key, (error, lockValue) ->
      return callback(error) if error?
      runner (error1, args...) ->
        LockManager.releaseLock key, lockValue, (error2) ->
          error = error1 or error2
          return callback(error) if error?
          callback(null, args...)


@@ -1,358 +0,0 @@
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Docker = require("dockerode")
dockerode = new Docker()
crypto = require "crypto"
async = require "async"
LockManager = require "./DockerLockManager"
fs = require "fs"
Path = require 'path'
_ = require "underscore"

logger.info "using docker runner"

usingSiblingContainers = () ->
  Settings?.path?.sandboxedCompilesHostDir?

module.exports = DockerRunner =
  ERR_NOT_DIRECTORY: new Error("not a directory")
  ERR_TERMINATED: new Error("terminated")
  ERR_EXITED: new Error("exited")
  ERR_TIMED_OUT: new Error("container timed out")

  run: (project_id, command, directory, image, timeout, environment, callback = (error, output) ->) ->
    if usingSiblingContainers()
      _newPath = Settings.path.sandboxedCompilesHostDir
      logger.log {path: _newPath}, "altering bind path for sibling containers"
      # Server Pro, example:
      # '/var/lib/sharelatex/data/compiles/<project-id>'
      # ... becomes ...
      # '/opt/sharelatex_data/data/compiles/<project-id>'
      directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory))

    volumes = {}
    volumes[directory] = "/compile"

    command = (arg.toString().replace?('$COMPILE_DIR', "/compile") for arg in command)
    if !image?
      image = Settings.clsi.docker.image

    if Settings.texliveImageNameOveride?
      img = image.split("/")
      image = "#{Settings.texliveImageNameOveride}/#{img[2]}"

    options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment)
    fingerprint = DockerRunner._fingerprintContainer(options)
    options.name = name = "project-#{project_id}-#{fingerprint}"

    # logOptions = _.clone(options)
    # logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
    logger.log project_id: project_id, "running docker container"
    DockerRunner._runAndWaitForContainer options, volumes, timeout, (error, output) ->
      if error?.message?.match("HTTP code is 500")
        logger.log err: error, project_id: project_id, "error running container so destroying and retrying"
        DockerRunner.destroyContainer name, null, true, (error) ->
          return callback(error) if error?
          DockerRunner._runAndWaitForContainer options, volumes, timeout, callback
      else
        callback(error, output)

    return name # pass back the container name to allow it to be killed

  kill: (container_id, callback = (error) ->) ->
    logger.log container_id: container_id, "sending kill signal to container"
    container = dockerode.getContainer(container_id)
    container.kill (error) ->
      if error? and error?.message?.match?(/Cannot kill container .* is not running/)
        logger.warn err: error, container_id: container_id, "container not running, continuing"
        error = null
      if error?
        logger.error err: error, container_id: container_id, "error killing container"
        return callback(error)
      else
        callback()

  _runAndWaitForContainer: (options, volumes, timeout, _callback = (error, output) ->) ->
    callback = (args...) ->
      _callback(args...)
      # Only call the callback once
      _callback = () ->

    name = options.name

    streamEnded = false
    containerReturned = false
    output = {}

    callbackIfFinished = () ->
      if streamEnded and containerReturned
        callback(null, output)

    attachStreamHandler = (error, _output) ->
      return callback(error) if error?
      output = _output
      streamEnded = true
      callbackIfFinished()

    DockerRunner.startContainer options, volumes, attachStreamHandler, (error, containerId) ->
      return callback(error) if error?
      DockerRunner.waitForContainer name, timeout, (error, exitCode) ->
        return callback(error) if error?
        if exitCode is 137 # exit status from kill -9
          err = DockerRunner.ERR_TERMINATED
          err.terminated = true
          return callback(err)
        if exitCode is 1 # exit status from chktex
          err = DockerRunner.ERR_EXITED
          err.code = exitCode
          return callback(err)
        containerReturned = true
        options?.HostConfig?.SecurityOpt = null #small log line
        logger.log err:err, exitCode:exitCode, options:options, "docker container has exited"
        callbackIfFinished()

  _getContainerOptions: (command, image, volumes, timeout, environment) ->
    timeoutInSeconds = timeout / 1000

    dockerVolumes = {}
    for hostVol, dockerVol of volumes
      dockerVolumes[dockerVol] = {}
      if volumes[hostVol].slice(-3).indexOf(":r") == -1
        volumes[hostVol] = "#{dockerVol}:rw"

    # merge settings and environment parameter
    env = {}
    for src in [Settings.clsi.docker.env, environment or {}]
      env[key] = value for key, value of src
    # set the path based on the image year
    if m = image.match /:([0-9]+)\.[0-9]+/
      year = m[1]
    else
      year = "2014"
    env['PATH'] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/#{year}/bin/x86_64-linux/"
    options =
      "Cmd" : command,
      "Image" : image
      "Volumes" : dockerVolumes
      "WorkingDir" : "/compile"
      "NetworkDisabled" : true
      "Memory" : 1024 * 1024 * 1024 * 1024 # 1 Gb
      "User" : Settings.clsi.docker.user
      "Env" : ("#{key}=#{value}" for key, value of env) # convert the environment hash to an array
      "HostConfig" :
        "Binds": ("#{hostVol}:#{dockerVol}" for hostVol, dockerVol of volumes)
        "LogConfig": {"Type": "none", "Config": {}}
        "Ulimits": [{'Name': 'cpu', 'Soft': timeoutInSeconds+5, 'Hard': timeoutInSeconds+10}]
        "CapDrop": "ALL"
        "SecurityOpt": ["no-new-privileges"]

    if Settings.path?.synctexBinHostPath?
      options["HostConfig"]["Binds"].push("#{Settings.path.synctexBinHostPath}:/opt/synctex:ro")

    if Settings.clsi.docker.seccomp_profile?
      options.HostConfig.SecurityOpt.push "seccomp=#{Settings.clsi.docker.seccomp_profile}"

    return options

  _fingerprintContainer: (containerOptions) ->
    # Yay, Hashing!
    json = JSON.stringify(containerOptions)
    return crypto.createHash("md5").update(json).digest("hex")

  startContainer: (options, volumes, attachStreamHandler, callback) ->
    LockManager.runWithLock options.name, (releaseLock) ->
      # Check that volumes exist before starting the container.
      # When a container is started with volume pointing to a
      # non-existent directory then docker creates the directory but
      # with root ownership.
      DockerRunner._checkVolumes options, volumes, (err) ->
        return releaseLock(err) if err?
        DockerRunner._startContainer options, volumes, attachStreamHandler, releaseLock
    , callback

  # Check that volumes exist and are directories
  _checkVolumes: (options, volumes, callback = (error, containerName) ->) ->
    if usingSiblingContainers()
      # Server Pro, with sibling-containers active, skip checks
      return callback(null)

    checkVolume = (path, cb) ->
      fs.stat path, (err, stats) ->
        return cb(err) if err?
        return cb(DockerRunner.ERR_NOT_DIRECTORY) if not stats?.isDirectory()
        cb()
    jobs = []
    for vol of volumes
      do (vol) ->
        jobs.push (cb) -> checkVolume(vol, cb)
    async.series jobs, callback

  _startContainer: (options, volumes, attachStreamHandler, callback = ((error, output) ->)) ->
    callback = _.once(callback)
    name = options.name

    logger.log {container_name: name}, "starting container"
    container = dockerode.getContainer(name)

    createAndStartContainer = ->
      dockerode.createContainer options, (error, container) ->
        return callback(error) if error?
        startExistingContainer()

    startExistingContainer = ->
      DockerRunner.attachToContainer options.name, attachStreamHandler, (error)->
        return callback(error) if error?
        container.start (error) ->
          if error? and error?.statusCode != 304 #already running
            return callback(error)
          else
            callback()

    container.inspect (error, stats)->
      if error?.statusCode == 404
        createAndStartContainer()
      else if error?
        logger.err {container_name: name, error:error}, "unable to inspect container to start"
        return callback(error)
      else
        startExistingContainer()

  attachToContainer: (containerId, attachStreamHandler, attachStartCallback) ->
    container = dockerode.getContainer(containerId)
    container.attach {stdout: 1, stderr: 1, stream: 1}, (error, stream) ->
      if error?
        logger.error err: error, container_id: containerId, "error attaching to container"
        return attachStartCallback(error)
      else
        attachStartCallback()

      logger.log container_id: containerId, "attached to container"

      MAX_OUTPUT = 1024 * 1024 # limit output to 1MB
      createStringOutputStream = (name) ->
        return {
          data: ""
          overflowed: false
          write: (data) ->
            return if @overflowed
            if @data.length < MAX_OUTPUT
              @data += data
            else
              logger.error container_id: containerId, length: @data.length, maxLen: MAX_OUTPUT, "#{name} exceeds max size"
              @data += "(...truncated at #{MAX_OUTPUT} chars...)"
              @overflowed = true
          # kill container if too much output
          # docker.containers.kill(containerId, () ->)
        }

      stdout = createStringOutputStream "stdout"
      stderr = createStringOutputStream "stderr"

      container.modem.demuxStream(stream, stdout, stderr)

      stream.on "error", (err) ->
        logger.error err: err, container_id: containerId, "error reading from container stream"

      stream.on "end", () ->
        attachStreamHandler null, {stdout: stdout.data, stderr: stderr.data}

  waitForContainer: (containerId, timeout, _callback = (error, exitCode) ->) ->
    callback = (args...) ->
      _callback(args...)
      # Only call the callback once
      _callback = () ->

    container = dockerode.getContainer(containerId)

    timedOut = false
    timeoutId = setTimeout () ->
      timedOut = true
      logger.log container_id: containerId, "timeout reached, killing container"
      container.kill(() ->)
    , timeout

    logger.log container_id: containerId, "waiting for docker container"
    container.wait (error, res) ->
      if error?
        clearTimeout timeoutId
        logger.error err: error, container_id: containerId, "error waiting for container"
        return callback(error)
      if timedOut
        logger.log containerId: containerId, "docker container timed out"
        error = DockerRunner.ERR_TIMED_OUT
        error.timedout = true
        callback error
      else
        clearTimeout timeoutId
        logger.log container_id: containerId, exitCode: res.StatusCode, "docker container returned"
        callback null, res.StatusCode

  destroyContainer: (containerName, containerId, shouldForce, callback = (error) ->) ->
    # We want the containerName for the lock and, ideally, the
    # containerId to delete. There is a bug in the docker.io module
    # where if you delete by name and there is an error, it throws an
    # async exception, but if you delete by id it just does a normal
    # error callback. We fall back to deleting by name if no id is
    # supplied.
    LockManager.runWithLock containerName, (releaseLock) ->
      DockerRunner._destroyContainer containerId or containerName, shouldForce, releaseLock
    , callback

  _destroyContainer: (containerId, shouldForce, callback = (error) ->) ->
    logger.log container_id: containerId, "destroying docker container"
    container = dockerode.getContainer(containerId)
    container.remove {force: shouldForce == true}, (error) ->
      if error? and error?.statusCode == 404
        logger.warn err: error, container_id: containerId, "container not found, continuing"
        error = null
      if error?
        logger.error err: error, container_id: containerId, "error destroying container"
      else
        logger.log container_id: containerId, "destroyed container"
      callback(error)

  # handle expiry of docker containers
  MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge or oneHour = 60 * 60 * 1000

  examineOldContainer: (container, callback = (error, name, id, ttl)->) ->
    name = container.Name or container.Names?[0]
    created = container.Created * 1000 # creation time is returned in seconds
    now = Date.now()
    age = now - created
    maxAge = DockerRunner.MAX_CONTAINER_AGE
    ttl = maxAge - age
    logger.log {containerName: name, created: created, now: now, age: age, maxAge: maxAge, ttl: ttl}, "checking whether to destroy container"
    callback(null, name, container.Id, ttl)

  destroyOldContainers: (callback = (error) ->) ->
    dockerode.listContainers all: true, (error, containers) ->
      return callback(error) if error?
      jobs = []
      for container in containers or []
        do (container) ->
          DockerRunner.examineOldContainer container, (err, name, id, ttl) ->
            if name.slice(0, 9) == '/project-' && ttl <= 0
              jobs.push (cb) ->
                DockerRunner.destroyContainer name, id, false, () -> cb()
      # Ignore errors because some containers get stuck but
      # will be destroyed next time
      async.series jobs, callback

  startContainerMonitor: () ->
    logger.log {maxAge: DockerRunner.MAX_CONTAINER_AGE}, "starting container expiry"
    # randomise the start time
    randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
    setTimeout () ->
      setInterval () ->
        DockerRunner.destroyOldContainers()
      , oneHour = 60 * 60 * 1000
    , randomDelay

DockerRunner.startContainerMonitor()
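The container options assembled in _getContainerOptions correspond, roughly, to the docker CLI call below. Image, paths, and user are illustrative; the memory figure follows the code's "1 Gb" comment rather than its literal 1024^4 value; and the real runner creates named, reusable containers via dockerode rather than using docker run:

    docker run --rm \
      -v "$PWD/compiles/demo-project:/compile:rw" \
      -w /compile --network none --memory 1g \
      --user tex --cap-drop ALL --security-opt no-new-privileges \
      quay.io/sharelatex/texlive-full:2017.1 \
      latexmk -cd -f -jobname=output -auxdir=/compile -outdir=/compile \
        -synctex=1 -interaction=batchmode -pdf /compile/main.tex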


@@ -2,36 +2,42 @@ Path = require "path"
Settings = require "settings-sharelatex" Settings = require "settings-sharelatex"
logger = require "logger-sharelatex" logger = require "logger-sharelatex"
Metrics = require "./Metrics" Metrics = require "./Metrics"
CommandRunner = require "./CommandRunner" CommandRunner = require(Settings.clsi?.commandRunner or "./CommandRunner")
ProcessTable = {} # table of currently running jobs (pids or docker container names) ProcessTable = {} # table of currently running jobs (pids or docker container names)
module.exports = LatexRunner = module.exports = LatexRunner =
runLatex: (project_id, options, callback = (error) ->) -> runLatex: (project_id, options, callback = (error) ->) ->
{directory, mainFile, compiler, timeout, image, environment, flags} = options {directory, mainFile, compiler, timeout, image, environment} = options
compiler ||= "pdflatex" compiler ||= "pdflatex"
timeout ||= 60000 # milliseconds timeout ||= 60000 # milliseconds
logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, flags:flags, "starting compile" logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, "starting compile"
# We want to run latexmk on the tex file which we will automatically # We want to run latexmk on the tex file which we will automatically
# generate from the Rtex/Rmd/md file. # generate from the Rtex/Rmd/md file.
mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex") mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".md")
if compiler == "pdflatex" if compiler == "pdflatex"
command = LatexRunner._pdflatexCommand mainFile, flags command = LatexRunner._pdflatexCommand mainFile
else if compiler == "latex" else if compiler == "latex"
command = LatexRunner._latexCommand mainFile, flags command = LatexRunner._latexCommand mainFile
else if compiler == "xelatex" else if compiler == "xelatex"
command = LatexRunner._xelatexCommand mainFile, flags command = LatexRunner._xelatexCommand mainFile
else if compiler == "lualatex" else if compiler == "lualatex"
command = LatexRunner._lualatexCommand mainFile, flags command = LatexRunner._lualatexCommand mainFile
else else
return callback new Error("unknown compiler: #{compiler}") return callback new Error("unknown compiler: #{compiler}")
if Settings.clsi?.strace if Settings.clsi?.strace
command = ["strace", "-o", "strace", "-ff"].concat(command) command = ["strace", "-o", "strace", "-ff"].concat(command)
# ignore the above and make a pandoc command
console.log(mainFile)
console.log(image)
image = "ivotron/pandoc"
command = ["-o", "$COMPILE_DIR/output.html", "/compile/" + mainFile]
id = "#{project_id}" # record running project under this id id = "#{project_id}" # record running project under this id
ProcessTable[id] = CommandRunner.run project_id, command, directory, image, timeout, environment, (error, output) -> ProcessTable[id] = CommandRunner.run project_id, command, directory, image, timeout, environment, (error, output) ->
@@ -63,32 +69,31 @@ module.exports = LatexRunner =
     else
       CommandRunner.kill ProcessTable[id], callback
-  _latexmkBaseCommand: (flags) ->
-    args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1","-interaction=batchmode"]
-    if flags
-      args = args.concat(flags)
-    (Settings?.clsi?.latexmkCommandPrefix || []).concat(args)
+  _latexmkBaseCommand: (Settings?.clsi?.latexmkCommandPrefix || []).concat([
+    "latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR",
+    "-synctex=1","-interaction=batchmode"
+  ])
-  _pdflatexCommand: (mainFile, flags) ->
-    LatexRunner._latexmkBaseCommand(flags).concat [
+  _pdflatexCommand: (mainFile) ->
+    LatexRunner._latexmkBaseCommand.concat [
       "-pdf",
       Path.join("$COMPILE_DIR", mainFile)
     ]
-  _latexCommand: (mainFile, flags) ->
-    LatexRunner._latexmkBaseCommand(flags).concat [
+  _latexCommand: (mainFile) ->
+    LatexRunner._latexmkBaseCommand.concat [
       "-pdfdvi",
       Path.join("$COMPILE_DIR", mainFile)
     ]
-  _xelatexCommand: (mainFile, flags) ->
-    LatexRunner._latexmkBaseCommand(flags).concat [
+  _xelatexCommand: (mainFile) ->
+    LatexRunner._latexmkBaseCommand.concat [
       "-xelatex",
       Path.join("$COMPILE_DIR", mainFile)
     ]
-  _lualatexCommand: (mainFile, flags) ->
-    LatexRunner._latexmkBaseCommand(flags).concat [
+  _lualatexCommand: (mainFile) ->
+    LatexRunner._latexmkBaseCommand.concat [
      "-lualatex",
       Path.join("$COMPILE_DIR", mainFile)
     ]
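A note on the pandoc hack in the first hunk above: the command array deliberately has no executable at its front, since the ivotron/pandoc image appears to supply pandoc as its entrypoint, and $COMPILE_DIR is substituted by the CommandRunner (the standard runner in the next file does it with a plain string replace). A minimal sketch of the resolved call, with an illustrative project directory:

directory = "/app/compiles/project-123"   # illustrative path
command = ["-o", "$COMPILE_DIR/output.html", "/compile/main.md"]
resolved = (arg.toString().replace("$COMPILE_DIR", directory) for arg in command)
# => ["-o", "/app/compiles/project-123/output.html", "/compile/main.md"]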

View File

@@ -1,48 +0,0 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
logger.info "using standard command runner"
module.exports = CommandRunner =
run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
command = (arg.toString().replace('$COMPILE_DIR', directory) for arg in command)
logger.log project_id: project_id, command: command, directory: directory, "running command"
logger.warn "timeouts and sandboxing are not enabled with CommandRunner"
# merge environment settings
env = {}
env[key] = value for key, value of process.env
env[key] = value for key, value of environment
# run command as detached process so it has its own process group (which can be killed if needed)
proc = spawn command[0], command.slice(1), cwd: directory, env: env
stdout = ""
proc.stdout.on "data", (data)->
stdout += data
proc.on "error", (err)->
logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command"
callback(err)
proc.on "close", (code, signal) ->
logger.info code:code, signal:signal, project_id:project_id, "command exited"
if signal is 'SIGTERM' # signal from kill method below
err = new Error("terminated")
err.terminated = true
return callback(err)
else if code is 1 # exit status from chktex
err = new Error("exited")
err.code = code
return callback(err)
else
callback(null, {"stdout": stdout})
return proc.pid # return process id to allow job to be killed if necessary
kill: (pid, callback = (error) ->) ->
try
process.kill -pid # kill all processes in group
catch err
return callback(err)
callback()
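The kill method above only works if the child owns its process group, so that signalling the negative pid tears down latexmk and everything it forked. The spawn call as written never actually passes detached: true, despite its comment; a sketch of what the comment intends, under that assumption:

{spawn} = require "child_process"
proc = spawn "sleep", ["60"], detached: true   # own process group, pgid == pid
process.kill -proc.pid, "SIGTERM"              # signals the whole group at once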

View File

@@ -2,8 +2,7 @@ Settings = require('settings-sharelatex')
 logger = require "logger-sharelatex"
 Lockfile = require('lockfile') # from https://github.com/npm/lockfile
 Errors = require "./Errors"
-fs = require("fs")
-Path = require("path")
 module.exports = LockManager =
   LOCK_TEST_INTERVAL: 1000 # 50ms between each test of the lock
   MAX_LOCK_WAIT_TIME: 15000 # 10s maximum time to spend trying to get the lock
@@ -15,17 +14,10 @@ module.exports = LockManager =
       pollPeriod: @LOCK_TEST_INTERVAL
       stale: @LOCK_STALE
     Lockfile.lock path, lockOpts, (error) ->
-      if error?.code is 'EEXIST'
-        return callback new Errors.AlreadyCompilingError("compile in progress")
-      else if error?
-        fs.lstat path, (statLockErr, statLock)->
-          fs.lstat Path.dirname(path), (statDirErr, statDir)->
-            fs.readdir Path.dirname(path), (readdirErr, readdirDir)->
-              logger.err error:error, path:path, statLock:statLock, statLockErr:statLockErr, statDir:statDir, statDirErr: statDirErr, readdirErr:readdirErr, readdirDir:readdirDir, "unable to get lock"
-              return callback(error)
-      else
-        runner (error1, args...) ->
-          Lockfile.unlock path, (error2) ->
-            error = error1 or error2
-            return callback(error) if error?
-            callback(null, args...)
+      return callback new Errors.AlreadyCompilingError("compile in progress") if error?.code is 'EEXIST'
+      return callback(error) if error?
+      runner (error1, args...) ->
+        Lockfile.unlock path, (error2) ->
+          error = error1 or error2
+          return callback(error) if error?
+          callback(null, args...)
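Both sides of this hunk feed the same options into Lockfile.lock; a sketch of the behaviour those constants buy (the stale value is assumed, LOCK_STALE is not shown in this diff):

Lockfile = require "lockfile"
lockOpts =
  wait: 15000           # MAX_LOCK_WAIT_TIME: give up waiting after 15s
  pollPeriod: 1000      # LOCK_TEST_INTERVAL: re-test the lock every second
  stale: 5 * 60 * 1000  # assumed LOCK_STALE value
Lockfile.lock "project.lock", lockOpts, (error) ->
  if error?.code is "EEXIST"
    console.log "another compile holds the lock"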

View File

@@ -10,6 +10,8 @@ module.exports = OutputFileFinder =
     for resource in resources
       incomingResources[resource.path] = true
+    logger.log directory: directory, "getting output files"
     OutputFileFinder._getAllFiles directory, (error, allFiles = []) ->
       if error?
         logger.err err:error, "error finding all output files"

View File

@@ -1,7 +1,6 @@
 UrlCache = require "./UrlCache"
 CompileManager = require "./CompileManager"
 db = require "./db"
-dbQueue = require "./DbQueue"
 async = require "async"
 logger = require "logger-sharelatex"
 oneDay = 24 * 60 * 60 * 1000
@@ -12,17 +11,14 @@ module.exports = ProjectPersistenceManager =
   EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5
   markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
-    job = (cb)->
-      db.Project.findOrCreate(where: {project_id: project_id})
-      .spread(
-        (project, created) ->
-          project.updateAttributes(lastAccessed: new Date())
-          .then(() -> cb())
-          .error cb
-      )
-      .error cb
-    dbQueue.queue.push(job, callback)
+    db.Project.findOrCreate(where: {project_id: project_id})
+    .spread(
+      (project, created) ->
+        project.updateAttributes(lastAccessed: new Date())
+        .then(() -> callback())
+        .error callback
+    )
+    .error callback
   clearExpiredProjects: (callback = (error) ->) ->
     ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->
@@ -51,34 +47,20 @@ module.exports = ProjectPersistenceManager =
   clearProjectFromCache: (project_id, callback = (error) ->) ->
     logger.log project_id: project_id, "clearing project from cache"
     UrlCache.clearProject project_id, (error) ->
-      if error?
-        logger.err error:error, project_id: project_id, "error clearing project from cache"
-        return callback(error)
+      return callback(error) if error?
       ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
-        if error?
-          logger.err error:error, project_id:project_id, "error clearing project from database"
-        callback(error)
+        return callback(error) if error?
+        callback()
   _clearProjectFromDatabase: (project_id, callback = (error) ->) ->
-    logger.log project_id:project_id, "clearing project from database"
-    job = (cb)->
-      db.Project.destroy(where: {project_id: project_id})
-      .then(() -> cb())
-      .error cb
-    dbQueue.queue.push(job, callback)
+    db.Project.destroy(where: {project_id: project_id})
+    .then(() -> callback())
+    .error callback
   _findExpiredProjectIds: (callback = (error, project_ids) ->) ->
-    job = (cb)->
-      keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)
-      q = {}
-      q[db.op.lt] = keepProjectsFrom
-      db.Project.findAll(where:{lastAccessed:q})
-      .then((projects) ->
-        cb null, projects.map((project) -> project.project_id)
-      ).error cb
-    dbQueue.queue.push(job, callback)
+    db.Project.findAll(where: ["lastAccessed < ?", new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)])
+    .then((projects) ->
+      callback null, projects.map((project) -> project.project_id)
+    ).error callback
 logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"
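The left column funnels every sequelize call through dbQueue, whose source is not part of this diff; presumably it is an async.queue with concurrency 1 so sqlite only ever sees one query at a time. A minimal sketch under that assumption:

async = require "async"
module.exports = DbQueue =
  # worker just runs the queued job; concurrency 1 serialises all db access
  queue: async.queue ((job, cb) -> job(cb)), 1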

View File

@@ -1,8 +1,6 @@
-settings = require("settings-sharelatex")
 module.exports = RequestParser =
   VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
-  MAX_TIMEOUT: 600
+  MAX_TIMEOUT: 300
   parse: (body, callback = (error, data) ->) ->
     response = {}
@@ -12,7 +10,7 @@ module.exports = RequestParser =
     compile = body.compile
     compile.options ||= {}
     try
       response.compiler = @_parseAttribute "compiler",
         compile.options.compiler,
@@ -33,10 +31,6 @@ module.exports = RequestParser =
     response.check = @_parseAttribute "check",
       compile.options.check,
       type: "string"
-    response.flags = @_parseAttribute "flags",
-      compile.options.flags,
-      default: [],
-      type: "object"
     # The syncType specifies whether the request contains all
     # resources (full) or only those resources to be updated
@@ -72,7 +66,7 @@ module.exports = RequestParser =
     originalRootResourcePath = rootResourcePath
     sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath)
     response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath)
     for resource in response.resources
       if resource.path == originalRootResourcePath
         resource.path = sanitizedRootResourcePath
@@ -91,7 +85,7 @@ module.exports = RequestParser =
         throw "resource modified date could not be understood: #{resource.modified}"
       if !resource.url? and !resource.content?
         throw "all resources should have either a url or content attribute"
       if resource.content? and typeof resource.content != "string"
         throw "content attribute should be a string"
       if resource.url? and typeof resource.url != "string"
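For orientation, the request body both versions parse looks like the sketch below; the flags option only exists on the left-hand side, and all values are illustrative:

body =
  compile:
    options:
      compiler: "pdflatex"
      timeout: 60
      flags: ["-file-line-error"]   # left column only; example value
    rootResourcePath: "main.tex"
    resources: [
      { path: "main.tex", content: "\\documentclass{article}..." }
    ]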

View File

@@ -78,16 +78,8 @@ module.exports = ResourceWriter =
         should_delete = true
       if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache
         should_delete = false
-      if path.match(/^output-.*/) # Tikz cached figures (default case)
+      if path.match(/^output-.*/) # Tikz cached figures
         should_delete = false
-      if path.match(/\.(pdf|dpth|md5)$/) # Tikz cached figures (by extension)
-        should_delete = false
-      if path.match(/\.(pygtex|pygstyle)$/) or path.match(/(^|\/)_minted-[^\/]+\//) # minted files/directory
-        should_delete = false
-      if path.match(/\.md\.tex$/) or path.match(/(^|\/)_markdown_[^\/]+\//) # markdown files/directory
-        should_delete = false
-      if path.match(/-eps-converted-to\.pdf$/) # Epstopdf generated files
-        should_delete = false
       if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv"
         should_delete = true
       if path == "output.tex" # created by TikzManager if present in output files
@@ -128,11 +120,7 @@ module.exports = ResourceWriter =
           logger.err err:err, project_id:project_id, path:path, resource_url:resource.url, modified:resource.modified, "error downloading file for resources"
           callback() #try and continue compiling even if http resource can not be downloaded at this time
       else
-        process = require("process")
         fs.writeFile path, resource.content, callback
-        try
-          result = fs.lstatSync(path)
-        catch e
   checkPath: (basePath, resourcePath, callback) ->
     path = Path.normalize(Path.join(basePath, resourcePath))
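The left column's extra keep-rules can be spot-checked in isolation; a small sketch with illustrative paths:

keepByExtension = (path) -> /\.(pdf|dpth|md5)$/.test(path)
mintedDir = (path) -> /(^|\/)_minted-[^\/]+\//.test(path)
console.log keepByExtension("output-figure0.dpth")      # true: tikz externalised figure
console.log mintedDir("_minted-main/default.pygstyle")  # true: minted cache directory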

View File

@@ -4,34 +4,32 @@ ResourceWriter = require "./ResourceWriter"
 SafeReader = require "./SafeReader"
 logger = require "logger-sharelatex"
-# for \tikzexternalize or pstool to work the main file needs to match the
+# for \tikzexternalize to work the main file needs to match the
 # jobname. Since we set the -jobname to output, we have to create a
 # copy of the main file as 'output.tex'.
 module.exports = TikzManager =
-  checkMainFile: (compileDir, mainFile, resources, callback = (error, needsMainFile) ->) ->
+  checkMainFile: (compileDir, mainFile, resources, callback = (error, usesTikzExternalize) ->) ->
     # if there's already an output.tex file, we don't want to touch it
     for resource in resources
       if resource.path is "output.tex"
         logger.log compileDir: compileDir, mainFile: mainFile, "output.tex already in resources"
         return callback(null, false)
-    # if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
+    # if there's no output.tex, see if we are using tikz/pgf in the main file
     ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
       return callback(error) if error?
       SafeReader.readFile path, 65536, "utf8", (error, content) ->
         return callback(error) if error?
         usesTikzExternalize = content?.indexOf("\\tikzexternalize") >= 0
-        usesPsTool = content?.indexOf("{pstool}") >= 0
-        logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize:usesTikzExternalize, usesPsTool: usesPsTool, "checked for packages needing main file as output.tex"
-        needsMainFile = (usesTikzExternalize || usesPsTool)
-        callback null, needsMainFile
+        logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize:usesTikzExternalize, "checked for tikzexternalize"
+        callback null, usesTikzExternalize
   injectOutputFile: (compileDir, mainFile, callback = (error) ->) ->
     ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
       return callback(error) if error?
       fs.readFile path, "utf8", (error, content) ->
         return callback(error) if error?
-        logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex as project uses packages which require it"
+        logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex for tikz"
         # use wx flag to ensure that output file does not already exist
         fs.writeFile Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback

View File

@@ -1,5 +1,4 @@
db = require("./db") db = require("./db")
dbQueue = require "./DbQueue"
UrlFetcher = require("./UrlFetcher") UrlFetcher = require("./UrlFetcher")
Settings = require("settings-sharelatex") Settings = require("settings-sharelatex")
crypto = require("crypto") crypto = require("crypto")
@@ -52,6 +51,7 @@ module.exports = UrlCache =
_doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) -> _doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) ->
if !lastModified? if !lastModified?
return callback null, true return callback null, true
UrlCache._findUrlDetails project_id, url, (error, urlDetails) -> UrlCache._findUrlDetails project_id, url, (error, urlDetails) ->
return callback(error) if error? return callback(error) if error?
if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime() if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime()
@@ -94,41 +94,32 @@ module.exports = UrlCache =
return callback() return callback()
_findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) -> _findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
job = (cb)-> db.UrlCache.find(where: { url: url, project_id: project_id })
db.UrlCache.find(where: { url: url, project_id: project_id }) .then((urlDetails) -> callback null, urlDetails)
.then((urlDetails) -> cb null, urlDetails) .error callback
.error cb
dbQueue.queue.push job, callback
_updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) -> _updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
job = (cb)-> db.UrlCache.findOrCreate(where: {url: url, project_id: project_id})
db.UrlCache.findOrCreate(where: {url: url, project_id: project_id}) .spread(
.spread( (urlDetails, created) ->
(urlDetails, created) -> urlDetails.updateAttributes(lastModified: lastModified)
urlDetails.updateAttributes(lastModified: lastModified) .then(() -> callback())
.then(() -> cb()) .error(callback)
.error(cb) )
) .error callback
.error cb
dbQueue.queue.push(job, callback)
_clearUrlDetails: (project_id, url, callback = (error) ->) -> _clearUrlDetails: (project_id, url, callback = (error) ->) ->
job = (cb)-> db.UrlCache.destroy(where: {url: url, project_id: project_id})
db.UrlCache.destroy(where: {url: url, project_id: project_id}) .then(() -> callback null)
.then(() -> cb null) .error callback
.error cb
dbQueue.queue.push(job, callback)
_findAllUrlsInProject: (project_id, callback = (error, urls) ->) -> _findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->
job = (cb)-> db.UrlCache.findAll(where: { project_id: project_id })
db.UrlCache.findAll(where: { project_id: project_id }) .then(
.then( (urlEntries) ->
(urlEntries) -> callback null, urlEntries.map((entry) -> entry.url)
cb null, urlEntries.map((entry) -> entry.url) )
) .error callback
.error cb
dbQueue.queue.push(job, callback)

View File

@@ -1,8 +1,6 @@
 request = require("request").defaults(jar: false)
 fs = require("fs")
 logger = require "logger-sharelatex"
-settings = require("settings-sharelatex")
-URL = require('url');
 oneMinute = 60 * 1000
@@ -13,9 +11,6 @@ module.exports = UrlFetcher =
       _callback(error)
       _callback = () ->
-    if settings.filestoreDomainOveride?
-      p = URL.parse(url).path
-      url = "#{settings.filestoreDomainOveride}#{p}"
     timeoutHandler = setTimeout () ->
       timeoutHandler = null
       logger.error url:url, filePath: filePath, "Timed out downloading file to cache"
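What the removed filestoreDomainOveride block does: swap the host of the fetch URL while keeping its path, so a compile can be pointed at a local filestore. A sketch with illustrative values:

URL = require "url"
url = "http://filestore.example.com/project/123/file/abc"
p = URL.parse(url).path            # "/project/123/file/abc"
override = "http://localhost:3009" # illustrative setting value
console.log "#{override}#{p}"      # "http://localhost:3009/project/123/file/abc"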

View File

@@ -1,12 +1,9 @@
 Sequelize = require("sequelize")
 Settings = require("settings-sharelatex")
 _ = require("underscore")
-logger = require "logger-sharelatex"
 options = _.extend {logging:false}, Settings.mysql.clsi
-logger.log dbPath:Settings.mysql.clsi.storage, "connecting to db"
 sequelize = new Sequelize(
   Settings.mysql.clsi.database,
   Settings.mysql.clsi.username,
@@ -14,12 +11,6 @@ sequelize = new Sequelize(
   options
 )
-if Settings.mysql.clsi.dialect == "sqlite"
-  logger.log "running PRAGMA journal_mode=WAL;"
-  sequelize.query("PRAGMA journal_mode=WAL;")
-  sequelize.query("PRAGMA synchronous=OFF;")
-  sequelize.query("PRAGMA read_uncommitted = true;")
 module.exports =
   UrlCache: sequelize.define("UrlCache", {
     url: Sequelize.STRING
@@ -41,15 +32,5 @@ module.exports =
     ]
   })
-  op: Sequelize.Op
-  sync: () ->
-    logger.log dbPath:Settings.mysql.clsi.storage, "syncing db schema"
-    sequelize.sync()
-    .then(->
-      logger.log "db sync complete"
-    ).catch((err)->
-      console.log err, "error syncing"
-    )
+  sync: () -> sequelize.sync()
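The removed sqlite block is equivalent to running three pragmas by hand when the dialect is sqlite; roughly what each one buys, in a self-contained sketch (connection values illustrative):

Sequelize = require "sequelize"
sequelize = new Sequelize "clsi", "clsi", null, dialect: "sqlite", storage: "db.sqlite", logging: false
sequelize.query("PRAGMA journal_mode=WAL;")        # writers stop blocking readers
sequelize.query("PRAGMA synchronous=OFF;")         # skip fsync; faster, less durable
sequelize.query("PRAGMA read_uncommitted = true;") # allow dirty reads across connections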

View File

@@ -1,4 +0,0 @@
#!/bin/bash
set -e;
MOCHA="node_modules/.bin/mocha --recursive --reporter spec --timeout 15000"
$MOCHA "$@"

Binary file not shown.

View File

@@ -1,9 +0,0 @@
clsi
--language=coffeescript
--node-version=10.15.0
--acceptance-creds=None
--dependencies=mongo,redis
--docker-repos=gcr.io/overleaf-ops
--env-pass-through=TEXLIVE_IMAGE
--build-target=docker
--script-version=1.1.22

View File

@@ -1,39 +0,0 @@
steps:
- id: texlive
name: 'gcr.io/overleaf-ops/texlive-full:2017.1'
- id: build
name: 'gcr.io/overleaf-ops/cloud-builder'
args:
- 'build'
env:
- 'BUILD_NUMBER=$SHORT_SHA'
- 'BRANCH_NAME=$BRANCH_NAME'
waitFor: ['-']
- id: test_unit
name: 'gcr.io/overleaf-ops/cloud-builder'
args:
- 'test_unit'
env:
- 'DOCKER_COMPOSE_FLAGS=-f docker-compose.ci.yml'
- 'BUILD_NUMBER=$SHORT_SHA'
- 'BRANCH_NAME=$BRANCH_NAME'
waitFor:
- build
- id: test_acceptance
name: 'gcr.io/overleaf-ops/cloud-builder'
args:
- 'test_acceptance'
env:
- 'DOCKER_COMPOSE_FLAGS=-f docker-compose.ci.yml'
- 'BUILD_NUMBER=$SHORT_SHA'
- 'BRANCH_NAME=$BRANCH_NAME'
- 'TEXLIVE_IMAGE=gcr.io/overleaf-ops/texlive-full:2017.1'
waitFor:
- build
- texlive
images:
- 'gcr.io/$PROJECT_ID/clsi:${BRANCH_NAME}-${SHORT_SHA}'
timeout: 1800s
options:
diskSizeGb: 200
machineType: 'N1_HIGHCPU_8'

View File

@@ -7,16 +7,10 @@ module.exports =
   clsi:
     database: "clsi"
     username: "clsi"
-    password: null
     dialect: "sqlite"
-    storage: process.env["SQLITE_PATH"] or Path.resolve(__dirname + "/../db.sqlite")
+    storage: Path.resolve(__dirname + "/../db.sqlite")
-    pool:
-      max: 1
-      min: 1
-    retry:
-      max: 10
-compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] or "7mb"
 path:
   compilesDir: Path.resolve(__dirname + "/../compiles")
   clsiCacheDir: Path.resolve(__dirname + "/../cache")
@@ -26,29 +20,19 @@ module.exports =
   clsi:
     port: 3013
     host: process.env["LISTEN_ADDRESS"] or "localhost"
-  load_balancer_agent:
-    report_load:true
-    load_port: 3048
-    local_port: 3049
 apis:
   clsi:
-    url: "http://#{process.env['CLSI_HOST'] or 'localhost'}:3013"
+    url: "http://localhost:3013"
-smokeTest: process.env["SMOKE_TEST"] or false
+smokeTest: false
 project_cache_length_ms: 1000 * 60 * 60 * 24
-parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] or 1
+parallelFileDownloads:1
-parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] or 1
-filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"]
-texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"]
-sentry:
-  dsn: process.env['SENTRY_DSN']
-if process.env["DOCKER_RUNNER"]
+if process.env["COMMAND_RUNNER"]
   module.exports.clsi =
-    dockerRunner: process.env["DOCKER_RUNNER"] == "true"
+    commandRunner: process.env["COMMAND_RUNNER"]
     docker:
       image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
       env:
@@ -57,15 +41,4 @@ if process.env["DOCKER_RUNNER"]
       user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
     expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
     checkProjectsIntervalMs: 10 * 60 * 1000
-  try
-    seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json")
-    module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path)))
-  catch error
-    console.log error, "could not load seccom profile from #{seccomp_profile_path}"
-  module.exports.path.synctexBaseDir = -> "/compile"
   module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]
-  module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]
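How the sandboxed-compiles paths fit together, with values taken from the docker-compose files further down; the per-project mount path is an assumption, not shown in this diff:

hostDir = process.env["COMPILES_HOST_DIR"]   # e.g. $PWD/compiles on the host
project_id = "project-123"                   # illustrative
hostCompileDir = "#{hostDir}/#{project_id}"  # presumably bind-mounted into the
                                             # sibling texlive container as /compile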

5
debug
View File

@@ -1,5 +0,0 @@
#!/bin/bash
echo "hello world"
sleep 3
echo "awake"
/opt/synctex pdf /compile/output.pdf 1 100 200
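The debug script above exercises the synctex binary that the compose files below bind-mount from ./bin/synctex; inside the compile container it is presumably available as /opt/synctex. The trailing arguments read as page, h, v, matching the syncFromPdf tests later in this diff. A sketch of driving it from node:

ChildProcess = require "child_process"
ChildProcess.exec "/opt/synctex pdf /compile/output.pdf 1 100 200", (err, stdout, stderr) ->
  console.log err, stdout, stderr   # expect code positions for page 1 at (100, 200)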

View File

@@ -1,32 +0,0 @@
version: "2"
services:
dev:
environment:
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEXLIVE_IMAGE_USER: "tex"
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ./compiles:/app/compiles
- ./cache:/app/cache
- ./bin/synctex:/app/bin/synctex
ci:
environment:
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEXLIVE_IMAGE_USER: "tex"
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
SQLITE_PATH: /app/compiles/db.sqlite
volumes:
- /var/run/docker.sock:/var/run/docker.sock:rw
- ./compiles:/app/compiles
- ./cache:/app/cache
- ./bin/synctex:/app/bin/synctex

View File

@@ -1,49 +0,0 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.22
version: "2"
services:
test_unit:
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
command: npm run test:unit:_run
environment:
NODE_ENV: test
test_acceptance:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
extends:
file: docker-compose-config.yml
service: ci
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
TEXLIVE_IMAGE:
depends_on:
- mongo
- redis
command: npm run test:acceptance:_run
tar:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
volumes:
- ./:/tmp/build/
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
redis:
image: redis
mongo:
image: mongo:3.4

View File

@@ -1,56 +0,0 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.22
version: "2"
services:
test_unit:
build: .
volumes:
- .:/app
working_dir: /app
environment:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
command: npm run test:unit
test_acceptance:
build: .
volumes:
- .:/app
working_dir: /app
extends:
file: docker-compose-config.yml
service: dev
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
LOG_LEVEL: ERROR
NODE_ENV: test
depends_on:
- mongo
- redis
command: npm run test:acceptance
tar:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
volumes:
- ./:/tmp/build/
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root
redis:
image: redis
mongo:
image: mongo:3.4

View File

@@ -1,6 +0,0 @@
#!/bin/bash
set -o pipefail
/app/inner-entrypoint.sh "$@" 2>&1 | ts

View File

@@ -1,27 +0,0 @@
#!/bin/sh
set -x
date
echo "Changing permissions of /var/run/docker.sock for sibling containers"
ls -al /var/run/docker.sock
docker --version
cat /etc/passwd
DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock)
groupadd --non-unique --gid ${DOCKER_GROUP} dockeronhost
usermod -aG dockeronhost node
mkdir -p /app/cache
chown -R node:node /app/cache
mkdir -p /app/compiles
chown -R node:node /app/compiles
chown -R node:node /app/bin/synctex
mkdir -p /app/test/acceptance/fixtures/tmp/
chown -R node:node /app
chown -R node:node /app/bin
exec runuser -u node -- "$@"

View File

@@ -1,4 +0,0 @@
/bin/sh
wget -qO- https://get.docker.com/ | sh
apt-get install poppler-utils vim ghostscript --yes
npm rebuild

View File

@@ -1,41 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: clsi
namespace: default
spec:
type: LoadBalancer
ports:
- port: 80
protocol: TCP
targetPort: 80
selector:
run: clsi
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
name: clsi
namespace: default
spec:
replicas: 2
template:
metadata:
labels:
run: clsi
spec:
containers:
- name: clsi
image: gcr.io/henry-terraform-admin/clsi
imagePullPolicy: Always
readinessProbe:
httpGet:
path: status
port: 80
periodSeconds: 5
initialDelaySeconds: 0
failureThreshold: 3
successThreshold: 1

View File

@@ -1,19 +0,0 @@
{
"ignore": [
".git",
"node_modules/"
],
"verbose": true,
"legacyWatch": true,
"execMap": {
"js": "npm run start"
},
"watch": [
"app/coffee/",
"app.coffee",
"config/"
],
"ext": "coffee"
}

2948
npm-shrinkwrap.json generated

File diff suppressed because it is too large

View File

@@ -7,48 +7,46 @@
"url": "https://github.com/sharelatex/clsi-sharelatex.git" "url": "https://github.com/sharelatex/clsi-sharelatex.git"
}, },
"scripts": { "scripts": {
"compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')", "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee",
"start": "npm run compile:app && node $NODE_APP_OPTIONS app.js", "start": "npm run compile:app && node app.js"
"test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js",
"test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
"test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js",
"test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP",
"compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee",
"compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee",
"compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests",
"nodemon": "nodemon --config nodemon.json",
"compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee"
}, },
"author": "James Allen <james@sharelatex.com>", "author": "James Allen <james@sharelatex.com>",
"dependencies": { "dependencies": {
"async": "0.2.9", "async": "0.2.9",
"body-parser": "^1.2.0", "body-parser": "^1.2.0",
"dockerode": "^2.5.3",
"express": "^4.2.0", "express": "^4.2.0",
"fs-extra": "^0.16.3", "fs-extra": "^0.16.3",
"grunt-mkdir": "^1.0.0",
"heapdump": "^0.3.5", "heapdump": "^0.3.5",
"lockfile": "^1.0.3", "lockfile": "^1.0.3",
"logger-sharelatex": "^1.7.0", "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.4",
"lynx": "0.0.11", "lynx": "0.0.11",
"metrics-sharelatex": "^2.2.0", "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0",
"mkdirp": "0.3.5", "mkdirp": "0.3.5",
"mysql": "2.6.2", "mysql": "2.6.2",
"request": "^2.21.0", "request": "^2.21.0",
"sequelize": "^4.38.0", "sequelize": "^2.1.3",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
"smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0", "smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0",
"sqlite3": "^4.0.6", "sqlite3": "~3.1.8",
"underscore": "^1.8.2", "underscore": "^1.8.2",
"v8-profiler-node8": "^6.0.1", "v8-profiler": "^5.2.4",
"wrench": "~1.5.4" "wrench": "~1.5.4"
}, },
"devDependencies": { "devDependencies": {
"bunyan": "^0.22.1", "mocha": "1.10.0",
"coffee-script": "1.6.0",
"chai": "~1.8.1", "chai": "~1.8.1",
"coffeescript": "1.6.0",
"mocha": "^4.0.1",
"sandboxed-module": "~0.3.0",
"sinon": "~1.7.3", "sinon": "~1.7.3",
"timekeeper": "0.0.4" "grunt": "~0.4.2",
"grunt-contrib-coffee": "~0.7.0",
"grunt-contrib-clean": "~0.5.0",
"grunt-shell": "~0.6.1",
"grunt-mocha-test": "~0.8.1",
"sandboxed-module": "~0.3.0",
"timekeeper": "0.0.4",
"grunt-execute": "^0.1.5",
"bunyan": "^0.22.1",
"grunt-bunyan": "^0.5.0"
} }
} }

View File

@@ -1,3 +0,0 @@
FROM quay.io/sharelatex/texlive-full:2017.1
# RUN usermod -u 1001 tex

View File

@@ -1,836 +0,0 @@
{
"defaultAction": "SCMP_ACT_ERRNO",
"architectures": [
"SCMP_ARCH_X86_64",
"SCMP_ARCH_X86",
"SCMP_ARCH_X32"
],
"syscalls": [
{
"name": "access",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "arch_prctl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "brk",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "chdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "chmod",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clock_getres",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clock_gettime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clock_nanosleep",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clone",
"action": "SCMP_ACT_ALLOW",
"args": [
{
"index": 0,
"value": 2080505856,
"valueTwo": 0,
"op": "SCMP_CMP_MASKED_EQ"
}
]
},
{
"name": "close",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "copy_file_range",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "creat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "dup",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "dup2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "dup3",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "execve",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "execveat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "exit",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "exit_group",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "faccessat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fadvise64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fadvise64_64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fallocate",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchmod",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchmodat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fcntl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fcntl64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fdatasync",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fork",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstatat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstatfs",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstatfs64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fsync",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "ftruncate",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "ftruncate64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "futex",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "futimesat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getcpu",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getcwd",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getdents",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getdents64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getegid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getegid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "geteuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "geteuid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgroups",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgroups32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpgrp",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getppid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpriority",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresgid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresuid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getrlimit",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "get_robust_list",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getrusage",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getsid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "gettid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getuid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "ioctl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "kill",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "_llseek",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "lseek",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "lstat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "lstat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "madvise",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mkdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mkdirat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mmap",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mmap2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mprotect",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mremap",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "munmap",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "newfstatat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "open",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "openat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pause",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pipe",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pipe2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "prctl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pread64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "preadv",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "prlimit64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pwrite64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pwritev",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "read",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "readlink",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "readlinkat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "readv",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rename",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "renameat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "renameat2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "restart_syscall",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rmdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigaction",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigpending",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigprocmask",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigqueueinfo",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigreturn",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigsuspend",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigtimedwait",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_tgsigqueueinfo",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_getaffinity",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_getparam",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_get_priority_max",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_get_priority_min",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_getscheduler",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_rr_get_interval",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_yield",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sendfile",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sendfile64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setgroups",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setgroups32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "set_robust_list",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "set_tid_address",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sigaltstack",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "stat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "stat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "statfs",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "statfs64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sync",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sync_file_range",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "syncfs",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sysinfo",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "tgkill",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_create",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_delete",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_getoverrun",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_gettime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_settime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "times",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "tkill",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "truncate",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "truncate64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "umask",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "uname",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "unlink",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "unlinkat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "utime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "utimensat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "utimes",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "vfork",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "vhangup",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "wait4",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "waitid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "write",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "writev",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pread",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "capget",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "capset",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchown",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "gettimeofday",
"action": "SCMP_ACT_ALLOW",
"args": []
}, {
"name": "epoll_pwait",
"action": "SCMP_ACT_ALLOW",
"args": []
}
]
}

View File

@@ -1,34 +0,0 @@
include /etc/firejail/disable-common.inc
include /etc/firejail/disable-devel.inc
# include /etc/firejail/disable-mgmt.inc ## removed in 0.9.40
# include /etc/firejail/disable-secret.inc ## removed in 0.9.40
read-only /bin
blacklist /boot
blacklist /dev
read-only /etc
blacklist /home # blacklisted for synctex
read-only /lib
read-only /lib64
blacklist /media
blacklist /mnt
blacklist /opt
blacklist /root
read-only /run
blacklist /sbin
blacklist /selinux
blacklist /src
blacklist /sys
read-only /usr
caps.drop all
noroot
nogroups
net none
private-tmp
private-dev
shell none
seccomp
nonewprivs

View File

@@ -1,10 +1,9 @@
 Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
-ClsiApp = require "./helpers/ClsiApp"
 describe "Broken LaTeX file", ->
-  before (done)->
+  before ->
     @broken_request =
       resources: [
         path: "main.tex"
@@ -25,7 +24,6 @@ describe "Broken LaTeX file", ->
           \\end{document}
         '''
       ]
-    ClsiApp.ensureRunning done
   describe "on first run", ->
     before (done) ->

View File

@@ -1,10 +1,9 @@
 Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
-ClsiApp = require "./helpers/ClsiApp"
 describe "Deleting Old Files", ->
-  before (done)->
+  before ->
     @request =
       resources: [
         path: "main.tex"
@@ -15,8 +14,7 @@ describe "Deleting Old Files", ->
          \\end{document}
         '''
       ]
-    ClsiApp.ensureRunning done
   describe "on first run", ->
     before (done) ->
       @project_id = Client.randomId()

View File

@@ -3,25 +3,15 @@ request = require "request"
require("chai").should() require("chai").should()
fs = require "fs" fs = require "fs"
ChildProcess = require "child_process" ChildProcess = require "child_process"
ClsiApp = require "./helpers/ClsiApp"
logger = require("logger-sharelatex")
Path = require("path")
fixturePath = (path) -> Path.normalize(__dirname + "/../fixtures/" + path)
process = require "process"
console.log process.pid, process.ppid, process.getuid(),process.getgroups(), "PID"
try
console.log "creating tmp directory", fixturePath("tmp")
fs.mkdirSync(fixturePath("tmp"))
catch err
console.log err, fixturePath("tmp"), "unable to create fixture tmp path"
MOCHA_LATEX_TIMEOUT = 60 * 1000 fixturePath = (path) -> __dirname + "/../fixtures/" + path
try
fs.mkdirSync(fixturePath("tmp"))
catch e
convertToPng = (pdfPath, pngPath, callback = (error) ->) -> convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
command = "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}" convert = ChildProcess.exec "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
console.log "COMMAND"
console.log command
convert = ChildProcess.exec command
stdout = "" stdout = ""
convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString() convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString()
convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString() convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
@@ -35,10 +25,7 @@ compare = (originalPath, generatedPath, callback = (error, same) ->) ->
proc.stderr.on "data", (chunk) -> stderr += chunk proc.stderr.on "data", (chunk) -> stderr += chunk
proc.on "exit", () -> proc.on "exit", () ->
if stderr.trim() == "0 (0)" if stderr.trim() == "0 (0)"
# remove output diff if test matches expected image fs.unlink diff_file # remove output diff if test matches expected image
fs.unlink diff_file, (err) ->
if err
throw err
callback null, true callback null, true
else else
console.log "compare result", stderr console.log "compare result", stderr
@@ -53,6 +40,7 @@ checkPdfInfo = (pdfPath, callback = (error, output) ->) ->
if stdout.match(/Optimized:\s+yes/) if stdout.match(/Optimized:\s+yes/)
callback null, true callback null, true
else else
console.log "pdfinfo result", stdout
callback null, false callback null, false
compareMultiplePages = (project_id, callback = (error) ->) -> compareMultiplePages = (project_id, callback = (error) ->) ->
@@ -69,8 +57,6 @@ compareMultiplePages = (project_id, callback = (error) ->) ->
compareNext 0, callback compareNext 0, callback
comparePdf = (project_id, example_dir, callback = (error) ->) -> comparePdf = (project_id, example_dir, callback = (error) ->) ->
console.log "CONVERT"
console.log "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png"
convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) => convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
throw error if error? throw error if error?
convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) => convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
@@ -89,7 +75,6 @@ comparePdf = (project_id, example_dir, callback = (error) ->) ->
downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) -> downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf")) writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
request.get(url).pipe(writeStream) request.get(url).pipe(writeStream)
console.log("writing file out", fixturePath("tmp/#{project_id}.pdf"))
writeStream.on "close", () => writeStream.on "close", () =>
checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) => checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) =>
throw error if error? throw error if error?
@@ -100,9 +85,7 @@ Client.runServer(4242, fixturePath("examples"))
describe "Example Documents", -> describe "Example Documents", ->
before (done) -> before (done) ->
ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () -> ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () -> done()
ClsiApp.ensureRunning done
for example_dir in fs.readdirSync fixturePath("examples") for example_dir in fs.readdirSync fixturePath("examples")
do (example_dir) -> do (example_dir) ->
@@ -111,7 +94,6 @@ describe "Example Documents", ->
@project_id = Client.randomId() + "_" + example_dir @project_id = Client.randomId() + "_" + example_dir
it "should generate the correct pdf", (done) -> it "should generate the correct pdf", (done) ->
this.timeout(MOCHA_LATEX_TIMEOUT)
Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
if error || body?.compile?.status is "failure" if error || body?.compile?.status is "failure"
console.log "DEBUG: error", error, "body", JSON.stringify(body) console.log "DEBUG: error", error, "body", JSON.stringify(body)
@@ -119,7 +101,6 @@ describe "Example Documents", ->
downloadAndComparePdf(@project_id, example_dir, pdf.url, done) downloadAndComparePdf(@project_id, example_dir, pdf.url, done)
it "should generate the correct pdf on the second run as well", (done) -> it "should generate the correct pdf on the second run as well", (done) ->
this.timeout(MOCHA_LATEX_TIMEOUT)
Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) => Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
if error || body?.compile?.status is "failure" if error || body?.compile?.status is "failure"
console.log "DEBUG: error", error, "body", JSON.stringify(body) console.log "DEBUG: error", error, "body", JSON.stringify(body)

View File

@@ -1,7 +1,6 @@
 Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
-ClsiApp = require "./helpers/ClsiApp"
 describe "Simple LaTeX file", ->
   before (done) ->
@@ -16,8 +15,7 @@ describe "Simple LaTeX file", ->
         \\end{document}
       '''
     ]
-    ClsiApp.ensureRunning =>
-      Client.compile @project_id, @request, (@error, @res, @body) => done()
+    Client.compile @project_id, @request, (@error, @res, @body) => done()
   it "should return the PDF", ->
     pdf = Client.getOutputFile(@body, "pdf")

View File

@@ -2,25 +2,21 @@ Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
 expect = require("chai").expect
-ClsiApp = require "./helpers/ClsiApp"
-crypto = require("crypto")
 describe "Syncing", ->
   before (done) ->
-    content = '''
+    @request =
+      resources: [
+        path: "main.tex"
+        content: '''
           \\documentclass{article}
           \\begin{document}
           Hello world
           \\end{document}
           '''
-    @request =
-      resources: [
-        path: "main.tex"
-        content: content
       ]
     @project_id = Client.randomId()
-    ClsiApp.ensureRunning =>
-      Client.compile @project_id, @request, (@error, @res, @body) => done()
+    Client.compile @project_id, @request, (@error, @res, @body) => done()
   describe "from code to pdf", ->
     it "should return the correct location", (done) ->
@@ -33,7 +29,7 @@ describe "Syncing", ->
   describe "from pdf to code", ->
     it "should return the correct location", (done) ->
-      Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) =>
+      Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) ->
        throw error if error?
       expect(codePositions).to.deep.equal(
         code: [ { file: 'main.tex', line: 3, column: -1 } ]

View File

@@ -1,27 +1,24 @@
 Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
-ClsiApp = require "./helpers/ClsiApp"
 describe "Timed out compile", ->
   before (done) ->
     @request =
       options:
-        timeout: 10 #seconds
+        timeout: 1 #seconds
       resources: [
         path: "main.tex"
         content: '''
           \\documentclass{article}
           \\begin{document}
-          \\def\\x{Hello!\\par\\x}
-          \\x
+          Hello world
+          \\input{|"sleep 10"}
           \\end{document}
         '''
       ]
     @project_id = Client.randomId()
-    ClsiApp.ensureRunning =>
-      Client.compile @project_id, @request, (@error, @res, @body) => done()
+    Client.compile @project_id, @request, (@error, @res, @body) => done()
   it "should return a timeout error", ->
     @body.compile.error.should.equal "container timed out"

View File

@@ -2,7 +2,6 @@ Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
 sinon = require "sinon"
-ClsiApp = require "./helpers/ClsiApp"
 host = "localhost"
@@ -47,8 +46,7 @@ describe "Url Caching", ->
       }]
       sinon.spy Server, "getFile"
-      ClsiApp.ensureRunning =>
-        Client.compile @project_id, @request, (@error, @res, @body) => done()
+      Client.compile @project_id, @request, (@error, @res, @body) => done()
     afterEach ->
       Server.getFile.restore()

View File

@@ -4,7 +4,6 @@ require("chai").should()
 expect = require("chai").expect
 path = require("path")
 fs = require("fs")
-ClsiApp = require "./helpers/ClsiApp"
 describe "Syncing", ->
   before (done) ->
@@ -14,8 +13,7 @@ describe "Syncing", ->
       content: fs.readFileSync(path.join(__dirname,"../fixtures/naugty_strings.txt"),"utf-8")
     ]
     @project_id = Client.randomId()
-    ClsiApp.ensureRunning =>
-      Client.compile @project_id, @request, (@error, @res, @body) => done()
+    Client.compile @project_id, @request, (@error, @res, @body) => done()
   describe "wordcount file", ->
     it "should return wordcount info", (done) ->


@@ -30,7 +30,6 @@ module.exports = Client =
express = require("express") express = require("express")
app = express() app = express()
app.use express.static(directory) app.use express.static(directory)
console.log("starting test server on", port, host)
app.listen(port, host).on "error", (error) -> app.listen(port, host).on "error", (error) ->
console.error "error starting server:", error.message console.error "error starting server:", error.message
process.exit(1) process.exit(1)


@@ -1,24 +0,0 @@
app = require('../../../../app')
require("logger-sharelatex").logger.level("info")
logger = require("logger-sharelatex")
Settings = require("settings-sharelatex")
module.exports =
running: false
initing: false
callbacks: []
ensureRunning: (callback = (error) ->) ->
if @running
return callback()
else if @initing
@callbacks.push callback
else
@initing = true
@callbacks.push callback
app.listen Settings.internal?.clsi?.port, "localhost", (error) =>
throw error if error?
@running = true
logger.log("clsi running in dev mode")
for callback in @callbacks
callback()
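
Note: the deleted helper above is a once-only async initializer: the first caller starts the app, concurrent callers queue their callbacks, and everything fires once `listen` succeeds. A generic sketch of the same latch pattern (hedged; `startService` is a stand-in for `app.listen`):

makeEnsureRunning = (startService) ->
  running = false
  initing = false
  callbacks = []
  (callback = ->) ->
    return callback() if running
    callbacks.push callback
    return if initing
    initing = true
    startService (error) ->
      throw error if error?
      running = true
      cb() for cb in callbacks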


@@ -14,7 +14,7 @@ describe "CompileController", ->
clsi:
url: "http://clsi.example.com"
"./ProjectPersistenceManager": @ProjectPersistenceManager = {}
-"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err:sinon.stub(), warn: sinon.stub()}
+"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
@Settings.externalUrl = "http://www.example.com"
@req = {}
@res = {}
@@ -144,7 +144,7 @@ describe "CompileController", ->
file: @file
line: @line.toString()
column: @column.toString()
-@res.json = sinon.stub()
+@res.send = sinon.stub()
@CompileManager.syncFromCode = sinon.stub().callsArgWith(5, null, @pdfPositions = ["mock-positions"])
@CompileController.syncFromCode @req, @res, @next
@@ -155,8 +155,8 @@ describe "CompileController", ->
.should.equal true
it "should return the positions", ->
-@res.json
-.calledWith(
+@res.send
+.calledWith(JSON.stringify
pdf: @pdfPositions
)
.should.equal true
@@ -173,7 +173,7 @@ describe "CompileController", ->
page: @page.toString()
h: @h.toString()
v: @v.toString()
-@res.json = sinon.stub()
+@res.send = sinon.stub()
@CompileManager.syncFromPdf = sinon.stub().callsArgWith(5, null, @codePositions = ["mock-positions"])
@CompileController.syncFromPdf @req, @res, @next
@@ -184,8 +184,8 @@ describe "CompileController", ->
.should.equal true
it "should return the positions", ->
-@res.json
-.calledWith(
+@res.send
+.calledWith(JSON.stringify
code: @codePositions
)
.should.equal true
@@ -199,7 +199,7 @@ describe "CompileController", ->
@req.query =
file: @file
image: @image = "example.com/image"
-@res.json = sinon.stub()
+@res.send = sinon.stub()
@CompileManager.wordcount = sinon.stub().callsArgWith(4, null, @texcount = ["mock-texcount"])
@CompileController.wordcount @req, @res, @next
@@ -210,8 +210,8 @@ describe "CompileController", ->
.should.equal true
it "should return the texcount info", ->
-@res.json
-.calledWith(
+@res.send
+.calledWith(JSON.stringify
texcount: @texcount
)
.should.equal true
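
Note: these controller tests revert from Express's `res.json` (which serializes the payload and sets the Content-Type header itself) to `res.send` with a hand-built JSON string. A minimal sketch of the two styles in an Express handler (payload name is illustrative):

express = require "express"
app = express()
app.get "/sync/code", (req, res) ->
  positions = pdf: ["mock-positions"]
  # res.send JSON.stringify(positions)   # manual serialization, as in the new tests
  res.json positions                     # serializes and sets application/json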


@@ -13,14 +13,7 @@ describe "CompileManager", ->
"./ResourceWriter": @ResourceWriter = {} "./ResourceWriter": @ResourceWriter = {}
"./OutputFileFinder": @OutputFileFinder = {} "./OutputFileFinder": @OutputFileFinder = {}
"./OutputCacheManager": @OutputCacheManager = {} "./OutputCacheManager": @OutputCacheManager = {}
"settings-sharelatex": @Settings = "settings-sharelatex": @Settings = { path: compilesDir: "/compiles/dir" }
path:
compilesDir: "/compiles/dir"
synctexBaseDir: -> "/compile"
clsi:
docker:
image: "SOMEIMAGE"
"logger-sharelatex": @logger = { log: sinon.stub() , info:->} "logger-sharelatex": @logger = { log: sinon.stub() , info:->}
"child_process": @child_process = {} "child_process": @child_process = {}
"./CommandRunner": @CommandRunner = {} "./CommandRunner": @CommandRunner = {}
@@ -30,14 +23,13 @@ describe "CompileManager", ->
"fs": @fs = {} "fs": @fs = {}
"fs-extra": @fse = { ensureDir: sinon.stub().callsArg(1) } "fs-extra": @fse = { ensureDir: sinon.stub().callsArg(1) }
@callback = sinon.stub() @callback = sinon.stub()
@project_id = "project-id-123"
@user_id = "1234"
describe "doCompileWithLock", -> describe "doCompileWithLock", ->
beforeEach -> beforeEach ->
@request = @request =
resources: @resources = "mock-resources" resources: @resources = "mock-resources"
project_id: @project_id project_id: @project_id = "project-id-123"
user_id: @user_id user_id: @user_id = "1234"
@output_files = ["foo", "bar"] @output_files = ["foo", "bar"]
@Settings.compileDir = "compiles" @Settings.compileDir = "compiles"
@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}" @compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
@@ -103,12 +95,11 @@ describe "CompileManager", ->
@request = @request =
resources: @resources = "mock-resources" resources: @resources = "mock-resources"
rootResourcePath: @rootResourcePath = "main.tex" rootResourcePath: @rootResourcePath = "main.tex"
project_id: @project_id project_id: @project_id = "project-id-123"
user_id: @user_id user_id: @user_id = "1234"
compiler: @compiler = "pdflatex" compiler: @compiler = "pdflatex"
timeout: @timeout = 42000 timeout: @timeout = 42000
imageName: @image = "example.com/image" imageName: @image = "example.com/image"
flags: @flags = ["-file-line-error"]
@env = {} @env = {}
@Settings.compileDir = "compiles" @Settings.compileDir = "compiles"
@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}" @compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
@@ -118,7 +109,7 @@ describe "CompileManager", ->
@OutputCacheManager.saveOutputFiles = sinon.stub().callsArgWith(2, null, @build_files) @OutputCacheManager.saveOutputFiles = sinon.stub().callsArgWith(2, null, @build_files)
@DraftModeManager.injectDraftMode = sinon.stub().callsArg(1) @DraftModeManager.injectDraftMode = sinon.stub().callsArg(1)
@TikzManager.checkMainFile = sinon.stub().callsArg(3, false) @TikzManager.checkMainFile = sinon.stub().callsArg(3, false)
describe "normally", -> describe "normally", ->
beforeEach -> beforeEach ->
@CompileManager.doCompile @request, @callback @CompileManager.doCompile @request, @callback
@@ -136,7 +127,6 @@ describe "CompileManager", ->
compiler: @compiler compiler: @compiler
timeout: @timeout timeout: @timeout
image: @image image: @image
flags: @flags
environment: @env environment: @env
}) })
.should.equal true .should.equal true
@@ -148,15 +138,15 @@ describe "CompileManager", ->
it "should return the output files", -> it "should return the output files", ->
@callback.calledWith(null, @build_files).should.equal true @callback.calledWith(null, @build_files).should.equal true
it "should not inject draft mode by default", -> it "should not inject draft mode by default", ->
@DraftModeManager.injectDraftMode.called.should.equal false @DraftModeManager.injectDraftMode.called.should.equal false
describe "with draft mode", -> describe "with draft mode", ->
beforeEach -> beforeEach ->
@request.draft = true @request.draft = true
@CompileManager.doCompile @request, @callback @CompileManager.doCompile @request, @callback
it "should inject the draft mode header", -> it "should inject the draft mode header", ->
@DraftModeManager.injectDraftMode @DraftModeManager.injectDraftMode
.calledWith(@compileDir + "/" + @rootResourcePath) .calledWith(@compileDir + "/" + @rootResourcePath)
@@ -175,7 +165,6 @@ describe "CompileManager", ->
compiler: @compiler compiler: @compiler
timeout: @timeout timeout: @timeout
image: @image image: @image
flags: @flags
environment: {'CHKTEX_OPTIONS': '-nall -e9 -e10 -w15 -w16', 'CHKTEX_EXIT_ON_ERROR':1, 'CHKTEX_ULIMIT_OPTIONS': '-t 5 -v 64000'} environment: {'CHKTEX_OPTIONS': '-nall -e9 -e10 -w15 -w16', 'CHKTEX_EXIT_ON_ERROR':1, 'CHKTEX_ULIMIT_OPTIONS': '-t 5 -v 64000'}
}) })
.should.equal true .should.equal true
@@ -194,7 +183,6 @@ describe "CompileManager", ->
compiler: @compiler compiler: @compiler
timeout: @timeout timeout: @timeout
image: @image image: @image
flags: @flags
environment: @env environment: @env
}) })
.should.equal true .should.equal true
@@ -259,23 +247,16 @@ describe "CompileManager", ->
describe "syncFromCode", -> describe "syncFromCode", ->
beforeEach -> beforeEach ->
@fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true}) @fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true})
@stdout = "NODE\t#{@page}\t#{@h}\t#{@v}\t#{@width}\t#{@height}\n" @child_process.execFile.callsArgWith(3, null, @stdout = "NODE\t#{@page}\t#{@h}\t#{@v}\t#{@width}\t#{@height}\n", "")
@CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:@stdout})
@CompileManager.syncFromCode @project_id, @user_id, @file_name, @line, @column, @callback @CompileManager.syncFromCode @project_id, @user_id, @file_name, @line, @column, @callback
it "should execute the synctex binary", -> it "should execute the synctex binary", ->
bin_path = Path.resolve(__dirname + "/../../../bin/synctex") bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf" synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
file_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}" file_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}"
@CommandRunner.run @child_process.execFile
.calledWith( .calledWith(bin_path, ["code", synctex_path, file_path, @line, @column], timeout: 10000)
"#{@project_id}-#{@user_id}", .should.equal true
['/opt/synctex', 'code', synctex_path, file_path, @line, @column],
"#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}",
@Settings.clsi.docker.image,
60000,
{}
).should.equal true
it "should call the callback with the parsed output", -> it "should call the callback with the parsed output", ->
@callback @callback
@@ -291,21 +272,15 @@ describe "CompileManager", ->
describe "syncFromPdf", -> describe "syncFromPdf", ->
beforeEach -> beforeEach ->
@fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true}) @fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true})
@stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}\t#{@line}\t#{@column}\n" @child_process.execFile.callsArgWith(3, null, @stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}\t#{@line}\t#{@column}\n", "")
@CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:@stdout})
@CompileManager.syncFromPdf @project_id, @user_id, @page, @h, @v, @callback @CompileManager.syncFromPdf @project_id, @user_id, @page, @h, @v, @callback
it "should execute the synctex binary", -> it "should execute the synctex binary", ->
bin_path = Path.resolve(__dirname + "/../../../bin/synctex") bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf" synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
@CommandRunner.run @child_process.execFile
.calledWith( .calledWith(bin_path, ["pdf", synctex_path, @page, @h, @v], timeout: 10000)
"#{@project_id}-#{@user_id}", .should.equal true
['/opt/synctex', "pdf", synctex_path, @page, @h, @v],
"#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}",
@Settings.clsi.docker.image,
60000,
{}).should.equal true
it "should call the callback with the parsed output", -> it "should call the callback with the parsed output", ->
@callback @callback
@@ -322,8 +297,8 @@ describe "CompileManager", ->
@fs.readFile = sinon.stub().callsArgWith(2, null, @stdout = "Encoding: ascii\nWords in text: 2") @fs.readFile = sinon.stub().callsArgWith(2, null, @stdout = "Encoding: ascii\nWords in text: 2")
@callback = sinon.stub() @callback = sinon.stub()
@project_id @project_id = "project-id-123"
@timeout = 60 * 1000 @timeout = 10 * 1000
@file_name = "main.tex" @file_name = "main.tex"
@Settings.path.compilesDir = "/local/compile/directory" @Settings.path.compilesDir = "/local/compile/directory"
@image = "example.com/image" @image = "example.com/image"
@@ -334,7 +309,7 @@ describe "CompileManager", ->
@directory = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}" @directory = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
@file_path = "$COMPILE_DIR/#{@file_name}" @file_path = "$COMPILE_DIR/#{@file_name}"
@command =[ "texcount", "-nocol", "-inc", @file_path, "-out=" + @file_path + ".wc"] @command =[ "texcount", "-nocol", "-inc", @file_path, "-out=" + @file_path + ".wc"]
@CommandRunner.run @CommandRunner.run
.calledWith("#{@project_id}-#{@user_id}", @command, @directory, @image, @timeout, {}) .calledWith("#{@project_id}-#{@user_id}", @command, @directory, @image, @timeout, {})
.should.equal true .should.equal true
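
Note: the old expectations ran `/opt/synctex` inside the compile container via `CommandRunner.run` with a 60-second limit; the new ones call a bundled binary directly through `child_process.execFile` with a 10-second timeout. A sketch of the direct invocation, assuming the tab-separated `NODE` output format the tests stub (paths are illustrative):

Path = require "path"
{execFile} = require "child_process"

bin_path = Path.resolve(__dirname + "/bin/synctex")   # illustrative location
execFile bin_path, ["pdf", "output.pdf", "1", "100", "200"], timeout: 10000, (error, stdout, stderr) ->
  throw error if error?
  # each match is "NODE\t<file>\t<line>\t<column>"
  for row in stdout.split("\n") when row.slice(0, 4) is "NODE"
    [node, file, line, column] = row.split("\t")
    console.log {file, line, column}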


@@ -1,145 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
require "coffee-script"
modulePath = require('path').join __dirname, '../../../app/coffee/DockerLockManager'
describe "LockManager", ->
beforeEach ->
@LockManager = SandboxedModule.require modulePath, requires:
"settings-sharelatex": @Settings =
clsi: docker: {}
"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
describe "runWithLock", ->
describe "with a single lock", ->
beforeEach (done) ->
@callback = sinon.stub()
@LockManager.runWithLock "lock-one", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world")
, 100
, (err, args...) =>
@callback(err,args...)
done()
it "should call the callback", ->
@callback.calledWith(null,"hello","world").should.equal true
describe "with two locks", ->
beforeEach (done) ->
@callback1 = sinon.stub()
@callback2 = sinon.stub()
@LockManager.runWithLock "lock-one", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","one")
, 100
, (err, args...) =>
@callback1(err,args...)
@LockManager.runWithLock "lock-two", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","two")
, 200
, (err, args...) =>
@callback2(err,args...)
done()
it "should call the first callback", ->
@callback1.calledWith(null,"hello","world","one").should.equal true
it "should call the second callback", ->
@callback2.calledWith(null,"hello","world","two").should.equal true
describe "with lock contention", ->
describe "where the first lock is released quickly", ->
beforeEach (done) ->
@LockManager.MAX_LOCK_WAIT_TIME = 1000
@LockManager.LOCK_TEST_INTERVAL = 100
@callback1 = sinon.stub()
@callback2 = sinon.stub()
@LockManager.runWithLock "lock", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","one")
, 100
, (err, args...) =>
@callback1(err,args...)
@LockManager.runWithLock "lock", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","two")
, 200
, (err, args...) =>
@callback2(err,args...)
done()
it "should call the first callback", ->
@callback1.calledWith(null,"hello","world","one").should.equal true
it "should call the second callback", ->
@callback2.calledWith(null,"hello","world","two").should.equal true
describe "where the first lock is held longer than the waiting time", ->
beforeEach (done) ->
@LockManager.MAX_LOCK_HOLD_TIME = 10000
@LockManager.MAX_LOCK_WAIT_TIME = 1000
@LockManager.LOCK_TEST_INTERVAL = 100
@callback1 = sinon.stub()
@callback2 = sinon.stub()
doneOne = doneTwo = false
finish = (key) ->
doneOne = true if key is 1
doneTwo = true if key is 2
done() if doneOne and doneTwo
@LockManager.runWithLock "lock", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","one")
, 1100
, (err, args...) =>
@callback1(err,args...)
finish(1)
@LockManager.runWithLock "lock", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","two")
, 100
, (err, args...) =>
@callback2(err,args...)
finish(2)
it "should call the first callback", ->
@callback1.calledWith(null,"hello","world","one").should.equal true
it "should call the second callback with an error", ->
error = sinon.match.instanceOf Error
@callback2.calledWith(error).should.equal true
describe "where the first lock is held longer than the max holding time", ->
beforeEach (done) ->
@LockManager.MAX_LOCK_HOLD_TIME = 1000
@LockManager.MAX_LOCK_WAIT_TIME = 2000
@LockManager.LOCK_TEST_INTERVAL = 100
@callback1 = sinon.stub()
@callback2 = sinon.stub()
doneOne = doneTwo = false
finish = (key) ->
doneOne = true if key is 1
doneTwo = true if key is 2
done() if doneOne and doneTwo
@LockManager.runWithLock "lock", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","one")
, 1500
, (err, args...) =>
@callback1(err,args...)
finish(1)
@LockManager.runWithLock "lock", (releaseLock) ->
setTimeout () ->
releaseLock(null, "hello", "world","two")
, 100
, (err, args...) =>
@callback2(err,args...)
finish(2)
it "should call the first callback", ->
@callback1.calledWith(null,"hello","world","one").should.equal true
it "should call the second callback", ->
@callback2.calledWith(null,"hello","world","two").should.equal true
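
Note: these deleted tests pin down the lock semantics: `runWithLock` serializes work per key; a waiter polls every `LOCK_TEST_INTERVAL` ms, errors out after `MAX_LOCK_WAIT_TIME`, and a holder older than `MAX_LOCK_HOLD_TIME` is treated as expired so a waiter may take the lock over. A sketch of that polling acquire (illustrative, not the deleted module's code):

LOCK_TEST_INTERVAL = 100     # ms between polls
MAX_LOCK_WAIT_TIME = 1000    # give up waiting after this long
MAX_LOCK_HOLD_TIME = 10000   # a holder older than this is considered dead
locks = {}

tryAcquire = (key) ->
  now = Date.now()
  if not locks[key]? or now - locks[key] > MAX_LOCK_HOLD_TIME
    locks[key] = now
    return true
  false

acquire = (key, callback) ->
  start = Date.now()
  attempt = ->
    return callback(null) if tryAcquire(key)
    return callback(new Error("lock timeout")) if Date.now() - start > MAX_LOCK_WAIT_TIME
    setTimeout attempt, LOCK_TEST_INTERVAL
  attempt()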


@@ -1,509 +0,0 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
expect = require('chai').expect
require "coffee-script"
modulePath = require('path').join __dirname, '../../../app/coffee/DockerRunner'
Path = require "path"
describe "DockerRunner", ->
beforeEach ->
@container = container = {}
@DockerRunner = SandboxedModule.require modulePath, requires:
"settings-sharelatex": @Settings =
clsi: docker: {}
path: {}
"logger-sharelatex": @logger = {
log: sinon.stub(),
error: sinon.stub(),
info: sinon.stub(),
warn: sinon.stub()
}
"dockerode": class Docker
getContainer: sinon.stub().returns(container)
createContainer: sinon.stub().yields(null, container)
listContainers: sinon.stub()
"fs": @fs = { stat: sinon.stub().yields(null,{isDirectory:()->true}) }
"./Metrics":
Timer: class Timer
done: () ->
"./LockManager":
runWithLock: (key, runner, callback) -> runner(callback)
@Docker = Docker
@getContainer = Docker::getContainer
@createContainer = Docker::createContainer
@listContainers = Docker::listContainers
@directory = "/local/compile/directory"
@mainFile = "main-file.tex"
@compiler = "pdflatex"
@image = "example.com/sharelatex/image:2016.2"
@env = {}
@callback = sinon.stub()
@project_id = "project-id-123"
@volumes =
"/local/compile/directory": "/compile"
@Settings.clsi.docker.image = @defaultImage = "default-image"
@Settings.clsi.docker.env = PATH: "mock-path"
describe "run", ->
beforeEach (done)->
@DockerRunner._getContainerOptions = sinon.stub().returns(@options = {mockoptions: "foo"})
@DockerRunner._fingerprintContainer = sinon.stub().returns(@fingerprint = "fingerprint")
@name = "project-#{@project_id}-#{@fingerprint}"
@command = ["mock", "command", "--outdir=$COMPILE_DIR"]
@command_with_dir = ["mock", "command", "--outdir=/compile"]
@timeout = 42000
done()
describe "successfully", ->
beforeEach (done)->
@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, (err, output)=>
@callback(err, output)
done()
it "should generate the options for the container", ->
@DockerRunner._getContainerOptions
.calledWith(@command_with_dir, @image, @volumes, @timeout)
.should.equal true
it "should generate the fingerprint from the returned options", ->
@DockerRunner._fingerprintContainer
.calledWith(@options)
.should.equal true
it "should do the run", ->
@DockerRunner._runAndWaitForContainer
.calledWith(@options, @volumes, @timeout)
.should.equal true
it "should call the callback", ->
@callback.calledWith(null, @output).should.equal true
describe 'when path.sandboxedCompilesHostDir is set', ->
beforeEach ->
@Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles'
@directory = '/var/lib/sharelatex/data/compiles/xyz'
@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback
it 'should re-write the bind directory', ->
volumes = @DockerRunner._runAndWaitForContainer.lastCall.args[1]
expect(volumes).to.deep.equal {
'/some/host/dir/compiles/xyz': '/compile'
}
it "should call the callback", ->
@callback.calledWith(null, @output).should.equal true
describe "when the run throws an error", ->
beforeEach ->
firstTime = true
@output = "mock-output"
@DockerRunner._runAndWaitForContainer = (options, volumes, timeout, callback = (error, output)->) =>
if firstTime
firstTime = false
callback new Error("HTTP code is 500 which indicates error: server error")
else
callback(null, @output)
sinon.spy @DockerRunner, "_runAndWaitForContainer"
@DockerRunner.destroyContainer = sinon.stub().callsArg(3)
@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback
it "should do the run twice", ->
@DockerRunner._runAndWaitForContainer
.calledTwice.should.equal true
it "should destroy the container in between", ->
@DockerRunner.destroyContainer
.calledWith(@name, null)
.should.equal true
it "should call the callback", ->
@callback.calledWith(null, @output).should.equal true
describe "with no image", ->
beforeEach ->
@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
@DockerRunner.run @project_id, @command, @directory, null, @timeout, @env, @callback
it "should use the default image", ->
@DockerRunner._getContainerOptions
.calledWith(@command_with_dir, @defaultImage, @volumes, @timeout)
.should.equal true
describe "with image override", ->
beforeEach ->
@Settings.texliveImageNameOveride = "overrideimage.com/something"
@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback
it "should use the override and keep the tag", ->
image = @DockerRunner._getContainerOptions.args[0][1]
image.should.equal "overrideimage.com/something/image:2016.2"
describe "_runAndWaitForContainer", ->
beforeEach ->
@options = {mockoptions: "foo", name: @name = "mock-name"}
@DockerRunner.startContainer = (options, volumes, attachStreamHandler, callback) =>
attachStreamHandler(null, @output = "mock-output")
callback(null, @containerId = "container-id")
sinon.spy @DockerRunner, "startContainer"
@DockerRunner.waitForContainer = sinon.stub().callsArgWith(2, null, @exitCode = 42)
@DockerRunner._runAndWaitForContainer @options, @volumes, @timeout, @callback
it "should create/start the container", ->
@DockerRunner.startContainer
.calledWith(@options, @volumes)
.should.equal true
it "should wait for the container to finish", ->
@DockerRunner.waitForContainer
.calledWith(@name, @timeout)
.should.equal true
it "should call the callback with the output", ->
@callback.calledWith(null, @output).should.equal true
describe "startContainer", ->
beforeEach ->
@attachStreamHandler = sinon.stub()
@attachStreamHandler.cock = true
@options = {mockoptions: "foo", name: "mock-name"}
@container.inspect = sinon.stub().callsArgWith(0)
@DockerRunner.attachToContainer = (containerId, attachStreamHandler, cb)=>
attachStreamHandler()
cb()
sinon.spy @DockerRunner, "attachToContainer"
describe "when the container exists", ->
beforeEach ->
@container.inspect = sinon.stub().callsArgWith(0)
@container.start = sinon.stub().yields()
@DockerRunner.startContainer @options, @volumes, @callback, ->
it "should start the container with the given name", ->
@getContainer
.calledWith(@options.name)
.should.equal true
@container.start
.called
.should.equal true
it "should not try to create the container", ->
@createContainer.called.should.equal false
it "should attach to the container", ->
@DockerRunner.attachToContainer.called.should.equal true
it "should call the callback", ->
@callback.called.should.equal true
it "should attach before the container starts", ->
sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start)
describe "when the container does not exist", ->
beforeEach ()->
exists = false
@container.start = sinon.stub().yields()
@container.inspect = sinon.stub().callsArgWith(0, {statusCode:404})
@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback
it "should create the container", ->
@createContainer
.calledWith(@options)
.should.equal true
it "should call the callback and stream handler", ->
@attachStreamHandler.called.should.equal true
@callback.called.should.equal true
it "should attach to the container", ->
@DockerRunner.attachToContainer.called.should.equal true
it "should attach before the container starts", ->
sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start)
describe "when the container is already running", ->
beforeEach ->
error = new Error("HTTP code is 304 which indicates error: server error - start: Cannot start container #{@name}: The container MOCKID is already running.")
error.statusCode = 304
@container.start = sinon.stub().yields(error)
@container.inspect = sinon.stub().callsArgWith(0)
@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback
it "should not try to create the container", ->
@createContainer.called.should.equal false
it "should call the callback and stream handler without an error", ->
@attachStreamHandler.called.should.equal true
@callback.called.should.equal true
describe "when a volume does not exist", ->
beforeEach ()->
@fs.stat = sinon.stub().yields(new Error("no such path"))
@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback
it "should not try to create the container", ->
@createContainer.called.should.equal false
it "should call the callback with an error", ->
@callback.calledWith(new Error()).should.equal true
describe "when a volume exists but is not a directory", ->
beforeEach ->
@fs.stat = sinon.stub().yields(null, {isDirectory: () -> return false})
@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback
it "should not try to create the container", ->
@createContainer.called.should.equal false
it "should call the callback with an error", ->
@callback.calledWith(new Error()).should.equal true
describe "when a volume does not exist, but sibling-containers are used", ->
beforeEach ->
@fs.stat = sinon.stub().yields(new Error("no such path"))
@Settings.path.sandboxedCompilesHostDir = '/some/path'
@container.start = sinon.stub().yields()
@DockerRunner.startContainer @options, @volumes, @callback
afterEach ->
delete @Settings.path.sandboxedCompilesHostDir
it "should start the container with the given name", ->
@getContainer
.calledWith(@options.name)
.should.equal true
@container.start
.called
.should.equal true
it "should not try to create the container", ->
@createContainer.called.should.equal false
it "should call the callback", ->
@callback.called.should.equal true
@callback.calledWith(new Error()).should.equal false
describe "when the container tries to be created, but already has been (race condition)", ->
describe "waitForContainer", ->
beforeEach ->
@containerId = "container-id"
@timeout = 5000
@container.wait = sinon.stub().yields(null, StatusCode: @statusCode = 42)
@container.kill = sinon.stub().yields()
describe "when the container returns in time", ->
beforeEach ->
@DockerRunner.waitForContainer @containerId, @timeout, @callback
it "should wait for the container", ->
@getContainer
.calledWith(@containerId)
.should.equal true
@container.wait
.called
.should.equal true
it "should call the callback with the exit", ->
@callback
.calledWith(null, @statusCode)
.should.equal true
describe "when the container does not return before the timeout", ->
beforeEach (done) ->
@container.wait = (callback = (error, exitCode) ->) ->
setTimeout () ->
callback(null, StatusCode: 42)
, 100
@timeout = 5
@DockerRunner.waitForContainer @containerId, @timeout, (args...) =>
@callback(args...)
done()
it "should call kill on the container", ->
@getContainer
.calledWith(@containerId)
.should.equal true
@container.kill
.called
.should.equal true
it "should call the callback with an error", ->
error = new Error("container timed out")
error.timedout = true
@callback
.calledWith(error)
.should.equal true
describe "destroyOldContainers", ->
beforeEach (done) ->
oneHourInSeconds = 60 * 60
oneHourInMilliseconds = oneHourInSeconds * 1000
nowInSeconds = Date.now()/1000
@containers = [{
Name: "/project-old-container-name"
Id: "old-container-id"
Created: nowInSeconds - oneHourInSeconds - 100
}, {
Name: "/project-new-container-name"
Id: "new-container-id"
Created: nowInSeconds - oneHourInSeconds + 100
}, {
Name: "/totally-not-a-project-container"
Id: "some-random-id"
Created: nowInSeconds - (2 * oneHourInSeconds )
}]
@DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds
@listContainers.callsArgWith(1, null, @containers)
@DockerRunner.destroyContainer = sinon.stub().callsArg(3)
@DockerRunner.destroyOldContainers (error) =>
@callback(error)
done()
it "should list all containers", ->
@listContainers
.calledWith(all: true)
.should.equal true
it "should destroy old containers", ->
@DockerRunner.destroyContainer
.callCount
.should.equal 1
@DockerRunner.destroyContainer
.calledWith("/project-old-container-name", "old-container-id")
.should.equal true
it "should not destroy new containers", ->
@DockerRunner.destroyContainer
.calledWith("/project-new-container-name", "new-container-id")
.should.equal false
it "should not destroy non-project containers", ->
@DockerRunner.destroyContainer
.calledWith("/totally-not-a-project-container", "some-random-id")
.should.equal false
it "should callback the callback", ->
@callback.called.should.equal true
describe '_destroyContainer', ->
beforeEach ->
@containerId = 'some_id'
@fakeContainer =
remove: sinon.stub().callsArgWith(1, null)
@Docker::getContainer = sinon.stub().returns(@fakeContainer)
it 'should get the container', (done) ->
@DockerRunner._destroyContainer @containerId, false, (err) =>
@Docker::getContainer.callCount.should.equal 1
@Docker::getContainer.calledWith(@containerId).should.equal true
done()
it 'should try to force-destroy the container when shouldForce=true', (done) ->
@DockerRunner._destroyContainer @containerId, true, (err) =>
@fakeContainer.remove.callCount.should.equal 1
@fakeContainer.remove.calledWith({force: true}).should.equal true
done()
it 'should not try to force-destroy the container when shouldForce=false', (done) ->
@DockerRunner._destroyContainer @containerId, false, (err) =>
@fakeContainer.remove.callCount.should.equal 1
@fakeContainer.remove.calledWith({force: false}).should.equal true
done()
it 'should not produce an error', (done) ->
@DockerRunner._destroyContainer @containerId, false, (err) =>
expect(err).to.equal null
done()
describe 'when the container is already gone', ->
beforeEach ->
@fakeError = new Error('woops')
@fakeError.statusCode = 404
@fakeContainer =
remove: sinon.stub().callsArgWith(1, @fakeError)
@Docker::getContainer = sinon.stub().returns(@fakeContainer)
it 'should not produce an error', (done) ->
@DockerRunner._destroyContainer @containerId, false, (err) =>
expect(err).to.equal null
done()
describe 'when container.destroy produces an error', (done) ->
beforeEach ->
@fakeError = new Error('woops')
@fakeError.statusCode = 500
@fakeContainer =
remove: sinon.stub().callsArgWith(1, @fakeError)
@Docker::getContainer = sinon.stub().returns(@fakeContainer)
it 'should produce an error', (done) ->
@DockerRunner._destroyContainer @containerId, false, (err) =>
expect(err).to.not.equal null
expect(err).to.equal @fakeError
done()
describe 'kill', ->
beforeEach ->
@containerId = 'some_id'
@fakeContainer =
kill: sinon.stub().callsArgWith(0, null)
@Docker::getContainer = sinon.stub().returns(@fakeContainer)
it 'should get the container', (done) ->
@DockerRunner.kill @containerId, (err) =>
@Docker::getContainer.callCount.should.equal 1
@Docker::getContainer.calledWith(@containerId).should.equal true
done()
it 'should try to force-destroy the container', (done) ->
@DockerRunner.kill @containerId, (err) =>
@fakeContainer.kill.callCount.should.equal 1
done()
it 'should not produce an error', (done) ->
@DockerRunner.kill @containerId, (err) =>
expect(err).to.equal undefined
done()
describe 'when the container is not actually running', ->
beforeEach ->
@fakeError = new Error('woops')
@fakeError.statusCode = 500
@fakeError.message = 'Cannot kill container <whatever> is not running'
@fakeContainer =
kill: sinon.stub().callsArgWith(0, @fakeError)
@Docker::getContainer = sinon.stub().returns(@fakeContainer)
it 'should not produce an error', (done) ->
@DockerRunner.kill @containerId, (err) =>
expect(err).to.equal undefined
done()
describe 'when container.kill produces a legitimate error', (done) ->
beforeEach ->
@fakeError = new Error('woops')
@fakeError.statusCode = 500
@fakeError.message = 'Totally legitimate reason to throw an error'
@fakeContainer =
kill: sinon.stub().callsArgWith(0, @fakeError)
@Docker::getContainer = sinon.stub().returns(@fakeContainer)
it 'should produce an error', (done) ->
@DockerRunner.kill @containerId, (err) =>
expect(err).to.not.equal undefined
expect(err).to.equal @fakeError
done()
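
Note: the deleted suite above covered the sandboxed-compile path: containers are named from a fingerprint of their options, a failed run is retried once after destroying the stale container, and when `path.sandboxedCompilesHostDir` is set the compile-directory bind mount is rewritten so a sibling container mounts the host path rather than the CLSI's own path. A sketch of that rewrite (names follow the tests; logic is illustrative):

Path = require "path"

volumesFor = (directory, settings) ->
  if settings.path.sandboxedCompilesHostDir?
    # the compile dir lives on the host; map the host path into /compile
    hostDir = Path.join(settings.path.sandboxedCompilesHostDir, Path.basename(directory))
    "#{hostDir}": "/compile"
  else
    "#{directory}": "/compile"

console.log volumesFor("/var/lib/sharelatex/data/compiles/xyz",
  path: sandboxedCompilesHostDir: "/some/host/dir/compiles")
# => { '/some/host/dir/compiles/xyz': '/compile' }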


@@ -59,21 +59,3 @@ describe "LatexRunner", ->
mainFile = command.slice(-1)[0]
mainFile.should.equal "$COMPILE_DIR/main-file.tex"
-describe "with a flags option", ->
-beforeEach ->
-@LatexRunner.runLatex @project_id,
-directory: @directory
-mainFile: @mainFile
-compiler: @compiler
-image: @image
-timeout: @timeout = 42000
-flags: ["-file-line-error", "-halt-on-error"]
-@callback
-it "should include the flags in the command", ->
-command = @CommandRunner.run.args[0][1]
-flags = command.filter (arg) ->
-(arg == "-file-line-error") || (arg == "-halt-on-error")
-flags.length.should.equal 2
-flags[0].should.equal "-file-line-error"
-flags[1].should.equal "-halt-on-error"
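
Note: the removed case exercised the `flags` option, which appended extra engine arguments to the latex command line. A sketch of splicing optional flags into a command array (the command shape is illustrative, not the runner's exact one):

buildCommand = (mainFile, flags = []) ->
  ["latexmk", "-cd", "-pdf"].concat(flags).concat(["$COMPILE_DIR/#{mainFile}"])

console.log buildCommand("main-file.tex", ["-file-line-error", "-halt-on-error"])
# => [ 'latexmk', '-cd', '-pdf', '-file-line-error', '-halt-on-error', '$COMPILE_DIR/main-file.tex' ]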


@@ -5,14 +5,11 @@ modulePath = require('path').join __dirname, '../../../app/js/LockManager'
Path = require "path" Path = require "path"
Errors = require "../../../app/js/Errors" Errors = require "../../../app/js/Errors"
describe "DockerLockManager", -> describe "LockManager", ->
beforeEach -> beforeEach ->
@LockManager = SandboxedModule.require modulePath, requires: @LockManager = SandboxedModule.require modulePath, requires:
"settings-sharelatex": {} "settings-sharelatex": {}
"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err:-> } "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
"fs":
lstat:sinon.stub().callsArgWith(1)
readdir: sinon.stub().callsArgWith(1)
"lockfile": @Lockfile = {} "lockfile": @Lockfile = {}
@lockFile = "/local/compile/directory/.project-lock" @lockFile = "/local/compile/directory/.project-lock"


@@ -1,7 +1,6 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
-expect = require('chai').expect
modulePath = require('path').join __dirname, '../../../app/js/RequestParser'
tk = require("timekeeper")
@@ -17,13 +16,11 @@ describe "RequestParser", ->
compile:
token: "token-123"
options:
-imageName: "basicImageName/here:2017-1"
compiler: "pdflatex"
timeout: 42
resources: []
-@RequestParser = SandboxedModule.require modulePath, requires:
-"settings-sharelatex": @settings = {}
+@RequestParser = SandboxedModule.require modulePath
afterEach ->
tk.reset()
@@ -56,32 +53,10 @@ describe "RequestParser", ->
beforeEach ->
delete @validRequest.compile.options.compiler
@RequestParser.parse @validRequest, (error, @data) =>
it "should set the compiler to pdflatex by default", ->
@data.compiler.should.equal "pdflatex"
-describe "with imageName set", ->
-beforeEach ->
-@RequestParser.parse @validRequest, (error, @data) =>
-it "should set the imageName", ->
-@data.imageName.should.equal "basicImageName/here:2017-1"
-describe "with flags set", ->
-beforeEach ->
-@validRequest.compile.options.flags = ["-file-line-error"]
-@RequestParser.parse @validRequest, (error, @data) =>
-it "should set the flags attribute", ->
-expect(@data.flags).to.deep.equal ["-file-line-error"]
-describe "with flags not specified", ->
-beforeEach ->
-@RequestParser.parse @validRequest, (error, @data) =>
-it "it should have an empty flags list", ->
-expect(@data.flags).to.deep.equal []
describe "without a timeout specified", ->
beforeEach ->
delete @validRequest.compile.options.timeout
@@ -104,7 +79,7 @@ describe "RequestParser", ->
it "should set the timeout (in milliseconds)", ->
@data.timeout.should.equal @validRequest.compile.options.timeout * 1000
describe "with a resource without a path", ->
beforeEach ->
delete @validResource.path
@@ -191,7 +166,7 @@ describe "RequestParser", ->
it "should return the url in the parsed response", ->
@data.resources[0].url.should.equal @url
describe "with a resource with a content attribute", ->
beforeEach ->
@validResource.content = @content = "Hello world"
@@ -201,7 +176,7 @@ describe "RequestParser", ->
it "should return the content in the parsed response", ->
@data.resources[0].content.should.equal @content
describe "without a root resource path", ->
beforeEach ->
delete @validRequest.compile.rootResourcePath
@@ -241,13 +216,13 @@ describe "RequestParser", ->
}
@RequestParser.parse @validRequest, @callback
@data = @callback.args[0][1]
it "should return the escaped resource", ->
@data.rootResourcePath.should.equal @goodPath
it "should also escape the resource path", ->
@data.resources[0].path.should.equal @goodPath
describe "with a root resource path that has a relative path", ->
beforeEach ->
@validRequest.compile.rootResourcePath = "foo/../../bar.tex"
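
Note: the removed cases parsed two optional compile options, `imageName` (which TeX Live image to run) and `flags` (defaulting to an empty list). A sketch of option parsing with those defaults (hedged; the parser's real shape, and the 60-second fallback, are assumptions for illustration):

parseOptions = (options = {}) ->
  compiler: options.compiler ? "pdflatex"
  timeout: (options.timeout ? 60) * 1000   # seconds in, milliseconds out
  imageName: options.imageName             # left undefined when not supplied
  flags: options.flags ? []                # always an array

console.log parseOptions(compiler: "pdflatex", timeout: 42)
# => { compiler: 'pdflatex', timeout: 42000, imageName: undefined, flags: [] }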


@@ -134,30 +134,6 @@ describe "ResourceWriter", ->
type: "aux" type: "aux"
}, { }, {
path: "cache/_chunk1" path: "cache/_chunk1"
},{
path: "figures/image-eps-converted-to.pdf"
type: "pdf"
},{
path: "foo/main-figure0.md5"
type: "md5"
}, {
path: "foo/main-figure0.dpth"
type: "dpth"
}, {
path: "foo/main-figure0.pdf"
type: "pdf"
}, {
path: "_minted-main/default-pyg-prefix.pygstyle"
type: "pygstyle"
}, {
path: "_minted-main/default.pygstyle"
type: "pygstyle"
}, {
path: "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex"
type: "pygtex"
}, {
path: "_markdown_main/30893013dec5d869a415610079774c2f.md.tex"
type: "tex"
}] }]
@resources = "mock-resources" @resources = "mock-resources"
@OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files) @OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files)
@@ -189,46 +165,6 @@ describe "ResourceWriter", ->
.calledWith(path.join(@basePath, "cache/_chunk1"))
.should.equal false
-it "should not delete the epstopdf converted files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "figures/image-eps-converted-to.pdf"))
-.should.equal false
-it "should not delete the tikz md5 files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "foo/main-figure0.md5"))
-.should.equal false
-it "should not delete the tikz dpth files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "foo/main-figure0.dpth"))
-.should.equal false
-it "should not delete the tikz pdf files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "foo/main-figure0.pdf"))
-.should.equal false
-it "should not delete the minted pygstyle files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "_minted-main/default-pyg-prefix.pygstyle"))
-.should.equal false
-it "should not delete the minted default pygstyle files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "_minted-main/default.pygstyle"))
-.should.equal false
-it "should not delete the minted default pygtex files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex"))
-.should.equal false
-it "should not delete the markdown md.tex files", ->
-@ResourceWriter._deleteFileIfNotDirectory
-.calledWith(path.join(@basePath, "_markdown_main/30893013dec5d869a415610079774c2f.md.tex"))
-.should.equal false
it "should call the callback", ->
@callback.called.should.equal true


@@ -65,22 +65,6 @@ describe 'TikzManager', ->
@callback.calledWithExactly(null, false)
.should.equal true
-describe "and the main file contains \\usepackage{pstool}", ->
-beforeEach ->
-@SafeReader.readFile = sinon.stub()
-.withArgs("#{@compileDir}/#{@mainFile}")
-.callsArgWith(3, null, "hello \\usepackage[random-options]{pstool}")
-@TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback
-it "should look at the file on disk", ->
-@SafeReader.readFile
-.calledWith("#{@compileDir}/#{@mainFile}")
-.should.equal true
-it "should call the callback with true ", ->
-@callback.calledWithExactly(null, true)
-.should.equal true
describe "injectOutputFile", ->
beforeEach ->
@rootDir = "/mock"


@@ -7,18 +7,17 @@ EventEmitter = require("events").EventEmitter
describe "UrlFetcher", -> describe "UrlFetcher", ->
beforeEach -> beforeEach ->
@callback = sinon.stub() @callback = sinon.stub()
@url = "https://www.example.com/file/here?query=string" @url = "www.example.com/file"
@UrlFetcher = SandboxedModule.require modulePath, requires: @UrlFetcher = SandboxedModule.require modulePath, requires:
request: defaults: @defaults = sinon.stub().returns(@request = {}) request: defaults: @defaults = sinon.stub().returns(@request = {})
fs: @fs = {} fs: @fs = {}
"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() } "logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
"settings-sharelatex": @settings = {}
it "should turn off the cookie jar in request", -> it "should turn off the cookie jar in request", ->
@defaults.calledWith(jar: false) @defaults.calledWith(jar: false)
.should.equal true .should.equal true
describe "rewrite url domain if filestoreDomainOveride is set", -> describe "_pipeUrlToFile", ->
beforeEach -> beforeEach ->
@path = "/path/to/file/on/disk" @path = "/path/to/file/on/disk"
@request.get = sinon.stub().returns(@urlStream = new EventEmitter) @request.get = sinon.stub().returns(@urlStream = new EventEmitter)
@@ -27,54 +26,21 @@ describe "UrlFetcher", ->
@urlStream.resume = sinon.stub() @urlStream.resume = sinon.stub()
@fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter) @fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter)
@fs.unlink = (file, callback) -> callback() @fs.unlink = (file, callback) -> callback()
@UrlFetcher.pipeUrlToFile(@url, @path, @callback)
it "should use the normal domain when override not set", (done)-> it "should request the URL", ->
@UrlFetcher.pipeUrlToFile @url, @path, => @request.get
@request.get.args[0][0].url.should.equal @url .calledWith(sinon.match {"url": @url})
done() .should.equal true
@res = statusCode: 200
@urlStream.emit "response", @res
@urlStream.emit "end"
@fileStream.emit "finish"
it "should use override domain when filestoreDomainOveride is set", (done)->
@settings.filestoreDomainOveride = "192.11.11.11"
@UrlFetcher.pipeUrlToFile @url, @path, =>
@request.get.args[0][0].url.should.equal "192.11.11.11/file/here?query=string"
done()
@res = statusCode: 200
@urlStream.emit "response", @res
@urlStream.emit "end"
@fileStream.emit "finish"
describe "pipeUrlToFile", ->
beforeEach (done)->
@path = "/path/to/file/on/disk"
@request.get = sinon.stub().returns(@urlStream = new EventEmitter)
@urlStream.pipe = sinon.stub()
@urlStream.pause = sinon.stub()
@urlStream.resume = sinon.stub()
@fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter)
@fs.unlink = (file, callback) -> callback()
done()
describe "successfully", -> describe "successfully", ->
beforeEach (done)-> beforeEach ->
@UrlFetcher.pipeUrlToFile @url, @path, =>
@callback()
done()
@res = statusCode: 200 @res = statusCode: 200
@urlStream.emit "response", @res @urlStream.emit "response", @res
@urlStream.emit "end" @urlStream.emit "end"
@fileStream.emit "finish" @fileStream.emit "finish"
it "should request the URL", ->
@request.get
.calledWith(sinon.match {"url": @url})
.should.equal true
it "should open the file for writing", -> it "should open the file for writing", ->
@fs.createWriteStream @fs.createWriteStream
.calledWith(@path) .calledWith(@path)
@@ -89,10 +55,7 @@ describe "UrlFetcher", ->
@callback.called.should.equal true @callback.called.should.equal true
describe "with non success status code", -> describe "with non success status code", ->
beforeEach (done)-> beforeEach ->
@UrlFetcher.pipeUrlToFile @url, @path, (err)=>
@callback(err)
done()
@res = statusCode: 404 @res = statusCode: 404
@urlStream.emit "response", @res @urlStream.emit "response", @res
@urlStream.emit "end" @urlStream.emit "end"
@@ -103,10 +66,7 @@ describe "UrlFetcher", ->
.should.equal true .should.equal true
describe "with error", -> describe "with error", ->
beforeEach (done)-> beforeEach ->
@UrlFetcher.pipeUrlToFile @url, @path, (err)=>
@callback(err)
done()
@urlStream.emit "error", @error = new Error("something went wrong") @urlStream.emit "error", @error = new Error("something went wrong")
it "should call the callback with the error", -> it "should call the callback with the error", ->